Diffstat (limited to 'pkgtools')
-rw-r--r--  pkgtools/pkglint/Makefile | 4
-rw-r--r--  pkgtools/pkglint/PLIST | 8
-rw-r--r--  pkgtools/pkglint/files/autofix.go | 16
-rw-r--r--  pkgtools/pkglint/files/autofix_test.go | 109
-rw-r--r--  pkgtools/pkglint/files/check_test.go | 70
-rw-r--r--  pkgtools/pkglint/files/getopt/getopt_test.go | 4
-rw-r--r--  pkgtools/pkglint/files/histogram/histogram_test.go | 4
-rw-r--r--  pkgtools/pkglint/files/intqa/ideas.go | 3
-rw-r--r--  pkgtools/pkglint/files/intqa/qa.go (renamed from pkgtools/pkglint/files/intqa/testnames.go) | 177
-rw-r--r--  pkgtools/pkglint/files/intqa/qa_test.go (renamed from pkgtools/pkglint/files/intqa/testnames_test.go) | 268
-rw-r--r--  pkgtools/pkglint/files/licenses.go | 3
-rw-r--r--  pkgtools/pkglint/files/licenses/licenses_test.go | 4
-rw-r--r--  pkgtools/pkglint/files/licenses_test.go | 24
-rw-r--r--  pkgtools/pkglint/files/line.go | 6
-rw-r--r--  pkgtools/pkglint/files/line_test.go | 18
-rw-r--r--  pkgtools/pkglint/files/logging.go | 11
-rw-r--r--  pkgtools/pkglint/files/logging_test.go | 12
-rw-r--r--  pkgtools/pkglint/files/mklexer.go | 414
-rw-r--r--  pkgtools/pkglint/files/mklexer_test.go | 750
-rw-r--r--  pkgtools/pkglint/files/mkline.go | 18
-rw-r--r--  pkgtools/pkglint/files/mkline_test.go | 4
-rw-r--r--  pkgtools/pkglint/files/mklinechecker.go | 16
-rw-r--r--  pkgtools/pkglint/files/mklinechecker_test.go | 23
-rw-r--r--  pkgtools/pkglint/files/mklineparser.go | 3
-rw-r--r--  pkgtools/pkglint/files/mklines.go | 2
-rw-r--r--  pkgtools/pkglint/files/mkparser.go | 421
-rw-r--r--  pkgtools/pkglint/files/mkparser_test.go | 735
-rw-r--r--  pkgtools/pkglint/files/mkshparser.go | 4
-rw-r--r--  pkgtools/pkglint/files/mkshparser_test.go | 77
-rw-r--r--  pkgtools/pkglint/files/mkshtypes_test.go | 11
-rw-r--r--  pkgtools/pkglint/files/mkshwalker_test.go | 9
-rw-r--r--  pkgtools/pkglint/files/mktypes.go | 2
-rw-r--r--  pkgtools/pkglint/files/mktypes_test.go | 12
-rw-r--r--  pkgtools/pkglint/files/package.go | 46
-rw-r--r--  pkgtools/pkglint/files/package_test.go | 52
-rw-r--r--  pkgtools/pkglint/files/path.go | 76
-rw-r--r--  pkgtools/pkglint/files/path_test.go | 92
-rw-r--r--  pkgtools/pkglint/files/pkglint.1 | 4
-rw-r--r--  pkgtools/pkglint/files/pkglint.go | 32
-rw-r--r--  pkgtools/pkglint/files/pkglint_test.go | 43
-rw-r--r--  pkgtools/pkglint/files/pkgsrc.go | 97
-rw-r--r--  pkgtools/pkglint/files/pkgsrc_test.go | 157
-rw-r--r--  pkgtools/pkglint/files/pkgver/vercmp_test.go | 4
-rw-r--r--  pkgtools/pkglint/files/plist_test.go | 2
-rw-r--r--  pkgtools/pkglint/files/shell.go | 2
-rw-r--r--  pkgtools/pkglint/files/shell_test.go | 48
-rw-r--r--  pkgtools/pkglint/files/shtokenizer.go | 13
-rw-r--r--  pkgtools/pkglint/files/shtokenizer_test.go | 21
-rw-r--r--  pkgtools/pkglint/files/shtypes_test.go | 6
-rw-r--r--  pkgtools/pkglint/files/substcontext.go | 4
-rw-r--r--  pkgtools/pkglint/files/testnames_test.go | 5
-rw-r--r--  pkgtools/pkglint/files/textproc/lexer_test.go | 4
-rwxr-xr-x  pkgtools/pkglint/files/trace/tracing_test.go | 4
-rw-r--r--  pkgtools/pkglint/files/util.go | 66
-rw-r--r--  pkgtools/pkglint/files/util_test.go | 59
-rw-r--r--  pkgtools/pkglint/files/varalignblock.go | 2
-rw-r--r--  pkgtools/pkglint/files/vardefs.go | 16
-rw-r--r--  pkgtools/pkglint/files/vardefs_test.go | 59
-rw-r--r--  pkgtools/pkglint/files/vartypecheck.go | 7
-rw-r--r--  pkgtools/pkglint/files/vartypecheck_test.go | 7
60 files changed, 2459 insertions, 1711 deletions
diff --git a/pkgtools/pkglint/Makefile b/pkgtools/pkglint/Makefile
index c3f3bce7964..e1135530da9 100644
--- a/pkgtools/pkglint/Makefile
+++ b/pkgtools/pkglint/Makefile
@@ -1,6 +1,6 @@
-# $NetBSD: Makefile,v 1.610 2019/11/23 23:35:55 rillig Exp $
+# $NetBSD: Makefile,v 1.611 2019/11/27 22:10:06 rillig Exp $
-PKGNAME= pkglint-19.3.10
+PKGNAME= pkglint-19.3.11
CATEGORIES= pkgtools
DISTNAME= tools
MASTER_SITES= ${MASTER_SITE_GITHUB:=golang/}
diff --git a/pkgtools/pkglint/PLIST b/pkgtools/pkglint/PLIST
index c3dc5211938..a3d4e9d2983 100644
--- a/pkgtools/pkglint/PLIST
+++ b/pkgtools/pkglint/PLIST
@@ -1,4 +1,4 @@
-@comment $NetBSD: PLIST,v 1.17 2019/11/23 23:35:55 rillig Exp $
+@comment $NetBSD: PLIST,v 1.18 2019/11/27 22:10:06 rillig Exp $
bin/pkglint
gopkg/pkg/${GO_PLATFORM}/netbsd.org/pkglint.a
gopkg/pkg/${GO_PLATFORM}/netbsd.org/pkglint/getopt.a
@@ -30,8 +30,8 @@ gopkg/src/netbsd.org/pkglint/getopt/getopt_test.go
gopkg/src/netbsd.org/pkglint/histogram/histogram.go
gopkg/src/netbsd.org/pkglint/histogram/histogram_test.go
gopkg/src/netbsd.org/pkglint/intqa/ideas.go
-gopkg/src/netbsd.org/pkglint/intqa/testnames.go
-gopkg/src/netbsd.org/pkglint/intqa/testnames_test.go
+gopkg/src/netbsd.org/pkglint/intqa/qa.go
+gopkg/src/netbsd.org/pkglint/intqa/qa_test.go
gopkg/src/netbsd.org/pkglint/licenses.go
gopkg/src/netbsd.org/pkglint/licenses/licenses.go
gopkg/src/netbsd.org/pkglint/licenses/licenses.y
@@ -49,6 +49,8 @@ gopkg/src/netbsd.org/pkglint/lines.go
gopkg/src/netbsd.org/pkglint/lines_test.go
gopkg/src/netbsd.org/pkglint/logging.go
gopkg/src/netbsd.org/pkglint/logging_test.go
+gopkg/src/netbsd.org/pkglint/mklexer.go
+gopkg/src/netbsd.org/pkglint/mklexer_test.go
gopkg/src/netbsd.org/pkglint/mkline.go
gopkg/src/netbsd.org/pkglint/mkline_test.go
gopkg/src/netbsd.org/pkglint/mklinechecker.go
diff --git a/pkgtools/pkglint/files/autofix.go b/pkgtools/pkglint/files/autofix.go
index 0ab132f3e28..542dd82647d 100644
--- a/pkgtools/pkglint/files/autofix.go
+++ b/pkgtools/pkglint/files/autofix.go
@@ -29,7 +29,6 @@ type autofixShortTerm struct {
diagFormat string // Is logged only if it couldn't be fixed automatically
diagArgs []interface{} //
explanation []string // Is printed together with the diagnostic
- anyway bool // Print the diagnostic even if it cannot be autofixed
}
type autofixAction struct {
@@ -297,15 +296,6 @@ func (fix *Autofix) Describef(lineno int, format string, args ...interface{}) {
fix.actions = append(fix.actions, autofixAction{sprintf(format, args...), lineno})
}
-// Anyway has the effect of showing the diagnostic even when nothing can
-// be fixed automatically.
-//
-// As usual, the diagnostic is only shown if neither --show-autofix nor
-// --autofix mode is given.
-func (fix *Autofix) Anyway() {
- fix.anyway = !G.Logger.IsAutofix()
-}
-
// Apply does the actual work.
// Depending on the pkglint mode, it either:
//
@@ -330,7 +320,7 @@ func (fix *Autofix) Apply() {
fix.autofixShortTerm = autofixShortTerm{}
}
- if !(G.Logger.Relevant(fix.diagFormat) && (len(fix.actions) > 0 || fix.anyway)) {
+ if !(G.Logger.Relevant(fix.diagFormat) && (len(fix.actions) > 0 || !G.Logger.IsAutofix())) {
reset()
return
}
@@ -488,12 +478,12 @@ func SaveAutofixChanges(lines *Lines) (autofixed bool) {
}
err := tmpName.WriteString(text.String())
if err != nil {
- G.Logger.Errorf(tmpName, "Cannot write: %s", err)
+ G.Logger.TechErrorf(tmpName, "Cannot write: %s", err)
continue
}
err = tmpName.Rename(filename)
if err != nil {
- G.Logger.Errorf(tmpName, "Cannot overwrite with autofixed content: %s", err)
+ G.Logger.TechErrorf(tmpName, "Cannot overwrite with autofixed content: %s", err)
continue
}
autofixed = true
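
With Anyway() removed, the decision whether to show an unfixable diagnostic no longer rests with the caller: Apply() now logs the diagnostic in default mode (neither --autofix nor --show-autofix) even when the replacement text is not found. A minimal sketch of the resulting call sequence; the line value and the diagnostic text are only for illustration:

    fix := line.Autofix()
    fix.Warnf("Should be appended instead of assigned.")
    fix.Replace("=", "+=")
    // No fix.Anyway() needed anymore: in default mode the warning is
    // logged even when "=" is not found; with --autofix or --show-autofix,
    // only autofixes that actually change something are logged.
    fix.Apply()
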
diff --git a/pkgtools/pkglint/files/autofix_test.go b/pkgtools/pkglint/files/autofix_test.go
index cd292c36350..4e76e93d52e 100644
--- a/pkgtools/pkglint/files/autofix_test.go
+++ b/pkgtools/pkglint/files/autofix_test.go
@@ -140,7 +140,7 @@ func (s *Suite) Test_Autofix__lonely_source(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("-Wall", "--source")
- G.Logger.Opts.LogVerbose = false // For realistic conditions; otherwise all diagnostics are logged.
+ G.Logger.verbose = false // For realistic conditions; otherwise all diagnostics are logged.
t.SetUpPackage("x11/xorg-cf-files",
".include \"../../x11/xorgproto/buildlink3.mk\"")
@@ -173,7 +173,7 @@ func (s *Suite) Test_Autofix__lonely_source_2(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("-Wall", "--source", "--explain")
- G.Logger.Opts.LogVerbose = false // For realistic conditions; otherwise all diagnostics are logged.
+ G.Logger.verbose = false // For realistic conditions; otherwise all diagnostics are logged.
t.SetUpPackage("print/tex-bibtex8",
"MAKE_FLAGS+=\tCFLAGS=${CFLAGS.${PKGSRC_COMPILER}}")
@@ -245,7 +245,8 @@ func (s *Suite) Test_Autofix__show_autofix_and_source_continuation_line(c *check
// shown even when they cannot be autofixed.
//
// This is typical when an autofix is provided for simple scenarios,
-// but the code actually found is a little more complicated.
+// but the code actually found is a little more complicated, like needing
+// special escaping for some of the characters or containing linebreaks.
func (s *Suite) Test_Autofix__show_unfixable_diagnostics_in_default_mode(c *check.C) {
t := s.Init(c)
@@ -258,17 +259,11 @@ func (s *Suite) Test_Autofix__show_unfixable_diagnostics_in_default_mode(c *chec
lines.Lines[0].Warnf("This warning is shown since the --show-autofix option is not given.")
fix := lines.Lines[1].Autofix()
- fix.Warnf("This warning cannot be fixed and is therefore not shown.")
+ fix.Warnf("This warning cannot be fixed, nevertheless it is shown (1).")
fix.Replace("XXX", "TODO")
fix.Apply()
- fix.Warnf("This warning cannot be fixed automatically but should be shown anyway.")
- fix.Replace("XXX", "TODO")
- fix.Anyway()
- fix.Apply()
-
- // If this warning should ever appear it is probably because fix.anyway is not reset properly.
- fix.Warnf("This warning cannot be fixed and is therefore not shown.")
+ fix.Warnf("This warning cannot be fixed, nevertheless it is shown (2).")
fix.Replace("XXX", "TODO")
fix.Apply()
@@ -279,7 +274,8 @@ func (s *Suite) Test_Autofix__show_unfixable_diagnostics_in_default_mode(c *chec
"WARN: Makefile:1: This warning is shown since the --show-autofix option is not given.",
"",
">\tline2",
- "WARN: Makefile:2: This warning cannot be fixed automatically but should be shown anyway.",
+ "WARN: Makefile:2: This warning cannot be fixed, nevertheless it is shown (1).",
+ "WARN: Makefile:2: This warning cannot be fixed, nevertheless it is shown (2).",
"",
">\tline3",
"WARN: Makefile:3: This warning is also shown.")
@@ -321,7 +317,7 @@ func (s *Suite) Test_Autofix__suppress_unfixable_warnings_with_show_autofix(c *c
"+\tTODO")
}
-// If an Autofix doesn't do anything, it must not log any diagnostics.
+// If an Autofix doesn't do anything, it nevertheless logs the diagnostics.
func (s *Suite) Test_Autofix__noop_replace(c *check.C) {
t := s.Init(c)
@@ -332,8 +328,14 @@ func (s *Suite) Test_Autofix__noop_replace(c *check.C) {
fix.ReplaceRegex(`\b[A-Z]{3,}\b`, "---censored---", -1)
fix.Apply()
- // No output since there was no all-uppercase word in the text.
- t.CheckOutputEmpty()
+ // This warning is wrong. This test therefore demonstrates that each
+ // autofix must be properly guarded to only apply when it actually
+ // does something.
+ //
+ // As of November 2019 there is no Rollback method since it was not
+ // needed yet.
+ t.CheckOutputLines(
+ "WARN: Makefile:14: All-uppercase words should not be used at all.")
}
func (s *Suite) Test_Autofix_Warnf__duplicate(c *check.C) {
@@ -550,9 +552,7 @@ func (s *Suite) Test_Autofix_ReplaceAt(c *check.C) {
t := s.Init(c)
lines := func(lines ...string) []string { return lines }
- diagnostics := lines
- autofixes := lines
- test := func(texts []string, rawIndex int, column int, from, to string, diagnostics []string, autofixes []string) {
+ test := func(texts []string, rawIndex int, column int, from, to string, diagnostics ...string) {
mainPart := func() {
mklines := t.NewMkLines("filename.mk", texts...)
@@ -562,17 +562,10 @@ func (s *Suite) Test_Autofix_ReplaceAt(c *check.C) {
fix := mkline.Autofix()
fix.Notef("Should be appended instead of assigned.")
fix.ReplaceAt(rawIndex, column, from, to)
- fix.Anyway()
fix.Apply()
}
- t.SetUpCommandLine("-Wall")
- mainPart()
- t.CheckOutput(diagnostics)
-
- t.SetUpCommandLine("-Wall", "--autofix")
- mainPart()
- t.CheckOutput(autofixes)
+ t.ExpectDiagnosticsAutofix(mainPart, diagnostics...)
}
test(
@@ -580,10 +573,9 @@ func (s *Suite) Test_Autofix_ReplaceAt(c *check.C) {
"VAR=value1 \\",
"\tvalue2"),
0, 3, "=", "+=",
- diagnostics(
- "NOTE: filename.mk:1: Should be appended instead of assigned."),
- autofixes(
- "AUTOFIX: filename.mk:1: Replacing \"=\" with \"+=\"."))
+
+ "NOTE: filename.mk:1: Should be appended instead of assigned.",
+ "AUTOFIX: filename.mk:1: Replacing \"=\" with \"+=\".")
// If the text at the precisely given position does not match,
// the note is still printed because of the fix.Anyway(), but
@@ -593,21 +585,19 @@ func (s *Suite) Test_Autofix_ReplaceAt(c *check.C) {
"VAR=value1 \\",
"\tvalue2"),
0, 3, "?", "+=",
- diagnostics(
- "NOTE: filename.mk:1--2: Should be appended instead of assigned."),
- autofixes(
- nil...))
+
+ "NOTE: filename.mk:1--2: Should be appended instead of assigned.")
// Getting the line number wrong is a strange programming error, and
// there does not need to be any code checking for this in the main code.
t.ExpectPanicMatches(
- func() { test(lines("VAR=value"), 10, 3, "from", "to", nil, nil) },
+ func() { test(lines("VAR=value"), 10, 3, "from", "to", nil...) },
`runtime error: index out of range.*`)
// It is a programming error to replace a string with itself, since that
// would produce confusing diagnostics.
t.ExpectAssert(
- func() { test(lines("VAR=value"), 0, 4, "value", "value", nil, nil) })
+ func() { test(lines("VAR=value"), 0, 4, "value", "value", nil...) })
// Getting the column number wrong may happen when a previous replacement
// has made the string shorter than before, therefore no panic in this case.
@@ -616,10 +606,8 @@ func (s *Suite) Test_Autofix_ReplaceAt(c *check.C) {
"VAR=value1 \\",
"\tvalue2"),
0, 20, "?", "+=",
- diagnostics(
- "NOTE: filename.mk:1--2: Should be appended instead of assigned."),
- autofixes(
- nil...))
+
+ "NOTE: filename.mk:1--2: Should be appended instead of assigned.")
}
func (s *Suite) Test_Autofix_ReplaceRegex__show_autofix(c *check.C) {
@@ -860,7 +848,7 @@ func (s *Suite) Test_Autofix_Custom__in_memory(c *check.C) {
// With the default command line options, this warning is printed.
// With the --show-autofix option this warning is NOT printed since it
// cannot be fixed automatically.
-func (s *Suite) Test_Autofix_Anyway__options(c *check.C) {
+func (s *Suite) Test_Autofix_Apply__show_autofix_option(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--show-autofix")
@@ -870,7 +858,6 @@ func (s *Suite) Test_Autofix_Anyway__options(c *check.C) {
fix.Warnf("This autofix doesn't match.")
fix.Replace("doesn't match", "")
- fix.Anyway()
fix.Apply()
t.CheckOutputEmpty()
@@ -879,14 +866,13 @@ func (s *Suite) Test_Autofix_Anyway__options(c *check.C) {
fix.Warnf("This autofix doesn't match.")
fix.Replace("doesn't match", "")
- fix.Anyway()
fix.Apply()
t.CheckOutputLines(
"WARN: filename:3: This autofix doesn't match.")
}
-func (s *Suite) Test_Autofix_Anyway__autofix_option(c *check.C) {
+func (s *Suite) Test_Autofix_Apply__autofix_option(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--autofix")
@@ -895,17 +881,13 @@ func (s *Suite) Test_Autofix_Anyway__autofix_option(c *check.C) {
fix := line.Autofix()
fix.Notef("This line is quite short.")
fix.Replace("not found", "needle")
- fix.Anyway()
fix.Apply()
- // The replacement text is not found, therefore the note should not be logged.
- // Because of fix.Anyway, the note should be logged anyway.
- // But because of the --autofix option, the note should not be logged.
- // Therefore, in the end nothing is shown in this case.
+ // Because of the --autofix option, the note is not logged.
t.CheckOutputEmpty()
}
-func (s *Suite) Test_Autofix_Anyway__autofix_and_show_autofix_options(c *check.C) {
+func (s *Suite) Test_Autofix_Apply__autofix_and_show_autofix_options_no_match(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--autofix", "--show-autofix")
@@ -914,27 +896,11 @@ func (s *Suite) Test_Autofix_Anyway__autofix_and_show_autofix_options(c *check.C
fix := line.Autofix()
fix.Notef("This line is quite short.")
fix.Replace("not found", "needle")
- fix.Anyway()
fix.Apply()
- // The text to be replaced is not found. Because nothing is fixed here,
- // there's no need to log anything.
- //
- // But fix.Anyway is set, therefore the diagnostic should be logged even
- // though it cannot be fixed automatically. This comes handy in situations
- // where simple cases can be fixed automatically and more complex cases
- // (often involving special characters that need to be escaped properly)
- // should nevertheless result in a diagnostics.
- //
- // The --autofix option is set, which means that the diagnostics don't
- // get logged, only the actual fixes do.
- //
- // But then there's also the --show-autofix option, which logs the
- // corresponding diagnostic for each autofix that actually changes
- // something. But this autofix doesn't change anything, therefore even
- // the --show-autofix doesn't have an effect.
- //
- // Therefore, in the end nothing is logged in this case.
+ // Since at least one of the --show-autofix or --autofix options is given,
+ // only those autofixes that actually change something are logged.
+ // This one doesn't find the "not found" text, therefore it is not logged.
t.CheckOutputEmpty()
}
@@ -1045,7 +1011,6 @@ func (s *Suite) Test_Autofix_Apply__anyway_error(c *check.C) {
fix := mklines.mklines[1].Autofix()
fix.Errorf("From must be To.")
fix.Replace("from", "to")
- fix.Anyway()
fix.Apply()
mklines.SaveAutofixChanges()
@@ -1064,7 +1029,6 @@ func (s *Suite) Test_Autofix_Apply__source_autofix_no_change(c *check.C) {
fix := lines.Lines[0].Autofix()
fix.Notef("Word should be replaced, but pkglint is not sure which one.")
fix.Replace("word", "replacement")
- fix.Anyway()
fix.Apply()
lines.SaveAutofixChanges()
@@ -1169,7 +1133,8 @@ func (s *Suite) Test_Autofix_setDiag__no_testing_mode(c *check.C) {
fix.Replace("from", "to")
fix.Apply()
- t.CheckOutputEmpty()
+ t.CheckOutputLines(
+ "NOTE: file.mk:123: Note.")
}
func (s *Suite) Test_Autofix_setDiag__bad_call_sequence(c *check.C) {
diff --git a/pkgtools/pkglint/files/check_test.go b/pkgtools/pkglint/files/check_test.go
index 6f000552703..4d717e9bae7 100644
--- a/pkgtools/pkglint/files/check_test.go
+++ b/pkgtools/pkglint/files/check_test.go
@@ -88,23 +88,10 @@ func (s *Suite) TearDownTest(c *check.C) {
assertNil(err, "Cannot chdir back to previous dir: %s", err)
if t.seenSetupPkgsrc > 0 && !t.seenFinish && !t.seenMain {
- t.Errorf("After t.SetupPkgsrc(), either t.FinishSetUp() or t.Main() must be called.")
- }
-
- if out := t.Output(); out != "" {
- var msg strings.Builder
- msg.WriteString("\n")
- _, _ = fmt.Fprintf(&msg, "Unchecked output in %s; check with:\n", c.TestName())
- msg.WriteString("\n")
- msg.WriteString("t.CheckOutputLines(\n")
- lines := strings.Split(strings.TrimSpace(out), "\n")
- for i, line := range lines {
- _, _ = fmt.Fprintf(&msg, "\t%q%s\n", line, condStr(i == len(lines)-1, ")", ","))
- }
- _, _ = fmt.Fprintf(&msg, "\n")
- _, _ = os.Stderr.WriteString(msg.String())
+ t.InternalErrorf("After t.SetupPkgsrc(), either t.FinishSetUp() or t.Main() must be called.")
}
+ t.ReportUncheckedOutput()
t.tmpdir = ""
t.DisableTracing()
@@ -160,7 +147,9 @@ func (t *Tester) SetUpCommandLine(args ...string) {
//
// It also reveals diagnostics that are logged multiple times per
// line and thus can easily get annoying to the pkgsrc developers.
- G.Logger.Opts.LogVerbose = true
+ //
+ // To avoid running a check multiple times, see Line.once or MkLines.once.
+ G.Logger.verbose = true
t.seenSetUpCommandLine = true
}
@@ -623,7 +612,10 @@ func (t *Tester) SetUpHierarchy() (
case string:
addLine(arg)
case *MkLines:
- text := sprintf(".include %q", relpath(filename.Dir(), arg.lines.Filename))
+ fromDir := G.Pkgsrc.File(filename.Dir())
+ to := G.Pkgsrc.File(arg.lines.Filename)
+ rel := G.Pkgsrc.Relpath(fromDir, to)
+ text := sprintf(".include %q", rel)
addLine(text)
lines = append(lines, arg.lines.Lines...)
default:
@@ -678,14 +670,14 @@ func (s *Suite) Test_Tester_SetUpHierarchy(c *check.C) {
func (t *Tester) FinishSetUp() {
if t.seenSetupPkgsrc == 0 {
- t.Errorf("Unnecessary t.FinishSetUp() since t.SetUpPkgsrc() has not been called.")
+ t.InternalErrorf("Unnecessary t.FinishSetUp() since t.SetUpPkgsrc() has not been called.")
}
if !t.seenFinish {
t.seenFinish = true
G.Pkgsrc.LoadInfrastructure()
} else {
- t.Errorf("Redundant t.FinishSetup() since it was called multiple times.")
+ t.InternalErrorf("Redundant t.FinishSetup() since it was called multiple times.")
}
}
@@ -698,11 +690,11 @@ func (t *Tester) FinishSetUp() {
// Does not work in combination with SetUpOption.
func (t *Tester) Main(args ...string) int {
if t.seenFinish && !t.seenMain {
- t.Errorf("Calling t.FinishSetup() before t.Main() is redundant " +
+ t.InternalErrorf("Calling t.FinishSetup() before t.Main() is redundant " +
"since t.Main() loads the pkgsrc infrastructure.")
}
if t.seenSetUpCommandLine {
- t.Errorf("Calling t.SetupCommandLine() before t.Main() is redundant " +
+ t.InternalErrorf("Calling t.SetupCommandLine() before t.Main() is redundant " +
"since t.Main() accepts the command line options directly.")
}
@@ -741,7 +733,10 @@ func (t *Tester) CheckDeepEquals(actual interface{}, expected interface{}) bool
return t.c.Check(actual, check.DeepEquals, expected)
}
-func (t *Tester) Errorf(format string, args ...interface{}) {
+// InternalErrorf reports a consistency error in the tests.
+func (t *Tester) InternalErrorf(format string, args ...interface{}) {
+ // It is not possible to panic here since check.v1 would then
+ // ignore all subsequent tests.
_, _ = fmt.Fprintf(os.Stderr, "In %s: %s\n", t.testName, sprintf(format, args...))
}
@@ -817,6 +812,18 @@ func (t *Tester) ExpectAssert(action func()) {
t.Check(action, check.Panics, "Pkglint internal error")
}
+// ExpectDiagnosticsAutofix first runs the given action with -Wall, and
+// then another time with -Wall --autofix.
+func (t *Tester) ExpectDiagnosticsAutofix(action func(), diagnostics ...string) {
+ t.SetUpCommandLine("-Wall")
+ action()
+
+ t.SetUpCommandLine("-Wall", "--autofix")
+ action()
+
+ t.CheckOutput(diagnostics)
+}
+
// NewRawLines creates lines from line numbers and raw text, including newlines.
//
// Arguments are sequences of either (lineno, orignl) or (lineno, orignl, textnl).
@@ -1174,3 +1181,22 @@ func (t *Tester) Shquote(format string, rels ...Path) string {
}
return sprintf(format, subs...)
}
+
+func (t *Tester) ReportUncheckedOutput() {
+ out := t.Output()
+ if out == "" {
+ return
+ }
+
+ var msg strings.Builder
+ msg.WriteString("\n")
+ _, _ = fmt.Fprintf(&msg, "Unchecked output in %s; check with:\n", t.testName)
+ msg.WriteString("\n")
+ msg.WriteString("t.CheckOutputLines(\n")
+ lines := strings.Split(strings.TrimSpace(out), "\n")
+ for i, line := range lines {
+ _, _ = fmt.Fprintf(&msg, "\t%q%s\n", line, condStr(i == len(lines)-1, ")", ","))
+ }
+ _, _ = fmt.Fprintf(&msg, "\n")
+ _, _ = os.Stderr.WriteString(msg.String())
+}
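
The new ExpectDiagnosticsAutofix helper folds the earlier two-phase test pattern (run the action with -Wall, run it again with -Wall --autofix, and compare two separate expectation lists) into a single call with one combined list of expected diagnostics. A usage sketch, modelled on the Test_Autofix_ReplaceAt change above, where mainPart runs the check that offers the autofix:

    t.ExpectDiagnosticsAutofix(mainPart,
        "NOTE: filename.mk:1: Should be appended instead of assigned.",
        "AUTOFIX: filename.mk:1: Replacing \"=\" with \"+=\".")
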
diff --git a/pkgtools/pkglint/files/getopt/getopt_test.go b/pkgtools/pkglint/files/getopt/getopt_test.go
index acae20fb367..2f521124739 100644
--- a/pkgtools/pkglint/files/getopt/getopt_test.go
+++ b/pkgtools/pkglint/files/getopt/getopt_test.go
@@ -409,8 +409,8 @@ func (s *Suite) Test_Options_Help__with_flag_group(c *check.C) {
" (Prefix a flag with \"no-\" to disable it.)\n")
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/histogram/histogram_test.go b/pkgtools/pkglint/files/histogram/histogram_test.go
index da1d7fa934e..4bab39aa821 100644
--- a/pkgtools/pkglint/files/histogram/histogram_test.go
+++ b/pkgtools/pkglint/files/histogram/histogram_test.go
@@ -28,8 +28,8 @@ func (s *Suite) Test_Histogram(c *check.C) {
"caption 2 two\n")
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/intqa/ideas.go b/pkgtools/pkglint/files/intqa/ideas.go
index 2438b88a9d5..270821ac1b1 100644
--- a/pkgtools/pkglint/files/intqa/ideas.go
+++ b/pkgtools/pkglint/files/intqa/ideas.go
@@ -5,8 +5,5 @@ package intqa
// then the current package, then a package-qualified identifier.
// As if there were a "_ = XYZ" at the beginning of the function.
-// XXX: All methods should be defined in the same file as their receiver type.
-// If that is not possible, there should only be a small list of exceptions.
-
// XXX: If there is a constructor for a type, only that constructor may be used
// for constructing objects. All other forms (var x Type; x := &Type{}) should be forbidden.
diff --git a/pkgtools/pkglint/files/intqa/testnames.go b/pkgtools/pkglint/files/intqa/qa.go
index 3c35c228935..532f710a388 100644
--- a/pkgtools/pkglint/files/intqa/testnames.go
+++ b/pkgtools/pkglint/files/intqa/qa.go
@@ -38,12 +38,15 @@ const (
// The file of the test method does not correspond to the
// file of the testee.
EFile
+
+ // All methods of a type must be in the same file as the type definition.
+ EMethodsSameFile
)
-// TestNameChecker ensures that all test names follow a common naming scheme:
+// QAChecker ensures that all test names follow a common naming scheme:
// Test_${Type}_${Method}__${description_using_underscores}
// Each of the variable parts may be omitted.
-type TestNameChecker struct {
+type QAChecker struct {
errorf func(format string, args ...interface{})
filters []filter
@@ -56,11 +59,11 @@ type TestNameChecker struct {
out io.Writer
}
-// NewTestNameChecker creates a new checker.
+// NewQAChecker creates a new checker.
// By default, all errors are enabled;
// call Configure to disable them selectively.
-func NewTestNameChecker(errorf func(format string, args ...interface{})) *TestNameChecker {
- ck := TestNameChecker{errorf: errorf, out: os.Stderr}
+func NewQAChecker(errorf func(format string, args ...interface{})) *QAChecker {
+ ck := QAChecker{errorf: errorf, out: os.Stderr}
// For test fixtures from https://gopkg.in/check/v1.
ck.Configure("*_test.go", "*", "SetUpTest", -EMissingTest)
@@ -81,11 +84,11 @@ func NewTestNameChecker(errorf func(format string, args ...interface{})) *TestNa
// Individual errors can be enabled by giving their constant and disabled
// by negating them, such as -EMissingTestee. To reset everything, use
// either EAll or ENone.
-func (ck *TestNameChecker) Configure(filenames, typeNames, funcNames string, errors ...Error) {
+func (ck *QAChecker) Configure(filenames, typeNames, funcNames string, errors ...Error) {
ck.filters = append(ck.filters, filter{filenames, typeNames, funcNames, errors})
}
-func (ck *TestNameChecker) Check() {
+func (ck *QAChecker) Check() {
ck.load(".")
ck.checkTestees()
ck.checkTests()
@@ -94,7 +97,7 @@ func (ck *TestNameChecker) Check() {
}
// load loads all type, function and method names from the current package.
-func (ck *TestNameChecker) load(dir string) {
+func (ck *QAChecker) load(dir string) {
fileSet := token.NewFileSet()
pkgs, err := parser.ParseDir(fileSet, dir, nil, 0)
@@ -117,7 +120,7 @@ func (ck *TestNameChecker) load(dir string) {
}
// loadDecl adds a single type or function declaration to the known elements.
-func (ck *TestNameChecker) loadDecl(decl ast.Decl, filename string) {
+func (ck *QAChecker) loadDecl(decl ast.Decl, filename string) {
switch decl := decl.(type) {
case *ast.GenDecl:
@@ -125,26 +128,31 @@ func (ck *TestNameChecker) loadDecl(decl ast.Decl, filename string) {
switch spec := spec.(type) {
case *ast.TypeSpec:
typeName := spec.Name.Name
- ck.addCode(code{filename, typeName, "", 0})
+ ck.addCode(code{filename, typeName, "", 0}, nil)
}
}
case *ast.FuncDecl:
- typeName := ""
- if decl.Recv != nil {
- typeExpr := decl.Recv.List[0].Type.(ast.Expr)
- if star, ok := typeExpr.(*ast.StarExpr); ok {
- typeName = star.X.(*ast.Ident).Name
- } else {
- typeName = typeExpr.(*ast.Ident).Name
- }
+ code := ck.parseFuncDecl(filename, decl)
+ ck.addCode(code, decl)
+ }
+}
+
+func (*QAChecker) parseFuncDecl(filename string, decl *ast.FuncDecl) code {
+ typeName := ""
+ if decl.Recv != nil {
+ typeExpr := decl.Recv.List[0].Type.(ast.Expr)
+ if star, ok := typeExpr.(*ast.StarExpr); ok {
+ typeExpr = star.X
}
- funcName := decl.Name.Name
- ck.addCode(code{filename, typeName, funcName, 0})
+ typeName = typeExpr.(*ast.Ident).Name
}
+
+ funcName := decl.Name.Name
+ return code{filename, typeName, funcName, 0}
}
-func (ck *TestNameChecker) addCode(code code) {
+func (ck *QAChecker) addCode(code code, decl *ast.FuncDecl) {
if code.isTestScope() && code.isFunc() && code.Func == "TestMain" {
// This is not a test for Main, but a wrapper function of the test.
// Therefore it is completely ignored.
@@ -160,19 +168,39 @@ func (ck *TestNameChecker) addCode(code code) {
return
}
- if code.isTest() {
+ if ck.isTest(code, decl) {
ck.addTest(code)
} else {
ck.addTestee(code)
}
}
-func (ck *TestNameChecker) addTestee(code code) {
+func (*QAChecker) isTest(code code, decl *ast.FuncDecl) bool {
+ if !code.isTestScope() || !strings.HasPrefix(code.Func, "Test") {
+ return false
+ }
+ if decl.Type.Params.NumFields() != 1 {
+ return false
+ }
+
+ paramType := decl.Type.Params.List[0].Type
+ if star, ok := paramType.(*ast.StarExpr); ok {
+ paramType = star.X
+ }
+ if sel, ok := paramType.(*ast.SelectorExpr); ok {
+ paramType = sel.Sel
+ }
+
+ paramTypeName := paramType.(*ast.Ident).Name
+ return paramTypeName == "C" || paramTypeName == "T"
+}
+
+func (ck *QAChecker) addTestee(code code) {
code.order = ck.nextOrder()
ck.testees = append(ck.testees, &testee{code})
}
-func (ck *TestNameChecker) addTest(code code) {
+func (ck *QAChecker) addTest(code code) {
if !strings.HasPrefix(code.Func, "Test_") &&
code.Func != "Test" &&
ck.addError(
@@ -203,14 +231,14 @@ func (ck *TestNameChecker) addTest(code code) {
ck.tests = append(ck.tests, &test{code, testeeName, descr, nil})
}
-func (ck *TestNameChecker) nextOrder() int {
+func (ck *QAChecker) nextOrder() int {
id := ck.order
ck.order++
return id
}
// relate connects the tests to their testees.
-func (ck *TestNameChecker) relate() {
+func (ck *QAChecker) relate() {
testeesByPrefix := make(map[string]*testee)
for _, testee := range ck.testees {
prefix := join(testee.Type, "_", testee.Func)
@@ -222,7 +250,7 @@ func (ck *TestNameChecker) relate() {
}
}
-func (ck *TestNameChecker) checkTests() {
+func (ck *QAChecker) checkTests() {
for _, test := range ck.tests {
ck.checkTestFile(test)
ck.checkTestTestee(test)
@@ -230,13 +258,13 @@ func (ck *TestNameChecker) checkTests() {
}
}
-func (ck *TestNameChecker) checkTestFile(test *test) {
+func (ck *QAChecker) checkTestFile(test *test) {
testee := test.testee
if testee == nil || testee.file == test.file {
return
}
- correctTestFile := strings.TrimSuffix(testee.file, ".go") + "_test.go"
+ correctTestFile := testee.testFile()
if correctTestFile == test.file {
return
}
@@ -248,7 +276,7 @@ func (ck *TestNameChecker) checkTestFile(test *test) {
test.fullName(), testee.fullName(), correctTestFile, test.file)
}
-func (ck *TestNameChecker) checkTestTestee(test *test) {
+func (ck *QAChecker) checkTestTestee(test *test) {
testee := test.testee
if testee != nil || test.testeeName == "" {
return
@@ -265,7 +293,7 @@ func (ck *TestNameChecker) checkTestTestee(test *test) {
// checkTestDescr ensures that the type or function name of the testee
// does not accidentally end up in the description of the test. This could
// happen if there is a double underscore instead of a single underscore.
-func (ck *TestNameChecker) checkTestDescr(test *test) {
+func (ck *QAChecker) checkTestDescr(test *test) {
testee := test.testee
if testee == nil || testee.isMethod() || !isCamelCase(test.descr) {
return
@@ -278,32 +306,77 @@ func (ck *TestNameChecker) checkTestDescr(test *test) {
test.fullName(), test.descr)
}
-func (ck *TestNameChecker) checkTestees() {
+func (ck *QAChecker) checkTestees() {
+ ck.checkTesteesTest()
+ ck.checkTesteesMethodsSameFile()
+}
+
+// checkTesteesTest ensures that each testee has a corresponding unit test.
+func (ck *QAChecker) checkTesteesTest() {
tested := make(map[*testee]bool)
for _, test := range ck.tests {
tested[test.testee] = true
}
for _, testee := range ck.testees {
- ck.checkTesteeTest(testee, tested)
+ if tested[testee] {
+ continue
+ }
+
+ testName := "Test_" + join(testee.Type, "_", testee.Func)
+ ck.addError(
+ EMissingTest,
+ testee.code,
+ "Missing unit test %q for %q.",
+ testName, testee.fullName())
}
}
-func (ck *TestNameChecker) checkTesteeTest(testee *testee, tested map[*testee]bool) {
- if tested[testee] || testee.isType() {
- return
+// checkTesteesMethodsSameFile ensures that all methods of a type are
+// defined in the same file or in the corresponding test file.
+func (ck *QAChecker) checkTesteesMethodsSameFile() {
+ types := map[string]*testee{}
+ for _, testee := range ck.testees {
+ if testee.isType() {
+ types[testee.Type] = testee
+ }
}
- testName := "Test_" + join(testee.Type, "_", testee.Func)
- ck.addError(
- EMissingTest,
- testee.code,
- "Missing unit test %q for %q.",
- testName, testee.fullName())
+ for _, testee := range ck.testees {
+ if !testee.isMethod() {
+ continue
+ }
+ method := testee
+
+ typ := types[method.Type]
+ if typ == nil || method.file == typ.file {
+ continue
+ }
+
+ if method.isTestScope() == typ.isTestScope() {
+ ck.addError(
+ EMethodsSameFile,
+ testee.code,
+ "Method %s must be in %s, like its type.",
+ testee.fullName(), typ.file)
+ continue
+ }
+
+ correctFile := typ.testFile()
+ if method.file == correctFile {
+ continue
+ }
+
+ ck.addError(
+ EMethodsSameFile,
+ testee.code,
+ "Method %s must be in %s, corresponding to its type.",
+ testee.fullName(), correctFile)
+ }
}
// isRelevant checks whether the given error is enabled.
-func (ck *TestNameChecker) isRelevant(filename, typeName, funcName string, e Error) bool {
+func (ck *QAChecker) isRelevant(filename, typeName, funcName string, e Error) bool {
mask := ^uint64(0)
for _, filter := range ck.filters {
if matches(filename, filter.filenames) &&
@@ -315,7 +388,7 @@ func (ck *TestNameChecker) isRelevant(filename, typeName, funcName string, e Err
return mask&ck.errorsMask(0, e) != 0
}
-func (ck *TestNameChecker) errorsMask(mask uint64, errors ...Error) uint64 {
+func (ck *QAChecker) errorsMask(mask uint64, errors ...Error) uint64 {
for _, err := range errors {
if err == ENone {
mask = 0
@@ -332,7 +405,7 @@ func (ck *TestNameChecker) errorsMask(mask uint64, errors ...Error) uint64 {
// checkOrder ensures that the tests appear in the same order as their
// counterparts in the main code.
-func (ck *TestNameChecker) checkOrder() {
+func (ck *QAChecker) checkOrder() {
maxOrderByFile := make(map[string]*test)
for _, test := range ck.tests {
@@ -364,7 +437,7 @@ func (ck *TestNameChecker) checkOrder() {
}
}
-func (ck *TestNameChecker) addError(e Error, c code, format string, args ...interface{}) bool {
+func (ck *QAChecker) addError(e Error, c code, format string, args ...interface{}) bool {
relevant := ck.isRelevant(c.file, c.Type, c.Func, e)
if relevant {
ck.errors = append(ck.errors, fmt.Sprintf(format, args...))
@@ -372,7 +445,7 @@ func (ck *TestNameChecker) addError(e Error, c code, format string, args ...inte
return relevant
}
-func (ck *TestNameChecker) print() {
+func (ck *QAChecker) print() {
for _, msg := range ck.errors {
_, _ = fmt.Fprintln(ck.out, msg)
}
@@ -418,13 +491,17 @@ func (c *code) isFunc() bool { return c.Type == "" }
func (c *code) isType() bool { return c.Func == "" }
func (c *code) isMethod() bool { return c.Type != "" && c.Func != "" }
-func (c *code) isTest() bool {
- return c.isTestScope() && strings.HasPrefix(c.Func, "Test")
-}
func (c *code) isTestScope() bool {
return strings.HasSuffix(c.file, "_test.go")
}
+func (c *code) testFile() string {
+ if strings.HasSuffix(c.file, "_test.go") {
+ return c.file
+ }
+ return strings.TrimSuffix(c.file, ".go") + "_test.go"
+}
+
func isCamelCase(str string) bool {
for i := 0; i+1 < len(str); i++ {
if str[i] == '_' {
diff --git a/pkgtools/pkglint/files/intqa/testnames_test.go b/pkgtools/pkglint/files/intqa/qa_test.go
index 0a53fae70fc..ec83119cf7f 100644
--- a/pkgtools/pkglint/files/intqa/testnames_test.go
+++ b/pkgtools/pkglint/files/intqa/qa_test.go
@@ -4,6 +4,8 @@ import (
"bytes"
"fmt"
"go/ast"
+ "go/parser"
+ "go/token"
"gopkg.in/check.v1"
"io/ioutil"
"path"
@@ -12,7 +14,7 @@ import (
type Suite struct {
c *check.C
- ck *TestNameChecker
+ ck *QAChecker
summary string
}
@@ -21,19 +23,21 @@ func Test(t *testing.T) {
check.TestingT(t)
}
-func (s *Suite) Init(c *check.C) *TestNameChecker {
+func (s *Suite) Init(c *check.C) *QAChecker {
errorf := func(format string, args ...interface{}) {
s.summary = fmt.Sprintf(format, args...)
}
s.c = c
- s.ck = NewTestNameChecker(errorf)
+ s.ck = NewQAChecker(errorf)
s.ck.out = ioutil.Discard
return s.ck
}
func (s *Suite) TearDownTest(c *check.C) {
s.c = c
+ // The testees and tests are not validated to be checked
+ // since that would create too much boilerplate code.
s.CheckErrors(nil...)
s.CheckSummary("")
}
@@ -67,7 +71,7 @@ func (s *Suite) CheckSummary(summary string) {
s.summary = ""
}
-func (s *Suite) Test_NewTestNameChecker(c *check.C) {
+func (s *Suite) Test_NewQAChecker(c *check.C) {
ck := s.Init(c)
c.Check(ck.isRelevant("*_test.go", "Suite", "SetUpTest", EAll), check.Equals, true)
@@ -77,7 +81,7 @@ func (s *Suite) Test_NewTestNameChecker(c *check.C) {
c.Check(ck.isRelevant("*_test.go", "Suite", "TearDownTest", EMissingTest), check.Equals, false)
}
-func (s *Suite) Test_TestNameChecker_Configure(c *check.C) {
+func (s *Suite) Test_QAChecker_Configure(c *check.C) {
ck := s.Init(c)
ck.Configure("*", "*", "*", ENone) // overwrite initialization from Suite.Init
@@ -111,7 +115,7 @@ func (s *Suite) Test_TestNameChecker_Configure(c *check.C) {
c.Check(ck.isRelevant("", "", "", EMissingTest), check.Equals, false)
}
-func (s *Suite) Test_TestNameChecker_Configure__ignore_single_function(c *check.C) {
+func (s *Suite) Test_QAChecker_Configure__ignore_single_function(c *check.C) {
ck := s.Init(c)
ck.Configure("*", "*", "*", EAll)
@@ -127,32 +131,35 @@ func (s *Suite) Test_TestNameChecker_Configure__ignore_single_function(c *check.
c.Check(ck.isRelevant("file_test.go", "*", "TestMain", EAll), check.Equals, true)
}
-func (s *Suite) Test_TestNameChecker_Check(c *check.C) {
+func (s *Suite) Test_QAChecker_Check(c *check.C) {
ck := s.Init(c)
+ ck.Configure("*", "*", "", -EMissingTest)
ck.Configure("*", "Suite", "*", -EMissingTest)
ck.Check()
s.CheckErrors(
- "Missing unit test \"Test_TestNameChecker_addCode\" for \"TestNameChecker.addCode\".",
- "Missing unit test \"Test_TestNameChecker_relate\" for \"TestNameChecker.relate\".",
- "Missing unit test \"Test_TestNameChecker_isRelevant\" for \"TestNameChecker.isRelevant\".")
+ "Missing unit test \"Test_QAChecker_addCode\" for \"QAChecker.addCode\".",
+ "Missing unit test \"Test_QAChecker_relate\" for \"QAChecker.relate\".",
+ "Missing unit test \"Test_QAChecker_isRelevant\" for \"QAChecker.isRelevant\".")
s.CheckSummary("3 errors.")
}
-func (s *Suite) Test_TestNameChecker_load__filtered_nothing(c *check.C) {
+func (s *Suite) Test_QAChecker_load__filtered_nothing(c *check.C) {
ck := s.Init(c)
ck.Configure("*", "*", "*", ENone)
ck.load(".")
- c.Check(ck.testees, check.IsNil)
- c.Check(ck.tests, check.IsNil)
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ nil...)
}
-func (s *Suite) Test_TestNameChecker_load__filtered_only_Value(c *check.C) {
+func (s *Suite) Test_QAChecker_load__filtered_only_Value(c *check.C) {
ck := s.Init(c)
ck.Configure("*", "*", "*", ENone)
@@ -160,13 +167,14 @@ func (s *Suite) Test_TestNameChecker_load__filtered_only_Value(c *check.C) {
ck.load(".")
- c.Check(ck.testees, check.DeepEquals, []*testee{
- {code{"testnames_test.go", "Value", "", 0}},
- {code{"testnames_test.go", "Value", "Method", 1}}})
- c.Check(ck.tests, check.IsNil)
+ s.CheckTestees(
+ s.newTestee("qa_test.go", "Value", "", 0),
+ s.newTestee("qa_test.go", "Value", "Method", 1))
+ s.CheckTests(
+ nil...)
}
-func (s *Suite) Test_TestNameChecker_load__panic(c *check.C) {
+func (s *Suite) Test_QAChecker_load__panic(c *check.C) {
ck := s.Init(c)
c.Check(
@@ -175,34 +183,36 @@ func (s *Suite) Test_TestNameChecker_load__panic(c *check.C) {
`^open does-not-exist\b.*`)
}
-func (s *Suite) Test_TestNameChecker_loadDecl(c *check.C) {
+func (s *Suite) Test_QAChecker_loadDecl(c *check.C) {
ck := s.Init(c)
- typeDecl := func(name string) *ast.GenDecl {
- return &ast.GenDecl{Specs: []ast.Spec{&ast.TypeSpec{Name: &ast.Ident{Name: name}}}}
- }
- funcDecl := func(name string) *ast.FuncDecl {
- return &ast.FuncDecl{Name: &ast.Ident{Name: name}}
- }
- methodDecl := func(typeName, methodName string) *ast.FuncDecl {
- return &ast.FuncDecl{
- Name: &ast.Ident{Name: methodName},
- Recv: &ast.FieldList{List: []*ast.Field{{Type: &ast.Ident{Name: typeName}}}}}
+ load := func(filename, decl string) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, filename, "package p; "+decl, 0)
+ c.Assert(err, check.IsNil)
+ ck.loadDecl(file.Decls[0], filename)
}
- ck.loadDecl(typeDecl("TypeName"), "file_test.go")
+ load("file_test.go", "type TypeName int")
s.CheckTestees(
s.newTestee("file_test.go", "TypeName", "", 0))
// The freestanding TestMain function is ignored by a hardcoded rule,
// independently of the configuration.
- ck.loadDecl(funcDecl("TestMain"), "file_test.go")
+ load("file_test.go", "func TestMain() {}")
+
+ s.CheckTestees(
+ nil...)
+ s.CheckTests(
+ nil...)
// The TestMain method on a type is relevant, but violates the naming rule.
// Therefore it is ignored.
- ck.loadDecl(methodDecl("Suite", "TestMain"), "file_test.go")
+ load("file_test.go", "func (Suite) TestMain(*check.C) {}")
+ s.CheckTestees(
+ nil...)
s.CheckTests(
nil...)
s.CheckErrors(
@@ -211,38 +221,101 @@ func (s *Suite) Test_TestNameChecker_loadDecl(c *check.C) {
// The above error can be disabled, and then the method is handled
// like any other test method.
ck.Configure("*", "Suite", "*", -EName)
- ck.loadDecl(methodDecl("Suite", "TestMain"), "file_test.go")
+ load("file_test.go", "func (Suite) TestMain(*check.C) {}")
+ s.CheckTestees(
+ nil...)
s.CheckTests(
s.newTest("file_test.go", "Suite", "TestMain", 1, "Main", "", nil))
// There is no special handling for TestMain method with a description.
- ck.loadDecl(methodDecl("Suite", "TestMain__descr"), "file_test.go")
+ load("file_test.go", "func (Suite) TestMain__descr(*check.C) {}")
+ s.CheckTestees(
+ nil...)
s.CheckTests(
s.newTest("file_test.go", "Suite", "TestMain__descr", 2, "Main", "descr", nil))
+ s.CheckErrors(
+ nil...)
+}
+
+func (s *Suite) Test_QAChecker_parseFuncDecl(c *check.C) {
+ _ = s.Init(c)
+
+ testFunc := func(filename, decl, typeName, funcName string) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, filename, "package p; "+decl, 0)
+ c.Assert(err, check.IsNil)
+ fn := file.Decls[0].(*ast.FuncDecl)
+ actual := (*QAChecker).parseFuncDecl(nil, filename, fn)
+ c.Check(actual, check.Equals, code{filename, typeName, funcName, 0})
+ }
+
+ testFunc("f_test.go", "func (t Type) Test() {}",
+ "Type", "Test")
+ testFunc("f_test.go", "func (t Type) Test_Type_Method() {}",
+ "Type", "Test_Type_Method")
+ testFunc("f_test.go", "func Test() {}",
+ "", "Test")
+ testFunc("f_test.go", "func Test_Type_Method() {}",
+ "", "Test_Type_Method")
+}
+
+func (s *Suite) Test_QAChecker_isTest(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(filename, typeName, funcName string, isTest bool) {
+ code := code{filename, typeName, funcName, 0}
+ c.Check((*QAChecker).isTest(nil, code, nil), check.Equals, isTest)
+ }
+
+ testFunc := func(filename, decl string, isTest bool) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, filename, "package p; "+decl, 0)
+ c.Assert(err, check.IsNil)
+ fn := file.Decls[0].(*ast.FuncDecl)
+ code := (*QAChecker).parseFuncDecl(nil, filename, fn)
+ c.Check((*QAChecker).isTest(nil, code, fn), check.Equals, isTest)
+ }
+
+ test("f.go", "Type", "", false)
+ test("f.go", "", "Func", false)
+ test("f.go", "Type", "Method", false)
+ test("f.go", "Type", "Test", false)
+ test("f.go", "Type", "Test_Type_Method", false)
+ test("f.go", "", "Test_Type_Method", false)
+
+ testFunc("f_test.go", "func (t Type) Test(c *check.C) {}", true)
+ testFunc("f_test.go", "func (t Type) Test_Type_Method(c *check.C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(c *check.C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(c *C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(c C) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(t *testing.T) {}", true)
+ testFunc("f_test.go", "func Test_Type_Method(X) {}", false)
+ testFunc("f_test.go", "func Test_Type_Method(int) {}", false)
}
-func (s *Suite) Test_TestNameChecker_addTestee(c *check.C) {
+func (s *Suite) Test_QAChecker_addTestee(c *check.C) {
ck := s.Init(c)
- code := code{"filename.go", "Type", "Method", 0}
- ck.addTestee(code)
+ ck.addTestee(code{"filename.go", "Type", "Method", 0})
- c.Check(ck.testees, check.DeepEquals, []*testee{{code}})
+ s.CheckTestees(
+ s.newTestee("filename.go", "Type", "Method", 0))
}
-func (s *Suite) Test_TestNameChecker_addTest(c *check.C) {
+func (s *Suite) Test_QAChecker_addTest(c *check.C) {
ck := s.Init(c)
ck.addTest(code{"filename.go", "Type", "Method", 0})
- c.Check(ck.tests, check.IsNil)
+ s.CheckTests(
+ nil...)
s.CheckErrors(
"Test \"Type.Method\" must start with \"Test_\".")
}
-func (s *Suite) Test_TestNameChecker_addTest__empty_description(c *check.C) {
+func (s *Suite) Test_QAChecker_addTest__empty_description(c *check.C) {
ck := s.Init(c)
ck.addTest(code{"f_test.go", "Suite", "Test_Method__", 0})
@@ -258,7 +331,7 @@ func (s *Suite) Test_TestNameChecker_addTest__empty_description(c *check.C) {
nil...)
}
-func (s *Suite) Test_TestNameChecker_addTest__suppressed_empty_description(c *check.C) {
+func (s *Suite) Test_QAChecker_addTest__suppressed_empty_description(c *check.C) {
ck := s.Init(c)
ck.Configure("*", "*", "*", -EName)
@@ -275,7 +348,7 @@ func (s *Suite) Test_TestNameChecker_addTest__suppressed_empty_description(c *ch
"Missing testee \"Method\" for test \"Suite.Test_Method__\".")
}
-func (s *Suite) Test_TestNameChecker_nextOrder(c *check.C) {
+func (s *Suite) Test_QAChecker_nextOrder(c *check.C) {
ck := s.Init(c)
c.Check(ck.nextOrder(), check.Equals, 0)
@@ -283,7 +356,7 @@ func (s *Suite) Test_TestNameChecker_nextOrder(c *check.C) {
c.Check(ck.nextOrder(), check.Equals, 2)
}
-func (s *Suite) Test_TestNameChecker_checkTests(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTests(c *check.C) {
ck := s.Init(c)
ck.tests = append(ck.tests,
@@ -297,7 +370,7 @@ func (s *Suite) Test_TestNameChecker_checkTests(c *check.C) {
"must be in source_test.go instead of wrong_test.go.")
}
-func (s *Suite) Test_TestNameChecker_checkTestFile__global(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTestFile__global(c *check.C) {
ck := s.Init(c)
ck.checkTestFile(&test{
@@ -311,7 +384,7 @@ func (s *Suite) Test_TestNameChecker_checkTestFile__global(c *check.C) {
"must be in other_test.go instead of demo_test.go.")
}
-func (s *Suite) Test_TestNameChecker_checkTestTestee__global(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTestTestee__global(c *check.C) {
ck := s.Init(c)
ck.checkTestTestee(&test{
@@ -324,7 +397,7 @@ func (s *Suite) Test_TestNameChecker_checkTestTestee__global(c *check.C) {
nil...)
}
-func (s *Suite) Test_TestNameChecker_checkTestTestee__no_testee(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTestTestee__no_testee(c *check.C) {
ck := s.Init(c)
ck.checkTestTestee(&test{
@@ -337,7 +410,7 @@ func (s *Suite) Test_TestNameChecker_checkTestTestee__no_testee(c *check.C) {
"Missing testee \"Missing\" for test \"Suite.Test_Missing\".")
}
-func (s *Suite) Test_TestNameChecker_checkTestTestee__testee_exists(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTestTestee__testee_exists(c *check.C) {
ck := s.Init(c)
ck.checkTestTestee(&test{
@@ -350,7 +423,7 @@ func (s *Suite) Test_TestNameChecker_checkTestTestee__testee_exists(c *check.C)
nil...)
}
-func (s *Suite) Test_TestNameChecker_checkTestDescr__camel_case(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTestDescr__camel_case(c *check.C) {
ck := s.Init(c)
ck.checkTestDescr(&test{
@@ -364,7 +437,7 @@ func (s *Suite) Test_TestNameChecker_checkTestDescr__camel_case(c *check.C) {
"must not use CamelCase in the first word.")
}
-func (s *Suite) Test_TestNameChecker_checkTestees(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTestees(c *check.C) {
ck := s.Init(c)
ck.testees = []*testee{s.newTestee("s.go", "", "Func", 0)}
@@ -376,25 +449,50 @@ func (s *Suite) Test_TestNameChecker_checkTestees(c *check.C) {
"Missing unit test \"Test_Func\" for \"Func\".")
}
-func (s *Suite) Test_TestNameChecker_checkTesteeTest(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTesteesTest(c *check.C) {
ck := s.Init(c)
- ck.checkTesteeTest(
- &testee{code{"demo.go", "Type", "", 0}},
- nil)
- ck.checkTesteeTest(
- &testee{code{"demo.go", "", "Func", 0}},
- nil)
- ck.checkTesteeTest(
- &testee{code{"demo.go", "Type", "Method", 0}},
- nil)
+ ck.addTestee(code{"demo.go", "Type", "", 0})
+ ck.addTestee(code{"demo.go", "", "Func", 0})
+ ck.addTestee(code{"demo.go", "Type", "Method", 0})
+ ck.addTestee(code{"demo.go", "OkType", "", 0})
+ ck.addTestee(code{"demo.go", "", "OkFunc", 0})
+ ck.addTestee(code{"demo.go", "OkType", "Method", 0})
+ ck.addTest(code{"demo_test.go", "", "Test_OkType", 0})
+ ck.addTest(code{"demo_test.go", "", "Test_OkFunc", 0})
+ ck.addTest(code{"demo_test.go", "", "Test_OkType_Method", 0})
+ ck.relate()
+
+ ck.checkTesteesTest()
s.CheckErrors(
+ "Missing unit test \"Test_Type\" for \"Type\".",
"Missing unit test \"Test_Func\" for \"Func\".",
"Missing unit test \"Test_Type_Method\" for \"Type.Method\".")
}
-func (s *Suite) Test_TestNameChecker_errorsMask(c *check.C) {
+func (s *Suite) Test_QAChecker_checkTesteesMethodsSameFile(c *check.C) {
+ ck := s.Init(c)
+
+ ck.addTestee(code{"main.go", "Main", "", 0})
+ ck.addTestee(code{"main.go", "Main", "MethodOk", 1})
+ ck.addTestee(code{"other.go", "Main", "MethodWrong", 2})
+ ck.addTestee(code{"main_test.go", "Main", "MethodOkTest", 3})
+ ck.addTestee(code{"other_test.go", "Main", "MethodWrongTest", 4})
+ ck.addTestee(code{"main_test.go", "T", "", 100})
+ ck.addTestee(code{"main_test.go", "T", "MethodOk", 101})
+ ck.addTestee(code{"other_test.go", "T", "MethodWrong", 102})
+
+ ck.checkTesteesMethodsSameFile()
+
+ s.CheckErrors(
+ "Method Main.MethodWrong must be in main.go, like its type.",
+ "Method Main.MethodWrongTest must be in main_test.go, "+
+ "corresponding to its type.",
+ "Method T.MethodWrong must be in main_test.go, like its type.")
+}
+
+func (s *Suite) Test_QAChecker_errorsMask(c *check.C) {
ck := s.Init(c)
c.Check(ck.errorsMask(0, EAll), check.Equals, ^uint64(0))
@@ -403,7 +501,7 @@ func (s *Suite) Test_TestNameChecker_errorsMask(c *check.C) {
c.Check(ck.errorsMask(2, EMissingTest), check.Equals, uint64(10))
}
-func (s *Suite) Test_TestNameChecker_checkOrder(c *check.C) {
+func (s *Suite) Test_QAChecker_checkOrder(c *check.C) {
ck := s.Init(c)
ck.addTestee(code{"f.go", "T", "", 10})
@@ -430,7 +528,7 @@ func (s *Suite) Test_TestNameChecker_checkOrder(c *check.C) {
"Test \"S.Test_T_M2__1\" must be ordered before \"S.Test_T_M3\".")
}
-func (s *Suite) Test_TestNameChecker_addError(c *check.C) {
+func (s *Suite) Test_QAChecker_addError(c *check.C) {
ck := s.Init(c)
ck.Configure("ignored*", "*", "*", -EName)
@@ -443,7 +541,7 @@ func (s *Suite) Test_TestNameChecker_addError(c *check.C) {
"E2")
}
-func (s *Suite) Test_TestNameChecker_print__empty(c *check.C) {
+func (s *Suite) Test_QAChecker_print__empty(c *check.C) {
var out bytes.Buffer
ck := s.Init(c)
ck.out = &out
@@ -453,7 +551,7 @@ func (s *Suite) Test_TestNameChecker_print__empty(c *check.C) {
c.Check(out.String(), check.Equals, "")
}
-func (s *Suite) Test_TestNameChecker_print__1_error(c *check.C) {
+func (s *Suite) Test_QAChecker_print__1_error(c *check.C) {
var out bytes.Buffer
ck := s.Init(c)
ck.out = &out
@@ -466,7 +564,7 @@ func (s *Suite) Test_TestNameChecker_print__1_error(c *check.C) {
s.CheckSummary("1 error.")
}
-func (s *Suite) Test_TestNameChecker_print__2_errors(c *check.C) {
+func (s *Suite) Test_QAChecker_print__2_errors(c *check.C) {
var out bytes.Buffer
ck := s.Init(c)
ck.out = &out
@@ -532,25 +630,6 @@ func (s *Suite) Test_code_isMethod(c *check.C) {
test("Type", "Method", true)
}
-func (s *Suite) Test_code_isTest(c *check.C) {
- _ = s.Init(c)
-
- test := func(filename, typeName, funcName string, isTest bool) {
- code := code{filename, typeName, funcName, 0}
- c.Check(code.isTest(), check.Equals, isTest)
- }
-
- test("f.go", "Type", "", false)
- test("f.go", "", "Func", false)
- test("f.go", "Type", "Method", false)
- test("f.go", "Type", "Test", false)
- test("f.go", "Type", "Test_Type_Method", false)
- test("f.go", "", "Test_Type_Method", false)
- test("f_test.go", "Type", "Test", true)
- test("f_test.go", "Type", "Test_Type_Method", true)
- test("f_test.go", "", "Test_Type_Method", true)
-}
-
func (s *Suite) Test_code_isTestScope(c *check.C) {
_ = s.Init(c)
@@ -566,6 +645,21 @@ func (s *Suite) Test_code_isTestScope(c *check.C) {
test("file_linux_test.go", true)
}
+func (s *Suite) Test_code_testFile(c *check.C) {
+ _ = s.Init(c)
+
+ test := func(filename string, testFile string) {
+ code := code{filename, "", "", 0}
+ c.Check(code.testFile(), check.Equals, testFile)
+ }
+
+ test("f.go", "f_test.go")
+ test("test.go", "test_test.go")
+ test("_test.go", "_test.go")
+ test("file_test.go", "file_test.go")
+ test("file_linux_test.go", "file_linux_test.go")
+}
+
func (s *Suite) Test_isCamelCase(c *check.C) {
_ = s.Init(c)
@@ -624,5 +718,5 @@ func (s *Suite) Test_Value_Method(c *check.C) {
type Value struct{}
// Method has no star on the receiver,
-// for code coverage of TestNameChecker.loadDecl.
+// for code coverage of QAChecker.loadDecl.
func (Value) Method() {}
diff --git a/pkgtools/pkglint/files/licenses.go b/pkgtools/pkglint/files/licenses.go
index 9b155649017..56f15907921 100644
--- a/pkgtools/pkglint/files/licenses.go
+++ b/pkgtools/pkglint/files/licenses.go
@@ -36,7 +36,8 @@ func (lc *LicenseChecker) checkName(license string) {
}
if !licenseFile.IsFile() {
- lc.MkLine.Warnf("License file %s does not exist.", cleanpath(licenseFile))
+ lc.MkLine.Warnf("License file %s does not exist.",
+ lc.MkLine.PathToFile(licenseFile))
}
switch license {
diff --git a/pkgtools/pkglint/files/licenses/licenses_test.go b/pkgtools/pkglint/files/licenses/licenses_test.go
index 469e44cf1d0..edf1174e3e1 100644
--- a/pkgtools/pkglint/files/licenses/licenses_test.go
+++ b/pkgtools/pkglint/files/licenses/licenses_test.go
@@ -131,8 +131,8 @@ func Test(t *testing.T) {
check.TestingT(t)
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/licenses_test.go b/pkgtools/pkglint/files/licenses_test.go
index 62ffcf46246..f7afdd3d4db 100644
--- a/pkgtools/pkglint/files/licenses_test.go
+++ b/pkgtools/pkglint/files/licenses_test.go
@@ -11,7 +11,7 @@ func (s *Suite) Test_LicenseChecker_Check(c *check.C) {
"The licenses for most software are designed to take away ...")
test := func(licenseValue string, diagnostics ...string) {
- mklines := t.NewMkLines("Makefile",
+ mklines := t.SetUpFileMkLines("Makefile",
"LICENSE=\t"+licenseValue)
mklines.ForEach(func(mkline *MkLine) {
@@ -22,25 +22,33 @@ func (s *Suite) Test_LicenseChecker_Check(c *check.C) {
}
test("gpl-v2",
- "WARN: Makefile:1: License file ~/licenses/gpl-v2 does not exist.")
+ "WARN: ~/Makefile:1: License file licenses/gpl-v2 does not exist.")
test("no-profit shareware",
- "ERROR: Makefile:1: Parse error for license condition \"no-profit shareware\".")
+ "ERROR: ~/Makefile:1: Parse error for license condition \"no-profit shareware\".")
test("no-profit AND shareware",
- "WARN: Makefile:1: License file ~/licenses/no-profit does not exist.",
- "ERROR: Makefile:1: License \"no-profit\" must not be used.",
- "WARN: Makefile:1: License file ~/licenses/shareware does not exist.",
- "ERROR: Makefile:1: License \"shareware\" must not be used.")
+ "WARN: ~/Makefile:1: License file licenses/no-profit does not exist.",
+ "ERROR: ~/Makefile:1: License \"no-profit\" must not be used.",
+ "WARN: ~/Makefile:1: License file licenses/shareware does not exist.",
+ "ERROR: ~/Makefile:1: License \"shareware\" must not be used.")
test("gnu-gpl-v2",
nil...)
test("gnu-gpl-v2 AND gnu-gpl-v2 OR gnu-gpl-v2",
- "ERROR: Makefile:1: AND and OR operators in license conditions can only be combined using parentheses.")
+ "ERROR: ~/Makefile:1: AND and OR operators in license conditions "+
+ "can only be combined using parentheses.")
+
+ test("gnu-gpl-v2 AND (gnu-gpl-v2) OR gnu-gpl-v2",
+ "ERROR: ~/Makefile:1: AND and OR operators in license conditions "+
+ "can only be combined using parentheses.")
test("(gnu-gpl-v2 OR gnu-gpl-v2) AND gnu-gpl-v2",
nil...)
+
+ test("gnu-gpl-v2 OR (gnu-gpl-v2 AND gnu-gpl-v2)",
+ nil...)
}
func (s *Suite) Test_LicenseChecker_checkName__LICENSE_FILE(c *check.C) {
diff --git a/pkgtools/pkglint/files/line.go b/pkgtools/pkglint/files/line.go
index 03bd76718fd..ba875844463 100644
--- a/pkgtools/pkglint/files/line.go
+++ b/pkgtools/pkglint/files/line.go
@@ -119,7 +119,7 @@ func (line *Line) RefToLocation(other Location) string {
// This is typically used for arguments in diagnostics, which should always be
// relative to the line with which the diagnostic is associated.
func (line *Line) PathToFile(filePath Path) Path {
- return relpath(line.Filename.Dir(), filePath)
+ return G.Pkgsrc.Relpath(line.Filename.Dir(), filePath)
}
func (line *Line) IsMultiline() bool {
@@ -182,6 +182,10 @@ func (line *Line) String() string {
func (line *Line) Autofix() *Autofix {
if line.autofix == nil {
line.autofix = NewAutofix(line)
+ } else {
+ // This assertion fails if an Autofix is reused before
+ // its Apply method is called.
+ assert(line.autofix.autofixShortTerm.diagFormat == "")
}
return line.autofix
}
diff --git a/pkgtools/pkglint/files/line_test.go b/pkgtools/pkglint/files/line_test.go
index f636abf0173..0dcea7d1977 100644
--- a/pkgtools/pkglint/files/line_test.go
+++ b/pkgtools/pkglint/files/line_test.go
@@ -53,3 +53,21 @@ func (s *Suite) Test_Line_Fatalf__trace(c *check.C) {
"TRACE: - (*Suite).Test_Line_Fatalf__trace.func2()",
"FATAL: filename:123: Cannot continue because of \"reason 1\" and \"reason 2\".")
}
+
+func (s *Suite) Test_Line_Autofix__reuse_incomplete(c *check.C) {
+ t := s.Init(c)
+
+ line := t.NewLine("filename.mk", 1, "")
+
+ fix := line.Autofix()
+ fix.Warnf("Warning.")
+ // For some reason, the other required calls are left out.
+
+ t.ExpectAssert(func() { _ = line.Autofix() })
+
+ // Properly finish the standard call sequence for an Autofix.
+ fix.Apply()
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:1: Warning.")
+}
diff --git a/pkgtools/pkglint/files/logging.go b/pkgtools/pkglint/files/logging.go
index f6e54ca2fbc..89201974fee 100644
--- a/pkgtools/pkglint/files/logging.go
+++ b/pkgtools/pkglint/files/logging.go
@@ -18,6 +18,7 @@ type Logger struct {
suppressExpl bool
prevLine *Line
+ verbose bool // allow duplicate diagnostics, even in the same line
logged Once
explained Once
histo *histogram.Histogram
@@ -34,7 +35,6 @@ type LoggerOpts struct {
Autofix,
Explain,
ShowSource,
- LogVerbose,
GccOutput,
Quiet bool
}
@@ -52,8 +52,6 @@ var (
AutofixLogLevel = &LogLevel{"AUTOFIX", "autofix"}
)
-var dummyLine = NewLineMulti("", 0, 0, "", nil)
-
// Explain outputs an explanation for the preceding diagnostic
// if the --explain option is given. Otherwise it just records
// that an explanation is available.
@@ -126,7 +124,7 @@ func (l *Logger) Diag(line *Line, level *LogLevel, format string, args ...interf
}
func (l *Logger) FirstTime(filename Path, linenos, msg string) bool {
- if l.Opts.LogVerbose {
+ if l.verbose {
return true
}
@@ -283,13 +281,13 @@ func (l *Logger) Logf(level *LogLevel, filename Path, lineno, format, msg string
}
}
-// Errorf logs a technical error on the error output.
+// TechErrorf logs a technical error on the error output.
//
// location must be a slash-separated filename, such as the one in
// Location.Filename. It may be followed by the usual ":123" for line numbers.
//
// For diagnostics, use Logf instead.
-func (l *Logger) Errorf(location Path, format string, args ...interface{}) {
+func (l *Logger) TechErrorf(location Path, format string, args ...interface{}) {
msg := sprintf(format, args...)
var diag string
if l.Opts.GccOutput {
@@ -359,6 +357,7 @@ type SeparatorWriter struct {
}
func NewSeparatorWriter(out io.Writer) *SeparatorWriter {
+ assertNotNil(out)
return &SeparatorWriter{out, 3, bytes.Buffer{}}
}
diff --git a/pkgtools/pkglint/files/logging_test.go b/pkgtools/pkglint/files/logging_test.go
index 5ee4c6372a0..c346f468c1c 100644
--- a/pkgtools/pkglint/files/logging_test.go
+++ b/pkgtools/pkglint/files/logging_test.go
@@ -248,6 +248,10 @@ func (s *Suite) Test_Logger_Diag__show_source_with_whole_file(c *check.C) {
func (s *Suite) Test_Logger_Diag__source_duplicates(c *check.C) {
t := s.Init(c)
+ // Up to pkglint 19.3.10, this variable had been reset during
+ // command line parsing. In 19.3.11 the command line option has
+	// been removed; therefore it must be reset manually.
+ G.Logger.verbose = false
t.SetUpPkgsrc()
t.CreateFileLines("category/dependency/patches/patch-aa",
CvsID,
@@ -682,7 +686,7 @@ func (s *Suite) Test_Logger_Logf__duplicate_messages(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--explain")
- G.Logger.Opts.LogVerbose = false
+ G.Logger.verbose = false
line := t.NewLine("README.txt", 123, "text")
// Is logged because it is the first appearance of this warning.
@@ -780,7 +784,7 @@ func (s *Suite) Test_Logger_Logf__duplicate_autofix(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--explain", "--autofix")
- G.Logger.Opts.LogVerbose = false // See SetUpTest
+ G.Logger.verbose = false // See SetUpTest
line := t.NewLine("README.txt", 123, "text")
fix := line.Autofix()
@@ -801,12 +805,12 @@ func (s *Suite) Test_Logger_Logf__panic(c *check.C) {
"Pkglint internal error: Diagnostic format \"No period\" must end in a period.")
}
-func (s *Suite) Test_Logger_Errorf__gcc_format(c *check.C) {
+func (s *Suite) Test_Logger_TechErrorf__gcc_format(c *check.C) {
t := s.Init(c)
t.SetUpCommandLine("--gcc-output-format")
- G.Logger.Errorf("filename", "Cannot be opened for %s.", "reading")
+ G.Logger.TechErrorf("filename", "Cannot be opened for %s.", "reading")
t.CheckOutputLines(
"filename: error: Cannot be opened for reading.")
diff --git a/pkgtools/pkglint/files/mklexer.go b/pkgtools/pkglint/files/mklexer.go
new file mode 100644
index 00000000000..92c89a2e3b1
--- /dev/null
+++ b/pkgtools/pkglint/files/mklexer.go
@@ -0,0 +1,414 @@
+package pkglint
+
+import (
+ "netbsd.org/pkglint/regex"
+ "netbsd.org/pkglint/textproc"
+ "strings"
+)
+
+// MkLexer splits a text into a sequence of variable uses
+// and plain text.
+type MkLexer struct {
+ lexer *textproc.Lexer
+ line *Line
+}
+
+func NewMkLexer(text string, line *Line) *MkLexer {
+ return &MkLexer{textproc.NewLexer(text), line}
+}
+
+// MkTokens splits a text like in the following example:
+// Text${VAR:Mmodifier}${VAR2}more text${VAR3}
+// into tokens like these:
+// Text
+// ${VAR:Mmodifier}
+// ${VAR2}
+// more text
+// ${VAR3}
+func (p *MkLexer) MkTokens() ([]*MkToken, string) {
+ lexer := p.lexer
+
+ var tokens []*MkToken
+ for !lexer.EOF() {
+ mark := lexer.Mark()
+ if varuse := p.VarUse(); varuse != nil {
+ tokens = append(tokens, &MkToken{Text: lexer.Since(mark), Varuse: varuse})
+ continue
+ }
+
+ for lexer.NextBytesFunc(func(b byte) bool { return b != '$' }) != "" || lexer.SkipString("$$") {
+ }
+ text := lexer.Since(mark)
+ if text != "" {
+ tokens = append(tokens, &MkToken{Text: text})
+ continue
+ }
+
+ break
+ }
+ return tokens, lexer.Rest()
+}
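A minimal usage sketch (not part of the committed diff), using only the API defined above; passing nil as the line suppresses diagnostics:

	p := NewMkLexer("Text${VAR:Mmodifier}${VAR2}more text${VAR3}", nil)
	tokens, rest := p.MkTokens()
	// tokens: "Text", "${VAR:Mmodifier}", "${VAR2}", "more text", "${VAR3}";
	// the variable uses additionally carry a parsed MkVarUse.
	// rest: "" (the whole input was consumed)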
+
+func (p *MkLexer) VarUse() *MkVarUse {
+ rest := p.lexer.Rest()
+ if len(rest) < 2 || rest[0] != '$' {
+ return nil
+ }
+
+ switch rest[1] {
+ case '{', '(':
+ return p.varUseBrace(rest[1] == '(')
+
+ case '$':
+ // This is an escaped dollar character and not a variable use.
+ return nil
+
+ case '@', '<', ' ':
+ // These variable names are known to exist.
+ //
+ // Many others are also possible but not used in practice.
+ // In particular, when parsing the :C or :S modifier,
+ // the $ must not be interpreted as a variable name,
+ // even when it looks like $/ could refer to the "/" variable.
+ //
+ // TODO: Find out whether $" is a variable use when it appears in the :M modifier.
+ p.lexer.Skip(2)
+ return &MkVarUse{rest[1:2], nil}
+
+ default:
+ return p.varUseAlnum()
+ }
+}
+
+// varUseBrace parses:
+// ${VAR}
+// ${arbitrary text:L}
+// ${variable with invalid chars}
+// $(PARENTHESES)
+// ${VAR:Mpattern:C,:,colon,g:Q:Q:Q}
+func (p *MkLexer) varUseBrace(usingRoundParen bool) *MkVarUse {
+ lexer := p.lexer
+
+ beforeDollar := lexer.Mark()
+ lexer.Skip(2)
+
+ closing := byte('}')
+ if usingRoundParen {
+ closing = ')'
+ }
+
+ beforeVarname := lexer.Mark()
+ varname := p.Varname()
+ p.varUseText(closing)
+ varExpr := lexer.Since(beforeVarname)
+
+ modifiers := p.VarUseModifiers(varExpr, closing)
+
+ closed := lexer.SkipByte(closing)
+
+ if p.line != nil {
+ if !closed {
+ p.line.Warnf("Missing closing %q for %q.", string(rune(closing)), varExpr)
+ }
+
+ if usingRoundParen && closed {
+ parenVaruse := lexer.Since(beforeDollar)
+ edit := []byte(parenVaruse)
+ edit[1] = '{'
+ edit[len(edit)-1] = '}'
+ bracesVaruse := string(edit)
+
+ fix := p.line.Autofix()
+ fix.Warnf("Please use curly braces {} instead of round parentheses () for %s.", varExpr)
+ fix.Replace(parenVaruse, bracesVaruse)
+ fix.Apply()
+ }
+
+ if len(varExpr) > len(varname) && !(&MkVarUse{varExpr, modifiers}).IsExpression() {
+ p.line.Warnf("Invalid part %q after variable name %q.", varExpr[len(varname):], varname)
+ }
+ }
+
+ return &MkVarUse{varExpr, modifiers}
+}
+
+func (p *MkLexer) Varname() string {
+ lexer := p.lexer
+
+ // TODO: duplicated code in MatchVarassign
+ mark := lexer.Mark()
+ lexer.SkipByte('.')
+ for lexer.NextBytesSet(VarbaseBytes) != "" || p.VarUse() != nil {
+ }
+ if lexer.SkipByte('.') || hasPrefix(lexer.Since(mark), "SITES_") {
+ for lexer.NextBytesSet(VarparamBytes) != "" || p.VarUse() != nil {
+ }
+ }
+ return lexer.Since(mark)
+}
+
+// varUseText parses any text up to the next colon or closing mark.
+// Nested variable uses are parsed as well.
+//
+// This is used for the :L and :? modifiers since they accept arbitrary
+// text as the "variable name" and effectively interpret it as the variable
+// value instead.
+func (p *MkLexer) varUseText(closing byte) string {
+ lexer := p.lexer
+ start := lexer.Mark()
+ re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:}]|\$\$)+`, `^([^$:)]|\$\$)+`)))
+ for p.VarUse() != nil || lexer.SkipRegexp(re) {
+ }
+ return lexer.Since(start)
+}
+
+// VarUseModifiers parses the modifiers of a variable being used, such as :Q, :Mpattern.
+//
+// See the bmake manual page.
+func (p *MkLexer) VarUseModifiers(varname string, closing byte) []MkVarUseModifier {
+ lexer := p.lexer
+
+ var modifiers []MkVarUseModifier
+ // The :S and :C modifiers may be chained without using the : as separator.
+ mayOmitColon := false
+
+ for lexer.SkipByte(':') || mayOmitColon {
+ modifier := p.varUseModifier(varname, closing)
+ if modifier != "" {
+ modifiers = append(modifiers, MkVarUseModifier{modifier})
+ }
+ mayOmitColon = modifier != "" && (modifier[0] == 'S' || modifier[0] == 'C')
+ }
+ return modifiers
+}
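A rough sketch of the chaining rule described above (the names p and use are illustrative, not part of the diff):

	p := NewMkLexer("${LDFLAGS:S/-Wl,//g:Q}", nil)
	use := p.VarUse()
	// use describes the variable "LDFLAGS" with the modifiers "S/-Wl,//g" and "Q".
	// Likewise, "${VAR:S/-//S/.//}" yields the modifiers "S/-//" and "S/.//",
	// even though the second one is not introduced by a colon.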
+
+// varUseModifier parses a single variable modifier such as :Q or :S,from,to,.
+// The actual parsing starts after the leading colon.
+func (p *MkLexer) varUseModifier(varname string, closing byte) string {
+ lexer := p.lexer
+ mark := lexer.Mark()
+
+ switch lexer.PeekByte() {
+ case 'E', 'H', 'L', 'O', 'Q', 'R', 'T', 's', 't', 'u':
+ mod := lexer.NextBytesSet(textproc.Alnum)
+
+ switch mod {
+ case
+ "E", // Extension, e.g. path/file.suffix => suffix
+ "H", // Head, e.g. dir/subdir/file.suffix => dir/subdir
+ "L", // XXX: Shouldn't this be handled specially?
+ "O", // Order alphabetically
+ "Ox", // Shuffle
+ "Q", // Quote shell meta-characters
+ "R", // Strip the file suffix, e.g. path/file.suffix => file
+ "T", // Basename, e.g. path/file.suffix => file.suffix
+ "sh", // Evaluate the variable value as shell command
+ "tA", // Try to convert to absolute path
+ "tW", // Causes the value to be treated as a single word
+ "tl", // To lowercase
+ "tu", // To uppercase
+ "tw", // Causes the value to be treated as list of words
+ "u": // Remove adjacent duplicate words (like uniq(1))
+ return mod
+ }
+
+ if hasPrefix(mod, "ts") {
+ // See devel/bmake/files/var.c:/case 't'
+ sep := mod[2:] + p.varUseText(closing)
+ switch {
+ case sep == "":
+ lexer.SkipString(":")
+ case len(sep) == 1:
+ break
+ case matches(sep, `^\\\d+`):
+ break
+ default:
+ if p.line != nil {
+ p.line.Warnf("Invalid separator %q for :ts modifier of %q.", sep, varname)
+ p.line.Explain(
+ "The separator for the :ts modifier must be either a single character",
+ "or an escape sequence like \\t or \\n or an octal or decimal escape",
+ "sequence; see the bmake man page for further details.")
+ }
+ }
+ return lexer.Since(mark)
+ }
+
+ case '=', 'D', 'M', 'N', 'U':
+ lexer.Skip(1)
+ re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:\\}]|\$\$|\\.)+`, `^([^$:\\)]|\$\$|\\.)+`)))
+ for p.VarUse() != nil || lexer.SkipRegexp(re) {
+ }
+ arg := lexer.Since(mark)
+ return strings.Replace(arg, "\\:", ":", -1)
+
+ case 'C', 'S':
+ if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok {
+ return lexer.Since(mark)
+ }
+
+ case '@':
+ if p.varUseModifierAt(lexer, varname) {
+ return lexer.Since(mark)
+ }
+
+ case '[':
+ if lexer.SkipRegexp(regcomp(`^\[(?:[-.\d]+|#)\]`)) {
+ return lexer.Since(mark)
+ }
+
+ case '?':
+ lexer.Skip(1)
+ p.varUseText(closing)
+ if lexer.SkipByte(':') {
+ p.varUseText(closing)
+ return lexer.Since(mark)
+ }
+ }
+
+ lexer.Reset(mark)
+
+ re := regcomp(regex.Pattern(condStr(closing == '}', `^([^:$}]|\$\$)+`, `^([^:$)]|\$\$)+`)))
+ for p.VarUse() != nil || lexer.SkipRegexp(re) {
+ }
+ modifier := lexer.Since(mark)
+
+ // ${SOURCES:%.c=%.o} or ${:!uname -a!:[2]}
+ if contains(modifier, "=") || (hasPrefix(modifier, "!") && hasSuffix(modifier, "!")) {
+ return modifier
+ }
+
+ if p.line != nil && modifier != "" {
+ p.line.Warnf("Invalid variable modifier %q for %q.", modifier, varname)
+ }
+
+ return ""
+}
+
+// varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier.
+func (p *MkLexer) varUseModifierSubst(closing byte) (ok bool, regex bool, from string, to string, options string) {
+ lexer := p.lexer
+ regex = lexer.PeekByte() == 'C'
+ lexer.Skip(1 /* the initial S or C */)
+
+ sep := lexer.PeekByte() // bmake allows _any_ separator, even letters.
+ if sep == -1 || byte(sep) == closing {
+ return
+ }
+
+ lexer.Skip(1)
+ separator := byte(sep)
+
+ unescape := func(s string) string {
+ return strings.Replace(s, "\\"+string(separator), string(separator), -1)
+ }
+
+ isOther := func(b byte) bool {
+ return b != separator && b != '$' && b != '\\'
+ }
+
+ skipOther := func() {
+ for {
+ switch {
+
+ case p.VarUse() != nil:
+ break
+
+ case lexer.SkipString("$$"):
+ break
+
+ case len(lexer.Rest()) >= 2 && lexer.PeekByte() == '\\' && separator != '\\':
+ _ = lexer.Skip(2)
+
+ case lexer.NextBytesFunc(isOther) != "":
+ break
+
+ default:
+ return
+ }
+ }
+ }
+
+ fromStart := lexer.Mark()
+ lexer.SkipByte('^')
+ skipOther()
+ lexer.SkipByte('$')
+ from = unescape(lexer.Since(fromStart))
+
+ if !lexer.SkipByte(separator) {
+ return
+ }
+
+ toStart := lexer.Mark()
+ skipOther()
+ to = unescape(lexer.Since(toStart))
+
+ if !lexer.SkipByte(separator) {
+ return
+ }
+
+ optionsStart := lexer.Mark()
+ lexer.NextBytesFunc(func(b byte) bool { return b == '1' || b == 'g' || b == 'W' })
+ options = lexer.Since(optionsStart)
+
+ ok = true
+ return
+}
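A sketch of the return values, assuming the input starts directly at the 'S', as in the caller above:

	p := NewMkLexer("S,from,to,g}", nil)
	ok, isRegex, from, to, options := p.varUseModifierSubst('}')
	// ok == true, isRegex == false (the modifier starts with 'S', not 'C'),
	// from == "from", to == "to", options == "g";
	// the lexer now rests on the closing "}".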
+
+// varUseModifierAt parses a variable modifier like ":@v@echo ${v};@",
+// which expands the variable value in a loop.
+func (p *MkLexer) varUseModifierAt(lexer *textproc.Lexer, varname string) bool {
+ lexer.Skip(1 /* the initial @ */)
+
+ loopVar := lexer.NextBytesSet(AlnumDot)
+ if loopVar == "" || !lexer.SkipByte('@') {
+ return false
+ }
+
+ re := regcomp(`^([^$@\\]|\\.)+`)
+ for p.VarUse() != nil || lexer.SkipString("$$") || lexer.SkipRegexp(re) {
+ }
+
+ if !lexer.SkipByte('@') && p.line != nil {
+ p.line.Warnf("Modifier ${%s:@%s@...@} is missing the final \"@\".", varname, loopVar)
+ }
+
+ return true
+}
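A sketch of the loop modifier (the names p and use are illustrative):

	p := NewMkLexer("${VAR:@i@${i}@}", nil)
	use := p.VarUse()
	// use describes "VAR" with the single modifier "@i@${i}@".
	// If the final "@" is missing, as in "${VAR:@i@${i}}", a warning is
	// logged, provided a line was passed to NewMkLexer.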
+
+func (p *MkLexer) varUseAlnum() *MkVarUse {
+ lexer := p.lexer
+
+ apparentVarname := textproc.NewLexer(lexer.Rest()[1:]).NextBytesSet(textproc.AlnumU)
+ if apparentVarname == "" {
+ return nil
+ }
+
+ lexer.Skip(2)
+
+ if p.line != nil {
+ if len(apparentVarname) > 1 {
+ p.line.Errorf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.",
+ apparentVarname)
+ p.line.Explain(
+ "Only the first letter after the dollar is the variable name.",
+ "Everything following it is normal text, even if it looks like a variable name to human readers.")
+ } else {
+ p.line.Warnf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.", apparentVarname)
+ p.line.Explain(
+ "In its current form, this variable is parsed as a Make variable.",
+ "For human readers though, $x looks more like a shell variable than a Make variable,",
+ "since Make variables are usually written using braces (BSD-style) or parentheses (GNU-style).")
+ }
+ }
+
+ return &MkVarUse{apparentVarname[:1], nil}
+}
+
+func (p *MkLexer) EOF() bool {
+ return p.lexer.EOF()
+}
+
+func (p *MkLexer) Rest() string {
+ return p.lexer.Rest()
+}
diff --git a/pkgtools/pkglint/files/mklexer_test.go b/pkgtools/pkglint/files/mklexer_test.go
new file mode 100644
index 00000000000..c11adbc1441
--- /dev/null
+++ b/pkgtools/pkglint/files/mklexer_test.go
@@ -0,0 +1,750 @@
+package pkglint
+
+import "gopkg.in/check.v1"
+
+func (s *Suite) Test_MkLexer_MkTokens(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ testRest := func(input string, expectedTokens []*MkToken, expectedRest string) {
+ line := t.NewLines("Test_MkLexer_MkTokens.mk", input).Lines[0]
+ p := NewMkLexer(input, line)
+ actualTokens, rest := p.MkTokens()
+ t.CheckDeepEquals(actualTokens, expectedTokens)
+ for i, expectedToken := range expectedTokens {
+ if i < len(actualTokens) {
+ t.CheckDeepEquals(*actualTokens[i], *expectedToken)
+ t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
+ }
+ }
+ t.CheckEquals(rest, expectedRest)
+ }
+ test := func(input string, expectedToken *MkToken) {
+ testRest(input, b.Tokens(expectedToken), "")
+ }
+ literal := b.TextToken
+ varuse := b.VaruseToken
+
+ // Everything except VarUses is passed through unmodified.
+
+ test("literal",
+ literal("literal"))
+
+ test("\\/share\\/ { print \"share directory\" }",
+ literal("\\/share\\/ { print \"share directory\" }"))
+
+ test("find . -name \\*.orig -o -name \\*.pre",
+ literal("find . -name \\*.orig -o -name \\*.pre"))
+
+ test("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'",
+ literal("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'"))
+
+ test("$$var1 $$var2 $$? $$",
+ literal("$$var1 $$var2 $$? $$"))
+
+ testRest("hello, ${W:L:tl}orld",
+ b.Tokens(
+ literal("hello, "),
+ varuse("W", "L", "tl"),
+ literal("orld")),
+ "")
+
+ testRest("ftp://${PKGNAME}/ ${MASTER_SITES:=subdir/}",
+ b.Tokens(
+ literal("ftp://"),
+ varuse("PKGNAME"),
+ literal("/ "),
+ varuse("MASTER_SITES", "=subdir/")),
+ "")
+
+ testRest("${VAR:S,a,b,c,d,e,f}",
+ b.Tokens(b.VaruseTextToken("${VAR:S,a,b,c,d,e,f}", "VAR", "S,a,b,")),
+ "")
+ t.CheckOutputLines(
+ "WARN: Test_MkLexer_MkTokens.mk:1: Invalid variable modifier \"c,d,e,f\" for \"VAR\".")
+
+ testRest("Text${VAR:Mmodifier}${VAR2}more text${VAR3}",
+ b.Tokens(
+ literal("Text"),
+ varuse("VAR", "Mmodifier"),
+ varuse("VAR2"),
+ literal("more text"),
+ varuse("VAR3")),
+ "")
+}
+
+func (s *Suite) Test_MkLexer_VarUse(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+ varuse := b.VaruseToken
+ varuseText := b.VaruseTextToken
+
+ testRest := func(input string, expectedTokens []*MkToken, expectedRest string, diagnostics ...string) {
+ line := t.NewLines("Test_MkLexer_VarUse.mk", input).Lines[0]
+ p := NewMkLexer(input, line)
+
+ actualTokens, rest := p.MkTokens()
+
+ t.CheckDeepEquals(actualTokens, expectedTokens)
+ for i, expectedToken := range expectedTokens {
+ if i < len(actualTokens) {
+ t.CheckDeepEquals(*actualTokens[i], *expectedToken)
+ t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
+ }
+ }
+ t.CheckEquals(rest, expectedRest)
+ t.CheckOutput(diagnostics)
+ }
+ test := func(input string, expectedToken *MkToken, diagnostics ...string) {
+ testRest(input, b.Tokens(expectedToken), "", diagnostics...)
+ }
+
+ t.Use(testRest, test, varuse, varuseText)
+
+ test("${VARIABLE}",
+ varuse("VARIABLE"))
+
+ test("${VARIABLE.param}",
+ varuse("VARIABLE.param"))
+
+ test("${VARIABLE.${param}}",
+ varuse("VARIABLE.${param}"))
+
+ test("${VARIABLE.hicolor-icon-theme}",
+ varuse("VARIABLE.hicolor-icon-theme"))
+
+ test("${VARIABLE.gtk+extra}",
+ varuse("VARIABLE.gtk+extra"))
+
+ test("${VARIABLE:S/old/new/}",
+ varuse("VARIABLE", "S/old/new/"))
+
+ test("${GNUSTEP_LFLAGS:S/-L//g}",
+ varuse("GNUSTEP_LFLAGS", "S/-L//g"))
+
+ test("${SUSE_VERSION:S/.//}",
+ varuse("SUSE_VERSION", "S/.//"))
+
+ test("${MASTER_SITE_GNOME:=sources/alacarte/0.13/}",
+ varuse("MASTER_SITE_GNOME", "=sources/alacarte/0.13/"))
+
+ test("${INCLUDE_DIRS:H:T}",
+ varuse("INCLUDE_DIRS", "H", "T"))
+
+ test("${A.${B.${C.${D}}}}",
+ varuse("A.${B.${C.${D}}}"))
+
+ test("${RUBY_VERSION:C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/}",
+ varuse("RUBY_VERSION", "C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/"))
+
+ test("${PERL5_${_var_}:Q}",
+ varuse("PERL5_${_var_}", "Q"))
+
+ test("${PKGNAME_REQD:C/(^.*-|^)py([0-9][0-9])-.*/\\2/}",
+ varuse("PKGNAME_REQD", "C/(^.*-|^)py([0-9][0-9])-.*/\\2/"))
+
+ test("${PYLIB:S|/|\\\\/|g}",
+ varuse("PYLIB", "S|/|\\\\/|g"))
+
+ test("${PKGNAME_REQD:C/ruby([0-9][0-9]+)-.*/\\1/}",
+ varuse("PKGNAME_REQD", "C/ruby([0-9][0-9]+)-.*/\\1/"))
+
+ test("${RUBY_SHLIBALIAS:S/\\//\\\\\\//}",
+ varuse("RUBY_SHLIBALIAS", "S/\\//\\\\\\//"))
+
+ test("${RUBY_VER_MAP.${RUBY_VER}:U${RUBY_VER}}",
+ varuse("RUBY_VER_MAP.${RUBY_VER}", "U${RUBY_VER}"))
+
+ test("${RUBY_VER_MAP.${RUBY_VER}:U18}",
+ varuse("RUBY_VER_MAP.${RUBY_VER}", "U18"))
+
+ test("${CONFIGURE_ARGS:S/ENABLE_OSS=no/ENABLE_OSS=yes/g}",
+ varuse("CONFIGURE_ARGS", "S/ENABLE_OSS=no/ENABLE_OSS=yes/g"))
+
+ test("${PLIST_RUBY_DIRS:S,DIR=\"PREFIX/,DIR=\",}",
+ varuse("PLIST_RUBY_DIRS", "S,DIR=\"PREFIX/,DIR=\","))
+
+ test("${LDFLAGS:S/-Wl,//g:Q}",
+ varuse("LDFLAGS", "S/-Wl,//g", "Q"))
+
+ test("${_PERL5_REAL_PACKLIST:S/^/${DESTDIR}/}",
+ varuse("_PERL5_REAL_PACKLIST", "S/^/${DESTDIR}/"))
+
+ test("${_PYTHON_VERSION:C/^([0-9])/\\1./1}",
+ varuse("_PYTHON_VERSION", "C/^([0-9])/\\1./1"))
+
+ test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/}",
+ varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/"))
+
+ test("${PKGNAME:C/-[0-9].*$/-[0-9]*/}",
+ varuse("PKGNAME", "C/-[0-9].*$/-[0-9]*/"))
+
+ // TODO: Does the $@ refer to ${.TARGET}, or is it just an unmatchable
+ // regular expression?
+ test("${PKGNAME:C/$@/target?/}",
+ varuse("PKGNAME", "C/$@/target?/"))
+
+ test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/:C/-[0-9].*$/-[0-9]*/}",
+ varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/", "C/-[0-9].*$/-[0-9]*/"))
+
+ test("${_PERL5_VARS:tl:S/^/-V:/}",
+ varuse("_PERL5_VARS", "tl", "S/^/-V:/"))
+
+ test("${_PERL5_VARS_OUT:M${_var_:tl}=*:S/^${_var_:tl}=${_PERL5_PREFIX:=/}//}",
+ varuse("_PERL5_VARS_OUT", "M${_var_:tl}=*", "S/^${_var_:tl}=${_PERL5_PREFIX:=/}//"))
+
+ test("${RUBY${RUBY_VER}_PATCHLEVEL}",
+ varuse("RUBY${RUBY_VER}_PATCHLEVEL"))
+
+ test("${DISTFILES:M*.gem}",
+ varuse("DISTFILES", "M*.gem"))
+
+ test("${LOCALBASE:S^/^_^}",
+ varuse("LOCALBASE", "S^/^_^"))
+
+ test("${SOURCES:%.c=%.o}",
+ varuse("SOURCES", "%.c=%.o"))
+
+ test("${GIT_TEMPLATES:@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @:M*}",
+ varuse("GIT_TEMPLATES", "@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @", "M*"))
+
+ test("${DISTNAME:C:_:-:}",
+ varuse("DISTNAME", "C:_:-:"))
+
+ test("${CF_FILES:H:O:u:S@^@${PKG_SYSCONFDIR}/@}",
+ varuse("CF_FILES", "H", "O", "u", "S@^@${PKG_SYSCONFDIR}/@"))
+
+ test("${ALT_GCC_RTS:S%${LOCALBASE}%%:S%/%%}",
+ varuse("ALT_GCC_RTS", "S%${LOCALBASE}%%", "S%/%%"))
+
+ test("${PREFIX:C;///*;/;g:C;/$;;}",
+ varuse("PREFIX", "C;///*;/;g", "C;/$;;"))
+
+ test("${GZIP_CMD:[1]:Q}",
+ varuse("GZIP_CMD", "[1]", "Q"))
+
+ test("${RUBY_RAILS_SUPPORTED:[#]}",
+ varuse("RUBY_RAILS_SUPPORTED", "[#]"))
+
+ test("${GZIP_CMD:[asdf]:Q}",
+ varuseText("${GZIP_CMD:[asdf]:Q}", "GZIP_CMD", "Q"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid variable modifier \"[asdf]\" for \"GZIP_CMD\".")
+
+ test("${DISTNAME:C/-[0-9]+$$//:C/_/-/}",
+ varuse("DISTNAME", "C/-[0-9]+$$//", "C/_/-/"))
+
+ test("${DISTNAME:slang%=slang2%}",
+ varuse("DISTNAME", "slang%=slang2%"))
+
+ test("${OSMAP_SUBSTVARS:@v@-e 's,\\@${v}\\@,${${v}},g' @}",
+ varuse("OSMAP_SUBSTVARS", "@v@-e 's,\\@${v}\\@,${${v}},g' @"))
+
+ test("${BRANDELF:D${BRANDELF} -t Linux ${LINUX_LDCONFIG}:U${TRUE}}",
+ varuse("BRANDELF", "D${BRANDELF} -t Linux ${LINUX_LDCONFIG}", "U${TRUE}"))
+
+ test("${${_var_}.*}",
+ varuse("${_var_}.*"))
+
+ test("${OPTIONS:@opt@printf 'Option %s is selected\n' ${opt:Q}';@}",
+ varuse("OPTIONS", "@opt@printf 'Option %s is selected\n' ${opt:Q}';@"))
+
+ /* weird features */
+ test("${${EMACS_VERSION_MAJOR}>22:?@comment :}",
+ varuse("${EMACS_VERSION_MAJOR}>22", "?@comment :"))
+
+ test("${empty(CFLAGS):?:-cflags ${CFLAGS:Q}}",
+ varuse("empty(CFLAGS)", "?:-cflags ${CFLAGS:Q}"))
+
+ test("${${PKGSRC_COMPILER}==gcc:?gcc:cc}",
+ varuse("${PKGSRC_COMPILER}==gcc", "?gcc:cc"))
+
+ test("${${XKBBASE}/xkbcomp:L:Q}",
+ varuse("${XKBBASE}/xkbcomp", "L", "Q"))
+
+ test("${${PKGBASE} ${PKGVERSION}:L}",
+ varuse("${PKGBASE} ${PKGVERSION}", "L"))
+
+ // The variable name is optional; the variable with the empty name always
+ // evaluates to the empty string. Bmake actively prevents this variable from
+ // ever being defined. Therefore the :U branch is always taken, and this
+ // in turn is used to implement the variables from the .for loops.
+ test("${:U}",
+ varuse("", "U"))
+
+ test("${:Ufixed value}",
+ varuse("", "Ufixed value"))
+
+ // This complicated expression returns the major.minor.patch version
+ // of the package given in ${d}.
+ //
+ // The :L modifier interprets the variable name not as a variable name
+ // but takes it as the variable value. Followed by the :sh modifier,
+ // this combination evaluates to the output of pkg_info.
+ //
+ // In this output, all non-digit characters are replaced with spaces so
+ // that the remaining value is a space-separated list of version parts.
+ // From these parts, the first 3 are taken and joined using a dot as separator.
+ test("${${${PKG_INFO} -E ${d} || echo:L:sh}:L:C/[^[0-9]]*/ /g:[1..3]:ts.}",
+ varuse("${${PKG_INFO} -E ${d} || echo:L:sh}", "L", "C/[^[0-9]]*/ /g", "[1..3]", "ts."))
+
+ // For :S and :C, the colon can be left out. It's confusing but possible.
+ test("${VAR:S/-//S/.//}",
+ varuseText("${VAR:S/-//S/.//}", "VAR", "S/-//", "S/.//"))
+
+ // The :S and :C modifiers accept an arbitrary character as separator. Here it is "a".
+ test("${VAR:Sahara}",
+ varuse("VAR", "Sahara"))
+
+ // The separator character can be left out, which means empty.
+ test("${VAR:ts}",
+ varuse("VAR", "ts"))
+
+ // The separator character can be a long octal number.
+ test("${VAR:ts\\000012}",
+ varuse("VAR", "ts\\000012"))
+
+ // Or even decimal.
+ test("${VAR:ts\\124}",
+ varuse("VAR", "ts\\124"))
+
+ // The :ts modifier only takes single-character separators.
+ test("${VAR:ts---}",
+ varuse("VAR", "ts---"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid separator \"---\" for :ts modifier of \"VAR\".")
+
+ test("$<",
+ varuseText("$<", "<")) // Same as ${.IMPSRC}
+
+ test("$(GNUSTEP_USER_ROOT)",
+ varuseText("$(GNUSTEP_USER_ROOT)", "GNUSTEP_USER_ROOT"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Please use curly braces {} instead of round parentheses () for GNUSTEP_USER_ROOT.")
+
+ // Opening brace, closing parenthesis.
+ // Warnings are only printed for balanced expressions.
+ test("${VAR)",
+ varuseText("${VAR)", "VAR)"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"VAR)\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \")\" after variable name \"VAR\".")
+
+ // Opening parenthesis, closing brace
+ // Warnings are only printed for balanced expressions.
+ test("$(VAR}",
+ varuseText("$(VAR}", "VAR}"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \")\" for \"VAR}\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \"}\" after variable name \"VAR\".")
+
+ test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}@}",
+ varuse("PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}@"))
+
+ test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
+ varuseText("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
+ "PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}}"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Modifier ${PLIST_SUBST_VARS:@var@...@} is missing the final \"@\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"PLIST_SUBST_VARS\".")
+
+ // The replacement text may include closing braces, which is useful
+ // for AWK programs.
+ test("${PLIST_SUBST_VARS:@var@{${var}}@}",
+ varuseText("${PLIST_SUBST_VARS:@var@{${var}}@}",
+ "PLIST_SUBST_VARS", "@var@{${var}}@"),
+ nil...)
+
+ // Unfinished variable use
+ test("${",
+ varuseText("${", ""),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"\".")
+
+ // Unfinished nested variable use
+ test("${${",
+ varuseText("${${", "${"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"\".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Missing closing \"}\" for \"${\".")
+
+ test("${arbitrary :Mpattern:---:Q}",
+ varuseText("${arbitrary :Mpattern:---:Q}", "arbitrary ", "Mpattern", "Q"),
+		// TODO: Swap the order of these messages
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid variable modifier \"---\" for \"arbitrary \".",
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \" \" after variable name \"arbitrary\".")
+
+ // Variable names containing spaces do not occur in pkgsrc.
+ // Technically they are possible:
+ //
+ // VARNAME= name with spaces
+ // ${VARNAME}= value
+ //
+ // all:
+ // @echo ${name with spaces:Q}''
+ test("${arbitrary text}",
+ varuse("arbitrary text"),
+ "WARN: Test_MkLexer_VarUse.mk:1: Invalid part \" text\" after variable name \"arbitrary\".")
+}
+
+func (s *Suite) Test_MkLexer_VarUse__ambiguous(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ t.SetUpCommandLine("--explain")
+
+ line := t.NewLine("module.mk", 123, "\t$Varname $X")
+ p := NewMkLexer(line.Text[1:], line)
+
+ tokens, rest := p.MkTokens()
+ t.CheckDeepEquals(tokens, b.Tokens(
+ b.VaruseTextToken("$V", "V"),
+ b.TextToken("arname "),
+ b.VaruseTextToken("$X", "X")))
+ t.CheckEquals(rest, "")
+
+ t.CheckOutputLines(
+ "ERROR: module.mk:123: $Varname is ambiguous. Use ${Varname} if you mean a Make variable or $$Varname if you mean a shell variable.",
+ "",
+ "\tOnly the first letter after the dollar is the variable name.",
+ "\tEverything following it is normal text, even if it looks like a",
+ "\tvariable name to human readers.",
+ "",
+ "WARN: module.mk:123: $X is ambiguous. Use ${X} if you mean a Make variable or $$X if you mean a shell variable.",
+ "",
+ "\tIn its current form, this variable is parsed as a Make variable. For",
+ "\thuman readers though, $x looks more like a shell variable than a",
+ "\tMake variable, since Make variables are usually written using braces",
+ "\t(BSD-style) or parentheses (GNU-style).",
+ "")
+}
+
+// Pkglint can replace $(VAR) with ${VAR}. It doesn't look at all components
+// of nested variables though because this case is not important enough to
+// invest much development time. It occurs so seldom that it is acceptable
+// to run pkglint multiple times in such a case.
+func (s *Suite) Test_MkLexer_varUseBrace__autofix_parentheses(c *check.C) {
+ t := s.Init(c)
+
+ test := func() {
+ mklines := t.SetUpFileMkLines("Makefile",
+ MkCvsID,
+ "COMMENT=\t$(P1) $(P2)) $(P3:Q) ${BRACES} $(A.$(B.$(C))) $(A:M\\#)",
+ "P1=\t\t${COMMENT}",
+ "P2=\t\t# nothing",
+ "P3=\t\t# nothing",
+ "BRACES=\t\t# nothing",
+ "C=\t\t# nothing",
+ "A=\t\t# nothing")
+
+ mklines.Check()
+ }
+
+ t.ExpectDiagnosticsAutofix(
+ test,
+
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for P1.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for P2.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for P3.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for C.",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for B.$(C).",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for A.$(B.$(C)).",
+ "WARN: ~/Makefile:2: Please use curly braces {} instead of round parentheses () for A.",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(P1)\" with \"${P1}\".",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(P2)\" with \"${P2}\".",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(P3:Q)\" with \"${P3:Q}\".",
+ "AUTOFIX: ~/Makefile:2: Replacing \"$(C)\" with \"${C}\".")
+}
+
+func (s *Suite) Test_MkLexer_Varname(c *check.C) {
+ t := s.Init(c)
+
+ test := func(text string) {
+ line := t.NewLine("filename.mk", 1, text)
+ p := NewMkLexer(text, line)
+
+ varname := p.Varname()
+
+ t.CheckEquals(varname, text)
+ t.CheckEquals(p.Rest(), "")
+ }
+
+ testRest := func(text string, expectedVarname string, expectedRest string) {
+ line := t.NewLine("filename.mk", 1, text)
+ p := NewMkLexer(text, line)
+
+ varname := p.Varname()
+
+ t.CheckEquals(varname, expectedVarname)
+ t.CheckEquals(p.Rest(), expectedRest)
+ }
+
+ test("VARNAME")
+ test("VARNAME.param")
+ test("VARNAME.${param}")
+ test("SITES_${param}")
+ test("SITES_distfile-1.0.tar.gz")
+ test("SITES.gtk+-2.0")
+ test("PKGPATH.category/package")
+
+ testRest("VARNAME/rest", "VARNAME", "/rest")
+}
+
+func (s *Suite) Test_MkLexer_VarUseModifiers(c *check.C) {
+ t := s.Init(c)
+
+ varUse := NewMkTokenBuilder().VarUse
+ test := func(text string, varUse *MkVarUse, diagnostics ...string) {
+ line := t.NewLine("Makefile", 20, "\t"+text)
+ p := NewMkLexer(text, line)
+
+ actual := p.VarUse()
+
+ t.CheckDeepEquals(actual, varUse)
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutput(diagnostics)
+ }
+
+ // The !command! modifier is used so seldom that pkglint does not
+ // check whether the command is actually valid.
+	// At least not while parsing the modifier, since at this point it might
+	// still be unknown which of the commands can be used and which cannot.
+ test("${VAR:!command!}", varUse("VAR", "!command!"))
+
+ test("${VAR:!command}", varUse("VAR"),
+ "WARN: Makefile:20: Invalid variable modifier \"!command\" for \"VAR\".")
+
+ test("${VAR:command!}", varUse("VAR"),
+ "WARN: Makefile:20: Invalid variable modifier \"command!\" for \"VAR\".")
+
+ // The :L modifier makes the variable value "echo hello", and the :[1]
+ // modifier extracts the "echo".
+ test("${echo hello:L:[1]}", varUse("echo hello", "L", "[1]"))
+
+ // bmake ignores the :[3] modifier, and the :L modifier just returns the
+ // variable name, in this case BUILD_DIRS.
+ test("${BUILD_DIRS:[3]:L}", varUse("BUILD_DIRS", "[3]", "L"))
+
+ test("${PATH:ts::Q}", varUse("PATH", "ts:", "Q"))
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__invalid_ts_modifier_with_warning(c *check.C) {
+ t := s.Init(c)
+
+ t.SetUpCommandLine("-Wall", "--explain")
+ line := t.NewLine("filename.mk", 123, "${VAR:tsabc}")
+ p := NewMkLexer("tsabc}", line)
+
+ modifier := p.varUseModifier("VAR", '}')
+
+ t.CheckEquals(modifier, "tsabc")
+ t.CheckEquals(p.Rest(), "}")
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid separator \"abc\" for :ts modifier of \"VAR\".",
+ "",
+ "\tThe separator for the :ts modifier must be either a single character",
+ "\tor an escape sequence like \\t or \\n or an octal or decimal escape",
+ "\tsequence; see the bmake man page for further details.",
+ "")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__invalid_ts_modifier_without_warning(c *check.C) {
+ t := s.Init(c)
+
+ p := NewMkLexer("tsabc}", nil)
+
+ modifier := p.varUseModifier("VAR", '}')
+
+ t.CheckEquals(modifier, "tsabc")
+ t.CheckEquals(p.Rest(), "}")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__square_bracket(c *check.C) {
+ t := s.Init(c)
+
+ line := t.NewLine("filename.mk", 123, "\t${VAR:[asdf]}")
+ p := NewMkLexer("[asdf]", line)
+
+ modifier := p.varUseModifier("VAR", '}')
+
+ t.CheckEquals(modifier, "")
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"[asdf]\" for \"VAR\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__condition_without_colon(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${${VAR}:?yes:no}${${VAR}:?yes}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse1 := p.VarUse()
+ varUse2 := p.VarUse()
+
+ t.CheckDeepEquals(varUse1, b.VarUse("${VAR}", "?yes:no"))
+ t.CheckDeepEquals(varUse2, b.VarUse("${VAR}"))
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__malformed_in_parentheses(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "$(${VAR}:?yes)")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".",
+ "WARN: filename.mk:123: Please use curly braces {} instead of round parentheses () for ${VAR}.")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifier__varuse_in_malformed_modifier(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${${VAR}:?yes${INNER}}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
+ t.CheckEquals(p.Rest(), "")
+
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"?yes${INNER}\" for \"${VAR}\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierSubst(c *check.C) {
+ t := s.Init(c)
+
+ varUse := NewMkTokenBuilder().VarUse
+ test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
+ line := t.NewLine("Makefile", 20, "\t"+text)
+ p := NewMkLexer(text, line)
+
+ actual := p.VarUse()
+
+ t.CheckDeepEquals(actual, varUse)
+ t.CheckEquals(p.Rest(), rest)
+ t.CheckOutput(diagnostics)
+ }
+
+ test("${VAR:S", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".",
+ "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
+
+ test("${VAR:S}", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".")
+
+ test("${VAR:S,}", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S,\" for \"VAR\".")
+
+ test("${VAR:S,from,to}", varUse("VAR"), "",
+ "WARN: Makefile:20: Invalid variable modifier \"S,from,to\" for \"VAR\".")
+
+ test("${VAR:S,from,to,}", varUse("VAR", "S,from,to,"), "")
+
+ test("${VAR:S,^from$,to,}", varUse("VAR", "S,^from$,to,"), "")
+
+ test("${VAR:S,@F@,${F},}", varUse("VAR", "S,@F@,${F},"), "")
+
+ test("${VAR:S,from,to,1}", varUse("VAR", "S,from,to,1"), "")
+ test("${VAR:S,from,to,g}", varUse("VAR", "S,from,to,g"), "")
+ test("${VAR:S,from,to,W}", varUse("VAR", "S,from,to,W"), "")
+
+ test("${VAR:S,from,to,1gW}", varUse("VAR", "S,from,to,1gW"), "")
+
+ // Inside the :S or :C modifiers, neither a colon nor the closing
+	// brace needs to be escaped. Otherwise these patterns would become
+ // too difficult to read and write.
+ test("${VAR:C/[[:alnum:]]{2}/**/g}",
+ varUse("VAR", "C/[[:alnum:]]{2}/**/g"),
+ "")
+
+ // Some pkgsrc users really explore the darkest corners of bmake by using
+ // the backslash as the separator in the :S modifier. Sure, it works, it
+ // just looks totally unexpected to the average pkgsrc reader.
+ //
+ // Using the backslash as separator means that it cannot be used for anything
+ // else, not even for escaping other characters.
+ test("${VAR:S\\.post1\\\\1}",
+ varUse("VAR", "S\\.post1\\\\1"),
+ "")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt__missing_at_after_variable_name(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${VAR:@varname}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("VAR"))
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutputLines(
+ "WARN: filename.mk:123: Invalid variable modifier \"@varname\" for \"VAR\".")
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt__dollar(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ line := t.NewLine("filename.mk", 123, "${VAR:@var@$$var@}")
+ p := NewMkLexer(line.Text, line)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var@"))
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutputEmpty()
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt__incomplete_without_warning(c *check.C) {
+ t := s.Init(c)
+ b := NewMkTokenBuilder()
+
+ p := NewMkLexer("${VAR:@var@$$var}rest", nil)
+
+ varUse := p.VarUse()
+
+ t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var}rest"))
+ t.CheckEquals(p.Rest(), "")
+ t.CheckOutputEmpty()
+}
+
+func (s *Suite) Test_MkLexer_varUseModifierAt(c *check.C) {
+ t := s.Init(c)
+
+ varUse := NewMkTokenBuilder().VarUse
+ test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
+ line := t.NewLine("Makefile", 20, "\t"+text)
+ p := NewMkLexer(text, line)
+
+ actual := p.VarUse()
+
+ t.CheckDeepEquals(actual, varUse)
+ t.CheckEquals(p.Rest(), rest)
+ t.CheckOutput(diagnostics)
+ }
+
+ test("${VAR:@",
+ varUse("VAR"),
+ "",
+ "WARN: Makefile:20: Invalid variable modifier \"@\" for \"VAR\".",
+ "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
+
+ test("${VAR:@i@${i}}", varUse("VAR", "@i@${i}}"), "",
+ "WARN: Makefile:20: Modifier ${VAR:@i@...@} is missing the final \"@\".",
+ "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
+
+ test("${VAR:@i@${i}@}", varUse("VAR", "@i@${i}@"), "")
+
+ test("${PKG_GROUPS:@g@${g:Q}:${PKG_GID.${g}:Q}@:C/:*$//g}",
+ varUse("PKG_GROUPS", "@g@${g:Q}:${PKG_GID.${g}:Q}@", "C/:*$//g"),
+ "")
+}
diff --git a/pkgtools/pkglint/files/mkline.go b/pkgtools/pkglint/files/mkline.go
index bad8e5c106a..c5ff8e6cd29 100644
--- a/pkgtools/pkglint/files/mkline.go
+++ b/pkgtools/pkglint/files/mkline.go
@@ -330,9 +330,8 @@ func (mkline *MkLine) Tokenize(text string, warn bool) []*MkToken {
if warn {
line = mkline.Line
}
- p := NewMkParser(line, text)
- tokens = p.MkTokens()
- rest = p.Rest()
+ p := NewMkLexer(text, line)
+ tokens, rest = p.MkTokens()
}
if warn && rest != "" {
@@ -502,9 +501,8 @@ func (mkline *MkLine) ValueTokens() ([]*MkToken, string) {
// No error checking here since all this has already been done when the
// whole line was parsed in MkLineParser.Parse.
- p := NewMkParser(nil, value)
- assign.valueMk = p.MkTokens()
- assign.valueMkRest = p.Rest()
+ p := NewMkLexer(value, nil)
+ assign.valueMk, assign.valueMkRest = p.MkTokens()
return assign.valueMk, assign.valueMkRest
}
@@ -545,7 +543,8 @@ func (mkline *MkLine) Fields() []string {
func (*MkLine) WithoutMakeVariables(value string) string {
var valueNovar strings.Builder
- for _, token := range NewMkParser(nil, value).MkTokens() {
+ tokens, _ := NewMkLexer(value, nil).MkTokens()
+ for _, token := range tokens {
if token.Varuse == nil {
valueNovar.WriteString(token.Text)
}
@@ -567,7 +566,7 @@ func (mkline *MkLine) ResolveVarsInRelativePath(relativePath Path) Path {
tmp := relativePath
if tmp.ContainsText("PKGSRCDIR") {
- pkgsrcdir := relpath(basedir, G.Pkgsrc.File("."))
+ pkgsrcdir := G.Pkgsrc.Relpath(basedir, G.Pkgsrc.File("."))
if G.Testing {
// Relative pkgsrc paths usually only contain two or three levels.
@@ -789,7 +788,8 @@ func (mkline *MkLine) ForEachUsed(action func(varUse *MkVarUse, time VucTime)) {
return
}
- for _, token := range NewMkParser(nil, text).MkTokens() {
+ tokens, _ := NewMkLexer(text, nil).MkTokens()
+ for _, token := range tokens {
if token.Varuse != nil {
searchInVarUse(token.Varuse, time)
}
diff --git a/pkgtools/pkglint/files/mkline_test.go b/pkgtools/pkglint/files/mkline_test.go
index b913f7ed26b..7f9c2a7a8fd 100644
--- a/pkgtools/pkglint/files/mkline_test.go
+++ b/pkgtools/pkglint/files/mkline_test.go
@@ -282,7 +282,9 @@ func (s *Suite) Test_MkLine_ValueFields__compared_to_splitIntoShellTokens(c *che
url := "http://registry.gimp.org/file/fix-ca.c?action=download&id=9884&file="
mkline := t.NewMkLine("filename.mk", 123, "MASTER_SITES=\t"+url)
- words, rest := splitIntoShellTokens(dummyLine, url) // Doesn't really make sense
+ // Splitting a URL into shell tokens doesn't really make sense,
+	// but it is done here anyway.
+ words, rest := splitIntoShellTokens(mkline.Line, url)
t.CheckDeepEquals(words, []string{
"http://registry.gimp.org/file/fix-ca.c?action=download",
diff --git a/pkgtools/pkglint/files/mklinechecker.go b/pkgtools/pkglint/files/mklinechecker.go
index 1243658f342..0a6152d7c6f 100644
--- a/pkgtools/pkglint/files/mklinechecker.go
+++ b/pkgtools/pkglint/files/mklinechecker.go
@@ -387,7 +387,7 @@ func (ck MkLineChecker) checkTextVarUse(text string, vartype *Vartype, time VucT
defer trace.Call(vartype, time)()
}
- tokens := NewMkParser(nil, text).MkTokens()
+ tokens, _ := NewMkLexer(text, nil).MkTokens()
for i, token := range tokens {
if token.Varuse != nil {
spaceLeft := i-1 < 0 || matches(tokens[i-1].Text, `[\t ]$`)
@@ -814,7 +814,6 @@ func (ck MkLineChecker) checkVarUseQuoting(varUse *MkVarUse, vartype *Vartype, v
fix.Explain(
seeGuide("Echoing a string exactly as-is", "echo-literal"))
fix.Replace("${"+varname+mod+"}", "${"+varname+correctMod+"}")
- fix.Anyway()
fix.Apply()
} else {
mkline.Warnf("Please use ${%s%s} instead of ${%s%s} and make sure"+
@@ -1103,7 +1102,6 @@ func (ck MkLineChecker) checkVarassignRightCategory() {
if len(categories) > 1 && categories[1] == expected {
fix.Replace(categories[0]+" "+categories[1], categories[1]+" "+categories[0])
}
- fix.Anyway()
fix.Apply()
}
@@ -1341,7 +1339,7 @@ func (ck MkLineChecker) checkInclude() {
case includedFile.HasSuffixPath("intltool/buildlink3.mk"):
mkline.Warnf("Please write \"USE_TOOLS+= intltool\" instead of this line.")
- case includedFile.HasSuffixText("/builtin.mk"): // TODO: maybe HasSuffixPath
+ case includedFile != "builtin.mk" && includedFile.HasSuffixPath("builtin.mk"):
if mkline.Basename != "hacks.mk" && !mkline.HasRationale() {
fix := mkline.Autofix()
fix.Errorf("%s must not be included directly. Include \"%s/buildlink3.mk\" instead.", includedFile, includedFile.Dir())
@@ -1671,7 +1669,6 @@ func (ck MkLineChecker) simplifyCondition(varuse *MkVarUse, fromEmpty bool, notE
"An entirely different case is when the pattern contains wildcards like ^, *, $.",
"In such a case, using the :M or :N modifiers is useful and preferred.")
fix.Replace(replace(varname, positive, pattern))
- fix.Anyway()
fix.Apply()
}
}
@@ -1741,7 +1738,6 @@ func (ck MkLineChecker) checkCompareVarStrCompiler(op string, value string) {
"Therefore, comparing it using == or != leads to wrong results in these cases.")
fix.Replace("${PKGSRC_COMPILER} "+op+" "+value, "${PKGSRC_COMPILER:"+matchOp+value+"}")
fix.Replace("${PKGSRC_COMPILER} "+op+" \""+value+"\"", "${PKGSRC_COMPILER:"+matchOp+value+"}")
- fix.Anyway()
fix.Apply()
}
@@ -1807,10 +1803,10 @@ func (ck MkLineChecker) checkDependencyRule(allowedTargets map[string]bool) {
}
func (ck MkLineChecker) checkDependencyTarget(target string, allowedTargets map[string]bool) {
- if target == ".PHONY" ||
- target == ".ORDER" ||
- NewMkParser(nil, target).VarUse() != nil ||
- allowedTargets[target] {
+ if target == ".PHONY" || target == ".ORDER" || allowedTargets[target] {
+ return
+ }
+ if NewMkLexer(target, nil).VarUse() != nil {
return
}
diff --git a/pkgtools/pkglint/files/mklinechecker_test.go b/pkgtools/pkglint/files/mklinechecker_test.go
index 28382422a75..43c123a3727 100644
--- a/pkgtools/pkglint/files/mklinechecker_test.go
+++ b/pkgtools/pkglint/files/mklinechecker_test.go
@@ -57,7 +57,7 @@ func (s *Suite) Test_MkLineChecker_Check__buildlink3_include_prefs(c *check.C) {
// relative path fails since that depends on the actual file system,
// not on syntactical paths; see os.Stat in CheckRelativePath.
//
- // TODO: Refactor relpath to be independent of a filesystem.
+ // TODO: Refactor Relpath to be independent of a filesystem.
mklines.Check()
@@ -102,7 +102,9 @@ func (s *Suite) Test_MkLineChecker_Check__varuse_modifier_L(c *check.C) {
t.CheckOutputLines(
"WARN: x11/xkeyboard-config/Makefile:3: "+
"Invalid part \"/xkbcomp\" after variable name \"${XKBBASE}\".",
- // TODO: Avoid this duplicate diagnostic.
+ // TODO: Avoid these duplicate diagnostics.
+ "WARN: x11/xkeyboard-config/Makefile:3: "+
+ "Invalid part \"/xkbcomp\" after variable name \"${XKBBASE}\".",
"WARN: x11/xkeyboard-config/Makefile:3: "+
"Invalid part \"/xkbcomp\" after variable name \"${XKBBASE}\".",
"WARN: x11/xkeyboard-config/Makefile:3: XKBBASE is used but not defined.")
@@ -125,6 +127,7 @@ func (s *Suite) Test_MkLineChecker_checkEmptyContinuation(c *check.C) {
mklines.Check()
t.CheckOutputLines(
+ "NOTE: ~/filename.mk:2--3: Trailing whitespace.",
"WARN: ~/filename.mk:3: This line looks empty but continues the previous line.")
}
@@ -2240,6 +2243,22 @@ func (s *Suite) Test_MkLineChecker_checkInclude__builtin_mk(c *check.C) {
"Include \"../../category/package/buildlink3.mk\" instead.")
}
+func (s *Suite) Test_MkLineChecker_checkInclude__buildlink3_mk_includes_builtin_mk(c *check.C) {
+ t := s.Init(c)
+
+ t.SetUpPkgsrc()
+ mklines := t.SetUpFileMkLines("category/package/buildlink3.mk",
+ MkCvsID,
+ ".include \"builtin.mk\"")
+ t.CreateFileLines("category/package/builtin.mk",
+ MkCvsID)
+ t.FinishSetUp()
+
+ mklines.Check()
+
+ t.CheckOutputEmpty()
+}
+
func (s *Suite) Test_MkLineChecker_checkInclude__builtin_mk_rationale(c *check.C) {
t := s.Init(c)
diff --git a/pkgtools/pkglint/files/mklineparser.go b/pkgtools/pkglint/files/mklineparser.go
index 94c911550f6..92a7b13faa1 100644
--- a/pkgtools/pkglint/files/mklineparser.go
+++ b/pkgtools/pkglint/files/mklineparser.go
@@ -85,6 +85,7 @@ func (p MkLineParser) parseVarassign(line *Line, text string, splitResult mkLine
fix.Notef("Unnecessary space after variable name %q.", varname)
fix.Replace(varname+a.spaceAfterVarname+op.String(), varname+op.String())
fix.Apply()
+ // FIXME: Preserve the alignment of the variable value.
}
}
@@ -299,7 +300,7 @@ func (MkLineParser) split(line *Line, text string, trimComment bool) mkLineSplit
mainWithSpaces = text
}
- parser := NewMkParser(line, mainWithSpaces)
+ parser := NewMkLexer(mainWithSpaces, line)
lexer := parser.lexer
parseOther := func() string {
diff --git a/pkgtools/pkglint/files/mklines.go b/pkgtools/pkglint/files/mklines.go
index ad518ff4cdc..4a33353c879 100644
--- a/pkgtools/pkglint/files/mklines.go
+++ b/pkgtools/pkglint/files/mklines.go
@@ -167,7 +167,7 @@ func (mklines *MkLines) collectDocumentedVariables() {
commentLines++
- parser := NewMkParser(nil, words[1])
+ parser := NewMkLexer(words[1], nil)
varname := parser.Varname()
if len(varname) < 3 {
break
diff --git a/pkgtools/pkglint/files/mkparser.go b/pkgtools/pkglint/files/mkparser.go
index 4305248f094..4d46a7a9b7f 100644
--- a/pkgtools/pkglint/files/mkparser.go
+++ b/pkgtools/pkglint/files/mkparser.go
@@ -1,7 +1,6 @@
package pkglint
import (
- "netbsd.org/pkglint/regex"
"netbsd.org/pkglint/textproc"
"strings"
)
@@ -10,6 +9,7 @@ import (
// things related to Makefiles.
type MkParser struct {
Line *Line
+ mklex *MkLexer
lexer *textproc.Lexer
EmitWarnings bool
}
@@ -31,387 +31,13 @@ func (p *MkParser) Rest() string {
// which means the # is a normal character and does not introduce a Makefile comment.
// For VarUse, this distinction is irrelevant.
func NewMkParser(line *Line, text string) *MkParser {
- return &MkParser{line, textproc.NewLexer(text), line != nil}
-}
-
-// MkTokens splits a text like in the following example:
-// Text${VAR:Mmodifier}${VAR2}more text${VAR3}
-// into tokens like these:
-// Text
-// ${VAR:Mmodifier}
-// ${VAR2}
-// more text
-// ${VAR3}
-func (p *MkParser) MkTokens() []*MkToken {
- lexer := p.lexer
-
- var tokens []*MkToken
- for !p.EOF() {
- mark := lexer.Mark()
- if varuse := p.VarUse(); varuse != nil {
- tokens = append(tokens, &MkToken{Text: lexer.Since(mark), Varuse: varuse})
- continue
- }
-
- for lexer.NextBytesFunc(func(b byte) bool { return b != '$' }) != "" || lexer.SkipString("$$") {
- }
- text := lexer.Since(mark)
- if text != "" {
- tokens = append(tokens, &MkToken{Text: text})
- continue
- }
-
- break
- }
- return tokens
-}
-
-func (p *MkParser) VarUse() *MkVarUse {
- rest := p.lexer.Rest()
- if len(rest) < 2 || rest[0] != '$' {
- return nil
- }
-
- switch rest[1] {
- case '{', '(':
- return p.varUseBrace(rest[1] == '(')
-
- case '$':
- // This is an escaped dollar character and not a variable use.
- return nil
-
- case '@', '<', ' ':
- // These variable names are known to exist.
- //
- // Many others are also possible but not used in practice.
- // In particular, when parsing the :C or :S modifier,
- // the $ must not be interpreted as a variable name,
- // even when it looks like $/ could refer to the "/" variable.
- //
- // TODO: Find out whether $" is a variable use when it appears in the :M modifier.
- p.lexer.Skip(2)
- return &MkVarUse{rest[1:2], nil}
-
- default:
- return p.varUseAlnum()
- }
-}
-
-// varUseBrace parses:
-// ${VAR}
-// ${arbitrary text:L}
-// ${variable with invalid chars}
-// $(PARENTHESES)
-// ${VAR:Mpattern:C,:,colon,g:Q:Q:Q}
-func (p *MkParser) varUseBrace(usingRoundParen bool) *MkVarUse {
- lexer := p.lexer
-
- beforeDollar := lexer.Mark()
- lexer.Skip(2)
-
- closing := byte('}')
- if usingRoundParen {
- closing = ')'
- }
-
- beforeVarname := lexer.Mark()
- varname := p.Varname()
- p.varUseText(closing)
- varExpr := lexer.Since(beforeVarname)
-
- modifiers := p.VarUseModifiers(varExpr, closing)
-
- closed := lexer.SkipByte(closing)
-
- if p.EmitWarnings {
- if !closed {
- p.Line.Warnf("Missing closing %q for %q.", string(rune(closing)), varExpr)
- }
-
- if usingRoundParen && closed {
- parenVaruse := lexer.Since(beforeDollar)
- edit := []byte(parenVaruse)
- edit[1] = '{'
- edit[len(edit)-1] = '}'
- bracesVaruse := string(edit)
-
- fix := p.Line.Autofix()
- fix.Warnf("Please use curly braces {} instead of round parentheses () for %s.", varExpr)
- fix.Replace(parenVaruse, bracesVaruse)
- fix.Apply()
- }
-
- if len(varExpr) > len(varname) && !(&MkVarUse{varExpr, modifiers}).IsExpression() {
- p.Line.Warnf("Invalid part %q after variable name %q.", varExpr[len(varname):], varname)
- }
- }
-
- return &MkVarUse{varExpr, modifiers}
-}
-
-func (p *MkParser) varUseAlnum() *MkVarUse {
- lexer := p.lexer
-
- apparentVarname := textproc.NewLexer(lexer.Rest()[1:]).NextBytesSet(textproc.AlnumU)
- if apparentVarname == "" {
- return nil
- }
-
- lexer.Skip(2)
-
- if p.EmitWarnings {
- if len(apparentVarname) > 1 {
- p.Line.Errorf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.",
- apparentVarname)
- p.Line.Explain(
- "Only the first letter after the dollar is the variable name.",
- "Everything following it is normal text, even if it looks like a variable name to human readers.")
- } else {
- p.Line.Warnf("$%[1]s is ambiguous. Use ${%[1]s} if you mean a Make variable or $$%[1]s if you mean a shell variable.", apparentVarname)
- p.Line.Explain(
- "In its current form, this variable is parsed as a Make variable.",
- "For human readers though, $x looks more like a shell variable than a Make variable,",
- "since Make variables are usually written using braces (BSD-style) or parentheses (GNU-style).")
- }
- }
-
- return &MkVarUse{apparentVarname[:1], nil}
-}
-
-// VarUseModifiers parses the modifiers of a variable being used, such as :Q, :Mpattern.
-//
-// See the bmake manual page.
-func (p *MkParser) VarUseModifiers(varname string, closing byte) []MkVarUseModifier {
- lexer := p.lexer
-
- var modifiers []MkVarUseModifier
- // The :S and :C modifiers may be chained without using the : as separator.
- mayOmitColon := false
-
- for lexer.SkipByte(':') || mayOmitColon {
- modifier := p.varUseModifier(varname, closing)
- if modifier != "" {
- modifiers = append(modifiers, MkVarUseModifier{modifier})
- }
- mayOmitColon = modifier != "" && (modifier[0] == 'S' || modifier[0] == 'C')
- }
- return modifiers
-}
-
-// varUseModifier parses a single variable modifier such as :Q or :S,from,to,.
-// The actual parsing starts after the leading colon.
-func (p *MkParser) varUseModifier(varname string, closing byte) string {
- lexer := p.lexer
- mark := lexer.Mark()
-
- switch lexer.PeekByte() {
- case 'E', 'H', 'L', 'O', 'Q', 'R', 'T', 's', 't', 'u':
- mod := lexer.NextBytesSet(textproc.Alnum)
-
- switch mod {
- case
- "E", // Extension, e.g. path/file.suffix => suffix
- "H", // Head, e.g. dir/subdir/file.suffix => dir/subdir
- "L", // XXX: Shouldn't this be handled specially?
- "O", // Order alphabetically
- "Ox", // Shuffle
- "Q", // Quote shell meta-characters
- "R", // Strip the file suffix, e.g. path/file.suffix => file
- "T", // Basename, e.g. path/file.suffix => file.suffix
- "sh", // Evaluate the variable value as shell command
- "tA", // Try to convert to absolute path
- "tW", // Causes the value to be treated as a single word
- "tl", // To lowercase
- "tu", // To uppercase
- "tw", // Causes the value to be treated as list of words
- "u": // Remove adjacent duplicate words (like uniq(1))
- return mod
- }
-
- if hasPrefix(mod, "ts") {
- // See devel/bmake/files/var.c:/case 't'
- sep := mod[2:] + p.varUseText(closing)
- switch {
- case sep == "":
- lexer.SkipString(":")
- case len(sep) == 1:
- break
- case matches(sep, `^\\\d+`):
- break
- default:
- if p.EmitWarnings {
- p.Line.Warnf("Invalid separator %q for :ts modifier of %q.", sep, varname)
- p.Line.Explain(
- "The separator for the :ts modifier must be either a single character",
- "or an escape sequence like \\t or \\n or an octal or decimal escape",
- "sequence; see the bmake man page for further details.")
- }
- }
- return lexer.Since(mark)
- }
-
- case '=', 'D', 'M', 'N', 'U':
- lexer.Skip(1)
- re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:\\}]|\$\$|\\.)+`, `^([^$:\\)]|\$\$|\\.)+`)))
- for p.VarUse() != nil || lexer.SkipRegexp(re) {
- }
- arg := lexer.Since(mark)
- return strings.Replace(arg, "\\:", ":", -1)
-
- case 'C', 'S':
- if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok {
- return lexer.Since(mark)
- }
-
- case '@':
- if p.varUseModifierAt(lexer, varname) {
- return lexer.Since(mark)
- }
-
- case '[':
- if lexer.SkipRegexp(regcomp(`^\[(?:[-.\d]+|#)\]`)) {
- return lexer.Since(mark)
- }
-
- case '?':
- lexer.Skip(1)
- p.varUseText(closing)
- if lexer.SkipByte(':') {
- p.varUseText(closing)
- return lexer.Since(mark)
- }
- }
-
- lexer.Reset(mark)
-
- re := regcomp(regex.Pattern(condStr(closing == '}', `^([^:$}]|\$\$)+`, `^([^:$)]|\$\$)+`)))
- for p.VarUse() != nil || lexer.SkipRegexp(re) {
- }
- modifier := lexer.Since(mark)
-
- // ${SOURCES:%.c=%.o} or ${:!uname -a!:[2]}
- if contains(modifier, "=") || (hasPrefix(modifier, "!") && hasSuffix(modifier, "!")) {
- return modifier
- }
-
- if p.EmitWarnings && modifier != "" {
- p.Line.Warnf("Invalid variable modifier %q for %q.", modifier, varname)
- }
-
- return ""
-}
-
-// varUseText parses any text up to the next colon or closing mark.
-// Nested variable uses are parsed as well.
-//
-// This is used for the :L and :? modifiers since they accept arbitrary
-// text as the "variable name" and effectively interpret it as the variable
-// value instead.
-func (p *MkParser) varUseText(closing byte) string {
- lexer := p.lexer
- start := lexer.Mark()
- re := regcomp(regex.Pattern(condStr(closing == '}', `^([^$:}]|\$\$)+`, `^([^$:)]|\$\$)+`)))
- for p.VarUse() != nil || lexer.SkipRegexp(re) {
- }
- return lexer.Since(start)
-}
-
-// varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier.
-func (p *MkParser) varUseModifierSubst(closing byte) (ok bool, regex bool, from string, to string, options string) {
- lexer := p.lexer
- regex = lexer.PeekByte() == 'C'
- lexer.Skip(1 /* the initial S or C */)
-
- sep := lexer.PeekByte() // bmake allows _any_ separator, even letters.
- if sep == -1 || byte(sep) == closing {
- return
- }
-
- lexer.Skip(1)
- separator := byte(sep)
-
- unescape := func(s string) string {
- return strings.Replace(s, "\\"+string(separator), string(separator), -1)
- }
-
- isOther := func(b byte) bool {
- return b != separator && b != '$' && b != '\\'
- }
-
- skipOther := func() {
- for {
- switch {
-
- case p.VarUse() != nil:
- break
-
- case lexer.SkipString("$$"):
- break
-
- case len(lexer.Rest()) >= 2 && lexer.PeekByte() == '\\' && separator != '\\':
- _ = lexer.Skip(2)
-
- case lexer.NextBytesFunc(isOther) != "":
- break
-
- default:
- return
- }
- }
- }
-
- fromStart := lexer.Mark()
- lexer.SkipByte('^')
- skipOther()
- lexer.SkipByte('$')
- from = unescape(lexer.Since(fromStart))
-
- if !lexer.SkipByte(separator) {
- return
- }
-
- toStart := lexer.Mark()
- skipOther()
- to = unescape(lexer.Since(toStart))
-
- if !lexer.SkipByte(separator) {
- return
- }
-
- optionsStart := lexer.Mark()
- lexer.NextBytesFunc(func(b byte) bool { return b == '1' || b == 'g' || b == 'W' })
- options = lexer.Since(optionsStart)
-
- ok = true
- return
-}
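As a quick illustration of the tuple returned here, this is how MkVarUseModifier.MatchSubst (which delegates to this function, see mktypes.go below) decomposes a plain substitution. A sketch only, not part of the patch:

	ok, isRegex, from, to, options := MkVarUseModifier{"S,from,to,g"}.MatchSubst()
	// ok == true, isRegex == false (an :S modifier, not :C),
	// from == "from", to == "to", options == "g"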
-
-// varUseModifierAt parses a variable modifier like ":@v@echo ${v};@",
-// which expands the variable value in a loop.
-func (p *MkParser) varUseModifierAt(lexer *textproc.Lexer, varname string) bool {
- lexer.Skip(1 /* the initial @ */)
-
- loopVar := lexer.NextBytesSet(AlnumDot)
- if loopVar == "" || !lexer.SkipByte('@') {
- return false
- }
-
- re := regcomp(`^([^$@\\]|\\.)+`)
- for p.VarUse() != nil || lexer.SkipString("$$") || lexer.SkipRegexp(re) {
- }
-
- if !lexer.SkipByte('@') && p.EmitWarnings {
- p.Line.Warnf("Modifier ${%s:@%s@...@} is missing the final \"@\".", varname, loopVar)
- }
-
- return true
+ mklex := NewMkLexer(text, line)
+ return &MkParser{line, mklex, mklex.lexer, line != nil}
}
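For orientation, a rough sketch of the new division of labor; the identifiers are taken from the surrounding hunks, everything else is illustrative:

	p := NewMkParser(line, text)   // still parses conditions, dependencies, operators
	use := p.mklex.VarUse()        // token-level parsing is delegated to the new MkLexer
	tokens, rest := NewMkLexer(text, line).MkTokens()  // the unparsed rest is now returned explicitly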
// MkCond parses a condition like ${OPSYS} == "NetBSD".
//
// See devel/bmake/files/cond.c.
-//
-// FIXME: Move over to MkTokensParser
func (p *MkParser) MkCond() *MkCond {
and := p.mkCondAnd()
if and == nil {
@@ -556,7 +182,7 @@ func (p *MkParser) mkCondCompare() *MkCond {
func (p *MkParser) mkCondTerm() *MkCondTerm {
lexer := p.lexer
- if rhs := p.VarUse(); rhs != nil {
+ if rhs := p.mklex.VarUse(); rhs != nil {
return &MkCondTerm{Var: rhs}
}
@@ -566,7 +192,7 @@ func (p *MkParser) mkCondTerm() *MkCondTerm {
mark := lexer.Mark()
lexer.Skip(1)
- if quotedRHS := p.VarUse(); quotedRHS != nil {
+ if quotedRHS := p.mklex.VarUse(); quotedRHS != nil {
if lexer.SkipByte('"') {
return &MkCondTerm{Var: quotedRHS}
}
@@ -579,7 +205,7 @@ loop:
for {
m := lexer.Mark()
switch {
- case p.VarUse() != nil,
+ case p.mklex.VarUse() != nil,
lexer.NextBytesSet(textproc.Alnum) != "",
lexer.NextBytesFunc(func(b byte) bool { return b != '"' && b != '\\' }) != "":
rhsText.WriteString(lexer.Since(m))
@@ -612,14 +238,14 @@ func (p *MkParser) mkCondFunc() *MkCond {
switch funcName {
case "defined":
- varname := p.Varname()
+ varname := p.mklex.Varname()
if varname != "" && lexer.SkipByte(')') {
return &MkCond{Defined: varname}
}
case "empty":
- if varname := p.Varname(); varname != "" {
- modifiers := p.VarUseModifiers(varname, ')')
+ if varname := p.mklex.Varname(); varname != "" {
+ modifiers := p.mklex.VarUseModifiers(varname, ')')
if lexer.SkipByte(')') {
return &MkCond{Empty: &MkVarUse{varname, modifiers}}
}
@@ -631,7 +257,7 @@ func (p *MkParser) mkCondFunc() *MkCond {
case "commands", "exists", "make", "target":
argMark := lexer.Mark()
- for p.VarUse() != nil || lexer.NextBytesFunc(func(b byte) bool { return b != '$' && b != ')' }) != "" {
+ for p.mklex.VarUse() != nil || lexer.NextBytesFunc(func(b byte) bool { return b != '$' && b != ')' }) != "" {
}
arg := lexer.Since(argMark)
if lexer.SkipByte(')') {
@@ -643,21 +269,6 @@ func (p *MkParser) mkCondFunc() *MkCond {
return nil
}
-func (p *MkParser) Varname() string {
- lexer := p.lexer
-
- // TODO: duplicated code in MatchVarassign
- mark := lexer.Mark()
- lexer.SkipByte('.')
- for lexer.NextBytesSet(VarbaseBytes) != "" || p.VarUse() != nil {
- }
- if lexer.SkipByte('.') || hasPrefix(lexer.Since(mark), "SITES_") {
- for lexer.NextBytesSet(VarparamBytes) != "" || p.VarUse() != nil {
- }
- }
- return lexer.Since(mark)
-}
-
func (p *MkParser) Op() (bool, MkOperator) {
lexer := p.lexer
switch {
@@ -681,7 +292,7 @@ func (p *MkParser) PkgbasePattern() string {
start := lexer.Mark()
for {
- if p.VarUse() != nil ||
+ if p.mklex.VarUse() != nil ||
lexer.SkipRegexp(regcomp(`^[\w.*+,{}]+`)) ||
lexer.SkipRegexp(regcomp(`^\[[\w-]+\]`)) {
continue
@@ -716,7 +327,7 @@ func (*MkParser) isPkgbasePart(str string) bool {
return true
}
- varUse := NewMkParser(nil, lexer.Rest()).VarUse()
+ varUse := NewMkLexer(lexer.Rest(), nil).VarUse()
if varUse != nil {
return !contains(varUse.varname, "VER") && len(varUse.modifiers) == 0
}
@@ -740,7 +351,7 @@ func (p *MkParser) Dependency() *DependencyPattern {
parseVersion := func() string {
mark := lexer.Mark()
- for p.VarUse() != nil {
+ for p.mklex.VarUse() != nil {
}
if lexer.Since(mark) != "" {
return lexer.Since(mark)
@@ -799,7 +410,7 @@ func (p *MkParser) Dependency() *DependencyPattern {
if lexer.SkipByte('-') && lexer.Rest() != "" {
versionMark := lexer.Mark()
- for p.VarUse() != nil || lexer.SkipRegexp(regcomp(`^[\w\[\]*_.\-]+`)) {
+ for p.mklex.VarUse() != nil || lexer.SkipRegexp(regcomp(`^[\w\[\]*_.\-]+`)) {
}
if !lexer.SkipString("{,nb*}") {
@@ -822,7 +433,7 @@ func (p *MkParser) Dependency() *DependencyPattern {
// if there is a parse error or some trailing text.
// Parse errors are silently ignored.
func ToVarUse(str string) *MkVarUse {
- p := NewMkParser(nil, str)
+ p := NewMkLexer(str, nil)
varUse := p.VarUse()
if varUse == nil || !p.EOF() {
return nil
@@ -970,7 +581,7 @@ func (w *MkCondWalker) walkAtom(atom *MkCondTerm, callback *MkCondCallback) {
func (w *MkCondWalker) walkStr(str string, callback *MkCondCallback) {
if callback.VarUse != nil {
- tokens := NewMkParser(nil, str).MkTokens()
+ tokens, _ := NewMkLexer(str, nil).MkTokens()
for _, token := range tokens {
if token.Varuse != nil {
callback.VarUse(token.Varuse)
diff --git a/pkgtools/pkglint/files/mkparser_test.go b/pkgtools/pkglint/files/mkparser_test.go
index f103fae406f..e6b3c8ed668 100644
--- a/pkgtools/pkglint/files/mkparser_test.go
+++ b/pkgtools/pkglint/files/mkparser_test.go
@@ -5,707 +5,6 @@ import (
"strings"
)
-func (s *Suite) Test_MkParser_MkTokens(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- testRest := func(input string, expectedTokens []*MkToken, expectedRest string) {
- line := t.NewLines("Test_MkParser_MkTokens.mk", input).Lines[0]
- p := NewMkParser(line, input)
- actualTokens := p.MkTokens()
- t.CheckDeepEquals(actualTokens, expectedTokens)
- for i, expectedToken := range expectedTokens {
- if i < len(actualTokens) {
- t.CheckDeepEquals(*actualTokens[i], *expectedToken)
- t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
- }
- }
- t.CheckEquals(p.Rest(), expectedRest)
- }
- test := func(input string, expectedToken *MkToken) {
- testRest(input, b.Tokens(expectedToken), "")
- }
- literal := b.TextToken
- varuse := b.VaruseToken
-
- // Everything except VarUses is passed through unmodified.
-
- test("literal",
- literal("literal"))
-
- test("\\/share\\/ { print \"share directory\" }",
- literal("\\/share\\/ { print \"share directory\" }"))
-
- test("find . -name \\*.orig -o -name \\*.pre",
- literal("find . -name \\*.orig -o -name \\*.pre"))
-
- test("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'",
- literal("-e 's|\\$${EC2_HOME.*}|EC2_HOME}|g'"))
-
- test("$$var1 $$var2 $$? $$",
- literal("$$var1 $$var2 $$? $$"))
-
- testRest("hello, ${W:L:tl}orld",
- b.Tokens(
- literal("hello, "),
- varuse("W", "L", "tl"),
- literal("orld")),
- "")
-
- testRest("ftp://${PKGNAME}/ ${MASTER_SITES:=subdir/}",
- b.Tokens(
- literal("ftp://"),
- varuse("PKGNAME"),
- literal("/ "),
- varuse("MASTER_SITES", "=subdir/")),
- "")
-
- testRest("${VAR:S,a,b,c,d,e,f}",
- b.Tokens(b.VaruseTextToken("${VAR:S,a,b,c,d,e,f}", "VAR", "S,a,b,")),
- "")
- t.CheckOutputLines(
- "WARN: Test_MkParser_MkTokens.mk:1: Invalid variable modifier \"c,d,e,f\" for \"VAR\".")
-
- testRest("Text${VAR:Mmodifier}${VAR2}more text${VAR3}",
- b.Tokens(
- literal("Text"),
- varuse("VAR", "Mmodifier"),
- varuse("VAR2"),
- literal("more text"),
- varuse("VAR3")),
- "")
-}
-
-func (s *Suite) Test_MkParser_VarUse(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
- varuse := b.VaruseToken
- varuseText := b.VaruseTextToken
-
- testRest := func(input string, expectedTokens []*MkToken, expectedRest string, diagnostics ...string) {
- line := t.NewLines("Test_MkParser_VarUse.mk", input).Lines[0]
- p := NewMkParser(line, input)
-
- actualTokens := p.MkTokens()
-
- t.CheckDeepEquals(actualTokens, expectedTokens)
- for i, expectedToken := range expectedTokens {
- if i < len(actualTokens) {
- t.CheckDeepEquals(*actualTokens[i], *expectedToken)
- t.CheckDeepEquals(actualTokens[i].Varuse, expectedToken.Varuse)
- }
- }
- t.CheckEquals(p.Rest(), expectedRest)
- t.CheckOutput(diagnostics)
- }
- test := func(input string, expectedToken *MkToken, diagnostics ...string) {
- testRest(input, b.Tokens(expectedToken), "", diagnostics...)
- }
-
- t.Use(testRest, test, varuse, varuseText)
-
- test("${VARIABLE}",
- varuse("VARIABLE"))
-
- test("${VARIABLE.param}",
- varuse("VARIABLE.param"))
-
- test("${VARIABLE.${param}}",
- varuse("VARIABLE.${param}"))
-
- test("${VARIABLE.hicolor-icon-theme}",
- varuse("VARIABLE.hicolor-icon-theme"))
-
- test("${VARIABLE.gtk+extra}",
- varuse("VARIABLE.gtk+extra"))
-
- test("${VARIABLE:S/old/new/}",
- varuse("VARIABLE", "S/old/new/"))
-
- test("${GNUSTEP_LFLAGS:S/-L//g}",
- varuse("GNUSTEP_LFLAGS", "S/-L//g"))
-
- test("${SUSE_VERSION:S/.//}",
- varuse("SUSE_VERSION", "S/.//"))
-
- test("${MASTER_SITE_GNOME:=sources/alacarte/0.13/}",
- varuse("MASTER_SITE_GNOME", "=sources/alacarte/0.13/"))
-
- test("${INCLUDE_DIRS:H:T}",
- varuse("INCLUDE_DIRS", "H", "T"))
-
- test("${A.${B.${C.${D}}}}",
- varuse("A.${B.${C.${D}}}"))
-
- test("${RUBY_VERSION:C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/}",
- varuse("RUBY_VERSION", "C/([0-9]+)\\.([0-9]+)\\.([0-9]+)/\\1/"))
-
- test("${PERL5_${_var_}:Q}",
- varuse("PERL5_${_var_}", "Q"))
-
- test("${PKGNAME_REQD:C/(^.*-|^)py([0-9][0-9])-.*/\\2/}",
- varuse("PKGNAME_REQD", "C/(^.*-|^)py([0-9][0-9])-.*/\\2/"))
-
- test("${PYLIB:S|/|\\\\/|g}",
- varuse("PYLIB", "S|/|\\\\/|g"))
-
- test("${PKGNAME_REQD:C/ruby([0-9][0-9]+)-.*/\\1/}",
- varuse("PKGNAME_REQD", "C/ruby([0-9][0-9]+)-.*/\\1/"))
-
- test("${RUBY_SHLIBALIAS:S/\\//\\\\\\//}",
- varuse("RUBY_SHLIBALIAS", "S/\\//\\\\\\//"))
-
- test("${RUBY_VER_MAP.${RUBY_VER}:U${RUBY_VER}}",
- varuse("RUBY_VER_MAP.${RUBY_VER}", "U${RUBY_VER}"))
-
- test("${RUBY_VER_MAP.${RUBY_VER}:U18}",
- varuse("RUBY_VER_MAP.${RUBY_VER}", "U18"))
-
- test("${CONFIGURE_ARGS:S/ENABLE_OSS=no/ENABLE_OSS=yes/g}",
- varuse("CONFIGURE_ARGS", "S/ENABLE_OSS=no/ENABLE_OSS=yes/g"))
-
- test("${PLIST_RUBY_DIRS:S,DIR=\"PREFIX/,DIR=\",}",
- varuse("PLIST_RUBY_DIRS", "S,DIR=\"PREFIX/,DIR=\","))
-
- test("${LDFLAGS:S/-Wl,//g:Q}",
- varuse("LDFLAGS", "S/-Wl,//g", "Q"))
-
- test("${_PERL5_REAL_PACKLIST:S/^/${DESTDIR}/}",
- varuse("_PERL5_REAL_PACKLIST", "S/^/${DESTDIR}/"))
-
- test("${_PYTHON_VERSION:C/^([0-9])/\\1./1}",
- varuse("_PYTHON_VERSION", "C/^([0-9])/\\1./1"))
-
- test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/}",
- varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/"))
-
- test("${PKGNAME:C/-[0-9].*$/-[0-9]*/}",
- varuse("PKGNAME", "C/-[0-9].*$/-[0-9]*/"))
-
- // TODO: Does the $@ refer to ${.TARGET}, or is it just an unmatchable
- // regular expression?
- test("${PKGNAME:C/$@/target?/}",
- varuse("PKGNAME", "C/$@/target?/"))
-
- test("${PKGNAME:S/py${_PYTHON_VERSION}/py${i}/:C/-[0-9].*$/-[0-9]*/}",
- varuse("PKGNAME", "S/py${_PYTHON_VERSION}/py${i}/", "C/-[0-9].*$/-[0-9]*/"))
-
- test("${_PERL5_VARS:tl:S/^/-V:/}",
- varuse("_PERL5_VARS", "tl", "S/^/-V:/"))
-
- test("${_PERL5_VARS_OUT:M${_var_:tl}=*:S/^${_var_:tl}=${_PERL5_PREFIX:=/}//}",
- varuse("_PERL5_VARS_OUT", "M${_var_:tl}=*", "S/^${_var_:tl}=${_PERL5_PREFIX:=/}//"))
-
- test("${RUBY${RUBY_VER}_PATCHLEVEL}",
- varuse("RUBY${RUBY_VER}_PATCHLEVEL"))
-
- test("${DISTFILES:M*.gem}",
- varuse("DISTFILES", "M*.gem"))
-
- test("${LOCALBASE:S^/^_^}",
- varuse("LOCALBASE", "S^/^_^"))
-
- test("${SOURCES:%.c=%.o}",
- varuse("SOURCES", "%.c=%.o"))
-
- test("${GIT_TEMPLATES:@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @:M*}",
- varuse("GIT_TEMPLATES", "@.t.@ ${EGDIR}/${GIT_TEMPLATEDIR}/${.t.} ${PREFIX}/${GIT_CORE_TEMPLATEDIR}/${.t.} @", "M*"))
-
- test("${DISTNAME:C:_:-:}",
- varuse("DISTNAME", "C:_:-:"))
-
- test("${CF_FILES:H:O:u:S@^@${PKG_SYSCONFDIR}/@}",
- varuse("CF_FILES", "H", "O", "u", "S@^@${PKG_SYSCONFDIR}/@"))
-
- test("${ALT_GCC_RTS:S%${LOCALBASE}%%:S%/%%}",
- varuse("ALT_GCC_RTS", "S%${LOCALBASE}%%", "S%/%%"))
-
- test("${PREFIX:C;///*;/;g:C;/$;;}",
- varuse("PREFIX", "C;///*;/;g", "C;/$;;"))
-
- test("${GZIP_CMD:[1]:Q}",
- varuse("GZIP_CMD", "[1]", "Q"))
-
- test("${RUBY_RAILS_SUPPORTED:[#]}",
- varuse("RUBY_RAILS_SUPPORTED", "[#]"))
-
- test("${GZIP_CMD:[asdf]:Q}",
- varuseText("${GZIP_CMD:[asdf]:Q}", "GZIP_CMD", "Q"),
- "WARN: Test_MkParser_VarUse.mk:1: Invalid variable modifier \"[asdf]\" for \"GZIP_CMD\".")
-
- test("${DISTNAME:C/-[0-9]+$$//:C/_/-/}",
- varuse("DISTNAME", "C/-[0-9]+$$//", "C/_/-/"))
-
- test("${DISTNAME:slang%=slang2%}",
- varuse("DISTNAME", "slang%=slang2%"))
-
- test("${OSMAP_SUBSTVARS:@v@-e 's,\\@${v}\\@,${${v}},g' @}",
- varuse("OSMAP_SUBSTVARS", "@v@-e 's,\\@${v}\\@,${${v}},g' @"))
-
- test("${BRANDELF:D${BRANDELF} -t Linux ${LINUX_LDCONFIG}:U${TRUE}}",
- varuse("BRANDELF", "D${BRANDELF} -t Linux ${LINUX_LDCONFIG}", "U${TRUE}"))
-
- test("${${_var_}.*}",
- varuse("${_var_}.*"))
-
- test("${OPTIONS:@opt@printf 'Option %s is selected\n' ${opt:Q}';@}",
- varuse("OPTIONS", "@opt@printf 'Option %s is selected\n' ${opt:Q}';@"))
-
- /* weird features */
- test("${${EMACS_VERSION_MAJOR}>22:?@comment :}",
- varuse("${EMACS_VERSION_MAJOR}>22", "?@comment :"))
-
- test("${empty(CFLAGS):?:-cflags ${CFLAGS:Q}}",
- varuse("empty(CFLAGS)", "?:-cflags ${CFLAGS:Q}"))
-
- test("${${PKGSRC_COMPILER}==gcc:?gcc:cc}",
- varuse("${PKGSRC_COMPILER}==gcc", "?gcc:cc"))
-
- test("${${XKBBASE}/xkbcomp:L:Q}",
- varuse("${XKBBASE}/xkbcomp", "L", "Q"))
-
- test("${${PKGBASE} ${PKGVERSION}:L}",
- varuse("${PKGBASE} ${PKGVERSION}", "L"))
-
- // The variable name is optional; the variable with the empty name always
- // evaluates to the empty string. Bmake actively prevents this variable from
- // ever being defined. Therefore the :U branch is always taken, and this
- // in turn is used to implement the variables from the .for loops.
- test("${:U}",
- varuse("", "U"))
-
- test("${:Ufixed value}",
- varuse("", "Ufixed value"))
-
- // This complicated expression returns the major.minor.patch version
- // of the package given in ${d}.
- //
- // The :L modifier interprets the variable name not as a variable name
- // but takes it as the variable value. Followed by the :sh modifier,
- // this combination evaluates to the output of pkg_info.
- //
- // In this output, all non-digit characters are replaced with spaces so
- // that the remaining value is a space-separated list of version parts.
- // From these parts, the first 3 are taken and joined using a dot as separator.
- test("${${${PKG_INFO} -E ${d} || echo:L:sh}:L:C/[^[0-9]]*/ /g:[1..3]:ts.}",
- varuse("${${PKG_INFO} -E ${d} || echo:L:sh}", "L", "C/[^[0-9]]*/ /g", "[1..3]", "ts."))
-
- // For :S and :C, the colon can be left out. It's confusing but possible.
- test("${VAR:S/-//S/.//}",
- varuseText("${VAR:S/-//S/.//}", "VAR", "S/-//", "S/.//"))
-
- // The :S and :C modifiers accept an arbitrary character as separator. Here it is "a".
- test("${VAR:Sahara}",
- varuse("VAR", "Sahara"))
-
- // The separator character can be left out, which means empty.
- test("${VAR:ts}",
- varuse("VAR", "ts"))
-
- // The separator character can be a long octal number.
- test("${VAR:ts\\000012}",
- varuse("VAR", "ts\\000012"))
-
- // Or even decimal.
- test("${VAR:ts\\124}",
- varuse("VAR", "ts\\124"))
-
- // The :ts modifier only takes single-character separators.
- test("${VAR:ts---}",
- varuse("VAR", "ts---"),
- "WARN: Test_MkParser_VarUse.mk:1: Invalid separator \"---\" for :ts modifier of \"VAR\".")
-
- test("$<",
- varuseText("$<", "<")) // Same as ${.IMPSRC}
-
- test("$(GNUSTEP_USER_ROOT)",
- varuseText("$(GNUSTEP_USER_ROOT)", "GNUSTEP_USER_ROOT"),
- "WARN: Test_MkParser_VarUse.mk:1: Please use curly braces {} instead of round parentheses () for GNUSTEP_USER_ROOT.")
-
- // Opening brace, closing parenthesis.
- // Warnings are only printed for balanced expressions.
- test("${VAR)",
- varuseText("${VAR)", "VAR)"),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"VAR)\".",
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \")\" after variable name \"VAR\".")
-
- // Opening parenthesis, closing brace
- // Warnings are only printed for balanced expressions.
- test("$(VAR}",
- varuseText("$(VAR}", "VAR}"),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \")\" for \"VAR}\".",
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \"}\" after variable name \"VAR\".")
-
- test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}@}",
- varuse("PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}@"))
-
- test("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
- varuseText("${PLIST_SUBST_VARS:@var@${var}=${${var}:Q}}",
- "PLIST_SUBST_VARS", "@var@${var}=${${var}:Q}}"),
- "WARN: Test_MkParser_VarUse.mk:1: Modifier ${PLIST_SUBST_VARS:@var@...@} is missing the final \"@\".",
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"PLIST_SUBST_VARS\".")
-
- // The replacement text may include closing braces, which is useful
- // for AWK programs.
- test("${PLIST_SUBST_VARS:@var@{${var}}@}",
- varuseText("${PLIST_SUBST_VARS:@var@{${var}}@}",
- "PLIST_SUBST_VARS", "@var@{${var}}@"),
- nil...)
-
- // Unfinished variable use
- test("${",
- varuseText("${", ""),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"\".")
-
- // Unfinished nested variable use
- test("${${",
- varuseText("${${", "${"),
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"\".",
- "WARN: Test_MkParser_VarUse.mk:1: Missing closing \"}\" for \"${\".")
-
- test("${arbitrary :Mpattern:---:Q}",
- varuseText("${arbitrary :Mpattern:---:Q}", "arbitrary ", "Mpattern", "Q"),
- // TODO: Swap the order of these messages

- "WARN: Test_MkParser_VarUse.mk:1: Invalid variable modifier \"---\" for \"arbitrary \".",
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \" \" after variable name \"arbitrary\".")
-
- // Variable names containing spaces do not occur in pkgsrc.
- // Technically they are possible:
- //
- // VARNAME= name with spaces
- // ${VARNAME}= value
- //
- // all:
- // @echo ${name with spaces:Q}''
- test("${arbitrary text}",
- varuse("arbitrary text"),
- "WARN: Test_MkParser_VarUse.mk:1: Invalid part \" text\" after variable name \"arbitrary\".")
-}
-
-func (s *Suite) Test_MkParser_VarUse__ambiguous(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- t.SetUpCommandLine("--explain")
-
- line := t.NewLine("module.mk", 123, "\t$Varname $X")
- p := NewMkParser(line, line.Text[1:])
-
- tokens := p.MkTokens()
- t.CheckDeepEquals(tokens, b.Tokens(
- b.VaruseTextToken("$V", "V"),
- b.TextToken("arname "),
- b.VaruseTextToken("$X", "X")))
-
- t.CheckOutputLines(
- "ERROR: module.mk:123: $Varname is ambiguous. Use ${Varname} if you mean a Make variable or $$Varname if you mean a shell variable.",
- "",
- "\tOnly the first letter after the dollar is the variable name.",
- "\tEverything following it is normal text, even if it looks like a",
- "\tvariable name to human readers.",
- "",
- "WARN: module.mk:123: $X is ambiguous. Use ${X} if you mean a Make variable or $$X if you mean a shell variable.",
- "",
- "\tIn its current form, this variable is parsed as a Make variable. For",
- "\thuman readers though, $x looks more like a shell variable than a",
- "\tMake variable, since Make variables are usually written using braces",
- "\t(BSD-style) or parentheses (GNU-style).",
- "")
-}
-
-// Pkglint can replace $(VAR) with ${VAR}. It doesn't look at all components
-// of nested variables though because this case is not important enough to
-// invest much development time. It occurs so seldom that it is acceptable
-// to run pkglint multiple times in such a case.
-func (s *Suite) Test_MkParser_VarUse__parentheses_autofix(c *check.C) {
- t := s.Init(c)
-
- t.SetUpCommandLine("--autofix")
- t.SetUpVartypes()
- lines := t.SetUpFileLines("Makefile",
- MkCvsID,
- "COMMENT=$(P1) $(P2)) $(P3:Q) ${BRACES} $(A.$(B.$(C)))")
- mklines := NewMkLines(lines)
-
- mklines.Check()
-
- t.CheckOutputLines(
- "AUTOFIX: ~/Makefile:2: Replacing \"$(P1)\" with \"${P1}\".",
- "AUTOFIX: ~/Makefile:2: Replacing \"$(P2)\" with \"${P2}\".",
- "AUTOFIX: ~/Makefile:2: Replacing \"$(P3:Q)\" with \"${P3:Q}\".",
- "AUTOFIX: ~/Makefile:2: Replacing \"$(C)\" with \"${C}\".")
- t.CheckFileLines("Makefile",
- MkCvsID,
- "COMMENT=${P1} ${P2}) ${P3:Q} ${BRACES} $(A.$(B.${C}))")
-}
-
-func (s *Suite) Test_MkParser_VarUseModifiers(c *check.C) {
- t := s.Init(c)
-
- varUse := NewMkTokenBuilder().VarUse
- test := func(text string, varUse *MkVarUse, diagnostics ...string) {
- line := t.NewLine("Makefile", 20, "\t"+text)
- p := NewMkParser(line, text)
-
- actual := p.VarUse()
-
- t.CheckDeepEquals(actual, varUse)
- t.CheckEquals(p.Rest(), "")
- t.CheckOutput(diagnostics)
- }
-
- // The !command! modifier is used so seldom that pkglint does not
- // check whether the command is actually valid.
- // At least not while parsing the modifier since at this point it might
- // still be unknown which of the commands can be used and which cannot.
- test("${VAR:!command!}", varUse("VAR", "!command!"))
-
- test("${VAR:!command}", varUse("VAR"),
- "WARN: Makefile:20: Invalid variable modifier \"!command\" for \"VAR\".")
-
- test("${VAR:command!}", varUse("VAR"),
- "WARN: Makefile:20: Invalid variable modifier \"command!\" for \"VAR\".")
-
- // The :L modifier makes the variable value "echo hello", and the :[1]
- // modifier extracts the "echo".
- test("${echo hello:L:[1]}", varUse("echo hello", "L", "[1]"))
-
- // bmake ignores the :[3] modifier, and the :L modifier just returns the
- // variable name, in this case BUILD_DIRS.
- test("${BUILD_DIRS:[3]:L}", varUse("BUILD_DIRS", "[3]", "L"))
-
- test("${PATH:ts::Q}", varUse("PATH", "ts:", "Q"))
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__invalid_ts_modifier_with_warning(c *check.C) {
- t := s.Init(c)
-
- t.SetUpCommandLine("-Wall", "--explain")
- line := t.NewLine("filename.mk", 123, "${VAR:tsabc}")
- p := NewMkParser(line, "tsabc}")
-
- modifier := p.varUseModifier("VAR", '}')
-
- t.CheckEquals(modifier, "tsabc")
- t.CheckEquals(p.Rest(), "}")
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid separator \"abc\" for :ts modifier of \"VAR\".",
- "",
- "\tThe separator for the :ts modifier must be either a single character",
- "\tor an escape sequence like \\t or \\n or an octal or decimal escape",
- "\tsequence; see the bmake man page for further details.",
- "")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__invalid_ts_modifier_without_warning(c *check.C) {
- t := s.Init(c)
-
- p := NewMkParser(nil, "tsabc}")
-
- modifier := p.varUseModifier("VAR", '}')
-
- t.CheckEquals(modifier, "tsabc")
- t.CheckEquals(p.Rest(), "}")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__square_bracket(c *check.C) {
- t := s.Init(c)
-
- line := t.NewLine("filename.mk", 123, "\t${VAR:[asdf]}")
- p := NewMkParser(line, "[asdf]")
-
- modifier := p.varUseModifier("VAR", '}')
-
- t.CheckEquals(modifier, "")
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"[asdf]\" for \"VAR\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__condition_without_colon(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${${VAR}:?yes:no}${${VAR}:?yes}")
- p := NewMkParser(line, line.Text)
-
- varUse1 := p.VarUse()
- varUse2 := p.VarUse()
-
- t.CheckDeepEquals(varUse1, b.VarUse("${VAR}", "?yes:no"))
- t.CheckDeepEquals(varUse2, b.VarUse("${VAR}"))
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__malformed_in_parentheses(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "$(${VAR}:?yes)")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"?yes\" for \"${VAR}\".",
- "WARN: filename.mk:123: Please use curly braces {} instead of round parentheses () for ${VAR}.")
-}
-
-func (s *Suite) Test_MkParser_varUseModifier__varuse_in_malformed_modifier(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${${VAR}:?yes${INNER}}")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("${VAR}"))
- t.CheckEquals(p.Rest(), "")
-
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"?yes${INNER}\" for \"${VAR}\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifierSubst(c *check.C) {
- t := s.Init(c)
-
- varUse := NewMkTokenBuilder().VarUse
- test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
- line := t.NewLine("Makefile", 20, "\t"+text)
- p := NewMkParser(line, text)
-
- actual := p.VarUse()
-
- t.CheckDeepEquals(actual, varUse)
- t.CheckEquals(p.Rest(), rest)
- t.CheckOutput(diagnostics)
- }
-
- test("${VAR:S", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".",
- "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
-
- test("${VAR:S}", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S\" for \"VAR\".")
-
- test("${VAR:S,}", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S,\" for \"VAR\".")
-
- test("${VAR:S,from,to}", varUse("VAR"), "",
- "WARN: Makefile:20: Invalid variable modifier \"S,from,to\" for \"VAR\".")
-
- test("${VAR:S,from,to,}", varUse("VAR", "S,from,to,"), "")
-
- test("${VAR:S,^from$,to,}", varUse("VAR", "S,^from$,to,"), "")
-
- test("${VAR:S,@F@,${F},}", varUse("VAR", "S,@F@,${F},"), "")
-
- test("${VAR:S,from,to,1}", varUse("VAR", "S,from,to,1"), "")
- test("${VAR:S,from,to,g}", varUse("VAR", "S,from,to,g"), "")
- test("${VAR:S,from,to,W}", varUse("VAR", "S,from,to,W"), "")
-
- test("${VAR:S,from,to,1gW}", varUse("VAR", "S,from,to,1gW"), "")
-
- // Inside the :S or :C modifiers, neither a colon nor the closing
- // brace need to be escaped. Otherwise these patterns would become
- // brace needs to be escaped. Otherwise these patterns would become
- test("${VAR:C/[[:alnum:]]{2}/**/g}",
- varUse("VAR", "C/[[:alnum:]]{2}/**/g"),
- "")
-
- // Some pkgsrc users really explore the darkest corners of bmake by using
- // the backslash as the separator in the :S modifier. Sure, it works, it
- // just looks totally unexpected to the average pkgsrc reader.
- //
- // Using the backslash as separator means that it cannot be used for anything
- // else, not even for escaping other characters.
- test("${VAR:S\\.post1\\\\1}",
- varUse("VAR", "S\\.post1\\\\1"),
- "")
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt__missing_at_after_variable_name(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${VAR:@varname}")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("VAR"))
- t.CheckEquals(p.Rest(), "")
- t.CheckOutputLines(
- "WARN: filename.mk:123: Invalid variable modifier \"@varname\" for \"VAR\".")
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt__dollar(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- line := t.NewLine("filename.mk", 123, "${VAR:@var@$$var@}")
- p := NewMkParser(line, line.Text)
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var@"))
- t.CheckEquals(p.Rest(), "")
- t.CheckOutputEmpty()
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt__incomplete_without_warning(c *check.C) {
- t := s.Init(c)
- b := NewMkTokenBuilder()
-
- p := NewMkParser(nil, "${VAR:@var@$$var}rest")
-
- varUse := p.VarUse()
-
- t.CheckDeepEquals(varUse, b.VarUse("VAR", "@var@$$var}rest"))
- t.CheckEquals(p.Rest(), "")
- t.CheckOutputEmpty()
-}
-
-func (s *Suite) Test_MkParser_varUseModifierAt(c *check.C) {
- t := s.Init(c)
-
- varUse := NewMkTokenBuilder().VarUse
- test := func(text string, varUse *MkVarUse, rest string, diagnostics ...string) {
- line := t.NewLine("Makefile", 20, "\t"+text)
- p := NewMkParser(line, text)
-
- actual := p.VarUse()
-
- t.CheckDeepEquals(actual, varUse)
- t.CheckEquals(p.Rest(), rest)
- t.CheckOutput(diagnostics)
- }
-
- test("${VAR:@",
- varUse("VAR"),
- "",
- "WARN: Makefile:20: Invalid variable modifier \"@\" for \"VAR\".",
- "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
-
- test("${VAR:@i@${i}}", varUse("VAR", "@i@${i}}"), "",
- "WARN: Makefile:20: Modifier ${VAR:@i@...@} is missing the final \"@\".",
- "WARN: Makefile:20: Missing closing \"}\" for \"VAR\".")
-
- test("${VAR:@i@${i}@}", varUse("VAR", "@i@${i}@"), "")
-
- test("${PKG_GROUPS:@g@${g:Q}:${PKG_GID.${g}:Q}@:C/:*$//g}",
- varUse("PKG_GROUPS", "@g@${g:Q}:${PKG_GID.${g}:Q}@", "C/:*$//g"),
- "")
-}
-
func (s *Suite) Test_MkParser_MkCond(c *check.C) {
t := s.Init(c)
b := NewMkTokenBuilder()
@@ -971,40 +270,6 @@ func (s *Suite) Test_MkParser_mkCondCompare(c *check.C) {
t.CheckOutputEmpty()
}
-func (s *Suite) Test_MkParser_Varname(c *check.C) {
- t := s.Init(c)
-
- test := func(text string) {
- line := t.NewLine("filename.mk", 1, text)
- p := NewMkParser(line, text)
-
- varname := p.Varname()
-
- t.CheckEquals(varname, text)
- t.CheckEquals(p.Rest(), "")
- }
-
- testRest := func(text string, expectedVarname string, expectedRest string) {
- line := t.NewLine("filename.mk", 1, text)
- p := NewMkParser(line, text)
-
- varname := p.Varname()
-
- t.CheckEquals(varname, expectedVarname)
- t.CheckEquals(p.Rest(), expectedRest)
- }
-
- test("VARNAME")
- test("VARNAME.param")
- test("VARNAME.${param}")
- test("SITES_${param}")
- test("SITES_distfile-1.0.tar.gz")
- test("SITES.gtk+-2.0")
- test("PKGPATH.category/package")
-
- testRest("VARNAME/rest", "VARNAME", "/rest")
-}
-
func (s *Suite) Test_MkParser_PkgbasePattern(c *check.C) {
t := s.Init(c)
diff --git a/pkgtools/pkglint/files/mkshparser.go b/pkgtools/pkglint/files/mkshparser.go
index 1fd0af412a5..25a11e8d96d 100644
--- a/pkgtools/pkglint/files/mkshparser.go
+++ b/pkgtools/pkglint/files/mkshparser.go
@@ -238,7 +238,7 @@ func (lex *ShellLexer) Lex(lval *shyySymType) (ttype int) {
lex.atCommandStart = false
case lex.atCommandStart && matches(token, `^[A-Za-z_]\w*=`):
ttype = tkASSIGNMENT_WORD
- p := NewShTokenizer(dummyLine, token, false) // Just for converting the string to a ShToken
+ p := NewShTokenizer(nil, token, false)
lval.Word = p.ShToken()
case hasPrefix(token, "#"):
// This doesn't work for multiline shell programs.
@@ -246,7 +246,7 @@ func (lex *ShellLexer) Lex(lval *shyySymType) (ttype int) {
return 0
default:
ttype = tkWORD
- p := NewShTokenizer(dummyLine, token, false) // Just for converting the string to a ShToken
+ p := NewShTokenizer(nil, token, false)
lval.Word = p.ShToken()
lex.atCommandStart = false
diff --git a/pkgtools/pkglint/files/mkshparser_test.go b/pkgtools/pkglint/files/mkshparser_test.go
index 0c828fdb8a4..835747bc63e 100644
--- a/pkgtools/pkglint/files/mkshparser_test.go
+++ b/pkgtools/pkglint/files/mkshparser_test.go
@@ -65,10 +65,11 @@ type ShSuite struct {
var _ = check.Suite(&ShSuite{})
func (s *ShSuite) SetUpTest(*check.C) {
- G = NewPkglint(nil, nil)
+ G = unusablePkglint()
}
func (s *ShSuite) TearDownTest(*check.C) {
+ s.t.ReportUncheckedOutput()
G = unusablePkglint()
}
@@ -268,10 +269,21 @@ func (s *ShSuite) Test_parseShellProgram__compound_command(c *check.C) {
b.Words("*"),
b.List().AddCommand(b.SimpleCommand("echo", "$i")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "WARN: MkShBuilder.Token.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.",
+ "WARN: ShSuite.test.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.")
+
s.test("case $i in esac",
b.List().AddCommand(b.Case(
b.Token("$i"))))
+ s.t.CheckOutputLines(
+ "WARN: MkShBuilder.Token.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.",
+ "WARN: ShSuite.test.mk:1: $i is ambiguous. Use ${i} if you "+
+ "mean a Make variable or $$i if you mean a shell variable.")
}
func (s *ShSuite) Test_parseShellProgram__subshell(c *check.C) {
@@ -312,6 +324,12 @@ func (s *ShSuite) Test_parseShellProgram__for_clause(c *check.C) {
b.Words("\"$$@\""),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
// Only linebreak is allowed, but not semicolon.
s.test("for var \n do echo $var ; done",
b.List().AddCommand(b.For(
@@ -319,30 +337,60 @@ func (s *ShSuite) Test_parseShellProgram__for_clause(c *check.C) {
b.Words("\"$$@\""),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var in a b c ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
b.Words("a", "b", "c"),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var \n \n \n in a b c ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
b.Words("a", "b", "c"),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var \n in ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
nil,
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var in in esac ; do echo $var ; done",
b.List().AddCommand(b.For(
"var",
b.Words("in", "esac"),
b.List().AddCommand(b.SimpleCommand("echo", "$var")).AddSemicolon())))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("for var in \n do : ; done",
b.List().AddCommand(b.For(
"var",
@@ -366,6 +414,13 @@ func (s *ShSuite) Test_parseShellProgram__case_clause(c *check.C) {
s.test("case $var in esac",
b.List().AddCommand(b.Case(b.Token("$var"))))
+ s.t.CheckOutputLines(
+ "ERROR: MkShBuilder.Token.mk:1: $var is ambiguous. "+
+ "Use ${var} if you mean a Make variable "+
+ "or $$var if you mean a shell variable.",
+ "ERROR: ShSuite.test.mk:1: $var is ambiguous. Use ${var} if you "+
+ "mean a Make variable or $$var if you mean a shell variable.")
+
s.test("case selector in pattern) ;; pattern) esac",
b.List().AddCommand(b.Case(
b.Token("selector"),
@@ -658,7 +713,9 @@ func (s *ShSuite) Test_parseShellProgram__io_here(c *check.C) {
func (s *ShSuite) init(c *check.C) *MkShBuilder {
s.c = c
- s.t = &Tester{c: c, testName: c.TestName()}
+ tmpdir := NewPath("The ShSuite tests don't need a temporary directory")
+ s.t = &Tester{c: c, testName: c.TestName(), tmpdir: tmpdir}
+ G = NewPkglint(&s.t.stdout, &s.t.stderr)
return NewMkShBuilder()
}
@@ -666,7 +723,8 @@ func (s *ShSuite) test(program string, expected *MkShList) {
t := s.t
// See parseShellProgram
- tokens, rest := splitIntoShellTokens(dummyLine, program)
+ line := t.NewLine("ShSuite.test.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, program)
t.CheckEquals(rest, "")
lexer := NewShellLexer(tokens, rest)
parser := shyyParserImpl{}
@@ -691,7 +749,8 @@ func (s *ShSuite) test(program string, expected *MkShList) {
func (s *ShSuite) testFail(program string, expectedRemaining ...string) {
t := s.t
- tokens, rest := splitIntoShellTokens(dummyLine, program)
+ line := t.NewLine("ShSuite.testFail.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, program)
t.CheckEquals(rest, "")
lexer := ShellLexer{remaining: tokens, atCommandStart: true}
parser := shyyParserImpl{}
@@ -704,9 +763,11 @@ func (s *ShSuite) testFail(program string, expectedRemaining ...string) {
}
func (s *ShSuite) Test_ShellLexer_Lex__redirects(c *check.C) {
+ _ = s.init(c)
t := s.t
- tokens, rest := splitIntoShellTokens(dummyLine, "2>&1 <& <>file >>file <<EOF <<-EOF > /dev/stderr")
+ line := t.NewLine("filename.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, "2>&1 <& <>file >>file <<EOF <<-EOF > /dev/stderr")
t.CheckDeepEquals(tokens, []string{"2>&", "1", "<&", "<>", "file", ">>", "file", "<<", "EOF", "<<-", "EOF", ">", "/dev/stderr"})
t.CheckEquals(rest, "")
@@ -757,7 +818,8 @@ func (s *ShSuite) Test_ShellLexer_Lex__keywords(c *check.C) {
t := s.t
testErr := func(program, error, remaining string) {
- tokens, rest := splitIntoShellTokens(dummyLine, program)
+ line := t.NewLine("filename.mk", 1, "")
+ tokens, rest := splitIntoShellTokens(line, program)
t.CheckEquals(rest, "")
lexer := ShellLexer{
@@ -944,7 +1006,8 @@ func (b *MkShBuilder) Redirected(cmd *MkShCommand, redirects ...*MkShRedirection
}
func (b *MkShBuilder) Token(mktext string) *ShToken {
- tokenizer := NewShTokenizer(dummyLine, mktext, false)
+ line := NewLine("MkShBuilder.Token.mk", 1, "", &RawLine{1, "\n", "\n"})
+ tokenizer := NewShTokenizer(line, mktext, false)
token := tokenizer.ShToken()
assertf(tokenizer.parser.EOF(), "Invalid token: %q", tokenizer.parser.Rest())
return token
diff --git a/pkgtools/pkglint/files/mkshtypes_test.go b/pkgtools/pkglint/files/mkshtypes_test.go
index 3a0b9963ba6..ea4d9916606 100644
--- a/pkgtools/pkglint/files/mkshtypes_test.go
+++ b/pkgtools/pkglint/files/mkshtypes_test.go
@@ -3,3 +3,14 @@ package pkglint
func (list *MkShList) AddSemicolon() *MkShList { return list.AddSeparator(sepSemicolon) }
func (list *MkShList) AddBackground() *MkShList { return list.AddSeparator(sepBackground) }
func (list *MkShList) AddNewline() *MkShList { return list.AddSeparator(sepNewline) }
+
+// AddCommand adds a command directly to a list of commands,
+// creating all the intermediate nodes for the syntactic representation.
+//
+// As soon as that representation is replaced with a semantic representation,
+// this method should no longer be necessary.
+func (list *MkShList) AddCommand(command *MkShCommand) *MkShList {
+ pipeline := NewMkShPipeline(false, []*MkShCommand{command})
+ andOr := NewMkShAndOr(pipeline)
+ return list.AddAndOr(andOr)
+}
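A small usage sketch, mirroring the existing ShSuite tests; AddCommand merely hides the pipeline and and-or wrappers that the grammar otherwise requires:

	list := b.List().AddCommand(b.SimpleCommand("echo", "hello")).AddSemicolon()
	// equivalent to wrapping the command by hand:
	// b.List().AddAndOr(NewMkShAndOr(NewMkShPipeline(false, []*MkShCommand{cmd}))).AddSemicolon()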
diff --git a/pkgtools/pkglint/files/mkshwalker_test.go b/pkgtools/pkglint/files/mkshwalker_test.go
index 04218f80bd4..e68dc84650d 100644
--- a/pkgtools/pkglint/files/mkshwalker_test.go
+++ b/pkgtools/pkglint/files/mkshwalker_test.go
@@ -15,7 +15,8 @@ func (s *Suite) Test_MkShWalker_Walk(c *check.C) {
}
test := func(program string, output ...string) {
- list, err := parseShellProgram(dummyLine, program)
+ line := t.NewLine("filename.mk", 1, "")
+ list, err := parseShellProgram(line, program)
if !c.Check(err, check.IsNil) || !c.Check(list, check.NotNil) {
return
@@ -244,7 +245,8 @@ func (s *Suite) Test_MkShWalker_Walk__empty_callback(c *check.C) {
t := s.Init(c)
test := func(program string) {
- list, err := parseShellProgram(dummyLine, program)
+ line := t.NewLine("filename.mk", 1, "")
+ list, err := parseShellProgram(line, program)
assertNil(err, "")
walker := NewMkShWalker()
@@ -270,7 +272,8 @@ func (s *Suite) Test_MkShWalker_Walk__empty_callback(c *check.C) {
func (s *Suite) Test_MkShWalker_Walk__assertion(c *check.C) {
t := s.Init(c)
- list, err := parseShellProgram(dummyLine, "echo \"hello, world\"")
+ line := t.NewLine("filename.mk", 1, "")
+ list, err := parseShellProgram(line, "echo \"hello, world\"")
assertNil(err, "")
walker := NewMkShWalker()
diff --git a/pkgtools/pkglint/files/mktypes.go b/pkgtools/pkglint/files/mktypes.go
index db6c452d1df..e4044e2760a 100644
--- a/pkgtools/pkglint/files/mktypes.go
+++ b/pkgtools/pkglint/files/mktypes.go
@@ -43,7 +43,7 @@ func (m MkVarUseModifier) IsSuffixSubst() bool {
}
func (m MkVarUseModifier) MatchSubst() (ok bool, regex bool, from string, to string, options string) {
- p := NewMkParser(nil, m.Text)
+ p := NewMkLexer(m.Text, nil)
return p.varUseModifierSubst('}')
}
diff --git a/pkgtools/pkglint/files/mktypes_test.go b/pkgtools/pkglint/files/mktypes_test.go
index ad2cc690eda..a7c55e53f74 100644
--- a/pkgtools/pkglint/files/mktypes_test.go
+++ b/pkgtools/pkglint/files/mktypes_test.go
@@ -39,16 +39,6 @@ func (MkTokenBuilder) VarUse(varname string, modifiers ...string) *MkVarUse {
return &MkVarUse{varname, mods}
}
-// AddCommand adds a command directly to a list of commands,
-// creating all the intermediate nodes for the syntactic representation.
-// As soon as that representation is replaced with a semantic representation,
-// this method should no longer be necessary.
-func (list *MkShList) AddCommand(command *MkShCommand) *MkShList {
- pipeline := NewMkShPipeline(false, []*MkShCommand{command})
- andOr := NewMkShAndOr(pipeline)
- return list.AddAndOr(andOr)
-}
-
func (s *Suite) Test_MkVarUseModifier_MatchSubst(c *check.C) {
t := s.Init(c)
@@ -220,7 +210,7 @@ func (s *Suite) Test_MkVarUse_Mod(c *check.C) {
test := func(varUseText string, mod string) {
line := t.NewLine("filename.mk", 123, "")
- varUse := NewMkParser(line, varUseText).VarUse()
+ varUse := NewMkLexer(varUseText, line).VarUse()
t.CheckOutputEmpty()
t.CheckEquals(varUse.Mod(), mod)
}
diff --git a/pkgtools/pkglint/files/package.go b/pkgtools/pkglint/files/package.go
index ec9e3175b97..f1190076569 100644
--- a/pkgtools/pkglint/files/package.go
+++ b/pkgtools/pkglint/files/package.go
@@ -230,7 +230,7 @@ func (pkg *Package) parse(mklines *MkLines, allLines *MkLines, includingFileForU
filename := mklines.lines.Filename
if filename.Base() == "buildlink3.mk" {
builtin := cleanpath(filename.Dir().JoinNoClean("builtin.mk"))
- builtinRel := relpath(pkg.dir, builtin)
+ builtinRel := G.Pkgsrc.Relpath(pkg.dir, builtin)
if pkg.included.FirstTime(builtinRel.String()) && builtin.IsFile() {
builtinMkLines := LoadMk(builtin, MustSucceed|LogErrors)
pkg.parse(builtinMkLines, allLines, "")
@@ -297,7 +297,7 @@ func (pkg *Package) loadIncluded(mkline *MkLine, includingFile Path) (includedMk
dirname, _ := includingFile.Split()
dirname = cleanpath(dirname)
fullIncluded := joinPath(dirname, includedFile)
- relIncludedFile := relpath(pkg.dir, fullIncluded)
+ relIncludedFile := G.Pkgsrc.Relpath(pkg.dir, fullIncluded)
if !pkg.shouldDiveInto(includingFile, includedFile) {
return nil, true
@@ -371,7 +371,7 @@ func (pkg *Package) resolveIncludedFile(mkline *MkLine, includingFilename Path)
}
if mkline.Basename != "buildlink3.mk" {
- if includedFile.HasSuffixText("/buildlink3.mk") {
+ if includedFile.HasSuffixPath("buildlink3.mk") {
pkg.bl3[includedFile] = mkline
if trace.Tracing {
trace.Step1("Buildlink3 file in package: %q", includedText)
@@ -392,11 +392,9 @@ func (*Package) shouldDiveInto(includingFile, includedFile Path) bool {
return false
}
- // FIXME: includingFile may be "../../mk/../devel/readline/buildlink.mk" and thus contain "mk"
- // even though the resolved file is not part of the pkgsrc infrastructure.
- if includingFile.ContainsPath("mk") && !G.Pkgsrc.ToRel(includingFile).HasPrefixPath("wip/mk") {
- // TODO: try ".buildlink.mk", ".builtin.mk" instead, see wip/clfswm.
- return includingFile.HasSuffixText("buildlink3.mk") && includedFile.HasSuffixText("builtin.mk")
+ if G.Pkgsrc.IsInfraMain(includingFile) {
+ return includingFile.HasSuffixText(".buildlink3.mk") &&
+ includedFile.HasSuffixText(".builtin.mk")
}
return true
@@ -971,7 +969,7 @@ func (pkg *Package) checkUseLanguagesCompilerMk(mklines *MkLines) {
}
if mkline.Basename == "compiler.mk" {
- if relpath(pkg.dir, mkline.Filename) == "../../mk/compiler.mk" {
+ if G.Pkgsrc.Relpath(pkg.dir, mkline.Filename) == "../../mk/compiler.mk" {
return
}
}
@@ -1075,7 +1073,10 @@ func (pkg *Package) nbPart() string {
}
func (pkg *Package) pkgnameFromDistname(pkgname, distname string) (string, bool) {
- tokens := NewMkParser(nil, pkgname).MkTokens()
+ tokens, rest := NewMkLexer(pkgname, nil).MkTokens()
+ if rest != "" {
+ return "", false
+ }
// TODO: Make this resolving of variable references available to all other variables as well.
@@ -1206,9 +1207,7 @@ func (pkg *Package) checkDirent(dirent Path, mode os.FileMode) {
switch {
case mode.IsRegular():
- pkgsrcRel := G.Pkgsrc.ToRel(dirent)
- depth := pkgsrcRel.Count() - 1 // FIXME
- G.checkReg(dirent, basename, depth)
+ G.checkReg(dirent, basename, G.Pkgsrc.ToRel(dirent).Count())
case hasPrefix(basename, "work"):
if G.Opts.Import {
@@ -1377,6 +1376,13 @@ func (pkg *Package) checkIncludeConditionally(mkline *MkLine, indentation *Inden
}
}
+ dependingOn := func(varnames []string) string {
+ if len(varnames) == 0 {
+ return ""
+ }
+ return sprintf(" (depending on %s)", strings.Join(varnames, ", "))
+ }
+
if indentation.IsConditional() {
if other := pkg.unconditionalIncludes[key]; other != nil {
if !pkg.Once.FirstTimeSlice("checkIncludeConditionally", mkline.Location.String(), other.Location.String()) {
@@ -1384,9 +1390,11 @@ func (pkg *Package) checkIncludeConditionally(mkline *MkLine, indentation *Inden
}
mkline.Warnf(
- "%q is included conditionally here (depending on %s) "+
+ "%q is included conditionally here%s "+
"and unconditionally in %s.",
- cleanpath(mkline.IncludedFile()), strings.Join(mkline.ConditionalVars(), ", "), mkline.RefTo(other))
+ cleanpath(mkline.IncludedFile()),
+ dependingOn(mkline.ConditionalVars()),
+ mkline.RefTo(other))
explainPkgOptions(other, mkline)
}
@@ -1399,8 +1407,10 @@ func (pkg *Package) checkIncludeConditionally(mkline *MkLine, indentation *Inden
mkline.Warnf(
"%q is included unconditionally here "+
- "and conditionally in %s (depending on %s).",
- cleanpath(mkline.IncludedFile()), mkline.RefTo(other), strings.Join(other.ConditionalVars(), ", "))
+ "and conditionally in %s%s.",
+ cleanpath(mkline.IncludedFile()),
+ mkline.RefTo(other),
+ dependingOn(other.ConditionalVars()))
explainPkgOptions(mkline, other)
}
@@ -1441,7 +1451,7 @@ func (pkg *Package) File(relativeFileName Path) Path {
// Example:
// NewPackage("category/package").Rel("other/package") == "../../other/package"
func (pkg *Package) Rel(filename Path) Path {
- return relpath(pkg.dir, filename)
+ return G.Pkgsrc.Relpath(pkg.dir, filename)
}
// Returns whether the given file (relative to the package directory)
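The effect of the new dependingOn helper on the two warnings above, as a sketch (the exact wording is covered by the tests in package_test.go below):

	dependingOn(nil)                // "", so the "(depending on ...)" clause is omitted entirely
	dependingOn([]string{"OPSYS"})  // " (depending on OPSYS)"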
diff --git a/pkgtools/pkglint/files/package_test.go b/pkgtools/pkglint/files/package_test.go
index a24048a445a..3dba41feef4 100644
--- a/pkgtools/pkglint/files/package_test.go
+++ b/pkgtools/pkglint/files/package_test.go
@@ -116,7 +116,7 @@ func (s *Suite) Test_Package__relative_included_filenames_in_same_directory(c *c
// TODO: Since other.mk is referenced via "../../category/package",
// it would be nice if this relative path would be reflected in the output
// instead of referring just to "other.mk".
- // This needs to be fixed somewhere near relpath.
+ // This needs to be fixed somewhere near Relpath.
//
// The notes are in reverse order because they are produced when checking
// other.mk, and there the relative order is correct (line 2 before line 3).
@@ -1063,7 +1063,7 @@ func (s *Suite) Test_Package_resolveIncludedFile__skipping(c *check.C) {
func (s *Suite) Test_Package_shouldDiveInto(c *check.C) {
t := s.Init(c)
- t.Chdir(".")
+ t.Chdir("category/package")
test := func(including, included Path, expected bool) {
actual := (*Package)(nil).shouldDiveInto(including, included)
@@ -2432,6 +2432,15 @@ func (s *Suite) Test_Package_pkgnameFromDistname(c *check.C) {
"WARN: ~/category/package/Makefile:4: Invalid variable modifier \"c,d\" for \"DISTNAME\".")
test("${DISTFILE:C,\\..*,,}", "aspell-af-0.50-0", "")
+
+ // Parse error because of the missing closing brace; parsing still succeeds.
+ test("${DISTNAME:M", "package-1.0", "",
+ "WARN: ~/category/package/Makefile:4: "+
+ "Missing closing \"}\" for \"DISTNAME\".")
+
+ // Parse error with an unparseable rest.
+ test("$", "package-1.0", "",
+ nil...)
}
func (s *Suite) Test_Package_checkPossibleDowngrade(c *check.C) {
@@ -2989,6 +2998,45 @@ func (s *Suite) Test_Package_checkIncludeConditionally__no_explanation(c *check.
"(depending on OPSYS) and unconditionally in buildlink3.mk:12.")
}
+func (s *Suite) Test_Package_checkIncludeConditionally__conditionally_no_variable(c *check.C) {
+ t := s.Init(c)
+
+ t.SetUpOption("zlib", "")
+ t.SetUpPackage("category/package",
+ ".include \"../../devel/zlib/buildlink3.mk\"",
+ ".if exists(/usr/include)",
+ ".include \"../../sysutils/coreutils/buildlink3.mk\"",
+ ".endif")
+ t.CreateFileLines("mk/bsd.options.mk", "")
+ t.CreateFileLines("devel/zlib/buildlink3.mk", "")
+ t.CreateFileLines("sysutils/coreutils/buildlink3.mk", "")
+
+ t.CreateFileLines("category/package/options.mk",
+ MkCvsID,
+ "",
+ "PKG_OPTIONS_VAR=\tPKG_OPTIONS.package",
+ "PKG_SUPPORTED_OPTIONS=\t# none",
+ "",
+ ".include \"../../mk/bsd.options.mk\"",
+ "",
+ ".if exists(/usr/include)",
+ ". include \"../../devel/zlib/buildlink3.mk\"",
+ ".endif",
+ ".include \"../../sysutils/coreutils/buildlink3.mk\"")
+ t.Chdir("category/package")
+ t.FinishSetUp()
+
+ G.checkdirPackage(".")
+
+ t.CheckOutputLines(
+ "WARN: Makefile:20: \"../../devel/zlib/buildlink3.mk\" "+
+ "is included unconditionally here "+
+ "and conditionally in options.mk:9.",
+ "WARN: Makefile:22: \"../../sysutils/coreutils/buildlink3.mk\" "+
+ "is included conditionally here "+
+ "and unconditionally in options.mk:11.")
+}
+
func (s *Suite) Test_Package_checkIncludeConditionally__explain_PKG_OPTIONS_in_options_mk(c *check.C) {
t := s.Init(c)
diff --git a/pkgtools/pkglint/files/path.go b/pkgtools/pkglint/files/path.go
index 746c5572fc7..4824cc68d1e 100644
--- a/pkgtools/pkglint/files/path.go
+++ b/pkgtools/pkglint/files/path.go
@@ -13,6 +13,12 @@ import (
// Some paths may contain placeholders like @VAR@ or ${VAR}.
// The base directory of relative paths depends on the context
// in which the path is used.
+//
+// TODO: Consider adding several more specialized types of path:
+// TODO: CurrPath, relative to the current working directory
+// TODO: PkgsrcPath, relative to the pkgsrc root
+// TODO: PackagePath, relative to the package directory
+// TODO: RelativePath, relative to some other basedir
type Path string
func NewPath(name string) Path { return Path(name) }
@@ -32,10 +38,33 @@ func (p Path) Split() (dir Path, base string) {
return Path(strDir), strBase
}
+// Parts splits the path into its components.
+// Multiple adjacent slashes are treated like a single slash.
+// Parts that are single dots are skipped.
+// Absolute paths have an empty string as their first part.
+// All other parts are nonempty.
func (p Path) Parts() []string {
- return strings.FieldsFunc(string(p), func(r rune) bool { return r == '/' })
+ if p == "" {
+ return nil
+ }
+
+ parts := strings.Split(string(p), "/")
+ j := 0
+ for i, part := range parts {
+ if (i == 0 || part != "") && part != "." {
+ parts[j] = part
+ j++
+ }
+ }
+ parts = parts[:j]
+ if len(parts) == 0 {
+ return []string{"."}
+ }
+ return parts
}
+// Count returns the number of meaningful parts of the path.
+// See Parts.
func (p Path) Count() int { return len(p.Parts()) }
func (p Path) HasPrefixText(prefix string) bool {
@@ -45,8 +74,26 @@ func (p Path) HasPrefixText(prefix string) bool {
// HasPrefixPath returns whether the path starts with the given prefix.
// The basic unit of comparison is a path component, not a character.
func (p Path) HasPrefixPath(prefix Path) bool {
- return hasPrefix(string(p), string(prefix)) &&
- (len(p) == len(prefix) || p[len(prefix)] == '/')
+ // Handle the simple case first, without any memory allocations.
+ if hasPrefix(string(p), string(prefix)) {
+ return len(p) == len(prefix) || p[len(prefix)] == '/'
+ }
+
+ if prefix == "." {
+ return !p.IsAbs()
+ }
+
+ parts := p.Parts()
+ prefixParts := prefix.Parts()
+ if len(prefixParts) > len(parts) {
+ return false
+ }
+ for i, prefixPart := range prefixParts {
+ if parts[i] != prefixPart {
+ return false
+ }
+ }
+ return true
}
// TODO: Check each call whether ContainsPath is more appropriate; add tests
@@ -75,7 +122,6 @@ func (p Path) ContainsPathCanonical(sub Path) bool {
return cleaned.ContainsPath(sub)
}
-// TODO: Check each call whether HasSuffixPath is more appropriate; add tests
func (p Path) HasSuffixText(suffix string) bool {
return hasSuffix(string(p), suffix)
}
@@ -107,15 +153,35 @@ func (p Path) JoinNoClean(s Path) Path {
func (p Path) Clean() Path { return NewPath(path.Clean(string(p))) }
+// CleanDot returns the path with single dots removed and double slashes
+// collapsed.
+func (p Path) CleanDot() Path {
+ if !p.ContainsText(".") {
+ return p
+ }
+
+ var parts []string
+ for i, part := range p.Parts() {
+ if !(part == "." || i > 0 && part == "") { // See Parts
+ parts = append(parts, part)
+ }
+ }
+ if len(parts) == 0 {
+ return "."
+ }
+ return NewPath(strings.Join(parts, "/"))
+}
+
func (p Path) IsAbs() bool {
return p.HasPrefixText("/") || filepath.IsAbs(filepath.FromSlash(string(p)))
}
+// Rel returns the relative path from this path to the other.
func (p Path) Rel(other Path) Path {
fp := filepath.FromSlash(p.String())
fpOther := filepath.FromSlash(other.String())
rel, err := filepath.Rel(fp, fpOther)
- assertNil(err, "relpath from %q to %q", p, other)
+ assertNil(err, "Relpath from %q to %q", p, other)
return NewPath(filepath.ToSlash(rel))
}
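
As a quick illustration of the reworked Path semantics, here is a minimal sketch; the function is hypothetical and not part of the change, and the expected values are taken from the path_test.go tests below.

	// Sketch only; assumes the Path type and methods from path.go above.
	func examplePathSemantics() {
		_ = NewPath("relative////subdir").Parts()    // []string{"relative", "subdir"}
		_ = NewPath("/root").Parts()                 // []string{"", "root"}: absolute paths start with ""
		_ = NewPath("./././.").Parts()               // []string{"."}: dot-only paths collapse to a single dot
		_ = NewPath("a/bb///../c").CleanDot()        // "a/bb/../c": ".." components are kept
		_ = NewPath("filename").HasPrefixPath(".")   // true: every relative path starts at "."
		_ = NewPath("/root").HasPrefixPath("/root/") // true: the trailing slash no longer matters
	}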
diff --git a/pkgtools/pkglint/files/path_test.go b/pkgtools/pkglint/files/path_test.go
index 51b9aec1232..d766010a35d 100644
--- a/pkgtools/pkglint/files/path_test.go
+++ b/pkgtools/pkglint/files/path_test.go
@@ -104,11 +104,32 @@ func (s *Suite) Test_Path_Parts(c *check.C) {
t.CheckDeepEquals(NewPath(p).Parts(), parts)
}
- test("", []string{}...)
- test("././././", ".", ".", ".", ".") // No trailing ""
- test("/root", "root") // No leading ""
- test("filename", "filename")
- test("dir/filename", "dir", "filename")
+ // Only the empty path returns an empty slice.
+ test("", nil...)
+
+ // The standard cases for relative paths.
+ test("relative", "relative")
+ test("relative/subdir", "relative", "subdir")
+ test("relative////subdir", "relative", "subdir")
+ test("relative/..", "relative", "..")
+ test("relative/.", "relative")
+
+ // Leading dots are removed when they are followed by something.
+ test("./relative", "relative")
+
+ // A path consisting of only dots produces a single dot.
+ test("./././.", ".")
+
+ // Slashes at the end are treated like a single dot.
+ test("././././", ".")
+ test(".///////", ".")
+
+ // Absolute paths have an empty first component.
+ test("/", "")
+ test("/.", "")
+ test("/root", "", "root")
+
+ // The backslash is not a path separator.
test("dir/filename\\with\\backslash", "dir", "filename\\with\\backslash")
}
@@ -119,12 +140,36 @@ func (s *Suite) Test_Path_Count(c *check.C) {
t.CheckEquals(NewPath(p).Count(), count)
}
- test("", 0) // FIXME
- test("././././", 4)
- test("/root", 1) // FIXME
+ test("././././", 1)
+ test("/root", 2)
test("filename", 1)
test("dir/filename", 2)
test("dir/filename\\with\\backslash", 2)
+
+ // Only the empty path returns an empty slice.
+ test("", 0)
+
+ // The standard cases for canonical relative paths.
+ test("relative", 1)
+ test("relative/subdir", 2)
+ test("relative////subdir", 2)
+ test("relative/..", 2)
+ test("relative/.", 1)
+
+ // A path consisting of only dots produces a single dot.
+ test("./././.", 1)
+
+ // Slashes at the end are treated like a single dot.
+ test("././././", 1)
+ test(".///////", 1)
+
+ // Absolute paths have an empty first component.
+ test("/", 1)
+ test("/.", 1)
+ test("/root", 2)
+
+ // The backslash is not a path separator.
+ test("dir/filename\\with\\backslash", 2)
}
func (s *Suite) Test_Path_HasPrefixText(c *check.C) {
@@ -155,9 +200,21 @@ func (s *Suite) Test_Path_HasPrefixPath(c *check.C) {
test("", "x", false)
test("/root", "/r", false)
test("/root", "/root", true)
- test("/root", "/root/", false)
+
+ // The prefix matches even though its textual representation is longer
+ // than the path: the trailing slash merely marks it as a directory,
+ // and there are only a few cases where that distinction matters, such
+ // as in rsync and mkdir.
+ test("/root", "/root/", true)
+
test("/root/", "/root", true)
+ test("/root/", "root", false)
test("/root/subdir", "/root", true)
+ test("filename", ".", true)
+ test("filename", "./filename", true)
+ test("filename", "./file", false)
+ test("filename", "./filename/sub", false)
+ test("/anything", ".", false)
}
func (s *Suite) Test_Path_ContainsText(c *check.C) {
@@ -343,6 +400,23 @@ func (s *Suite) Test_Path_Clean(c *check.C) {
test("a/bb///../c", "a/c")
}
+func (s *Suite) Test_Path_CleanDot(c *check.C) {
+ t := s.Init(c)
+
+ test := func(p, result Path) {
+ t.CheckEquals(p.CleanDot(), result)
+ }
+
+ test("", "")
+ test(".", ".")
+ test("./././", ".")
+ test("a/bb///../c", "a/bb/../c")
+ test("./filename", "filename")
+ test("/absolute", "/absolute")
+ test("/usr/pkgsrc/wip/package", "/usr/pkgsrc/wip/package")
+ test("/usr/pkgsrc/wip/package/../mk/git-package.mk", "/usr/pkgsrc/wip/package/../mk/git-package.mk")
+}
+
func (s *Suite) Test_Path_IsAbs(c *check.C) {
t := s.Init(c)
diff --git a/pkgtools/pkglint/files/pkglint.1 b/pkgtools/pkglint/files/pkglint.1
index 88839eaf136..0c69c966a76 100644
--- a/pkgtools/pkglint/files/pkglint.1
+++ b/pkgtools/pkglint/files/pkglint.1
@@ -1,4 +1,4 @@
-.\" $NetBSD: pkglint.1,v 1.56 2019/06/30 20:56:19 rillig Exp $
+.\" $NetBSD: pkglint.1,v 1.57 2019/11/27 22:10:07 rillig Exp $
.\" From FreeBSD: portlint.1,v 1.8 1997/11/25 14:53:14 itojun Exp
.\"
.\" Copyright (c) 1997 by Jun-ichiro Itoh <itojun@itojun.org>.
@@ -8,7 +8,7 @@
.\" Thomas Klausner <wiz@NetBSD.org>, 2012.
.\" Roland Illig <rillig@NetBSD.org>, 2015-2019.
.\"
-.Dd June 17, 2019
+.Dd November 27, 2019
.Dt PKGLINT 1
.Os
.Sh NAME
diff --git a/pkgtools/pkglint/files/pkglint.go b/pkgtools/pkglint/files/pkglint.go
index 8a4ff984465..e612bb6a8e8 100644
--- a/pkgtools/pkglint/files/pkglint.go
+++ b/pkgtools/pkglint/files/pkglint.go
@@ -43,7 +43,7 @@ type Pkglint struct {
interner StringInterner
// cwd is the absolute path to the current working
- // directory. It is used for speeding up relpath and abspath.
+ // directory. It is used for speeding up Relpath and abspath.
// There is no other use for it.
cwd Path
@@ -106,7 +106,7 @@ type pkglintFatal struct{}
// G is the abbreviation for "global state";
// this and the tracer are the only global variables in this Go package.
var (
- G = NewPkglint(nil, nil)
+ G = NewPkglint(os.Stdout, os.Stderr)
trace tracePkg.Tracer
)
@@ -179,7 +179,8 @@ func (pkglint *Pkglint) setUpProfiling() func() {
f, err := os.Create("pkglint.pprof")
if err != nil {
- dummyLine.Fatalf("Cannot create profiling file: %s", err)
+ pkglint.Logger.TechErrorf("pkglint.pprof", "Cannot create profiling file: %s", err)
+ panic(pkglintFatal{})
}
atExit(func() { assertNil(f.Close(), "") })
@@ -217,11 +218,11 @@ func (pkglint *Pkglint) prepareMainLoop() {
// pkglint doesn't know where to load the infrastructure files from,
// and these are needed for virtually every single check.
// Therefore, the only sensible thing to do is to quit immediately.
- dummyLine.Fatalf("%q must be inside a pkgsrc tree.", firstDir)
+ NewLineWhole(firstDir).Fatalf("Must be inside a pkgsrc tree.")
}
pkglint.Pkgsrc = NewPkgsrc(joinPath(firstDir, relTopdir))
- pkglint.Wip = pkglint.Pkgsrc.ToRel(firstDir).HasPrefixPath("wip") // Same as in Pkglint.Check.
+ pkglint.Wip = pkglint.Pkgsrc.IsWip(firstDir) // See Pkglint.checkMode.
pkglint.Pkgsrc.LoadInfrastructure()
currentUser, err := user.Current()
@@ -244,7 +245,6 @@ func (pkglint *Pkglint) ParseCommandLine(args []string) int {
opts.AddFlagVar('h', "help", &gopts.ShowHelp, false, "show a detailed usage message")
opts.AddFlagVar('I', "dumpmakefile", &gopts.DumpMakefile, false, "dump the Makefile after parsing")
opts.AddFlagVar('i', "import", &gopts.Import, false, "prepare the import of a wip package")
- opts.AddFlagVar('m', "log-verbose", &lopts.LogVerbose, false, "allow the same diagnostic more than once")
opts.AddStrList('o', "only", &gopts.LogOnly, "only log diagnostics containing the given text")
opts.AddFlagVar('p', "profiling", &gopts.Profiling, false, "profile the executing program")
opts.AddFlagVar('q', "quiet", &lopts.Quiet, false, "don't show a summary line when finishing")
@@ -338,9 +338,8 @@ func (pkglint *Pkglint) checkMode(dirent Path, mode os.FileMode) {
}
if isReg {
- depth := pkgsrcRel.Count() - 1 // FIXME
pkglint.checkExecutable(dirent, mode)
- pkglint.checkReg(dirent, basename, depth)
+ pkglint.checkReg(dirent, basename, pkgsrcRel.Count())
return
}
@@ -448,7 +447,8 @@ func CheckLinesDescr(lines *Lines) {
ck.CheckValidCharacters()
if containsVarRef(line.Text) {
- for _, token := range NewMkParser(nil, line.Text).MkTokens() {
+ tokens, _ := NewMkLexer(line.Text, nil).MkTokens()
+ for _, token := range tokens {
if token.Varuse != nil && G.Pkgsrc.VariableType(nil, token.Varuse.varname) != nil {
line.Notef("Variables are not expanded in the DESCR file.")
}
@@ -546,9 +546,13 @@ func CheckFileMk(filename Path) {
mklines.SaveAutofixChanges()
}
+// checkReg checks the given regular file.
+// depth is 3 for files in the package directory, and 4 or more for files
+// deeper in the directory hierarchy, such as in files/ or patches/.
func (pkglint *Pkglint) checkReg(filename Path, basename string, depth int) {
- if depth == 2 && !pkglint.Wip {
+ if depth == 3 && !pkglint.Wip {
+ // FIXME: There's no good reason for prohibiting a README file.
if contains(basename, "README") || contains(basename, "TODO") {
NewLineWhole(filename).Errorf("Packages in main pkgsrc must not have a %s file.", basename)
// TODO: Add a convincing explanation.
@@ -560,8 +564,8 @@ func (pkglint *Pkglint) checkReg(filename Path, basename string, depth int) {
case hasSuffix(basename, "~"),
hasSuffix(basename, ".orig"),
hasSuffix(basename, ".rej"),
- contains(basename, "README") && depth == 2,
- contains(basename, "TODO") && depth == 2:
+ contains(basename, "README") && depth == 3,
+ contains(basename, "TODO") && depth == 3:
if pkglint.Opts.Import {
NewLineWhole(filename).Errorf("Must be cleaned up before committing the package.")
}
@@ -605,7 +609,7 @@ func (pkglint *Pkglint) checkReg(filename Path, basename string, depth int) {
CheckLinesPatch(lines)
}
- case matches(filename.String(), `(?:^|/)patches/manual[^/]*$`):
+ case filename.Dir().Base() == "patches" && matches(filename.Base(), `^manual[^/]*$`):
if trace.Tracing {
trace.Stepf("Unchecked file %q.", filename)
}
@@ -677,7 +681,7 @@ func (pkglint *Pkglint) checkExecutable(filename Path, mode os.FileMode) {
fix.Describef(0, "Clearing executable bits")
if autofix {
if err := filename.Chmod(mode &^ 0111); err != nil {
- G.Logger.Errorf(cleanpath(filename), "Cannot clear executable bits: %s", err)
+ G.Logger.TechErrorf(cleanpath(filename), "Cannot clear executable bits: %s", err)
}
}
})
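
The new depth convention for checkReg, following Path.Count from above, in a minimal sketch; the function is hypothetical and the depth values match the pkglint_test.go tests below.

	// Sketch only; depth counts the components of the pkgsrc-relative filename.
	func exampleCheckRegDepth(pkglint *Pkglint) {
		// A file directly in the package directory has depth 3 ...
		pkglint.checkReg("category/package/DESCR", "DESCR", 3)
		// ... and files in files/ or patches/ have depth 4.
		pkglint.checkReg("category/package/patches/patch-aa", "patch-aa", 4)
	}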
diff --git a/pkgtools/pkglint/files/pkglint_test.go b/pkgtools/pkglint/files/pkglint_test.go
index ee2d6a87e48..f24cae3c12f 100644
--- a/pkgtools/pkglint/files/pkglint_test.go
+++ b/pkgtools/pkglint/files/pkglint_test.go
@@ -58,7 +58,6 @@ func (s *Suite) Test_Pkglint_Main__help(c *check.C) {
" -h, --help show a detailed usage message",
" -I, --dumpmakefile dump the Makefile after parsing",
" -i, --import prepare the import of a wip package",
- " -m, --log-verbose allow the same diagnostic more than once",
" -o, --only only log diagnostics containing the given text",
" -p, --profiling profile the executing program",
" -q, --quiet don't show a summary line when finishing",
@@ -102,7 +101,7 @@ func (s *Suite) Test_Pkglint_Main__no_args(c *check.C) {
// The "." from the error message is the implicit argument added in Pkglint.Main.
t.CheckEquals(exitcode, 1)
t.CheckOutputLines(
- "FATAL: \".\" must be inside a pkgsrc tree.")
+ "FATAL: .: Must be inside a pkgsrc tree.")
}
func (s *Suite) Test_Pkglint_Main__unknown_option(c *check.C) {
@@ -331,7 +330,19 @@ func (s *Suite) Test_Pkglint_Main__profiling_error(c *check.C) {
t.CheckEquals(exitcode, 1)
t.CheckOutputMatches(
- `FATAL: Cannot create profiling file: open pkglint\.pprof: .*`)
+ `ERROR: pkglint\.pprof: Cannot create profiling file: open pkglint\.pprof: .*`)
+}
+
+// Branch coverage for Logger.Logf, the level != Fatal case.
+func (s *Suite) Test_Pkglint_prepareMainLoop__fatal(c *check.C) {
+ t := s.Init(c)
+
+ t.Chdir(".")
+ t.Main("--profiling", t.File("does-not-exist").String())
+
+ t.CheckOutputLines(
+ "fileCache: 0 hits, 0 misses",
+ "FATAL: does-not-exist: Must be inside a pkgsrc tree.")
}
func (s *Suite) Test_Pkglint_ParseCommandLine__only(c *check.C) {
@@ -874,12 +885,12 @@ func (s *Suite) Test_Pkglint_checkReg__file_not_found(c *check.C) {
t.Chdir(".")
- G.checkReg("buildlink3.mk", "buildlink3.mk", 2)
- G.checkReg("DESCR", "DESCR", 2)
- G.checkReg("distinfo", "distinfo", 2)
- G.checkReg("MESSAGE", "MESSAGE", 2)
- G.checkReg("patches/patch-aa", "patch-aa", 2)
- G.checkReg("PLIST", "PLIST", 2)
+ G.checkReg("buildlink3.mk", "buildlink3.mk", 3)
+ G.checkReg("DESCR", "DESCR", 3)
+ G.checkReg("distinfo", "distinfo", 3)
+ G.checkReg("MESSAGE", "MESSAGE", 3)
+ G.checkReg("patches/patch-aa", "patch-aa", 3)
+ G.checkReg("PLIST", "PLIST", 3)
t.CheckOutputLines(
"ERROR: buildlink3.mk: Cannot be read.",
@@ -897,7 +908,7 @@ func (s *Suite) Test_Pkglint_checkReg__no_tracing(c *check.C) {
t.Chdir(".")
t.DisableTracing()
- G.checkReg("patches/manual-aa", "manual-aa", 2)
+ G.checkReg("patches/manual-aa", "manual-aa", 4)
t.CheckOutputEmpty()
}
@@ -1001,7 +1012,7 @@ func (s *Suite) Test_Pkglint_checkReg__unknown_file_in_patches(c *check.C) {
t.CreateFileDummyPatch("category/Makefile/patches/index")
- G.checkReg(t.File("category/Makefile/patches/index"), "index", 3)
+ G.checkReg(t.File("category/Makefile/patches/index"), "index", 4)
t.CheckOutputLines(
"WARN: ~/category/Makefile/patches/index: " +
@@ -1014,7 +1025,7 @@ func (s *Suite) Test_Pkglint_checkReg__patch_for_Makefile_fragment(c *check.C) {
t.CreateFileDummyPatch("category/package/patches/patch-compiler.mk")
t.Chdir("category/package")
- G.checkReg(t.File("patches/patch-compiler.mk"), "patch-compiler.mk", 3)
+ G.checkReg(t.File("patches/patch-compiler.mk"), "patch-compiler.mk", 4)
t.CheckOutputEmpty()
}
@@ -1024,7 +1035,7 @@ func (s *Suite) Test_Pkglint_checkReg__file_in_files(c *check.C) {
t.CreateFileLines("category/package/files/index")
- G.checkReg(t.File("category/package/files/index"), "index", 3)
+ G.checkReg(t.File("category/package/files/index"), "index", 4)
// These files are ignored since they could contain anything.
t.CheckOutputEmpty()
@@ -1036,8 +1047,8 @@ func (s *Suite) Test_Pkglint_checkReg__spec(c *check.C) {
t.CreateFileLines("category/package/spec")
t.CreateFileLines("regress/package/spec")
- G.checkReg(t.File("category/package/spec"), "spec", 2)
- G.checkReg(t.File("regress/package/spec"), "spec", 2)
+ G.checkReg(t.File("category/package/spec"), "spec", 3)
+ G.checkReg(t.File("regress/package/spec"), "spec", 3)
t.CheckOutputLines(
"WARN: ~/category/package/spec: Only packages in regress/ may have spec files.")
@@ -1047,7 +1058,7 @@ func (s *Suite) Test_Pkglint_checkExecutable(c *check.C) {
t := s.Init(c)
filename := t.CreateFileLines("file.mk")
- err := os.Chmod(filename.String(), 0555)
+ err := filename.Chmod(0555)
assertNil(err, "")
G.checkExecutable(filename, 0555)
diff --git a/pkgtools/pkglint/files/pkgsrc.go b/pkgtools/pkglint/files/pkgsrc.go
index 3e3dd1e3a5c..2c446ed765f 100644
--- a/pkgtools/pkglint/files/pkgsrc.go
+++ b/pkgtools/pkglint/files/pkgsrc.go
@@ -813,7 +813,7 @@ func (src *Pkgsrc) ListVersions(category Path, re regex.Pattern, repl string, er
}
if len(names) == 0 {
if errorIfEmpty {
- dummyLine.Errorf("Cannot find package versions of %q in %q.", re, src.File(category))
+ NewLineWhole(src.File(category)).Errorf("Cannot find package versions of %q.", re)
}
src.listVersions[cacheKey] = nil
return nil
@@ -1035,13 +1035,92 @@ func (src *Pkgsrc) Load(filename Path, options LoadOptions) *Lines {
return Load(src.File(filename), options)
}
+// Relpath returns the canonical relative path from the directory "from"
+// to the filesystem entry "to".
+//
+// The relative path is built by going from the "from" directory up to the
+// pkgsrc root and from there to the "to" filename. This produces the form
+// "../../category/package" that is found in DEPENDS and .include lines.
+//
+// Both from and to are interpreted relative to the current working directory,
+// unless they are absolute paths.
+//
+// This function should only be used if the relative path from one file to
+// another cannot be computed in another way. The preferred way is to take
+// the relative filenames directly from the .include or exists() where they
+// appear.
+//
+// TODO: Invent data types for all kinds of relative paths that occur in pkgsrc
+// and pkglint. Make sure that these paths cannot be accidentally mixed.
+func (src *Pkgsrc) Relpath(from, to Path) (result Path) {
+ cfrom := from.Clean()
+ cto := to.Clean()
+
+ if cfrom == cto {
+ return "."
+ }
+
+ // Take a shortcut for the common case from "dir" to "dir/subdir/...".
+ if cto.HasPrefixPath(cfrom) {
+ rel := cfrom.Rel(cto)
+ if !rel.HasPrefixPath("..") {
+ return rel
+ }
+ }
+
+ // Take a shortcut for the common case from "category/package" to ".".
+ // This is the most common variant in a complete pkgsrc scan.
+ if cto == "." {
+ fromParts := cfrom.Parts()
+ if len(fromParts) == 2 && fromParts[0] != ".." && fromParts[1] != ".." {
+ return "../.."
+ }
+ }
+
+ if cfrom == "." && !cto.IsAbs() {
+ return cto.Clean()
+ }
+
+ absFrom := abspath(cfrom)
+ absTopdir := abspath(src.topdir)
+ absTo := abspath(cto)
+
+ up := absFrom.Rel(absTopdir)
+ down := absTopdir.Rel(absTo)
+
+ if absFrom.HasPrefixPath(absTo) || absTo.HasPrefixPath(absFrom) {
+ return absFrom.Rel(absTo)
+ }
+
+ fromParts := absTopdir.Rel(absFrom).Parts()
+ toParts := down.Parts()
+
+ if len(fromParts) >= 2 && len(toParts) >= 2 {
+ if fromParts[0] == toParts[0] && fromParts[1] == toParts[1] {
+ var relParts []string
+ for _ = range fromParts[2:] {
+ relParts = append(relParts, "..")
+ }
+ relParts = append(relParts, toParts[2:]...)
+ return NewPath(strings.Join(relParts, "/")).CleanDot()
+ }
+ }
+
+ result = up.JoinNoClean(down).CleanDot()
+ return
+}
+
// File resolves a filename relative to the pkgsrc top directory.
//
// Example:
// NewPkgsrc("/usr/pkgsrc").File("distfiles") => "/usr/pkgsrc/distfiles"
func (src *Pkgsrc) File(relativeName Path) Path {
+ cleaned := relativeName.Clean()
+ if cleaned == "." {
+ return src.topdir.CleanDot()
+ }
// TODO: Package.File resolves variables, Pkgsrc.File doesn't. They should behave the same.
- return cleanpath(joinPath(src.topdir, relativeName))
+ return src.topdir.JoinNoClean(cleaned).CleanDot()
}
// ToRel returns the path of `filename`, relative to the pkgsrc top directory.
@@ -1049,16 +1128,26 @@ func (src *Pkgsrc) File(relativeName Path) Path {
// Example:
// NewPkgsrc("/usr/pkgsrc").ToRel("/usr/pkgsrc/distfiles") => "distfiles"
func (src *Pkgsrc) ToRel(filename Path) Path {
- return relpath(src.topdir, filename)
+ return src.Relpath(src.topdir, filename)
}
-// IsInfra returns whether the given filename (relative to the pkglint
+// IsInfra returns whether the given filename (relative to the current
// working directory) is part of the pkgsrc infrastructure.
func (src *Pkgsrc) IsInfra(filename Path) bool {
rel := src.ToRel(filename)
return rel.HasPrefixPath("mk") || rel.HasPrefixPath("wip/mk")
}
+func (src *Pkgsrc) IsInfraMain(filename Path) bool {
+ rel := src.ToRel(filename)
+ return rel.HasPrefixPath("mk")
+}
+
+func (src *Pkgsrc) IsWip(filename Path) bool {
+ rel := src.ToRel(filename)
+ return rel.HasPrefixPath("wip")
+}
+
// Change describes a modification to a single package, from the doc/CHANGES-* files.
type Change struct {
Location Location
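
A minimal usage sketch for the new Pkgsrc.Relpath; the function is hypothetical, the expected results come from the pkgsrc_test.go tests below, and they assume that the current working directory is the pkgsrc root.

	// Sketch only.
	func exampleRelpath(src *Pkgsrc) {
		_ = src.Relpath("category/package", ".")      // "../..", the package-to-root shortcut
		_ = src.Relpath("some/dir", "some/dir/file")  // "file", the prefix shortcut
		_ = src.Relpath("some/dir", "some/directory") // "../../some/directory", the DEPENDS form
	}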
diff --git a/pkgtools/pkglint/files/pkgsrc_test.go b/pkgtools/pkglint/files/pkgsrc_test.go
index ed1073ff32c..88fb5f434a7 100644
--- a/pkgtools/pkglint/files/pkgsrc_test.go
+++ b/pkgtools/pkglint/files/pkgsrc_test.go
@@ -1,6 +1,10 @@
package pkglint
-import "gopkg.in/check.v1"
+import (
+ "gopkg.in/check.v1"
+ "os"
+ "path/filepath"
+)
func (s *Suite) Test_Pkgsrc__frozen(c *check.C) {
t := s.Init(c)
@@ -827,7 +831,7 @@ func (s *Suite) Test_Pkgsrc_Latest__not_found(c *check.C) {
t.CheckEquals(latest, "")
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
}
// In 2017, PostgreSQL changed their versioning scheme to SemVer,
@@ -956,7 +960,7 @@ func (s *Suite) Test_Pkgsrc_ListVersions__no_basedir(c *check.C) {
c.Check(versions, check.HasLen, 0)
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
}
func (s *Suite) Test_Pkgsrc_ListVersions__no_subdirs(c *check.C) {
@@ -968,7 +972,7 @@ func (s *Suite) Test_Pkgsrc_ListVersions__no_subdirs(c *check.C) {
c.Check(versions, check.HasLen, 0)
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
}
// Ensures that failed lookups are also cached since they can be assumed
@@ -980,7 +984,7 @@ func (s *Suite) Test_Pkgsrc_ListVersions__error_is_cached(c *check.C) {
c.Check(versions, check.HasLen, 0)
t.CheckOutputLines(
- "ERROR: Cannot find package versions of \"^python[0-9]+$\" in \"~/lang\".")
+ "ERROR: ~/lang: Cannot find package versions of \"^python[0-9]+$\".")
versions2 := G.Pkgsrc.ListVersions("lang", `^python[0-9]+$`, "../../lang/$0", true)
@@ -1143,7 +1147,7 @@ func (s *Suite) Test_Pkgsrc_checkToplevelUnusedLicenses(c *check.C) {
t.Main("-r", "-Cglobal", ".")
t.CheckOutputLines(
- "WARN: ~/category/package2/Makefile:11: License file ~/licenses/missing does not exist.",
+ "WARN: ~/category/package2/Makefile:11: License file ../../licenses/missing does not exist.",
"WARN: ~/licenses/gnu-gpl-v2: This license seems to be unused.", // Added by Tester.SetUpPkgsrc
"WARN: ~/licenses/gnu-gpl-v3: This license seems to be unused.",
"3 warnings found.")
@@ -1169,6 +1173,147 @@ func (s *Suite) Test_Pkgsrc_ReadDir(c *check.C) {
t.CheckDeepEquals(names, []string{"aaa-subdir", "file", "subdir"})
}
+func (s *Suite) Test_Pkgsrc_Relpath(c *check.C) {
+ t := s.Init(c)
+
+ t.Chdir(".")
+ t.CheckEquals(G.Pkgsrc.topdir, t.tmpdir)
+
+ test := func(from, to Path, result Path) {
+ t.CheckEquals(G.Pkgsrc.Relpath(from, to), result)
+ }
+
+ // TODO: add tests going from each of (top, cat, pkg, pkgsub) to the others
+
+ test("some/dir", "some/directory", "../../some/directory")
+ test("some/directory", "some/dir", "../../some/dir")
+
+ test("category/package/.", ".", "../..")
+
+ // This case is handled by one of the shortcuts that avoid file system access.
+ test(
+ "./.",
+ "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
+ "meta-pkgs/kde/kf5.mk")
+
+ test(".hidden/dir", ".", "../..")
+ test("dir/.hidden", ".", "../..")
+
+ // This happens when "pkglint -r x11" is run.
+ G.Pkgsrc.topdir = "x11/.."
+
+ test(
+ "./.",
+ "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
+ "meta-pkgs/kde/kf5.mk")
+ test(
+ "x11/..",
+ "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
+ "meta-pkgs/kde/kf5.mk")
+
+ volume := NewPathSlash(filepath.VolumeName(t.tmpdir.String()))
+ G.Pkgsrc.topdir = volume.JoinNoClean("usr/pkgsrc")
+
+ // Taken from Test_MkLineChecker_CheckRelativePath__wip_mk
+ test(
+ G.Pkgsrc.File("wip/package"),
+ G.Pkgsrc.File("wip/package/../mk/git-package.mk"),
+ "../../wip/mk/git-package.mk")
+
+ // Taken from Test_Package_parse__relative
+ test(
+ G.Pkgsrc.File("category/package"),
+ G.Pkgsrc.File("category/package/../package/extra.mk"),
+ "extra.mk")
+
+ // Make sure that .. in later positions is resolved correctly as well.
+ test(
+ G.Pkgsrc.File("category/package"),
+ G.Pkgsrc.File("category/package/sub/../../package/extra.mk"),
+ "extra.mk")
+
+ G.Pkgsrc.topdir = t.tmpdir
+
+ test("some/dir", "some/dir/../..", "../..")
+ test("some/dir", "some/dir/./././../..", "../..")
+ test("some/dir", "some/dir/", ".")
+
+ test("some/dir", ".", "../..")
+ test("some/dir/.", ".", "../..")
+
+ chdir := func(path Path) {
+ // See Tester.Chdir; a direct Chdir works here since this test
+ // neither loads lines nor processes them.
+ assertNil(os.Chdir(path.String()), "Chdir %s", path)
+ G.cwd = abspath(path)
+ }
+
+ t.CreateFileLines("testdir/subdir/dummy")
+ chdir("testdir/subdir")
+
+ test(".", ".", ".")
+ test("./.", "./dir", "dir")
+
+ test("dir", ".", "..")
+ test("dir", "dir", ".")
+ test("dir", "dir/file", "file")
+ test("dir", "dir/..", "..")
+
+ test(".", "../../other/package", "../../other/package")
+
+ // Even though this path could be shortened to "../package",
+ // in pkgsrc the convention is to always go from a package
+ // directory up to the root and then back to the other package
+ // directory.
+ test(".", "../../testdir/package", "../../testdir/package")
+
+ chdir("..")
+
+ // This covers the case in which pkglint is run from a category
+ // directory to check the complete pkgsrc tree.
+ test("..", "../other/package", "other/package")
+
+ chdir(t.tmpdir.JoinNoClean(".."))
+
+ test(
+ "pkgsrc/category/package",
+ "pkgsrc/category/package/../../other/package",
+ "../../other/package")
+
+ test(
+ "pkgsrc/category/package",
+ "pkgsrc/category/package/../../category/other",
+ "../../category/other")
+
+ chdir(t.tmpdir.JoinNoClean("testdir").JoinNoClean("subdir"))
+
+ test("..", ".", "subdir")
+ test("../..", ".", "testdir/subdir")
+ test("../../", ".", "testdir/subdir")
+}
+
+func (s *Suite) Test_Pkgsrc_File(c *check.C) {
+ t := s.Init(c)
+
+ G.Pkgsrc.topdir = "$pkgsrcdir"
+
+ test := func(rel, resolved Path) {
+ t.CheckEquals(G.Pkgsrc.File(rel), resolved)
+ }
+
+ test(".", "$pkgsrcdir")
+ test("category", "$pkgsrcdir/category")
+
+ test(
+ "category/package/../../mk/bsd.prefs.mk",
+ "$pkgsrcdir/mk/bsd.prefs.mk")
+
+ G.Pkgsrc.topdir = "."
+
+ test(".", ".")
+ test("filename", "filename")
+}
+
func (s *Suite) Test_Change_Version(c *check.C) {
t := s.Init(c)
diff --git a/pkgtools/pkglint/files/pkgver/vercmp_test.go b/pkgtools/pkglint/files/pkgver/vercmp_test.go
index 2717d69a22b..a1b725a764a 100644
--- a/pkgtools/pkglint/files/pkgver/vercmp_test.go
+++ b/pkgtools/pkglint/files/pkgver/vercmp_test.go
@@ -93,8 +93,8 @@ func (s *Suite) Test_newVersion(c *check.C) {
&version{[]int{3, 0, 5, 0, 4, 5, 22, 1710}, 0})
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/plist_test.go b/pkgtools/pkglint/files/plist_test.go
index 43cc041c3b9..146e55a9fda 100644
--- a/pkgtools/pkglint/files/plist_test.go
+++ b/pkgtools/pkglint/files/plist_test.go
@@ -707,7 +707,7 @@ func (s *Suite) Test_PlistChecker_checkPathShareIcons__using_gnome_icon_theme(c
// This variant is typical for recursive runs of pkglint.
G.Check("./graphics/gnome-icon-theme-extras")
- // Up to March 2019, a bug in relpath produced different behavior
+ // Up to March 2019, a bug in Relpath produced different behavior
// depending on the leading dot.
t.CheckOutputEmpty()
}
diff --git a/pkgtools/pkglint/files/shell.go b/pkgtools/pkglint/files/shell.go
index fd4f1c39f7a..98d3db152ed 100644
--- a/pkgtools/pkglint/files/shell.go
+++ b/pkgtools/pkglint/files/shell.go
@@ -116,7 +116,7 @@ func (scc *SimpleCommandChecker) handleCommandVariable() bool {
}
shellword := scc.strcmd.Name
- varuse := NewMkParser(nil, shellword).VarUse()
+ varuse := NewMkLexer(shellword, nil).VarUse()
if varuse == nil {
return false
}
diff --git a/pkgtools/pkglint/files/shell_test.go b/pkgtools/pkglint/files/shell_test.go
index 36dc8a06534..956c9f2d909 100644
--- a/pkgtools/pkglint/files/shell_test.go
+++ b/pkgtools/pkglint/files/shell_test.go
@@ -838,7 +838,7 @@ func (s *Suite) Test_ShellLineChecker_CheckShellCommandLine__implementation(c *c
// foobar="`echo \"foo bar\"`"
text := "foobar=\"`echo \\\"foo bar\\\"`\""
- tokens, rest := splitIntoShellTokens(dummyLine, text)
+ tokens, rest := splitIntoShellTokens(ck.mkline.Line, text)
t.CheckDeepEquals(tokens, []string{text})
t.CheckEquals(rest, "")
@@ -907,13 +907,17 @@ func (s *Suite) Test_ShellLineChecker_CheckShellCommandLine__shell_variables(c *
ck.CheckShellCommandLine(text)
t.CheckOutputLines(
+ // TODO: Avoid these duplicate diagnostics.
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"NOTE: Makefile:3: Please use the SUBST framework instead of ${SED} and ${MV}.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
+ "WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: f is used but not defined.",
- // TODO: Avoid these duplicate diagnostics.
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.",
"WARN: Makefile:3: $f is ambiguous. Use ${f} if you mean a Make variable or $$f if you mean a shell variable.")
@@ -1557,19 +1561,21 @@ func (s *Suite) Test_ShellLineChecker_checkInstallCommand(c *check.C) {
func (s *Suite) Test_splitIntoShellTokens__line_continuation(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "if true; then \\")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "if true; then \\")
t.CheckDeepEquals(words, []string{"if", "true", ";", "then"})
t.CheckEquals(rest, "\\")
t.CheckOutputLines(
- "WARN: Internal pkglint error in ShTokenizer.ShAtom at \"\\\\\" (quoting=plain).")
+ "WARN: filename.mk:1: Internal pkglint error in ShTokenizer.ShAtom at \"\\\\\" (quoting=plain).")
}
func (s *Suite) Test_splitIntoShellTokens__dollar_slash(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "pax -s /.*~$$//g")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "pax -s /.*~$$//g")
t.CheckDeepEquals(words, []string{"pax", "-s", "/.*~$$//g"})
t.CheckEquals(rest, "")
@@ -1578,7 +1584,8 @@ func (s *Suite) Test_splitIntoShellTokens__dollar_slash(c *check.C) {
func (s *Suite) Test_splitIntoShellTokens__dollar_subshell(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "id=$$(${AWK} '{print}' < ${WRKSRC}/idfile) && echo \"$$id\"")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "id=$$(${AWK} '{print}' < ${WRKSRC}/idfile) && echo \"$$id\"")
t.CheckDeepEquals(words, []string{"id=$$(${AWK} '{print}' < ${WRKSRC}/idfile)", "&&", "echo", "\"$$id\""})
t.CheckEquals(rest, "")
@@ -1587,7 +1594,8 @@ func (s *Suite) Test_splitIntoShellTokens__dollar_subshell(c *check.C) {
func (s *Suite) Test_splitIntoShellTokens__semicolons(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "word1 word2;;;")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "word1 word2;;;")
t.CheckDeepEquals(words, []string{"word1", "word2", ";;", ";"})
t.CheckEquals(rest, "")
@@ -1597,7 +1605,8 @@ func (s *Suite) Test_splitIntoShellTokens__whitespace(c *check.C) {
t := s.Init(c)
text := "\t${RUN} cd ${WRKSRC}&&(${ECHO} ${PERL5:Q};${ECHO})|${BASH} ./install"
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
t.CheckDeepEquals(words, []string{
"${RUN}",
@@ -1611,7 +1620,8 @@ func (s *Suite) Test_splitIntoShellTokens__finished_dquot(c *check.C) {
t := s.Init(c)
text := "\"\""
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
t.CheckDeepEquals(words, []string{"\"\""})
t.CheckEquals(rest, "")
@@ -1621,7 +1631,8 @@ func (s *Suite) Test_splitIntoShellTokens__unfinished_dquot(c *check.C) {
t := s.Init(c)
text := "\t\""
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
c.Check(words, check.IsNil)
t.CheckEquals(rest, "\"")
@@ -1631,7 +1642,8 @@ func (s *Suite) Test_splitIntoShellTokens__unescaped_dollar_in_dquot(c *check.C)
t := s.Init(c)
text := "echo \"$$\""
- words, rest := splitIntoShellTokens(dummyLine, text)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, text)
t.CheckDeepEquals(words, []string{"echo", "\"$$\""})
t.CheckEquals(rest, "")
@@ -1643,7 +1655,8 @@ func (s *Suite) Test_splitIntoShellTokens__varuse_with_embedded_space_and_other_
t := s.Init(c)
varuseWord := "${GCONF_SCHEMAS:@.s.@${INSTALL_DATA} ${WRKSRC}/src/common/dbus/${.s.} ${DESTDIR}${GCONF_SCHEMAS_DIR}/@}"
- words, rest := splitIntoShellTokens(dummyLine, varuseWord)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, varuseWord)
t.CheckDeepEquals(words, []string{varuseWord})
t.CheckEquals(rest, "")
@@ -1655,7 +1668,8 @@ func (s *Suite) Test_splitIntoShellTokens__two_shell_variables(c *check.C) {
t := s.Init(c)
code := "echo $$i$$j"
- words, rest := splitIntoShellTokens(dummyLine, code)
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, code)
t.CheckDeepEquals(words, []string{"echo", "$$i$$j"})
t.CheckEquals(rest, "")
@@ -1664,7 +1678,8 @@ func (s *Suite) Test_splitIntoShellTokens__two_shell_variables(c *check.C) {
func (s *Suite) Test_splitIntoShellTokens__varuse_with_embedded_space(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "${VAR:S/ /_/g}")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "${VAR:S/ /_/g}")
t.CheckDeepEquals(words, []string{"${VAR:S/ /_/g}"})
t.CheckEquals(rest, "")
@@ -1673,7 +1688,8 @@ func (s *Suite) Test_splitIntoShellTokens__varuse_with_embedded_space(c *check.C
func (s *Suite) Test_splitIntoShellTokens__redirect(c *check.C) {
t := s.Init(c)
- words, rest := splitIntoShellTokens(dummyLine, "echo 1>output 2>>append 3>|clobber 4>&5 6<input >>append")
+ line := t.NewLine("filename.mk", 1, "")
+ words, rest := splitIntoShellTokens(line, "echo 1>output 2>>append 3>|clobber 4>&5 6<input >>append")
t.CheckDeepEquals(words, []string{
"echo",
@@ -1685,7 +1701,7 @@ func (s *Suite) Test_splitIntoShellTokens__redirect(c *check.C) {
">>", "append"})
t.CheckEquals(rest, "")
- words, rest = splitIntoShellTokens(dummyLine, "echo 1> output 2>> append 3>| clobber 4>& 5 6< input >> append")
+ words, rest = splitIntoShellTokens(line, "echo 1> output 2>> append 3>| clobber 4>& 5 6< input >> append")
t.CheckDeepEquals(words, []string{
"echo",
diff --git a/pkgtools/pkglint/files/shtokenizer.go b/pkgtools/pkglint/files/shtokenizer.go
index 531cfccb16c..527a38a9a56 100644
--- a/pkgtools/pkglint/files/shtokenizer.go
+++ b/pkgtools/pkglint/files/shtokenizer.go
@@ -3,14 +3,17 @@ package pkglint
import "netbsd.org/pkglint/textproc"
type ShTokenizer struct {
- parser *MkParser
+ parser *MkLexer
}
func NewShTokenizer(line *Line, text string, emitWarnings bool) *ShTokenizer {
// TODO: Switching to NewMkParser is nontrivial since emitWarnings must equal (line != nil).
// assert((line != nil) == emitWarnings)
- p := MkParser{line, textproc.NewLexer(text), emitWarnings}
- return &ShTokenizer{&p}
+ if line != nil {
+ emitWarnings = true
+ }
+ mklex := NewMkLexer(text, line)
+ return &ShTokenizer{mklex}
}
// ShAtom parses a basic building block of a shell program.
@@ -65,9 +68,9 @@ func (p *ShTokenizer) ShAtom(quoting ShQuoting) *ShAtom {
if atom == nil {
lexer.Reset(mark)
if hasPrefix(lexer.Rest(), "$${") {
- p.parser.Line.Warnf("Unclosed shell variable starting at %q.", shorten(lexer.Rest(), 20))
+ p.parser.line.Warnf("Unclosed shell variable starting at %q.", shorten(lexer.Rest(), 20))
} else {
- p.parser.Line.Warnf("Internal pkglint error in ShTokenizer.ShAtom at %q (quoting=%s).", lexer.Rest(), quoting)
+ p.parser.line.Warnf("Internal pkglint error in ShTokenizer.ShAtom at %q (quoting=%s).", lexer.Rest(), quoting)
}
}
return atom
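
Since callers can no longer pass dummyLine, a minimal sketch of how a ShTokenizer is now constructed; only an illustration, with the line created via the test helper as in the tests below.

	// Sketch only; t is the Tester from the test suite.
	func exampleShTokenizer(t *Tester) {
		line := t.NewLine("filename.mk", 1, "")
		tok := NewShTokenizer(line, "echo \"hello, world\"", false)
		// With a non-nil line, any diagnostics refer to filename.mk:1.
		for _, atom := range tok.ShAtoms() {
			_ = atom
		}
	}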
diff --git a/pkgtools/pkglint/files/shtokenizer_test.go b/pkgtools/pkglint/files/shtokenizer_test.go
index 14f08659591..b713facdadc 100644
--- a/pkgtools/pkglint/files/shtokenizer_test.go
+++ b/pkgtools/pkglint/files/shtokenizer_test.go
@@ -90,9 +90,13 @@ func (s *Suite) Test_ShTokenizer__fuzzing(c *check.C) {
fuzzer.Char("\"'`$();|_#", 10)
fuzzer.Range('a', 'z', 5)
+ // This "real" line is necessary because the autofix
+ // in MkParser.varUseBrace checks this.
+ line := t.NewLine("Makefile", 1, "\t:")
+
defer fuzzer.CheckOk()
for i := 0; i < 1000; i++ {
- tokenizer := NewShTokenizer(dummyLine, fuzzer.Generate(50), false)
+ tokenizer := NewShTokenizer(line, fuzzer.Generate(50), false)
tokenizer.ShAtoms()
t.Output() // Discard the output, only react on panics.
}
@@ -105,7 +109,8 @@ func (s *Suite) Test_ShTokenizer_ShAtom(c *check.C) {
// testRest ensures that the given string is parsed to the expected
// atoms, and returns the remaining text.
testRest := func(s string, expectedAtoms []*ShAtom, expectedRest string) {
- p := NewShTokenizer(dummyLine, s, false)
+ line := t.NewLine("filename.mk", 1, "")
+ p := NewShTokenizer(line, s, false)
actualAtoms := p.ShAtoms()
@@ -512,7 +517,8 @@ func (s *Suite) Test_ShTokenizer_ShAtom__quoting(c *check.C) {
t := s.Init(c)
test := func(input, expectedOutput string) {
- p := NewShTokenizer(dummyLine, input, false)
+ line := t.NewLine("filename.mk", 1, "")
+ p := NewShTokenizer(line, input, false)
q := shqPlain
result := ""
for {
@@ -601,7 +607,8 @@ func (s *Suite) Test_ShTokenizer_ShToken(c *check.C) {
// testRest ensures that the given string is parsed to the expected
// tokens, and returns the remaining text.
testRest := func(str string, expected ...string) string {
- p := NewShTokenizer(dummyLine, str, false)
+ line := t.NewLine("testRest.mk", 1, "")
+ p := NewShTokenizer(line, str, false)
for _, exp := range expected {
t.CheckEquals(p.ShToken().MkText, exp)
}
@@ -609,7 +616,8 @@ func (s *Suite) Test_ShTokenizer_ShToken(c *check.C) {
}
test := func(str string, expected ...string) {
- p := NewShTokenizer(dummyLine, str, false)
+ line := t.NewLine("test.mk", 1, "")
+ p := NewShTokenizer(line, str, false)
for _, exp := range expected {
t.CheckEquals(p.ShToken().MkText, exp)
}
@@ -618,7 +626,8 @@ func (s *Suite) Test_ShTokenizer_ShToken(c *check.C) {
}
testNil := func(str string) {
- p := NewShTokenizer(dummyLine, str, false)
+ line := t.NewLine("testNil.mk", 1, "")
+ p := NewShTokenizer(line, str, false)
c.Check(p.ShToken(), check.IsNil)
t.CheckEquals(p.Rest(), "")
t.CheckOutputEmpty()
diff --git a/pkgtools/pkglint/files/shtypes_test.go b/pkgtools/pkglint/files/shtypes_test.go
index 0d6cfbdd953..b1e475d4592 100644
--- a/pkgtools/pkglint/files/shtypes_test.go
+++ b/pkgtools/pkglint/files/shtypes_test.go
@@ -17,7 +17,8 @@ func (s *Suite) Test_ShAtomType_String(c *check.C) {
func (s *Suite) Test_ShAtom_String(c *check.C) {
t := s.Init(c)
- tokenizer := NewShTokenizer(dummyLine, "${ECHO} \"hello, world\"", false)
+ line := t.NewLine("filename.mk", 1, "")
+ tokenizer := NewShTokenizer(line, "${ECHO} \"hello, world\"", false)
atoms := tokenizer.ShAtoms()
@@ -45,7 +46,8 @@ func (s *Suite) Test_NewShToken__no_atoms(c *check.C) {
func (s *Suite) Test_ShToken_String(c *check.C) {
t := s.Init(c)
- tokenizer := NewShTokenizer(dummyLine, "${ECHO} \"hello, world\"", false)
+ line := t.NewLine("filename.mk", 1, "")
+ tokenizer := NewShTokenizer(line, "${ECHO} \"hello, world\"", false)
t.CheckEquals(tokenizer.ShToken().String(), "ShToken([varuse(\"ECHO\")])")
t.CheckEquals(tokenizer.ShToken().String(), "ShToken([ShAtom(text, \"\\\"\", d) ShAtom(text, \"hello, world\", d) \"\\\"\"])")
diff --git a/pkgtools/pkglint/files/substcontext.go b/pkgtools/pkglint/files/substcontext.go
index d20a6a3791b..0e5fefe9b3e 100644
--- a/pkgtools/pkglint/files/substcontext.go
+++ b/pkgtools/pkglint/files/substcontext.go
@@ -142,6 +142,7 @@ func (ctx *SubstContext) Varassign(mkline *MkLine) {
fix.Replace("pre-patch", "post-extract")
fix.Replace("post-patch", "pre-configure")
fix.Apply()
+ // FIXME: Add test that has "SUBST_STAGE.id=pre-patch # or rather post-patch?"
}
if G.Pkg != nil && (value == "pre-configure" || value == "post-configure") {
@@ -270,7 +271,6 @@ func (ctx *SubstContext) suggestSubstVars(mkline *MkLine) {
if !mkline.HasComment() && len(tokens) == 2 && tokens[0] == "-e" {
fix.Replace(mkline.Text, alignWith(varop, mkline.ValueAlign())+varname)
}
- fix.Anyway()
fix.Apply()
ctx.curr.seenVars = true
@@ -280,7 +280,7 @@ func (ctx *SubstContext) suggestSubstVars(mkline *MkLine) {
// extractVarname extracts the variable name from a sed command of the form
// s,@VARNAME@,${VARNAME}, and some related variants thereof.
func (*SubstContext) extractVarname(token string) string {
- parser := NewMkParser(nil, token)
+ parser := NewMkLexer(token, nil)
lexer := parser.lexer
if !lexer.SkipByte('s') {
return ""
diff --git a/pkgtools/pkglint/files/testnames_test.go b/pkgtools/pkglint/files/testnames_test.go
index 0354d53ab0f..b7906eac9ec 100644
--- a/pkgtools/pkglint/files/testnames_test.go
+++ b/pkgtools/pkglint/files/testnames_test.go
@@ -8,10 +8,11 @@ import (
// Ensures that all test names follow a common naming scheme:
//
// Test_${Type}_${Method}__${description_using_underscores}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Configure("path.go", "*", "*", +intqa.EMissingTest)
ck.Configure("*yacc.go", "*", "*", intqa.ENone)
+ ck.Configure("*", "*", "", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/textproc/lexer_test.go b/pkgtools/pkglint/files/textproc/lexer_test.go
index 039225b0da2..220eff39ad8 100644
--- a/pkgtools/pkglint/files/textproc/lexer_test.go
+++ b/pkgtools/pkglint/files/textproc/lexer_test.go
@@ -413,8 +413,8 @@ func (s *Suite) Test__Alpha(c *check.C) {
c.Check(set.Contains('z'), equals, true)
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/trace/tracing_test.go b/pkgtools/pkglint/files/trace/tracing_test.go
index 2a122f39867..9f778744ad6 100755
--- a/pkgtools/pkglint/files/trace/tracing_test.go
+++ b/pkgtools/pkglint/files/trace/tracing_test.go
@@ -143,8 +143,8 @@ func (str) String() string {
return "It's a string"
}
-func (s *Suite) Test__test_names(c *check.C) {
- ck := intqa.NewTestNameChecker(c.Errorf)
+func (s *Suite) Test__qa(c *check.C) {
+ ck := intqa.NewQAChecker(c.Errorf)
ck.Configure("*", "*", "*", -intqa.EMissingTest)
ck.Check()
}
diff --git a/pkgtools/pkglint/files/util.go b/pkgtools/pkglint/files/util.go
index b9648c6580d..a3448055003 100644
--- a/pkgtools/pkglint/files/util.go
+++ b/pkgtools/pkglint/files/util.go
@@ -239,7 +239,7 @@ func assertf(cond bool, format string, args ...interface{}) {
}
func isEmptyDir(filename Path) bool {
- if filename.HasSuffixText("/CVS") {
+ if filename.HasSuffixPath("CVS") {
return true
}
@@ -457,6 +457,7 @@ func mkopSubst(s string, left bool, from string, right bool, to string, flags st
})
}
+// FIXME: Replace with Path.JoinNoClean
func joinPath(a, b Path, others ...Path) Path {
if len(others) == 0 {
return a + "/" + b
@@ -468,69 +469,6 @@ func joinPath(a, b Path, others ...Path) Path {
return NewPath(strings.Join(parts, "/"))
}
-// relpath returns the relative path from the directory "from"
-// to the filesystem entry "to".
-//
-// The relative path is built by going from the "from" directory via the
-// pkgsrc root to the "to" filename. This produces the form
-// "../../category/package" that is found in DEPENDS and .include lines.
-//
-// Both from and to are interpreted relative to the current working directory,
-// unless they are absolute paths.
-//
-// This function should only be used if the relative path from one file to
-// another cannot be computed in another way. The preferred way is to take
-// the relative filenames directly from the .include or exists() where they
-// appear.
-//
-// TODO: Invent data types for all kinds of relative paths that occur in pkgsrc
-// and pkglint. Make sure that these paths cannot be accidentally mixed.
-func relpath(from, to Path) (result Path) {
-
- if trace.Tracing {
- defer trace.Call(from, to, trace.Result(&result))()
- }
-
- cfrom := cleanpath(from)
- cto := cleanpath(to)
-
- if cfrom == cto {
- return "."
- }
-
- // Take a shortcut for the common case from "dir" to "dir/subdir/...".
- if cto.HasPrefixPath(cfrom) {
- return cleanpath(cto[len(cfrom)+1:])
- }
-
- // Take a shortcut for the common case from "category/package" to ".".
- // This is the most common variant in a complete pkgsrc scan.
- if cto == "." {
- fromParts := cfrom.Parts()
- if len(fromParts) == 2 && !hasPrefix(fromParts[0], ".") && !hasPrefix(fromParts[1], ".") {
- return "../.."
- }
- }
-
- if cfrom == "." && !cto.IsAbs() {
- return cto.Clean()
- }
-
- absFrom := abspath(cfrom)
- absTopdir := abspath(G.Pkgsrc.topdir)
- absTo := abspath(cto)
-
- toTop := absFrom.Rel(absTopdir)
- fromTop := absTopdir.Rel(absTo)
-
- result = cleanpath(toTop.JoinNoClean(fromTop))
-
- if trace.Tracing {
- trace.Stepf("relpath from %q to %q = %q", cfrom, cto, result)
- }
- return
-}
-
func abspath(filename Path) Path {
abs := filename
if !filename.IsAbs() {
diff --git a/pkgtools/pkglint/files/util_test.go b/pkgtools/pkglint/files/util_test.go
index 391583a8e35..8e00d9c50de 100644
--- a/pkgtools/pkglint/files/util_test.go
+++ b/pkgtools/pkglint/files/util_test.go
@@ -89,6 +89,11 @@ func (s *Suite) Test_isEmptyDir(c *check.C) {
t.CheckEquals(isEmptyDir(t.File(".")), true)
t.CheckEquals(isEmptyDir(t.File("CVS")), true)
+
+ t.Chdir(".")
+
+ t.CheckEquals(isEmptyDir("."), true)
+ t.CheckEquals(isEmptyDir("CVS"), true)
}
func (s *Suite) Test_isEmptyDir__and_getSubdirs(c *check.C) {
@@ -332,60 +337,6 @@ func (s *Suite) Test__regex_ReplaceFirst(c *check.C) {
t.CheckEquals(rest, "X+c+d")
}
-func (s *Suite) Test_relpath(c *check.C) {
- t := s.Init(c)
-
- t.Chdir(".")
- t.CheckEquals(G.Pkgsrc.topdir, t.tmpdir)
-
- test := func(from, to Path, result Path) {
- t.CheckEquals(relpath(from, to), result)
- }
-
- test("some/dir", "some/directory", "../../some/directory")
- test("some/directory", "some/dir", "../../some/dir")
-
- test("category/package/.", ".", "../..")
-
- // This case is handled by one of the shortcuts that avoid file system access.
- test(
- "./.",
- "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
- "meta-pkgs/kde/kf5.mk")
-
- test(".hidden/dir", ".", "../..")
- test("dir/.hidden", ".", "../..")
-
- // This happens when "pkglint -r x11" is run.
- G.Pkgsrc.topdir = "x11/.."
-
- test(
- "./.",
- "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
- "meta-pkgs/kde/kf5.mk")
- test(
- "x11/..",
- "x11/frameworkintegration/../../meta-pkgs/kde/kf5.mk",
- "meta-pkgs/kde/kf5.mk")
-}
-
-// Relpath is called so often that handling the most common calls
-// without file system IO makes sense.
-func (s *Suite) Test_relpath__quick(c *check.C) {
- t := s.Init(c)
-
- test := func(from, to Path, result Path) {
- t.CheckEquals(relpath(from, to), result)
- }
-
- test("some/dir", "some/dir/../..", "../..")
- test("some/dir", "some/dir/./././../..", "../..")
- test("some/dir", "some/dir/", ".")
-
- test("some/dir", ".", "../..")
- test("some/dir/.", ".", "../..")
-}
-
func (s *Suite) Test_cleanpath(c *check.C) {
t := s.Init(c)
diff --git a/pkgtools/pkglint/files/varalignblock.go b/pkgtools/pkglint/files/varalignblock.go
index 72ca6fd52bb..affee436726 100644
--- a/pkgtools/pkglint/files/varalignblock.go
+++ b/pkgtools/pkglint/files/varalignblock.go
@@ -676,7 +676,7 @@ func (VaralignSplitter) parseVarnameOp(parser *MkParser, initial bool) (string,
}
mark := lexer.Mark()
- _ = parser.Varname()
+ _ = parser.mklex.Varname()
lexer.SkipHspace()
ok, _ := parser.Op()
assert(ok)
diff --git a/pkgtools/pkglint/files/vardefs.go b/pkgtools/pkglint/files/vardefs.go
index 11344c06a3c..3a41434a7ed 100644
--- a/pkgtools/pkglint/files/vardefs.go
+++ b/pkgtools/pkglint/files/vardefs.go
@@ -406,6 +406,12 @@ func (reg *VarTypeRegistry) enumFrom(pkgsrc *Pkgsrc, filename Path, defval strin
return enum(joined)
}
+ if !G.Testing {
+ mklines.Whole().Fatalf(
+ "Must contain at least 1 variable definition for %s.",
+ joinSkipEmptyCambridge("or", varcanons...))
+ }
+
if trace.Tracing {
trace.Stepf("Enum from default value: %s", defval)
}
@@ -421,8 +427,13 @@ func (reg *VarTypeRegistry) enumFrom(pkgsrc *Pkgsrc, filename Path, defval strin
func (reg *VarTypeRegistry) enumFromDirs(pkgsrc *Pkgsrc, category Path, re regex.Pattern, repl string, defval string) *BasicType {
versions := pkgsrc.ListVersions(category, re, repl, false)
if len(versions) == 0 {
+ if !G.Testing {
+ NewLineWhole(category).Fatalf(
+ "Must contain at least 1 subdirectory matching %q.", re)
+ }
return enum(defval)
}
+
return enum(strings.Join(versions, " "))
}
@@ -440,8 +451,13 @@ func (reg *VarTypeRegistry) enumFromFiles(basedir Path, re regex.Pattern, repl s
}
}
if len(relevant) == 0 {
+ if !G.Testing {
+ NewLineWhole(basedir).Fatalf(
+ "Must contain at least 1 file matching %q.", re)
+ }
return enum(defval)
}
+
return enum(strings.Join(relevant, " "))
}
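
Outside the test suite, the new guards turn a silent fallback to the default value into a fatal error; a minimal sketch, with the message taken from the vardefs_test.go tests below and a hypothetical function name.

	// Sketch only.
	func exampleEnumGuard(pkgsrc *Pkgsrc) {
		G.Testing = false
		// With no matching subdirectories this now aborts with
		// "FATAL: category: Must contain at least 1 subdirectory matching "^pack.*"."
		// instead of silently returning enum("default").
		_ = pkgsrc.vartypes.enumFromDirs(pkgsrc, "category", `^pack.*`, "$0", "default")
	}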
diff --git a/pkgtools/pkglint/files/vardefs_test.go b/pkgtools/pkglint/files/vardefs_test.go
index 06a342ff46c..6776c7cfde1 100644
--- a/pkgtools/pkglint/files/vardefs_test.go
+++ b/pkgtools/pkglint/files/vardefs_test.go
@@ -94,10 +94,36 @@ func (s *Suite) Test_VarTypeRegistry_enumFrom__no_tracing(c *check.C) {
t.CheckEquals(nonexistentType.AllowedEnums(), "defval")
}
+func (s *Suite) Test_VarTypeRegistry_enumFrom__no_testing(c *check.C) {
+ t := s.Init(c)
+
+ G.Testing = false
+
+ t.ExpectFatal(
+ t.SetUpVartypes,
+ "FATAL: ~/mk/compiler.mk: Cannot be read.")
+
+ t.CreateFileLines("mk/compiler.mk",
+ MkCvsID)
+
+ t.ExpectFatal(
+ t.SetUpVartypes,
+ "FATAL: ~/mk/compiler.mk: Must contain at least 1 variable "+
+ "definition for _COMPILERS or _PSEUDO_COMPILERS.")
+
+ t.CreateFileLines("mk/compiler.mk",
+ MkCvsID,
+ "_COMPILERS=\tgcc")
+
+ t.ExpectFatal(
+ t.SetUpVartypes,
+ "FATAL: ~/editors/emacs/modules.mk: Cannot be read.")
+}
+
func (s *Suite) Test_VarTypeRegistry_enumFromDirs(c *check.C) {
t := s.Init(c)
- // To make the test useful, these directories must differ from the
+ // To make the test observable, these directories must differ from the
// PYPKGPREFIX default value in vardefs.go.
t.CreateFileLines("lang/python28/Makefile", MkCvsID)
t.CreateFileLines("lang/python33/Makefile", MkCvsID)
@@ -112,6 +138,20 @@ func (s *Suite) Test_VarTypeRegistry_enumFromDirs(c *check.C) {
test("PYPKGPREFIX", "enum: py28 py33 (system-provided)")
}
+func (s *Suite) Test_VarTypeRegistry_enumFromDirs__no_testing(c *check.C) {
+ t := s.Init(c)
+
+ G.Testing = false
+
+ t.ExpectFatal(
+ func() {
+ G.Pkgsrc.vartypes.enumFromDirs(
+ &G.Pkgsrc, "category", `^pack.*`, "$0", "default")
+ },
+ "FATAL: category: Must contain at least 1 "+
+ "subdirectory matching \"^pack.*\".")
+}
+
func (s *Suite) Test_VarTypeRegistry_enumFromFiles(c *check.C) {
t := s.Init(c)
@@ -130,6 +170,20 @@ func (s *Suite) Test_VarTypeRegistry_enumFromFiles(c *check.C) {
test("OPSYS", "enum: NetBSD SunOS (system-provided)")
}
+func (s *Suite) Test_VarTypeRegistry_enumFromFiles__no_testing(c *check.C) {
+ t := s.Init(c)
+
+ G.Testing = false
+
+ t.ExpectFatal(
+ func() {
+ G.Pkgsrc.vartypes.enumFromFiles(
+ "mk/platform", `^(\w+)\.mk$`, "$1", "default")
+ },
+ "FATAL: mk/platform: Must contain at least 1 "+
+ "file matching \"^(\\\\w+)\\\\.mk$\".")
+}
+
func (s *Suite) Test_VarTypeRegistry_Init(c *check.C) {
t := s.Init(c)
@@ -178,7 +232,8 @@ func (s *Suite) Test_VarTypeRegistry_Init__no_testing(c *check.C) {
G.Testing = false
t.ExpectFatal(
t.FinishSetUp,
- "FATAL: ~/editors/emacs/modules.mk: Cannot be read.")
+ "FATAL: ~/mk/compiler.mk: Must contain at least 1 "+
+ "variable definition for _COMPILERS or _PSEUDO_COMPILERS.")
}
func (s *Suite) Test_VarTypeRegistry_Init__MASTER_SITES(c *check.C) {
diff --git a/pkgtools/pkglint/files/vartypecheck.go b/pkgtools/pkglint/files/vartypecheck.go
index 0233555cae7..4619f935c6f 100644
--- a/pkgtools/pkglint/files/vartypecheck.go
+++ b/pkgtools/pkglint/files/vartypecheck.go
@@ -726,8 +726,6 @@ func (cv *VartypeCheck) Homepage() {
"Defining MASTER_SITES=${HOMEPAGE} is ok, though.")
if baseURL != "" {
fix.Replace(wrong, fixedURL)
- } else {
- fix.Anyway()
}
fix.Apply()
}
@@ -1083,9 +1081,10 @@ func (cv *VartypeCheck) Pkgpath() {
cv.MkLine.Errorf("A main pkgsrc package must not depend on a pkgsrc-wip package.")
}
- if !G.Pkgsrc.File(pkgpath.JoinNoClean("Makefile")).IsFile() {
+ pkgdir := G.Pkgsrc.File(pkgpath)
+ if !pkgdir.JoinNoClean("Makefile").IsFile() {
cv.MkLine.Errorf("There is no package in %q.",
- cv.MkLine.PathToFile(G.Pkgsrc.File(pkgpath)))
+ cv.MkLine.PathToFile(pkgdir))
return
}
diff --git a/pkgtools/pkglint/files/vartypecheck_test.go b/pkgtools/pkglint/files/vartypecheck_test.go
index 64b1da229e8..4b8b54b66ec 100644
--- a/pkgtools/pkglint/files/vartypecheck_test.go
+++ b/pkgtools/pkglint/files/vartypecheck_test.go
@@ -897,13 +897,14 @@ func (s *Suite) Test_VartypeCheck_LdFlag(c *check.C) {
func (s *Suite) Test_VartypeCheck_License(c *check.C) {
t := s.Init(c)
+ t.Chdir(".")
t.SetUpPkgsrc() // Adds the gnu-gpl-v2 and 2-clause-bsd licenses
t.SetUpPackage("category/package")
t.FinishSetUp()
G.Pkg = NewPackage(t.File("category/package"))
- mklines := t.NewMkLines("perl5.mk",
+ mklines := t.SetUpFileMkLines("perl5.mk",
MkCvsID,
"PERL5_LICENSE= gnu-gpl-v2 OR artistic")
// Also registers the PERL5_LICENSE variable in the package.
@@ -920,7 +921,7 @@ func (s *Suite) Test_VartypeCheck_License(c *check.C) {
vt.Output(
"ERROR: filename.mk:2: Parse error for license condition \"AND mit\".",
- "WARN: filename.mk:3: License file ~/licenses/artistic does not exist.",
+ "WARN: filename.mk:3: License file licenses/artistic does not exist.",
"ERROR: filename.mk:4: Parse error for license condition \"${UNKNOWN_LICENSE}\".")
vt.Op(opAssignAppend)
@@ -930,7 +931,7 @@ func (s *Suite) Test_VartypeCheck_License(c *check.C) {
vt.Output(
"ERROR: filename.mk:11: Parse error for appended license condition \"gnu-gpl-v2\".",
- "WARN: filename.mk:12: License file ~/licenses/mit does not exist.")
+ "WARN: filename.mk:12: License file licenses/mit does not exist.")
}
func (s *Suite) Test_VartypeCheck_MachineGnuPlatform(c *check.C) {