| author | rillig <rillig@pkgsrc.org> | 2020-07-01 13:17:41 +0000 |
|---|---|---|
| committer | rillig <rillig@pkgsrc.org> | 2020-07-01 13:17:41 +0000 |
| commit | 9c3c8fcfeb2f84f2d35c43eaf5545ac5ff8ffc79 (patch) | |
| tree | c73af7a27698d509916508ca9de679ad2f7e3292 | /pkgtools/pkglint/files |
| parent | 8c649a27dc957d78eba16c05ce81d09ecbd655c4 (diff) | |
| download | pkgsrc-9c3c8fcfeb2f84f2d35c43eaf5545ac5ff8ffc79.tar.gz | |
pkgtools/pkglint: update to 20.2.1
Changes since 20.2.0:
- Don't warn about a possibly redundant PKGNAME=${DISTNAME} assignment if
  PKGNAME is defined somewhere else in the package Makefile.
- Warn if NO_CONFIGURE=yes and REPLACE_* are combined.
- Suggest replacing ${VAR:@l@-l${l}@} with the simpler ${VAR:S,^,-l,},
  and ${VAR:@l@${l}suffix@} with the simpler ${VAR:=suffix}
  (see the Makefile sketch below).
- Allow lua in CATEGORIES.
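To illustrate the new checks, here is a minimal, hypothetical package Makefile fragment (the variable names LIBS, DISTNAMES and the file path are made up for this sketch, not taken from the commit). It shows the :@var@...@ loop modifiers that pkglint 20.2.1 now flags, together with the simpler forms it suggests, and the new NO_CONFIGURE/REPLACE_* warning:

```make
# Hypothetical fragment; pkglint 20.2.1 would emit a NOTE for each of the
# two :@var@...@ loop modifiers below and offer an autofix.

LIBS=		m pthread
DISTNAMES=	pkg-a pkg-b

# The loop only prepends "-l" to each word ...
LDFLAGS+=	${LIBS:@l@-l${l}@}
# ... so pkglint suggests the equivalent substitution modifier:
#LDFLAGS+=	${LIBS:S,^,-l,}

# The loop only appends a fixed suffix to each word ...
DISTFILES=	${DISTNAMES:@d@${d}.tar.gz@}
# ... so pkglint suggests the suffix-replacement modifier:
#DISTFILES=	${DISTNAMES:=.tar.gz}

# Also new in 20.2.1: combining NO_CONFIGURE with any REPLACE_* variable
# now triggers a warning, since the REPLACE_* variables are ignored when
# NO_CONFIGURE is set.
#NO_CONFIGURE=	yes
#REPLACE_PERL=	scripts/helper.pl
```

The corresponding implementation is in MkVarUseChecker.checkModifierLoop and Package.checkReplaceInterpreter in the diff below.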
Diffstat (limited to 'pkgtools/pkglint/files')
26 files changed, 794 insertions, 276 deletions
diff --git a/pkgtools/pkglint/files/autofix.go b/pkgtools/pkglint/files/autofix.go index a43d5dc1353..c1d177876d9 100644 --- a/pkgtools/pkglint/files/autofix.go +++ b/pkgtools/pkglint/files/autofix.go @@ -96,6 +96,7 @@ func (fix *Autofix) Explain(explanation ...string) { // Replace replaces "from" with "to", a single time. // If the text is not found exactly once, nothing is replaced at all. +// The diagnostic is given nevertheless, to allow humans to fix it. func (fix *Autofix) Replace(from string, to string) { fix.ReplaceAfter("", from, to) } diff --git a/pkgtools/pkglint/files/check_test.go b/pkgtools/pkglint/files/check_test.go index d07d772db14..96428730629 100644 --- a/pkgtools/pkglint/files/check_test.go +++ b/pkgtools/pkglint/files/check_test.go @@ -119,8 +119,6 @@ func Test__qa(t *testing.T) { ck.Configure("mkshparser.go", "*", "*", -intqa.EMissingTest) // TODO ck.Configure("mkshtypes.go", "*", "*", -intqa.EMissingTest) // TODO ck.Configure("mkshwalker.go", "*", "*", -intqa.EMissingTest) // TODO - ck.Configure("mktokenslexer.go", "*", "*", -intqa.EMissingTest) // TODO - ck.Configure("mktypes.go", "*", "*", -intqa.EMissingTest) // TODO ck.Configure("options.go", "*", "*", -intqa.EMissingTest) // TODO ck.Configure("package.go", "*", "*", -intqa.EMissingTest) // TODO ck.Configure("paragraph.go", "*", "*", -intqa.EMissingTest) // TODO diff --git a/pkgtools/pkglint/files/line.go b/pkgtools/pkglint/files/line.go index 5bf145b9c96..6313ce374e3 100644 --- a/pkgtools/pkglint/files/line.go +++ b/pkgtools/pkglint/files/line.go @@ -53,8 +53,6 @@ func (loc *Location) File(rel RelPath) CurrPath { // Line represents a line of text from a file. type Line struct { - // TODO: Consider storing pointers to the Filename and Basename instead of strings to save memory. - // But first find out where and why pkglint needs so much memory (200 MB for a full recursive run over pkgsrc + wip). Location Location Basename RelPath // the last component of the Filename @@ -66,8 +64,6 @@ type Line struct { raw []*RawLine // contains the original text including trailing newline fix *Autofix // any changes that pkglint would like to apply to the line once Once - - // XXX: Filename and Basename could be replaced with a pointer to a Lines object. 
} func NewLine(filename CurrPath, lineno int, text string, rawLine *RawLine) *Line { diff --git a/pkgtools/pkglint/files/mkcondchecker.go b/pkgtools/pkglint/files/mkcondchecker.go index ebcd1948320..42c39f01153 100644 --- a/pkgtools/pkglint/files/mkcondchecker.go +++ b/pkgtools/pkglint/files/mkcondchecker.go @@ -141,7 +141,7 @@ func (ck *MkCondChecker) checkEmptyType(varuse *MkVarUse) { continue } - switch modifier.Text { + switch modifier.String() { default: return case "O", "u": @@ -256,7 +256,7 @@ func (ck *MkCondChecker) simplify(varuse *MkVarUse, fromEmpty bool, neg bool) { fix := ck.MkLine.Autofix() fix.Notef("%s can be compared using the simpler \"%s\" "+ "instead of matching against %q.", - varname, to, ":"+modifier.Text) + varname, to, ":"+modifier.String()) // TODO: Quoted fix.Explain( "This variable has a single value, not a list of values.", "Therefore it feels strange to apply list operators like :M and :N onto it.", diff --git a/pkgtools/pkglint/files/mklexer.go b/pkgtools/pkglint/files/mklexer.go index 20b73a6fbb9..a3f0ca70a90 100644 --- a/pkgtools/pkglint/files/mklexer.go +++ b/pkgtools/pkglint/files/mklexer.go @@ -226,7 +226,7 @@ func (p *MkLexer) VarUseModifiers(varname string, closing byte) []MkVarUseModifi for lexer.SkipByte(':') || mayOmitColon { modifier := p.varUseModifier(varname, closing) if modifier != "" { - modifiers = append(modifiers, MkVarUseModifier{modifier}) + modifiers = append(modifiers, modifier) } mayOmitColon = modifier != "" && (modifier[0] == 'S' || modifier[0] == 'C') } @@ -235,7 +235,7 @@ func (p *MkLexer) VarUseModifiers(varname string, closing byte) []MkVarUseModifi // varUseModifier parses a single variable modifier such as :Q or :S,from,to,. // The actual parsing starts after the leading colon. -func (p *MkLexer) varUseModifier(varname string, closing byte) string { +func (p *MkLexer) varUseModifier(varname string, closing byte) MkVarUseModifier { lexer := p.lexer mark := lexer.Mark() @@ -260,32 +260,32 @@ func (p *MkLexer) varUseModifier(varname string, closing byte) string { "tu", // To uppercase "tw", // Causes the value to be treated as list of words "u": // Remove adjacent duplicate words (like uniq(1)) - return mod + return MkVarUseModifier(mod) } - if hasPrefix(mod, "ts") { + if MkVarUseModifier(mod).HasPrefix("ts") { return p.varUseModifierTs(mod, closing, lexer, varname, mark) } case 'D', 'U': - return p.varUseText(closing) + return MkVarUseModifier(p.varUseText(closing)) case 'M', 'N': return p.varUseModifierMatch(closing) case 'C', 'S': if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok { - return lexer.Since(mark) + return MkVarUseModifier(lexer.Since(mark)) } case '@': if p.varUseModifierAt(lexer, varname) { - return lexer.Since(mark) + return MkVarUseModifier(lexer.Since(mark)) } case '[': if lexer.SkipRegexp(regcomp(`^\[(?:[-.\d]+|#)\]`)) { - return lexer.Since(mark) + return MkVarUseModifier(lexer.Since(mark)) } case '?': @@ -293,7 +293,7 @@ func (p *MkLexer) varUseModifier(varname string, closing byte) string { p.varUseText(closing) if lexer.SkipByte(':') { p.varUseText(closing) - return lexer.Since(mark) + return MkVarUseModifier(lexer.Since(mark)) } case ':': @@ -328,7 +328,7 @@ func (p *MkLexer) varUseModifier(varname string, closing byte) string { "but even these have only local consequences.") p.varUseText(closing) - return lexer.Since(mark) + return MkVarUseModifier(lexer.Since(mark)) } // ${SOURCES:%.c=%.o} @@ -342,14 +342,14 @@ func (p *MkLexer) varUseModifier(varname string, closing byte) string { "The :from=to 
modifier consumes all the text until the end of the variable.", "There cannot be any further modifiers after it.") } - return modifier + return MkVarUseModifier(modifier) } // ${:!uname -a!:[2]} lexer.Reset(mark) modifier = p.varUseText(closing) if hasPrefix(modifier, "!") && hasSuffix(modifier, "!") { - return modifier + return MkVarUseModifier(modifier) } if modifier != "" { @@ -365,7 +365,7 @@ func (p *MkLexer) varUseModifier(varname string, closing byte) string { // It is only extracted from varUseModifier to make the latter smaller. func (p *MkLexer) varUseModifierTs( mod string, closing byte, lexer *textproc.Lexer, varname string, - mark textproc.LexerMark) string { + mark textproc.LexerMark) MkVarUseModifier { // See devel/bmake/files/var.c:/case 't' sep := mod[2:] + p.varUseText(closing) @@ -383,13 +383,13 @@ func (p *MkLexer) varUseModifierTs( "or an escape sequence like \\t or \\n or an octal or decimal escape", "sequence; see the bmake man page for further details.") } - return lexer.Since(mark) + return MkVarUseModifier(lexer.Since(mark)) } // varUseModifierMatch parses an :M or :N pattern. // // See devel/bmake/files/var.c:/^ApplyModifiers/, case 'M'. -func (p *MkLexer) varUseModifierMatch(closing byte) string { +func (p *MkLexer) varUseModifierMatch(closing byte) MkVarUseModifier { lexer := p.lexer mark := lexer.Mark() lexer.Skip(1) @@ -427,7 +427,7 @@ func (p *MkLexer) varUseModifierMatch(closing byte) string { re := regex.Pattern(condStr(closing == '}', `\\([:}])`, `\\([:)])`)) arg = replaceAll(arg, re, "$1") } - return arg + return MkVarUseModifier(arg) } // varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier. diff --git a/pkgtools/pkglint/files/mklexer_test.go b/pkgtools/pkglint/files/mklexer_test.go index b8e7e5739f5..5532c7be525 100644 --- a/pkgtools/pkglint/files/mklexer_test.go +++ b/pkgtools/pkglint/files/mklexer_test.go @@ -605,8 +605,9 @@ func (s *Suite) Test_MkLexer_varUseModifier(c *check.C) { varUse := p.VarUse() - t.CheckDeepEquals(varUse.modifiers, []MkVarUseModifier{ - {"R"}, {"E"}, {"Ox"}, {"tA"}, {"tW"}, {"tw"}}) + t.CheckDeepEquals( + varUse.modifiers, + []MkVarUseModifier{"R", "E", "Ox", "tA", "tW", "tw"}) } func (s *Suite) Test_MkLexer_varUseModifier__S_parse_error(c *check.C) { @@ -617,7 +618,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__S_parse_error(c *check.C) { mod := p.varUseModifier("VAR", '}') - t.CheckEquals(mod, "") + t.CheckEquals(mod, MkVarUseModifier("")) // XXX: The "S," has just disappeared. 
t.CheckEquals(p.Rest(), "}") @@ -634,7 +635,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__invalid_ts_modifier_with_warning(c modifier := p.varUseModifier("VAR", '}') - t.CheckEquals(modifier, "tsabc") + t.CheckEquals(modifier, MkVarUseModifier("tsabc")) t.CheckEquals(p.Rest(), "}") t.CheckOutputLines( "WARN: filename.mk:123: Invalid separator \"abc\" for :ts modifier of \"VAR\".", @@ -652,7 +653,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__invalid_ts_modifier_without_warning modifier := p.varUseModifier("VAR", '}') - t.CheckEquals(modifier, "tsabc") + t.CheckEquals(modifier, MkVarUseModifier("tsabc")) t.CheckEquals(p.Rest(), "}") } @@ -664,7 +665,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__square_bracket(c *check.C) { modifier := p.varUseModifier("VAR", '}') - t.CheckEquals(modifier, "") + t.CheckEquals(modifier, MkVarUseModifier("")) t.CheckEquals(p.Rest(), "") t.CheckOutputLines( @@ -725,7 +726,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__varuse_in_malformed_modifier(c *che func (s *Suite) Test_MkLexer_varUseModifier__eq_suffix_replacement(c *check.C) { t := s.Init(c) - test := func(input, modifier, rest string, diagnostics ...string) { + test := func(input string, modifier MkVarUseModifier, rest string, diagnostics ...string) { line := t.NewLine("filename.mk", 123, "") p := NewMkLexer(input, line) @@ -762,7 +763,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__eq_suffix_replacement(c *check.C) { func (s *Suite) Test_MkLexer_varUseModifier__assigment(c *check.C) { t := s.Init(c) - test := func(varname, input, modifier, rest string, diagnostics ...string) { + test := func(varname, input string, modifier MkVarUseModifier, rest string, diagnostics ...string) { line := t.NewLine("filename.mk", 123, "") p := NewMkLexer(input, line) @@ -807,7 +808,7 @@ func (s *Suite) Test_MkLexer_varUseModifier__assigment(c *check.C) { func (s *Suite) Test_MkLexer_varUseModifierTs(c *check.C) { t := s.Init(c) - test := func(input string, closing byte, mod string, rest string, diagnostics ...string) { + test := func(input string, closing byte, mod MkVarUseModifier, rest string, diagnostics ...string) { diag := t.NewLine("filename.mk", 123, "") lex := NewMkLexer(input, diag) mark := lex.lexer.Mark() @@ -851,7 +852,7 @@ func (s *Suite) Test_MkLexer_varUseModifierTs(c *check.C) { func (s *Suite) Test_MkLexer_varUseModifierMatch(c *check.C) { t := s.Init(c) - testClosing := func(input string, closing byte, modifier, rest string, diagnostics ...string) { + testClosing := func(input string, closing byte, modifier MkVarUseModifier, rest string, diagnostics ...string) { line := t.NewLine("filename.mk", 123, "") p := NewMkLexer(input, line) @@ -862,10 +863,10 @@ func (s *Suite) Test_MkLexer_varUseModifierMatch(c *check.C) { t.CheckOutput(diagnostics) } - test := func(input, modifier, rest string, diagnostics ...string) { + test := func(input string, modifier MkVarUseModifier, rest string, diagnostics ...string) { testClosing(input, '}', modifier, rest, diagnostics...) } - testParen := func(input, modifier, rest string, diagnostics ...string) { + testParen := func(input string, modifier MkVarUseModifier, rest string, diagnostics ...string) { testClosing(input, ')', modifier, rest, diagnostics...) 
} @@ -905,7 +906,7 @@ func (s *Suite) Test_MkLexer_varUseModifierMatch(c *check.C) { func (s *Suite) Test_MkLexer_varUseModifierMatch__varmod_edge(c *check.C) { t := s.Init(c) - test := func(input, modifier, rest string, diagnostics ...string) { + test := func(input string, modifier MkVarUseModifier, rest string, diagnostics ...string) { line := t.NewLine("filename.mk", 123, "") p := NewMkLexer(input, line) diff --git a/pkgtools/pkglint/files/mkline.go b/pkgtools/pkglint/files/mkline.go index b7b4a3d47e2..b1e159b1bbf 100644 --- a/pkgtools/pkglint/files/mkline.go +++ b/pkgtools/pkglint/files/mkline.go @@ -872,7 +872,7 @@ func (mkline *MkLine) ForEachUsedVarUse(varuse *MkVarUse, time VucTime, action f } mkline.ForEachUsedText(varname, time, action) for _, mod := range varuse.modifiers { - mkline.ForEachUsedText(mod.Text, time, action) + mkline.ForEachUsedText(mod.String(), time, action) } } diff --git a/pkgtools/pkglint/files/mkparser_test.go b/pkgtools/pkglint/files/mkparser_test.go index 05a45a16fd6..e5faaa67fa6 100644 --- a/pkgtools/pkglint/files/mkparser_test.go +++ b/pkgtools/pkglint/files/mkparser_test.go @@ -12,13 +12,13 @@ func (s *Suite) Test_MkParser_MkCond(c *check.C) { cmp := func(left MkCondTerm, op string, right MkCondTerm) *MkCond { return &MkCond{Compare: &MkCondCompare{left, op, right}} } - cvar := func(name string, modifiers ...string) MkCondTerm { + cvar := func(name string, modifiers ...MkVarUseModifier) MkCondTerm { return MkCondTerm{Var: b.VarUse(name, modifiers...)} } cstr := func(s string) MkCondTerm { return MkCondTerm{Str: s} } cnum := func(s string) MkCondTerm { return MkCondTerm{Num: s} } - termVar := func(varname string, mods ...string) *MkCond { + termVar := func(varname string, mods ...MkVarUseModifier) *MkCond { return &MkCond{Term: &MkCondTerm{Var: b.VarUse(varname, mods...)}} } termNum := func(num string) *MkCond { @@ -34,7 +34,7 @@ func (s *Suite) Test_MkParser_MkCond(c *check.C) { call := func(name string, arg string) *MkCond { return &MkCond{Call: &MkCondCall{name, arg}} } - empty := func(varname string, mods ...string) *MkCond { + empty := func(varname string, mods ...MkVarUseModifier) *MkCond { return &MkCond{Empty: b.VarUse(varname, mods...)} } defined := func(varname string) *MkCond { return &MkCond{Defined: varname} } @@ -495,7 +495,7 @@ func (s *Suite) Test_MkCondWalker_Walk(c *check.C) { strs := make([]string, 1+len(varuse.modifiers)) strs[0] = varuse.varname for i, mod := range varuse.modifiers { - strs[1+i] = mod.Text + strs[1+i] = mod.String() } return strings.Join(strs, ":") } diff --git a/pkgtools/pkglint/files/mkshparser.go b/pkgtools/pkglint/files/mkshparser.go index 6586ed3f9e4..145473d1b47 100644 --- a/pkgtools/pkglint/files/mkshparser.go +++ b/pkgtools/pkglint/files/mkshparser.go @@ -250,15 +250,15 @@ func (lex *ShellLexer) Lex(lval *shyySymType) (ttype int) { lval.Word = p.ShToken() lex.atCommandStart = false - // Inside of a case statement, ${PATTERNS:@p@ (${p}) action ;; @} expands to + // Inside of a case statement, ${PATTERNS:@p@ (${p}) continue ;; @} expands to // a list of case-items, and after this list a new command starts. // This is necessary to return a following "esac" as tkESAC instead of a // simple word. 
if lex.sinceCase >= 0 && len(lval.Word.Atoms) == 1 { if varUse := lval.Word.Atoms[0].VarUse(); varUse != nil { if len(varUse.modifiers) > 0 { - lastModifier := varUse.modifiers[len(varUse.modifiers)-1].Text - if hasPrefix(lastModifier, "@") { + lastModifier := varUse.modifiers[len(varUse.modifiers)-1] + if lastModifier.HasPrefix("@") || lastModifier.HasPrefix("=") { lex.atCommandStart = true } } diff --git a/pkgtools/pkglint/files/mktokenslexer.go b/pkgtools/pkglint/files/mktokenslexer.go index 7924ed9c84c..396576744e3 100644 --- a/pkgtools/pkglint/files/mktokenslexer.go +++ b/pkgtools/pkglint/files/mktokenslexer.go @@ -60,6 +60,7 @@ func (m *MkTokensLexer) SkipMixed(n int) bool { use := m.NextVarUse() if use != nil { n -= len(use.Text) + assert(n >= 0) } else { skip := imin(len(m.Lexer.Rest()), n) assert(m.Lexer.Skip(skip)) diff --git a/pkgtools/pkglint/files/mktokenslexer_test.go b/pkgtools/pkglint/files/mktokenslexer_test.go index c74a4ee8674..ed20bd1479f 100644 --- a/pkgtools/pkglint/files/mktokenslexer_test.go +++ b/pkgtools/pkglint/files/mktokenslexer_test.go @@ -5,14 +5,6 @@ import ( "netbsd.org/pkglint/textproc" ) -func (s *Suite) Test_MkTokensLexer__empty_slice_returns_EOF(c *check.C) { - t := s.Init(c) - - lexer := NewMkTokensLexer(nil) - - t.CheckEquals(lexer.EOF(), true) -} - // A slice of a single token behaves like textproc.Lexer. func (s *Suite) Test_MkTokensLexer__single_plain_text_token(c *check.C) { t := s.Init(c) @@ -29,13 +21,133 @@ func (s *Suite) Test_MkTokensLexer__single_plain_text_token(c *check.C) { t.CheckEquals(lexer.EOF(), true) } +// When the MkTokensLexer is constructed, it gets a copy of the tokens array. +// In theory it would be possible to change the tokens after starting lexing, +// but there is no practical case where that would be useful. +// +// Since each slice is a separate view on the underlying array, modifying the +// size of the outside slice does not affect parsing. This is also only a +// theoretical case. +// +// Because all these cases are only theoretical, the MkTokensLexer doesn't +// bother to make this unnecessary copy and works on the shared slice. 
+func (s *Suite) Test_NewMkTokensLexer__shared_array(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + tokens := b.Tokens(b.VaruseToken("VAR")) + lexer := NewMkTokensLexer(tokens) + + t.CheckEquals(lexer.Rest(), "${VAR}") + + tokens[0].Text = "modified text" + tokens[0].Varuse = b.VarUse("MODIFIED", "Mpattern") + + t.CheckEquals(lexer.Rest(), "modified text") +} + +func (s *Suite) Test_MkTokensLexer_next(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + tokens := b.Tokens(b.VaruseToken("VAR")) + lexer := NewMkTokensLexer(tokens) + + t.CheckEquals(lexer.Lexer.Rest(), "") +} + +func (s *Suite) Test_MkTokensLexer_EOF__plain_text(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.TextToken("rest"))) + + t.CheckEquals(lexer.EOF(), false) + + lexer.SkipString("rest") + + t.CheckEquals(lexer.EOF(), true) +} + +func (s *Suite) Test_MkTokensLexer_EOF__varuse(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"))) + + t.CheckEquals(lexer.EOF(), false) + + lexer.NextVarUse() + + t.CheckEquals(lexer.EOF(), true) +} + +func (s *Suite) Test_MkTokensLexer_Rest(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"), b.TextToken("text"), b.VaruseToken("VAR2"))) + + t.CheckEquals(lexer.Rest(), "${VAR}text${VAR2}") + t.CheckEquals(lexer.NextVarUse().Text, "${VAR}") + t.CheckEquals(lexer.Rest(), "text${VAR2}") + t.CheckEquals(lexer.SkipString("text"), true) + t.CheckEquals(lexer.Rest(), "${VAR2}") + t.CheckEquals(lexer.NextVarUse().Text, "${VAR2}") + t.CheckEquals(lexer.Rest(), "") +} + +func (s *Suite) Test_MkTokensLexer_Skip(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"), b.TextToken("text"), b.VaruseToken("VAR2"))) + + t.CheckEquals(lexer.Rest(), "${VAR}text${VAR2}") + t.CheckEquals(lexer.NextVarUse().Text, "${VAR}") + t.CheckEquals(lexer.Rest(), "text${VAR2}") + t.CheckEquals(lexer.Skip(4), true) + t.CheckEquals(lexer.Rest(), "${VAR2}") + t.CheckEquals(lexer.NextVarUse().Text, "${VAR2}") + t.CheckEquals(lexer.Rest(), "") +} + +func (s *Suite) Test_MkTokensLexer_SkipMixed__exact(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"), b.TextToken("text"), b.VaruseToken("VAR2"))) + + t.CheckEquals(lexer.SkipMixed(17), true) + t.CheckEquals(lexer.EOF(), true) +} + +func (s *Suite) Test_MkTokensLexer_SkipMixed__short(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"), b.TextToken("text"), b.VaruseToken("VAR2"))) + + // After 15 characters, the lexer would be in the middle of a MkVarUse. + t.ExpectAssert(func() { lexer.SkipMixed(15) }) +} + +func (s *Suite) Test_MkTokensLexer_SkipMixed__long(c *check.C) { + t := s.Init(c) + b := NewMkTokenBuilder() + + lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"), b.TextToken("text"), b.VaruseToken("VAR2"))) + + t.ExpectAssert(func() { lexer.SkipMixed(20) }) +} + // If the first element of the slice is a variable use, none of the plain // text patterns matches. // // The code that uses the MkTokensLexer needs to distinguish these cases // anyway, therefore it doesn't make sense to treat variable uses as plain // text. 
-func (s *Suite) Test_MkTokensLexer__single_varuse_token(c *check.C) { +func (s *Suite) Test_MkTokensLexer_NextVarUse__single_varuse_token(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() @@ -47,7 +159,7 @@ func (s *Suite) Test_MkTokensLexer__single_varuse_token(c *check.C) { t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) } -func (s *Suite) Test_MkTokensLexer__plain_then_varuse(c *check.C) { +func (s *Suite) Test_MkTokensLexer_NextVarUse__plain_then_varuse(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() @@ -61,7 +173,7 @@ func (s *Suite) Test_MkTokensLexer__plain_then_varuse(c *check.C) { t.CheckEquals(lexer.EOF(), true) } -func (s *Suite) Test_MkTokensLexer__varuse_varuse_varuse(c *check.C) { +func (s *Suite) Test_MkTokensLexer_NextVarUse__varuse_varuse_varuse(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() @@ -77,84 +189,99 @@ func (s *Suite) Test_MkTokensLexer__varuse_varuse_varuse(c *check.C) { t.Check(lexer.NextVarUse(), check.IsNil) } -func (s *Suite) Test_MkTokensLexer__mark_reset_since_in_initial_state(c *check.C) { +func (s *Suite) Test_MkTokensLexer_NextVarUse__varuse_when_plain_text(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() - tokens := b.Tokens( - b.VaruseToken("dirs", "O", "u"), - b.VaruseToken("VAR", "Mpattern"), - b.VaruseToken(".TARGET")) - lexer := NewMkTokensLexer(tokens) + lexer := NewMkTokensLexer(b.Tokens(b.TextToken("text"))) - start := lexer.Mark() - t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) - middle := lexer.Mark() - t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}${.TARGET}") - lexer.Reset(start) - t.CheckEquals(lexer.Rest(), "${dirs:O:u}${VAR:Mpattern}${.TARGET}") - lexer.Reset(middle) - t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}${.TARGET}") + t.Check(lexer.NextVarUse(), check.IsNil) + t.CheckEquals(lexer.NextString("te"), "te") + t.Check(lexer.NextVarUse(), check.IsNil) + t.CheckEquals(lexer.NextString("xt"), "xt") + t.Check(lexer.NextVarUse(), check.IsNil) } -func (s *Suite) Test_MkTokensLexer__mark_reset_since_inside_plain_text(c *check.C) { +func (s *Suite) Test_MkTokensLexer_NextVarUse__peek_after_varuse(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() - lexer := NewMkTokensLexer(b.Tokens( - b.TextToken("plain text"), - b.VaruseToken("VAR", "Mpattern"), - b.TextToken("rest"))) + tokens := b.Tokens( + b.VaruseToken("VAR"), + b.VaruseToken("VAR"), + b.TextToken("text")) + lexer := NewMkTokensLexer(tokens) - start := lexer.Mark() - t.CheckEquals(lexer.NextBytesSet(textproc.Alpha), "plain") - middle := lexer.Mark() - t.CheckEquals(lexer.Rest(), " text${VAR:Mpattern}rest") - lexer.Reset(start) - t.CheckEquals(lexer.Rest(), "plain text${VAR:Mpattern}rest") - lexer.Reset(middle) - t.CheckEquals(lexer.Rest(), " text${VAR:Mpattern}rest") + t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) + t.CheckEquals(lexer.PeekByte(), -1) + + t.CheckDeepEquals(lexer.NextVarUse(), tokens[1]) + t.CheckEquals(lexer.PeekByte(), int('t')) } -func (s *Suite) Test_MkTokensLexer__mark_reset_since_after_plain_text(c *check.C) { +// The code that creates the tokens for the lexer never puts two +// plain text MkTokens besides each other. There's no point in doing +// that since they could have been combined into a single token from +// the beginning. 
+func (s *Suite) Test_MkTokensLexer_NextVarUse__adjacent_plain_text(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() lexer := NewMkTokensLexer(b.Tokens( - b.TextToken("plain text"), - b.VaruseToken("VAR", "Mpattern"), - b.TextToken("rest"))) + b.TextToken("text1"), + b.TextToken("text2"))) - start := lexer.Mark() - t.CheckEquals(lexer.SkipString("plain text"), true) - end := lexer.Mark() - t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") - lexer.Reset(start) - t.CheckEquals(lexer.Rest(), "plain text${VAR:Mpattern}rest") - lexer.Reset(end) - t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") + // Returns false since the string is distributed over two separate tokens. + t.CheckEquals(lexer.SkipString("text1text2"), false) + + t.CheckEquals(lexer.SkipString("text1"), true) + + // This returns false since the internal lexer is not advanced to the + // next text token. To do that, all methods from the internal lexer + // would have to be redefined by MkTokensLexer in order to advance the + // internal lexer to the next token. + // + // Since this situation doesn't occur in practice, there's no point in + // implementing it. + t.CheckEquals(lexer.SkipString("text2"), false) + + // Just for covering the "Varuse != nil" branch in MkTokensLexer.NextVarUse. + t.Check(lexer.NextVarUse(), check.IsNil) + + // The string is still not found since the next token is only consumed + // by the NextVarUse above if it is indeed a VarUse. + t.CheckEquals(lexer.SkipString("text2"), false) } -func (s *Suite) Test_MkTokensLexer__mark_reset_since_after_varuse(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Mark__multiple_marks_in_varuse(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() tokens := b.Tokens( - b.VaruseToken("VAR", "Mpattern"), - b.TextToken("rest")) + b.VaruseToken("VAR1"), + b.VaruseToken("VAR2"), + b.VaruseToken("VAR3")) lexer := NewMkTokensLexer(tokens) start := lexer.Mark() - t.CheckEquals(lexer.NextVarUse(), tokens[0]) + t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) + middle := lexer.Mark() + t.CheckDeepEquals(lexer.NextVarUse(), tokens[1]) + further := lexer.Mark() + t.CheckDeepEquals(lexer.NextVarUse(), tokens[2]) end := lexer.Mark() - t.CheckEquals(lexer.Rest(), "rest") + t.CheckEquals(lexer.Rest(), "") + lexer.Reset(middle) + t.CheckEquals(lexer.Rest(), "${VAR2}${VAR3}") + lexer.Reset(further) + t.CheckEquals(lexer.Rest(), "${VAR3}") lexer.Reset(start) - t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") + t.CheckEquals(lexer.Rest(), "${VAR1}${VAR2}${VAR3}") lexer.Reset(end) - t.CheckEquals(lexer.Rest(), "rest") + t.CheckEquals(lexer.Rest(), "") } -func (s *Suite) Test_MkTokensLexer__multiple_marks_in_same_plain_text(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Mark__multiple_marks_in_same_plain_text(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() @@ -177,137 +304,96 @@ func (s *Suite) Test_MkTokensLexer__multiple_marks_in_same_plain_text(c *check.C t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") } -func (s *Suite) Test_MkTokensLexer__multiple_marks_in_varuse(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Since(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() tokens := b.Tokens( - b.VaruseToken("VAR1"), - b.VaruseToken("VAR2"), - b.VaruseToken("VAR3")) + b.VaruseToken("dirs", "O", "u"), + b.VaruseToken("VAR", "Mpattern"), + b.VaruseToken(".TARGET")) lexer := NewMkTokensLexer(tokens) start := lexer.Mark() t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) middle := lexer.Mark() - t.CheckDeepEquals(lexer.NextVarUse(), tokens[1]) - further := lexer.Mark() - 
t.CheckDeepEquals(lexer.NextVarUse(), tokens[2]) - end := lexer.Mark() - t.CheckEquals(lexer.Rest(), "") - lexer.Reset(middle) - t.CheckEquals(lexer.Rest(), "${VAR2}${VAR3}") - lexer.Reset(further) - t.CheckEquals(lexer.Rest(), "${VAR3}") - lexer.Reset(start) - t.CheckEquals(lexer.Rest(), "${VAR1}${VAR2}${VAR3}") - lexer.Reset(end) - t.CheckEquals(lexer.Rest(), "") + t.CheckEquals(lexer.Since(start), "${dirs:O:u}") + t.CheckEquals(lexer.Since(middle), "") } -func (s *Suite) Test_MkTokensLexer__EOF_before_plain_text(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Reset__initial_state(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() - lexer := NewMkTokensLexer(b.Tokens(b.TextToken("rest"))) + tokens := b.Tokens( + b.VaruseToken("dirs", "O", "u"), + b.VaruseToken("VAR", "Mpattern"), + b.VaruseToken(".TARGET")) + lexer := NewMkTokensLexer(tokens) - t.CheckEquals(lexer.EOF(), false) + start := lexer.Mark() + t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) + middle := lexer.Mark() + t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}${.TARGET}") + lexer.Reset(start) + t.CheckEquals(lexer.Rest(), "${dirs:O:u}${VAR:Mpattern}${.TARGET}") + lexer.Reset(middle) + t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}${.TARGET}") } -func (s *Suite) Test_MkTokensLexer__EOF_before_varuse(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Reset__inside_plain_text(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() - lexer := NewMkTokensLexer(b.Tokens(b.VaruseToken("VAR"))) + lexer := NewMkTokensLexer(b.Tokens( + b.TextToken("plain text"), + b.VaruseToken("VAR", "Mpattern"), + b.TextToken("rest"))) - t.CheckEquals(lexer.EOF(), false) + start := lexer.Mark() + t.CheckEquals(lexer.NextBytesSet(textproc.Alpha), "plain") + middle := lexer.Mark() + t.CheckEquals(lexer.Rest(), " text${VAR:Mpattern}rest") + lexer.Reset(start) + t.CheckEquals(lexer.Rest(), "plain text${VAR:Mpattern}rest") + lexer.Reset(middle) + t.CheckEquals(lexer.Rest(), " text${VAR:Mpattern}rest") } -// When the MkTokensLexer is constructed, it gets a copy of the tokens array. -// In theory it would be possible to change the tokens after starting lexing, -// but there is no practical case where that would be useful. -// -// Since each slice is a separate view on the underlying array, modifying the -// size of the outside slice does not affect parsing. This is also only a -// theoretical case. -// -// Because all these cases are only theoretical, the MkTokensLexer doesn't -// bother to make this unnecessary copy and works on the shared slice. 
-func (s *Suite) Test_MkTokensLexer__constructor_uses_shared_array(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Reset__after_plain_text(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() - tokens := b.Tokens(b.VaruseToken("VAR")) - lexer := NewMkTokensLexer(tokens) - - t.CheckEquals(lexer.Rest(), "${VAR}") - - tokens[0].Text = "modified text" - tokens[0].Varuse = b.VarUse("MODIFIED", "Mpattern") + lexer := NewMkTokensLexer(b.Tokens( + b.TextToken("plain text"), + b.VaruseToken("VAR", "Mpattern"), + b.TextToken("rest"))) - t.CheckEquals(lexer.Rest(), "modified text") + start := lexer.Mark() + t.CheckEquals(lexer.SkipString("plain text"), true) + end := lexer.Mark() + t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") + lexer.Reset(start) + t.CheckEquals(lexer.Rest(), "plain text${VAR:Mpattern}rest") + lexer.Reset(end) + t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") } -func (s *Suite) Test_MkTokensLexer__peek_after_varuse(c *check.C) { +func (s *Suite) Test_MkTokensLexer_Reset__after_varuse(c *check.C) { t := s.Init(c) b := NewMkTokenBuilder() tokens := b.Tokens( - b.VaruseToken("VAR"), - b.VaruseToken("VAR"), - b.TextToken("text")) + b.VaruseToken("VAR", "Mpattern"), + b.TextToken("rest")) lexer := NewMkTokensLexer(tokens) - t.CheckDeepEquals(lexer.NextVarUse(), tokens[0]) - t.CheckEquals(lexer.PeekByte(), -1) - - t.CheckDeepEquals(lexer.NextVarUse(), tokens[1]) - t.CheckEquals(lexer.PeekByte(), int('t')) -} - -func (s *Suite) Test_MkTokensLexer__varuse_when_plain_text(c *check.C) { - t := s.Init(c) - b := NewMkTokenBuilder() - - lexer := NewMkTokensLexer(b.Tokens(b.TextToken("text"))) - - t.Check(lexer.NextVarUse(), check.IsNil) - t.CheckEquals(lexer.NextString("te"), "te") - t.Check(lexer.NextVarUse(), check.IsNil) - t.CheckEquals(lexer.NextString("xt"), "xt") - t.Check(lexer.NextVarUse(), check.IsNil) -} - -// The code that creates the tokens for the lexer never puts two -// plain text MkTokens besides each other. There's no point in doing -// that since they could have been combined into a single token from -// the beginning. -func (s *Suite) Test_MkTokensLexer__adjacent_plain_text(c *check.C) { - t := s.Init(c) - b := NewMkTokenBuilder() - - lexer := NewMkTokensLexer(b.Tokens( - b.TextToken("text1"), - b.TextToken("text2"))) - - // Returns false since the string is distributed over two separate tokens. - t.CheckEquals(lexer.SkipString("text1text2"), false) - - t.CheckEquals(lexer.SkipString("text1"), true) - - // This returns false since the internal lexer is not advanced to the - // next text token. To do that, all methods from the internal lexer - // would have to be redefined by MkTokensLexer in order to advance the - // internal lexer to the next token. - // - // Since this situation doesn't occur in practice, there's no point in - // implementing it. - t.CheckEquals(lexer.SkipString("text2"), false) - - // Just for covering the "Varuse != nil" branch in MkTokensLexer.NextVarUse. - t.Check(lexer.NextVarUse(), check.IsNil) - - // The string is still not found since the next token is only consumed - // by the NextVarUse above if it is indeed a VarUse. 
- t.CheckEquals(lexer.SkipString("text2"), false) + start := lexer.Mark() + t.CheckEquals(lexer.NextVarUse(), tokens[0]) + end := lexer.Mark() + t.CheckEquals(lexer.Rest(), "rest") + lexer.Reset(start) + t.CheckEquals(lexer.Rest(), "${VAR:Mpattern}rest") + lexer.Reset(end) + t.CheckEquals(lexer.Rest(), "rest") } diff --git a/pkgtools/pkglint/files/mktypes.go b/pkgtools/pkglint/files/mktypes.go index 9ad08e0ae9d..9d58631e4ac 100644 --- a/pkgtools/pkglint/files/mktypes.go +++ b/pkgtools/pkglint/files/mktypes.go @@ -39,19 +39,31 @@ func NewMkVarUse(varname string, modifiers ...MkVarUseModifier) *MkVarUse { func (vu *MkVarUse) String() string { return sprintf("${%s%s}", vu.varname, vu.Mod()) } -type MkVarUseModifier struct { - Text string // The text of the modifier, without the initial colon. +// MkVarUseModifier stores the text of the modifier, without the initial colon. +// Examples: "Q", "S,from,to,g" +type MkVarUseModifier string + +func (m MkVarUseModifier) String() string { + return string(m) +} + +func (m MkVarUseModifier) Quoted() string { + return strings.Replace(string(m), ":", "\\:", -1) +} + +func (m MkVarUseModifier) HasPrefix(prefix string) bool { + return hasPrefix(m.String(), prefix) } -func (m MkVarUseModifier) IsQ() bool { return m.Text == "Q" } +func (m MkVarUseModifier) IsQ() bool { return m == "Q" } func (m MkVarUseModifier) IsSuffixSubst() bool { // XXX: There are other cases - return hasPrefix(m.Text, "=") + return m.HasPrefix("=") } func (m MkVarUseModifier) MatchSubst() (ok bool, regex bool, from string, to string, options string) { - p := NewMkLexer(m.Text, nil) + p := NewMkLexer(m.String(), nil) return p.varUseModifierSubst('}') } @@ -91,7 +103,7 @@ func (m MkVarUseModifier) Subst(str string) (bool, string) { ok, result := m.EvalSubst(str, leftAnchor, from, rightAnchor, to, options) if trace.Tracing && ok && result != str { - trace.Stepf("Subst: %q %q => %q", str, m.Text, result) + trace.Stepf("Subst: %q %q => %q", str, m.String(), result) } return ok, result } @@ -124,21 +136,22 @@ func (MkVarUseModifier) EvalSubst(s string, left bool, from string, right bool, // :Npattern => true, false, "pattern", true // :X => false func (m MkVarUseModifier) MatchMatch() (ok bool, positive bool, pattern string, exact bool) { - if hasPrefix(m.Text, "M") || hasPrefix(m.Text, "N") { + if m.HasPrefix("M") || m.HasPrefix("N") { + str := m.String() // See devel/bmake/files/str.c:^Str_Match - exact := !strings.ContainsAny(m.Text[1:], "*?[\\$") - return true, m.Text[0] == 'M', m.Text[1:], exact + exact := !strings.ContainsAny(str[1:], "*?[\\$") + return true, str[0] == 'M', str[1:], exact } return false, false, "", false } -func (m MkVarUseModifier) IsToLower() bool { return m.Text == "tl" } +func (m MkVarUseModifier) IsToLower() bool { return m == "tl" } // ChangesList returns true if applying this modifier to a variable // may change the expression from a list type to a non-list type // or vice versa. func (m MkVarUseModifier) ChangesList() bool { - text := m.Text + text := m.String() // See MkParser.varUseModifier for the meaning of these modifiers. 
switch text[0] { @@ -172,7 +185,7 @@ func (vu *MkVarUse) Mod() string { var mod strings.Builder for _, modifier := range vu.modifiers { mod.WriteString(":") - mod.WriteString(modifier.Text) + mod.WriteString(modifier.String()) } return mod.String() } @@ -184,7 +197,7 @@ func (vu *MkVarUse) IsExpression() bool { return false } mod := vu.modifiers[0] - return mod.Text == "L" || hasPrefix(mod.Text, "?") + return mod.String() == "L" || mod.HasPrefix("?") } func (vu *MkVarUse) IsQ() bool { @@ -194,7 +207,7 @@ func (vu *MkVarUse) IsQ() bool { func (vu *MkVarUse) HasModifier(prefix string) bool { for _, mod := range vu.modifiers { - if hasPrefix(mod.Text, prefix) { + if mod.HasPrefix(prefix) { return true } } diff --git a/pkgtools/pkglint/files/mktypes_test.go b/pkgtools/pkglint/files/mktypes_test.go index b927bd91e5d..d4fd590cf6f 100644 --- a/pkgtools/pkglint/files/mktypes_test.go +++ b/pkgtools/pkglint/files/mktypes_test.go @@ -9,19 +9,19 @@ type MkTokenBuilder struct{} func NewMkTokenBuilder() MkTokenBuilder { return MkTokenBuilder{} } -func (b MkTokenBuilder) VaruseToken(varname string, modifiers ...string) *MkToken { +func (b MkTokenBuilder) VaruseToken(varname string, modifiers ...MkVarUseModifier) *MkToken { var text strings.Builder text.WriteString("${") text.WriteString(varname) for _, modifier := range modifiers { text.WriteString(":") - text.WriteString(modifier) + text.WriteString(modifier.String()) // TODO: Quoted } text.WriteString("}") return &MkToken{Text: text.String(), Varuse: b.VarUse(varname, modifiers...)} } -func (b MkTokenBuilder) VaruseTextToken(text, varname string, modifiers ...string) *MkToken { +func (b MkTokenBuilder) VaruseTextToken(text, varname string, modifiers ...MkVarUseModifier) *MkToken { return &MkToken{Text: text, Varuse: b.VarUse(varname, modifiers...)} } @@ -31,18 +31,77 @@ func (MkTokenBuilder) TextToken(text string) *MkToken { func (MkTokenBuilder) Tokens(tokens ...*MkToken) []*MkToken { return tokens } -func (MkTokenBuilder) VarUse(varname string, modifiers ...string) *MkVarUse { - var mods []MkVarUseModifier - for _, modifier := range modifiers { - mods = append(mods, MkVarUseModifier{modifier}) +func (MkTokenBuilder) VarUse(varname string, modifiers ...MkVarUseModifier) *MkVarUse { + return NewMkVarUse(varname, modifiers...) +} + +func (s *Suite) Test_NewMkVarUse(c *check.C) { + t := s.Init(c) + + use := NewMkVarUse("VARNAME", "Q") + + t.CheckEquals(use.String(), "${VARNAME:Q}") + t.CheckEquals(use.varname, "VARNAME") + t.CheckDeepEquals(use.modifiers, []MkVarUseModifier{"Q"}) +} + +func (s *Suite) Test_MkVarUse_String(c *check.C) { + t := s.Init(c) + + use := NewMkVarUse("VARNAME", "S,:,colon,", "Q") + + t.CheckEquals(use.String(), "${VARNAME:S,:,colon,:Q}") +} + +func (s *Suite) Test_MkVarUseModifier_String(c *check.C) { + t := s.Init(c) + + test := func(mod MkVarUseModifier, str string) { + t.CheckEquals(mod.String(), str) + } + + test("Q", "Q") + test("S/from/to/1g", "S/from/to/1g") + test(":", ":") +} + +func (s *Suite) Test_MkVarUseModifier_Quoted(c *check.C) { + t := s.Init(c) + + test := func(mod MkVarUseModifier, quoted string) { + t.CheckEquals(mod.Quoted(), quoted) } - return NewMkVarUse(varname, mods...) 
+ + test("Q", "Q") + test("S/from/to/1g", "S/from/to/1g") + test(":", "\\:") +} + +func (s *Suite) Test_MkVarUseModifier_HasPrefix(c *check.C) { + t := s.Init(c) + + t.CheckEquals(MkVarUseModifier("Q").HasPrefix("Q"), true) + t.CheckEquals(MkVarUseModifier("S/from/to/1g").HasPrefix("Q"), false) +} + +func (s *Suite) Test_MkVarUseModifier_IsQ(c *check.C) { + t := s.Init(c) + + t.CheckEquals(MkVarUseModifier("Q").IsQ(), true) + t.CheckEquals(MkVarUseModifier("S/from/to/1g").IsQ(), false) +} + +func (s *Suite) Test_MkVarUseModifier_IsSuffixSubst(c *check.C) { + t := s.Init(c) + + t.CheckEquals(MkVarUseModifier("=suffix").IsSuffixSubst(), true) + t.CheckEquals(MkVarUseModifier("S,=,eq,").IsSuffixSubst(), false) } func (s *Suite) Test_MkVarUseModifier_MatchSubst(c *check.C) { t := s.Init(c) - mod := MkVarUseModifier{"S/from/to/1g"} + mod := MkVarUseModifier("S/from/to/1g") ok, regex, from, to, options := mod.MatchSubst() @@ -56,7 +115,7 @@ func (s *Suite) Test_MkVarUseModifier_MatchSubst(c *check.C) { func (s *Suite) Test_MkVarUseModifier_MatchSubst__backslash(c *check.C) { t := s.Init(c) - mod := MkVarUseModifier{"S/\\//\\:/"} + mod := MkVarUseModifier("S/\\//\\:/") ok, regex, from, to, options := mod.MatchSubst() @@ -76,7 +135,7 @@ func (s *Suite) Test_MkVarUseModifier_MatchSubst__backslash(c *check.C) { func (s *Suite) Test_MkVarUseModifier_MatchSubst__backslash_as_separator(c *check.C) { t := s.Init(c) - mod := MkVarUseModifier{"S\\.post1\\\\1"} + mod := MkVarUseModifier("S\\.post1\\\\1") ok, regex, from, to, options := mod.MatchSubst() @@ -91,7 +150,7 @@ func (s *Suite) Test_MkVarUseModifier_Subst(c *check.C) { t := s.Init(c) test := func(mod, str string, ok bool, result string) { - m := MkVarUseModifier{mod} + m := MkVarUseModifier(mod) actualOk, actualResult := m.Subst(str) @@ -154,7 +213,7 @@ func (s *Suite) Test_MkVarUseModifier_EvalSubst(c *check.C) { t := s.Init(c) test := func(s string, left bool, from string, right bool, to string, flags string, ok bool, result string) { - mod := MkVarUseModifier{} + mod := MkVarUseModifier("") actualOk, actual := mod.EvalSubst(s, left, from, right, to, flags) @@ -195,13 +254,12 @@ func (s *Suite) Test_MkVarUseModifier_EvalSubst(c *check.C) { func (s *Suite) Test_MkVarUseModifier_MatchMatch(c *check.C) { t := s.Init(c) - testFail := func(modifier string) { - mod := MkVarUseModifier{modifier} + testFail := func(modifier MkVarUseModifier) { + mod := modifier ok, _, _, _ := mod.MatchMatch() t.CheckEquals(ok, false) } - test := func(modifier string, positive bool, pattern string, exact bool) { - mod := MkVarUseModifier{modifier} + test := func(mod MkVarUseModifier, positive bool, pattern string, exact bool) { actualOk, actualPositive, actualPattern, actualExact := mod.MatchMatch() t.CheckDeepEquals( []interface{}{actualOk, actualPositive, actualPattern, actualExact}, @@ -217,11 +275,17 @@ func (s *Suite) Test_MkVarUseModifier_MatchMatch(c *check.C) { test("Npattern", false, "pattern", true) } +func (s *Suite) Test_MkVarUseModifier_IsToLower(c *check.C) { + t := s.Init(c) + + t.CheckEquals(MkVarUseModifier("tl").IsToLower(), true) + t.CheckEquals(MkVarUseModifier("tu").IsToLower(), false) +} + func (s *Suite) Test_MkVarUseModifier_ChangesList(c *check.C) { t := s.Init(c) - test := func(modifier string, changes bool) { - mod := MkVarUseModifier{modifier} + test := func(mod MkVarUseModifier, changes bool) { t.CheckEquals(mod.ChangesList(), changes) } @@ -281,3 +345,32 @@ func (s *Suite) Test_MkVarUse_Mod(c *check.C) { test("${varname:Q}", ":Q") 
test("${PATH:ts::Q}", ":ts::Q") } + +func (s *Suite) Test_MkVarUse_IsExpression(c *check.C) { + t := s.Init(c) + + t.CheckEquals(ToVarUse("${VAR}").IsExpression(), false) + t.CheckEquals(ToVarUse("${expr:L}").IsExpression(), true) + t.CheckEquals(ToVarUse("${expr:?then:else}").IsExpression(), true) +} + +func (s *Suite) Test_MkVarUse_IsQ(c *check.C) { + t := s.Init(c) + + t.CheckEquals(ToVarUse("${VAR}").IsQ(), false) + t.CheckEquals(ToVarUse("${VAR:Q}").IsQ(), true) + t.CheckEquals(ToVarUse("${VAR:tl}").IsQ(), false) + t.CheckEquals(ToVarUse("${VAR:tl:Q}").IsQ(), true) + t.CheckEquals(ToVarUse("${VAR:Q:tl}").IsQ(), false) +} + +func (s *Suite) Test_MkVarUse_HasModifier(c *check.C) { + t := s.Init(c) + + t.CheckEquals(ToVarUse("${VAR}").HasModifier("Q"), false) + t.CheckEquals(ToVarUse("${VAR:Q}").HasModifier("Q"), true) + t.CheckEquals(ToVarUse("${VAR:tl}").HasModifier("Q"), false) + t.CheckEquals(ToVarUse("${VAR:tl}").HasModifier("t"), true) + t.CheckEquals(ToVarUse("${VAR:tl:Q}").HasModifier("Q"), true) + t.CheckEquals(ToVarUse("${VAR:Q:tl}").HasModifier("Q"), true) +} diff --git a/pkgtools/pkglint/files/mkvarusechecker.go b/pkgtools/pkglint/files/mkvarusechecker.go index 460af214f25..0e67e704e81 100644 --- a/pkgtools/pkglint/files/mkvarusechecker.go +++ b/pkgtools/pkglint/files/mkvarusechecker.go @@ -1,6 +1,9 @@ package pkglint -import "strings" +import ( + "netbsd.org/pkglint/textproc" + "strings" +) type MkVarUseChecker struct { use *MkVarUse @@ -72,9 +75,9 @@ func (ck *MkVarUseChecker) checkModifiers() { ck.checkModifiersSuffix() ck.checkModifiersRange() - // TODO: Add checks for a single modifier, among them: - // TODO: Suggest to replace ${VAR:@l@-l${l}@} with the simpler ${VAR:S,^,-l,}. - // TODO: Suggest to replace ${VAR:@l@${l}suffix@} with the simpler ${VAR:=suffix}. + for _, mod := range ck.use.modifiers { + ck.checkModifierLoop(mod) + } // TODO: Investigate why :Q is not checked at this exact place. } @@ -133,6 +136,55 @@ func (ck *MkVarUseChecker) checkModifiersRange() { fix.Apply() } +func (ck *MkVarUseChecker) checkModifierLoop(mod MkVarUseModifier) { + str := mod.String() + if !hasSuffix(str, "@") { + return + } + lex := textproc.NewLexer(str[:len(str)-1]) + if !lex.SkipByte('@') { + return + } + varname := lex.NextBytesSet(textproc.Alnum) + if varname == "" || !lex.SkipByte('@') { + return + } + body := lex.Rest() + // TODO: Are MkVarUse interpreted the same in the before/after modifiers? 
+ if !matches(body, `^[-A-Za-z0-9 $();_{}]+$`) { + return + } + varnameUse := "${" + varname + "}" + + n := 0 + ck.MkLine.ForEachUsedText(str, VucRunTime, func(varUse *MkVarUse, time VucTime) { + if varUse.varname == varname { + n++ + } + }) + if n != 1 { + return + } + + if rest := strings.TrimSuffix(body, varnameUse); len(rest) < len(body) { + simpler := "S,^," + rest + "," + fix := ck.MkLine.Autofix() + fix.Notef("The modifier %q can be replaced with the simpler %q.", + str, simpler) + fix.Replace(str, simpler) + fix.Apply() + } + + if rest := strings.TrimPrefix(body, varnameUse); len(rest) < len(body) { + simpler := "=" + rest + fix := ck.MkLine.Autofix() + fix.Notef("The modifier %q can be replaced with the simpler %q.", + str, simpler) + fix.Replace(str, simpler) + fix.Apply() + } +} + func (ck *MkVarUseChecker) checkVarname(time VucTime) { varname := ck.use.varname if varname == "@" { diff --git a/pkgtools/pkglint/files/mkvarusechecker_test.go b/pkgtools/pkglint/files/mkvarusechecker_test.go index b56a2bd2f89..85141e1ee15 100644 --- a/pkgtools/pkglint/files/mkvarusechecker_test.go +++ b/pkgtools/pkglint/files/mkvarusechecker_test.go @@ -354,6 +354,69 @@ func (s *Suite) Test_MkVarUseChecker_checkModifiersRange(c *check.C) { mklines.Check() } +func (s *Suite) Test_MkVarUseChecker_checkModifierLoop(c *check.C) { + t := s.Init(c) + + autofixTest := func(before, after string, autofix bool) { + mklines := t.NewMkLines("filename.mk", + MkCvsID, + "VAR=\t"+before) + mklines.ForEach(func(mkline *MkLine) { + mkline.ForEachUsed(func(varUse *MkVarUse, time VucTime) { + ck := NewMkVarUseChecker(varUse, nil, mkline) + ck.checkModifiers() + }) + }) + if autofix { + t.CheckEquals(mklines.mklines[1].Text, "VAR=\t"+after) + } + } + + test := func(before, after string, diagnostics ...string) { + t.ExpectDiagnosticsAutofix( + func(autofix bool) { autofixTest(before, after, autofix) }, + diagnostics...) + } + + test("${VAR:@l@-l${l}@}", "${VAR:S,^,-l,}", + "NOTE: filename.mk:2: The modifier \"@l@-l${l}@\" "+ + "can be replaced with the simpler \"S,^,-l,\".", + "AUTOFIX: filename.mk:2: Replacing \"@l@-l${l}@\" with \"S,^,-l,\".") + + // The comma is used in the :S modifier as the separator, + // therefore the modifier is left as-is. + test("${VAR:@word@-Wl,${word}@}", "${VAR:@word@-Wl,${word}@}", + nil...) + + test("${VAR:@l@${l}suffix@}", "${VAR:=suffix}", + "NOTE: filename.mk:2: The modifier \"@l@${l}suffix@\" "+ + "can be replaced with the simpler \"=suffix\".", + "AUTOFIX: filename.mk:2: Replacing \"@l@${l}suffix@\" with \"=suffix\".") + + // Escaping the colon is not yet supported. + test("${VAR:@word@${word}: suffix@}", "${VAR:@word@${word}: suffix@}", + nil...) + + // The loop variable must be mentioned exactly once. + test("${VAR:@var@${var}${var}@}", "${VAR:@var@${var}${var}@}", + nil...) + + // Other variables are fine though. + test("${VAR:@var@${var}${OTHER}@}", "${VAR:=${OTHER}}", + "NOTE: filename.mk:2: The modifier \"@var@${var}${OTHER}@\" "+ + "can be replaced with the simpler \"=${OTHER}\".", + "AUTOFIX: filename.mk:2: Replacing \"@var@${var}${OTHER}@\" with \"=${OTHER}\".") + + // If the loop variable has modifiers, the :@var@ is probably appropriate. + test("${VAR:@var@${var:Q}@}", "${VAR:@var@${var:Q}@}", + nil...) 
+ + test("${VAR:@p@${p}) continue;; @}", "${VAR:=) continue;; }", + "NOTE: filename.mk:2: The modifier \"@p@${p}) continue;; @\" "+ + "can be replaced with the simpler \"=) continue;; \".", + "AUTOFIX: filename.mk:2: Replacing \"@p@${p}) continue;; @\" with \"=) continue;; \".") +} + func (s *Suite) Test_MkVarUseChecker_checkVarname(c *check.C) { t := s.Init(c) diff --git a/pkgtools/pkglint/files/package.go b/pkgtools/pkglint/files/package.go index ddc6a463a51..d3c3db96a2d 100644 --- a/pkgtools/pkglint/files/package.go +++ b/pkgtools/pkglint/files/package.go @@ -55,8 +55,6 @@ type Package struct { // TODO: Include files with multiple-inclusion guard only once. // // TODO: Include files without multiple-inclusion guard as often as needed. - // - // TODO: Set an upper limit, to prevent denial of service. included Once // Does the package have any .includes? @@ -540,7 +538,7 @@ func (pkg *Package) loadPlistDirs(plistFilename CurrPath) { } for _, plistLine := range plistLines { if plistLine.HasPath() { - rank := NewPlistRank(plistLine.Basename) + rank := NewPlistRank(plistLine.Line.Basename) pkg.PlistLines.Add(plistLine, rank) } for _, cond := range plistLine.conditions { @@ -661,15 +659,9 @@ func (pkg *Package) checkfilePackageMakefile(filename CurrPath, mklines *MkLines pkg.checkDistinfoExists() - // TODO: There are other REPLACE_* variables which are probably also affected by NO_CONFIGURE. - vars := pkg.vars - if noConfigureLine := vars.FirstDefinition("NO_CONFIGURE"); noConfigureLine != nil { - if replacePerlLine := vars.FirstDefinition("REPLACE_PERL"); replacePerlLine != nil { - replacePerlLine.Warnf("REPLACE_PERL is ignored when NO_CONFIGURE is set (in %s).", - replacePerlLine.RelMkLine(noConfigureLine)) - } - } + pkg.checkReplaceInterpreter() + vars := pkg.vars if !vars.IsDefined("LICENSE") && !vars.IsDefined("META_PACKAGE") { line := NewLineWhole(filename) line.Errorf("Each package must define its LICENSE.") @@ -742,6 +734,34 @@ func (pkg *Package) checkfilePackageMakefile(filename CurrPath, mklines *MkLines SaveAutofixChanges(mklines.lines) } +func (pkg *Package) checkReplaceInterpreter() { + vars := pkg.vars + noConfigureLine := vars.FirstDefinition("NO_CONFIGURE") + if noConfigureLine == nil { + return + } + + // See mk/configure/replace-interpreter.mk. 
+ varnames := [...]string{ + "REPLACE_AWK", + "REPLACE_BASH", + "REPLACE_CSH", + "REPLACE_KSH", + "REPLACE_PERL", + "REPLACE_PERL6", + "REPLACE_SH", + "REPLACE_INTERPRETER"} + + for _, varname := range varnames { + mkline := vars.FirstDefinition(varname) + if mkline == nil { + continue + } + mkline.Warnf("%s is ignored when NO_CONFIGURE is set (in %s).", + varname, mkline.RelMkLine(noConfigureLine)) + } +} + func (pkg *Package) checkDistinfoExists() { vars := pkg.vars @@ -1176,14 +1196,7 @@ func (pkg *Package) determineEffectivePkgVars() { } } - if pkgnameLine != nil && (pkgname == distname || pkgname == "${DISTNAME}") { - if !pkgnameLine.HasComment() { - pkgnameLine.Notef("This assignment is probably redundant " + - "since PKGNAME is ${DISTNAME} by default.") - pkgnameLine.Explain( - "To mark this assignment as necessary, add a comment to the end of this line.") - } - } + pkg.checkPkgnameRedundant(pkgnameLine, pkgname, distname) if pkgname == "" && distnameLine != nil && !containsVarUse(distname) && !matches(distname, rePkgname) { distnameLine.Warnf("As DISTNAME is not a valid package name, please define the PKGNAME explicitly.") @@ -1219,6 +1232,23 @@ func (pkg *Package) determineEffectivePkgVars() { } } +func (pkg *Package) checkPkgnameRedundant(pkgnameLine *MkLine, pkgname string, distname string) { + if pkgnameLine == nil || pkgnameLine.HasComment() { + return + } + if pkgname != distname && pkgname != "${DISTNAME}" { + return + } + pkgnameInfo := pkg.redundant.vars["PKGNAME"] + if len(pkgnameInfo.vari.WriteLocations()) >= 2 { + return + } + pkgnameLine.Notef("This assignment is probably redundant " + + "since PKGNAME is ${DISTNAME} by default.") + pkgnameLine.Explain( + "To mark this assignment as necessary, add a comment to the end of this line.") +} + // nbPart determines the smallest part of the package version number, // typically "nb13" or an empty string. // @@ -1253,7 +1283,7 @@ func (pkg *Package) pkgnameFromDistname(pkgname, distname string, diag Diagnoser newDistname = strings.ToLower(newDistname) } else if ok, subst := mod.Subst(newDistname); ok { if subst == newDistname && !containsVarUse(subst) { - diag.Notef("The modifier :%s does not have an effect.", mod.Text) + diag.Notef("The modifier :%s does not have an effect.", mod.String()) } newDistname = subst } else { diff --git a/pkgtools/pkglint/files/package_test.go b/pkgtools/pkglint/files/package_test.go index 302bbf08f76..c0ccb1b0c95 100644 --- a/pkgtools/pkglint/files/package_test.go +++ b/pkgtools/pkglint/files/package_test.go @@ -2847,6 +2847,46 @@ func (s *Suite) Test_Package_determineEffectivePkgVars__Python_prefix_late(c *ch "1 warning found.") } +// The infrastructure file mk/haskell.mk sets a default for PKGNAME +// that differs from the plain DISTNAME. This makes the assignment +// PKGNAME=${DISTNAME} non-redundant. +func (s *Suite) Test_Package_determineEffectivePkgVars__Haskell(c *check.C) { + t := s.Init(c) + + t.SetUpPackage("category/package", + "PKGNAME=\t${DISTNAME}", + ".include \"../../mk/haskell.mk\"") + t.CreateFileLines("mk/haskell.mk", + MkCvsID, + "PKGNAME?=\ths-${DISTNAME}") + t.FinishSetUp() + + G.Check(t.File("category/package")) + + // Up to 2020-06-28, pkglint wrongly produced a note about + // PKGNAME being "probably redundant". 
+ t.CheckOutputEmpty() +} + +func (s *Suite) Test_Package_determineEffectivePkgVars__bsd_pkg_mk(c *check.C) { + t := s.Init(c) + + t.SetUpPackage("category/package", + "PKGNAME=\t${DISTNAME}") + t.CreateFileLines("mk/bsd.pkg.mk", + MkCvsID, + "PKGNAME?=\t${DISTNAME}") + t.FinishSetUp() + + G.Check(t.File("category/package")) + + // Contrary to the one from mk/haskell.mk, the default assignment in + // mk/bsd.pkg.mk is not included in the RedundantScope. + t.CheckOutputLines( + "NOTE: ~/category/package/Makefile:4: This assignment is probably " + + "redundant since PKGNAME is ${DISTNAME} by default.") +} + func (s *Suite) Test_Package_nbPart(c *check.C) { t := s.Init(c) @@ -2874,7 +2914,10 @@ func (s *Suite) Test_Package_pkgnameFromDistname(c *check.C) { } pkg := NewPackage(t.File("category/package")) - pkg.loadPackageMakefile() + _, allLines := pkg.loadPackageMakefile() + pkg.redundant = NewRedundantScope() // See Package.checkfilePackageMakefile. + pkg.redundant.IsRelevant = func(mkline *MkLine) bool { return false } + pkg.redundant.Check(allLines) pkg.determineEffectivePkgVars() t.CheckEquals(pkg.EffectivePkgname, expectedPkgname) t.CheckOutput(diagnostics) diff --git a/pkgtools/pkglint/files/pkglint_test.go b/pkgtools/pkglint/files/pkglint_test.go index ad6d23db643..f1d979bab8b 100644 --- a/pkgtools/pkglint/files/pkglint_test.go +++ b/pkgtools/pkglint/files/pkglint_test.go @@ -1475,6 +1475,14 @@ func (s *Suite) Test_InterPackage_Bl3__same_identifier(c *check.C) { G.Check("category/package2") t.CheckOutputLines( - "ERROR: category/package2/buildlink3.mk:3: Duplicate package identifier " + + "NOTE: category/package1/Makefile:4: The modifier \"@v@${v}@\" "+ + "can be replaced with the simpler \"S,^,,\".", + "NOTE: category/package1/Makefile:4: The modifier \"@v@${v}@\" "+ + "can be replaced with the simpler \"=\".", + "NOTE: category/package2/Makefile:4: The modifier \"@v@${v}@\" "+ + "can be replaced with the simpler \"S,^,,\".", + "NOTE: category/package2/Makefile:4: The modifier \"@v@${v}@\" "+ + "can be replaced with the simpler \"=\".", + "ERROR: category/package2/buildlink3.mk:3: Duplicate package identifier "+ "\"package1\" already appeared in ../../category/package1/buildlink3.mk:3.") } diff --git a/pkgtools/pkglint/files/plist.go b/pkgtools/pkglint/files/plist.go index b1de3d75de5..9263b4e1e96 100644 --- a/pkgtools/pkglint/files/plist.go +++ b/pkgtools/pkglint/files/plist.go @@ -156,7 +156,7 @@ func (ck *PlistChecker) checkLine(pline *PlistLine) { } else if m, cmd, arg := match2(text, `^@([a-z-]+)[\t ]*(.*)`); m { pline.CheckDirective(cmd, arg) - if cmd == "comment" && pline.Location.lineno > 1 { + if cmd == "comment" && pline.Line.Location.lineno > 1 { ck.nonAsciiAllowed = true } @@ -407,7 +407,7 @@ func (ck *PlistChecker) checkPathMan(pline *PlistLine) { "configured by the pkgsrc user.", "Compression and decompression takes place automatically,", "no matter if the .gz extension is mentioned in the PLIST or not.") - fix.ReplaceAt(0, len(pline.Text)-len(".gz"), ".gz", "") + fix.ReplaceAt(0, len(pline.Line.Text)-len(".gz"), ".gz", "") fix.Apply() } } @@ -528,12 +528,27 @@ func (ck *PlistChecker) checkOmf(plines []*PlistLine) { } type PlistLine struct { - *Line + Line *Line // XXX: Why "PLIST.docs" and not simply "docs"? conditions []string // e.g. 
PLIST.docs text string // Line.Text without any conditions of the form ${PLIST.cond} } +func (pline *PlistLine) Autofix() *Autofix { return pline.Line.Autofix() } + +func (pline *PlistLine) Errorf(format string, args ...interface{}) { + pline.Line.Errorf(format, args...) +} +func (pline *PlistLine) Warnf(format string, args ...interface{}) { + pline.Line.Warnf(format, args...) +} +func (pline *PlistLine) Explain(explanation ...string) { + pline.Line.Explain(explanation...) +} +func (pline *PlistLine) RelLine(other *Line) string { + return pline.Line.RelLine(other) +} + func (pline *PlistLine) HasPath() bool { return pline.text != "" && plistLineStart.Contains(pline.text[0]) } diff --git a/pkgtools/pkglint/files/plist_test.go b/pkgtools/pkglint/files/plist_test.go index 164ac9f82be..6a6e410a958 100644 --- a/pkgtools/pkglint/files/plist_test.go +++ b/pkgtools/pkglint/files/plist_test.go @@ -153,9 +153,25 @@ func (s *Suite) Test_CheckLinesPlist__sorting(c *check.C) { func (s *Suite) Test_CheckLinesPlist__sort_common(c *check.C) { t := s.Init(c) - // TODO: Examine what happens if there is a PLIST.common to be sorted. + t.SetUpPackage("category/package") + t.Chdir("category/package") + t.CreateFileLines("PLIST.common", + PlistCvsID, + "bin/common2", + "bin/common1") + t.CreateFileLines("PLIST.common_end", + PlistCvsID, + "bin/common_end2", + "bin/common_end1") + t.FinishSetUp() - t.CheckOutputEmpty() + G.Check(".") + + t.CheckOutputLines( + "WARN: PLIST.common:3: \"bin/common1\" "+ + "should be sorted before \"bin/common2\".", + "WARN: PLIST.common_end:3: \"bin/common_end1\" "+ + "should be sorted before \"bin/common_end2\".") } func (s *Suite) Test_PlistChecker__autofix(c *check.C) { @@ -442,10 +458,10 @@ func (s *Suite) Test_PlistChecker_Load__common_end(c *check.C) { // the package being checked, for cross-validation. t.Check(ck.allFiles["bin/plist"], check.IsNil) t.CheckEquals( - ck.allFiles["bin/plist_common"].String(), + ck.allFiles["bin/plist_common"].Line.String(), "PLIST.common:2: bin/plist_common") t.CheckEquals( - ck.allFiles["bin/plist_common_end"].String(), + ck.allFiles["bin/plist_common_end"].Line.String(), "PLIST.common_end:2: bin/plist_common_end") } @@ -1239,6 +1255,77 @@ func (s *Suite) Test_PlistChecker_checkOmf__ok(c *check.C) { nil...) } +func (s *Suite) Test_PlistLine_Autofix(c *check.C) { + t := s.Init(c) + + line := t.NewLine("PLIST", 2, "bin/program") + pline := PlistLine{line, nil, line.Text} + + fix := pline.Autofix() + fix.Warnf("Warning %s.", "message") + fix.Replace("program", "new-name") + fix.Apply() + + t.CheckOutputLines( + "WARN: PLIST:2: Warning message.") +} + +func (s *Suite) Test_PlistLine_Errorf(c *check.C) { + t := s.Init(c) + + line := t.NewLine("PLIST", 2, "bin/program") + pline := PlistLine{line, nil, line.Text} + + pline.Errorf("Error %s.", "message") + + t.CheckOutputLines( + "ERROR: PLIST:2: Error message.") +} + +func (s *Suite) Test_PlistLine_Warnf(c *check.C) { + t := s.Init(c) + + line := t.NewLine("PLIST", 2, "bin/program") + pline := PlistLine{line, nil, line.Text} + + pline.Warnf("Warning %s.", "message") + + t.CheckOutputLines( + "WARN: PLIST:2: Warning message.") +} + +func (s *Suite) Test_PlistLine_Explain(c *check.C) { + t := s.Init(c) + + t.SetUpCommandLine("--explain") + line := t.NewLine("PLIST", 2, "bin/program") + pline := PlistLine{line, nil, line.Text} + + pline.Warnf("Warning %s.", "message") + pline.Explain( + "Line 1.", + "Line 2.") + + t.CheckOutputLines( + "WARN: PLIST:2: Warning message.", + "", + "\tLine 1. 
Line 2.", + "") +} + +func (s *Suite) Test_PlistLine_RelLine(c *check.C) { + t := s.Init(c) + + t.SetUpCommandLine("--explain") + line := t.NewLine("PLIST", 2, "bin/program") + pline := PlistLine{line, nil, line.Text} + + pline.Warnf("Warning, see %s.", line.RelLine(line)) + + t.CheckOutputLines( + "WARN: PLIST:2: Warning, see line 2.") +} + func (s *Suite) Test_PlistLine_HasPath(c *check.C) { t := s.Init(c) @@ -1507,12 +1594,12 @@ func (s *Suite) Test_PlistLines_Add(c *check.C) { for _, line := range plistLines { if line.HasPath() { - lines.Add(line, NewPlistRank(line.Basename)) + lines.Add(line, NewPlistRank(line.Line.Basename)) } } for _, line := range plistCommonLines { if line.HasPath() { - lines.Add(line, NewPlistRank(line.Basename)) + lines.Add(line, NewPlistRank(line.Line.Basename)) } } diff --git a/pkgtools/pkglint/files/shell.go b/pkgtools/pkglint/files/shell.go index 3bea2dc4543..55d055f99ca 100644 --- a/pkgtools/pkglint/files/shell.go +++ b/pkgtools/pkglint/files/shell.go @@ -278,7 +278,7 @@ func (scc *SimpleCommandChecker) checkAutoMkdirs() { autoMkdirs := false if scc.mklines.pkg != nil { plistLine := scc.mklines.pkg.Plist.Dirs[prefixRel] - if plistLine != nil && !containsVarUse(plistLine.Text) { + if plistLine != nil && !containsVarUse(plistLine.Line.Text) { autoMkdirs = true } } diff --git a/pkgtools/pkglint/files/shell.y b/pkgtools/pkglint/files/shell.y index c762f8160a7..defd790375d 100644 --- a/pkgtools/pkglint/files/shell.y +++ b/pkgtools/pkglint/files/shell.y @@ -224,6 +224,7 @@ case_item : case_selector compound_list tkSEMISEMI linebreak { $$ = &MkShCaseItem{$1, $2, sepNone, nil} } case_item : tkWORD { + // Special case for ${SKIP:@p@ ${p}) continue;; @} $$ = &MkShCaseItem{Var: $1} } diff --git a/pkgtools/pkglint/files/shell_test.go b/pkgtools/pkglint/files/shell_test.go index b6ff89b4aba..dccf12e47ba 100644 --- a/pkgtools/pkglint/files/shell_test.go +++ b/pkgtools/pkglint/files/shell_test.go @@ -5,6 +5,33 @@ import ( "strings" ) +func (s *Suite) Test_SimpleCommandChecker__case_continue_with_loop(c *check.C) { + t := s.Init(c) + + code := "case $$fname in ${CHECK_PORTABILITY_SKIP:@p@${p}) continue;; @} esac" + line := t.NewLine("filename.mk", 123, "\t"+code) + + program, err := parseShellProgram(line, code) + assertNil(err, "parse error") + t.CheckEquals( + program.AndOrs[0].Pipes[0].Cmds[0].Compound.Case.Cases[0].Var.MkText, + "${CHECK_PORTABILITY_SKIP:@p@${p}) continue;; @}") +} + +func (s *Suite) Test_SimpleCommandChecker__case_continue_with_suffix(c *check.C) { + t := s.Init(c) + + code := "case $$fname in ${CHECK_PORTABILITY_SKIP:=) continue;; } esac" + line := t.NewLine("filename.mk", 123, "\t"+code) + + program, err := parseShellProgram(line, code) + assertNil(err, "parse error: parse error at []string{\"esac\"}") + + t.CheckEquals( + program.AndOrs[0].Pipes[0].Cmds[0].Compound.Case.Cases[0].Var.MkText, + "${CHECK_PORTABILITY_SKIP:=) continue;; }") +} + // When pkglint is called without -Wextra, the check for unknown shell // commands is disabled, as it is still unreliable. As of December 2019 // there are around 500 warnings in pkgsrc, and several of them are wrong. 
diff --git a/pkgtools/pkglint/files/shtokenizer_test.go b/pkgtools/pkglint/files/shtokenizer_test.go index 92730815645..1075f17bfaf 100644 --- a/pkgtools/pkglint/files/shtokenizer_test.go +++ b/pkgtools/pkglint/files/shtokenizer_test.go @@ -140,10 +140,12 @@ func (s *Suite) Test_ShTokenizer_ShAtom(c *check.C) { operator := func(s string) *ShAtom { return atom(shtOperator, s) } comment := func(s string) *ShAtom { return atom(shtComment, s) } - mkvar := func(varname string, modifiers ...string) *ShAtom { + mkvar := func(varname string, modifiers ...MkVarUseModifier) *ShAtom { text := "${" + varname for _, modifier := range modifiers { - text += ":" + strings.Replace(strings.Replace(modifier, "\\", "\\\\", -1), ":", "\\:", -1) + escapedBackslash := strings.Replace(modifier.String(), "\\", "\\\\", -1) + escapedColon := strings.Replace(escapedBackslash, ":", "\\:", -1) + text += ":" + escapedColon // TODO: modifier.Quoted } text += "}" varuse := NewMkTokenBuilder().VarUse(varname, modifiers...) diff --git a/pkgtools/pkglint/files/vartypecheck.go b/pkgtools/pkglint/files/vartypecheck.go index 3cf8b45e3a0..15679d4703c 100644 --- a/pkgtools/pkglint/files/vartypecheck.go +++ b/pkgtools/pkglint/files/vartypecheck.go @@ -179,7 +179,7 @@ func (cv *VartypeCheck) Category() { "gnome", "gnustep", "japanese", "java", "kde", "korean", - "linux", "local", + "linux", "local", "lua", "perl5", "plan9", "python", "R", "ruby", "scm", @@ -580,11 +580,11 @@ func (cv *VartypeCheck) FetchURL() { } } - if len(varUse.modifiers) != 1 || !hasPrefix(varUse.modifiers[0].Text, "=") { + if len(varUse.modifiers) != 1 || !varUse.modifiers[0].HasPrefix("=") { continue } - subdir := varUse.modifiers[0].Text[1:] + subdir := varUse.modifiers[0].String()[1:] if !hasSuffix(subdir, "/") { cv.Errorf("The subdirectory in %s must end with a slash.", name) } @@ -1274,7 +1274,7 @@ func (cv *VartypeCheck) SedCommands() { } // The :C modifier is similar enough for parsing. - ok, _, from, _, _ := MkVarUseModifier{"C" + command[1:]}.MatchSubst() + ok, _, from, _, _ := MkVarUseModifier("C" + command[1:]).MatchSubst() if !ok { return } diff --git a/pkgtools/pkglint/files/vartypecheck_test.go b/pkgtools/pkglint/files/vartypecheck_test.go index 8c316ee1793..83fff1b5aeb 100644 --- a/pkgtools/pkglint/files/vartypecheck_test.go +++ b/pkgtools/pkglint/files/vartypecheck_test.go @@ -241,6 +241,7 @@ func (s *Suite) Test_VartypeCheck_Category(c *check.C) { "korean", "linux", "local", + "lua", "plan9", "R", "ruby", |

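The new checkPkgnameRedundant in package.go only gives the note when the assignment has no trailing comment, the value is the default ${DISTNAME}, and the RedundantScope has recorded at most one definition of PKGNAME. A self-contained sketch of that heuristic (hypothetical helper function, not the pkglint API):

package main

import "fmt"

// pkgnameNoteNeeded mirrors the conditions from checkPkgnameRedundant in a
// simplified form; writeCount stands in for len(WriteLocations()).
func pkgnameNoteNeeded(pkgname, distname string, hasComment bool, writeCount int) bool {
	if hasComment {
		return false // a trailing comment marks the assignment as intentional
	}
	if pkgname != distname && pkgname != "${DISTNAME}" {
		return false // not the default value, so not redundant
	}
	// A second definition, e.g. hs-${DISTNAME} from mk/haskell.mk, may
	// override the default, so the assignment is kept without a note.
	return writeCount < 2
}

func main() {
	fmt.Println(pkgnameNoteNeeded("${DISTNAME}", "pkg-1.0", false, 1)) // true
	fmt.Println(pkgnameNoteNeeded("${DISTNAME}", "pkg-1.0", false, 2)) // false
}

This is what the two new package_test.go tests exercise: the Haskell case sees two definitions and stays silent, while the default from mk/bsd.pkg.mk is not part of the RedundantScope, so the note is still given there.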
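The PlistLine changes in plist.go, plist_test.go and shell.go replace the embedded *Line with a named Line field plus explicit forwarding methods, which is why callers now write pline.Line.Text and pline.Line.Basename. A minimal sketch of the delegation pattern (types simplified, not the pkglint source):

package main

import "fmt"

type Line struct {
	Filename string
	Lineno   int
	Text     string
}

func (l *Line) Warnf(format string, args ...interface{}) {
	fmt.Printf("WARN: %s:%d: %s\n", l.Filename, l.Lineno, fmt.Sprintf(format, args...))
}

type PlistLine struct {
	Line *Line // previously an embedded *Line
	text string
}

// Warnf forwards to the underlying Line, as the new methods in plist.go do
// for Autofix, Errorf, Warnf, Explain and RelLine.
func (pline *PlistLine) Warnf(format string, args ...interface{}) {
	pline.Line.Warnf(format, args...)
}

func main() {
	line := &Line{"PLIST", 2, "bin/program"}
	pline := &PlistLine{line, line.Text}
	pline.Warnf("Duplicate filename %q.", pline.Line.Text)
	// Output: WARN: PLIST:2: Duplicate filename "bin/program".
}

The forwarding methods keep the diagnostic calls unchanged while making every other access to the underlying line explicit, as the new PlistLine tests demonstrate.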