Mon Jul 1 22:25:52 2019 UTC ()
pkgtools/pkglint: update to 5.7.15

Changes since 5.7.14:

* Added a check for packages that have been removed from the file system
  but whose removal has not been recorded in doc/CHANGES. This will help
  produce more accurate release statistics.

* Small refactorings, as usual.


(rillig)
diff -r1.586 -r1.587 pkgsrc/pkgtools/pkglint/Makefile
diff -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/autofix.go
diff -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/autofix_test.go
diff -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/pkgsrc_test.go
diff -r1.4 -r1.5 pkgsrc/pkgtools/pkglint/files/linelexer.go
diff -r1.51 -r1.52 pkgsrc/pkgtools/pkglint/files/mklines.go
diff -r1.30 -r1.31 pkgsrc/pkgtools/pkglint/files/mkparser.go
diff -r1.57 -r1.58 pkgsrc/pkgtools/pkglint/files/package.go
diff -r1.48 -r1.49 pkgsrc/pkgtools/pkglint/files/package_test.go
diff -r1.44 -r1.45 pkgsrc/pkgtools/pkglint/files/pkglint_test.go
diff -r1.29 -r1.30 pkgsrc/pkgtools/pkglint/files/pkgsrc.go
diff -r1.18 -r1.19 pkgsrc/pkgtools/pkglint/files/shtokenizer.go
diff -r1.27 -r1.28 pkgsrc/pkgtools/pkglint/files/substcontext_test.go
diff -r1.47 -r1.48 pkgsrc/pkgtools/pkglint/files/util.go

cvs diff -r1.586 -r1.587 pkgsrc/pkgtools/pkglint/Makefile (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/Makefile 2019/06/30 20:56:18 1.586
+++ pkgsrc/pkgtools/pkglint/Makefile 2019/07/01 22:25:52 1.587
@@ -1,16 +1,16 @@ @@ -1,16 +1,16 @@
1# $NetBSD: Makefile,v 1.586 2019/06/30 20:56:18 rillig Exp $ 1# $NetBSD: Makefile,v 1.587 2019/07/01 22:25:52 rillig Exp $
2 2
3PKGNAME= pkglint-5.7.14 3PKGNAME= pkglint-5.7.15
4CATEGORIES= pkgtools 4CATEGORIES= pkgtools
5DISTNAME= tools 5DISTNAME= tools
6MASTER_SITES= ${MASTER_SITE_GITHUB:=golang/} 6MASTER_SITES= ${MASTER_SITE_GITHUB:=golang/}
7GITHUB_PROJECT= tools 7GITHUB_PROJECT= tools
8GITHUB_TAG= 92d8274bd7b8a4c65f24bafe401a029e58392704 8GITHUB_TAG= 92d8274bd7b8a4c65f24bafe401a029e58392704
9 9
10MAINTAINER= rillig@NetBSD.org 10MAINTAINER= rillig@NetBSD.org
11HOMEPAGE= https://github.com/rillig/pkglint 11HOMEPAGE= https://github.com/rillig/pkglint
12COMMENT= Verifier for NetBSD packages 12COMMENT= Verifier for NetBSD packages
13LICENSE= 2-clause-bsd 13LICENSE= 2-clause-bsd
14CONFLICTS+= pkglint4-[0-9]* 14CONFLICTS+= pkglint4-[0-9]*
15 15
16USE_TOOLS+= pax 16USE_TOOLS+= pax

cvs diff -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/Attic/autofix.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/autofix.go 2019/06/30 20:56:18 1.25
+++ pkgsrc/pkgtools/pkglint/files/Attic/autofix.go 2019/07/01 22:25:52 1.26
@@ -42,29 +42,26 @@ type autofixAction struct { @@ -42,29 +42,26 @@ type autofixAction struct {
42// autofix action is not directly related to a diagnostic. 42// autofix action is not directly related to a diagnostic.
43// 43//
44// To prevent confusion, the code using this magic value must ensure 44// To prevent confusion, the code using this magic value must ensure
45// to log a diagnostic by other means. 45// to log a diagnostic by other means.
46const SilentAutofixFormat = "SilentAutofixFormat" 46const SilentAutofixFormat = "SilentAutofixFormat"
47 47
48// AutofixFormat is a special value that is used for logging 48// AutofixFormat is a special value that is used for logging
49// diagnostics like "Replacing \"old\" with \"new\".". 49// diagnostics like "Replacing \"old\" with \"new\".".
50// 50//
51// Since these are not really diagnostics, duplicates are not suppressed. 51// Since these are not really diagnostics, duplicates are not suppressed.
52const AutofixFormat = "AutofixFormat" 52const AutofixFormat = "AutofixFormat"
53 53
54func NewAutofix(line *Line) *Autofix { 54func NewAutofix(line *Line) *Autofix {
55 // FIXME: replacing the returned value with 
56 // &Autofix{line: line, autofixShortTerm: autofixShortTerm{anyway: true}} 
57 // makes some tests output source code without diagnostic. 
58 return &Autofix{line: line} 55 return &Autofix{line: line}
59} 56}
60 57
61// Errorf remembers the error for logging it later when Apply is called. 58// Errorf remembers the error for logging it later when Apply is called.
62func (fix *Autofix) Errorf(format string, args ...interface{}) { 59func (fix *Autofix) Errorf(format string, args ...interface{}) {
63 fix.setDiag(Error, format, args) 60 fix.setDiag(Error, format, args)
64} 61}
65 62
66// Warnf remembers the warning for logging it later when Apply is called. 63// Warnf remembers the warning for logging it later when Apply is called.
67func (fix *Autofix) Warnf(format string, args ...interface{}) { 64func (fix *Autofix) Warnf(format string, args ...interface{}) {
68 fix.setDiag(Warn, format, args) 65 fix.setDiag(Warn, format, args)
69} 66}
70 67

cvs diff -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/Attic/autofix_test.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/autofix_test.go 2019/06/30 20:56:18 1.25
+++ pkgsrc/pkgtools/pkglint/files/Attic/autofix_test.go 2019/07/01 22:25:52 1.26
@@ -978,26 +978,51 @@ func (s *Suite) Test_Autofix_Apply__anyw @@ -978,26 +978,51 @@ func (s *Suite) Test_Autofix_Apply__anyw
978 978
979 fix := mklines.mklines[1].Autofix() 979 fix := mklines.mklines[1].Autofix()
980 fix.Errorf("From must be To.") 980 fix.Errorf("From must be To.")
981 fix.Replace("from", "to") 981 fix.Replace("from", "to")
982 fix.Anyway() 982 fix.Anyway()
983 fix.Apply() 983 fix.Apply()
984 984
985 mklines.SaveAutofixChanges() 985 mklines.SaveAutofixChanges()
986 986
987 t.Check(G.Logger.errors, equals, 0) 987 t.Check(G.Logger.errors, equals, 0)
988 t.CheckOutputEmpty() 988 t.CheckOutputEmpty()
989} 989}
990 990
 991func (s *Suite) Test_Autofix_Apply__source_autofix_no_change(c *check.C) {
 992 t := s.Init(c)
 993
 994 t.SetUpCommandLine("--autofix", "--source")
 995 lines := t.SetUpFileLines("filename",
 996 "word word word")
 997
 998 fix := lines.Lines[0].Autofix()
 999 fix.Notef("Word should be replaced, but pkglint is not sure which one.")
 1000 fix.Replace("word", "replacement")
 1001 fix.Anyway()
 1002 fix.Apply()
 1003
 1004 lines.SaveAutofixChanges()
 1005
 1006 // Nothing is replaced since, as of June 2019, pkglint doesn't
 1007 // know which of the three "word" should be replaced.
 1008 //
 1009 // The note is not logged since fix.Anyway only applies when neither
 1010 // --show-autofix nor --autofix is given in the command line.
 1011 t.CheckOutputEmpty()
 1012 t.CheckFileLines("filename",
 1013 "word word word")
 1014}
 1015
991// Ensures that without explanations, the separator between the individual 1016// Ensures that without explanations, the separator between the individual
992// diagnostics are generated. 1017// diagnostics are generated.
993func (s *Suite) Test_Autofix_Apply__source_without_explain(c *check.C) { 1018func (s *Suite) Test_Autofix_Apply__source_without_explain(c *check.C) {
994 t := s.Init(c) 1019 t := s.Init(c)
995 1020
996 t.SetUpCommandLine("--source", "--explain", "--show-autofix") 1021 t.SetUpCommandLine("--source", "--explain", "--show-autofix")
997 line := t.NewLine("filename", 5, "text") 1022 line := t.NewLine("filename", 5, "text")
998 1023
999 fix := line.Autofix() 1024 fix := line.Autofix()
1000 fix.Notef("This line is quite short.") 1025 fix.Notef("This line is quite short.")
1001 fix.Replace("text", "replacement") 1026 fix.Replace("text", "replacement")
1002 fix.Apply() 1027 fix.Apply()
1003 1028

cvs diff -r1.25 -r1.26 pkgsrc/pkgtools/pkglint/files/Attic/pkgsrc_test.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/pkgsrc_test.go 2019/06/30 20:56:19 1.25
+++ pkgsrc/pkgtools/pkglint/files/Attic/pkgsrc_test.go 2019/07/01 22:25:52 1.26
@@ -249,26 +249,65 @@ func (s *Suite) Test_Pkgsrc_loadDocChang @@ -249,26 +249,65 @@ func (s *Suite) Test_Pkgsrc_loadDocChang
249 249
250 t.SetUpPkgsrc() 250 t.SetUpPkgsrc()
251 t.CreateFileLines("doc/CHANGES-2018", 251 t.CreateFileLines("doc/CHANGES-2018",
252 CvsID, 252 CvsID,
253 "", 253 "",
254 "\tUpdated pkgpath to 1.0 [author 2018-01-01]", 254 "\tUpdated pkgpath to 1.0 [author 2018-01-01]",
255 "\tRenamed pkgpath to new-pkg [author 2018-02-01]", 255 "\tRenamed pkgpath to new-pkg [author 2018-02-01]",
256 "\tMoved pkgpath to category/new-pkg [author 2018-03-01]") 256 "\tMoved pkgpath to category/new-pkg [author 2018-03-01]")
257 t.FinishSetUp() 257 t.FinishSetUp()
258 258
259 t.Check(G.Pkgsrc.LastChange["pkgpath"].Action, equals, Moved) 259 t.Check(G.Pkgsrc.LastChange["pkgpath"].Action, equals, Moved)
260} 260}
261 261
 262func (s *Suite) Test_Pkgsrc_checkRemovedAfterLastFreeze(c *check.C) {
 263 t := s.Init(c)
 264
 265 t.SetUpCommandLine("-Wall", "--source")
 266 t.SetUpPkgsrc()
 267 t.CreateFileLines("doc/CHANGES-2019",
 268 CvsID,
 269 "",
 270 "\tUpdated category/updated-before to 1.0 [updater 2019-04-01]",
 271 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2019Q1 branch [freezer 2019-06-21]",
 272 "\tmk/bsd.pkg.mk: freeze ended for pkgsrc-2019Q1 branch [freezer 2019-06-25]",
 273 "\tUpdated category/updated-after to 1.0 [updater 2019-07-01]",
 274 "\tAdded category/added-after version 1.0 [updater 2019-07-01]",
 275 "\tMoved category/moved-from to category/moved-to [author 2019-07-02]",
 276 "\tDowngraded category/downgraded to 1.0 [author 2019-07-03]")
 277 t.FinishSetUp()
 278
 279 // It doesn't matter whether the last visible package change was before
 280 // or after the latest freeze. The crucial point is that the most
 281 // interesting change is the invisible one, which is the removal.
 282 // And for finding the removal reliably, it doesn't matter how long ago
 283 // the last package change was.
 284
 285 // The empty lines in the following output demonstrate the cheating
 286 // by creating fake lines from Change.Location.
 287 t.CheckOutputLines(
 288 "ERROR: ~/doc/CHANGES-2019:3: Package category/updated-before "+
 289 "must either exist or be marked as removed.",
 290 "",
 291 "ERROR: ~/doc/CHANGES-2019:6: Package category/updated-after "+
 292 "must either exist or be marked as removed.",
 293 "",
 294 "ERROR: ~/doc/CHANGES-2019:7: Package category/added-after "+
 295 "must either exist or be marked as removed.",
 296 "",
 297 "ERROR: ~/doc/CHANGES-2019:9: Package category/downgraded "+
 298 "must either exist or be marked as removed.")
 299}
 300
262func (s *Suite) Test_Pkgsrc_loadDocChanges__not_found(c *check.C) { 301func (s *Suite) Test_Pkgsrc_loadDocChanges__not_found(c *check.C) {
263 t := s.Init(c) 302 t := s.Init(c)
264 303
265 t.SetUpPkgsrc() 304 t.SetUpPkgsrc()
266 t.Remove("doc/CHANGES-2018") 305 t.Remove("doc/CHANGES-2018")
267 t.Remove("doc/TODO") 306 t.Remove("doc/TODO")
268 t.Remove("doc") 307 t.Remove("doc")
269 308
270 t.ExpectFatal( 309 t.ExpectFatal(
271 t.FinishSetUp, 310 t.FinishSetUp,
272 "FATAL: ~/doc: Cannot be read for loading the package changes.") 311 "FATAL: ~/doc: Cannot be read for loading the package changes.")
273} 312}
274 313
@@ -915,51 +954,52 @@ func (s *Suite) Test_Pkgsrc_guessVariabl @@ -915,51 +954,52 @@ func (s *Suite) Test_Pkgsrc_guessVariabl
915 t.CheckOutputLines( 954 t.CheckOutputLines(
916 "WARN: filename.mk:2: The pathname pattern \"\\\"bad*pathname\\\"\" " + 955 "WARN: filename.mk:2: The pathname pattern \"\\\"bad*pathname\\\"\" " +
917 "contains the invalid characters \"\\\"\\\"\".") 956 "contains the invalid characters \"\\\"\\\"\".")
918} 957}
919 958
920func (s *Suite) Test_Pkgsrc__frozen(c *check.C) { 959func (s *Suite) Test_Pkgsrc__frozen(c *check.C) {
921 t := s.Init(c) 960 t := s.Init(c)
922 961
923 t.SetUpPackage("category/package") 962 t.SetUpPackage("category/package")
924 t.CreateFileLines("doc/CHANGES-2018", 963 t.CreateFileLines("doc/CHANGES-2018",
925 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2018Q2 branch [freezer 2018-03-25]") 964 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2018Q2 branch [freezer 2018-03-25]")
926 t.FinishSetUp() 965 t.FinishSetUp()
927 966
928 t.Check(G.Pkgsrc.FreezeStart, equals, "2018-03-25") 967 t.Check(G.Pkgsrc.LastFreezeStart, equals, "2018-03-25")
929} 968}
930 969
931func (s *Suite) Test_Pkgsrc__not_frozen(c *check.C) { 970func (s *Suite) Test_Pkgsrc__not_frozen(c *check.C) {
932 t := s.Init(c) 971 t := s.Init(c)
933 972
934 t.SetUpPackage("category/package") 973 t.SetUpPackage("category/package")
935 t.CreateFileLines("doc/CHANGES-2018", 974 t.CreateFileLines("doc/CHANGES-2018",
936 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2018Q2 branch [freezer 2018-03-25]", 975 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2018Q2 branch [freezer 2018-03-25]",
937 "\tmk/bsd.pkg.mk: freeze ended for pkgsrc-2018Q2 branch [freezer 2018-03-27]") 976 "\tmk/bsd.pkg.mk: freeze ended for pkgsrc-2018Q2 branch [freezer 2018-03-27]")
938 t.FinishSetUp() 977 t.FinishSetUp()
939 978
940 t.Check(G.Pkgsrc.FreezeStart, equals, "") 979 t.Check(G.Pkgsrc.LastFreezeStart, equals, "2018-03-25")
 980 t.Check(G.Pkgsrc.LastFreezeEnd, equals, "2018-03-27")
941} 981}
942 982
943func (s *Suite) Test_Pkgsrc__frozen_with_typo(c *check.C) { 983func (s *Suite) Test_Pkgsrc__frozen_with_typo(c *check.C) {
944 t := s.Init(c) 984 t := s.Init(c)
945 985
946 t.SetUpPackage("category/package") 986 t.SetUpPackage("category/package")
947 t.CreateFileLines("doc/CHANGES-2018", 987 t.CreateFileLines("doc/CHANGES-2018",
948 // The closing bracket is missing. 988 // The closing bracket is missing.
949 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2018Q2 branch [freezer 2018-03-25") 989 "\tmk/bsd.pkg.mk: started freeze for pkgsrc-2018Q2 branch [freezer 2018-03-25")
950 t.FinishSetUp() 990 t.FinishSetUp()
951 991
952 t.Check(G.Pkgsrc.FreezeStart, equals, "") 992 t.Check(G.Pkgsrc.LastFreezeStart, equals, "")
953} 993}
954 994
955func (s *Suite) Test_Change_Version(c *check.C) { 995func (s *Suite) Test_Change_Version(c *check.C) {
956 t := s.Init(c) 996 t := s.Init(c)
957 997
958 loc := Location{"doc/CHANGES-2019", 5, 5} 998 loc := Location{"doc/CHANGES-2019", 5, 5}
959 added := Change{loc, Added, "category/path", "1.0", "author", "2019-01-01"} 999 added := Change{loc, Added, "category/path", "1.0", "author", "2019-01-01"}
960 updated := Change{loc, Updated, "category/path", "1.0", "author", "2019-01-01"} 1000 updated := Change{loc, Updated, "category/path", "1.0", "author", "2019-01-01"}
961 downgraded := Change{loc, Downgraded, "category/path", "1.0", "author", "2019-01-01"} 1001 downgraded := Change{loc, Downgraded, "category/path", "1.0", "author", "2019-01-01"}
962 removed := Change{loc, Removed, "category/path", "1.0", "author", "2019-01-01"} 1002 removed := Change{loc, Removed, "category/path", "1.0", "author", "2019-01-01"}
963 1003
964 t.Check(added.Version(), equals, "1.0") 1004 t.Check(added.Version(), equals, "1.0")
965 t.Check(updated.Version(), equals, "1.0") 1005 t.Check(updated.Version(), equals, "1.0")

cvs diff -r1.4 -r1.5 pkgsrc/pkgtools/pkglint/files/Attic/linelexer.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/linelexer.go 2019/06/30 20:56:19 1.4
+++ pkgsrc/pkgtools/pkglint/files/Attic/linelexer.go 2019/07/01 22:25:52 1.5
@@ -38,27 +38,27 @@ func (llex *LinesLexer) Skip() bool { @@ -38,27 +38,27 @@ func (llex *LinesLexer) Skip() bool {
38 return true 38 return true
39} 39}
40 40
41func (llex *LinesLexer) Undo() { 41func (llex *LinesLexer) Undo() {
42 llex.index-- 42 llex.index--
43} 43}
44 44
45func (llex *LinesLexer) NextRegexp(re regex.Pattern) []string { 45func (llex *LinesLexer) NextRegexp(re regex.Pattern) []string {
46 if trace.Tracing { 46 if trace.Tracing {
47 defer trace.Call(llex.CurrentLine().Text, re)() 47 defer trace.Call(llex.CurrentLine().Text, re)()
48 } 48 }
49 49
50 if !llex.EOF() { 50 if !llex.EOF() {
51 if m := G.res.Match(llex.lines.Lines[llex.index].Text, re); m != nil { 51 if m := match(llex.lines.Lines[llex.index].Text, re); m != nil {
52 llex.index++ 52 llex.index++
53 return m 53 return m
54 } 54 }
55 } 55 }
56 return nil 56 return nil
57} 57}
58 58
59func (llex *LinesLexer) SkipRegexp(re regex.Pattern) bool { 59func (llex *LinesLexer) SkipRegexp(re regex.Pattern) bool {
60 return llex.NextRegexp(re) != nil 60 return llex.NextRegexp(re) != nil
61} 61}
62 62
63func (llex *LinesLexer) SkipPrefix(prefix string) bool { 63func (llex *LinesLexer) SkipPrefix(prefix string) bool {
64 if trace.Tracing { 64 if trace.Tracing {

cvs diff -r1.51 -r1.52 pkgsrc/pkgtools/pkglint/files/Attic/mklines.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/mklines.go 2019/06/30 20:56:19 1.51
+++ pkgsrc/pkgtools/pkglint/files/Attic/mklines.go 2019/07/01 22:25:52 1.52
@@ -441,27 +441,27 @@ func (mklines *MkLines) collectDocumente @@ -441,27 +441,27 @@ func (mklines *MkLines) collectDocumente
441 words := strings.Fields(text) 441 words := strings.Fields(text)
442 if len(words) <= 1 { 442 if len(words) <= 1 {
443 break 443 break
444 } 444 }
445 445
446 commentLines++ 446 commentLines++
447 447
448 parser := NewMkParser(nil, words[1], false) 448 parser := NewMkParser(nil, words[1], false)
449 varname := parser.Varname() 449 varname := parser.Varname()
450 if len(varname) < 3 { 450 if len(varname) < 3 {
451 break 451 break
452 } 452 }
453 if hasSuffix(varname, ".") { 453 if hasSuffix(varname, ".") {
454 if !parser.lexer.SkipRegexp(G.res.Compile(`^<\w+>`)) { 454 if !parser.lexer.SkipRegexp(regcomp(`^<\w+>`)) {
455 break 455 break
456 } 456 }
457 varname += "*" 457 varname += "*"
458 } 458 }
459 parser.lexer.SkipByte(':') 459 parser.lexer.SkipByte(':')
460 460
461 varcanon := varnameCanon(varname) 461 varcanon := varnameCanon(varname)
462 if varcanon == strings.ToUpper(varcanon) && matches(varcanon, `[A-Z]`) && parser.EOF() { 462 if varcanon == strings.ToUpper(varcanon) && matches(varcanon, `[A-Z]`) && parser.EOF() {
463 scope.Define(varcanon, mkline) 463 scope.Define(varcanon, mkline)
464 scope.Use(varcanon, mkline, VucRunTime) 464 scope.Use(varcanon, mkline, VucRunTime)
465 } 465 }
466 466
467 if words[1] == "Copyright" { 467 if words[1] == "Copyright" {

cvs diff -r1.30 -r1.31 pkgsrc/pkgtools/pkglint/files/Attic/mkparser.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/mkparser.go 2019/06/30 20:56:19 1.30
+++ pkgsrc/pkgtools/pkglint/files/Attic/mkparser.go 2019/07/01 22:25:52 1.31
@@ -241,85 +241,85 @@ func (p *MkParser) varUseModifier(varnam @@ -241,85 +241,85 @@ func (p *MkParser) varUseModifier(varnam
241 if p.EmitWarnings { 241 if p.EmitWarnings {
242 p.Line.Warnf("Invalid separator %q for :ts modifier of %q.", sep, varname) 242 p.Line.Warnf("Invalid separator %q for :ts modifier of %q.", sep, varname)
243 p.Line.Explain( 243 p.Line.Explain(
244 "The separator for the :ts modifier must be either a single character", 244 "The separator for the :ts modifier must be either a single character",
245 "or an escape sequence like \\t or \\n or an octal or decimal escape", 245 "or an escape sequence like \\t or \\n or an octal or decimal escape",
246 "sequence; see the bmake man page for further details.") 246 "sequence; see the bmake man page for further details.")
247 } 247 }
248 } 248 }
249 return lexer.Since(mark) 249 return lexer.Since(mark)
250 } 250 }
251 251
252 case '=', 'D', 'M', 'N', 'U': 252 case '=', 'D', 'M', 'N', 'U':
253 lexer.Skip(1) 253 lexer.Skip(1)
254 re := G.res.Compile(regex.Pattern(ifelseStr(closing == '}', `^([^$:\\}]|\$\$|\\.)+`, `^([^$:\\)]|\$\$|\\.)+`))) 254 re := regcomp(regex.Pattern(ifelseStr(closing == '}', `^([^$:\\}]|\$\$|\\.)+`, `^([^$:\\)]|\$\$|\\.)+`)))
255 for p.VarUse() != nil || lexer.SkipRegexp(re) { 255 for p.VarUse() != nil || lexer.SkipRegexp(re) {
256 } 256 }
257 arg := lexer.Since(mark) 257 arg := lexer.Since(mark)
258 return strings.Replace(arg, "\\:", ":", -1) 258 return strings.Replace(arg, "\\:", ":", -1)
259 259
260 case 'C', 'S': 260 case 'C', 'S':
261 if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok { 261 if ok, _, _, _, _ := p.varUseModifierSubst(closing); ok {
262 return lexer.Since(mark) 262 return lexer.Since(mark)
263 } 263 }
264 264
265 case '@': 265 case '@':
266 if p.varUseModifierAt(lexer, varname) { 266 if p.varUseModifierAt(lexer, varname) {
267 return lexer.Since(mark) 267 return lexer.Since(mark)
268 } 268 }
269 269
270 case '[': 270 case '[':
271 if lexer.SkipRegexp(G.res.Compile(`^\[(?:[-.\d]+|#)\]`)) { 271 if lexer.SkipRegexp(regcomp(`^\[(?:[-.\d]+|#)\]`)) {
272 return lexer.Since(mark) 272 return lexer.Since(mark)
273 } 273 }
274 274
275 case '?': 275 case '?':
276 lexer.Skip(1) 276 lexer.Skip(1)
277 p.varUseText(closing) 277 p.varUseText(closing)
278 if lexer.SkipByte(':') { 278 if lexer.SkipByte(':') {
279 p.varUseText(closing) 279 p.varUseText(closing)
280 return lexer.Since(mark) 280 return lexer.Since(mark)
281 } 281 }
282 } 282 }
283 283
284 lexer.Reset(mark) 284 lexer.Reset(mark)
285 285
286 re := G.res.Compile(regex.Pattern(ifelseStr(closing == '}', `^([^:$}]|\$\$)+`, `^([^:$)]|\$\$)+`))) 286 re := regcomp(regex.Pattern(ifelseStr(closing == '}', `^([^:$}]|\$\$)+`, `^([^:$)]|\$\$)+`)))
287 for p.VarUse() != nil || lexer.SkipRegexp(re) { 287 for p.VarUse() != nil || lexer.SkipRegexp(re) {
288 } 288 }
289 modifier := lexer.Since(mark) 289 modifier := lexer.Since(mark)
290 290
291 // ${SOURCES:%.c=%.o} or ${:!uname -a:[2]} 291 // ${SOURCES:%.c=%.o} or ${:!uname -a:[2]}
292 if contains(modifier, "=") || (hasPrefix(modifier, "!") && hasSuffix(modifier, "!")) { 292 if contains(modifier, "=") || (hasPrefix(modifier, "!") && hasSuffix(modifier, "!")) {
293 return modifier 293 return modifier
294 } 294 }
295 295
296 if p.EmitWarnings && modifier != "" { 296 if p.EmitWarnings && modifier != "" {
297 p.Line.Warnf("Invalid variable modifier %q for %q.", modifier, varname) 297 p.Line.Warnf("Invalid variable modifier %q for %q.", modifier, varname)
298 } 298 }
299 299
300 return "" 300 return ""
301} 301}
302 302
303// varUseText parses any text up to the next colon or closing mark. 303// varUseText parses any text up to the next colon or closing mark.
304// Nested variable uses are parsed as well. 304// Nested variable uses are parsed as well.
305// 305//
306// This is used for the :L and :? modifiers since they accept arbitrary 306// This is used for the :L and :? modifiers since they accept arbitrary
307// text as the "variable name" and effectively interpret it as the variable 307// text as the "variable name" and effectively interpret it as the variable
308// value instead. 308// value instead.
309func (p *MkParser) varUseText(closing byte) string { 309func (p *MkParser) varUseText(closing byte) string {
310 lexer := p.lexer 310 lexer := p.lexer
311 start := lexer.Mark() 311 start := lexer.Mark()
312 re := G.res.Compile(regex.Pattern(ifelseStr(closing == '}', `^([^$:}]|\$\$)+`, `^([^$:)]|\$\$)+`))) 312 re := regcomp(regex.Pattern(ifelseStr(closing == '}', `^([^$:}]|\$\$)+`, `^([^$:)]|\$\$)+`)))
313 for p.VarUse() != nil || lexer.SkipRegexp(re) { 313 for p.VarUse() != nil || lexer.SkipRegexp(re) {
314 } 314 }
315 return lexer.Since(start) 315 return lexer.Since(start)
316} 316}
317 317
318// varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier. 318// varUseModifierSubst parses a :S,from,to, or a :C,from,to, modifier.
319func (p *MkParser) varUseModifierSubst(closing byte) (ok bool, regex bool, from string, to string, options string) { 319func (p *MkParser) varUseModifierSubst(closing byte) (ok bool, regex bool, from string, to string, options string) {
320 lexer := p.lexer 320 lexer := p.lexer
321 regex = lexer.PeekByte() == 'C' 321 regex = lexer.PeekByte() == 'C'
322 lexer.Skip(1 /* the initial S or C */) 322 lexer.Skip(1 /* the initial S or C */)
323 323
324 sep := lexer.PeekByte() // bmake allows _any_ separator, even letters. 324 sep := lexer.PeekByte() // bmake allows _any_ separator, even letters.
325 if sep == -1 || byte(sep) == closing { 325 if sep == -1 || byte(sep) == closing {
@@ -381,27 +381,27 @@ func (p *MkParser) varUseModifierSubst(c @@ -381,27 +381,27 @@ func (p *MkParser) varUseModifierSubst(c
381 return 381 return
382} 382}
383 383
384// varUseModifierAt parses a variable modifier like ":@v@echo ${v};@", 384// varUseModifierAt parses a variable modifier like ":@v@echo ${v};@",
385// which expands the variable value in a loop. 385// which expands the variable value in a loop.
386func (p *MkParser) varUseModifierAt(lexer *textproc.Lexer, varname string) bool { 386func (p *MkParser) varUseModifierAt(lexer *textproc.Lexer, varname string) bool {
387 lexer.Skip(1 /* the initial @ */) 387 lexer.Skip(1 /* the initial @ */)
388 388
389 loopVar := lexer.NextBytesSet(AlnumDot) 389 loopVar := lexer.NextBytesSet(AlnumDot)
390 if loopVar == "" || !lexer.SkipByte('@') { 390 if loopVar == "" || !lexer.SkipByte('@') {
391 return false 391 return false
392 } 392 }
393 393
394 re := G.res.Compile(`^([^$@\\]|\\.)+`) 394 re := regcomp(`^([^$@\\]|\\.)+`)
395 for p.VarUse() != nil || lexer.SkipString("$$") || lexer.SkipRegexp(re) { 395 for p.VarUse() != nil || lexer.SkipString("$$") || lexer.SkipRegexp(re) {
396 } 396 }
397 397
398 if !lexer.SkipByte('@') && p.EmitWarnings { 398 if !lexer.SkipByte('@') && p.EmitWarnings {
399 p.Line.Warnf("Modifier ${%s:@%s@...@} is missing the final \"@\".", varname, loopVar) 399 p.Line.Warnf("Modifier ${%s:@%s@...@} is missing the final \"@\".", varname, loopVar)
400 } 400 }
401 401
402 return true 402 return true
403} 403}
404 404
405// MkCond parses a condition like ${OPSYS} == "NetBSD". 405// MkCond parses a condition like ${OPSYS} == "NetBSD".
406// 406//
407// See devel/bmake/files/cond.c. 407// See devel/bmake/files/cond.c.
@@ -488,39 +488,39 @@ func (p *MkParser) mkCondAtom() *MkCond  @@ -488,39 +488,39 @@ func (p *MkParser) mkCondAtom() *MkCond
488 lhs := p.VarUse() 488 lhs := p.VarUse()
489 mark := lexer.Mark() 489 mark := lexer.Mark()
490 if lhs == nil && lexer.SkipByte('"') { 490 if lhs == nil && lexer.SkipByte('"') {
491 if quotedLHS := p.VarUse(); quotedLHS != nil && lexer.SkipByte('"') { 491 if quotedLHS := p.VarUse(); quotedLHS != nil && lexer.SkipByte('"') {
492 lhs = quotedLHS 492 lhs = quotedLHS
493 } else { 493 } else {
494 lexer.Reset(mark) 494 lexer.Reset(mark)
495 } 495 }
496 } 496 }
497 497
498 if lhs != nil { 498 if lhs != nil {
499 lexer.SkipHspace() 499 lexer.SkipHspace()
500 500
501 if m := lexer.NextRegexp(G.res.Compile(`^(<|<=|==|!=|>=|>)[\t ]*(0x[0-9A-Fa-f]+|\d+(?:\.\d+)?)`)); m != nil { 501 if m := lexer.NextRegexp(regcomp(`^(<|<=|==|!=|>=|>)[\t ]*(0x[0-9A-Fa-f]+|\d+(?:\.\d+)?)`)); m != nil {
502 return &MkCond{CompareVarNum: &MkCondCompareVarNum{lhs, m[1], m[2]}} 502 return &MkCond{CompareVarNum: &MkCondCompareVarNum{lhs, m[1], m[2]}}
503 } 503 }
504 504
505 m := lexer.NextRegexp(G.res.Compile(`^(?:<|<=|==|!=|>=|>)`)) 505 m := lexer.NextRegexp(regcomp(`^(?:<|<=|==|!=|>=|>)`))
506 if m == nil { 506 if m == nil {
507 return &MkCond{Var: lhs} // See devel/bmake/files/cond.c:/\* For \.if \$/ 507 return &MkCond{Var: lhs} // See devel/bmake/files/cond.c:/\* For \.if \$/
508 } 508 }
509 lexer.SkipHspace() 509 lexer.SkipHspace()
510 510
511 op := m[0] 511 op := m[0]
512 if op == "==" || op == "!=" { 512 if op == "==" || op == "!=" {
513 if mrhs := lexer.NextRegexp(G.res.Compile(`^"([^"\$\\]*)"`)); mrhs != nil { 513 if mrhs := lexer.NextRegexp(regcomp(`^"([^"\$\\]*)"`)); mrhs != nil {
514 return &MkCond{CompareVarStr: &MkCondCompareVarStr{lhs, op, mrhs[1]}} 514 return &MkCond{CompareVarStr: &MkCondCompareVarStr{lhs, op, mrhs[1]}}
515 } 515 }
516 } 516 }
517 517
518 if str := lexer.NextBytesSet(textproc.AlnumU); str != "" { 518 if str := lexer.NextBytesSet(textproc.AlnumU); str != "" {
519 return &MkCond{CompareVarStr: &MkCondCompareVarStr{lhs, op, str}} 519 return &MkCond{CompareVarStr: &MkCondCompareVarStr{lhs, op, str}}
520 } 520 }
521 521
522 if rhs := p.VarUse(); rhs != nil { 522 if rhs := p.VarUse(); rhs != nil {
523 return &MkCond{CompareVarVar: &MkCondCompareVarVar{lhs, op, rhs}} 523 return &MkCond{CompareVarVar: &MkCondCompareVarVar{lhs, op, rhs}}
524 } 524 }
525 525
526 if lexer.PeekByte() == '"' { 526 if lexer.PeekByte() == '"' {
@@ -549,27 +549,27 @@ func (p *MkParser) mkCondAtom() *MkCond  @@ -549,27 +549,27 @@ func (p *MkParser) mkCondAtom() *MkCond
549 rhsText.WriteByte(lexer.Since(m)[1]) 549 rhsText.WriteByte(lexer.Since(m)[1])
550 550
551 case lexer.SkipByte('"'): 551 case lexer.SkipByte('"'):
552 return &MkCond{CompareVarStr: &MkCondCompareVarStr{lhs, op, rhsText.String()}} 552 return &MkCond{CompareVarStr: &MkCondCompareVarStr{lhs, op, rhsText.String()}}
553 default: 553 default:
554 break loop 554 break loop
555 } 555 }
556 } 556 }
557 lexer.Reset(mark) 557 lexer.Reset(mark)
558 } 558 }
559 } 559 }
560 560
561 // See devel/bmake/files/cond.c:/^CondCvtArg 561 // See devel/bmake/files/cond.c:/^CondCvtArg
562 if m := lexer.NextRegexp(G.res.Compile(`^(?:0x[0-9A-Fa-f]+|\d+(?:\.\d+)?)`)); m != nil { 562 if m := lexer.NextRegexp(regcomp(`^(?:0x[0-9A-Fa-f]+|\d+(?:\.\d+)?)`)); m != nil {
563 return &MkCond{Num: m[0]} 563 return &MkCond{Num: m[0]}
564 } 564 }
565 } 565 }
566 lexer.Reset(mark) 566 lexer.Reset(mark)
567 return nil 567 return nil
568} 568}
569 569
570func (p *MkParser) mkCondFunc() *MkCond { 570func (p *MkParser) mkCondFunc() *MkCond {
571 lexer := p.lexer 571 lexer := p.lexer
572 mark := lexer.Mark() 572 mark := lexer.Mark()
573 573
574 funcName := lexer.NextBytesSet(textproc.Lower) 574 funcName := lexer.NextBytesSet(textproc.Lower)
575 lexer.SkipHspace() 575 lexer.SkipHspace()
@@ -642,28 +642,28 @@ func (*MkParser) isPkgbasePart(str strin @@ -642,28 +642,28 @@ func (*MkParser) isPkgbasePart(str strin
642 return !contains(varUse.varname, "VER") && len(varUse.modifiers) == 0 642 return !contains(varUse.varname, "VER") && len(varUse.modifiers) == 0
643 } 643 }
644 644
645 return false 645 return false
646} 646}
647 647
648func (p *MkParser) PkgbasePattern() string { 648func (p *MkParser) PkgbasePattern() string {
649 649
650 lexer := p.lexer 650 lexer := p.lexer
651 start := lexer.Mark() 651 start := lexer.Mark()
652 652
653 for { 653 for {
654 if p.VarUse() != nil || 654 if p.VarUse() != nil ||
655 lexer.SkipRegexp(G.res.Compile(`^[\w.*+,{}]+`)) || 655 lexer.SkipRegexp(regcomp(`^[\w.*+,{}]+`)) ||
656 lexer.SkipRegexp(G.res.Compile(`^\[[\w-]+\]`)) { 656 lexer.SkipRegexp(regcomp(`^\[[\w-]+\]`)) {
657 continue 657 continue
658 } 658 }
659 659
660 if lexer.PeekByte() == '-' && p.isPkgbasePart(lexer.Rest()[1:]) { 660 if lexer.PeekByte() == '-' && p.isPkgbasePart(lexer.Rest()[1:]) {
661 lexer.Skip(1) 661 lexer.Skip(1)
662 } else { 662 } else {
663 break 663 break
664 } 664 }
665 } 665 }
666 666
667 pkgbase := lexer.Since(start) 667 pkgbase := lexer.Since(start)
668 if strings.Count(pkgbase, "{") == strings.Count(pkgbase, "}") { 668 if strings.Count(pkgbase, "{") == strings.Count(pkgbase, "}") {
669 return pkgbase 669 return pkgbase
@@ -686,27 +686,27 @@ type DependencyPattern struct { @@ -686,27 +686,27 @@ type DependencyPattern struct {
686// Dependency parses a dependency pattern like "pkg>=1<2" or "pkg-[0-9]*". 686// Dependency parses a dependency pattern like "pkg>=1<2" or "pkg-[0-9]*".
687func (p *MkParser) Dependency() *DependencyPattern { 687func (p *MkParser) Dependency() *DependencyPattern {
688 lexer := p.lexer 688 lexer := p.lexer
689 689
690 parseVersion := func() string { 690 parseVersion := func() string {
691 mark := lexer.Mark() 691 mark := lexer.Mark()
692 692
693 for p.VarUse() != nil { 693 for p.VarUse() != nil {
694 } 694 }
695 if lexer.Since(mark) != "" { 695 if lexer.Since(mark) != "" {
696 return lexer.Since(mark) 696 return lexer.Since(mark)
697 } 697 }
698 698
699 m := lexer.NextRegexp(G.res.Compile(`^\d[\w.]*`)) 699 m := lexer.NextRegexp(regcomp(`^\d[\w.]*`))
700 if m != nil { 700 if m != nil {
701 return m[0] 701 return m[0]
702 } 702 }
703 703
704 return "" 704 return ""
705 } 705 }
706 706
707 var dp DependencyPattern 707 var dp DependencyPattern
708 mark := lexer.Mark() 708 mark := lexer.Mark()
709 dp.Pkgbase = p.PkgbasePattern() 709 dp.Pkgbase = p.PkgbasePattern()
710 if dp.Pkgbase == "" { 710 if dp.Pkgbase == "" {
711 return nil 711 return nil
712 } 712 }
@@ -739,27 +739,27 @@ func (p *MkParser) Dependency() *Depende @@ -739,27 +739,27 @@ func (p *MkParser) Dependency() *Depende
739 dp.Upper = version 739 dp.Upper = version
740 } else { 740 } else {
741 lexer.Reset(mark2) 741 lexer.Reset(mark2)
742 } 742 }
743 } 743 }
744 744
745 if dp.LowerOp != "" || dp.UpperOp != "" { 745 if dp.LowerOp != "" || dp.UpperOp != "" {
746 return &dp 746 return &dp
747 } 747 }
748 748
749 if lexer.SkipByte('-') && lexer.Rest() != "" { 749 if lexer.SkipByte('-') && lexer.Rest() != "" {
750 versionMark := lexer.Mark() 750 versionMark := lexer.Mark()
751 751
752 for p.VarUse() != nil || lexer.SkipRegexp(G.res.Compile(`^[\w\[\]*_.\-]+`)) { 752 for p.VarUse() != nil || lexer.SkipRegexp(regcomp(`^[\w\[\]*_.\-]+`)) {
753 } 753 }
754 754
755 if !lexer.SkipString("{,nb*}") { 755 if !lexer.SkipString("{,nb*}") {
756 lexer.SkipString("{,nb[0-9]*}") 756 lexer.SkipString("{,nb[0-9]*}")
757 } 757 }
758 758
759 dp.Wildcard = lexer.Since(versionMark) 759 dp.Wildcard = lexer.Since(versionMark)
760 return &dp 760 return &dp
761 } 761 }
762 762
763 if ToVarUse(dp.Pkgbase) != nil { 763 if ToVarUse(dp.Pkgbase) != nil {
764 return &dp 764 return &dp
765 } 765 }

cvs diff -r1.57 -r1.58 pkgsrc/pkgtools/pkglint/files/Attic/package.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/package.go 2019/06/30 20:56:19 1.57
+++ pkgsrc/pkgtools/pkglint/files/Attic/package.go 2019/07/01 22:25:52 1.58
@@ -1204,28 +1204,28 @@ func (pkg *Package) checkOwnerMaintainer @@ -1204,28 +1204,28 @@ func (pkg *Package) checkOwnerMaintainer
1204 seeGuide("Package components, Makefile", "components.Makefile")) 1204 seeGuide("Package components, Makefile", "components.Makefile"))
1205 } 1205 }
1206 1206
1207 if maintainer != "" { 1207 if maintainer != "" {
1208 line := NewLineWhole(filename) 1208 line := NewLineWhole(filename)
1209 line.Notef("Please only commit changes that %s would approve.", maintainer) 1209 line.Notef("Please only commit changes that %s would approve.", maintainer)
1210 line.Explain( 1210 line.Explain(
1211 "See the pkgsrc guide, section \"Package components\",", 1211 "See the pkgsrc guide, section \"Package components\",",
1212 "keyword \"maintainer\", for more information.") 1212 "keyword \"maintainer\", for more information.")
1213 } 1213 }
1214} 1214}
1215 1215
1216func (pkg *Package) checkFreeze(filename string) { 1216func (pkg *Package) checkFreeze(filename string) {
1217 freezeStart := G.Pkgsrc.FreezeStart 1217 freezeStart := G.Pkgsrc.LastFreezeStart
1218 if freezeStart == "" { 1218 if freezeStart == "" || G.Pkgsrc.LastFreezeEnd != "" {
1219 return 1219 return
1220 } 1220 }
1221 1221
1222 if !isLocallyModified(filename) { 1222 if !isLocallyModified(filename) {
1223 return 1223 return
1224 } 1224 }
1225 1225
1226 line := NewLineWhole(filename) 1226 line := NewLineWhole(filename)
1227 line.Notef("Pkgsrc is frozen since %s.", freezeStart) 1227 line.Notef("Pkgsrc is frozen since %s.", freezeStart)
1228 line.Explain( 1228 line.Explain(
1229 "During a pkgsrc freeze, changes to pkgsrc should only be made very carefully.", 1229 "During a pkgsrc freeze, changes to pkgsrc should only be made very carefully.",
1230 "See https://www.netbsd.org/developers/pkgsrc/ for the exact rules.") 1230 "See https://www.netbsd.org/developers/pkgsrc/ for the exact rules.")
1231} 1231}

cvs diff -r1.48 -r1.49 pkgsrc/pkgtools/pkglint/files/Attic/package_test.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/package_test.go 2019/06/30 20:56:19 1.48
+++ pkgsrc/pkgtools/pkglint/files/Attic/package_test.go 2019/07/01 22:25:52 1.49
@@ -952,48 +952,48 @@ func (s *Suite) Test_Package_load__extra @@ -952,48 +952,48 @@ func (s *Suite) Test_Package_load__extra
952 // for collecting the used variables. And then a second time in 952 // for collecting the used variables. And then a second time in
953 // Package.check to perform the actual checks. 953 // Package.check to perform the actual checks.
954 // 954 //
955 // The above diagnostics are only those from parsing the file, to 955 // The above diagnostics are only those from parsing the file, to
956 // correctly classify the lines. This is because the main purpose 956 // correctly classify the lines. This is because the main purpose
957 // of Package.load above is to load the files and collect some 957 // of Package.load above is to load the files and collect some
958 // data, not to perform the actual checks. 958 // data, not to perform the actual checks.
959 // 959 //
960 // Therefore, the below lines contain two more diagnostics. 960 // Therefore, the below lines contain two more diagnostics.
961 "WARN: gnu-style.mk:1: Please use curly braces {} instead of round parentheses () for CC.", 961 "WARN: gnu-style.mk:1: Please use curly braces {} instead of round parentheses () for CC.",
962 "ERROR: gnu-style.mk:1: Unknown Makefile line format: \"ifeq ($(CC),gcc)\".", 962 "ERROR: gnu-style.mk:1: Unknown Makefile line format: \"ifeq ($(CC),gcc)\".",
963 "ERROR: gnu-style.mk:3: Unknown Makefile line format: \"else\".", 963 "ERROR: gnu-style.mk:3: Unknown Makefile line format: \"else\".",
964 "ERROR: gnu-style.mk:5: Unknown Makefile line format: \"endif\".", 964 "ERROR: gnu-style.mk:5: Unknown Makefile line format: \"endif\".",
965 "ERROR: gnu-style.mk:1: Expected \"# $NetBSD: package_test.go,v 1.48 2019/06/30 20:56:19 rillig Exp $\".", 965 "ERROR: gnu-style.mk:1: Expected \""+MkCvsID+"\".",
966 "WARN: gnu-style.mk:2: IS_GCC is defined but not used.", 966 "WARN: gnu-style.mk:2: IS_GCC is defined but not used.",
967 967
968 // There is no warning about files/gnu-style.mk since pkglint 968 // There is no warning about files/gnu-style.mk since pkglint
969 // doesn't even attempt at guessing the file type. Files placed 969 // doesn't even attempt at guessing the file type. Files placed
970 // in this directory can have an arbitrary format. 970 // in this directory can have an arbitrary format.
971 971
972 "ERROR: ../../category/other/distinfo: Patch \"../../category/package/patches/"+ 972 "ERROR: ../../category/other/distinfo: Patch \"../../category/package/patches/"+
973 "patch-Makefile.mk\" is not recorded. Run \""+confMake+" makepatchsum\".", 973 "patch-Makefile.mk\" is not recorded. Run \""+confMake+" makepatchsum\".",
974 974
975 // All *.mk files from PKGDIR are loaded to see which variables 975 // All *.mk files from PKGDIR are loaded to see which variables
976 // they define, in order to make the check for unused variables 976 // they define, in order to make the check for unused variables
977 // more reliable. 977 // more reliable.
978 // 978 //
979 // All files that belong to the package itself, and not to pkgsrc 979 // All files that belong to the package itself, and not to pkgsrc
980 // should therefore be placed in the files/ directory. 980 // should therefore be placed in the files/ directory.
981 "WARN: ../../category/other/gnu-style.mk:1: "+ 981 "WARN: ../../category/other/gnu-style.mk:1: "+
982 "Please use curly braces {} instead of round parentheses () for CC.", 982 "Please use curly braces {} instead of round parentheses () for CC.",
983 "ERROR: ../../category/other/gnu-style.mk:1: Unknown Makefile line format: \"ifeq ($(CC),gcc)\".", 983 "ERROR: ../../category/other/gnu-style.mk:1: Unknown Makefile line format: \"ifeq ($(CC),gcc)\".",
984 "ERROR: ../../category/other/gnu-style.mk:3: Unknown Makefile line format: \"else\".", 984 "ERROR: ../../category/other/gnu-style.mk:3: Unknown Makefile line format: \"else\".",
985 "ERROR: ../../category/other/gnu-style.mk:5: Unknown Makefile line format: \"endif\".", 985 "ERROR: ../../category/other/gnu-style.mk:5: Unknown Makefile line format: \"endif\".",
986 "ERROR: ../../category/other/gnu-style.mk:1: Expected \"# $NetBSD: package_test.go,v 1.48 2019/06/30 20:56:19 rillig Exp $\".", 986 "ERROR: ../../category/other/gnu-style.mk:1: Expected \""+MkCvsID+"\".",
987 "WARN: ../../category/other/gnu-style.mk:2: IS_GCC is defined but not used.", 987 "WARN: ../../category/other/gnu-style.mk:2: IS_GCC is defined but not used.",
988 988
989 "ERROR: patches/patch-Makefile.mk: Contains no patch.", 989 "ERROR: patches/patch-Makefile.mk: Contains no patch.",
990 "WARN: patches/readme.mk: Patch files should be named \"patch-\", followed by letters, '-', '_', '.', and digits only.") 990 "WARN: patches/readme.mk: Patch files should be named \"patch-\", followed by letters, '-', '_', '.', and digits only.")
991} 991}
992 992
993func (s *Suite) Test_Package_loadPackageMakefile(c *check.C) { 993func (s *Suite) Test_Package_loadPackageMakefile(c *check.C) {
994 t := s.Init(c) 994 t := s.Init(c)
995 995
996 t.CreateFileLines("category/package/Makefile", 996 t.CreateFileLines("category/package/Makefile",
997 MkCvsID, 997 MkCvsID,
998 "", 998 "",
999 "PKGNAME=pkgname-1.67", 999 "PKGNAME=pkgname-1.67",

cvs diff -r1.44 -r1.45 pkgsrc/pkgtools/pkglint/files/Attic/pkglint_test.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/pkglint_test.go 2019/06/30 20:56:19 1.44
+++ pkgsrc/pkgtools/pkglint/files/Attic/pkglint_test.go 2019/07/01 22:25:52 1.45
@@ -236,27 +236,27 @@ func (s *Suite) Test_Pkglint_Main__compl @@ -236,27 +236,27 @@ func (s *Suite) Test_Pkglint_Main__compl
236 "(Run \"pkglint -F\" to automatically fix some issues.)") 236 "(Run \"pkglint -F\" to automatically fix some issues.)")
237} 237}
238 238
239func (s *Suite) Test_Pkglint_Main__autofix_exitcode(c *check.C) { 239func (s *Suite) Test_Pkglint_Main__autofix_exitcode(c *check.C) {
240 t := s.Init(c) 240 t := s.Init(c)
241 241
242 t.SetUpPkgsrc() 242 t.SetUpPkgsrc()
243 t.CreateFileLines("filename.mk", 243 t.CreateFileLines("filename.mk",
244 "") 244 "")
245 245
246 exitcode := t.Main("-Wall", "--autofix", t.File("filename.mk")) 246 exitcode := t.Main("-Wall", "--autofix", t.File("filename.mk"))
247 247
248 t.CheckOutputLines( 248 t.CheckOutputLines(
249 "AUTOFIX: ~/filename.mk:1: Inserting a line \"# $NetBSD: pkglint_test.go,v 1.44 2019/06/30 20:56:19 rillig Exp $\" before this line.") 249 "AUTOFIX: ~/filename.mk:1: Inserting a line \"" + MkCvsID + "\" before this line.")
250 t.Check(exitcode, equals, 0) 250 t.Check(exitcode, equals, 0)
251} 251}
252 252
253// Run pkglint in a realistic environment. 253// Run pkglint in a realistic environment.
254// 254//
255// env \ 255// env \
256// PKGLINT_TESTDIR="..." \ 256// PKGLINT_TESTDIR="..." \
257// PKGLINT_TESTCMDLINE="-r" \ 257// PKGLINT_TESTCMDLINE="-r" \
258// go test -covermode=count -test.coverprofile pkglint.cov 258// go test -covermode=count -test.coverprofile pkglint.cov
259// 259//
260// go tool cover -html=pkglint.cov -o coverage.html 260// go tool cover -html=pkglint.cov -o coverage.html
261// 261//
262// To measure the branch coverage of pkglint checking a complete pkgsrc installation, 262// To measure the branch coverage of pkglint checking a complete pkgsrc installation,

cvs diff -r1.29 -r1.30 pkgsrc/pkgtools/pkglint/files/Attic/pkgsrc.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/pkgsrc.go 2019/06/30 20:56:19 1.29
+++ pkgsrc/pkgtools/pkglint/files/Attic/pkgsrc.go 2019/07/01 22:25:52 1.30
@@ -26,54 +26,56 @@ type Pkgsrc struct { @@ -26,54 +26,56 @@ type Pkgsrc struct {
26 // within the bsd.pkg.mk file. 26 // within the bsd.pkg.mk file.
27 buildDefs map[string]bool 27 buildDefs map[string]bool
28 28
29 Tools *Tools 29 Tools *Tools
30 30
31 MasterSiteURLToVar map[string]string // "https://github.com/" => "MASTER_SITE_GITHUB" 31 MasterSiteURLToVar map[string]string // "https://github.com/" => "MASTER_SITE_GITHUB"
32 MasterSiteVarToURL map[string]string // "MASTER_SITE_GITHUB" => "https://github.com/" 32 MasterSiteVarToURL map[string]string // "MASTER_SITE_GITHUB" => "https://github.com/"
33 33
34 PkgOptions map[string]string // "x11" => "Provides X11 support" 34 PkgOptions map[string]string // "x11" => "Provides X11 support"
35 35
36 suggestedUpdates []SuggestedUpdate 36 suggestedUpdates []SuggestedUpdate
37 suggestedWipUpdates []SuggestedUpdate 37 suggestedWipUpdates []SuggestedUpdate
38 38
39 LastChange map[string]*Change 39 LastChange map[string]*Change
40 FreezeStart string // e.g. "2018-01-01", or "" 40 LastFreezeStart string // e.g. "2018-01-01", or ""
 41 LastFreezeEnd string // e.g. "2018-01-01", or ""
41 42
42 listVersions map[string][]string // See Pkgsrc.ListVersions 43 listVersions map[string][]string // See Pkgsrc.ListVersions
43 44
44 // Variables that may be overridden by the pkgsrc user. 45 // Variables that may be overridden by the pkgsrc user.
45 // They are typically defined in mk/defaults/mk.conf. 46 // They are typically defined in mk/defaults/mk.conf.
46 // 47 //
47 // Whenever a package uses such a variable, it must add the variable name 48 // Whenever a package uses such a variable, it must add the variable name
48 // to BUILD_DEFS. 49 // to BUILD_DEFS.
49 UserDefinedVars Scope 50 UserDefinedVars Scope
50 51
51 Deprecated map[string]string 52 Deprecated map[string]string
52 vartypes VarTypeRegistry 53 vartypes VarTypeRegistry
53} 54}
54 55
55func NewPkgsrc(dir string) Pkgsrc { 56func NewPkgsrc(dir string) Pkgsrc {
56 return Pkgsrc{ 57 return Pkgsrc{
57 dir, 58 dir,
58 make(map[string]bool), 59 make(map[string]bool),
59 NewTools(), 60 NewTools(),
60 make(map[string]string), 61 make(map[string]string),
61 make(map[string]string), 62 make(map[string]string),
62 make(map[string]string), 63 make(map[string]string),
63 nil, 64 nil,
64 nil, 65 nil,
65 make(map[string]*Change), 66 make(map[string]*Change),
66 "", 67 "",
 68 "",
67 make(map[string][]string), 69 make(map[string][]string),
68 NewScope(), 70 NewScope(),
69 make(map[string]string), 71 make(map[string]string),
70 NewVarTypeRegistry()} 72 NewVarTypeRegistry()}
71} 73}
72 74
73func (src *Pkgsrc) loadDefaultBuildDefs() { 75func (src *Pkgsrc) loadDefaultBuildDefs() {
74 76
75 // Some user-defined variables do not influence the binary 77 // Some user-defined variables do not influence the binary
76 // package at all and therefore do not have to be added to 78 // package at all and therefore do not have to be added to
77 // BUILD_DEFS; therefore they are marked as "already added". 79 // BUILD_DEFS; therefore they are marked as "already added".
78 src.addBuildDefs( 80 src.addBuildDefs(
79 "DISTDIR", 81 "DISTDIR",
@@ -507,31 +509,34 @@ func (src *Pkgsrc) loadDocChangesFromFil @@ -507,31 +509,34 @@ func (src *Pkgsrc) loadDocChangesFromFil
507 year := "" 509 year := ""
508 if _, yyyy := match1(filename, `-(\d\d\d\d)$`); yyyy >= "2018" { 510 if _, yyyy := match1(filename, `-(\d\d\d\d)$`); yyyy >= "2018" {
509 year = yyyy 511 year = yyyy
510 } 512 }
511 513
512 infra := false 514 infra := false
513 lines := Load(filename, MustSucceed|NotEmpty) 515 lines := Load(filename, MustSucceed|NotEmpty)
514 var changes []*Change 516 var changes []*Change
515 for _, line := range lines.Lines { 517 for _, line := range lines.Lines {
516 518
517 if hasPrefix(line.Text, "\tmk/") { 519 if hasPrefix(line.Text, "\tmk/") {
518 infra = true 520 infra = true
519 if hasPrefix(line.Text, "\tmk/bsd.pkg.mk: started freeze for") { 521 if hasPrefix(line.Text, "\tmk/bsd.pkg.mk: started freeze for") {
520 if m, freezeDate := match1(line.Text, `(\d\d\d\d-\d\d-\d\d)\]$`); m { 522 if m, date := match1(line.Text, `(\d\d\d\d-\d\d-\d\d)\]$`); m {
521 src.FreezeStart = freezeDate 523 src.LastFreezeStart = date
 524 src.LastFreezeEnd = ""
522 } 525 }
523 } else if hasPrefix(line.Text, "\tmk/bsd.pkg.mk: freeze ended for") { 526 } else if hasPrefix(line.Text, "\tmk/bsd.pkg.mk: freeze ended for") {
524 src.FreezeStart = "" 527 if m, date := match1(line.Text, `(\d\d\d\d-\d\d-\d\d)\]$`); m {
 528 src.LastFreezeEnd = date
 529 }
525 } 530 }
526 } 531 }
527 if infra { 532 if infra {
528 if hasSuffix(line.Text, "]") { 533 if hasSuffix(line.Text, "]") {
529 infra = false 534 infra = false
530 } 535 }
531 continue 536 continue
532 } 537 }
533 538
534 change := src.parseDocChange(line, warn) 539 change := src.parseDocChange(line, warn)
535 if change == nil { 540 if change == nil {
536 continue 541 continue
537 } 542 }
@@ -593,26 +598,60 @@ func (src *Pkgsrc) loadDocChanges() { @@ -593,26 +598,60 @@ func (src *Pkgsrc) loadDocChanges() {
593 } 598 }
594 } 599 }
595 600
596 src.LastChange = make(map[string]*Change) 601 src.LastChange = make(map[string]*Change)
597 for _, filename := range filenames { 602 for _, filename := range filenames {
598 changes := src.loadDocChangesFromFile(docDir + "/" + filename) 603 changes := src.loadDocChangesFromFile(docDir + "/" + filename)
599 for _, change := range changes { 604 for _, change := range changes {
600 src.LastChange[change.Pkgpath] = change 605 src.LastChange[change.Pkgpath] = change
601 if change.Action == Renamed || change.Action == Moved { 606 if change.Action == Renamed || change.Action == Moved {
602 src.LastChange[change.Target()] = change 607 src.LastChange[change.Target()] = change
603 } 608 }
604 } 609 }
605 } 610 }
 611
 612 src.checkRemovedAfterLastFreeze()
 613}
 614
 615func (src *Pkgsrc) checkRemovedAfterLastFreeze() {
 616 if src.LastFreezeStart == "" || G.Wip {
 617 return
 618 }
 619
 620 var wrong []*Change
 621 for pkgpath, change := range src.LastChange {
 622 switch change.Action {
 623 case Added, Updated, Downgraded:
 624 if !dirExists(src.File(pkgpath)) {
 625 wrong = append(wrong, change)
 626 }
 627 }
 628 }
 629
 630 sort.Slice(wrong, func(i, j int) bool {
 631 ei, ej := wrong[i], wrong[j]
 632 if ei.Date != ej.Date {
 633 return ei.Date < ej.Date
 634 }
 635 return ei.Location.firstLine < ej.Location.firstLine
 636 })
 637
 638 for _, change := range wrong {
 639	// It's a bit of a cheat to construct a Line from only a Location, 
 640	// without the wrong text. That's only because I'm too lazy to load 
 641	// the file again, and the original text is not lying around anywhere. 
 642 line := NewLineMulti(change.Location.Filename, int(change.Location.firstLine), int(change.Location.lastLine), "", nil)
 643 line.Errorf("Package %s must either exist or be marked as removed.", change.Pkgpath)
 644 }
606} 645}
607 646
608func (src *Pkgsrc) loadUserDefinedVars() { 647func (src *Pkgsrc) loadUserDefinedVars() {
609 mklines := src.LoadMk("mk/defaults/mk.conf", MustSucceed|NotEmpty) 648 mklines := src.LoadMk("mk/defaults/mk.conf", MustSucceed|NotEmpty)
610 649
611 for _, mkline := range mklines.mklines { 650 for _, mkline := range mklines.mklines {
612 if mkline.IsVarassign() || mkline.IsCommentedVarassign() { 651 if mkline.IsVarassign() || mkline.IsCommentedVarassign() {
613 src.UserDefinedVars.Define(mkline.Varname(), mkline) 652 src.UserDefinedVars.Define(mkline.Varname(), mkline)
614 } 653 }
615 } 654 }
616} 655}
617 656
618func (src *Pkgsrc) initDeprecatedVars() { 657func (src *Pkgsrc) initDeprecatedVars() {

cvs diff -r1.18 -r1.19 pkgsrc/pkgtools/pkglint/files/Attic/shtokenizer.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/shtokenizer.go 2019/06/30 20:56:19 1.18
+++ pkgsrc/pkgtools/pkglint/files/Attic/shtokenizer.go 2019/07/01 22:25:52 1.19
@@ -127,48 +127,48 @@ func (p *ShTokenizer) shAtomBackt() *ShA @@ -127,48 +127,48 @@ func (p *ShTokenizer) shAtomBackt() *ShA
127 return op 127 return op
128 } 128 }
129 lexer := p.parser.lexer 129 lexer := p.parser.lexer
130 mark := lexer.Mark() 130 mark := lexer.Mark()
131 switch { 131 switch {
132 case lexer.SkipByte('"'): 132 case lexer.SkipByte('"'):
133 return &ShAtom{shtText, lexer.Since(mark), shqBacktDquot, nil} 133 return &ShAtom{shtText, lexer.Since(mark), shqBacktDquot, nil}
134 case lexer.SkipByte('`'): 134 case lexer.SkipByte('`'):
135 return &ShAtom{shtText, lexer.Since(mark), shqPlain, nil} 135 return &ShAtom{shtText, lexer.Since(mark), shqPlain, nil}
136 case lexer.SkipByte('\''): 136 case lexer.SkipByte('\''):
137 return &ShAtom{shtText, lexer.Since(mark), shqBacktSquot, nil} 137 return &ShAtom{shtText, lexer.Since(mark), shqBacktSquot, nil}
138 case lexer.NextHspace() != "": 138 case lexer.NextHspace() != "":
139 return &ShAtom{shtSpace, lexer.Since(mark), q, nil} 139 return &ShAtom{shtSpace, lexer.Since(mark), q, nil}
140 case lexer.SkipRegexp(G.res.Compile("^#[^`]*")): 140 case lexer.SkipRegexp(regcomp("^#[^`]*")):
141 return &ShAtom{shtComment, lexer.Since(mark), q, nil} 141 return &ShAtom{shtComment, lexer.Since(mark), q, nil}
142 } 142 }
143 return p.shAtomInternal(q, false, false) 143 return p.shAtomInternal(q, false, false)
144} 144}
145 145
146// In pkgsrc, the $(...) subshell syntax is not used, in order to preserve 146// In pkgsrc, the $(...) subshell syntax is not used, in order to preserve
147// compatibility with /bin/sh from Solaris 7. 147// compatibility with /bin/sh from Solaris 7.
148func (p *ShTokenizer) shAtomSubsh() *ShAtom { 148func (p *ShTokenizer) shAtomSubsh() *ShAtom {
149 const q = shqSubsh 149 const q = shqSubsh
150 lexer := p.parser.lexer 150 lexer := p.parser.lexer
151 mark := lexer.Mark() 151 mark := lexer.Mark()
152 switch { 152 switch {
153 case lexer.NextHspace() != "": 153 case lexer.NextHspace() != "":
154 return &ShAtom{shtSpace, lexer.Since(mark), q, nil} 154 return &ShAtom{shtSpace, lexer.Since(mark), q, nil}
155 case lexer.SkipByte('"'): 155 case lexer.SkipByte('"'):
156 return &ShAtom{shtText, lexer.Since(mark), shqSubshDquot, nil} 156 return &ShAtom{shtText, lexer.Since(mark), shqSubshDquot, nil}
157 case lexer.SkipByte('\''): 157 case lexer.SkipByte('\''):
158 return &ShAtom{shtText, lexer.Since(mark), shqSubshSquot, nil} 158 return &ShAtom{shtText, lexer.Since(mark), shqSubshSquot, nil}
159 case lexer.SkipByte('`'): 159 case lexer.SkipByte('`'):
160 return &ShAtom{shtText, lexer.Since(mark), shqSubshBackt, nil} 160 return &ShAtom{shtText, lexer.Since(mark), shqSubshBackt, nil}
161 case lexer.SkipRegexp(G.res.Compile(`^#[^)]*`)): 161 case lexer.SkipRegexp(regcomp(`^#[^)]*`)):
162 return &ShAtom{shtComment, lexer.Since(mark), q, nil} 162 return &ShAtom{shtComment, lexer.Since(mark), q, nil}
163 case lexer.SkipByte(')'): 163 case lexer.SkipByte(')'):
164 // The closing parenthesis can have multiple meanings: 164 // The closing parenthesis can have multiple meanings:
165 // - end of a subshell, such as (echo "in a subshell") 165 // - end of a subshell, such as (echo "in a subshell")
166 // - end of a subshell variable expression, such as var=$$(echo "from a subshell") 166 // - end of a subshell variable expression, such as var=$$(echo "from a subshell")
167 // - end of a case pattern 167 // - end of a case pattern
168 // In the "subshell variable expression" case, the atom type 168 // In the "subshell variable expression" case, the atom type
169 // could be shtText since it is part of a text node. On the 169 // could be shtText since it is part of a text node. On the
170 // other hand, pkglint doesn't tokenize shell programs correctly 170 // other hand, pkglint doesn't tokenize shell programs correctly
171 // anyway. This needs to be fixed someday. 171 // anyway. This needs to be fixed someday.
172 return &ShAtom{shtOperator, lexer.Since(mark), shqPlain, nil} 172 return &ShAtom{shtOperator, lexer.Since(mark), shqPlain, nil}
173 } 173 }
174 if op := p.shOperator(q); op != nil { 174 if op := p.shOperator(q); op != nil {
@@ -181,27 +181,27 @@ func (p *ShTokenizer) shAtomDquotBackt() @@ -181,27 +181,27 @@ func (p *ShTokenizer) shAtomDquotBackt()
181 const q = shqDquotBackt 181 const q = shqDquotBackt
182 if op := p.shOperator(q); op != nil { 182 if op := p.shOperator(q); op != nil {
183 return op 183 return op
184 } 184 }
185 lexer := p.parser.lexer 185 lexer := p.parser.lexer
186 mark := lexer.Mark() 186 mark := lexer.Mark()
187 switch { 187 switch {
188 case lexer.SkipByte('`'): 188 case lexer.SkipByte('`'):
189 return &ShAtom{shtText, lexer.Since(mark), shqDquot, nil} 189 return &ShAtom{shtText, lexer.Since(mark), shqDquot, nil}
190 case lexer.SkipByte('"'): 190 case lexer.SkipByte('"'):
191 return &ShAtom{shtText, lexer.Since(mark), shqDquotBacktDquot, nil} 191 return &ShAtom{shtText, lexer.Since(mark), shqDquotBacktDquot, nil}
192 case lexer.SkipByte('\''): 192 case lexer.SkipByte('\''):
193 return &ShAtom{shtText, lexer.Since(mark), shqDquotBacktSquot, nil} 193 return &ShAtom{shtText, lexer.Since(mark), shqDquotBacktSquot, nil}
194 case lexer.SkipRegexp(G.res.Compile("^#[^`]*")): 194 case lexer.SkipRegexp(regcomp("^#[^`]*")):
195 return &ShAtom{shtComment, lexer.Since(mark), q, nil} 195 return &ShAtom{shtComment, lexer.Since(mark), q, nil}
196 case lexer.NextHspace() != "": 196 case lexer.NextHspace() != "":
197 return &ShAtom{shtSpace, lexer.Since(mark), q, nil} 197 return &ShAtom{shtSpace, lexer.Since(mark), q, nil}
198 } 198 }
199 return p.shAtomInternal(q, false, false) 199 return p.shAtomInternal(q, false, false)
200} 200}
201 201
202func (p *ShTokenizer) shAtomBacktDquot() *ShAtom { 202func (p *ShTokenizer) shAtomBacktDquot() *ShAtom {
203 const q = shqBacktDquot 203 const q = shqBacktDquot
204 lexer := p.parser.lexer 204 lexer := p.parser.lexer
205 mark := lexer.Mark() 205 mark := lexer.Mark()
206 switch { 206 switch {
207 case lexer.SkipByte('"'): 207 case lexer.SkipByte('"'):
@@ -290,41 +290,41 @@ func (p *ShTokenizer) shAtomDquotBacktSq @@ -290,41 +290,41 @@ func (p *ShTokenizer) shAtomDquotBacktSq
290func (p *ShTokenizer) shAtomInternal(q ShQuoting, dquot, squot bool) *ShAtom { 290func (p *ShTokenizer) shAtomInternal(q ShQuoting, dquot, squot bool) *ShAtom {
291 if shVarUse := p.shVarUse(q); shVarUse != nil { 291 if shVarUse := p.shVarUse(q); shVarUse != nil {
292 return shVarUse 292 return shVarUse
293 } 293 }
294 294
295 lexer := p.parser.lexer 295 lexer := p.parser.lexer
296 mark := lexer.Mark() 296 mark := lexer.Mark()
297 297
298loop: 298loop:
299 for { 299 for {
300 _ = `^[\t "$&'();<>\\|]+` // These are not allowed in shqPlain. 300 _ = `^[\t "$&'();<>\\|]+` // These are not allowed in shqPlain.
301 301
302 switch { 302 switch {
303 case lexer.SkipRegexp(G.res.Compile(`^[!#%*+,\-./0-9:=?@A-Z\[\]^_a-z{}~]+`)): 303 case lexer.SkipRegexp(regcomp(`^[!#%*+,\-./0-9:=?@A-Z\[\]^_a-z{}~]+`)):
304 break 304 break
305 case dquot && lexer.SkipRegexp(G.res.Compile(`^[\t &'();<>|]+`)): 305 case dquot && lexer.SkipRegexp(regcomp(`^[\t &'();<>|]+`)):
306 break 306 break
307 case squot && lexer.SkipByte('`'): 307 case squot && lexer.SkipByte('`'):
308 break 308 break
309 case squot && lexer.SkipRegexp(G.res.Compile(`^[\t "&();<>\\|]+`)): 309 case squot && lexer.SkipRegexp(regcomp(`^[\t "&();<>\\|]+`)):
310 break 310 break
311 case squot && lexer.SkipString("$$"): 311 case squot && lexer.SkipString("$$"):
312 break 312 break
313 case squot: 313 case squot:
314 break loop 314 break loop
315 case lexer.SkipString("\\$$"): 315 case lexer.SkipString("\\$$"):
316 break 316 break
317 case lexer.SkipRegexp(G.res.Compile(`^\\[^$]`)): 317 case lexer.SkipRegexp(regcomp(`^\\[^$]`)):
318 break 318 break
319 case matches(lexer.Rest(), `^\$\$[^!#(*\-0-9?@A-Z_a-z{]`): 319 case matches(lexer.Rest(), `^\$\$[^!#(*\-0-9?@A-Z_a-z{]`):
320 lexer.NextString("$$") 320 lexer.NextString("$$")
321 case lexer.Rest() == "$$": 321 case lexer.Rest() == "$$":
322 lexer.Skip(2) 322 lexer.Skip(2)
323 case lexer.Rest() == "$": 323 case lexer.Rest() == "$":
324 lexer.Skip(1) 324 lexer.Skip(1)
325 default: 325 default:
326 break loop 326 break loop
327 } 327 }
328 } 328 }
329 329
330 if token := lexer.Since(mark); token != "" { 330 if token := lexer.Since(mark); token != "" {
@@ -343,62 +343,62 @@ func (p *ShTokenizer) shVarUse(q ShQuoti @@ -343,62 +343,62 @@ func (p *ShTokenizer) shVarUse(q ShQuoti
343 if !lexer.SkipString("$$") { 343 if !lexer.SkipString("$$") {
344 return nil 344 return nil
345 } 345 }
346 346
347 if lexer.TestByteSet(textproc.Digit) { 347 if lexer.TestByteSet(textproc.Digit) {
348 lexer.Skip(1) 348 lexer.Skip(1)
349 text := lexer.Since(beforeDollar) 349 text := lexer.Since(beforeDollar)
350 return &ShAtom{shtShVarUse, text, q, text[2:]} 350 return &ShAtom{shtShVarUse, text, q, text[2:]}
351 } 351 }
352 352
353 brace := lexer.SkipByte('{') 353 brace := lexer.SkipByte('{')
354 354
355 varnameStart := lexer.Mark() 355 varnameStart := lexer.Mark()
356 if !lexer.SkipRegexp(G.res.Compile(`^(?:[!#*\-?@]|\$\$|[A-Za-z_]\w*|\d+)`)) { 356 if !lexer.SkipRegexp(regcomp(`^(?:[!#*\-?@]|\$\$|[A-Za-z_]\w*|\d+)`)) {
357 lexer.Reset(beforeDollar) 357 lexer.Reset(beforeDollar)
358 return nil 358 return nil
359 } 359 }
360 360
361 shVarname := lexer.Since(varnameStart) 361 shVarname := lexer.Since(varnameStart)
362 if shVarname == "$$" { 362 if shVarname == "$$" {
363 shVarname = "$" 363 shVarname = "$"
364 } 364 }
365 365
366 if brace { 366 if brace {
367 lexer.SkipRegexp(G.res.Compile(`^(?:##?|%%?|:?[+\-=?])[^$\\{}]*`)) 367 lexer.SkipRegexp(regcomp(`^(?:##?|%%?|:?[+\-=?])[^$\\{}]*`))
368 if !lexer.SkipByte('}') { 368 if !lexer.SkipByte('}') {
369 lexer.Reset(beforeDollar) 369 lexer.Reset(beforeDollar)
370 return nil 370 return nil
371 } 371 }
372 } 372 }
373 373
374 return &ShAtom{shtShVarUse, lexer.Since(beforeDollar), q, shVarname} 374 return &ShAtom{shtShVarUse, lexer.Since(beforeDollar), q, shVarname}
375} 375}
376 376
377func (p *ShTokenizer) shOperator(q ShQuoting) *ShAtom { 377func (p *ShTokenizer) shOperator(q ShQuoting) *ShAtom {
378 lexer := p.parser.lexer 378 lexer := p.parser.lexer
379 mark := lexer.Mark() 379 mark := lexer.Mark()
380 switch { 380 switch {
381 case lexer.SkipString("||"), 381 case lexer.SkipString("||"),
382 lexer.SkipString("&&"), 382 lexer.SkipString("&&"),
383 lexer.SkipString(";;"), 383 lexer.SkipString(";;"),
384 lexer.NextBytesFunc(func(b byte) bool { return b == '\n' }) != "", 384 lexer.NextBytesFunc(func(b byte) bool { return b == '\n' }) != "",
385 lexer.SkipByte(';'), 385 lexer.SkipByte(';'),
386 lexer.SkipByte('('), 386 lexer.SkipByte('('),
387 lexer.SkipByte(')'), 387 lexer.SkipByte(')'),
388 lexer.SkipByte('|'), 388 lexer.SkipByte('|'),
389 lexer.SkipByte('&'): 389 lexer.SkipByte('&'):
390 return &ShAtom{shtOperator, lexer.Since(mark), q, nil} 390 return &ShAtom{shtOperator, lexer.Since(mark), q, nil}
391 case lexer.SkipRegexp(G.res.Compile(`^\d*(?:<<-|<<|<&|<>|>>|>&|>\||<|>)`)): 391 case lexer.SkipRegexp(regcomp(`^\d*(?:<<-|<<|<&|<>|>>|>&|>\||<|>)`)):
392 return &ShAtom{shtOperator, lexer.Since(mark), q, nil} 392 return &ShAtom{shtOperator, lexer.Since(mark), q, nil}
393 } 393 }
394 return nil 394 return nil
395} 395}
396 396
397func (p *ShTokenizer) ShAtoms() []*ShAtom { 397func (p *ShTokenizer) ShAtoms() []*ShAtom {
398 var atoms []*ShAtom 398 var atoms []*ShAtom
399 q := shqPlain 399 q := shqPlain
400 for { 400 for {
401 atom := p.ShAtom(q) 401 atom := p.ShAtom(q)
402 if atom == nil { 402 if atom == nil {
403 return atoms 403 return atoms
404 } 404 }

cvs diff -r1.27 -r1.28 pkgsrc/pkgtools/pkglint/files/Attic/substcontext_test.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/substcontext_test.go 2019/06/30 20:56:19 1.27
+++ pkgsrc/pkgtools/pkglint/files/Attic/substcontext_test.go 2019/07/01 22:25:52 1.28
@@ -684,27 +684,27 @@ func (s *Suite) Test_SubstContext_sugges @@ -684,27 +684,27 @@ func (s *Suite) Test_SubstContext_sugges
684 "can be replaced with \"SUBST_VARS.pfx+= PREFIX\".") 684 "can be replaced with \"SUBST_VARS.pfx+= PREFIX\".")
685 685
686 t.SetUpCommandLine("--autofix") 686 t.SetUpCommandLine("--autofix")
687 687
688 mklines.Check() 688 mklines.Check()
689 689
690 t.CheckOutputLines( 690 t.CheckOutputLines(
691 "AUTOFIX: subst.mk:6: Replacing \"SUBST_SED.pfx=\\t\\t-e s,@PREFIX@,${PREFIX},g\" "+ 691 "AUTOFIX: subst.mk:6: Replacing \"SUBST_SED.pfx=\\t\\t-e s,@PREFIX@,${PREFIX},g\" "+
692 "with \"SUBST_VARS.pfx=\\t\\tPREFIX\".", 692 "with \"SUBST_VARS.pfx=\\t\\tPREFIX\".",
693 "AUTOFIX: subst.mk:7: Replacing \"SUBST_SED.pfx+=\\t\\t-e s,@PREFIX@,${PREFIX},g\" "+ 693 "AUTOFIX: subst.mk:7: Replacing \"SUBST_SED.pfx+=\\t\\t-e s,@PREFIX@,${PREFIX},g\" "+
694 "with \"SUBST_VARS.pfx+=\\tPREFIX\".") 694 "with \"SUBST_VARS.pfx+=\\tPREFIX\".")
695 695
696 t.CheckFileLinesDetab("subst.mk", 696 t.CheckFileLinesDetab("subst.mk",
697 "# $NetBSD: substcontext_test.go,v 1.27 2019/06/30 20:56:19 rillig Exp $", 697 MkCvsID,
698 "", 698 "",
699 "SUBST_CLASSES+= pfx", 699 "SUBST_CLASSES+= pfx",
700 "SUBST_STAGE.pfx= pre-configure", 700 "SUBST_STAGE.pfx= pre-configure",
701 "SUBST_FILES.pfx= filename", 701 "SUBST_FILES.pfx= filename",
702 "SUBST_VARS.pfx= PREFIX", 702 "SUBST_VARS.pfx= PREFIX",
703 "SUBST_VARS.pfx+= PREFIX") 703 "SUBST_VARS.pfx+= PREFIX")
704} 704}
705 705
706func (s *Suite) Test_SubstContext_suggestSubstVars__autofix_plus_sed(c *check.C) { 706func (s *Suite) Test_SubstContext_suggestSubstVars__autofix_plus_sed(c *check.C) {
707 t := s.Init(c) 707 t := s.Init(c)
708 708
709 t.SetUpVartypes() 709 t.SetUpVartypes()
710 t.Chdir(".") 710 t.Chdir(".")
@@ -723,27 +723,27 @@ func (s *Suite) Test_SubstContext_sugges @@ -723,27 +723,27 @@ func (s *Suite) Test_SubstContext_sugges
723 t.CheckOutputLines( 723 t.CheckOutputLines(
724 "NOTE: subst.mk:6: The substitution command \"s,@PREFIX@,${PREFIX},g\" " + 724 "NOTE: subst.mk:6: The substitution command \"s,@PREFIX@,${PREFIX},g\" " +
725 "can be replaced with \"SUBST_VARS.pfx= PREFIX\".") 725 "can be replaced with \"SUBST_VARS.pfx= PREFIX\".")
726 726
727 t.SetUpCommandLine("-Wall", "--autofix") 727 t.SetUpCommandLine("-Wall", "--autofix")
728 728
729 mklines.Check() 729 mklines.Check()
730 730
731 t.CheckOutputLines( 731 t.CheckOutputLines(
732 "AUTOFIX: subst.mk:6: Replacing \"SUBST_SED.pfx=\\t\\t-e s,@PREFIX@,${PREFIX},g\" " + 732 "AUTOFIX: subst.mk:6: Replacing \"SUBST_SED.pfx=\\t\\t-e s,@PREFIX@,${PREFIX},g\" " +
733 "with \"SUBST_VARS.pfx=\\t\\tPREFIX\".") 733 "with \"SUBST_VARS.pfx=\\t\\tPREFIX\".")
734 734
735 t.CheckFileLinesDetab("subst.mk", 735 t.CheckFileLinesDetab("subst.mk",
736 "# $NetBSD: substcontext_test.go,v 1.27 2019/06/30 20:56:19 rillig Exp $", 736 MkCvsID,
737 "", 737 "",
738 "SUBST_CLASSES+= pfx", 738 "SUBST_CLASSES+= pfx",
739 "SUBST_STAGE.pfx= pre-configure", 739 "SUBST_STAGE.pfx= pre-configure",
740 "SUBST_FILES.pfx= filename", 740 "SUBST_FILES.pfx= filename",
741 "SUBST_VARS.pfx= PREFIX", 741 "SUBST_VARS.pfx= PREFIX",
742 // TODO: If this subst class is used nowhere else, pkglint could 742 // TODO: If this subst class is used nowhere else, pkglint could
743 // replace this += with a simple =. 743 // replace this += with a simple =.
744 "SUBST_SED.pfx+= -e s,@PREFIX@,other,g") 744 "SUBST_SED.pfx+= -e s,@PREFIX@,other,g")
745} 745}
746 746
747func (s *Suite) Test_SubstContext_suggestSubstVars__autofix_plus_vars(c *check.C) { 747func (s *Suite) Test_SubstContext_suggestSubstVars__autofix_plus_vars(c *check.C) {
748 t := s.Init(c) 748 t := s.Init(c)
749 749
@@ -757,27 +757,27 @@ func (s *Suite) Test_SubstContext_sugges @@ -757,27 +757,27 @@ func (s *Suite) Test_SubstContext_sugges
757 "SUBST_CLASSES+=\tid", 757 "SUBST_CLASSES+=\tid",
758 "SUBST_STAGE.id=\tpre-configure", 758 "SUBST_STAGE.id=\tpre-configure",
759 "SUBST_FILES.id=\tfilename", 759 "SUBST_FILES.id=\tfilename",
760 "SUBST_SED.id=\t-e s,@PREFIX@,${PREFIX},g", 760 "SUBST_SED.id=\t-e s,@PREFIX@,${PREFIX},g",
761 "SUBST_VARS.id=\tPKGMANDIR") 761 "SUBST_VARS.id=\tPKGMANDIR")
762 762
763 mklines.Check() 763 mklines.Check()
764 764
765 t.CheckOutputLines( 765 t.CheckOutputLines(
766 "AUTOFIX: subst.mk:6: Replacing \"SUBST_SED.id=\\t-e s,@PREFIX@,${PREFIX},g\" " + 766 "AUTOFIX: subst.mk:6: Replacing \"SUBST_SED.id=\\t-e s,@PREFIX@,${PREFIX},g\" " +
767 "with \"SUBST_VARS.id=\\tPREFIX\".") 767 "with \"SUBST_VARS.id=\\tPREFIX\".")
768 768
769 t.CheckFileLinesDetab("subst.mk", 769 t.CheckFileLinesDetab("subst.mk",
770 "# $NetBSD: substcontext_test.go,v 1.27 2019/06/30 20:56:19 rillig Exp $", 770 MkCvsID,
771 "", 771 "",
772 "SUBST_CLASSES+= id", 772 "SUBST_CLASSES+= id",
773 "SUBST_STAGE.id= pre-configure", 773 "SUBST_STAGE.id= pre-configure",
774 "SUBST_FILES.id= filename", 774 "SUBST_FILES.id= filename",
775 "SUBST_VARS.id= PREFIX", 775 "SUBST_VARS.id= PREFIX",
776 // FIXME: This must be += instead of = since the previous line already uses =. 776 // FIXME: This must be += instead of = since the previous line already uses =.
777 // Luckily the check for redundant assignments catches this already. 777 // Luckily the check for redundant assignments catches this already.
778 "SUBST_VARS.id= PKGMANDIR") 778 "SUBST_VARS.id= PKGMANDIR")
779} 779}
780 780
781func (s *Suite) Test_SubstContext_suggestSubstVars__autofix_indentation(c *check.C) { 781func (s *Suite) Test_SubstContext_suggestSubstVars__autofix_indentation(c *check.C) {
782 t := s.Init(c) 782 t := s.Init(c)
783 783
@@ -791,27 +791,27 @@ func (s *Suite) Test_SubstContext_sugges @@ -791,27 +791,27 @@ func (s *Suite) Test_SubstContext_sugges
791 "SUBST_CLASSES+=\t\t\tfix-paths", 791 "SUBST_CLASSES+=\t\t\tfix-paths",
792 "SUBST_STAGE.fix-paths=\t\tpre-configure", 792 "SUBST_STAGE.fix-paths=\t\tpre-configure",
793 "SUBST_MESSAGE.fix-paths=\tMessage", 793 "SUBST_MESSAGE.fix-paths=\tMessage",
794 "SUBST_FILES.fix-paths=\t\tfilename", 794 "SUBST_FILES.fix-paths=\t\tfilename",
795 "SUBST_SED.fix-paths=\t\t-e s,@PREFIX@,${PREFIX},g") 795 "SUBST_SED.fix-paths=\t\t-e s,@PREFIX@,${PREFIX},g")
796 796
797 mklines.Check() 797 mklines.Check()
798 798
799 t.CheckOutputLines( 799 t.CheckOutputLines(
800 "AUTOFIX: subst.mk:7: Replacing \"SUBST_SED.fix-paths=\\t\\t-e s,@PREFIX@,${PREFIX},g\" " + 800 "AUTOFIX: subst.mk:7: Replacing \"SUBST_SED.fix-paths=\\t\\t-e s,@PREFIX@,${PREFIX},g\" " +
801 "with \"SUBST_VARS.fix-paths=\\t\\tPREFIX\".") 801 "with \"SUBST_VARS.fix-paths=\\t\\tPREFIX\".")
802 802
803 t.CheckFileLinesDetab("subst.mk", 803 t.CheckFileLinesDetab("subst.mk",
804 "# $NetBSD: substcontext_test.go,v 1.27 2019/06/30 20:56:19 rillig Exp $", 804 MkCvsID,
805 "", 805 "",
806 "SUBST_CLASSES+= fix-paths", 806 "SUBST_CLASSES+= fix-paths",
807 "SUBST_STAGE.fix-paths= pre-configure", 807 "SUBST_STAGE.fix-paths= pre-configure",
808 "SUBST_MESSAGE.fix-paths= Message", 808 "SUBST_MESSAGE.fix-paths= Message",
809 "SUBST_FILES.fix-paths= filename", 809 "SUBST_FILES.fix-paths= filename",
810 "SUBST_VARS.fix-paths= PREFIX") 810 "SUBST_VARS.fix-paths= PREFIX")
811} 811}
812 812
813func (s *Suite) Test_SubstContext_extractVarname(c *check.C) { 813func (s *Suite) Test_SubstContext_extractVarname(c *check.C) {
814 t := s.Init(c) 814 t := s.Init(c)
815 815
816 test := func(input, expected string) { 816 test := func(input, expected string) {
817 t.Check((*SubstContext).extractVarname(nil, input), equals, expected) 817 t.Check((*SubstContext).extractVarname(nil, input), equals, expected)

cvs diff -r1.47 -r1.48 pkgsrc/pkgtools/pkglint/files/Attic/util.go (expand / switch to unified diff)

--- pkgsrc/pkgtools/pkglint/files/Attic/util.go 2019/06/30 20:56:19 1.47
+++ pkgsrc/pkgtools/pkglint/files/Attic/util.go 2019/07/01 22:25:52 1.48
@@ -32,26 +32,32 @@ func (ynu YesNoUnknown) String() string  @@ -32,26 +32,32 @@ func (ynu YesNoUnknown) String() string
32// Short names for commonly used functions. 32// Short names for commonly used functions.
33func contains(s, substr string) bool { 33func contains(s, substr string) bool {
34 return strings.Contains(s, substr) 34 return strings.Contains(s, substr)
35} 35}
36func hasPrefix(s, prefix string) bool { 36func hasPrefix(s, prefix string) bool {
37 return strings.HasPrefix(s, prefix) 37 return strings.HasPrefix(s, prefix)
38} 38}
39func hasSuffix(s, suffix string) bool { 39func hasSuffix(s, suffix string) bool {
40 return strings.HasSuffix(s, suffix) 40 return strings.HasSuffix(s, suffix)
41} 41}
42func sprintf(format string, args ...interface{}) string { 42func sprintf(format string, args ...interface{}) string {
43 return fmt.Sprintf(format, args...) 43 return fmt.Sprintf(format, args...)
44} 44}
 45func regcomp(re regex.Pattern) *regexp.Regexp {
 46 return G.res.Compile(re)
 47}
 48func match(s string, re regex.Pattern) []string {
 49 return G.res.Match(s, re)
 50}
45func matches(s string, re regex.Pattern) bool { 51func matches(s string, re regex.Pattern) bool {
46 return G.res.Matches(s, re) 52 return G.res.Matches(s, re)
47} 53}
48func match1(s string, re regex.Pattern) (matched bool, m1 string) { 54func match1(s string, re regex.Pattern) (matched bool, m1 string) {
49 return G.res.Match1(s, re) 55 return G.res.Match1(s, re)
50} 56}
51func match2(s string, re regex.Pattern) (matched bool, m1, m2 string) { 57func match2(s string, re regex.Pattern) (matched bool, m1, m2 string) {
52 return G.res.Match2(s, re) 58 return G.res.Match2(s, re)
53} 59}
54func match3(s string, re regex.Pattern) (matched bool, m1, m2, m3 string) { 60func match3(s string, re regex.Pattern) (matched bool, m1, m2, m3 string) {
55 return G.res.Match3(s, re) 61 return G.res.Match3(s, re)
56} 62}
57func match4(s string, re regex.Pattern) (matched bool, m1, m2, m3, m4 string) { 63func match4(s string, re regex.Pattern) (matched bool, m1, m2, m3, m4 string) {