# Released to the public domain, by Tim Peters, 28 February 2000.
5 """checkappend.py -- search for multi-argument .append() calls.
7 Usage: specify one or more file or directory paths:
8 checkappend [-v] file_or_dir [file_or_dir] ...
10 Each file_or_dir is checked for multi-argument .append() calls. When
11 a directory, all .py files in the directory, and recursively in its
12 subdirectories, are checked.
14 Use -v for status msgs. Use -vv for more status msgs.
16 In the absence of -v, the only output is pairs of the form
19 line containing the suspicious append
21 Note that this finds multi-argument append calls regardless of whether
22 they're attached to list objects. If a module defines a class with an
23 append method that takes more than one argument, calls to that method
26 Note that this will not find multi-argument list.append calls made via a
27 bound method object. For example, this is not caught:
30 push = somelist.append
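
# Illustrative only -- the names below are made up, not taken from any real
# module.  A call like the first line is what this script reports; the second
# (a single tuple argument) is not:
#
#     history.append(event, timestamp)      # reported
#     history.append((event, timestamp))    # fine: one tuple argument
#
# Each hit is printed as "filename(lineno):" followed by the offending line.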

import os
import sys
import getopt
import tokenize

verbose = 0

def errprint(*args):
    msg = ' '.join(args)
    sys.stderr.write(msg)
    sys.stderr.write("\n")

def main():
    global verbose
    try:
        opts, args = getopt.getopt(sys.argv[1:], "v")
    except getopt.error as msg:
        errprint(str(msg) + "\n\n" + __doc__)
        return
    for opt, optarg in opts:
        if opt == '-v':
            verbose = verbose + 1
    if not args:
        errprint(__doc__)
        return
    for arg in args:
        check(arg)

def check(file):
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print("%r: listing directory" % (file,))
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            if ((os.path.isdir(fullname) and
                 not os.path.islink(fullname))
                or os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return

    try:
        f = open(file)
    except IOError as msg:
        errprint("%r: I/O Error: %s" % (file, msg))
        return

    if verbose > 1:
        print("checking %r ..." % (file,))

    ok = AppendChecker(file, f).run()
    if verbose and ok:
        print("%r: Clean bill of health." % (file,))
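
# The checker below is a small state machine driven by the token stream:
# FIND_DOT looks for a ".", FIND_APPEND for a following "append" name,
# FIND_LPAREN for the opening "(", and FIND_COMMA tracks bracket nesting,
# flagging a "," at the call's top level (a multi-argument append).
# FIND_STMT then skips the rest of the statement so it is reported only once.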

[FIND_DOT,
 FIND_APPEND,
 FIND_LPAREN,
 FIND_COMMA,
 FIND_STMT] = range(5)

class AppendChecker:

    def __init__(self, fname, file):
        self.fname = fname
        self.file = file
        self.state = FIND_DOT
        self.nerrors = 0

    def run(self):
        try:
            tokens = tokenize.generate_tokens(self.file.readline)
            for _token in tokens:
                self.tokeneater(*_token)
        except tokenize.TokenError as msg:
            errprint("%r: Token Error: %s" % (self.fname, msg))
            self.nerrors = self.nerrors + 1
        return self.nerrors == 0
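
    # Each token from tokenize.generate_tokens() is a 5-tuple of
    # (token type, token string, (srow, scol) start, (erow, ecol) end,
    # physical line); run() unpacks one such tuple per call into tokeneater().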
    def tokeneater(self, type, token, start, end, line,
                   NEWLINE=tokenize.NEWLINE,
                   JUNK=(tokenize.COMMENT, tokenize.NL),
                   OP=tokenize.OP,
                   NAME=tokenize.NAME):

        state = self.state

        if type in JUNK:
            pass

        elif state is FIND_DOT:
            if type is OP and token == ".":
                state = FIND_APPEND

        elif state is FIND_APPEND:
            if type is NAME and token == "append":
                self.lineno = start[0]
                self.line = line
                state = FIND_LPAREN
            else:
                state = FIND_DOT

        elif state is FIND_LPAREN:
            if type is OP and token == "(":
                self.level = 1
                state = FIND_COMMA
            else:
                state = FIND_DOT

        elif state is FIND_COMMA:
            if type is OP:
                if token in ("(", "{", "["):
                    self.level = self.level + 1
                elif token in (")", "}", "]"):
                    self.level = self.level - 1
                    if self.level == 0:
                        state = FIND_DOT
                elif token == "," and self.level == 1:
                    self.nerrors = self.nerrors + 1
                    print("%s(%d):\n%s" % (self.fname, self.lineno,
                                           self.line))
                    # don't gripe about this stmt again
                    state = FIND_STMT

        elif state is FIND_STMT:
            if type is NEWLINE:
                state = FIND_DOT

        else:
            raise SystemError("unknown internal state '%r'" % (state,))

        self.state = state
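
# A minimal sketch of using the checker programmatically rather than via the
# command line (the path "example.py" is hypothetical):
#
#     with open("example.py") as f:
#         clean = AppendChecker("example.py", f).run()
#     # clean is False if any multi-argument .append() call was reported.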

if __name__ == '__main__':
    main()