Lines Matching +full:no +full:- +full:verbose

5 """reindent [-d][-r][-v] [ path ... ]
7 -d (--dryrun) Dry run. Analyze, but don't make any changes to, files.
8 -r (--recurse) Recurse. Search for all .py files in subdirectories too.
9 -n (--nobackup) No backup. Does not make a ".bak" file before reindenting.
10 -v (--verbose) Verbose. Print informative msgs; else no output.
11 -h (--help) Help. Print this usage information and exit.
13 Change Python (.py) files to use 4-space indents and no hard tab characters.
17 If no paths are given on the command line, reindent operates as a filter,
19 source to standard output. In this case, the -d, -r and -v flags are
23 path, all .py files within the directory will be examined, and, if the -r
29 file is a fixed-point for future runs (i.e., running reindent on the
39 you'd prefer. You can always use the --nobackup option to prevent this.
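
The docstring lines matched above describe the command-line flags and the filter mode (read Python source on stdin, write reindented source to stdout). As a deliberately naive sketch of that filter shape, and not the script's actual algorithm (which re-derives indent levels via tokenize), tab expansion alone might look like this:

    import sys

    def naive_reindent_filter(infile=sys.stdin, outfile=sys.stdout):
        # Sketch only: expand hard tabs at 8-column stops and drop trailing
        # whitespace. The real script additionally normalizes every block to
        # 4-space indents while preserving the file's indentation structure.
        for line in infile:
            outfile.write(line.expandtabs(8).rstrip() + "\n")
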
54 verbose = 0
73 global verbose, recurse, dryrun, makebackup
76 ["dryrun", "recurse", "nobackup", "verbose", "help"])
81 if o in ('-d', '--dryrun'):
83 elif o in ('-r', '--recurse'):
85 elif o in ('-n', '--nobackup'):
87 elif o in ('-v', '--verbose'):
88 verbose += 1
89 elif o in ('-h', '--help'):
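
The matches at file lines 73-89 show the standard getopt pattern used for these flags. A self-contained sketch of that pattern (the surrounding main() is not reproduced here, and the defaults shown are assumptions):

    import getopt
    import sys

    def parse_args(argv):
        # Sketch reconstructed from the matched lines; not a verbatim excerpt.
        dryrun = recurse = verbose = 0
        makebackup = True
        try:
            opts, args = getopt.getopt(argv, "drnvh",
                ["dryrun", "recurse", "nobackup", "verbose", "help"])
        except getopt.GetoptError as msg:
            print(msg, file=sys.stderr)
            sys.exit(2)
        for o, _ in opts:
            if o in ('-d', '--dryrun'):
                dryrun = 1
            elif o in ('-r', '--recurse'):
                recurse = 1
            elif o in ('-n', '--nobackup'):
                makebackup = False
            elif o in ('-v', '--verbose'):
                verbose += 1
            elif o in ('-h', '--help'):
                print(__doc__ or "usage: reindent [-d][-r][-n][-v] [ path ... ]")
                sys.exit(0)
        return dryrun, recurse, verbose, makebackup, args
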
102 if verbose:
113 if verbose:
124 if verbose:
132 if verbose:
137 if verbose:
141 if verbose:
154 while i > 0 and line[i-1] in JUNK:
155 i -= 1
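
The two matched lines at 154-155 scan backwards past trailing junk characters. Completed into a self-contained helper (the name and the JUNK default are assumptions, not part of the matches):

    def rstrip_junk(line, JUNK='\n \t'):
        # Return line stripped of trailing spaces, tabs and newlines.
        i = len(line)
        while i > 0 and line[i-1] in JUNK:
            i -= 1
        return line[:i]
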
167 # File lines, rstripped & tab-expanded. Stub at start is so
168 # that we can use tokenize's 1-based line numbering easily.
169 # Note that a line is all-blank iff it's "\n".
176 # comment line. indentlevel is -1 for comment lines, as a
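
The comments matched at 167-176 describe the working data layout: file lines are rstripped and tab-expanded, a stub is placed at index 0 so tokenize's 1-based line numbers index the list directly, and indent statistics use -1 for comment lines. A small sketch of that layout, with made-up input lines:

    raw = ["def f():\n", "\tif x:\n", "\t\treturn 1\n"]   # stand-in for a file's lines
    lines = [line.rstrip().expandtabs() + "\n" for line in raw]
    lines.insert(0, None)   # stub: tokenize's 1-based line numbers index directly
    # With this layout a line is all-blank exactly when it equals "\n".
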
185 while lines and lines[-1] == "\n":
194 # Copy over initial empty lines -- there's nothing to do until
198 for i in range(len(stats)-1):
209 want = have2want.get(have, -1)
212 for j in range(i+1, len(stats)-1):
222 for j in range(i-1, -1, -1):
225 want = have + getlspace(after[jline-1]) - \
229 # Still no luck -- leave it alone.
235 diff = want - have
246 remove = min(getlspace(line), -diff)
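
The matches from the reindenting loop (file lines 185-246) compare the indentation a block currently has ("have") with the indentation it should get ("want") and apply diff = want - have, padding with spaces or stripping at most the available leading spaces. A hedged sketch of applying such a diff to one already tab-free line:

    def getlspace(line):
        # Count leading spaces; this is the role getlspace plays in the matches.
        i = 0
        while i < len(line) and line[i] == " ":
            i += 1
        return i

    def adjust_line(line, diff):
        # Sketch: apply diff = want - have to a single line; blank lines stay.
        if line == "\n" or diff == 0:
            return line
        if diff > 0:
            return " " * diff + line
        remove = min(getlspace(line), -diff)
        return line[remove:]
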
253 # Line-getter for tokenize.
262 # Line-eater for tokenize.
283 self.level -= 1
287 self.stats.append((sline, -1))
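
The final matches (file lines 253-287) point at the tokenize plumbing: a line-getter feeding tokenize and a line-eater callback that tracks INDENT/DEDENT nesting and records (line, level) statistics, with -1 marking comment lines. That callback interface belongs to the old tokenize API; a sketch of the same bookkeeping against the modern generator API (the function name and details are assumptions):

    import io
    import tokenize

    def collect_stats(source):
        # Sketch of the "line-eater" bookkeeping, not the script's own code.
        stats = []          # (line_number, indent_level) pairs
        level = 0
        find_stmt = True    # waiting for the first token of the next statement
        tokens = tokenize.generate_tokens(io.StringIO(source).readline)
        for tok_type, _, (sline, _), _, line in tokens:
            if tok_type == tokenize.NEWLINE:
                find_stmt = True
            elif tok_type == tokenize.INDENT:
                find_stmt = True
                level += 1
            elif tok_type == tokenize.DEDENT:
                find_stmt = True
                level -= 1
            elif tok_type == tokenize.COMMENT:
                if find_stmt:
                    stats.append((sline, -1))   # comment lines get level -1
            elif tok_type == tokenize.NL:
                pass
            elif find_stmt:
                find_stmt = False
                if line:                        # skip the trailing ENDMARKER
                    stats.append((sline, level))
        return stats
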