StatementGrouper.get_statements() simply raises StopIteration when there are no statements, so the call sites below can loop over it directly instead of first checking grouper.statements.
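To see why the if grouper.statements: guard is redundant, recall that a for loop over a generator that yields nothing simply never enters its body. A minimal, self-contained sketch (a stand-in list-backed generator, not the real StatementGrouper):

    def get_statements(statements):
        # Stand-in for StatementGrouper.get_statements(): yield whatever was
        # accumulated; when the generator function returns, StopIteration is
        # raised for the caller automatically.
        for stmt in statements:
            yield stmt

    # With nothing accumulated the loop body never runs, so a separate
    # "if statements:" check before the loop buys nothing.
    for statement in get_statements([]):
        print("never reached")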
 grouper = StatementGrouper(encoding='utf-8')
 for line in lines:
     grouper.process_line(line)
-    if grouper.statements:
-        for statement in grouper.get_statements():
-            print("----- -----")
-            if find_error(statement):
-                print("ERRORS IN QUERY")
-            print_tokens(statement, encoding='utf-8')
-            print()
-            statement._pprint_tree()
+    for statement in grouper.get_statements():
+        print("----- -----")
+        if find_error(statement):
+            print("ERRORS IN QUERY")
+        print_tokens(statement, encoding='utf-8')
+        print()
+        statement._pprint_tree()
     print("-----/-----")
 tokens = grouper.close()
 if tokens:
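For reference, find_error() (used in this hunk and the next) presumably checks whether the parsed statement contains anything the lexer could not make sense of. A rough sketch of that kind of check, assuming sqlparse as the parser and using a hypothetical name in place of the real helper:

    import sqlparse
    from sqlparse import tokens as T

    def has_error_token(statement):
        # Hypothetical stand-in for find_error(): True when any leaf token of
        # the parsed statement was lexed as an Error token.
        return any(tok.ttype in T.Error for tok in statement.flatten())

    stmt = sqlparse.parse("SELECT 1;")[0]
    print(has_error_token(stmt))  # False for a clean statement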
 grouper = StatementGrouper(encoding='utf-8')
 for line in lines:
     grouper.process_line(line)
-    if grouper.statements:
-        for statement in grouper.get_statements():
-            print("----- -----")
-            if find_error(statement):
-                print("ERRORS IN QUERY")
-            process_statement(statement)
-            print_tokens(statement, encoding='utf-8')
-            print()
-            statement._pprint_tree()
+    for statement in grouper.get_statements():
+        print("----- -----")
+        if find_error(statement):
+            print("ERRORS IN QUERY")
+        process_statement(statement)
+        print_tokens(statement, encoding='utf-8')
+        print()
+        statement._pprint_tree()
     print("-----/-----")
 tokens = grouper.close()
 if tokens:
     cur_pos += len(line)
     pbar.display(cur_pos)
     grouper.process_line(line)
-    if grouper.statements:
-        for statement in grouper.get_statements():
-            if got_directive and is_newline_statement(statement):
-                # Condense a sequence of newlines after a /*! directive */;
-                got_directive = False
-                continue
-            got_directive = is_directive_statement(statement)
-            if got_directive:
-                continue
-            process_statement(statement)
-            print_tokens(statement, outfile=outfile,
-                         encoding=output_encoding)
+    for statement in grouper.get_statements():
+        if got_directive and is_newline_statement(statement):
+            # Condense a sequence of newlines after a /*! directive */;
+            got_directive = False
+            continue
+        got_directive = is_directive_statement(statement)
+        if got_directive:
+            continue
+        process_statement(statement)
+        print_tokens(statement, outfile=outfile,
+                     encoding=output_encoding)
 tokens = grouper.close()
 if tokens:
     for token in tokens:
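The directives skipped above are MySQL conditional comments such as /*!40101 SET NAMES utf8 */;, which only MySQL executes. A rough sketch of the shape of test is_directive_statement() might apply, assuming sqlparse tokens (the real helper's rules may differ):

    import sqlparse
    from sqlparse import tokens as T

    def looks_like_directive(statement):
        # Hypothetical check: the statement consists only of whitespace, one
        # /*! ... */ conditional comment, and the closing semicolon.
        saw_directive = False
        for tok in statement.flatten():
            if tok.ttype in T.Whitespace:
                continue
            if tok.ttype in T.Comment and tok.value.startswith('/*!'):
                saw_directive = True
            elif tok.ttype in T.Punctuation and tok.value == ';':
                continue
            else:
                return False
        return saw_directive

    stmt = sqlparse.parse("/*!40101 SET NAMES utf8 */;")[0]
    print(looks_like_directive(stmt))  # True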
         for stmt in self.statements:
             yield stmt
         self.statements = []
+        raise StopIteration
 
     def close(self):
         if not self.lines:
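One caveat on the explicit raise StopIteration: since PEP 479 (the default behaviour from Python 3.7), a StopIteration raised inside a generator body is converted into a RuntimeError. On modern Python the same "end the iteration here" effect comes from a bare return, e.g. the method could end with:

    def get_statements(self):
        for stmt in self.statements:
            yield stmt
        self.statements = []
        return  # generator ends; the caller's loop sees StopIteration implicitly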