Skip to content

Commit

Permalink
Address DeepSource suggestions
Browse files Browse the repository at this point in the history
  • Loading branch information
jmanuel1 committed Nov 9, 2024
1 parent a8edcc0 commit cb6670f
Show file tree
Hide file tree
Showing 4 changed files with 196 additions and 178 deletions.
156 changes: 80 additions & 76 deletions concat/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,90 +57,94 @@ def func(name: str) -> IO[AnyStr]:
'--tokenize',
action='store_true',
default=False,
help='tokenize input from the given file and print the tokens as a JSON array',
help=(
'tokenize input from the given file and print the tokens as a JSON '
'array'
),
)


def tokenize_printing_errors() -> list[concat.lex.Token]:
    """Tokenize ``args.file``, printing any lexer errors, and return the tokens.

    Indentation and lexical errors are reported on stdout; tokenization
    continues past them so that as many tokens as possible are collected.
    """
    collected = list[concat.lex.Token]()
    for result in concat.lex.tokenize(args.file.read()):
        if result.type == 'token':
            collected.append(result.token)
            continue
        if result.type == 'indent-err':
            # The lexer may leave lineno/offset unset; default to file start.
            where = (result.err.lineno or 1, result.err.offset or 0)
            print('Indentation error:')
            print(
                create_indentation_error_message(
                    args.file, where, result.err.msg
                )
            )
        elif result.type == 'token-err':
            print('Lexical error:')
            print(
                create_lexical_error_message(
                    args.file, result.location, str(result.err)
                )
            )
        else:
            # Exhaustiveness check: fails type checking if a new result
            # variant is added without handling it here.
            assert_never(result)
    return collected


def batch_main():
    """Compile and run the Concat program read from ``args.file``.

    Pipeline: tokenize -> parse -> typecheck -> transpile to a Python AST ->
    execute.  Recovered tokenizer/parser failures are printed but do not stop
    compilation; if any parse failures were recovered, the process exits with
    status 1 after execution.  ``args.file`` is always closed on exit.
    """
    try:
        tokens = tokenize_printing_errors()
        concat_ast = parse(tokens)
        # Failures the parser recovered from: report each one, keep going.
        recovered_parsing_failures = concat_ast.parsing_failures
        for failure in recovered_parsing_failures:
            print('Parse Error:')
            print(create_parsing_failure_message(args.file, tokens, failure))
        # Imports are resolved relative to the source file's directory.
        source_dir = os.path.dirname(filename)
        typecheck(concat_ast, source_dir)
        python_ast = transpile_ast(concat_ast)
    except concat.typecheck.StaticAnalysisError as e:
        if e.path is None:
            in_path = ''
        else:
            in_path = ' in file ' + str(e.path)
        print(f'Static Analysis Error{in_path}:\n')
        print(e, 'in line:')
        if e.location:
            # Errors from imported files point at a path; re-open that file
            # to show the offending line.  Otherwise use the input file.
            if e.path is not None:
                with e.path.open() as f:
                    print(get_line_at(f, e.location), end='')
            else:
                print(get_line_at(args.file, e.location), end='')
            # Caret marker under the error column (0-based column index).
            print(' ' * e.location[1] + '^')
        if args.verbose:
            raise
    except concat.parser_combinators.ParseError as e:
        # Unrecoverable parse error: report and fall through without running.
        print('Parse Error:')
        print(
            create_parsing_failure_message(
                args.file, tokens, e.args[0].failures
            )
        )
    except Exception:
        print('An internal error has occurred.')
        print('This is a bug in Concat.')
        raise
    else:
        # Compilation succeeded: run the transpiled program.
        concat.execute.execute(
            filename,
            python_ast,
            {},
            should_log_stacks=args.debug,
            import_resolution_start_directory=source_dir,
        )
        # Recovered parse failures still mean the input was malformed.
        if list(concat_ast.parsing_failures):
            sys.exit(1)
    finally:
        args.file.close()


def main():
    """Entry point: start a REPL on a TTY, otherwise run the file in batch mode."""
    # interactive mode
    if args.file.isatty():
        concat.stdlib.repl.repl([], [], args.debug)
    else:
        # Non-interactive: compile and execute the whole file.  The inline
        # tokenize/parse/typecheck/execute code that used to live here was a
        # verbatim duplicate of batch_main's body; delegate so the batch
        # pipeline exists in exactly one place.
        batch_main()


# We should pass any unknown args onto the program we're about to run.
Expand Down
13 changes: 11 additions & 2 deletions concat/error_reporting.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,22 @@ def create_lexical_error_message(
file: TextIO, location: concat.astutils.Location, message: str
) -> str:
line = get_line_at(file, location)
message = f'Cannot tokenize file at line {location[0]}, column {location[1] + 1}:\n{line.rstrip()}\n{' ' * location[1] + '^'}\n'
message = (
f'Cannot tokenize file at line {location[0]}, '
f'column {location[1] + 1}:\n'
f'{line.rstrip()}\n'
f'{' ' * location[1] + '^'}\n'
)
return message


def create_indentation_error_message(
    file: TextIO, location: concat.astutils.Location, message: str
) -> str:
    """Format an indentation-error report pointing at ``location`` in ``file``.

    :param file: source file; used to fetch the offending line
    :param location: ``(line, column)`` pair; the column is 0-based
    :param message: the lexer's message (currently not included in the output)
    :return: a human-readable, multi-line error description
    """
    line = get_line_at(file, location)
    # NOTE: the old single-line assignment that preceded this one was
    # leftover diff residue (the same string built twice); only the
    # wrapped form is kept.  location[1] is 0-based, hence the +1 for
    # the human-readable column number.
    message = (
        f'Malformed indentation at line {location[0]}, '
        f'column {location[1] + 1}:\n'
        f'{line.rstrip()}\n'
    )
    return message
Loading

0 comments on commit cb6670f

Please sign in to comment.