Commit

pythongh-104825: Remove implicit newline in the line attribute in tokens emitted in the tokenize module
pablogsal committed May 24, 2023
1 parent e561c09 commit e81810d
Showing 4 changed files with 10 additions and 4 deletions.
4 changes: 2 additions & 2 deletions Lib/test/test_tokenize.py
@@ -103,7 +103,7 @@ def k(x):
             e.exception.msg,
             'unindent does not match any outer indentation level')
         self.assertEqual(e.exception.offset, 9)
-        self.assertEqual(e.exception.text, '  x += 5\n')
+        self.assertEqual(e.exception.text, '  x += 5')
 
     def test_int(self):
         # Ordinary integers and binary operators
@@ -1157,7 +1157,7 @@ def readline():
 
         # skip the initial encoding token and the end tokens
         tokens = list(_tokenize(readline(), encoding='utf-8'))[:-2]
-        expected_tokens = [TokenInfo(3, '"ЉЊЈЁЂ"', (1, 0), (1, 7), '"ЉЊЈЁЂ"\n')]
+        expected_tokens = [TokenInfo(3, '"ЉЊЈЁЂ"', (1, 0), (1, 7), '"ЉЊЈЁЂ"')]
         self.assertEqual(tokens, expected_tokens,
                          "bytes not decoded with encoding")

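For context, here is a minimal sketch of the behavior the first test hunk now asserts, assuming an interpreter built with this commit; the bad_source snippet is reconstructed from the test's "def k(x)" fixture for illustration:

import io
import tokenize

# Source whose last line dedents to a level that matches no outer block.
bad_source = b"def k(x):\n    x += 2\n  x += 5\n"

try:
    for tok in tokenize.tokenize(io.BytesIO(bad_source).readline):
        pass
except IndentationError as e:
    print(repr(e.text))  # '  x += 5' -- no trailing '\n' after this commit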
4 changes: 2 additions & 2 deletions Lib/tokenize.py
@@ -518,8 +518,8 @@ def error(message, filename=None, location=None):
             if args.exact:
                 token_type = token.exact_type
             token_range = "%d,%d-%d,%d:" % (token.start + token.end)
-            print("%-20s%-15s%-15r" %
-                  (token_range, tok_name[token_type], token.string))
+            print("%-20s%-15s%-15r%-15r" %
+                  (token_range, tok_name[token_type], token.string, token.line))
     except IndentationError as err:
         line, column = err.args[1][1:3]
         error(err.args[0], filename, (line, column))
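The hunk above adds a fourth column to the `python -m tokenize` output: the token's line attribute. A rough sketch of the same loop outside the CLI, using the public generate_tokens and tok_name APIs (the "x = 1" input is made up for illustration):

import io
from tokenize import generate_tokens, tok_name

source = "x = 1\n"
for token in generate_tokens(io.StringIO(source).readline):
    token_range = "%d,%d-%d,%d:" % (token.start + token.end)
    # token.line is the new fourth column added by this commit.
    print("%-20s%-15s%-15r%-15r" %
          (token_range, tok_name[token.type], token.string, token.line))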
2 changes: 2 additions & 0 deletions (new NEWS entry)
@@ -0,0 +1,2 @@
+Tokens emitted by the :mod:`tokenize` module do not include an implicit
+``\n`` character in the ``line`` attribute anymore. Patch by Pablo Galindo
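A one-liner illustrating the documented change, assuming a build that includes this commit:

import io
from tokenize import generate_tokens

tok = next(generate_tokens(io.StringIO("x = 1\n").readline))
print(repr(tok.line))
# Before this commit: 'x = 1\n'
# After this commit:  'x = 1'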
4 changes: 4 additions & 0 deletions Python/Python-tokenize.c
@@ -123,6 +123,8 @@ _tokenizer_error(struct tok_state *tok)
     int result = 0;
 
     Py_ssize_t size = tok->inp - tok->buf;
+    assert(tok->buf[size-1] == '\n');
+    size -= 1; // Remove the newline character from the end of the line
     error_line = PyUnicode_DecodeUTF8(tok->buf, size, "replace");
     if (!error_line) {
         result = -1;
@@ -193,6 +195,8 @@ tokenizeriter_next(tokenizeriterobject *it)
     }
 
     Py_ssize_t size = it->tok->inp - it->tok->buf;
+    assert(it->tok->buf[size-1] == '\n');
+    size -= 1; // Remove the newline character from the end of the line
     PyObject *line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
     if (line == NULL) {
         Py_DECREF(str);
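Both C hunks apply the same fix: the tokenizer's line buffer (tok->buf up to tok->inp) always ends in a newline byte, which the asserts document, so shrinking size by one before decoding drops that newline from the decoded line. A Python rendering of the logic, with buf standing in for the C buffer:

# buf mimics the C tokenizer's line buffer, which always ends with b'\n'.
buf = b"x = 1\n"
size = len(buf)
assert buf[size - 1:size] == b"\n"
size -= 1  # remove the newline character from the end of the line
line = buf[:size].decode("utf-8", "replace")
print(repr(line))  # 'x = 1'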
