gh-140576: Fixed crash produced by lexer in case of dedented zero byte (#140583)
@@ -3183,6 +3183,7 @@ async def f():
 f'__{
     x:d
 }__'""",
+            " a\n\x00",
         ]:
             with self.subTest(case=case):
                 self.assertRaises(tokenize.TokenError, get_tokens, case)
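For readers outside the test suite: the new case can be reproduced directly. `get_tokens` above is a test helper, so the sketch below assumes the usual `io.StringIO` wiring for `tokenize.generate_tokens`.

import io
import tokenize

# The input added by this commit: an indented line followed by a
# dedented NUL byte. Before the fix this could crash the lexer;
# now it raises tokenize.TokenError.
source = " a\n\x00"

try:
    list(tokenize.generate_tokens(io.StringIO(source).readline))
except tokenize.TokenError as exc:
    print("TokenError raised as expected:", exc)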
@@ -0,0 +1,2 @@
+Fixed crash in :func:`tokenize.generate_tokens` in case of
+specific incorrect input. Patch by Mikhail Efimov.
@@ -539,6 +539,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
                 return MAKE_TOKEN(ERRORTOKEN);
             }
         }
+        else if (c == EOF && PyErr_Occurred()) {
+            return MAKE_TOKEN(ERRORTOKEN);
+        }
         else {
             break;
         }
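The key point of the C change: the character reader can return EOF either because input is exhausted or because an error was raised while reading. Before this commit the second case fell through to the plain `break`, and the lexer continued on inconsistent state, which is what crashed on the dedented zero byte. A rough Python model of the patched branch (the names are illustrative stand-ins, not the real C API):

EOF = -1  # stand-in for the C tokenizer's EOF sentinel

def existing_error_check(c):
    # Hypothetical stub for the pre-existing condition whose body is
    # elided in the hunk above; kept only to preserve the shape.
    return False

def after_read(c, error_pending):
    # error_pending models PyErr_Occurred() in the C code.
    if existing_error_check(c):
        return "ERRORTOKEN"
    elif c == EOF and error_pending:
        # Branch added by this commit: an EOF caused by a raised error
        # now yields ERRORTOKEN instead of falling through to break.
        return "ERRORTOKEN"
    else:
        return "break"  # plain end of input: leave the scanning loop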