gh-140576: Fixed crash produced by lexer in case of dedented zero byte (#140583)

Author: Mikhail Efimov
Date: 2025-10-29 16:27:35 +03:00
Committed by: GitHub
Parent: 6ff62ac4fb
Commit: 8706167474

3 changed files with 6 additions and 0 deletions


@@ -3183,6 +3183,7 @@ async def f():
 f'__{
 x:d
 }__'""",
+" a\n\x00",
 ]:
 with self.subTest(case=case):
 self.assertRaises(tokenize.TokenError, get_tokens, case)
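
For reference, a minimal reproduction sketch (not part of the diff): it feeds the new test input to tokenize.generate_tokens. Per the test above, a fixed interpreter raises tokenize.TokenError here, whereas before this change the same input crashed the lexer.

    import io
    import tokenize

    # The input added to the test above: one indented character, a newline,
    # then a NUL byte where the next (dedented) line would start.
    source = " a\n\x00"

    try:
        for token in tokenize.generate_tokens(io.StringIO(source).readline):
            print(token)
    except tokenize.TokenError as exc:
        print("rejected:", exc)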


@@ -0,0 +1,2 @@
+Fixed crash in :func:`tokenize.generate_tokens` in case of
+specific incorrect input. Patch by Mikhail Efimov.


@@ -539,6 +539,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
                     return MAKE_TOKEN(ERRORTOKEN);
                 }
             }
+            else if (c == EOF && PyErr_Occurred()) {
+                return MAKE_TOKEN(ERRORTOKEN);
+            }
             else {
                 break;
             }
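
The new branch returns an error token when c is EOF and a Python exception is already set (PyErr_Occurred()), instead of falling through to the plain break as before, so the pending error is reported rather than the lexer continuing on and hitting the crash this commit fixes. Below is a rough Python-level sketch of that guard pattern; the names are invented for this note and are not CPython internals.

    # Illustrative only: when input ends, check whether an error is already
    # pending before treating it as an ordinary stop.
    class PendingError(Exception):
        pass

    def scan_blanks(chars, pending_error=None):
        """Consume leading blanks from an iterator of characters and return
        the first significant character, or None at a clean end of input."""
        for c in chars:
            if c in (" ", "\t", "\f"):
                continue
            return c
        # End of input: surface a pending error instead of silently stopping.
        if pending_error is not None:
            raise PendingError(pending_error)
        return None

    # An error recorded by the reader (say, a rejected NUL byte) is reported
    # instead of being ignored at end of input.
    print(scan_blanks(iter("   x")))  # -> 'x'
    try:
        scan_blanks(iter("   "), pending_error="NUL byte in source")
    except PendingError as exc:
        print("rejected:", exc)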