From 83b2185db5d63d2b4764936a03b44cee55134f0c Mon Sep 17 00:00:00 2001
From: sunmy2019 <59365878+sunmy2019@users.noreply.github.com>
Date: Tue, 31 Mar 2026 22:37:16 +0800
Subject: [PATCH 1/2] Fix format specifiers in vgetargskeywordsfast_impl for
 improved error messages

---
 Python/getargs.c | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Python/getargs.c b/Python/getargs.c
index 1bf99fe33c8a35..13e0ae7c1675a0 100644
--- a/Python/getargs.c
+++ b/Python/getargs.c
@@ -2203,7 +2203,7 @@ vgetargskeywordsfast_impl(PyObject *const *args, Py_ssize_t nargs,
         else {
             keyword = PyTuple_GET_ITEM(kwtuple, i - pos);
             PyErr_Format(PyExc_TypeError, "%.200s%s missing required "
-                         "argument '%U' (pos %zd)",
+                         "argument '%U' (pos %d)",
                          (parser->fname == NULL) ? "function" : parser->fname,
                          (parser->fname == NULL) ? "" : "()",
                          keyword, i+1);
@@ -2244,7 +2244,7 @@ vgetargskeywordsfast_impl(PyObject *const *args, Py_ssize_t nargs,
                 /* arg present in tuple and in dict */
                 PyErr_Format(PyExc_TypeError,
                              "argument for %.200s%s given by name ('%U') "
-                             "and position (%zd)",
+                             "and position (%d)",
                              (parser->fname == NULL) ? "function" : parser->fname,
                              (parser->fname == NULL) ? "" : "()",
                              keyword, i+1);

From 90884795e22e60dfcb3504f1d2d292d4eed32838 Mon Sep 17 00:00:00 2001
From: sunmy2019 <59365878+sunmy2019@users.noreply.github.com>
Date: Thu, 2 Apr 2026 05:28:14 +0800
Subject: [PATCH 2/2] Fix f-string syntax error message in tok_get_normal_mode

---
 Parser/lexer/lexer.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c
index cc89f0b9cc9af0..dd5f9e7a8d1ea6 100644
--- a/Parser/lexer/lexer.c
+++ b/Parser/lexer/lexer.c
@@ -1106,7 +1106,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
             tokenizer_mode *the_current_tok = TOK_GET_MODE(tok);
             if (the_current_tok->f_string_quote == quote &&
                 the_current_tok->f_string_quote_size == quote_size) {
-                return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, "f-string: expecting '}'", start));
+                return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, "f-string: expecting '}'"));
             }
         }