summaryrefslogtreecommitdiff
path: root/Parser/tokenizer.c
diff options
context:
space:
mode:
authorMark Hammond <mhammond@skippinet.com.au>2003-01-14 23:15:22 +0000
committerMark Hammond <mhammond@skippinet.com.au>2003-01-14 23:15:22 +0000
commit9ed819d6e2bf790f1da31868b2f7c3c089f22dc1 (patch)
treedd9c68db3faede955f2314619939e31536601016 /Parser/tokenizer.c
parent6f6fe68acff091143c7651f960fb4bc9fa8c82a0 (diff)
downloadcpython-9ed819d6e2bf790f1da31868b2f7c3c089f22dc1.tar.gz
Fix [ 665014 ] files with long lines and an encoding crash.
Ensure that the 'size' arg is correctly passed to the encoding reader to prevent buffer overflows.
Diffstat (limited to 'Parser/tokenizer.c')
-rw-r--r--Parser/tokenizer.c3
1 file changed, 2 insertions, 1 deletion
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index d7a223ae0f..aaed637a2f 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -346,7 +346,8 @@ fp_readl(char *s, int size, struct tok_state *tok)
PyObject* utf8;
PyObject* buf = tok->decoding_buffer;
if (buf == NULL) {
- PyObject *args = PyTuple_New(0);
+ /* Ask for one less byte so we can terminate it */
+ PyObject *args = Py_BuildValue("(i)", size-1);
if (args == NULL)
return error_ret(tok);
buf = PyObject_Call(tok->decoding_readline, args, NULL);