From 00fe8a7b2317f11bff754c73a87b69445fe0b33d Mon Sep 17 00:00:00 2001 From: Benjamin Peterson Date: Sun, 28 Jun 2009 03:18:59 +0000 Subject: Merged revisions 72912,72920,72940 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ r72912 | benjamin.peterson | 2009-05-25 08:13:44 -0500 (Mon, 25 May 2009) | 5 lines add a SETUP_WITH opcode It speeds up the with statement and correctly looks up the special methods involved. ........ r72920 | benjamin.peterson | 2009-05-25 15:12:57 -0500 (Mon, 25 May 2009) | 1 line take into account the fact that SETUP_WITH pushes a finally block ........ r72940 | benjamin.peterson | 2009-05-26 07:49:59 -0500 (Tue, 26 May 2009) | 1 line teach the peepholer about SETUP_WITH ........ --- Python/peephole.c | 3 +++ 1 file changed, 3 insertions(+) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index de1b2aca0d..23735b0a31 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -251,6 +251,7 @@ markblocks(unsigned char *code, Py_ssize_t len) case SETUP_LOOP: case SETUP_EXCEPT: case SETUP_FINALLY: + case SETUP_WITH: j = GETJUMPTGT(code, i); blocks[j] = 1; break; @@ -566,6 +567,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, case SETUP_LOOP: case SETUP_EXCEPT: case SETUP_FINALLY: + case SETUP_WITH: tgt = GETJUMPTGT(codestr, i); /* Replace JUMP_* to a RETURN into just a RETURN */ if (UNCONDITIONAL_JUMP(opcode) && @@ -648,6 +650,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, case SETUP_LOOP: case SETUP_EXCEPT: case SETUP_FINALLY: + case SETUP_WITH: j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3; SETARG(codestr, i, j); break; -- cgit v1.2.1 From 4002bcf6d0a51bff1bd8ad8e252b17e4c4856019 Mon Sep 17 00:00:00 2001 From: Alexandre Vassalotti Date: Tue, 21 Jul 2009 02:51:58 +0000 Subject: Merged revisions 73683,73786 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ r73683 | georg.brandl | 2009-06-29 10:44:49 -0400 (Mon, 29 Jun 2009) | 1 line Fix error handling in PyCode_Optimize, by Alexander Schremmer at EuroPython sprint. ........ r73786 | benjamin.peterson | 2009-07-02 18:56:16 -0400 (Thu, 02 Jul 2009) | 1 line condense with assertRaises ........ 
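The SETUP_WITH opcode referenced above can be observed from Python code: on CPython builds that compile ``with`` blocks to it (2.7 and roughly 3.2 through 3.10; later interpreters use different opcodes), the dis module shows it as the block-setup instruction that the peephole pass now handles alongside SETUP_LOOP, SETUP_EXCEPT and SETUP_FINALLY. A minimal sketch only; the helper name use_cm is illustrative and exact listings vary by version. The diff for the error-handling merge follows the sketch.

    import dis

    def use_cm(cm):
        with cm:
            pass

    # Expect a SETUP_WITH instruction near the top of the listing on the
    # versions noted above; it is the block-setup opcode that the peephole
    # pass now treats like SETUP_LOOP/SETUP_EXCEPT/SETUP_FINALLY when
    # marking jump targets.
    dis.dis(use_cm)
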
--- Python/peephole.c | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index 23735b0a31..9163091272 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -330,7 +330,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, /* Bail out if an exception is set */ if (PyErr_Occurred()) - goto exitUnchanged; + goto exitError; /* Bypass optimization when the lineno table is too complex */ assert(PyBytes_Check(lineno_obj)); @@ -348,7 +348,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, /* Make a modifiable copy of the code string */ codestr = (unsigned char *)PyMem_Malloc(codelen); if (codestr == NULL) - goto exitUnchanged; + goto exitError; codestr = (unsigned char *)memcpy(codestr, PyBytes_AS_STRING(code), codelen); @@ -363,11 +363,11 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, /* Mapping to new jump targets after NOPs are removed */ addrmap = (int *)PyMem_Malloc(codelen * sizeof(int)); if (addrmap == NULL) - goto exitUnchanged; + goto exitError; blocks = markblocks(codestr, codelen); if (blocks == NULL) - goto exitUnchanged; + goto exitError; assert(PyList_Check(consts)); for (i=0 ; i Date: Thu, 22 Oct 2009 11:22:50 +0000 Subject: Peephole constant folding had missed UNARY_POSITIVE. --- Python/peephole.c | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index 9163091272..104db8cb97 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -197,6 +197,9 @@ fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts) case UNARY_INVERT: newconst = PyNumber_Invert(v); break; + case UNARY_POSITIVE: + newconst = PyNumber_Positive(v); + break; default: /* Called with an unknown opcode */ PyErr_Format(PyExc_SystemError, @@ -500,6 +503,7 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c) */ case UNARY_NEGATIVE: case UNARY_INVERT: + case UNARY_POSITIVE: if (lastlc >= 1 && ISBASICBLOCK(blocks, i-3, 4) && fold_unaryops_on_constants(&codestr[i-3], consts)) { -- cgit v1.2.1 From 21626fd023758b13af68da0aaf036123fd60b6a7 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Sat, 16 Jan 2010 18:37:38 +0000 Subject: Issue #6690: Optimize the bytecode for expressions such as `x in {1, 2, 3}`, where the right hand operand is a set of constants, by turning the set into a frozenset and pre-building it as a constant. The comparison operation is made against the constant instead of building a new set each time it is executed (a similar optimization already existed which turned a list of constants into a pre-built tuple). Patch and additional tests by Dave Malcolm. --- Python/peephole.c | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index 104db8cb97..eeb31d54bd 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -31,7 +31,8 @@ new constant (c1, c2, ... cn) can be appended. Called with codestr pointing to the first LOAD_CONST. Bails out with no change if one or more of the LOAD_CONSTs is missing. - Also works for BUILD_LIST when followed by an "in" or "not in" test. + Also works for BUILD_LIST and BUILT_SET when followed by an "in" or "not in" + test; for BUILD_SET it assembles a frozenset rather than a tuple. 
*/ static int tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts) @@ -41,7 +42,7 @@ tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts) /* Pre-conditions */ assert(PyList_CheckExact(consts)); - assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST); + assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST || codestr[n*3] == BUILD_SET); assert(GETARG(codestr, (n*3)) == n); for (i=0 ; i= 0 && j <= lastlc && ((opcode == BUILD_TUPLE && ISBASICBLOCK(blocks, h, 3*(j+1))) || - (opcode == BUILD_LIST && + ((opcode == BUILD_LIST || opcode == BUILD_SET) && codestr[i+3]==COMPARE_OP && ISBASICBLOCK(blocks, h, 3*(j+2)) && (GETARG(codestr,i+3)==6 || -- cgit v1.2.1 From 567b94adaaceb622c33f9be1998735dfb95f1707 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Sun, 9 May 2010 15:52:27 +0000 Subject: Recorded merge of revisions 81029 via svnmerge from svn+ssh://pythondev@svn.python.org/python/trunk ........ r81029 | antoine.pitrou | 2010-05-09 16:46:46 +0200 (dim., 09 mai 2010) | 3 lines Untabify C files. Will watch buildbots. ........ --- Python/peephole.c | 1232 ++++++++++++++++++++++++++--------------------------- 1 file changed, 616 insertions(+), 616 deletions(-) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index eeb31d54bd..7deb02d267 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -12,271 +12,271 @@ #include "opcode.h" #define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1])) -#define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD) +#define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD) #define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \ - || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP) + || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP) #define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \ - || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \ - || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP) + || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \ + || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP) #define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP) #define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3)) #define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255 #define CODESIZE(op) (HAS_ARG(op) ? 3 : 1) #define ISBASICBLOCK(blocks, start, bytes) \ - (blocks[start]==blocks[start+bytes-1]) + (blocks[start]==blocks[start+bytes-1]) /* Replace LOAD_CONST c1. LOAD_CONST c2 ... LOAD_CONST cn BUILD_TUPLE n - with LOAD_CONST (c1, c2, ... cn). + with LOAD_CONST (c1, c2, ... cn). The consts table must still be in list form so that the new constant (c1, c2, ... cn) can be appended. Called with codestr pointing to the first LOAD_CONST. - Bails out with no change if one or more of the LOAD_CONSTs is missing. + Bails out with no change if one or more of the LOAD_CONSTs is missing. Also works for BUILD_LIST and BUILT_SET when followed by an "in" or "not in" test; for BUILD_SET it assembles a frozenset rather than a tuple. 
*/ static int tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts) { - PyObject *newconst, *constant; - Py_ssize_t i, arg, len_consts; - - /* Pre-conditions */ - assert(PyList_CheckExact(consts)); - assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST || codestr[n*3] == BUILD_SET); - assert(GETARG(codestr, (n*3)) == n); - for (i=0 ; i 20) { - Py_DECREF(newconst); - return 0; - } - - /* Append folded constant into consts table */ - len_consts = PyList_GET_SIZE(consts); - if (PyList_Append(consts, newconst)) { - Py_DECREF(newconst); - return 0; - } - Py_DECREF(newconst); - - /* Write NOP NOP NOP NOP LOAD_CONST newconst */ - memset(codestr, NOP, 4); - codestr[4] = LOAD_CONST; - SETARG(codestr, 4, len_consts); - return 1; + PyObject *newconst, *v, *w; + Py_ssize_t len_consts, size; + int opcode; + + /* Pre-conditions */ + assert(PyList_CheckExact(consts)); + assert(codestr[0] == LOAD_CONST); + assert(codestr[3] == LOAD_CONST); + + /* Create new constant */ + v = PyList_GET_ITEM(consts, GETARG(codestr, 0)); + w = PyList_GET_ITEM(consts, GETARG(codestr, 3)); + opcode = codestr[6]; + switch (opcode) { + case BINARY_POWER: + newconst = PyNumber_Power(v, w, Py_None); + break; + case BINARY_MULTIPLY: + newconst = PyNumber_Multiply(v, w); + break; + case BINARY_TRUE_DIVIDE: + newconst = PyNumber_TrueDivide(v, w); + break; + case BINARY_FLOOR_DIVIDE: + newconst = PyNumber_FloorDivide(v, w); + break; + case BINARY_MODULO: + newconst = PyNumber_Remainder(v, w); + break; + case BINARY_ADD: + newconst = PyNumber_Add(v, w); + break; + case BINARY_SUBTRACT: + newconst = PyNumber_Subtract(v, w); + break; + case BINARY_SUBSCR: + newconst = PyObject_GetItem(v, w); + break; + case BINARY_LSHIFT: + newconst = PyNumber_Lshift(v, w); + break; + case BINARY_RSHIFT: + newconst = PyNumber_Rshift(v, w); + break; + case BINARY_AND: + newconst = PyNumber_And(v, w); + break; + case BINARY_XOR: + newconst = PyNumber_Xor(v, w); + break; + case BINARY_OR: + newconst = PyNumber_Or(v, w); + break; + default: + /* Called with an unknown opcode */ + PyErr_Format(PyExc_SystemError, + "unexpected binary operation %d on a constant", + opcode); + return 0; + } + if (newconst == NULL) { + PyErr_Clear(); + return 0; + } + size = PyObject_Size(newconst); + if (size == -1) + PyErr_Clear(); + else if (size > 20) { + Py_DECREF(newconst); + return 0; + } + + /* Append folded constant into consts table */ + len_consts = PyList_GET_SIZE(consts); + if (PyList_Append(consts, newconst)) { + Py_DECREF(newconst); + return 0; + } + Py_DECREF(newconst); + + /* Write NOP NOP NOP NOP LOAD_CONST newconst */ + memset(codestr, NOP, 4); + codestr[4] = LOAD_CONST; + SETARG(codestr, 4, len_consts); + return 1; } static int fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts) { - PyObject *newconst=NULL, *v; - Py_ssize_t len_consts; - int opcode; - - /* Pre-conditions */ - assert(PyList_CheckExact(consts)); - assert(codestr[0] == LOAD_CONST); - - /* Create new constant */ - v = PyList_GET_ITEM(consts, GETARG(codestr, 0)); - opcode = codestr[3]; - switch (opcode) { - case UNARY_NEGATIVE: - /* Preserve the sign of -0.0 */ - if (PyObject_IsTrue(v) == 1) - newconst = PyNumber_Negative(v); - break; - case UNARY_INVERT: - newconst = PyNumber_Invert(v); - break; - case UNARY_POSITIVE: - newconst = PyNumber_Positive(v); - break; - default: - /* Called with an unknown opcode */ - PyErr_Format(PyExc_SystemError, - "unexpected unary operation %d on a constant", - opcode); - return 0; - } - if (newconst == NULL) { - 
PyErr_Clear(); - return 0; - } - - /* Append folded constant into consts table */ - len_consts = PyList_GET_SIZE(consts); - if (PyList_Append(consts, newconst)) { - Py_DECREF(newconst); - return 0; - } - Py_DECREF(newconst); - - /* Write NOP LOAD_CONST newconst */ - codestr[0] = NOP; - codestr[1] = LOAD_CONST; - SETARG(codestr, 1, len_consts); - return 1; + PyObject *newconst=NULL, *v; + Py_ssize_t len_consts; + int opcode; + + /* Pre-conditions */ + assert(PyList_CheckExact(consts)); + assert(codestr[0] == LOAD_CONST); + + /* Create new constant */ + v = PyList_GET_ITEM(consts, GETARG(codestr, 0)); + opcode = codestr[3]; + switch (opcode) { + case UNARY_NEGATIVE: + /* Preserve the sign of -0.0 */ + if (PyObject_IsTrue(v) == 1) + newconst = PyNumber_Negative(v); + break; + case UNARY_INVERT: + newconst = PyNumber_Invert(v); + break; + case UNARY_POSITIVE: + newconst = PyNumber_Positive(v); + break; + default: + /* Called with an unknown opcode */ + PyErr_Format(PyExc_SystemError, + "unexpected unary operation %d on a constant", + opcode); + return 0; + } + if (newconst == NULL) { + PyErr_Clear(); + return 0; + } + + /* Append folded constant into consts table */ + len_consts = PyList_GET_SIZE(consts); + if (PyList_Append(consts, newconst)) { + Py_DECREF(newconst); + return 0; + } + Py_DECREF(newconst); + + /* Write NOP LOAD_CONST newconst */ + codestr[0] = NOP; + codestr[1] = LOAD_CONST; + SETARG(codestr, 1, len_consts); + return 1; } static unsigned int * markblocks(unsigned char *code, Py_ssize_t len) { - unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int)); - int i,j, opcode, blockcnt = 0; - - if (blocks == NULL) { - PyErr_NoMemory(); - return NULL; - } - memset(blocks, 0, len*sizeof(int)); - - /* Mark labels in the first pass */ - for (i=0 ; i= 255. EXTENDED_ARG can appear before MAKE_FUNCTION; in this case both opcodes are skipped. EXTENDED_ARG preceding any other opcode causes the optimizer to bail. Optimizations are restricted to simple transformations occuring within a - single basic block. All transformations keep the code size the same or - smaller. For those that reduce size, the gaps are initially filled with - NOPs. Later those NOPs are removed and the jump addresses retargeted in + single basic block. All transformations keep the code size the same or + smaller. For those that reduce size, the gaps are initially filled with + NOPs. Later those NOPs are removed and the jump addresses retargeted in a single pass. Line numbering is adjusted accordingly. 
*/ PyObject * PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, PyObject *lineno_obj) { - Py_ssize_t i, j, codelen; - int nops, h, adj; - int tgt, tgttgt, opcode; - unsigned char *codestr = NULL; - unsigned char *lineno; - int *addrmap = NULL; - int new_line, cum_orig_line, last_line, tabsiz; - int cumlc=0, lastlc=0; /* Count runs of consecutive LOAD_CONSTs */ - unsigned int *blocks = NULL; - char *name; - - /* Bail out if an exception is set */ - if (PyErr_Occurred()) - goto exitError; - - /* Bypass optimization when the lineno table is too complex */ - assert(PyBytes_Check(lineno_obj)); - lineno = (unsigned char*)PyBytes_AS_STRING(lineno_obj); - tabsiz = PyBytes_GET_SIZE(lineno_obj); - if (memchr(lineno, 255, tabsiz) != NULL) - goto exitUnchanged; - - /* Avoid situations where jump retargeting could overflow */ - assert(PyBytes_Check(code)); - codelen = PyBytes_GET_SIZE(code); - if (codelen > 32700) - goto exitUnchanged; - - /* Make a modifiable copy of the code string */ - codestr = (unsigned char *)PyMem_Malloc(codelen); - if (codestr == NULL) - goto exitError; - codestr = (unsigned char *)memcpy(codestr, - PyBytes_AS_STRING(code), codelen); - - /* Verify that RETURN_VALUE terminates the codestring. This allows - the various transformation patterns to look ahead several - instructions without additional checks to make sure they are not - looking beyond the end of the code string. - */ - if (codestr[codelen-1] != RETURN_VALUE) - goto exitUnchanged; - - /* Mapping to new jump targets after NOPs are removed */ - addrmap = (int *)PyMem_Malloc(codelen * sizeof(int)); - if (addrmap == NULL) - goto exitError; - - blocks = markblocks(codestr, codelen); - if (blocks == NULL) - goto exitError; - assert(PyList_Check(consts)); - - for (i=0 ; i a is not b - not a in b --> a not in b - not a is not b --> a is b - not a not in b --> a in b - */ - case COMPARE_OP: - j = GETARG(codestr, i); - if (j < 6 || j > 9 || - codestr[i+3] != UNARY_NOT || - !ISBASICBLOCK(blocks,i,4)) - continue; - SETARG(codestr, i, (j^1)); - codestr[i+3] = NOP; - break; - - /* Replace LOAD_GLOBAL/LOAD_NAME None/True/False - with LOAD_CONST None/True/False */ - case LOAD_NAME: - case LOAD_GLOBAL: - j = GETARG(codestr, i); - name = _PyUnicode_AsString(PyTuple_GET_ITEM(names, j)); - h = load_global(codestr, i, name, consts); - if (h < 0) - goto exitError; - else if (h == 0) - continue; - cumlc = lastlc + 1; - break; - - /* Skip over LOAD_CONST trueconst - POP_JUMP_IF_FALSE xx. This improves - "while 1" performance. */ - case LOAD_CONST: - cumlc = lastlc + 1; - j = GETARG(codestr, i); - if (codestr[i+3] != POP_JUMP_IF_FALSE || - !ISBASICBLOCK(blocks,i,6) || - !PyObject_IsTrue(PyList_GET_ITEM(consts, j))) - continue; - memset(codestr+i, NOP, 6); - cumlc = 0; - break; - - /* Try to fold tuples of constants (includes a case for lists and sets - which are only used for "in" and "not in" tests). - Skip over BUILD_SEQN 1 UNPACK_SEQN 1. - Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2. - Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. 
*/ - case BUILD_TUPLE: - case BUILD_LIST: - case BUILD_SET: - j = GETARG(codestr, i); - h = i - 3 * j; - if (h >= 0 && - j <= lastlc && - ((opcode == BUILD_TUPLE && - ISBASICBLOCK(blocks, h, 3*(j+1))) || - ((opcode == BUILD_LIST || opcode == BUILD_SET) && - codestr[i+3]==COMPARE_OP && - ISBASICBLOCK(blocks, h, 3*(j+2)) && - (GETARG(codestr,i+3)==6 || - GETARG(codestr,i+3)==7))) && - tuple_of_constants(&codestr[h], j, consts)) { - assert(codestr[i] == LOAD_CONST); - cumlc = 1; - break; - } - if (codestr[i+3] != UNPACK_SEQUENCE || - !ISBASICBLOCK(blocks,i,6) || - j != GETARG(codestr, i+3)) - continue; - if (j == 1) { - memset(codestr+i, NOP, 6); - } else if (j == 2) { - codestr[i] = ROT_TWO; - memset(codestr+i+1, NOP, 5); - } else if (j == 3) { - codestr[i] = ROT_THREE; - codestr[i+1] = ROT_TWO; - memset(codestr+i+2, NOP, 4); - } - break; - - /* Fold binary ops on constants. - LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */ - case BINARY_POWER: - case BINARY_MULTIPLY: - case BINARY_TRUE_DIVIDE: - case BINARY_FLOOR_DIVIDE: - case BINARY_MODULO: - case BINARY_ADD: - case BINARY_SUBTRACT: - case BINARY_SUBSCR: - case BINARY_LSHIFT: - case BINARY_RSHIFT: - case BINARY_AND: - case BINARY_XOR: - case BINARY_OR: - if (lastlc >= 2 && - ISBASICBLOCK(blocks, i-6, 7) && - fold_binops_on_constants(&codestr[i-6], consts)) { - i -= 2; - assert(codestr[i] == LOAD_CONST); - cumlc = 1; - } - break; - - /* Fold unary ops on constants. - LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c) */ - case UNARY_NEGATIVE: - case UNARY_INVERT: - case UNARY_POSITIVE: - if (lastlc >= 1 && - ISBASICBLOCK(blocks, i-3, 4) && - fold_unaryops_on_constants(&codestr[i-3], consts)) { - i -= 2; - assert(codestr[i] == LOAD_CONST); - cumlc = 1; - } - break; - - /* Simplify conditional jump to conditional jump where the - result of the first test implies the success of a similar - test or the failure of the opposite test. - Arises in code like: - "if a and b:" - "if a or b:" - "a and b or c" - "(a and b) and c" - x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_FALSE_OR_POP z - --> x:JUMP_IF_FALSE_OR_POP z - x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_TRUE_OR_POP z - --> x:POP_JUMP_IF_FALSE y+3 - where y+3 is the instruction following the second test. - */ - case JUMP_IF_FALSE_OR_POP: - case JUMP_IF_TRUE_OR_POP: - tgt = GETJUMPTGT(codestr, i); - j = codestr[tgt]; - if (CONDITIONAL_JUMP(j)) { - /* NOTE: all possible jumps here are - absolute! */ - if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) { - /* The second jump will be - taken iff the first is. */ - tgttgt = GETJUMPTGT(codestr, tgt); - /* The current opcode inherits - its target's stack behaviour */ - codestr[i] = j; - SETARG(codestr, i, tgttgt); - goto reoptimize_current; - } else { - /* The second jump is not taken - if the first is (so jump past - it), and all conditional - jumps pop their argument when - they're not taken (so change - the first jump to pop its - argument when it's taken). 
*/ - if (JUMPS_ON_TRUE(opcode)) - codestr[i] = POP_JUMP_IF_TRUE; - else - codestr[i] = POP_JUMP_IF_FALSE; - SETARG(codestr, i, (tgt + 3)); - goto reoptimize_current; - } - } - /* Intentional fallthrough */ - - /* Replace jumps to unconditional jumps */ - case POP_JUMP_IF_FALSE: - case POP_JUMP_IF_TRUE: - case FOR_ITER: - case JUMP_FORWARD: - case JUMP_ABSOLUTE: - case CONTINUE_LOOP: - case SETUP_LOOP: - case SETUP_EXCEPT: - case SETUP_FINALLY: - case SETUP_WITH: - tgt = GETJUMPTGT(codestr, i); - /* Replace JUMP_* to a RETURN into just a RETURN */ - if (UNCONDITIONAL_JUMP(opcode) && - codestr[tgt] == RETURN_VALUE) { - codestr[i] = RETURN_VALUE; - memset(codestr+i+1, NOP, 2); - continue; - } - if (!UNCONDITIONAL_JUMP(codestr[tgt])) - continue; - tgttgt = GETJUMPTGT(codestr, tgt); - if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */ - opcode = JUMP_ABSOLUTE; - if (!ABSOLUTE_JUMP(opcode)) - tgttgt -= i + 3; /* Calc relative jump addr */ - if (tgttgt < 0) /* No backward relative jumps */ - continue; - codestr[i] = opcode; - SETARG(codestr, i, tgttgt); - break; - - case EXTENDED_ARG: - if (codestr[i+3] != MAKE_FUNCTION) - goto exitUnchanged; - /* don't visit MAKE_FUNCTION as GETARG will be wrong */ - i += 3; - break; - - /* Replace RETURN LOAD_CONST None RETURN with just RETURN */ - /* Remove unreachable JUMPs after RETURN */ - case RETURN_VALUE: - if (i+4 >= codelen) - continue; - if (codestr[i+4] == RETURN_VALUE && - ISBASICBLOCK(blocks,i,5)) - memset(codestr+i+1, NOP, 4); - else if (UNCONDITIONAL_JUMP(codestr[i+1]) && - ISBASICBLOCK(blocks,i,4)) - memset(codestr+i+1, NOP, 3); - break; - } - } - - /* Fixup linenotab */ - for (i=0, nops=0 ; i 32700) + goto exitUnchanged; + + /* Make a modifiable copy of the code string */ + codestr = (unsigned char *)PyMem_Malloc(codelen); + if (codestr == NULL) + goto exitError; + codestr = (unsigned char *)memcpy(codestr, + PyBytes_AS_STRING(code), codelen); + + /* Verify that RETURN_VALUE terminates the codestring. This allows + the various transformation patterns to look ahead several + instructions without additional checks to make sure they are not + looking beyond the end of the code string. + */ + if (codestr[codelen-1] != RETURN_VALUE) + goto exitUnchanged; + + /* Mapping to new jump targets after NOPs are removed */ + addrmap = (int *)PyMem_Malloc(codelen * sizeof(int)); + if (addrmap == NULL) + goto exitError; + + blocks = markblocks(codestr, codelen); + if (blocks == NULL) + goto exitError; + assert(PyList_Check(consts)); + + for (i=0 ; i a is not b + not a in b --> a not in b + not a is not b --> a is b + not a not in b --> a in b + */ + case COMPARE_OP: + j = GETARG(codestr, i); + if (j < 6 || j > 9 || + codestr[i+3] != UNARY_NOT || + !ISBASICBLOCK(blocks,i,4)) + continue; + SETARG(codestr, i, (j^1)); + codestr[i+3] = NOP; + break; + + /* Replace LOAD_GLOBAL/LOAD_NAME None/True/False + with LOAD_CONST None/True/False */ + case LOAD_NAME: + case LOAD_GLOBAL: + j = GETARG(codestr, i); + name = _PyUnicode_AsString(PyTuple_GET_ITEM(names, j)); + h = load_global(codestr, i, name, consts); + if (h < 0) + goto exitError; + else if (h == 0) + continue; + cumlc = lastlc + 1; + break; + + /* Skip over LOAD_CONST trueconst + POP_JUMP_IF_FALSE xx. This improves + "while 1" performance. 
*/ + case LOAD_CONST: + cumlc = lastlc + 1; + j = GETARG(codestr, i); + if (codestr[i+3] != POP_JUMP_IF_FALSE || + !ISBASICBLOCK(blocks,i,6) || + !PyObject_IsTrue(PyList_GET_ITEM(consts, j))) + continue; + memset(codestr+i, NOP, 6); + cumlc = 0; + break; + + /* Try to fold tuples of constants (includes a case for lists and sets + which are only used for "in" and "not in" tests). + Skip over BUILD_SEQN 1 UNPACK_SEQN 1. + Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2. + Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */ + case BUILD_TUPLE: + case BUILD_LIST: + case BUILD_SET: + j = GETARG(codestr, i); + h = i - 3 * j; + if (h >= 0 && + j <= lastlc && + ((opcode == BUILD_TUPLE && + ISBASICBLOCK(blocks, h, 3*(j+1))) || + ((opcode == BUILD_LIST || opcode == BUILD_SET) && + codestr[i+3]==COMPARE_OP && + ISBASICBLOCK(blocks, h, 3*(j+2)) && + (GETARG(codestr,i+3)==6 || + GETARG(codestr,i+3)==7))) && + tuple_of_constants(&codestr[h], j, consts)) { + assert(codestr[i] == LOAD_CONST); + cumlc = 1; + break; + } + if (codestr[i+3] != UNPACK_SEQUENCE || + !ISBASICBLOCK(blocks,i,6) || + j != GETARG(codestr, i+3)) + continue; + if (j == 1) { + memset(codestr+i, NOP, 6); + } else if (j == 2) { + codestr[i] = ROT_TWO; + memset(codestr+i+1, NOP, 5); + } else if (j == 3) { + codestr[i] = ROT_THREE; + codestr[i+1] = ROT_TWO; + memset(codestr+i+2, NOP, 4); + } + break; + + /* Fold binary ops on constants. + LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */ + case BINARY_POWER: + case BINARY_MULTIPLY: + case BINARY_TRUE_DIVIDE: + case BINARY_FLOOR_DIVIDE: + case BINARY_MODULO: + case BINARY_ADD: + case BINARY_SUBTRACT: + case BINARY_SUBSCR: + case BINARY_LSHIFT: + case BINARY_RSHIFT: + case BINARY_AND: + case BINARY_XOR: + case BINARY_OR: + if (lastlc >= 2 && + ISBASICBLOCK(blocks, i-6, 7) && + fold_binops_on_constants(&codestr[i-6], consts)) { + i -= 2; + assert(codestr[i] == LOAD_CONST); + cumlc = 1; + } + break; + + /* Fold unary ops on constants. + LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c) */ + case UNARY_NEGATIVE: + case UNARY_INVERT: + case UNARY_POSITIVE: + if (lastlc >= 1 && + ISBASICBLOCK(blocks, i-3, 4) && + fold_unaryops_on_constants(&codestr[i-3], consts)) { + i -= 2; + assert(codestr[i] == LOAD_CONST); + cumlc = 1; + } + break; + + /* Simplify conditional jump to conditional jump where the + result of the first test implies the success of a similar + test or the failure of the opposite test. + Arises in code like: + "if a and b:" + "if a or b:" + "a and b or c" + "(a and b) and c" + x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_FALSE_OR_POP z + --> x:JUMP_IF_FALSE_OR_POP z + x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_TRUE_OR_POP z + --> x:POP_JUMP_IF_FALSE y+3 + where y+3 is the instruction following the second test. + */ + case JUMP_IF_FALSE_OR_POP: + case JUMP_IF_TRUE_OR_POP: + tgt = GETJUMPTGT(codestr, i); + j = codestr[tgt]; + if (CONDITIONAL_JUMP(j)) { + /* NOTE: all possible jumps here are + absolute! */ + if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) { + /* The second jump will be + taken iff the first is. */ + tgttgt = GETJUMPTGT(codestr, tgt); + /* The current opcode inherits + its target's stack behaviour */ + codestr[i] = j; + SETARG(codestr, i, tgttgt); + goto reoptimize_current; + } else { + /* The second jump is not taken + if the first is (so jump past + it), and all conditional + jumps pop their argument when + they're not taken (so change + the first jump to pop its + argument when it's taken). 
*/ + if (JUMPS_ON_TRUE(opcode)) + codestr[i] = POP_JUMP_IF_TRUE; + else + codestr[i] = POP_JUMP_IF_FALSE; + SETARG(codestr, i, (tgt + 3)); + goto reoptimize_current; + } + } + /* Intentional fallthrough */ + + /* Replace jumps to unconditional jumps */ + case POP_JUMP_IF_FALSE: + case POP_JUMP_IF_TRUE: + case FOR_ITER: + case JUMP_FORWARD: + case JUMP_ABSOLUTE: + case CONTINUE_LOOP: + case SETUP_LOOP: + case SETUP_EXCEPT: + case SETUP_FINALLY: + case SETUP_WITH: + tgt = GETJUMPTGT(codestr, i); + /* Replace JUMP_* to a RETURN into just a RETURN */ + if (UNCONDITIONAL_JUMP(opcode) && + codestr[tgt] == RETURN_VALUE) { + codestr[i] = RETURN_VALUE; + memset(codestr+i+1, NOP, 2); + continue; + } + if (!UNCONDITIONAL_JUMP(codestr[tgt])) + continue; + tgttgt = GETJUMPTGT(codestr, tgt); + if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */ + opcode = JUMP_ABSOLUTE; + if (!ABSOLUTE_JUMP(opcode)) + tgttgt -= i + 3; /* Calc relative jump addr */ + if (tgttgt < 0) /* No backward relative jumps */ + continue; + codestr[i] = opcode; + SETARG(codestr, i, tgttgt); + break; + + case EXTENDED_ARG: + if (codestr[i+3] != MAKE_FUNCTION) + goto exitUnchanged; + /* don't visit MAKE_FUNCTION as GETARG will be wrong */ + i += 3; + break; + + /* Replace RETURN LOAD_CONST None RETURN with just RETURN */ + /* Remove unreachable JUMPs after RETURN */ + case RETURN_VALUE: + if (i+4 >= codelen) + continue; + if (codestr[i+4] == RETURN_VALUE && + ISBASICBLOCK(blocks,i,5)) + memset(codestr+i+1, NOP, 4); + else if (UNCONDITIONAL_JUMP(codestr[i+1]) && + ISBASICBLOCK(blocks,i,4)) + memset(codestr+i+1, NOP, 3); + break; + } + } + + /* Fixup linenotab */ + for (i=0, nops=0 ; i Date: Sun, 22 Aug 2010 08:39:49 +0000 Subject: Issue 8403: Don't mask KeyboardInterrupt during peephole operation. --- Python/peephole.c | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index 7deb02d267..9d06963e2c 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -159,13 +159,16 @@ fold_binops_on_constants(unsigned char *codestr, PyObject *consts) return 0; } if (newconst == NULL) { - PyErr_Clear(); + if(!PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) + PyErr_Clear(); return 0; } size = PyObject_Size(newconst); - if (size == -1) + if (size == -1) { + if (PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) + return 0; PyErr_Clear(); - else if (size > 20) { + } else if (size > 20) { Py_DECREF(newconst); return 0; } @@ -219,7 +222,8 @@ fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts) return 0; } if (newconst == NULL) { - PyErr_Clear(); + if(!PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) + PyErr_Clear(); return 0; } -- cgit v1.2.1 From c16ad2eeb23a20667e832e09ad88e012952d8988 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 30 Nov 2010 09:41:01 +0000 Subject: Remove redundant includes of headers that are already included by Python.h. 
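The Issue 8403 change above refines an existing bail-out path: when folding a constant expression raises, the peepholer clears the error and leaves the original bytecode in place, and the fix only keeps a KeyboardInterrupt delivered at that exact moment from being swallowed along with it. A rough illustration of the bail-out behaviour, assuming a CPython that performs this constant folding (exact listings vary by version); the diff for the header-cleanup commit follows the sketch.

    import dis

    dis.dis(lambda: 2 ** 5)  # usually folded into a single LOAD_CONST 32
    dis.dis(lambda: 1 % 0)   # folding fails with ZeroDivisionError, so the two
                             # LOAD_CONSTs and the modulo opcode stay in place
                             # and the error is raised only when the lambda runs
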
--- Python/peephole.c | 2 -- 1 file changed, 2 deletions(-) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index 9d06963e2c..f972e1611e 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -4,10 +4,8 @@ #include "Python-ast.h" #include "node.h" -#include "pyarena.h" #include "ast.h" #include "code.h" -#include "compile.h" #include "symtable.h" #include "opcode.h" -- cgit v1.2.1 From 1461e055cc52862f93f225a129b332dbd64f193c Mon Sep 17 00:00:00 2001 From: Raymond Hettinger Date: Tue, 15 Mar 2011 14:50:16 -0700 Subject: Issue 11510: Fix BUILD_SET optimizer bug. --- Python/peephole.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'Python/peephole.c') diff --git a/Python/peephole.c b/Python/peephole.c index f972e1611e..6985043917 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -475,7 +475,8 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, } if (codestr[i+3] != UNPACK_SEQUENCE || !ISBASICBLOCK(blocks,i,6) || - j != GETARG(codestr, i+3)) + j != GETARG(codestr, i+3) || + opcode == BUILD_SET) continue; if (j == 1) { memset(codestr+i, NOP, 6); -- cgit v1.2.1
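Two of the set-literal behaviours in this log can be checked from Python: the Issue #6690 commit earlier pre-builds a constant right-hand operand of ``in``/``not in`` as a frozenset, and the Issue 11510 fix above keeps BUILD_SET out of the UNPACK_SEQUENCE rewrite because a set literal deduplicates and has no guaranteed order. A sketch assuming an interpreter recent enough to include both changes; exact listings vary by version.

    import dis

    # Issue #6690: the constant set becomes a single frozenset constant, so no
    # set is rebuilt each time the membership test runs.
    dis.dis(lambda x: x in {1, 2, 3})   # expect LOAD_CONST frozenset({1, 2, 3})

    # Issue 11510: unpacking a set literal must really build the set.
    a, b = {"x", "y"}        # two distinct elements; assignment order unspecified
    try:
        a, b = {"x", "x"}    # the literal collapses to a one-element set ...
    except ValueError:
        pass                 # ... so unpacking two names raises ValueError, which
                             # the pre-fix ROT_TWO rewrite would have skipped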