/* Peephole optimizations for bytecode compiler. */

#include "Python.h"

#include "Python-ast.h"
#include "node.h"
#include "pyarena.h"
#include "ast.h"
#include "code.h"
#include "compile.h"
#include "symtable.h"
#include "opcode.h"

#define GETARG(arr, i) ((int)((arr[i+2]<<8) + arr[i+1]))
#define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD)
#define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
        || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \
        || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \
        || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
#define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
#define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
#define CODESIZE(op) (HAS_ARG(op) ? 3 : 1)
#define ISBASICBLOCK(blocks, start, bytes) \
    (blocks[start]==blocks[start+bytes-1])
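
/* A note on the encoding these macros assume (CPython 2.x bytecode):
   an instruction is either a lone opcode byte or an opcode byte
   followed by a 16-bit little-endian argument, so CODESIZE() is
   always 1 or 3.  For example, given the bytes
   {LOAD_CONST, 0x02, 0x01}, GETARG(arr, 0) yields 0x0102 == 258,
   SETARG(arr, 0, 5) rewrites the two operand bytes in place, and
   GETJUMPTGT() turns a jump's argument into an absolute bytecode
   offset (relative jumps are measured from the end of their own
   3-byte instruction, hence the "i+3"). */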

/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n
   with    LOAD_CONST (c1, c2, ... cn).
   The consts table must still be in list form so that the
   new constant (c1, c2, ... cn) can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Bails out with no change if one or more of the LOAD_CONSTs is missing.
   Also works for BUILD_LIST when followed by an "in" or "not in" test.
*/
static int
tuple_of_constants(unsigned char *codestr, Py_ssize_t n, PyObject *consts)
{
    PyObject *newconst, *constant;
    Py_ssize_t i, arg, len_consts;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
    assert(GETARG(codestr, (n*3)) == n);
    for (i=0 ; i<n ; i++)
        assert(codestr[i*3] == LOAD_CONST);

    /* Build up new tuple of constants */
    newconst = PyTuple_New(n);
    if (newconst == NULL)
        return 0;
    len_consts = PyList_GET_SIZE(consts);
    for (i=0 ; i<n ; i++) {
        arg = GETARG(codestr, (i*3));
        assert(arg < len_consts);
        constant = PyList_GET_ITEM(consts, arg);
        Py_INCREF(constant);
        PyTuple_SET_ITEM(newconst, i, constant);
    }

    /* Append folded constant onto consts */
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOPs over old LOAD_CONSTs and
       add a new LOAD_CONST newconst on top of the BUILD_TUPLE n */
    memset(codestr, NOP, n*3);
    codestr[n*3] = LOAD_CONST;
    SETARG(codestr, (n*3), len_consts);
    return 1;
}
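
/* Example: the literal (1, 2, 3) initially compiles to
       LOAD_CONST 1; LOAD_CONST 2; LOAD_CONST 3; BUILD_TUPLE 3
   and tuple_of_constants() rewrites that span in place to nine NOP
   bytes followed by a single LOAD_CONST of the prebuilt tuple
   (1, 2, 3).  The NOPs are squeezed out later by PyCode_Optimize(). */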

/* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP
   with    LOAD_CONST binop(c1,c2)
   The consts table must still be in list form so that the
   new constant can be appended.
   Called with codestr pointing to the first LOAD_CONST.
   Abandons the transformation if the folding fails (e.g. 1+'a').
   If the new constant is a sequence, only folds when the size
   is below a threshold value.  That keeps pyc files from
   becoming large in the presence of code like:  (None,)*1000.
*/
static int
fold_binops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst, *v, *w;
    Py_ssize_t len_consts, size;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);
    assert(codestr[3] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    w = PyList_GET_ITEM(consts, GETARG(codestr, 3));
    opcode = codestr[6];
    switch (opcode) {
        case BINARY_POWER:
            newconst = PyNumber_Power(v, w, Py_None);
            break;
        case BINARY_MULTIPLY:
            newconst = PyNumber_Multiply(v, w);
            break;
        case BINARY_DIVIDE:
            /* Cannot fold this operation statically since
               the result can depend on the run-time presence
               of the -Qnew flag */
            return 0;
        case BINARY_TRUE_DIVIDE:
            newconst = PyNumber_TrueDivide(v, w);
            break;
        case BINARY_FLOOR_DIVIDE:
            newconst = PyNumber_FloorDivide(v, w);
            break;
        case BINARY_MODULO:
            newconst = PyNumber_Remainder(v, w);
            break;
        case BINARY_ADD:
            newconst = PyNumber_Add(v, w);
            break;
        case BINARY_SUBTRACT:
            newconst = PyNumber_Subtract(v, w);
            break;
        case BINARY_SUBSCR:
            /* #5057: if v is unicode, there might be differences between
               wide and narrow builds in cases like '\U00012345'[0] or
               '\U00012345abcdef'[3], so it's better to skip the optimization
               in order to produce compatible pycs.
            */
            if (PyUnicode_Check(v))
                return 0;
            newconst = PyObject_GetItem(v, w);
            break;
        case BINARY_LSHIFT:
            newconst = PyNumber_Lshift(v, w);
            break;
        case BINARY_RSHIFT:
            newconst = PyNumber_Rshift(v, w);
            break;
        case BINARY_AND:
            newconst = PyNumber_And(v, w);
            break;
        case BINARY_XOR:
            newconst = PyNumber_Xor(v, w);
            break;
        case BINARY_OR:
            newconst = PyNumber_Or(v, w);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected binary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }
    size = PyObject_Size(newconst);
    if (size == -1)
        PyErr_Clear();
    else if (size > 20) {
        Py_DECREF(newconst);
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP NOP NOP NOP LOAD_CONST newconst */
    memset(codestr, NOP, 4);
    codestr[4] = LOAD_CONST;
    SETARG(codestr, 4, len_consts);
    return 1;
}
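
/* Example: "2 ** 10" compiles to LOAD_CONST 2; LOAD_CONST 10;
   BINARY_POWER and is rewritten above to four NOP bytes followed by
   LOAD_CONST 1024.  The size > 20 guard refuses to fold sequence
   results such as (None,)*1000, whose folded constant would bloat
   the pyc file. */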

static int
fold_unaryops_on_constants(unsigned char *codestr, PyObject *consts)
{
    PyObject *newconst=NULL, *v;
    Py_ssize_t len_consts;
    int opcode;

    /* Pre-conditions */
    assert(PyList_CheckExact(consts));
    assert(codestr[0] == LOAD_CONST);

    /* Create new constant */
    v = PyList_GET_ITEM(consts, GETARG(codestr, 0));
    opcode = codestr[3];
    switch (opcode) {
        case UNARY_NEGATIVE:
            /* Preserve the sign of -0.0: 0.0 is false, so its
               negation is left to be computed at run time */
            if (PyObject_IsTrue(v) == 1)
                newconst = PyNumber_Negative(v);
            break;
        case UNARY_CONVERT:
            newconst = PyObject_Repr(v);
            break;
        case UNARY_INVERT:
            newconst = PyNumber_Invert(v);
            break;
        default:
            /* Called with an unknown opcode */
            PyErr_Format(PyExc_SystemError,
                         "unexpected unary operation %d on a constant",
                         opcode);
            return 0;
    }
    if (newconst == NULL) {
        PyErr_Clear();
        return 0;
    }

    /* Append folded constant into consts table */
    len_consts = PyList_GET_SIZE(consts);
    if (PyList_Append(consts, newconst)) {
        Py_DECREF(newconst);
        return 0;
    }
    Py_DECREF(newconst);

    /* Write NOP LOAD_CONST newconst */
    codestr[0] = NOP;
    codestr[1] = LOAD_CONST;
    SETARG(codestr, 1, len_consts);
    return 1;
}
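
/* Example: "-5" compiles to LOAD_CONST 5; UNARY_NEGATIVE and is
   rewritten above to a NOP byte followed by LOAD_CONST -5, while
   "-0.0" is deliberately left unfolded (0.0 is false) so the
   negation still happens at run time and the sign is preserved. */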

static unsigned int *
markblocks(unsigned char *code, Py_ssize_t len)
{
    unsigned int *blocks = PyMem_New(unsigned int, len);
    int i, j, opcode, blockcnt = 0;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, 0, len*sizeof(int));

    /* Mark labels in the first pass */
    for (i=0 ; i<len ; i+=CODESIZE(opcode)) {
        opcode = code[i];
        switch (opcode) {
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                j = GETJUMPTGT(code, i);
                blocks[j] = 1;
                break;
        }
    }
    /* Build block numbers in the second pass */
    for (i=0 ; i<len ; i++) {
        blockcnt += blocks[i];      /* increment blockcnt over labels */
        blocks[i] = blockcnt;
    }
    return blocks;
}
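
/* Example: if the only jump target in a code string is offset 10,
   blocks[] holds 0 for offsets 0..9 and 1 from offset 10 on, so
   ISBASICBLOCK(blocks, start, bytes) is true exactly when no jump
   target lands after `start` inside the inspected byte range. */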

/* Perform basic peephole optimizations on components of a code object.
   The consts object should still be in list form to allow new constants
   to be appended.

   To keep the optimizer simple, it bails out (does nothing) for code
   containing extended arguments or that has a length over 32,700.  That
   allows us to avoid overflow and sign issues.  Likewise, it bails when
   the lineno table has complex encoding for gaps >= 255.

   Optimizations are restricted to simple transformations occurring within a
   single basic block.  All transformations keep the code size the same or
   smaller.  For those that reduce size, the gaps are initially filled with
   NOPs.  Later those NOPs are removed and the jump addresses retargeted in
   a single pass.  Line numbering is adjusted accordingly. */

PyObject *
PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names,
                PyObject *lineno_obj)
{
    Py_ssize_t i, j, codelen;
    int nops, h, adj;
    int tgt, tgttgt, opcode;
    unsigned char *codestr = NULL;
    unsigned char *lineno;
    int *addrmap = NULL;
    int new_line, cum_orig_line, last_line, tabsiz;
    int cumlc=0, lastlc=0;      /* Count runs of consecutive LOAD_CONSTs */
    unsigned int *blocks = NULL;
    char *name;

    /* Bail out if an exception is set */
    if (PyErr_Occurred())
        goto exitError;

    /* Bypass optimization when the lineno table is too complex */
    assert(PyString_Check(lineno_obj));
    lineno = (unsigned char*)PyString_AS_STRING(lineno_obj);
    tabsiz = PyString_GET_SIZE(lineno_obj);
    if (memchr(lineno, 255, tabsiz) != NULL)
        goto exitUnchanged;

    /* Avoid situations where jump retargeting could overflow */
    assert(PyString_Check(code));
    codelen = PyString_GET_SIZE(code);
    if (codelen > 32700)
        goto exitUnchanged;

    /* Make a modifiable copy of the code string */
    codestr = (unsigned char *)PyMem_Malloc(codelen);
    if (codestr == NULL)
        goto exitError;
    codestr = (unsigned char *)memcpy(codestr,
                                      PyString_AS_STRING(code), codelen);

    /* Verify that RETURN_VALUE terminates the codestring.  This allows
       the various transformation patterns to look ahead several
       instructions without additional checks to make sure they are not
       looking beyond the end of the code string.
    */
    if (codestr[codelen-1] != RETURN_VALUE)
        goto exitUnchanged;

    /* Mapping to new jump targets after NOPs are removed */
    addrmap = PyMem_New(int, codelen);
    if (addrmap == NULL) {
        PyErr_NoMemory();
        goto exitError;
    }

    blocks = markblocks(codestr, codelen);
    if (blocks == NULL)
        goto exitError;
    assert(PyList_Check(consts));

    for (i=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
      reoptimize_current:
        opcode = codestr[i];

        lastlc = cumlc;
        cumlc = 0;

        switch (opcode) {
            /* Replace UNARY_NOT POP_JUMP_IF_FALSE
               with    POP_JUMP_IF_TRUE */
            case UNARY_NOT:
                if (codestr[i+1] != POP_JUMP_IF_FALSE
                    || !ISBASICBLOCK(blocks,i,4))
                    continue;
                j = GETARG(codestr, i+1);
                codestr[i] = POP_JUMP_IF_TRUE;
                SETARG(codestr, i, j);
                codestr[i+3] = NOP;
                goto reoptimize_current;

                /* not a is b -->  a is not b
                   not a in b -->  a not in b
                   not a is not b -->  a is b
                   not a not in b -->  a in b
                */
            case COMPARE_OP:
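                /* COMPARE_OP arguments 6..9 select the operators
                   "in", "not in", "is" and "is not" (see cmp_op in
                   opcode.h), so XOR-ing the argument with 1 swaps
                   each operator with its negation, absorbing the
                   trailing UNARY_NOT */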
                j = GETARG(codestr, i);
                if (j < 6 || j > 9 ||
                    codestr[i+3] != UNARY_NOT ||
                    !ISBASICBLOCK(blocks,i,4))
                    continue;
                SETARG(codestr, i, (j^1));
                codestr[i+3] = NOP;
                break;

                /* Replace LOAD_GLOBAL/LOAD_NAME None
                   with LOAD_CONST None */
            case LOAD_NAME:
            case LOAD_GLOBAL:
                j = GETARG(codestr, i);
                name = PyString_AsString(PyTuple_GET_ITEM(names, j));
                if (name == NULL || strcmp(name, "None") != 0)
                    continue;
399 4334 : continue;
400 1465 : for (j=0 ; j < PyList_GET_SIZE(consts) ; j++) {
401 1391 : if (PyList_GET_ITEM(consts, j) == Py_None)
402 254 : break;
403 : }
404 328 : if (j == PyList_GET_SIZE(consts)) {
405 74 : if (PyList_Append(consts, Py_None) == -1)
406 0 : goto exitError;
407 : }
408 : assert(PyList_GET_ITEM(consts, j) == Py_None);
409 328 : codestr[i] = LOAD_CONST;
410 328 : SETARG(codestr, i, j);
411 328 : cumlc = lastlc + 1;
412 328 : break;
413 :
414 : /* Skip over LOAD_CONST trueconst
415 : POP_JUMP_IF_FALSE xx. This improves
416 : "while 1" performance. */
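                /* Example: when the compiler emits the test for a
                   constant-true loop header, the pair
                       LOAD_CONST trueconst; POP_JUMP_IF_FALSE x
                   is blanked to six NOP bytes, so no per-iteration
                   truth test survives. */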
            case LOAD_CONST:
                cumlc = lastlc + 1;
                j = GETARG(codestr, i);
                if (codestr[i+3] != POP_JUMP_IF_FALSE ||
                    !ISBASICBLOCK(blocks,i,6) ||
                    !PyObject_IsTrue(PyList_GET_ITEM(consts, j)))
                    continue;
                memset(codestr+i, NOP, 6);
                cumlc = 0;
                break;

                /* Try to fold tuples of constants (includes a case for lists
                   which are only used for "in" and "not in" tests).
                   Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
                   Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
                   Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
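                /* Example: the swap "a, b = b, a" compiles to
                   LOAD_NAME b; LOAD_NAME a; BUILD_TUPLE 2;
                   UNPACK_SEQUENCE 2, and the final pair collapses
                   into a single ROT_TWO.  In the BUILD_LIST branch
                   below, COMPARE_OP arguments 6 and 7 are the "in"
                   and "not in" operators. */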
            case BUILD_TUPLE:
            case BUILD_LIST:
                j = GETARG(codestr, i);
                h = i - 3 * j;
                if (h >= 0 &&
                    j <= lastlc &&
                    ((opcode == BUILD_TUPLE &&
                      ISBASICBLOCK(blocks, h, 3*(j+1))) ||
                     (opcode == BUILD_LIST &&
                      codestr[i+3]==COMPARE_OP &&
                      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
                      (GETARG(codestr,i+3)==6 ||
                       GETARG(codestr,i+3)==7))) &&
                    tuple_of_constants(&codestr[h], j, consts)) {
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                    break;
                }
                if (codestr[i+3] != UNPACK_SEQUENCE ||
                    !ISBASICBLOCK(blocks,i,6) ||
                    j != GETARG(codestr, i+3))
                    continue;
                if (j == 1) {
                    memset(codestr+i, NOP, 6);
                } else if (j == 2) {
                    codestr[i] = ROT_TWO;
                    memset(codestr+i+1, NOP, 5);
                } else if (j == 3) {
                    codestr[i] = ROT_THREE;
                    codestr[i+1] = ROT_TWO;
                    memset(codestr+i+2, NOP, 4);
                }
                break;

                /* Fold binary ops on constants.
                   LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */
            case BINARY_POWER:
            case BINARY_MULTIPLY:
            case BINARY_TRUE_DIVIDE:
            case BINARY_FLOOR_DIVIDE:
            case BINARY_MODULO:
            case BINARY_ADD:
            case BINARY_SUBTRACT:
            case BINARY_SUBSCR:
            case BINARY_LSHIFT:
            case BINARY_RSHIFT:
            case BINARY_AND:
            case BINARY_XOR:
            case BINARY_OR:
                if (lastlc >= 2 &&
                    ISBASICBLOCK(blocks, i-6, 7) &&
                    fold_binops_on_constants(&codestr[i-6], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

                /* Fold unary ops on constants.
                   LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c1) */
            case UNARY_NEGATIVE:
            case UNARY_CONVERT:
            case UNARY_INVERT:
                if (lastlc >= 1 &&
                    ISBASICBLOCK(blocks, i-3, 4) &&
                    fold_unaryops_on_constants(&codestr[i-3], consts)) {
                    i -= 2;
                    assert(codestr[i] == LOAD_CONST);
                    cumlc = 1;
                }
                break;

                /* Simplify conditional jump to conditional jump where the
                   result of the first test implies the success of a similar
                   test or the failure of the opposite test.
                   Arises in code like:
                   "if a and b:"
                   "if a or b:"
                   "a and b or c"
                   "(a and b) and c"
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_FALSE_OR_POP z
                      -->  x:JUMP_IF_FALSE_OR_POP z
                   x:JUMP_IF_FALSE_OR_POP y   y:JUMP_IF_TRUE_OR_POP z
                      -->  x:POP_JUMP_IF_FALSE y+3
                   where y+3 is the instruction following the second test.
                */
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                tgt = GETJUMPTGT(codestr, i);
                j = codestr[tgt];
                if (CONDITIONAL_JUMP(j)) {
                    /* NOTE: all possible jumps here are absolute! */
                    if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) {
                        /* The second jump will be
                           taken iff the first is. */
                        tgttgt = GETJUMPTGT(codestr, tgt);
                        /* The current opcode inherits
                           its target's stack behaviour */
                        codestr[i] = j;
                        SETARG(codestr, i, tgttgt);
                        goto reoptimize_current;
                    } else {
                        /* The second jump is not taken if the first is (so
                           jump past it), and all conditional jumps pop their
                           argument when they're not taken (so change the
                           first jump to pop its argument when it's taken). */
                        if (JUMPS_ON_TRUE(opcode))
                            codestr[i] = POP_JUMP_IF_TRUE;
                        else
                            codestr[i] = POP_JUMP_IF_FALSE;
                        SETARG(codestr, i, (tgt + 3));
                        goto reoptimize_current;
                    }
                }
                /* Intentional fallthrough */

                /* Replace jumps to unconditional jumps */
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case FOR_ITER:
            case JUMP_FORWARD:
            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
                tgt = GETJUMPTGT(codestr, i);
                /* Replace an unconditional jump to a RETURN with just
                   the RETURN */
                if (UNCONDITIONAL_JUMP(opcode) &&
                    codestr[tgt] == RETURN_VALUE) {
                    codestr[i] = RETURN_VALUE;
                    memset(codestr+i+1, NOP, 2);
                    continue;
                }
                if (!UNCONDITIONAL_JUMP(codestr[tgt]))
                    continue;
                tgttgt = GETJUMPTGT(codestr, tgt);
                if (opcode == JUMP_FORWARD) /* JMP_ABS can go backwards */
                    opcode = JUMP_ABSOLUTE;
                if (!ABSOLUTE_JUMP(opcode))
                    tgttgt -= i + 3;        /* Calc relative jump addr */
                if (tgttgt < 0)             /* No backward relative jumps */
                    continue;
                codestr[i] = opcode;
                SETARG(codestr, i, tgttgt);
                break;

            case EXTENDED_ARG:
                goto exitUnchanged;

                /* Replace RETURN LOAD_CONST None RETURN with just RETURN */
                /* Remove unreachable JUMPs after RETURN */
            case RETURN_VALUE:
                if (i+4 >= codelen)
                    continue;
                if (codestr[i+4] == RETURN_VALUE &&
                    ISBASICBLOCK(blocks,i,5))
                    memset(codestr+i+1, NOP, 4);
                else if (UNCONDITIONAL_JUMP(codestr[i+1]) &&
                         ISBASICBLOCK(blocks,i,4))
                    memset(codestr+i+1, NOP, 3);
                break;
        }
    }

    /* Fixup linenotab */
    for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
        addrmap[i] = i - nops;
        if (codestr[i] == NOP)
            nops++;
    }
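
    /* The lnotab is a sequence of (bytecode delta, line delta) byte
       pairs.  addrmap[] maps each original bytecode offset to its
       offset once the NOPs are squeezed out, so only the bytecode
       deltas (the even entries, despite the variable names below)
       need rewriting. */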
    cum_orig_line = 0;
    last_line = 0;
    for (i=0 ; i < tabsiz ; i+=2) {
        cum_orig_line += lineno[i];
        new_line = addrmap[cum_orig_line];
        assert (new_line - last_line < 255);
        lineno[i] = ((unsigned char)(new_line - last_line));
        last_line = new_line;
    }

    /* Remove NOPs and fixup jump targets */
    for (i=0, h=0 ; i<codelen ; ) {
        opcode = codestr[i];
        switch (opcode) {
            case NOP:
                i++;
                continue;

            case JUMP_ABSOLUTE:
            case CONTINUE_LOOP:
            case POP_JUMP_IF_FALSE:
            case POP_JUMP_IF_TRUE:
            case JUMP_IF_FALSE_OR_POP:
            case JUMP_IF_TRUE_OR_POP:
                j = addrmap[GETARG(codestr, i)];
                SETARG(codestr, i, j);
                break;

            case FOR_ITER:
            case JUMP_FORWARD:
            case SETUP_LOOP:
            case SETUP_EXCEPT:
            case SETUP_FINALLY:
            case SETUP_WITH:
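                /* Relative jumps: map the absolute target through
                   addrmap, then recompute the distance from the end
                   of this (possibly moved) 3-byte instruction */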
                j = addrmap[GETARG(codestr, i) + i + 3] - addrmap[i] - 3;
                SETARG(codestr, i, j);
                break;
        }
        adj = CODESIZE(opcode);
        while (adj--)
            codestr[h++] = codestr[i++];
    }
    assert(h + nops == codelen);

    code = PyString_FromStringAndSize((char *)codestr, h);
    PyMem_Free(addrmap);
    PyMem_Free(codestr);
    PyMem_Free(blocks);
    return code;

  exitError:
    code = NULL;

  exitUnchanged:
    if (blocks != NULL)
        PyMem_Free(blocks);
    if (addrmap != NULL)
        PyMem_Free(addrmap);
    if (codestr != NULL)
        PyMem_Free(codestr);
    Py_XINCREF(code);
    return code;
}