
/rpython/jit/backend/llgraph/runner.py

https://bitbucket.org/pypy/pypy/

Note: this listing is truncated (the full file is 1589 lines); see the repository URL above for the complete source.

import py, weakref
from rpython.jit.backend import model
from rpython.jit.backend.llgraph import support
from rpython.jit.backend.llsupport import symbolic
from rpython.jit.metainterp.history import AbstractDescr
from rpython.jit.metainterp.history import Const, getkind
from rpython.jit.metainterp.history import INT, REF, FLOAT, VOID
from rpython.jit.metainterp.resoperation import rop
from rpython.jit.metainterp.optimizeopt import intbounds
from rpython.jit.metainterp.optimize import SpeculativeError
from rpython.jit.codewriter import longlong, heaptracker
from rpython.jit.codewriter.effectinfo import EffectInfo
from rpython.rtyper.llinterp import LLInterpreter, LLException
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, rstr
from rpython.rtyper.lltypesystem.lloperation import llop
from rpython.rtyper import rclass
from rpython.rlib.clibffi import FFI_DEFAULT_ABI
from rpython.rlib.rarithmetic import ovfcheck, r_uint, r_ulonglong
from rpython.rlib.objectmodel import Symbolic

class LLAsmInfo(object):
    def __init__(self, lltrace):
        self.ops_offset = None
        self.lltrace = lltrace

class LLTrace(object):
    has_been_freed = False
    invalid = False
    def __init__(self, inputargs, operations):
        # We need to clone the list of operations because the
        # front-end will mutate them under our feet again. We also
        # need to make sure things get freed.
        _cache = {}
        def mapping(box):
            if isinstance(box, Const) or box is None:
                return box
            try:
                newbox = _cache[box]
            except KeyError:
                newbox = _cache[box] = box.__class__()
                if hasattr(box, 'accum') and box.accum:
                    newbox.accum = box.accum
            return newbox
        #
        self.inputargs = map(mapping, inputargs)
        self.operations = []
        for op in operations:
            opnum = op.getopnum()
            if opnum == rop.GUARD_VALUE:
                # we don't care about the value 13 here, because we're going
                # to fish it from the extra slot on the frame anyway
                op.getdescr().make_a_counter_per_value(op, 13)
            if op.getdescr() is not None:
                if op.is_guard() or op.getopnum() == rop.FINISH:
                    newdescr = op.getdescr()
                else:
                    newdescr = WeakrefDescr(op.getdescr())
            else:
                newdescr = None
            newop = op.copy_and_change(op.getopnum(),
                                       map(mapping, op.getarglist()),
                                       newdescr)
            _cache[op] = newop
            if op.getfailargs() is not None:
                newop.setfailargs(map(mapping, op.getfailargs()))
            self.operations.append(newop)

class WeakrefDescr(AbstractDescr):
    def __init__(self, realdescr):
        self.realdescrref = weakref.ref(realdescr)
        self.final_descr = getattr(realdescr, 'final_descr', False)

class ExecutionFinished(Exception):
    def __init__(self, deadframe):
        self.deadframe = deadframe

class Jump(Exception):
    def __init__(self, jump_target, args):
        self.jump_target = jump_target
        self.args = args

class CallDescr(AbstractDescr):
    def __init__(self, RESULT, ARGS, extrainfo, ABI=FFI_DEFAULT_ABI):
        self.RESULT = RESULT
        self.ARGS = ARGS
        self.ABI = ABI
        self.extrainfo = extrainfo
    def __repr__(self):
        return 'CallDescr(%r, %r, %r)' % (self.RESULT, self.ARGS,
                                          self.extrainfo)
    def get_extra_info(self):
        return self.extrainfo
    def get_arg_types(self):
        return ''.join([getkind(ARG)[0] for ARG in self.ARGS])
    def get_result_type(self):
        return getkind(self.RESULT)[0]
    get_normalized_result_type = get_result_type

class TypeIDSymbolic(Symbolic):
    def __init__(self, STRUCT_OR_ARRAY):
        self.STRUCT_OR_ARRAY = STRUCT_OR_ARRAY
    def __eq__(self, other):
        return self.STRUCT_OR_ARRAY is other.STRUCT_OR_ARRAY
    def __ne__(self, other):
        return not self == other

class SizeDescr(AbstractDescr):
    def __init__(self, S, vtable, runner):
        assert not isinstance(vtable, bool)
        self.S = S
        self._vtable = vtable
        self._is_object = bool(vtable)
        self._runner = runner
    def get_all_fielddescrs(self):
        return self.all_fielddescrs
    def is_object(self):
        return self._is_object
    def get_vtable(self):
        assert self._vtable is not None
        if self._vtable is Ellipsis:
            self._vtable = heaptracker.get_vtable_for_gcstruct(self._runner,
                                                               self.S)
        return heaptracker.adr2int(llmemory.cast_ptr_to_adr(self._vtable))
    def is_immutable(self):
        return heaptracker.is_immutable_struct(self.S)
    def get_type_id(self):
        assert isinstance(self.S, lltype.GcStruct)
        return TypeIDSymbolic(self.S)     # integer-like symbolic
    def __repr__(self):
        return 'SizeDescr(%r)' % (self.S,)

class FieldDescr(AbstractDescr):
    def __init__(self, S, fieldname):
        self.S = S
        self.fieldname = fieldname
        self.FIELD = getattr(S, fieldname)
        self.index = heaptracker.get_fielddescr_index_in(S, fieldname)
        self._is_pure = S._immutable_field(fieldname) != False
    def is_always_pure(self):
        return self._is_pure
    def get_parent_descr(self):
        return self.parent_descr
    def get_vinfo(self):
        return self.vinfo
    def get_index(self):
        return self.index
    def __repr__(self):
        return 'FieldDescr(%r, %r)' % (self.S, self.fieldname)
    def sort_key(self):
        return self.fieldname
    def is_pointer_field(self):
        return getkind(self.FIELD) == 'ref'
    def is_float_field(self):
        return getkind(self.FIELD) == 'float'
    def is_field_signed(self):
        return _is_signed_kind(self.FIELD)
    def is_integer_bounded(self):
        return getkind(self.FIELD) == 'int' \
            and rffi.sizeof(self.FIELD) < symbolic.WORD
    def get_integer_min(self):
        if getkind(self.FIELD) != 'int':
            assert False
        return intbounds.get_integer_min(
            not _is_signed_kind(self.FIELD), rffi.sizeof(self.FIELD))
    def get_integer_max(self):
        if getkind(self.FIELD) != 'int':
            assert False
        return intbounds.get_integer_max(
            not _is_signed_kind(self.FIELD), rffi.sizeof(self.FIELD))

def _is_signed_kind(TYPE):
    return (TYPE is not lltype.Bool and isinstance(TYPE, lltype.Number) and
            rffi.cast(TYPE, -1) == -1)

class ArrayDescr(AbstractDescr):
    all_interiorfielddescrs = None
    def __init__(self, A, runner):
        self.A = self.OUTERA = A
        self._is_pure = A._immutable_field(None)
        self.concrete_type = '\x00'
        if isinstance(A, lltype.Struct):
            self.A = A._flds[A._arrayfld]
    def is_array_of_primitives(self):
        kind = getkind(self.A.OF)
        return kind == 'float' or \
               kind == 'int'
    def is_always_pure(self):
        return self._is_pure
    def get_all_fielddescrs(self):
        return self.all_interiorfielddescrs
    def __repr__(self):
        return 'ArrayDescr(%r)' % (self.OUTERA,)
    def is_array_of_pointers(self):
        return getkind(self.A.OF) == 'ref'
    def is_array_of_floats(self):
        return getkind(self.A.OF) == 'float'
    def is_item_signed(self):
        return _is_signed_kind(self.A.OF)
    def is_array_of_structs(self):
        return isinstance(self.A.OF, lltype.Struct)
    def is_item_integer_bounded(self):
        return getkind(self.A.OF) == 'int' \
            and rffi.sizeof(self.A.OF) < symbolic.WORD
    def get_item_size_in_bytes(self):
        return rffi.sizeof(self.A.OF)
    def get_item_integer_min(self):
        if getkind(self.A.OF) != 'int':
            assert False
        return intbounds.get_integer_min(
            not _is_signed_kind(self.A.OF), rffi.sizeof(self.A.OF))
    def get_item_integer_max(self):
        if getkind(self.A.OF) != 'int':
            assert False
        return intbounds.get_integer_max(
            not _is_signed_kind(self.A.OF), rffi.sizeof(self.A.OF))
    def get_type_id(self):
        assert isinstance(self.A, lltype.GcArray)
        return TypeIDSymbolic(self.A)     # integer-like symbolic

class InteriorFieldDescr(AbstractDescr):
    def __init__(self, A, fieldname, runner):
        self.A = A
        self.fieldname = fieldname
        self.FIELD = getattr(A.OF, fieldname)
        self.arraydescr = runner.arraydescrof(A)
        self.fielddescr = runner.fielddescrof(A.OF, fieldname)
    def get_index(self):
        return self.fielddescr.get_index()
    def get_arraydescr(self):
        return self.arraydescr
    def get_field_descr(self):
        return self.fielddescr
    def __repr__(self):
        return 'InteriorFieldDescr(%r, %r)' % (self.A, self.fieldname)
    def sort_key(self):
        return self.fieldname
    def is_pointer_field(self):
        return getkind(self.FIELD) == 'ref'
    def is_float_field(self):
        return getkind(self.FIELD) == 'float'
    def is_integer_bounded(self):
        return getkind(self.FIELD) == 'int' \
            and rffi.sizeof(self.FIELD) < symbolic.WORD
    def get_integer_min(self):
        if getkind(self.FIELD) != 'int':
            assert False
        return intbounds.get_integer_min(
            not _is_signed_kind(self.FIELD), rffi.sizeof(self.FIELD))
    def get_integer_max(self):
        if getkind(self.FIELD) != 'int':
            assert False
        return intbounds.get_integer_max(
            not _is_signed_kind(self.FIELD), rffi.sizeof(self.FIELD))

_example_res = {'v': None,
                'r': lltype.nullptr(llmemory.GCREF.TO),
                'i': 0,
                'f': 0.0}

class LLGraphCPU(model.AbstractCPU):
    from rpython.jit.metainterp.typesystem import llhelper as ts
    supports_floats = True
    supports_longlong = r_uint is not r_ulonglong
    supports_singlefloats = True
    supports_guard_gc_type = True
    translate_support_code = False
    is_llgraph = True
    vector_extension = True
    vector_register_size = 16 # in bytes
    vector_horizontal_operations = True
    vector_pack_slots = True

    def __init__(self, rtyper, stats=None, *ignored_args, **kwds):
        model.AbstractCPU.__init__(self)
        self.rtyper = rtyper
        self.llinterp = LLInterpreter(rtyper)
        self.descrs = {}
        class MiniStats:
            pass
        self.stats = stats or MiniStats()
        self.vinfo_for_tests = kwds.get('vinfo_for_tests', None)
    def stitch_bridge(self, faildescr, target):
        faildescr._llgraph_bridge = target[0].lltrace
    def compile_loop(self, inputargs, operations, looptoken, jd_id=0,
                     unique_id=0, log=True, name='', logger=None):
        clt = model.CompiledLoopToken(self, looptoken.number)
        looptoken.compiled_loop_token = clt
        lltrace = LLTrace(inputargs, operations)
        clt._llgraph_loop = lltrace
        clt._llgraph_alltraces = [lltrace]
        self._record_labels(lltrace)
    def compile_bridge(self, faildescr, inputargs, operations,
                       original_loop_token, log=True, logger=None):
        clt = original_loop_token.compiled_loop_token
        clt.compiling_a_bridge()
        lltrace = LLTrace(inputargs, operations)
        faildescr._llgraph_bridge = lltrace
        clt._llgraph_alltraces.append(lltrace)
        self._record_labels(lltrace)
        return LLAsmInfo(lltrace)
    def _record_labels(self, lltrace):
        for i, op in enumerate(lltrace.operations):
            if op.getopnum() == rop.LABEL:
                _getdescr(op)._llgraph_target = (lltrace, i)
    def invalidate_loop(self, looptoken):
        for trace in looptoken.compiled_loop_token._llgraph_alltraces:
            trace.invalid = True
    def redirect_call_assembler(self, oldlooptoken, newlooptoken):
        oldc = oldlooptoken.compiled_loop_token
        newc = newlooptoken.compiled_loop_token
        oldtrace = oldc._llgraph_loop
        newtrace = newc._llgraph_loop
        OLD = [box.type for box in oldtrace.inputargs]
        NEW = [box.type for box in newtrace.inputargs]
        assert OLD == NEW
        assert not hasattr(oldc, '_llgraph_redirected')
        oldc._llgraph_redirected = newc
        oldc._llgraph_alltraces = newc._llgraph_alltraces
    def free_loop_and_bridges(self, compiled_loop_token):
        for c in compiled_loop_token._llgraph_alltraces:
            c.has_been_freed = True
        compiled_loop_token._llgraph_alltraces = []
        compiled_loop_token._llgraph_loop = None
        model.AbstractCPU.free_loop_and_bridges(self, compiled_loop_token)
    def make_execute_token(self, *argtypes):
        return self._execute_token
    def _execute_token(self, loop_token, *args):
        loopc = loop_token.compiled_loop_token
        while hasattr(loopc, '_llgraph_redirected'):
            loopc = loopc._llgraph_redirected
        lltrace = loopc._llgraph_loop
        frame = LLFrame(self, lltrace.inputargs, args)
        try:
            frame.execute(lltrace)
            assert False
        except ExecutionFinished as e:
            return e.deadframe
    def get_value_direct(self, deadframe, tp, index):
        v = deadframe._extra_value
        if tp == 'i':
            assert lltype.typeOf(v) == lltype.Signed
        elif tp == 'r':
            assert lltype.typeOf(v) == llmemory.GCREF
        elif tp == 'f':
            assert lltype.typeOf(v) == longlong.FLOATSTORAGE
        else:
            assert False
        return v
    def get_int_value(self, deadframe, index):
        v = deadframe._values[index]
        assert lltype.typeOf(v) == lltype.Signed
        return v
    def get_ref_value(self, deadframe, index):
        v = deadframe._values[index]
        assert lltype.typeOf(v) == llmemory.GCREF
        return v
    def get_float_value(self, deadframe, index):
        v = deadframe._values[index]
        assert lltype.typeOf(v) == longlong.FLOATSTORAGE
        return v
    def get_latest_descr(self, deadframe):
        return deadframe._latest_descr
    def grab_exc_value(self, deadframe):
        if deadframe._last_exception is not None:
            result = deadframe._last_exception.args[1]
            gcref = lltype.cast_opaque_ptr(llmemory.GCREF, result)
        else:
            gcref = lltype.nullptr(llmemory.GCREF.TO)
        return gcref
    def force(self, force_token):
        frame = force_token
        assert isinstance(frame, LLFrame)
        assert frame.forced_deadframe is None
        values = []
        for box in frame.force_guard_op.getfailargs():
            if box is not None:
                if box is not frame.current_op:
                    value = frame.env[box]
                else:
                    value = 0 # box.getvalue() # 0 or 0.0 or NULL
            else:
                value = None
            values.append(value)
        frame.forced_deadframe = LLDeadFrame(
            _getdescr(frame.force_guard_op), values)
        return frame.forced_deadframe
    def set_savedata_ref(self, deadframe, data):
        deadframe._saved_data = data
    def get_savedata_ref(self, deadframe):
        assert deadframe._saved_data is not None
        return deadframe._saved_data

    # ------------------------------------------------------------

    def setup_descrs(self):
        all_descrs = []
        for k, v in self.descrs.iteritems():
            v.descr_index = len(all_descrs)
            all_descrs.append(v)
        return all_descrs
    def fetch_all_descrs(self):
        return self.descrs.values()
    def calldescrof(self, FUNC, ARGS, RESULT, effect_info):
        key = ('call', getkind(RESULT),
               tuple([getkind(A) for A in ARGS]),
               effect_info)
        try:
            return self.descrs[key]
        except KeyError:
            descr = CallDescr(RESULT, ARGS, effect_info)
            self.descrs[key] = descr
            return descr
    def sizeof(self, S, vtable=lltype.nullptr(rclass.OBJECT_VTABLE)):
        key = ('size', S)
        try:
            descr = self.descrs[key]
        except KeyError:
            descr = SizeDescr(S, vtable, self)
            self.descrs[key] = descr
            descr.all_fielddescrs = heaptracker.all_fielddescrs(self, S,
                get_field_descr=LLGraphCPU.fielddescrof)
        if descr._is_object and vtable is not Ellipsis:
            assert vtable
            heaptracker.testing_gcstruct2vtable.setdefault(S, vtable)
        return descr
    def fielddescrof(self, S, fieldname):
        key = ('field', S, fieldname)
        try:
            return self.descrs[key]
        except KeyError:
            descr = FieldDescr(S, fieldname)
            self.descrs[key] = descr
            if (isinstance(S, lltype.GcStruct) and
                    heaptracker.has_gcstruct_a_vtable(S)):
                vtable = Ellipsis
            else:
                vtable = None
            descr.parent_descr = self.sizeof(S, vtable)
            if self.vinfo_for_tests is not None:
                descr.vinfo = self.vinfo_for_tests
            return descr
    def arraydescrof(self, A):
        key = ('array', A)
        try:
            return self.descrs[key]
        except KeyError:
            descr = ArrayDescr(A, self)
            self.descrs[key] = descr
            if isinstance(A, lltype.Array) and isinstance(A.OF, lltype.Struct):
                descrs = heaptracker.all_interiorfielddescrs(self,
                    A, get_field_descr=LLGraphCPU.interiorfielddescrof)
                descr.all_interiorfielddescrs = descrs
            return descr
    def interiorfielddescrof(self, A, fieldname):
        key = ('interiorfield', A, fieldname)
        try:
            return self.descrs[key]
        except KeyError:
            descr = InteriorFieldDescr(A, fieldname, self)
            self.descrs[key] = descr
            return descr
    def _calldescr_dynamic_for_tests(self, atypes, rtype,
                                     abiname='FFI_DEFAULT_ABI'):
        # XXX WTF is that and why it breaks all abstractions?
        from rpython.jit.backend.llsupport import ffisupport
        return ffisupport.calldescr_dynamic_for_tests(self, atypes, rtype,
                                                      abiname)
    def calldescrof_dynamic(self, cif_description, extrainfo):
        # XXX WTF, this is happy nonsense
        from rpython.jit.backend.llsupport.ffisupport import get_ffi_type_kind
        from rpython.jit.backend.llsupport.ffisupport import UnsupportedKind
        ARGS = []
        try:
            for itp in range(cif_description.nargs):
                arg = cif_description.atypes[itp]
                kind = get_ffi_type_kind(self, arg)
                if kind != VOID:
                    ARGS.append(support.kind2TYPE[kind[0]])
            RESULT = support.kind2TYPE[get_ffi_type_kind(self, cif_description.rtype)[0]]
        except UnsupportedKind:
            return None
        key = ('call_dynamic', RESULT, tuple(ARGS),
               extrainfo, cif_description.abi)
        try:
            return self.descrs[key]
        except KeyError:
            descr = CallDescr(RESULT, ARGS, extrainfo, ABI=cif_description.abi)
            self.descrs[key] = descr
            return descr
    def check_is_object(self, gcptr):
        """Check if the given, non-null gcptr refers to an rclass.OBJECT
        or not at all (an unrelated GcStruct or a GcArray). Only usable
        in the llgraph backend, or after translation of a real backend."""
        ptr = lltype.normalizeptr(gcptr._obj.container._as_ptr())
        T = lltype.typeOf(ptr).TO
        return heaptracker.has_gcstruct_a_vtable(T) or T is rclass.OBJECT
    def get_actual_typeid(self, gcptr):
        """Fetch the actual typeid of the given gcptr, as an integer.
        Only usable in the llgraph backend, or after translation of a
        real backend. (Here in the llgraph backend, returns a
        TypeIDSymbolic instead of a real integer.)"""
        ptr = lltype.normalizeptr(gcptr._obj.container._as_ptr())
        return TypeIDSymbolic(lltype.typeOf(ptr).TO)

    # ------------------------------------------------------------

    def maybe_on_top_of_llinterp(self, func, args, RESULT):
        ptr = llmemory.cast_int_to_adr(func).ptr
        if hasattr(ptr._obj, 'graph'):
            res = self.llinterp.eval_graph(ptr._obj.graph, args)
        else:
            res = ptr._obj._callable(*args)
        if RESULT is lltype.Void:
            return None
        return support.cast_result(RESULT, res)
    def _do_call(self, func, args_i, args_r, args_f, calldescr):
        TP = llmemory.cast_int_to_adr(func).ptr._obj._TYPE
        args = support.cast_call_args(TP.ARGS, args_i, args_r, args_f)
        return self.maybe_on_top_of_llinterp(func, args, TP.RESULT)
    bh_call_i = _do_call
    bh_call_r = _do_call
    bh_call_f = _do_call
    bh_call_v = _do_call
    def bh_getfield_gc(self, p, descr):
        p = support.cast_arg(lltype.Ptr(descr.S), p)
        return support.cast_result(descr.FIELD, getattr(p, descr.fieldname))
    bh_getfield_gc_i = bh_getfield_gc
    bh_getfield_gc_r = bh_getfield_gc
    bh_getfield_gc_f = bh_getfield_gc
    bh_getfield_raw = bh_getfield_gc
    bh_getfield_raw_i = bh_getfield_raw
    bh_getfield_raw_r = bh_getfield_raw
    bh_getfield_raw_f = bh_getfield_raw
    def bh_setfield_gc(self, p, newvalue, descr):
        p = support.cast_arg(lltype.Ptr(descr.S), p)
        setattr(p, descr.fieldname, support.cast_arg(descr.FIELD, newvalue))
    bh_setfield_gc_i = bh_setfield_gc
    bh_setfield_gc_r = bh_setfield_gc
    bh_setfield_gc_f = bh_setfield_gc
    bh_setfield_raw = bh_setfield_gc
    bh_setfield_raw_i = bh_setfield_raw
    bh_setfield_raw_f = bh_setfield_raw
    def bh_arraylen_gc(self, a, descr):
        array = a._obj.container
        if descr.A is not descr.OUTERA:
            array = getattr(array, descr.OUTERA._arrayfld)
        return array.getlength()
    def bh_getarrayitem_gc(self, a, index, descr):
        a = support.cast_arg(lltype.Ptr(descr.A), a)
        array = a._obj
        assert index >= 0
        return support.cast_result(descr.A.OF, array.getitem(index))
    bh_getarrayitem_gc_pure_i = bh_getarrayitem_gc
    bh_getarrayitem_gc_pure_r = bh_getarrayitem_gc
    bh_getarrayitem_gc_pure_f = bh_getarrayitem_gc
    bh_getarrayitem_gc_i = bh_getarrayitem_gc
    bh_getarrayitem_gc_r = bh_getarrayitem_gc
    bh_getarrayitem_gc_f = bh_getarrayitem_gc
    bh_getarrayitem_raw = bh_getarrayitem_gc
    bh_getarrayitem_raw_i = bh_getarrayitem_raw
    bh_getarrayitem_raw_r = bh_getarrayitem_raw
    bh_getarrayitem_raw_f = bh_getarrayitem_raw
    def bh_setarrayitem_gc(self, a, index, item, descr):
        a = support.cast_arg(lltype.Ptr(descr.A), a)
        array = a._obj
        array.setitem(index, support.cast_arg(descr.A.OF, item))
    bh_setarrayitem_gc_i = bh_setarrayitem_gc
    bh_setarrayitem_gc_r = bh_setarrayitem_gc
    bh_setarrayitem_gc_f = bh_setarrayitem_gc
    bh_setarrayitem_raw = bh_setarrayitem_gc
    bh_setarrayitem_raw_i = bh_setarrayitem_raw
    bh_setarrayitem_raw_r = bh_setarrayitem_raw
    bh_setarrayitem_raw_f = bh_setarrayitem_raw
    def bh_getinteriorfield_gc(self, a, index, descr):
        array = a._obj.container
        return support.cast_result(descr.FIELD,
            getattr(array.getitem(index), descr.fieldname))
    bh_getinteriorfield_gc_i = bh_getinteriorfield_gc
    bh_getinteriorfield_gc_r = bh_getinteriorfield_gc
    bh_getinteriorfield_gc_f = bh_getinteriorfield_gc
    def bh_setinteriorfield_gc(self, a, index, item, descr):
        array = a._obj.container
        setattr(array.getitem(index), descr.fieldname,
                support.cast_arg(descr.FIELD, item))
    bh_setinteriorfield_gc_i = bh_setinteriorfield_gc
    bh_setinteriorfield_gc_r = bh_setinteriorfield_gc
    bh_setinteriorfield_gc_f = bh_setinteriorfield_gc
    def bh_raw_load_i(self, struct, offset, descr):
        ll_p = rffi.cast(rffi.CCHARP, struct)
        ll_p = rffi.cast(lltype.Ptr(descr.A), rffi.ptradd(ll_p, offset))
        value = ll_p[0]
        return support.cast_result(descr.A.OF, value)
    def bh_raw_load_f(self, struct, offset, descr):
        ll_p = rffi.cast(rffi.CCHARP, struct)
        ll_p = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE),
                         rffi.ptradd(ll_p, offset))
        return ll_p[0]
    def bh_raw_load(self, struct, offset, descr):
        if descr.A.OF == lltype.Float:
            return self.bh_raw_load_f(struct, offset, descr)
        else:
            return self.bh_raw_load_i(struct, offset, descr)
    def bh_gc_load_indexed_i(self, struct, index, scale, base_ofs, bytes):
        if bytes == 1: T = rffi.UCHAR
        elif bytes == 2: T = rffi.USHORT
        elif bytes == 4: T = rffi.UINT
        elif bytes == 8: T = rffi.ULONGLONG
        elif bytes == -1: T = rffi.SIGNEDCHAR
        elif bytes == -2: T = rffi.SHORT
        elif bytes == -4: T = rffi.INT
        elif bytes == -8: T = rffi.LONGLONG
        else: raise NotImplementedError(bytes)
        x = llop.gc_load_indexed(T, struct, index, scale, base_ofs)
        return lltype.cast_primitive(lltype.Signed, x)
    def bh_gc_load_indexed_f(self, struct, index, scale, base_ofs, bytes):
        if bytes != 8:
            raise Exception("gc_load_indexed_f is only for 'double'!")
        return llop.gc_load_indexed(longlong.FLOATSTORAGE,
                                    struct, index, scale, base_ofs)
    def bh_increment_debug_counter(self, addr):
        p = rffi.cast(rffi.CArrayPtr(lltype.Signed), addr)
        p[0] += 1
    def unpack_arraydescr_size(self, arraydescr):
        from rpython.jit.backend.llsupport.symbolic import get_array_token
        from rpython.jit.backend.llsupport.descr import get_type_flag, FLAG_SIGNED
        assert isinstance(arraydescr, ArrayDescr)
        basesize, itemsize, _ = get_array_token(arraydescr.A, False)
        flag = get_type_flag(arraydescr.A.OF)
        is_signed = (flag == FLAG_SIGNED)
        return basesize, itemsize, is_signed
    def bh_raw_store_i(self, struct, offset, newvalue, descr):
        ll_p = rffi.cast(rffi.CCHARP, struct)
        ll_p = rffi.cast(lltype.Ptr(descr.A), rffi.ptradd(ll_p, offset))
        if descr.A.OF == lltype.SingleFloat:
            newvalue = longlong.int2singlefloat(newvalue)
        ll_p[0] = rffi.cast(descr.A.OF, newvalue)
    def bh_raw_store_f(self, struct, offset, newvalue, descr):
        ll_p = rffi.cast(rffi.CCHARP, struct)
        ll_p = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE),
                         rffi.ptradd(ll_p, offset))
        ll_p[0] = newvalue
    def bh_raw_store(self, struct, offset, newvalue, descr):
        if descr.A.OF == lltype.Float:
            self.bh_raw_store_f(struct, offset, newvalue, descr)
        else:
            self.bh_raw_store_i(struct, offset, newvalue, descr)
    def bh_newstr(self, length):
        return lltype.cast_opaque_ptr(llmemory.GCREF,
                                      lltype.malloc(rstr.STR, length,
                                                    zero=True))
    def bh_strlen(self, s):
        return s._obj.container.chars.getlength()
    def bh_strgetitem(self, s, item):
        assert item >= 0
        return ord(s._obj.container.chars.getitem(item))
    def bh_strsetitem(self, s, item, v):
        s._obj.container.chars.setitem(item, chr(v))
    def bh_copystrcontent(self, src, dst, srcstart, dststart, length):
        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), src)
        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), dst)
        assert 0 <= srcstart <= srcstart + length <= len(src.chars)
        assert 0 <= dststart <= dststart + length <= len(dst.chars)
        rstr.copy_string_contents(src, dst, srcstart, dststart, length)
    def bh_newunicode(self, length):
        return lltype.cast_opaque_ptr(llmemory.GCREF,
                                      lltype.malloc(rstr.UNICODE, length,
                                                    zero=True))
    def bh_unicodelen(self, string):
        return string._obj.container.chars.getlength()
    def bh_unicodegetitem(self, string, index):
        assert index >= 0
        return ord(string._obj.container.chars.getitem(index))
    def bh_unicodesetitem(self, string, index, newvalue):
        string._obj.container.chars.setitem(index, unichr(newvalue))
    def bh_copyunicodecontent(self, src, dst, srcstart, dststart, length):
        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), src)
        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), dst)
        assert 0 <= srcstart <= srcstart + length <= len(src.chars)
        assert 0 <= dststart <= dststart + length <= len(dst.chars)
        rstr.copy_unicode_contents(src, dst, srcstart, dststart, length)
    def bh_new(self, sizedescr):
        return lltype.cast_opaque_ptr(llmemory.GCREF,
                                      lltype.malloc(sizedescr.S, zero=True))
    def bh_new_with_vtable(self, descr):
        result = lltype.malloc(descr.S, zero=True)
        result_as_objptr = lltype.cast_pointer(rclass.OBJECTPTR, result)
        result_as_objptr.typeptr = support.cast_from_int(rclass.CLASSTYPE,
                                                         descr.get_vtable())
        return lltype.cast_opaque_ptr(llmemory.GCREF, result)
    def bh_new_array(self, length, arraydescr):
        array = lltype.malloc(arraydescr.A, length, zero=True)
        assert getkind(arraydescr.A.OF) != 'ref' # getkind crashes on structs
        return lltype.cast_opaque_ptr(llmemory.GCREF, array)
    def bh_new_array_clear(self, length, arraydescr):
        array = lltype.malloc(arraydescr.A, length, zero=True)
        return lltype.cast_opaque_ptr(llmemory.GCREF, array)
    def bh_classof(self, struct):
        struct = lltype.cast_opaque_ptr(rclass.OBJECTPTR, struct)
        result_adr = llmemory.cast_ptr_to_adr(struct.typeptr)
        return heaptracker.adr2int(result_adr)
    def bh_new_raw_buffer(self, size):
        return lltype.malloc(rffi.CCHARP.TO, size, flavor='raw')

    # vector operations
    vector_arith_code = """
    def bh_vec_{0}_{1}(self, vx, vy, count):
        assert len(vx) == len(vy) == count
        return [_vx {2} _vy for _vx,_vy in zip(vx,vy)]
    """
    exec py.code.Source(vector_arith_code.format('int','add','+')).compile()
    exec py.code.Source(vector_arith_code.format('int','sub','-')).compile()
    exec py.code.Source(vector_arith_code.format('int','mul','*')).compile()
    exec py.code.Source(vector_arith_code.format('int','and','&')).compile()
    exec py.code.Source(vector_arith_code.format('int','or','|')).compile()
    exec py.code.Source(vector_arith_code.format('float','add','+')).compile()
    exec py.code.Source(vector_arith_code.format('float','sub','-')).compile()
    exec py.code.Source(vector_arith_code.format('float','mul','*')).compile()
    exec py.code.Source(vector_arith_code.format('float','truediv','/')).compile()
    exec py.code.Source(vector_arith_code.format('float','eq','==')).compile()
    def bh_vec_float_neg(self, vx, count):
        return [e * -1 for e in vx]
    def bh_vec_float_abs(self, vx, count):
        return [abs(e) for e in vx]
    def bh_vec_float_eq(self, vx, vy, count):
        assert len(vx) == len(vy) == count
        return [_vx == _vy for _vx,_vy in zip(vx,vy)]
    def bh_vec_float_ne(self, vx, vy, count):
        assert len(vx) == len(vy) == count
        return [_vx != _vy for _vx,_vy in zip(vx,vy)]
    bh_vec_int_eq = bh_vec_float_eq
    bh_vec_int_ne = bh_vec_float_ne
    def bh_vec_int_is_true(self, vx, count):
        return map(lambda x: bool(x), vx)
    def bh_vec_int_is_false(self, vx, count):
        return map(lambda x: not bool(x), vx)
    def bh_vec_int_xor(self, vx, vy, count):
        return [int(x) ^ int(y) for x,y in zip(vx,vy)]
    def bh_vec_cast_float_to_singlefloat(self, vx, count):
        from rpython.rlib.rarithmetic import r_singlefloat
        return [longlong.singlefloat2int(r_singlefloat(longlong.getrealfloat(v)))
                for v in vx]
    def bh_vec_cast_singlefloat_to_float(self, vx, count):
        return [longlong.getfloatstorage(float(longlong.int2singlefloat(v)))
                for v in vx]
        # note: the two lines below follow the return statement in the
        # original file and are therefore unreachable dead code
        a = float(a)
        return longlong.getfloatstorage(a)
    def bh_vec_cast_float_to_int(self, vx, count):
        return [int(x) for x in vx]
    def bh_vec_cast_int_to_float(self, vx, count):
        return [float(x) for x in vx]
    def bh_vec_f(self, count):
        return [0.0] * count
    def bh_vec_i(self, count):
        return [0] * count
    def _bh_vec_pack(self, tv, sv, index, count, newcount):
        while len(tv) < newcount: tv.append(None)
        if not isinstance(sv, list):
            tv[index] = sv
            return tv
        for i in range(count):
            tv[index+i] = sv[i]
        return tv
    bh_vec_pack_f = _bh_vec_pack
    bh_vec_pack_i = _bh_vec_pack
    def _bh_vec_unpack(self, vx, index, count, newcount):
        return vx[index:index+count]
    bh_vec_unpack_f = _bh_vec_unpack
    bh_vec_unpack_i = _bh_vec_unpack
    def _bh_vec_expand(self, x, count):
        return [x] * count
    bh_vec_expand_f = _bh_vec_expand
    bh_vec_expand_i = _bh_vec_expand
    def bh_vec_int_signext(self, vx, ext, count):
        return [heaptracker.int_signext(_vx, ext) for _vx in vx]
    def build_getarrayitem(func):
        def method(self, struct, offset, descr, _count):
            values = []
            count = self.vector_register_size // descr.get_item_size_in_bytes()
            assert _count == count
            assert count > 0
            for i in range(count):
                val = func(self, struct, offset + i, descr)
                values.append(val)
            return values
        return method
    bh_vec_getarrayitem_gc_i = build_getarrayitem(bh_getarrayitem_gc)
    bh_vec_getarrayitem_gc_f = build_getarrayitem(bh_getarrayitem_gc)
    bh_vec_getarrayitem_raw_i = build_getarrayitem(bh_getarrayitem_raw)
    bh_vec_getarrayitem_raw_f = build_getarrayitem(bh_getarrayitem_raw)
    del build_getarrayitem
    def _bh_vec_raw_load(self, struct, offset, descr, _count):
        values = []
        stride = descr.get_item_size_in_bytes()
        count = self.vector_register_size // descr.get_item_size_in_bytes()
        assert _count == count
        assert count > 0
        for i in range(count):
            val = self.bh_raw_load(struct, offset + i*stride, descr)
            values.append(val)
        return values
    bh_vec_raw_load_i = _bh_vec_raw_load
    bh_vec_raw_load_f = _bh_vec_raw_load
    def bh_vec_raw_store(self, struct, offset, newvalues, descr, count):
        stride = descr.get_item_size_in_bytes()
        for i,n in enumerate(newvalues):
            self.bh_raw_store(struct, offset + i*stride, n, descr)
    def bh_vec_setarrayitem_raw(self, struct, offset, newvalues, descr, count):
        for i,n in enumerate(newvalues):
            self.bh_setarrayitem_raw(struct, offset + i, n, descr)
    def bh_vec_setarrayitem_gc(self, struct, offset, newvalues, descr, count):
        for i,n in enumerate(newvalues):
            self.bh_setarrayitem_gc(struct, offset + i, n, descr)
    def store_fail_descr(self, deadframe, descr):
        pass # I *think*
    def protect_speculative_field(self, p, fielddescr):
        if not p:
            raise SpeculativeError
        p = p._obj.container._as_ptr()
        try:
            lltype.cast_pointer(lltype.Ptr(fielddescr.S), p)
        except lltype.InvalidCast:
            raise SpeculativeError
    def protect_speculative_array(self, p, arraydescr):
        if not p:
            raise SpeculativeError
        p = p._obj.container
        if lltype.typeOf(p) != arraydescr.A:
            raise SpeculativeError
    def protect_speculative_string(self, p):
        if not p:
            raise SpeculativeError
        p = p._obj.container
        if lltype.typeOf(p) != rstr.STR:
            raise SpeculativeError
    def protect_speculative_unicode(self, p):
        if not p:
            raise SpeculativeError
        p = p._obj.container
        if lltype.typeOf(p) != rstr.UNICODE:
            raise SpeculativeError

class LLDeadFrame(object):
    _TYPE = llmemory.GCREF
    def __init__(self, latest_descr, values,
                 last_exception=None, saved_data=None,
                 extra_value=None):
        self._latest_descr = latest_descr
        self._values = values
        self._last_exception = last_exception
        self._saved_data = saved_data
        self._extra_value = extra_value

class LLFrame(object):
    _TYPE = llmemory.GCREF
    forced_deadframe = None
    overflow_flag = False
    last_exception = None
    force_guard_op = None

    def __init__(self, cpu, argboxes, args):
        self.env = {}
        self.cpu = cpu
        assert len(argboxes) == len(args)
        for box, arg in zip(argboxes, args):
            self.setenv(box, arg)
    def __eq__(self, other):
        # this is here to avoid crashes in 'token == TOKEN_TRACING_RESCALL'
        from rpython.jit.metainterp.virtualizable import TOKEN_NONE
        from rpython.jit.metainterp.virtualizable import TOKEN_TRACING_RESCALL
        if isinstance(other, LLFrame):
            return self is other
        if other == TOKEN_NONE or other == TOKEN_TRACING_RESCALL:
            return False
        assert 0
    def __ne__(self, other):
        return not (self == other)
    def _identityhash(self):
        return hash(self)
    def setenv(self, box, arg):
        if box.is_vector() and box.count > 1:
            if box.datatype == INT:
                for i,a in enumerate(arg):
                    if isinstance(a, bool):
                        arg[i] = int(a)
                assert all([lltype.typeOf(a) == lltype.Signed for a in arg])
            elif box.datatype == FLOAT:
                assert all([lltype.typeOf(a) == longlong.FLOATSTORAGE or \
                            lltype.typeOf(a) == lltype.Signed for a in arg])
            else:
                raise AssertionError(box)
        elif box.type == INT:
            # typecheck the result
            if isinstance(arg, bool):
                arg = int(arg)
            assert lltype.typeOf(arg) == lltype.Signed
        elif box.type == REF:
            assert lltype.typeOf(arg) == llmemory.GCREF
        elif box.type == FLOAT:
            assert lltype.typeOf(arg) == longlong.FLOATSTORAGE
        else:
            raise AssertionError(box)
        #
        self.env[box] = arg
    def lookup(self, arg):
        if isinstance(arg, Const):
            return arg.value
        return self.env[arg]
    def execute(self, lltrace):
        self.lltrace = lltrace
        del lltrace
        i = 0
        while True:
            assert not self.lltrace.has_been_freed
            op = self.lltrace.operations[i]
            args = [self.lookup(arg) for arg in op.getarglist()]
            self.current_op = op # for label
            self.current_index = i
            execute = getattr(self, 'execute_' + op.getopname())
            try:
                resval = execute(_getdescr(op), *args)
            except Jump as j:
                self.lltrace, i = j.jump_target
                if i >= 0:
                    label_op = self.lltrace.operations[i]
                    i += 1
                    targetargs = label_op.getarglist()
                else:
                    targetargs = self.lltrace.inputargs
                    i = 0
                self.do_renaming(targetargs, j.args)
                continue
            if op.type != 'v':
                self.setenv(op, resval)
            else:
                assert resval is None
            i += 1
    def do_renaming(self, newargs, newvalues):
        assert len(newargs) == len(newvalues)
        self.env = {}
        self.framecontent = {}
        for new, newvalue in zip(newargs, newvalues):
            self.setenv(new, newvalue)

    # -----------------------------------------------------

    def _accumulate(self, descr, failargs, values):
        info = descr.rd_vector_info
        while info:
            i = info.getpos_in_failargs()
            value = values[i]
            assert isinstance(value, list)
            if info.accum_operation == '+':
                value = sum(value)
            elif info.accum_operation == '*':
                def prod(acc, x): return acc * x
                value = reduce(prod, value, 1)
            else:
                raise NotImplementedError("accum operator in fail guard")
            values[i] = value
            info = info.next()
    def fail_guard(self, descr, saved_data=None, extra_value=None,
                   propagate_exception=False):
        if not propagate_exception:
            assert self.last_exception is None
        values = []
        for box in self.current_op.getfailargs():
            if box is not None:
                value = self.env[box]
            else:
                value = None
            values.append(value)
        self._accumulate(descr, self.current_op.getfailargs(), values)
        if hasattr(descr, '_llgraph_bridge'):
            if propagate_exception:
                assert (descr._llgraph_bridge.operations[0].opnum in
                        (rop.SAVE_EXC_CLASS, rop.GUARD_EXCEPTION,
                         rop.GUARD_NO_EXCEPTION))
            target = (descr._llgraph_bridge, -1)
            values = [value for value in values if value is not None]
            raise Jump(target, values)
        else:
            raise ExecutionFinished(LLDeadFrame(descr, values,
                                                self.last_exception,
                                                saved_data, extra_value))
    def execute_force_spill(self, _, arg):
        pass
    def execute_finish(self, descr, *args):
        raise ExecutionFinished(LLDeadFrame(descr, args))
    def execute_label(self, descr, *args):
        argboxes = self.current_op.getarglist()
        self.do_renaming(argboxes, args)
    def _test_true(self, arg):
        assert arg in (0, 1)
        return arg
    def _test_false(self, arg):
        assert arg in (0, 1)
        return arg
    def execute_vec_guard_true(self, descr, arg):
        assert isinstance(arg, list)
        if not all(arg):
            self.fail_guard(descr)
    def execute_vec_guard_false(self, descr, arg):
        assert isinstance(arg, list)
        if any(arg):
            self.fail_guard(descr)
    def execute_guard_true(self, descr, arg):
        if not self._test_true(arg):
            self.fail_guard(descr)
    def execute_guard_false(self, descr, arg):
        if self._test_false(arg):
            self.fail_guard(descr)
    def execute_guard_value(self, descr, arg1, arg2):
        if arg1 != arg2:
            self.fail_guard(descr, extra_value=arg1)
    def execute_guard_nonnull(self, descr, arg):
        if not arg:
            self.fail_guard(descr)
    def execute_guard_isnull(self, descr, arg):
        if arg:
            self.fail_guard(descr)
    def execute_guard_class(self, descr, arg, klass):
        value = lltype.cast_opaque_ptr(rclass.OBJECTPTR, arg)
        expected_class = llmemory.cast_adr_to_ptr(
            llmemory.cast_int_to_adr(klass),
            rclass.CLASSTYPE)
        if value.typeptr != expected_class:
            self.fail_guard(descr)
    def execute_guard_nonnull_class(self, descr, arg, klass):
        self.execute_guard_nonnull(descr, arg)
        self.execute_guard_class(descr, arg, klass)
    def execute_guard_gc_type(self, descr, arg, typeid):
        assert isinstance(typeid, TypeIDSymbolic)
        TYPE = arg._obj.container._TYPE
        if TYPE != typeid.STRUCT_OR_ARRAY:
            self.fail_guard(descr)
    def execute_guard_is_object(self, descr, arg):
        TYPE = arg._obj.container._TYPE
        while TYPE is not rclass.OBJECT:
            if not isinstance(TYPE, lltype.GcStruct): # or TYPE is None
                self.fail_guard(descr)
                return
            _, TYPE = TYPE._first_struct()
    def execute_guard_subclass(self, descr, arg, klass):
        value = lltype.cast_opaque_ptr(rclass.OBJECTPTR, arg)
        expected_class = llmemory.cast_adr_to_ptr(
            llmemory.cast_int_to_adr(klass),
            rclass.CLASSTYPE)
        if (expected_class.subclassrange_min
                <= value.typeptr.subclassrange_min
                <= expected_class.subclassrange_max):
            pass
        else:
            self.fail_guard(descr)
    def execute_guard_no_exception(self, descr):
        if self.last_exception is not None:
            self.fail_guard(descr, propagate_exception=True)
    def execute_guard_exception(self, descr, excklass):
        lle = self.last_exception
        if lle is None:
            gotklass = lltype.nullptr(rclass.CLASSTYPE.TO)
        else:
            gotklass = lle.args[0]
        excklass = llmemory.cast_adr_to_ptr(
            llmemory.cast_int_to_adr(excklass),
            rclass.CLASSTYPE)
        if gotklass != excklass:
            self.fail_guard(descr, propagate_exception=True)
        #
        res = lle.args[1]
        self.last_exception = None
        return support.cast_to_ptr(res)
    def execute_guard_not_forced(self, descr):
        if self.forced_deadframe is not None:
            saved_data = self.forced_deadframe._saved_data
            self.fail_guard(descr, saved_data, propagate_exception=True)
        self.force_guard_op = self.current_op
    execute_guard_not_forced_2 = execute_guard_not_forced
    def execute_guard_not_invalidated(self, descr):
        if self.lltrace.invalid:
            self.fail_guard(descr)
    def execute_int_add_ovf(self, _, x, y):
        try:
            z = ovfcheck(x + y)
        except OverflowError:
            ovf = True
            z = 0
        else:
            ovf = False
        self.overflow_flag = ovf
        return z
    def execute_int_sub_ovf(self, _, x, y):
        try:
            z = ovfcheck(x - y)
        except OverflowError:
            ovf = True
            z = 0
        else:
            ovf = False
        self.overflow_flag = ovf
        return z
    def execute_int_mul_ovf(self, _, x, y):
        try:
            z = ovfcheck(x * y)
        except OverflowError:
            ovf = True
            z = 0
        else:
            ovf = False
        self.overflow_flag = ovf
        return z
    def execute_guard_no_overflow(self, descr):
        if self.overflow_flag:
            self.fail_guard(descr)
    def execute_guard_overflow(self, descr):
        if not self.overflow_flag:
            self.fail_guard(descr)
    def execute_jump(self, descr, *args):
        raise Jump(descr._llgraph_target, args)
    def _do_math_sqrt(self, value):
        import math
        y = support.cast_from_floatstorage(lltype.Float, value)
        x = math.sqrt(y)
        return support.cast_to_floatstorage(x)
    def execute_cond_call(self, calldescr, cond, func, *args):
        if not cond:
            return
        # cond_call can't have a return value
        self.execute_call_n(calldescr, func, *args)
    def _execute_call(self, calldescr, func, *args):
        effectinfo = calldescr.get_extra_info()
        if effectinfo is not None and hasattr(effectinfo, 'oopspecindex'):
            oopspecindex = effectinfo.oopspecindex
            if oopspecindex == EffectInfo.OS_MATH_SQRT:
                return self._do_math_sqrt(args[0])
        TP = llmemory.cast_int_to_adr(func).ptr._obj._TYPE
        call_args = support.cast_call_args_in_order(TP.ARGS, args)
        try:
            res = self.cpu.maybe_on_top_of_llinterp(func, call_args, TP.RESULT)
            self.last_exception = None
        except LLException as lle:
            self.last_exception = lle
            res = _example_res[getkind(TP.RESULT)[0]]
        return res
    execute_call_i = _execute_call
    execute_call_r = _execute_call
    execute_call_f = _execute_call
    execute_call_n = _execute_call
    def _execute_call_may_force(self, calldescr, func, *args):
        guard_op = self.lltrace.operations[self.current_index + 1]
        assert guard_op.getopnum() == rop.GUARD_NOT_FORCED
        self.force_guard_op = guard_op
        res = self._execute_call(calldescr, func, *args)
        del self.force_guard_op
        return res
    execute_call_may_force_n = _execute_call_may_force
    execute_call_may_force_r = _execute_call_may_force
    execute_call_may_force_f = _execute_call_may_force
    execute_call_may_force_i = _execute_call_may_force
    def _execute_call_release_gil(self, descr, saveerr, func, *args):
        if hasattr(descr, '_original_func_'):
            func = descr._original_func_ # see pyjitpl.py
            # we want to call the function that does the aroundstate
            # manipulation here (as a hack, instead of really doing
            # the aroundstate manipulation ourselves)
            return self._execute_call_may_force(descr, func, *args)
        guard_op = self.lltrace.operations[self.current_index + 1]
        assert guard_op.getopnum() == rop.GUARD_NOT_FORCED
        self.force_guard_op = guard_op
        call_args = support.cast_call_args_in_order(descr.ARGS, args)
        #
        func_adr = llmemory.cast_int_to_adr(func)
        if hasattr(func_adr.ptr._
