
/pypy/jit/backend/llsupport/llmodel.py

https://bitbucket.org/pypy/pypy/
from pypy.rpython.lltypesystem import lltype, llmemory, rffi, rclass, rstr
from pypy.rpython.lltypesystem.lloperation import llop
from pypy.rpython.llinterp import LLInterpreter
from pypy.rpython.annlowlevel import llhelper
from pypy.rlib.objectmodel import we_are_translated, specialize
from pypy.jit.metainterp import history
from pypy.jit.codewriter import heaptracker, longlong
from pypy.jit.backend.model import AbstractCPU
from pypy.jit.backend.llsupport import symbolic
from pypy.jit.backend.llsupport.symbolic import WORD, unroll_basic_sizes
from pypy.jit.backend.llsupport.descr import (
    get_size_descr, get_field_descr, get_array_descr,
    get_call_descr, get_interiorfield_descr, get_dynamic_interiorfield_descr,
    FieldDescr, ArrayDescr, CallDescr, InteriorFieldDescr)
from pypy.jit.backend.llsupport.asmmemmgr import AsmMemoryManager


class AbstractLLCPU(AbstractCPU):
    from pypy.jit.metainterp.typesystem import llhelper as ts

    def __init__(self, rtyper, stats, opts, translate_support_code=False,
                 gcdescr=None):
        assert type(opts) is not bool
        self.opts = opts
        from pypy.jit.backend.llsupport.gc import get_ll_description
        AbstractCPU.__init__(self)
        self.rtyper = rtyper
        self.stats = stats
        self.translate_support_code = translate_support_code
        if translate_support_code:
            translator = rtyper.annotator.translator
        else:
            translator = None
        self.gc_ll_descr = get_ll_description(gcdescr, translator, rtyper)
        if translator and translator.config.translation.gcremovetypeptr:
            self.vtable_offset = None
        else:
            self.vtable_offset, _ = symbolic.get_field_token(rclass.OBJECT,
                                                             'typeptr',
                                                        translate_support_code)
        self._setup_prebuilt_error('ovf', OverflowError)
        self._setup_prebuilt_error('zer', ZeroDivisionError)
        if translate_support_code:
            self._setup_exception_handling_translated()
        else:
            self._setup_exception_handling_untranslated()
        self.saved_exc_value = lltype.nullptr(llmemory.GCREF.TO)
        self.asmmemmgr = AsmMemoryManager()
        self.setup()
        if translate_support_code:
            self._setup_on_leave_jitted_translated()
        else:
            self._setup_on_leave_jitted_untranslated()

    def setup(self):
        pass

    def _setup_prebuilt_error(self, prefix, Class):
        if self.rtyper is not None:   # normal case
            bk = self.rtyper.annotator.bookkeeper
            clsdef = bk.getuniqueclassdef(Class)
            ll_inst = self.rtyper.exceptiondata.get_standard_ll_exc_instance(
                self.rtyper, clsdef)
        else:
            # for tests, a random emulated ll_inst will do
            ll_inst = lltype.malloc(rclass.OBJECT)
            ll_inst.typeptr = lltype.malloc(rclass.OBJECT_VTABLE,
                                            immortal=True)
        setattr(self, '_%s_error_vtable' % prefix,
                llmemory.cast_ptr_to_adr(ll_inst.typeptr))
        setattr(self, '_%s_error_inst' % prefix, ll_inst)

    def _setup_exception_handling_untranslated(self):
        # for running un-translated only, all exceptions occurring in the
        # llinterpreter are stored in '_exception_emulator', which is then
        # read back by the machine code reading at the address given by
        # pos_exception() and pos_exc_value().
        _exception_emulator = lltype.malloc(rffi.CArray(lltype.Signed), 2,
                                            zero=True, flavor='raw',
                                            immortal=True)
        self._exception_emulator = _exception_emulator

        def _store_exception(lle):
            self._last_exception = lle       # keepalive
            tp_i = rffi.cast(lltype.Signed, lle.args[0])
            v_i = rffi.cast(lltype.Signed, lle.args[1])
            _exception_emulator[0] = tp_i
            _exception_emulator[1] = v_i

        self.debug_ll_interpreter = LLInterpreter(self.rtyper)
        self.debug_ll_interpreter._store_exception = _store_exception

        def pos_exception():
            return rffi.cast(lltype.Signed, _exception_emulator)

        def pos_exc_value():
            return (rffi.cast(lltype.Signed, _exception_emulator) +
                    rffi.sizeof(lltype.Signed))

        def save_exception():
            # copy from _exception_emulator to the real attributes on self
            v_i = _exception_emulator[1]
            _exception_emulator[0] = 0
            _exception_emulator[1] = 0
            self.saved_exc_value = rffi.cast(llmemory.GCREF, v_i)

        def save_exception_memoryerr():
            save_exception()
            if not self.saved_exc_value:
                self.saved_exc_value = "memoryerror!"    # for tests

        self.pos_exception = pos_exception
        self.pos_exc_value = pos_exc_value
        self.save_exception = save_exception
        self.save_exception_memoryerr = save_exception_memoryerr
        self.insert_stack_check = lambda: (0, 0, 0)
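
    # A minimal sketch (assuming an untranslated 'cpu' instance set up as
    # above) of how the raw two-word emulator can be read back through the
    # addresses returned by pos_exception() / pos_exc_value():
    #
    #     base = cpu.pos_exception()
    #     assert cpu.pos_exc_value() == base + rffi.sizeof(lltype.Signed)
    #     words = rffi.cast(rffi.CArrayPtr(lltype.Signed), base)
    #     exc_type_as_int, exc_value_as_int = words[0], words[1]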

    def _setup_exception_handling_translated(self):

        def pos_exception():
            addr = llop.get_exception_addr(llmemory.Address)
            return heaptracker.adr2int(addr)

        def pos_exc_value():
            addr = llop.get_exc_value_addr(llmemory.Address)
            return heaptracker.adr2int(addr)

        def save_exception():
            addr = llop.get_exception_addr(llmemory.Address)
            addr.address[0] = llmemory.NULL
            addr = llop.get_exc_value_addr(llmemory.Address)
            exc_value = rffi.cast(llmemory.GCREF, addr.address[0])
            addr.address[0] = llmemory.NULL
            # from now on, the state is again consistent -- no more RPython
            # exception is set.  The following code produces a write barrier
            # in the assignment to self.saved_exc_value, as needed.
            self.saved_exc_value = exc_value

        def save_exception_memoryerr():
            from pypy.rpython.annlowlevel import cast_instance_to_base_ptr
            save_exception()
            if not self.saved_exc_value:
                exc = MemoryError()
                exc = cast_instance_to_base_ptr(exc)
                exc = lltype.cast_opaque_ptr(llmemory.GCREF, exc)
                self.saved_exc_value = exc

        from pypy.rlib import rstack
        STACK_CHECK_SLOWPATH = lltype.Ptr(lltype.FuncType([lltype.Signed],
                                                          lltype.Void))

        def insert_stack_check():
            endaddr = rstack._stack_get_end_adr()
            lengthaddr = rstack._stack_get_length_adr()
            f = llhelper(STACK_CHECK_SLOWPATH, rstack.stack_check_slowpath)
            slowpathaddr = rffi.cast(lltype.Signed, f)
            return endaddr, lengthaddr, slowpathaddr

        self.pos_exception = pos_exception
        self.pos_exc_value = pos_exc_value
        self.save_exception = save_exception
        self.save_exception_memoryerr = save_exception_memoryerr
        self.insert_stack_check = insert_stack_check

    def _setup_on_leave_jitted_untranslated(self):
        # assume we don't need a backend leave in this case
        self.on_leave_jitted_save_exc = self.save_exception
        self.on_leave_jitted_memoryerr = self.save_exception_memoryerr
        self.on_leave_jitted_noexc = lambda: None

    def _setup_on_leave_jitted_translated(self):
        on_leave_jitted_hook = self.get_on_leave_jitted_hook()
        save_exception = self.save_exception
        save_exception_memoryerr = self.save_exception_memoryerr

        def on_leave_jitted_noexc():
            on_leave_jitted_hook()

        def on_leave_jitted_save_exc():
            save_exception()
            on_leave_jitted_hook()

        def on_leave_jitted_memoryerr():
            save_exception_memoryerr()
            on_leave_jitted_hook()

        self.on_leave_jitted_noexc = on_leave_jitted_noexc
        self.on_leave_jitted_save_exc = on_leave_jitted_save_exc
        self.on_leave_jitted_memoryerr = on_leave_jitted_memoryerr

    def get_on_leave_jitted_hook(self):
        return lambda: None

    _ON_JIT_LEAVE_FUNC = lltype.Ptr(lltype.FuncType([], lltype.Void))

    def get_on_leave_jitted_int(self, save_exception,
                                default_to_memoryerror=False):
        if default_to_memoryerror:
            f = llhelper(self._ON_JIT_LEAVE_FUNC, self.on_leave_jitted_memoryerr)
        elif save_exception:
            f = llhelper(self._ON_JIT_LEAVE_FUNC, self.on_leave_jitted_save_exc)
        else:
            f = llhelper(self._ON_JIT_LEAVE_FUNC, self.on_leave_jitted_noexc)
        return rffi.cast(lltype.Signed, f)

    def grab_exc_value(self):
        exc = self.saved_exc_value
        self.saved_exc_value = lltype.nullptr(llmemory.GCREF.TO)
        return exc

    def free_loop_and_bridges(self, compiled_loop_token):
        AbstractCPU.free_loop_and_bridges(self, compiled_loop_token)
        blocks = compiled_loop_token.asmmemmgr_blocks
        if blocks is not None:
            compiled_loop_token.asmmemmgr_blocks = None
            for rawstart, rawstop in blocks:
                self.gc_ll_descr.freeing_block(rawstart, rawstop)
                self.asmmemmgr.free(rawstart, rawstop)

    # ------------------- helpers and descriptions --------------------

    @staticmethod
    def _cast_int_to_gcref(x):
        # dangerous!  only use if you are sure no collection could occur
        # between reading the integer and casting it to a pointer
        return rffi.cast(llmemory.GCREF, x)

    @staticmethod
    def cast_gcref_to_int(x):
        return rffi.cast(lltype.Signed, x)

    @staticmethod
    def cast_int_to_adr(x):
        return rffi.cast(llmemory.Address, x)

    @staticmethod
    def cast_adr_to_int(x):
        return rffi.cast(lltype.Signed, x)

    def sizeof(self, S):
        return get_size_descr(self.gc_ll_descr, S)

    def fielddescrof(self, STRUCT, fieldname):
        return get_field_descr(self.gc_ll_descr, STRUCT, fieldname)

    def unpack_fielddescr(self, fielddescr):
        assert isinstance(fielddescr, FieldDescr)
        return fielddescr.offset
    unpack_fielddescr._always_inline_ = True

    def unpack_fielddescr_size(self, fielddescr):
        assert isinstance(fielddescr, FieldDescr)
        ofs = fielddescr.offset
        size = fielddescr.field_size
        sign = fielddescr.is_field_signed()
        return ofs, size, sign
    unpack_fielddescr_size._always_inline_ = True

    def arraydescrof(self, A):
        return get_array_descr(self.gc_ll_descr, A)

    def interiorfielddescrof(self, A, fieldname):
        return get_interiorfield_descr(self.gc_ll_descr, A, fieldname)

    def interiorfielddescrof_dynamic(self, offset, width, fieldsize,
                                     is_pointer, is_float, is_signed):
        return get_dynamic_interiorfield_descr(self.gc_ll_descr,
                                               offset, width, fieldsize,
                                               is_pointer, is_float, is_signed)

    def unpack_arraydescr(self, arraydescr):
        assert isinstance(arraydescr, ArrayDescr)
        return arraydescr.basesize
    unpack_arraydescr._always_inline_ = True

    def unpack_arraydescr_size(self, arraydescr):
        assert isinstance(arraydescr, ArrayDescr)
        ofs = arraydescr.basesize
        size = arraydescr.itemsize
        sign = arraydescr.is_item_signed()
        return ofs, size, sign
    unpack_arraydescr_size._always_inline_ = True
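
    # A minimal sketch (illustrative only) of how a backend consumes these
    # unpacked descrs when emitting a memory access:
    #
    #     ofs, size, sign = cpu.unpack_fielddescr_size(fielddescr)
    #     ofs, itemsize, sign = cpu.unpack_arraydescr_size(arraydescr)
    #
    # i.e. load or store 'size'/'itemsize' bytes at byte offset 'ofs',
    # sign- or zero-extending integer loads according to 'sign'.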

    def calldescrof(self, FUNC, ARGS, RESULT, extrainfo):
        return get_call_descr(self.gc_ll_descr, ARGS, RESULT, extrainfo)

    def calldescrof_dynamic(self, ffi_args, ffi_result, extrainfo, ffi_flags):
        from pypy.jit.backend.llsupport import ffisupport
        return ffisupport.get_call_descr_dynamic(self, ffi_args, ffi_result,
                                                 extrainfo, ffi_flags)

    def get_overflow_error(self):
        ovf_vtable = self.cast_adr_to_int(self._ovf_error_vtable)
        ovf_inst = lltype.cast_opaque_ptr(llmemory.GCREF,
                                          self._ovf_error_inst)
        return ovf_vtable, ovf_inst

    def get_zero_division_error(self):
        zer_vtable = self.cast_adr_to_int(self._zer_error_vtable)
        zer_inst = lltype.cast_opaque_ptr(llmemory.GCREF,
                                          self._zer_error_inst)
        return zer_vtable, zer_inst

    # ____________________________________________________________

    def bh_arraylen_gc(self, arraydescr, array):
        assert isinstance(arraydescr, ArrayDescr)
        ofs = arraydescr.lendescr.offset
        return rffi.cast(rffi.CArrayPtr(lltype.Signed), array)[ofs/WORD]

    @specialize.argtype(2)
    def bh_getarrayitem_gc_i(self, arraydescr, gcref, itemindex):
        ofs, size, sign = self.unpack_arraydescr_size(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        for STYPE, UTYPE, itemsize in unroll_basic_sizes:
            if size == itemsize:
                if sign:
                    items = rffi.cast(rffi.CArrayPtr(STYPE), items)
                    val = items[itemindex]
                    val = rffi.cast(lltype.Signed, val)
                else:
                    items = rffi.cast(rffi.CArrayPtr(UTYPE), items)
                    val = items[itemindex]
                    val = rffi.cast(lltype.Signed, val)
                # --- end of GC unsafe code ---
                return val
        else:
            raise NotImplementedError("size = %d" % size)

    def bh_getarrayitem_gc_r(self, arraydescr, gcref, itemindex):
        ofs = self.unpack_arraydescr(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        pval = self._cast_int_to_gcref(items[itemindex])
        # --- end of GC unsafe code ---
        return pval

    @specialize.argtype(2)
    def bh_getarrayitem_gc_f(self, arraydescr, gcref, itemindex):
        ofs = self.unpack_arraydescr(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        fval = items[itemindex]
        # --- end of GC unsafe code ---
        return fval

    @specialize.argtype(2)
    def bh_setarrayitem_gc_i(self, arraydescr, gcref, itemindex, newvalue):
        ofs, size, sign = self.unpack_arraydescr_size(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        for TYPE, _, itemsize in unroll_basic_sizes:
            if size == itemsize:
                items = rffi.cast(rffi.CArrayPtr(TYPE), items)
                items[itemindex] = rffi.cast(TYPE, newvalue)
                # --- end of GC unsafe code ---
                return
        else:
            raise NotImplementedError("size = %d" % size)

    def bh_setarrayitem_gc_r(self, arraydescr, gcref, itemindex, newvalue):
        ofs = self.unpack_arraydescr(arraydescr)
        self.gc_ll_descr.do_write_barrier(gcref, newvalue)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        items[itemindex] = self.cast_gcref_to_int(newvalue)
        # --- end of GC unsafe code ---

    @specialize.argtype(2)
    def bh_setarrayitem_gc_f(self, arraydescr, gcref, itemindex, newvalue):
        ofs = self.unpack_arraydescr(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        items[itemindex] = newvalue
        # --- end of GC unsafe code ---

    bh_setarrayitem_raw_i = bh_setarrayitem_gc_i
    bh_setarrayitem_raw_f = bh_setarrayitem_gc_f

    bh_getarrayitem_raw_i = bh_getarrayitem_gc_i
    bh_getarrayitem_raw_f = bh_getarrayitem_gc_f

    def bh_getinteriorfield_gc_i(self, gcref, itemindex, descr):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        fieldsize = descr.fielddescr.field_size
        sign = descr.fielddescr.is_field_signed()
        fullofs = itemindex * size + ofs
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), fullofs)
        for STYPE, UTYPE, itemsize in unroll_basic_sizes:
            if fieldsize == itemsize:
                if sign:
                    item = rffi.cast(rffi.CArrayPtr(STYPE), items)
                    val = item[0]
                    val = rffi.cast(lltype.Signed, val)
                else:
                    item = rffi.cast(rffi.CArrayPtr(UTYPE), items)
                    val = item[0]
                    val = rffi.cast(lltype.Signed, val)
                # --- end of GC unsafe code ---
                return val
        else:
            raise NotImplementedError("size = %d" % fieldsize)

    def bh_getinteriorfield_gc_r(self, gcref, itemindex, descr):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs +
                            size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        pval = self._cast_int_to_gcref(items[0])
        # --- end of GC unsafe code ---
        return pval

    def bh_getinteriorfield_gc_f(self, gcref, itemindex, descr):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs +
                            size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        fval = items[0]
        # --- end of GC unsafe code ---
        return fval

    def bh_setinteriorfield_gc_i(self, gcref, itemindex, descr, value):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        fieldsize = descr.fielddescr.field_size
        ofs = itemindex * size + ofs
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        for TYPE, _, itemsize in unroll_basic_sizes:
            if fieldsize == itemsize:
                items = rffi.cast(rffi.CArrayPtr(TYPE), items)
                items[0] = rffi.cast(TYPE, value)
                # --- end of GC unsafe code ---
                return
        else:
            raise NotImplementedError("size = %d" % fieldsize)

    def bh_setinteriorfield_gc_r(self, gcref, itemindex, descr, newvalue):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        self.gc_ll_descr.do_write_barrier(gcref, newvalue)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref),
                            ofs + size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        items[0] = self.cast_gcref_to_int(newvalue)
        # --- end of GC unsafe code ---

    def bh_setinteriorfield_gc_f(self, gcref, itemindex, descr, newvalue):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref),
                            ofs + size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        items[0] = newvalue
        # --- end of GC unsafe code ---

    def bh_strlen(self, string):
        s = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), string)
        return len(s.chars)

    def bh_unicodelen(self, string):
        u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
        return len(u.chars)

    def bh_strgetitem(self, string, index):
        s = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), string)
        return ord(s.chars[index])

    def bh_unicodegetitem(self, string, index):
        u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
        return ord(u.chars[index])

    @specialize.argtype(1)
    def _base_do_getfield_i(self, struct, fielddescr):
        ofs, size, sign = self.unpack_fielddescr_size(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        for STYPE, UTYPE, itemsize in unroll_basic_sizes:
            if size == itemsize:
                # Note that in the common case where size==sizeof(Signed),
                # both cases of what follows are doing the same thing.
                # But gcc is clever enough to figure this out :-)
                if sign:
                    val = rffi.cast(rffi.CArrayPtr(STYPE), fieldptr)[0]
                    val = rffi.cast(lltype.Signed, val)
                else:
                    val = rffi.cast(rffi.CArrayPtr(UTYPE), fieldptr)[0]
                    val = rffi.cast(lltype.Signed, val)
                # --- end of GC unsafe code ---
                return val
        else:
            raise NotImplementedError("size = %d" % size)

    @specialize.argtype(1)
    def _base_do_getfield_r(self, struct, fielddescr):
        ofs = self.unpack_fielddescr(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        pval = rffi.cast(rffi.CArrayPtr(lltype.Signed), fieldptr)[0]
        pval = self._cast_int_to_gcref(pval)
        # --- end of GC unsafe code ---
        return pval

    @specialize.argtype(1)
    def _base_do_getfield_f(self, struct, fielddescr):
        ofs = self.unpack_fielddescr(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        fval = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), fieldptr)[0]
        # --- end of GC unsafe code ---
        return fval

    bh_getfield_gc_i = _base_do_getfield_i
    bh_getfield_gc_r = _base_do_getfield_r
    bh_getfield_gc_f = _base_do_getfield_f

    bh_getfield_raw_i = _base_do_getfield_i
    bh_getfield_raw_r = _base_do_getfield_r
    bh_getfield_raw_f = _base_do_getfield_f

    @specialize.argtype(1)
    def _base_do_setfield_i(self, struct, fielddescr, newvalue):
        ofs, size, sign = self.unpack_fielddescr_size(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        for TYPE, _, itemsize in unroll_basic_sizes:
            if size == itemsize:
                fieldptr = rffi.cast(rffi.CArrayPtr(TYPE), fieldptr)
                fieldptr[0] = rffi.cast(TYPE, newvalue)
                # --- end of GC unsafe code ---
                return
        else:
            raise NotImplementedError("size = %d" % size)

    @specialize.argtype(1)
    def _base_do_setfield_r(self, struct, fielddescr, newvalue):
        ofs = self.unpack_fielddescr(fielddescr)
        assert lltype.typeOf(struct) is not lltype.Signed, (
            "can't handle write barriers for setfield_raw")
        self.gc_ll_descr.do_write_barrier(struct, newvalue)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        fieldptr = rffi.cast(rffi.CArrayPtr(lltype.Signed), fieldptr)
        fieldptr[0] = self.cast_gcref_to_int(newvalue)
        # --- end of GC unsafe code ---

    @specialize.argtype(1)
    def _base_do_setfield_f(self, struct, fielddescr, newvalue):
        ofs = self.unpack_fielddescr(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        fieldptr = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), fieldptr)
        fieldptr[0] = newvalue
        # --- end of GC unsafe code ---

    bh_setfield_gc_i = _base_do_setfield_i
    bh_setfield_gc_r = _base_do_setfield_r
    bh_setfield_gc_f = _base_do_setfield_f

    bh_setfield_raw_i = _base_do_setfield_i
    bh_setfield_raw_r = _base_do_setfield_r
    bh_setfield_raw_f = _base_do_setfield_f

    def bh_new(self, sizedescr):
        return self.gc_ll_descr.gc_malloc(sizedescr)

    def bh_new_with_vtable(self, sizedescr, vtable):
        res = self.gc_ll_descr.gc_malloc(sizedescr)
        if self.vtable_offset is not None:
            as_array = rffi.cast(rffi.CArrayPtr(lltype.Signed), res)
            as_array[self.vtable_offset/WORD] = vtable
        return res

    def bh_classof(self, struct):
        struct = lltype.cast_opaque_ptr(rclass.OBJECTPTR, struct)
        result_adr = llmemory.cast_ptr_to_adr(struct.typeptr)
        return heaptracker.adr2int(result_adr)

    def bh_new_array(self, arraydescr, length):
        return self.gc_ll_descr.gc_malloc_array(arraydescr, length)

    def bh_newstr(self, length):
        return self.gc_ll_descr.gc_malloc_str(length)

    def bh_newunicode(self, length):
        return self.gc_ll_descr.gc_malloc_unicode(length)

    def bh_strsetitem(self, string, index, newvalue):
        s = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), string)
        s.chars[index] = chr(newvalue)

    def bh_unicodesetitem(self, string, index, newvalue):
        u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
        u.chars[index] = unichr(newvalue)

    def bh_copystrcontent(self, src, dst, srcstart, dststart, length):
        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), src)
        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), dst)
        rstr.copy_string_contents(src, dst, srcstart, dststart, length)

    def bh_copyunicodecontent(self, src, dst, srcstart, dststart, length):
        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), src)
        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), dst)
        rstr.copy_unicode_contents(src, dst, srcstart, dststart, length)

    def bh_call_i(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.INT + 'S')
        return calldescr.call_stub_i(func, args_i, args_r, args_f)

    def bh_call_r(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.REF)
        return calldescr.call_stub_r(func, args_i, args_r, args_f)

    def bh_call_f(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.FLOAT + 'L')
        return calldescr.call_stub_f(func, args_i, args_r, args_f)

    def bh_call_v(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.VOID)
        # the 'i' return value is ignored (and nonsense anyway)
        calldescr.call_stub_i(func, args_i, args_r, args_f)
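
A minimal sketch of how the descr helpers and the blackhole ('bh_') accessors above might be exercised from an untranslated test, assuming a concrete backend CPU is available; the use of getcpuclass() autodetection and the constructor arguments below are assumptions made for illustration, not part of llmodel.py:

    from pypy.rpython.lltypesystem import lltype
    from pypy.jit.backend.detect_cpu import getcpuclass

    CPU = getcpuclass()          # picks a concrete AbstractLLCPU subclass (assumption)
    cpu = CPU(None, None)        # untranslated, test-style construction (assumption)
    cpu.setup_once()

    S = lltype.GcStruct('S', ('x', lltype.Signed))
    sizedescr = cpu.sizeof(S)                 # SizeDescr for S
    fielddescr = cpu.fielddescrof(S, 'x')     # FieldDescr for S.x

    p = cpu.bh_new(sizedescr)                 # GCREF to a freshly malloc'ed S
    cpu.bh_setfield_gc_i(p, fielddescr, 42)   # store through the field descr
    assert cpu.bh_getfield_gc_i(p, fielddescr) == 42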