PageRenderTime 58ms CodeModel.GetById 17ms RepoModel.GetById 1ms app.codeStats 0ms

/rpython/memory/gctransform/framework.py

https://bitbucket.org/pypy/pypy/
Python | 1666 lines | 1476 code | 110 blank | 80 comment | 151 complexity | 110177748c218fc97c2c2646d5b644de MD5 | raw file
Possible License(s): AGPL-3.0, BSD-3-Clause, Apache-2.0

Large files are truncated, but you can click here to view the full file

  1. from rpython.annotator import model as annmodel
  2. from rpython.rtyper.llannotation import SomeAddress, SomePtr
  3. from rpython.rlib import rgc
  4. from rpython.rlib.objectmodel import specialize
  5. from rpython.rlib.unroll import unrolling_iterable
  6. from rpython.rtyper import rmodel, annlowlevel
  7. from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, llgroup
  8. from rpython.rtyper.lltypesystem.lloperation import llop
  9. from rpython.memory import gctypelayout
  10. from rpython.memory.gctransform.log import log
  11. from rpython.memory.gctransform.support import get_rtti, ll_call_destructor
  12. from rpython.memory.gctransform.support import ll_report_finalizer_error
  13. from rpython.memory.gctransform.transform import GCTransformer
  14. from rpython.memory.gctypelayout import ll_weakref_deref, WEAKREF, WEAKREFPTR
  15. from rpython.memory.gctypelayout import FIN_TRIGGER_FUNC, FIN_HANDLER_ARRAY
  16. from rpython.tool.sourcetools import func_with_new_name
  17. from rpython.translator.backendopt.collectanalyze import CollectAnalyzer
  18. from rpython.translator.backendopt.finalizer import FinalizerAnalyzer
  19. from rpython.translator.backendopt.support import var_needsgc
  20. import types
  21. TYPE_ID = llgroup.HALFWORD
  22. def propagate_no_write_barrier_needed(result, block, mallocvars,
  23. collect_analyzer, entrymap,
  24. startindex=0):
  25. # We definitely know that no write barrier is needed in the 'block'
  26. # for any of the variables in 'mallocvars'. Propagate this information
  27. # forward. Note that "definitely know" implies that we just did either
  28. # a fixed-size malloc (variable-size might require card marking), or
  29. # that we just did a full write barrier (not just for card marking).
  30. if 1: # keep indentation
  31. for i, op in enumerate(block.operations):
  32. if i < startindex:
  33. continue
  34. if op.opname in ("cast_pointer", "same_as"):
  35. if op.args[0] in mallocvars:
  36. mallocvars[op.result] = True
  37. elif op.opname in ("setfield", "setarrayitem", "setinteriorfield"):
  38. TYPE = op.args[-1].concretetype
  39. if (op.args[0] in mallocvars and
  40. isinstance(TYPE, lltype.Ptr) and
  41. TYPE.TO._gckind == "gc"):
  42. result.add(op)
  43. else:
  44. if collect_analyzer.analyze(op):
  45. return
  46. for exit in block.exits:
  47. if len(entrymap[exit.target]) != 1:
  48. continue
  49. newmallocvars = {}
  50. for i, var in enumerate(exit.args):
  51. if var in mallocvars:
  52. newmallocvars[exit.target.inputargs[i]] = True
  53. if newmallocvars:
  54. propagate_no_write_barrier_needed(result, exit.target,
  55. newmallocvars,
  56. collect_analyzer, entrymap)
  57. def find_initializing_stores(collect_analyzer, graph, entrymap):
  58. # a bit of a hackish analysis: if a block contains a malloc and check that
  59. # the result is not zero, then the block following the True link will
  60. # usually initialize the newly allocated object
  61. result = set()
  62. mallocnum = 0
  63. blockset = set(graph.iterblocks())
  64. while blockset:
  65. block = blockset.pop()
  66. if len(block.operations) < 2:
  67. continue
  68. mallocop = block.operations[-2]
  69. checkop = block.operations[-1]
  70. if not (mallocop.opname == "malloc" and
  71. checkop.opname == "ptr_nonzero" and
  72. mallocop.result is checkop.args[0] and
  73. block.exitswitch is checkop.result):
  74. continue
  75. rtti = get_rtti(mallocop.args[0].value)
  76. if rtti is not None and hasattr(rtti._obj, 'destructor_funcptr'):
  77. continue
  78. exits = [exit for exit in block.exits if exit.llexitcase]
  79. if len(exits) != 1:
  80. continue
  81. exit = exits[0]
  82. if len(entrymap[exit.target]) != 1:
  83. continue
  84. try:
  85. index = exit.args.index(mallocop.result)
  86. except ValueError:
  87. continue
  88. target = exit.target
  89. mallocvars = {target.inputargs[index]: True}
  90. mallocnum += 1
  91. propagate_no_write_barrier_needed(result, target, mallocvars,
  92. collect_analyzer, entrymap)
  93. #if result:
  94. # print "found %s initializing stores in %s" % (len(result), graph.name)
  95. return result
  96. def find_clean_setarrayitems(collect_analyzer, graph):
  97. result = set()
  98. for block in graph.iterblocks():
  99. cache = set()
  100. for op in block.operations:
  101. if op.opname == 'getarrayitem':
  102. cache.add((op.args[0], op.result))
  103. elif op.opname == 'setarrayitem':
  104. if (op.args[0], op.args[2]) in cache:
  105. result.add(op)
  106. elif collect_analyzer.analyze(op):
  107. cache = set()
  108. return result
class BaseFrameworkGCTransformer(GCTransformer):
    """GC transformer for the 'framework' GCs: inserts the calls to the
    GC's malloc/write-barrier/root-walking helpers into all graphs."""

    root_stack_depth = None    # for tests to override

    def __init__(self, translator):
        # Builds the GC instance, the layoutbuilder and the annotated
        # mix-level helper functions that later transformation steps use.
        from rpython.memory.gc.base import choose_gc_from_config

        super(BaseFrameworkGCTransformer, self).__init__(translator,
                                                         inline=True)
        if hasattr(self, 'GC_PARAMS'):
            # for tests: the GC choice can be specified as class attributes
            GCClass = self.GCClass
            GC_PARAMS = self.GC_PARAMS
        else:
            # for regular translation: pick the GC from the config
            GCClass, GC_PARAMS = choose_gc_from_config(translator.config)
        self.GCClass = GCClass

        if hasattr(translator, '_jit2gc'):
            # the JIT already built a layoutbuilder; reuse it
            self.layoutbuilder = translator._jit2gc['layoutbuilder']
            finished_minor_collection = translator._jit2gc.get(
                'invoke_after_minor_collection', None)
        else:
            self.layoutbuilder = TransformerLayoutBuilder(translator, GCClass)
            finished_minor_collection = None
        self.layoutbuilder.transformer = self
        self.get_type_id = self.layoutbuilder.get_type_id

        # set up GCData with the llgroup from the layoutbuilder, which
        # will grow as more TYPE_INFO members are added to it
        gcdata = gctypelayout.GCData(self.layoutbuilder.type_info_group)

        # initialize the following two fields with a random non-NULL address,
        # to make the annotator happy.  The fields are patched in finish()
        # to point to a real array.
        foo = lltype.malloc(lltype.FixedSizeArray(llmemory.Address, 1),
                            immortal=True, zero=True)
        a_random_address = llmemory.cast_ptr_to_adr(foo)
        gcdata.static_root_start = a_random_address      # patched in finish()
        gcdata.static_root_nongcend = a_random_address   # patched in finish()
        gcdata.static_root_end = a_random_address        # patched in finish()
        gcdata.max_type_id = 13                          # patched in finish()
        gcdata.typeids_z = a_random_address              # patched in finish()
        gcdata.typeids_list = a_random_address           # patched in finish()
        gcdata.finalizer_handlers = a_random_address     # patched in finish()
        self.gcdata = gcdata
        self.malloc_fnptr_cache = {}
        self.finalizer_queue_indexes = {}
        self.finalizer_handlers = []

        gcdata.gc = GCClass(translator.config.translation, **GC_PARAMS)
        root_walker = self.build_root_walker()
        root_walker.finished_minor_collection_func = finished_minor_collection
        self.root_walker = root_walker
        gcdata.set_query_functions(gcdata.gc)
        gcdata.gc.set_root_walker(root_walker)
        # statistics counters, reported in finish_tables()
        self.num_pushs = 0
        self.write_barrier_calls = 0
        self.write_barrier_from_array_calls = 0

        def frameworkgc_setup():
            # run-time initialization code
            root_walker.setup_root_walker()
            gcdata.gc.setup()
            gcdata.gc.post_setup()

        def frameworkgc__teardown():
            # run-time teardown code for tests!
            gcdata.gc._teardown()

        bk = self.translator.annotator.bookkeeper
        r_typeid16 = rffi.platform.numbertype_to_rclass[TYPE_ID]
        s_typeid16 = annmodel.SomeInteger(knowntype=r_typeid16)

        # the point of this little dance is to not annotate
        # self.gcdata.static_root_xyz as constants. XXX is it still needed??
        data_classdef = bk.getuniqueclassdef(gctypelayout.GCData)
        data_classdef.generalize_attr('static_root_start', SomeAddress())
        data_classdef.generalize_attr('static_root_nongcend', SomeAddress())
        data_classdef.generalize_attr('static_root_end', SomeAddress())
        data_classdef.generalize_attr('max_type_id', annmodel.SomeInteger())
        data_classdef.generalize_attr('typeids_z', SomeAddress())
        data_classdef.generalize_attr('typeids_list', SomeAddress())
        data_classdef.generalize_attr('finalizer_handlers', SomeAddress())

        annhelper = annlowlevel.MixLevelHelperAnnotator(self.translator.rtyper)

        def getfn(ll_function, args_s, s_result, inline=False,
                  minimal_transform=True):
            # annotate 'll_function' as a mix-level helper and return a
            # callable constant pointing to its graph
            graph = annhelper.getgraph(ll_function, args_s, s_result)
            if minimal_transform:
                self.need_minimal_transform(graph)
            if inline:
                self.graphs_to_inline[graph] = True
            return annhelper.graph2const(graph)

        self.frameworkgc_setup_ptr = getfn(frameworkgc_setup, [],
                                           annmodel.s_None)
        # for tests
        self.frameworkgc__teardown_ptr = getfn(frameworkgc__teardown, [],
                                               annmodel.s_None)

        self.annotate_walker_functions(getfn)
        if translator.config.translation.rweakref:
            self.weakref_deref_ptr = self.inittime_helper(
                ll_weakref_deref, [llmemory.WeakRefPtr], llmemory.Address)

        classdef = bk.getuniqueclassdef(GCClass)
        s_gc = annmodel.SomeInstance(classdef)

        self._declare_functions(GCClass, getfn, s_gc, s_typeid16)

        # thread support
        if translator.config.translation.continuation:
            root_walker.stacklet_support = True
        if translator.config.translation.thread:
            root_walker.need_thread_support(self, getfn)
        if root_walker.stacklet_support:
            root_walker.need_stacklet_support(self, getfn)

        self.layoutbuilder.encode_type_shapes_now()
        self.create_custom_trace_funcs(gcdata.gc, translator.rtyper)

        annhelper.finish()   # at this point, annotate all mix-level helpers
        annhelper.backend_optimize()

        self.check_custom_trace_funcs(gcdata.gc, translator.rtyper)

        self.collect_analyzer = CollectAnalyzer(self.translator)
        self.collect_analyzer.analyze_all()

        s_gc = self.translator.annotator.bookkeeper.valueoftype(GCClass)
        r_gc = self.translator.rtyper.getrepr(s_gc)
        self.c_const_gc = rmodel.inputconst(r_gc, self.gcdata.gc)
        s_gc_data = self.translator.annotator.bookkeeper.valueoftype(
            gctypelayout.GCData)
        r_gc_data = self.translator.rtyper.getrepr(s_gc_data)
        self.c_const_gcdata = rmodel.inputconst(r_gc_data, self.gcdata)
        self.malloc_zero_filled = GCClass.malloc_zero_filled

        HDR = self.HDR = self.gcdata.gc.gcheaderbuilder.HDR

        size_gc_header = self.gcdata.gc.gcheaderbuilder.size_gc_header
        vtableinfo = (HDR, size_gc_header, self.gcdata.gc.typeid_is_in_field)
        self.c_vtableinfo = rmodel.inputconst(lltype.Void, vtableinfo)
        tig = self.layoutbuilder.type_info_group._as_ptr()
        self.c_type_info_group = rmodel.inputconst(lltype.typeOf(tig), tig)
        sko = llmemory.sizeof(gcdata.TYPE_INFO)
        self.c_vtinfo_skip_offset = rmodel.inputconst(lltype.typeOf(sko), sko)
    def _declare_functions(self, GCClass, getfn, s_gc, s_typeid16):
        """Annotate all the GC entry points (malloc, collect, barriers,
        inspector helpers, ...) and store the resulting function pointers
        as self.xxx_ptr attributes for use during graph transformation."""
        from rpython.memory.gc.base import ARRAY_TYPEID_MAP
        from rpython.memory.gc import inspector

        s_gcref = SomePtr(llmemory.GCREF)
        gcdata = self.gcdata
        # use the GC flag to find which malloc method to use
        # malloc_zero_filled == True  -> malloc_fixedsize/varsize_clear
        # malloc_zero_filled == False -> malloc_fixedsize/varsize
        malloc_fixedsize_meth = None
        if GCClass.malloc_zero_filled:
            malloc_fixedsize_clear_meth = GCClass.malloc_fixedsize_clear.im_func
            self.malloc_fixedsize_ptr = getfn(
                malloc_fixedsize_clear_meth,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeBool(),
                 annmodel.SomeBool(),
                 annmodel.SomeBool()], s_gcref,
                inline = False)
            self.malloc_varsize_ptr = getfn(
                GCClass.malloc_varsize_clear.im_func,
                [s_gc, s_typeid16]
                + [annmodel.SomeInteger(nonneg=True) for i in range(4)],
                s_gcref)
        else:
            malloc_fixedsize_meth = GCClass.malloc_fixedsize.im_func
            self.malloc_fixedsize_ptr = getfn(
                malloc_fixedsize_meth,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeBool(),
                 annmodel.SomeBool(),
                 annmodel.SomeBool()], s_gcref,
                inline = False)
            self.malloc_varsize_ptr = getfn(
                GCClass.malloc_varsize.im_func,
                [s_gc, s_typeid16]
                + [annmodel.SomeInteger(nonneg=True) for i in range(4)],
                s_gcref)

        self.collect_ptr = getfn(GCClass.collect.im_func,
                                 [s_gc, annmodel.SomeInteger()],
                                 annmodel.s_None)
        self.can_move_ptr = getfn(GCClass.can_move.im_func,
                                  [s_gc, SomeAddress()],
                                  annmodel.SomeBool())

        # optional GC features: only declared if the GC class provides them
        if hasattr(GCClass, 'shrink_array'):
            self.shrink_array_ptr = getfn(
                GCClass.shrink_array.im_func,
                [s_gc, SomeAddress(),
                 annmodel.SomeInteger(nonneg=True)], annmodel.s_Bool)
        else:
            self.shrink_array_ptr = None

        if hasattr(GCClass, 'heap_stats'):
            self.heap_stats_ptr = getfn(GCClass.heap_stats.im_func,
                    [s_gc], SomePtr(lltype.Ptr(ARRAY_TYPEID_MAP)),
                    minimal_transform=False)
            self.get_member_index_ptr = getfn(
                GCClass.get_member_index.im_func,
                [s_gc, annmodel.SomeInteger(knowntype=llgroup.r_halfword)],
                annmodel.SomeInteger())

        self.gc_gettypeid_ptr = getfn(GCClass.get_type_id_cast,
                                      [s_gc, SomeAddress()],
                                      annmodel.SomeInteger())

        if hasattr(GCClass, 'writebarrier_before_copy'):
            self.wb_before_copy_ptr = \
                    getfn(GCClass.writebarrier_before_copy.im_func,
                    [s_gc] + [SomeAddress()] * 2 +
                    [annmodel.SomeInteger()] * 3, annmodel.SomeBool())
        elif GCClass.needs_write_barrier:
            raise NotImplementedError("GC needs write barrier, but does not provide writebarrier_before_copy functionality")

        # in some GCs we can inline the common case of
        # malloc_fixedsize(typeid, size, False, False, False)
        if getattr(GCClass, 'inline_simple_malloc', False):
            # make a copy of this function so that it gets annotated
            # independently and the constants are folded inside
            if malloc_fixedsize_meth is None:
                malloc_fast_meth = malloc_fixedsize_clear_meth
                self.malloc_fast_is_clearing = True
            else:
                malloc_fast_meth = malloc_fixedsize_meth
                self.malloc_fast_is_clearing = False
            malloc_fast = func_with_new_name(
                malloc_fast_meth,
                "malloc_fast")
            s_False = annmodel.SomeBool()
            s_False.const = False
            self.malloc_fast_ptr = getfn(
                malloc_fast,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 s_False, s_False, s_False], s_gcref,
                inline = True)
        else:
            self.malloc_fast_ptr = None

        # in some GCs we can also inline the common case of
        # malloc_varsize(typeid, length, (3 constant sizes), True, False)
        self.malloc_varsize_fast_ptr = None
        if getattr(GCClass, 'inline_simple_malloc_varsize', False):
            # make a copy of this function so that it gets annotated
            # independently and the constants are folded inside
            if hasattr(GCClass, 'malloc_varsize'):
                malloc_varsize_fast = func_with_new_name(
                    GCClass.malloc_varsize.im_func,
                    "malloc_varsize_fast")
            elif hasattr(GCClass, 'malloc_varsize_clear'):
                malloc_varsize_fast = func_with_new_name(
                    GCClass.malloc_varsize_clear.im_func,
                    "malloc_varsize_clear_fast")
            s_False = annmodel.SomeBool()
            s_False.const = False
            self.malloc_varsize_fast_ptr = getfn(
                malloc_varsize_fast,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeInteger(nonneg=True)], s_gcref,
                inline = True)

        if getattr(GCClass, 'raw_malloc_memory_pressure', False):
            def raw_malloc_memory_pressure_varsize(length, itemsize):
                totalmem = length * itemsize
                if totalmem > 0:
                    gcdata.gc.raw_malloc_memory_pressure(totalmem)
                #else: probably an overflow -- the following rawmalloc
                #      will fail then
            def raw_malloc_memory_pressure(sizehint):
                gcdata.gc.raw_malloc_memory_pressure(sizehint)
            self.raw_malloc_memory_pressure_varsize_ptr = getfn(
                raw_malloc_memory_pressure_varsize,
                [annmodel.SomeInteger(), annmodel.SomeInteger()],
                annmodel.s_None, minimal_transform = False)
            self.raw_malloc_memory_pressure_ptr = getfn(
                raw_malloc_memory_pressure,
                [annmodel.SomeInteger()],
                annmodel.s_None, minimal_transform = False)

        self.identityhash_ptr = getfn(GCClass.identityhash.im_func,
                                      [s_gc, s_gcref],
                                      annmodel.SomeInteger(),
                                      minimal_transform=False, inline=True)
        if getattr(GCClass, 'obtain_free_space', False):
            self.obtainfreespace_ptr = getfn(GCClass.obtain_free_space.im_func,
                                             [s_gc, annmodel.SomeInteger()],
                                             SomeAddress())

        if GCClass.moving_gc:
            self.id_ptr = getfn(GCClass.id.im_func,
                                [s_gc, s_gcref], annmodel.SomeInteger(),
                                inline = True,
                                minimal_transform = False)
        else:
            self.id_ptr = None

        # heap-inspection helpers (rgc.dump_rpy_heap() & friends)
        self.get_rpy_roots_ptr = getfn(inspector.get_rpy_roots,
                                       [s_gc],
                                       rgc.s_list_of_gcrefs(),
                                       minimal_transform=False)
        self.get_rpy_referents_ptr = getfn(inspector.get_rpy_referents,
                                           [s_gc, s_gcref],
                                           rgc.s_list_of_gcrefs(),
                                           minimal_transform=False)
        self.get_rpy_memory_usage_ptr = getfn(inspector.get_rpy_memory_usage,
                                              [s_gc, s_gcref],
                                              annmodel.SomeInteger(),
                                              minimal_transform=False)
        self.get_rpy_type_index_ptr = getfn(inspector.get_rpy_type_index,
                                            [s_gc, s_gcref],
                                            annmodel.SomeInteger(),
                                            minimal_transform=False)
        self.is_rpy_instance_ptr = getfn(inspector.is_rpy_instance,
                                         [s_gc, s_gcref],
                                         annmodel.SomeBool(),
                                         minimal_transform=False)
        self.dump_rpy_heap_ptr = getfn(inspector.dump_rpy_heap,
                                       [s_gc, annmodel.SomeInteger()],
                                       annmodel.s_Bool,
                                       minimal_transform=False)
        self.get_typeids_z_ptr = getfn(inspector.get_typeids_z,
                                       [s_gc],
                                       SomePtr(lltype.Ptr(rgc.ARRAY_OF_CHAR)),
                                       minimal_transform=False)
        self.get_typeids_list_ptr = getfn(inspector.get_typeids_list,
                                          [s_gc],
                                          SomePtr(lltype.Ptr(
                                              lltype.Array(llgroup.HALFWORD))),
                                          minimal_transform=False)

        self.set_max_heap_size_ptr = getfn(GCClass.set_max_heap_size.im_func,
                                           [s_gc,
                                            annmodel.SomeInteger(nonneg=True)],
                                           annmodel.s_None)

        # rawrefcount support (cpyext-style reference counting bridge)
        if hasattr(GCClass, 'rawrefcount_init'):
            self.rawrefcount_init_ptr = getfn(
                GCClass.rawrefcount_init,
                [s_gc, SomePtr(GCClass.RAWREFCOUNT_DEALLOC_TRIGGER)],
                annmodel.s_None)
            self.rawrefcount_create_link_pypy_ptr = getfn(
                GCClass.rawrefcount_create_link_pypy,
                [s_gc, s_gcref, SomeAddress()],
                annmodel.s_None)
            self.rawrefcount_create_link_pyobj_ptr = getfn(
                GCClass.rawrefcount_create_link_pyobj,
                [s_gc, s_gcref, SomeAddress()],
                annmodel.s_None)
            self.rawrefcount_from_obj_ptr = getfn(
                GCClass.rawrefcount_from_obj, [s_gc, s_gcref], SomeAddress(),
                inline = True)
            self.rawrefcount_to_obj_ptr = getfn(
                GCClass.rawrefcount_to_obj, [s_gc, SomeAddress()], s_gcref,
                inline = True)
            self.rawrefcount_next_dead_ptr = getfn(
                GCClass.rawrefcount_next_dead, [s_gc], SomeAddress(),
                inline = True)

        if GCClass.can_usually_pin_objects:
            self.pin_ptr = getfn(GCClass.pin,
                                 [s_gc, SomeAddress()],
                                 annmodel.SomeBool())
            self.unpin_ptr = getfn(GCClass.unpin,
                                   [s_gc, SomeAddress()],
                                   annmodel.s_None)
            self._is_pinned_ptr = getfn(GCClass._is_pinned,
                                        [s_gc, SomeAddress()],
                                        annmodel.SomeBool())

        self.write_barrier_ptr = None
        self.write_barrier_from_array_ptr = None
        if GCClass.needs_write_barrier:
            self.write_barrier_ptr = getfn(GCClass.write_barrier.im_func,
                                           [s_gc, SomeAddress()],
                                           annmodel.s_None,
                                           inline=True)
            func = getattr(gcdata.gc, 'remember_young_pointer', None)
            if func is not None:
                # func should not be a bound method, but a real function
                assert isinstance(func, types.FunctionType)
                self.write_barrier_failing_case_ptr = getfn(func,
                                                            [SomeAddress()],
                                                            annmodel.s_None)
            func = getattr(GCClass, 'write_barrier_from_array', None)
            if func is not None:
                self.write_barrier_from_array_ptr = getfn(func.im_func,
                                                          [s_gc, SomeAddress(),
                                                           annmodel.SomeInteger()],
                                                          annmodel.s_None,
                                                          inline=True)
                func = getattr(gcdata.gc,
                               'jit_remember_young_pointer_from_array',
                               None)
                if func is not None:
                    # func should not be a bound method, but a real function
                    assert isinstance(func, types.FunctionType)
                    self.write_barrier_from_array_failing_case_ptr = \
                        getfn(func,
                              [SomeAddress()],
                              annmodel.s_None)

        self.malloc_nonmovable_ptr = getfn(
            GCClass.malloc_fixed_or_varsize_nonmovable,
            [s_gc, s_typeid16, annmodel.SomeInteger()],
            s_gcref)
        self.register_finalizer_ptr = getfn(GCClass.register_finalizer,
                                            [s_gc,
                                             annmodel.SomeInteger(),
                                             s_gcref],
                                            annmodel.s_None)
    def create_custom_trace_funcs(self, gc, rtyper):
        """Build and attach to 'gc' a dispatcher that routes tracing of an
        object to the custom trace function registered for its type."""
        custom_trace_funcs = tuple(rtyper.custom_trace_funcs)
        rtyper.custom_trace_funcs = custom_trace_funcs
        # too late to register new custom trace functions afterwards

        # unrolled at annotation time: one comparison per registered type
        custom_trace_funcs_unrolled = unrolling_iterable(
            [(self.get_type_id(TP), func) for TP, func in custom_trace_funcs])

        @specialize.arg(2)
        def custom_trace_dispatcher(obj, typeid, callback, arg):
            for type_id_exp, func in custom_trace_funcs_unrolled:
                # compare full typeids (combine_ushort widens the halfword)
                if (llop.combine_ushort(lltype.Signed, typeid, 0) ==
                    llop.combine_ushort(lltype.Signed, type_id_exp, 0)):
                    func(gc, obj, callback, arg)
                    return
            else:
                # only called for typeids that have a custom trace hook
                assert False

        gc.custom_trace_dispatcher = custom_trace_dispatcher

        for TP, func in custom_trace_funcs:
            self.gcdata._has_got_custom_trace(self.get_type_id(TP))
            specialize.arg(2)(func)
    def check_custom_trace_funcs(self, gc, rtyper):
        # detect if one of the custom trace functions uses the GC
        # (it must not!)
        for TP, func in rtyper.custom_trace_funcs:
            def no_op_callback(obj, arg):
                pass
            # wrap each hook in a helper graph so the collect analyzer
            # can inspect its full call tree
            def ll_check_no_collect(obj):
                func(gc, obj, no_op_callback, None)
            annhelper = annlowlevel.MixLevelHelperAnnotator(rtyper)
            graph1 = annhelper.getgraph(ll_check_no_collect, [SomeAddress()],
                                        annmodel.s_None)
            annhelper.finish()
            collect_analyzer = CollectAnalyzer(self.translator)
            if collect_analyzer.analyze_direct_call(graph1):
                raise Exception(
                    "the custom trace hook %r for %r can cause "
                    "the GC to be called!" % (func, TP))
    def consider_constant(self, TYPE, value):
        # register the prebuilt constant 'value' of type 'TYPE' with the
        # layoutbuilder (and hence with the GC)
        self.layoutbuilder.consider_constant(TYPE, value, self.gcdata.gc)

    #def get_type_id(self, TYPE):
    #    this method is attached to the instance and redirects to
    #    layoutbuilder.get_type_id().

    def special_funcptr_for_type(self, TYPE):
        # delegate: returns the dict of special function pointers
        # (e.g. "destructor", "old_style_finalizer") recorded for 'TYPE'
        return self.layoutbuilder.special_funcptr_for_type(TYPE)
  532. def gc_header_for(self, obj, needs_hash=False):
  533. hdr = self.gcdata.gc.gcheaderbuilder.header_of_object(obj)
  534. withhash, flag = self.gcdata.gc.withhash_flag_is_in_field
  535. x = getattr(hdr, withhash)
  536. TYPE = lltype.typeOf(x)
  537. x = lltype.cast_primitive(lltype.Signed, x)
  538. if needs_hash:
  539. x |= flag # set the flag in the header
  540. else:
  541. x &= ~flag # clear the flag in the header
  542. x = lltype.cast_primitive(TYPE, x)
  543. setattr(hdr, withhash, x)
  544. return hdr
  545. def get_hash_offset(self, T):
  546. type_id = self.get_type_id(T)
  547. assert not self.gcdata.q_is_varsize(type_id)
  548. return self.gcdata.q_fixed_size(type_id)
    def finish_tables(self):
        """Close the type-info table and patch the gcdata instance with
        the real static-root and typeid arrays.  Returns the list of new
        low-level data structures the database must keep alive."""
        group = self.layoutbuilder.close_table()
        log.info("assigned %s typeids" % (len(group.members), ))
        log.info("added %s push/pop stack root instructions" % (
                     self.num_pushs, ))
        if self.write_barrier_ptr:
            log.info("inserted %s write barrier calls" % (
                         self.write_barrier_calls, ))
        if self.write_barrier_from_array_ptr:
            log.info("inserted %s write_barrier_from_array calls" % (
                         self.write_barrier_from_array_calls, ))

        # XXX because we call inputconst already in replace_malloc, we can't
        # modify the instance, we have to modify the 'rtyped instance'
        # instead.  horrors.  is there a better way?
        s_gcdata = self.translator.annotator.bookkeeper.immutablevalue(
            self.gcdata)
        r_gcdata = self.translator.rtyper.getrepr(s_gcdata)
        ll_instance = rmodel.inputconst(r_gcdata, self.gcdata).value

        # one flat array with the non-GC roots first, then the GC roots
        addresses_of_static_ptrs = (
            self.layoutbuilder.addresses_of_static_ptrs_in_nongc +
            self.layoutbuilder.addresses_of_static_ptrs)
        log.info("found %s static roots" % (len(addresses_of_static_ptrs), ))
        ll_static_roots_inside = lltype.malloc(lltype.Array(llmemory.Address),
                                               len(addresses_of_static_ptrs),
                                               immortal=True)
        for i in range(len(addresses_of_static_ptrs)):
            ll_static_roots_inside[i] = addresses_of_static_ptrs[i]
        # start/nongcend/end delimit the two sub-ranges of the array
        ll_instance.inst_static_root_start = llmemory.cast_ptr_to_adr(ll_static_roots_inside) + llmemory.ArrayItemsOffset(lltype.Array(llmemory.Address))
        ll_instance.inst_static_root_nongcend = ll_instance.inst_static_root_start + llmemory.sizeof(llmemory.Address) * len(self.layoutbuilder.addresses_of_static_ptrs_in_nongc)
        ll_instance.inst_static_root_end = ll_instance.inst_static_root_start + llmemory.sizeof(llmemory.Address) * len(addresses_of_static_ptrs)
        newgcdependencies = []
        newgcdependencies.append(ll_static_roots_inside)
        ll_instance.inst_max_type_id = len(group.members)
        #
        # the zlib-compressed typeids.txt and the member-index list
        typeids_z, typeids_list = self.write_typeid_list()
        ll_typeids_z = lltype.malloc(rgc.ARRAY_OF_CHAR,
                                     len(typeids_z),
                                     immortal=True)
        for i in range(len(typeids_z)):
            ll_typeids_z[i] = typeids_z[i]
        ll_instance.inst_typeids_z = llmemory.cast_ptr_to_adr(ll_typeids_z)
        newgcdependencies.append(ll_typeids_z)
        #
        ll_typeids_list = lltype.malloc(lltype.Array(llgroup.HALFWORD),
                                        len(typeids_list),
                                        immortal=True)
        for i in range(len(typeids_list)):
            ll_typeids_list[i] = typeids_list[i]
        ll_instance.inst_typeids_list = llmemory.cast_ptr_to_adr(ll_typeids_list)
        newgcdependencies.append(ll_typeids_list)
        #
        # the (deque, trigger) pairs registered for finalizers
        handlers = self.finalizer_handlers
        ll_handlers = lltype.malloc(FIN_HANDLER_ARRAY, len(handlers),
                                    immortal=True)
        for i in range(len(handlers)):
            ll_handlers[i].deque = handlers[i][0]
            ll_handlers[i].trigger = handlers[i][1]
        ll_instance.inst_finalizer_handlers = llmemory.cast_ptr_to_adr(
            ll_handlers)
        newgcdependencies.append(ll_handlers)
        #
        return newgcdependencies
  611. def get_finish_tables(self):
  612. # We must first make sure that the type_info_group's members
  613. # are all followed. Do it repeatedly while new members show up.
  614. # Once it is really done, do finish_tables().
  615. seen = 0
  616. while seen < len(self.layoutbuilder.type_info_group.members):
  617. curtotal = len(self.layoutbuilder.type_info_group.members)
  618. yield self.layoutbuilder.type_info_group.members[seen:curtotal]
  619. seen = curtotal
  620. yield self.finish_tables()
  621. def write_typeid_list(self):
  622. """write out the list of type ids together with some info"""
  623. from rpython.tool.udir import udir
  624. # XXX not ideal since it is not per compilation, but per run
  625. # XXX argh argh, this only gives the member index but not the
  626. # real typeid, which is a complete mess to obtain now...
  627. all_ids = self.layoutbuilder.id_of_type.items()
  628. list_data = []
  629. ZERO = rffi.cast(llgroup.HALFWORD, 0)
  630. for _, typeinfo in all_ids:
  631. while len(list_data) <= typeinfo.index:
  632. list_data.append(ZERO)
  633. list_data[typeinfo.index] = typeinfo
  634. #
  635. all_ids = [(typeinfo.index, TYPE) for (TYPE, typeinfo) in all_ids]
  636. all_ids = dict(all_ids)
  637. f = udir.join("typeids.txt").open("w")
  638. for index in range(len(self.layoutbuilder.type_info_group.members)):
  639. f.write("member%-4d %s\n" % (index, all_ids.get(index, '?')))
  640. f.close()
  641. try:
  642. import zlib
  643. z_data = zlib.compress(udir.join("typeids.txt").read(), 9)
  644. except ImportError:
  645. z_data = ''
  646. return z_data, list_data
    def transform_graph(self, graph):
        # Transform one graph; additionally enforces @rgc.no_collect and
        # precomputes self.clean_sets (stores that need no write barrier).
        func = getattr(graph, 'func', None)
        if func and getattr(func, '_gc_no_collect_', False):
            if self.collect_analyzer.analyze_direct_call(graph):
                print '!'*79
                ca = CollectAnalyzer(self.translator)
                ca.verbose = True
                ca.analyze_direct_call(graph)
                # ^^^ for the dump of which operation in which graph actually
                #     causes it to return True
                raise Exception("'no_collect' function can trigger collection:"
                                " %s" % func)

        if self.write_barrier_ptr:
            # collect the stores proven not to need a write barrier;
            # consumed while the superclass rewrites the operations
            from rpython.flowspace.model import mkentrymap
            self._entrymap = mkentrymap(graph)
            self.clean_sets = (
                find_initializing_stores(self.collect_analyzer, graph,
                                         self._entrymap))
            if self.gcdata.gc.can_optimize_clean_setarrayitems():
                self.clean_sets = self.clean_sets.union(
                    find_clean_setarrayitems(self.collect_analyzer, graph))
        super(BaseFrameworkGCTransformer, self).transform_graph(graph)
        if self.write_barrier_ptr:
            self.clean_sets = None
  671. def gct_direct_call(self, hop):
  672. if self.collect_analyzer.analyze(hop.spaceop):
  673. livevars = self.push_roots(hop)
  674. self.default(hop)
  675. self.pop_roots(hop, livevars)
  676. else:
  677. if hop.spaceop.opname == "direct_call":
  678. self.mark_call_cannotcollect(hop, hop.spaceop.args[0])
  679. self.default(hop)
  680. def mark_call_cannotcollect(self, hop, name):
  681. pass
  682. gct_indirect_call = gct_direct_call
    def gct_fv_gc_malloc(self, hop, flags, TYPE, *args):
        # Replace a malloc operation with a direct_call to the proper GC
        # malloc helper (nonmovable / fixed-size / varsize), with the GC
        # roots pushed around the call.
        op = hop.spaceop
        PTRTYPE = op.result.concretetype
        assert PTRTYPE.TO == TYPE
        type_id = self.get_type_id(TYPE)

        c_type_id = rmodel.inputconst(TYPE_ID, type_id)
        info = self.layoutbuilder.get_info(type_id)
        c_size = rmodel.inputconst(lltype.Signed, info.fixedsize)
        fptrs = self.special_funcptr_for_type(TYPE)
        # "destructor" means a lightweight finalizer; "old_style_finalizer"
        # is a full one (see gctypelayout)
        has_finalizer = "destructor" in fptrs or "old_style_finalizer" in fptrs
        has_light_finalizer = "destructor" in fptrs
        c_has_finalizer = rmodel.inputconst(lltype.Bool, has_finalizer)
        c_has_light_finalizer = rmodel.inputconst(lltype.Bool,
                                                  has_light_finalizer)

        is_varsize = op.opname.endswith('_varsize') or flags.get('varsize')

        if flags.get('nonmovable'):
            # rgc.malloc_nonmovable(): one helper handles both fixed and
            # variable sizes; length 0 means fixed-size
            if not is_varsize:
                v_length = rmodel.inputconst(lltype.Signed, 0)
            else:
                v_length = op.args[-1]
            malloc_ptr = self.malloc_nonmovable_ptr
            args = [self.c_const_gc, c_type_id, v_length]
        elif not is_varsize:
            zero = flags.get('zero', False)
            # use the inlined fast path when allowed: no finalizer, and
            # zeroing requirements are compatible with the fast helper
            if (self.malloc_fast_ptr is not None and
                not c_has_finalizer.value and
                (self.malloc_fast_is_clearing or not zero)):
                malloc_ptr = self.malloc_fast_ptr
            else:
                malloc_ptr = self.malloc_fixedsize_ptr
            args = [self.c_const_gc, c_type_id, c_size,
                    c_has_finalizer, c_has_light_finalizer,
                    rmodel.inputconst(lltype.Bool, False)]
        else:
            # varsize objects cannot have finalizers
            assert not c_has_finalizer.value
            info_varsize = self.layoutbuilder.get_info_varsize(type_id)
            v_length = op.args[-1]
            c_ofstolength = rmodel.inputconst(lltype.Signed,
                                              info_varsize.ofstolength)
            c_varitemsize = rmodel.inputconst(lltype.Signed,
                                              info_varsize.varitemsize)
            if self.malloc_varsize_fast_ptr is not None:
                malloc_ptr = self.malloc_varsize_fast_ptr
            else:
                malloc_ptr = self.malloc_varsize_ptr
            args = [self.c_const_gc, c_type_id, v_length, c_size,
                    c_varitemsize, c_ofstolength]
        livevars = self.push_roots(hop)
        v_result = hop.genop("direct_call", [malloc_ptr] + args,
                             resulttype=llmemory.GCREF)
        self.pop_roots(hop, livevars)
        return v_result

    gct_fv_gc_malloc_varsize = gct_fv_gc_malloc
  736. def gct_gc__collect(self, hop):
  737. op = hop.spaceop
  738. if len(op.args) == 1:
  739. v_gen = op.args[0]
  740. else:
  741. # pick a number larger than expected different gc gens :-)
  742. v_gen = rmodel.inputconst(lltype.Signed, 9)
  743. livevars = self.push_roots(hop)
  744. hop.genop("direct_call", [self.collect_ptr, self.c_const_gc, v_gen],
  745. resultvar=op.result)
  746. self.pop_roots(hop, livevars)
  747. def gct_gc_can_move(self, hop):
  748. op = hop.spaceop
  749. v_addr = hop.genop('cast_ptr_to_adr',
  750. [op.args[0]], resulttype=llmemory.Address)
  751. hop.genop("direct_call", [self.can_move_ptr, self.c_const_gc, v_addr],
  752. resultvar=op.result)
  753. def gct_shrink_array(self, hop):
  754. if self.shrink_array_ptr is None:
  755. return GCTransformer.gct_shrink_array(self, hop)
  756. op = hop.spaceop
  757. v_addr = hop.genop('cast_ptr_to_adr',
  758. [op.args[0]], resulttype=llmemory.Address)
  759. v_length = op.args[1]
  760. hop.genop("direct_call", [self.shrink_array_ptr, self.c_const_gc,
  761. v_addr, v_length],
  762. resultvar=op.result)
  763. def gct_gc_gettypeid(self, hop):
  764. op = hop.spaceop
  765. v_addr = op.args[0]
  766. if v_addr.concretetype != llmemory.Address:
  767. v_addr = hop.genop("cast_ptr_to_adr", [v_addr],
  768. resulttype=llmemory.Address)
  769. hop.genop("direct_call", [self.gc_gettypeid_ptr, self.c_const_gc,
  770. v_addr],
  771. resultvar=op.result)
  772. def gct_gc_writebarrier(self, hop):
  773. if self.write_barrier_ptr is None:
  774. return
  775. op = hop.spaceop
  776. v_addr = op.args[0]
  777. if v_addr.concretetype != llmemory.Address:
  778. v_addr = hop.genop('cast_ptr_to_adr',
  779. [v_addr], resulttype=llmemory.Address)
  780. hop.genop("direct_call", [self.write_barrier_ptr,
  781. self.c_const_gc, v_addr])
  782. def gct_gc_heap_stats(self, hop):
  783. if not hasattr(self, 'heap_stats_ptr'):
  784. return GCTransformer.gct_gc_heap_stats(self, hop)
  785. op = hop.spaceop
  786. livevars = self.push_roots(hop)
  787. hop.genop("direct_call", [self.heap_stats_ptr, self.c_const_gc],
  788. resultvar=op.result)
  789. self.pop_roots(hop, livevars)
  790. def gct_get_member_index(self, hop):
  791. op = hop.spaceop
  792. v_typeid = op.args[0]
  793. hop.genop("direct_call", [self.get_member_index_ptr, self.c_const_gc,
  794. v_typeid], resultvar=op.result)
  795. def _gc_adr_of_gc_attr(self, hop, attrname):
  796. if getattr(self.gcdata.gc, attrname, None) is None:
  797. raise NotImplementedError("gc_adr_of_%s only for generational gcs"
  798. % (attrname,))
  799. op = hop.spaceop
  800. ofs = llmemory.offsetof(self.c_const_gc.concretetype.TO,
  801. 'inst_' + attrname)
  802. c_ofs = rmodel.inputconst(lltype.Signed, ofs)
  803. v_gc_adr = hop.genop('cast_ptr_to_adr', [self.c_const_gc],
  804. resulttype=llmemory.Address)
  805. hop.genop('adr_add', [v_gc_adr, c_ofs], resultvar=op.result)
    def gct_gc_adr_of_nursery_free(self, hop):
        """Return the address of the GC's 'nursery_free' field."""
        self._gc_adr_of_gc_attr(hop, 'nursery_free')
    def gct_gc_adr_of_nursery_top(self, hop):
        """Return the address of the GC's 'nursery_top' field."""
        self._gc_adr_of_gc_attr(hop, 'nursery_top')
  810. def _gc_adr_of_gcdata_attr(self, hop, attrname):
  811. op = hop.spaceop
  812. ofs = llmemory.offsetof(self.c_const_gcdata.concretetype.TO,
  813. 'inst_' + attrname)
  814. c_ofs = rmodel.inputconst(lltype.Signed, ofs)
  815. v_gcdata_adr = hop.genop('cast_ptr_to_adr', [self.c_const_gcdata],
  816. resulttype=llmemory.Address)
  817. hop.genop('adr_add', [v_gcdata_adr, c_ofs], resultvar=op.result)
    def gct_gc_adr_of_root_stack_base(self, hop):
        """Return the address of gcdata's 'root_stack_base' field."""
        self._gc_adr_of_gcdata_attr(hop, 'root_stack_base')
    def gct_gc_adr_of_root_stack_top(self, hop):
        """Return the address of gcdata's 'root_stack_top' field."""
        self._gc_adr_of_gcdata_attr(hop, 'root_stack_top')
  822. def gct_gc_detach_callback_pieces(self, hop):
  823. op = hop.spaceop
  824. assert len(op.args) == 0
  825. hop.genop("direct_call",
  826. [self.root_walker.gc_detach_callback_pieces_ptr],
  827. resultvar=op.result)
  828. def gct_gc_reattach_callback_pieces(self, hop):
  829. op = hop.spaceop
  830. assert len(op.args) == 1
  831. hop.genop("direct_call",
  832. [self.root_walker.gc_reattach_callback_pieces_ptr,
  833. op.args[0]],
  834. resultvar=op.result)
  835. def gct_do_malloc_fixedsize(self, hop):
  836. # used by the JIT (see rpython.jit.backend.llsupport.gc)
  837. op = hop.spaceop
  838. [v_typeid, v_size,
  839. v_has_finalizer, v_has_light_finalizer, v_contains_weakptr] = op.args
  840. livevars = self.push_roots(hop)
  841. hop.genop("direct_call",
  842. [self.malloc_fixedsize_ptr, self.c_const_gc,
  843. v_typeid, v_size,
  844. v_has_finalizer, v_has_light_finalizer,
  845. v_contains_weakptr],
  846. resultvar=op.result)
  847. self.pop_roots(hop, livevars)
  848. def gct_do_malloc_fixedsize_clear(self, hop):
  849. # used by the JIT (see rpython.jit.backend.llsupport.gc)
  850. self.gct_do_malloc_fixedsize(hop)
  851. if not self.malloc_zero_filled:
  852. op = hop.spaceop
  853. v_size = op.args[1]
  854. c_after_header = rmodel.inputconst(lltype.Signed,
  855. llmemory.sizeof(self.HDR))
  856. v_a = op.result
  857. v_clear_size = hop.genop('int_sub', [v_size, c_after_header],
  858. resulttype=lltype.Signed)
  859. self.emit_raw_memclear(hop.llops, v_clear_size, None,
  860. c_after_header, v_a)
  861. def gct_do_malloc_varsize(self, hop):
  862. # used by the JIT (see rpython.jit.backend.llsupport.gc)
  863. op = hop.spaceop
  864. [v_typeid, v_length, v_size, v_itemsize,
  865. v_offset_to_length] = op.args
  866. livevars = self.push_roots(hop)
  867. hop.genop("direct_call",
  868. [self.malloc_varsize_ptr, self.c_const_gc,
  869. v_typeid, v_length, v_size, v_itemsize,
  870. v_offset_to_length],
  871. resultvar=op.result)
  872. self.pop_roots(hop, livevars)
    def gct_do_malloc_varsize_clear(self, hop):
        """Like gct_do_malloc_varsize, but additionally clears the new
        object when the GC does not hand out zero-filled memory: first
        the fixed part, then the variable part.  Used by the JIT (see
        rpython.jit.backend.llsupport.gc)."""
        self.gct_do_malloc_varsize(hop)
        if not self.malloc_zero_filled:
            op = hop.spaceop
            v_num_elem = op.args[1]     # number of items (v_length)
            c_basesize = op.args[2]     # size of the fixed part (v_size)
            c_itemsize = op.args[3]     # size of one varsize item
            c_length_ofs = op.args[4]   # offset of the length field
            v_a = op.result
            # Clear the fixed-size part, which is everything after the
            # GC header and before the length field. This might be 0
            # bytes long.
            c_after_header = rmodel.inputconst(lltype.Signed,
                                               llmemory.sizeof(self.HDR))
            v_clear_size = hop.genop('int_sub', [c_length_ofs, c_after_header],
                                     resulttype=lltype.Signed)
            self.emit_raw_memclear(hop.llops, v_clear_size, None,
                                   c_after_header, v_a)
            # Clear the variable-size part
            self.emit_raw_memclear(hop.llops, v_num_elem, c_itemsize,
                                   c_basesize, v_a)
  895. def gct_get_write_barrier_failing_case(self, hop):
  896. op = hop.spaceop
  897. hop.genop("same_as",
  898. [self.write_barrier_failing_case_ptr],
  899. resultvar=op.result)
  900. def gct_get_write_barrier_from_array_failing_case(self, hop):
  901. op = hop.spaceop
  902. null = lltype.nullptr(op.result.concretetype.TO)
  903. c_null = rmodel.inputconst(op.result.concretetype, null)
  904. v = getattr(self, 'write_barrier_from_array_failing_case_ptr', c_null)
  905. hop.genop("same_as", [v], resultvar=op.result)
  906. def gct_zero_gc_pointers_inside(self, hop):
  907. if not self.malloc_zero_filled:
  908. v_ob = hop.spaceop.args[0]
  909. TYPE = v_ob.concretetype.TO
  910. self.gen_zero_gc_pointers(TYPE, v_ob, hop.llops)
  911. def gct_zero_everything_inside(self, hop):
  912. if not self.malloc_zero_filled:
  913. v_ob = hop.spaceop.args[0]
  914. TYPE = v_ob.concretetype.TO
  915. self.gen_zero_gc_pointers(TYPE, v_ob, hop.llops, everything=True)
  916. def gct_gc_writebarrier_before_copy(self, hop):
  917. op = hop.spaceop
  918. if not hasattr(self, 'wb_before_copy_ptr'):
  919. # no write barrier needed in that case
  920. hop.genop("same_as",
  921. [rmodel.inputconst(lltype.Bool, True)],
  922. resultvar=op.result)
  923. return
  924. source_addr = hop.genop('cast_ptr_to_adr', [op.args[0]],
  925. resulttype=llmemory.Address)
  926. dest_addr = hop.genop('cast_ptr_to_adr', [op.args[1]],
  927. resulttype=llmemory.Address)
  928. hop.genop('direct_call', [self.wb_before_copy_ptr, self.c_const_gc,
  929. source_addr, dest_addr] + op.args[2:],
  930. resultvar=op.result)
    def gct_weakref_create(self, hop):
        """Lower 'weakref_create': allocate a WEAKREF object with the
        GC's fixed-size malloc (flagged as containing a weak pointer),
        then store the address of the target object into its 'weakptr'
        field and return the result as a WeakRefPtr."""
        op = hop.spaceop
        type_id = self.get_type_id(WEAKREF)

        c_type_id = rmodel.inputconst(TYPE_ID, type_id)
        info = self.layoutbuilder.get_info(type_id)
        c_size = rmodel.inputconst(lltype.Signed, info.fixedsize)
        malloc_ptr = self.malloc_fixedsize_ptr
        c_false = rmodel.inputconst(lltype.Bool, False)
        c_has_weakptr = rmodel.inputconst(lltype.Bool, True)
        args = [self.c_const_gc, c_type_id, c_size,
                c_false, c_false, c_has_weakptr]

        # push and pop the current live variables *including* the argument
        # to the weakref_create operation, which must be kept alive and
        # moved if the GC needs to collect
        livevars = self.push_roots(hop, keep_current_args=True)
        v_result = hop.genop("direct_call", [malloc_ptr] + args,
                             resulttype=llmemory.GCREF)
        v_result = hop.genop("cast_opaque_ptr", [v_result],
                             resulttype=WEAKREFPTR)
        self.pop_roots(hop, livevars)
        # cast_ptr_to_adr must be done after malloc, as the GC pointer
        # might have moved just now.
        v_instance, = op.args
        v_addr = hop.genop("cast_ptr_to_adr", [v_instance],
                           resulttype=llmemory.Address)
        hop.genop("bare_setfield",
                  [v_result, rmodel.inputconst(lltype.Void, "weakptr"), v_addr])
        v_weakref = hop.genop("cast_ptr_to_weakrefptr", [v_result],
                              resulttype=llmemory.WeakRefPtr)
        hop.cast_result(v_weakref)
  961. def gct_weakref_deref(self, hop):
  962. v_wref, = hop.spaceop.args
  963. v_addr = hop.genop("direct_call",
  964. [self.weakref_deref_ptr, v_wref],
  965. resulttype=l

Large files are truncated, but you can click here to view the full file