
/pypy/rpython/memory/gctransform/framework.py

http://github.com/pypy/pypy
Python | 1385 lines (the listing below is truncated)

from pypy.rpython.memory.gctransform.transform import GCTransformer
from pypy.rpython.memory.gctransform.support import find_gc_ptrs_in_type, \
     get_rtti, ll_call_destructor, type_contains_pyobjs, var_ispyobj
from pypy.rpython.lltypesystem import lltype, llmemory, rffi, llgroup
from pypy.rpython import rmodel
from pypy.rpython.memory import gctypelayout
from pypy.rpython.memory.gc import marksweep
from pypy.rpython.memory.gcheader import GCHeaderBuilder
from pypy.rlib.rarithmetic import ovfcheck
from pypy.rlib import rgc
from pypy.rlib.objectmodel import we_are_translated
from pypy.translator.backendopt import graphanalyze
from pypy.translator.backendopt.support import var_needsgc
from pypy.translator.backendopt.finalizer import FinalizerAnalyzer
from pypy.annotation import model as annmodel
from pypy.rpython import annlowlevel
from pypy.rpython.rbuiltin import gen_cast
from pypy.rpython.memory.gctypelayout import ll_weakref_deref, WEAKREF
from pypy.rpython.memory.gctypelayout import convert_weakref_to, WEAKREFPTR
from pypy.rpython.memory.gctransform.log import log
from pypy.tool.sourcetools import func_with_new_name
from pypy.rpython.lltypesystem.lloperation import llop, LL_OPERATIONS
import sys, types

TYPE_ID = llgroup.HALFWORD
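
# CollectAnalyzer is a whole-program boolean analysis over the translated
# flow graphs: it answers "can this operation, or any call reachable from it,
# trigger a GC collection?".  The transformer below uses the answer to decide
# where live GC references must be saved on the root stack around a call.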
class CollectAnalyzer(graphanalyze.BoolGraphAnalyzer):

    def analyze_direct_call(self, graph, seen=None):
        try:
            func = graph.func
        except AttributeError:
            pass
        else:
            if getattr(func, '_gctransformer_hint_cannot_collect_', False):
                return False
            if getattr(func, '_gctransformer_hint_close_stack_', False):
                return True
        return graphanalyze.GraphAnalyzer.analyze_direct_call(self, graph,
                                                              seen)

    def analyze_external_call(self, op, seen=None):
        funcobj = op.args[0].value._obj
        if getattr(funcobj, 'random_effects_on_gcobjs', False):
            return True
        return graphanalyze.GraphAnalyzer.analyze_external_call(self, op,
                                                                seen)

    def analyze_simple_operation(self, op, graphinfo):
        if op.opname in ('malloc', 'malloc_varsize'):
            flags = op.args[1].value
            return flags['flavor'] == 'gc'
        else:
            return (op.opname in LL_OPERATIONS and
                    LL_OPERATIONS[op.opname].canmallocgc)
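
# find_initializing_stores() collects stores into a freshly malloc'ed object
# that happen before any operation that could collect; such stores can skip
# the write barrier.  A rough sketch of the flow-graph pattern it matches
# (purely illustrative, not code from this file):
#
#     block N:    x = malloc(T)          # second-to-last operation
#                 c = ptr_nonzero(x)     # last operation, used as exitswitch
#     block N+1 (reached only via the True link, single predecessor):
#                 setfield(x, 'field', y)   # recorded as an initializing store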
def find_initializing_stores(collect_analyzer, graph):
    from pypy.objspace.flow.model import mkentrymap
    entrymap = mkentrymap(graph)
    # a bit of a hackish analysis: if a block contains a malloc and a check
    # that the result is not zero, then the block following the True link
    # will usually initialize the newly allocated object
    result = set()
    def find_in_block(block, mallocvars):
        for i, op in enumerate(block.operations):
            if op.opname in ("cast_pointer", "same_as"):
                if op.args[0] in mallocvars:
                    mallocvars[op.result] = True
            elif op.opname in ("setfield", "setarrayitem", "setinteriorfield"):
                TYPE = op.args[-1].concretetype
                if (op.args[0] in mallocvars and
                    isinstance(TYPE, lltype.Ptr) and
                    TYPE.TO._gckind == "gc"):
                    result.add(op)
            else:
                if collect_analyzer.analyze(op):
                    return
        for exit in block.exits:
            if len(entrymap[exit.target]) != 1:
                continue
            newmallocvars = {}
            for i, var in enumerate(exit.args):
                if var in mallocvars:
                    newmallocvars[exit.target.inputargs[i]] = True
            if newmallocvars:
                find_in_block(exit.target, newmallocvars)
    mallocnum = 0
    blockset = set(graph.iterblocks())
    while blockset:
        block = blockset.pop()
        if len(block.operations) < 2:
            continue
        mallocop = block.operations[-2]
        checkop = block.operations[-1]
        if not (mallocop.opname == "malloc" and
                checkop.opname == "ptr_nonzero" and
                mallocop.result is checkop.args[0] and
                block.exitswitch is checkop.result):
            continue
        rtti = get_rtti(mallocop.args[0].value)
        if rtti is not None and hasattr(rtti._obj, 'destructor_funcptr'):
            continue
        exits = [exit for exit in block.exits if exit.llexitcase]
        if len(exits) != 1:
            continue
        exit = exits[0]
        if len(entrymap[exit.target]) != 1:
            continue
        try:
            index = exit.args.index(mallocop.result)
        except ValueError:
            continue
        target = exit.target
        mallocvars = {target.inputargs[index]: True}
        mallocnum += 1
        find_in_block(target, mallocvars)
    #if result:
    #    print "found %s initializing stores in %s" % (len(result), graph.name)
    return result
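
# find_clean_setarrayitems() spots 'setarrayitem' operations that only copy a
# value just read out of the same array by 'getarrayitem', with no potentially
# collecting operation in between.  Copying an item back into the array it
# came from cannot create a reference the GC does not already know about, so
# for GCs that opt in via can_optimize_clean_setarrayitems() these stores are
# added to the same "clean" set.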
def find_clean_setarrayitems(collect_analyzer, graph):
    result = set()
    for block in graph.iterblocks():
        cache = set()
        for op in block.operations:
            if op.opname == 'getarrayitem':
                cache.add((op.args[0], op.result))
            elif op.opname == 'setarrayitem':
                if (op.args[0], op.args[2]) in cache:
                    result.add(op)
            elif collect_analyzer.analyze(op):
                cache = set()
    return result
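
# FrameworkGCTransformer drives the "framework" GC integration: it rewrites
# every low-level graph so that gc-flavored mallocs become calls into the
# chosen GC class, write barriers are inserted where needed, and live GC
# references are pushed on / popped from the root stack around operations
# that may collect.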
class FrameworkGCTransformer(GCTransformer):
    root_stack_depth = None    # for tests to override

    def __init__(self, translator):
        from pypy.rpython.memory.gc.base import choose_gc_from_config
        from pypy.rpython.memory.gc.base import ARRAY_TYPEID_MAP
        from pypy.rpython.memory.gc import inspector

        super(FrameworkGCTransformer, self).__init__(translator, inline=True)
        if hasattr(self, 'GC_PARAMS'):
            # for tests: the GC choice can be specified as class attributes
            from pypy.rpython.memory.gc.marksweep import MarkSweepGC
            GCClass = getattr(self, 'GCClass', MarkSweepGC)
            GC_PARAMS = self.GC_PARAMS
        else:
            # for regular translation: pick the GC from the config
            GCClass, GC_PARAMS = choose_gc_from_config(translator.config)

        if hasattr(translator, '_jit2gc'):
            self.layoutbuilder = translator._jit2gc['layoutbuilder']
        else:
            self.layoutbuilder = TransformerLayoutBuilder(translator, GCClass)
        self.layoutbuilder.transformer = self
        self.get_type_id = self.layoutbuilder.get_type_id

        # set up GCData with the llgroup from the layoutbuilder, which
        # will grow as more TYPE_INFO members are added to it
        gcdata = gctypelayout.GCData(self.layoutbuilder.type_info_group)

        # initialize the following two fields with a random non-NULL address,
        # to make the annotator happy.  The fields are patched in finish()
        # to point to a real array.
        foo = lltype.malloc(lltype.FixedSizeArray(llmemory.Address, 1),
                            immortal=True, zero=True)
        a_random_address = llmemory.cast_ptr_to_adr(foo)
        gcdata.static_root_start = a_random_address      # patched in finish()
        gcdata.static_root_nongcend = a_random_address   # patched in finish()
        gcdata.static_root_end = a_random_address        # patched in finish()
        gcdata.max_type_id = 13                          # patched in finish()
        gcdata.typeids_z = a_random_address              # patched in finish()
        self.gcdata = gcdata
        self.malloc_fnptr_cache = {}

        gcdata.gc = GCClass(translator.config.translation, **GC_PARAMS)
        root_walker = self.build_root_walker()
        self.root_walker = root_walker
        gcdata.set_query_functions(gcdata.gc)
        gcdata.gc.set_root_walker(root_walker)
        self.num_pushs = 0
        self.write_barrier_calls = 0
        self.write_barrier_from_array_calls = 0

        def frameworkgc_setup():
            # run-time initialization code
            root_walker.setup_root_walker()
            gcdata.gc.setup()
            gcdata.gc.post_setup()

        def frameworkgc__teardown():
            # run-time teardown code for tests!
            gcdata.gc._teardown()

        bk = self.translator.annotator.bookkeeper
        r_typeid16 = rffi.platform.numbertype_to_rclass[TYPE_ID]
        s_typeid16 = annmodel.SomeInteger(knowntype=r_typeid16)

        # the point of this little dance is to not annotate
        # self.gcdata.static_root_xyz as constants. XXX is it still needed??
        data_classdef = bk.getuniqueclassdef(gctypelayout.GCData)
        data_classdef.generalize_attr(
            'static_root_start',
            annmodel.SomeAddress())
        data_classdef.generalize_attr(
            'static_root_nongcend',
            annmodel.SomeAddress())
        data_classdef.generalize_attr(
            'static_root_end',
            annmodel.SomeAddress())
        data_classdef.generalize_attr(
            'max_type_id',
            annmodel.SomeInteger())
        data_classdef.generalize_attr(
            'typeids_z',
            annmodel.SomeAddress())

        annhelper = annlowlevel.MixLevelHelperAnnotator(self.translator.rtyper)

        def getfn(ll_function, args_s, s_result, inline=False,
                  minimal_transform=True):
            graph = annhelper.getgraph(ll_function, args_s, s_result)
            if minimal_transform:
                self.need_minimal_transform(graph)
            if inline:
                self.graphs_to_inline[graph] = True
            return annhelper.graph2const(graph)

        self.frameworkgc_setup_ptr = getfn(frameworkgc_setup, [],
                                           annmodel.s_None)
        # for tests
        self.frameworkgc__teardown_ptr = getfn(frameworkgc__teardown, [],
                                               annmodel.s_None)

        if root_walker.need_root_stack:
            self.incr_stack_ptr = getfn(root_walker.incr_stack,
                                        [annmodel.SomeInteger()],
                                        annmodel.SomeAddress(),
                                        inline = True)
            self.decr_stack_ptr = getfn(root_walker.decr_stack,
                                        [annmodel.SomeInteger()],
                                        annmodel.SomeAddress(),
                                        inline = True)
        else:
            self.incr_stack_ptr = None
            self.decr_stack_ptr = None
        self.weakref_deref_ptr = self.inittime_helper(
            ll_weakref_deref, [llmemory.WeakRefPtr], llmemory.Address)

        classdef = bk.getuniqueclassdef(GCClass)
        s_gc = annmodel.SomeInstance(classdef)
        s_gcref = annmodel.SomePtr(llmemory.GCREF)

        malloc_fixedsize_clear_meth = GCClass.malloc_fixedsize_clear.im_func
        self.malloc_fixedsize_clear_ptr = getfn(
            malloc_fixedsize_clear_meth,
            [s_gc, s_typeid16,
             annmodel.SomeInteger(nonneg=True),
             annmodel.SomeBool(),
             annmodel.SomeBool(),
             annmodel.SomeBool()], s_gcref,
            inline = False)
        if hasattr(GCClass, 'malloc_fixedsize'):
            malloc_fixedsize_meth = GCClass.malloc_fixedsize.im_func
            self.malloc_fixedsize_ptr = getfn(
                malloc_fixedsize_meth,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeBool(),
                 annmodel.SomeBool(),
                 annmodel.SomeBool()], s_gcref,
                inline = False)
        else:
            malloc_fixedsize_meth = None
            self.malloc_fixedsize_ptr = self.malloc_fixedsize_clear_ptr
##         self.malloc_varsize_ptr = getfn(
##             GCClass.malloc_varsize.im_func,
##             [s_gc] + [annmodel.SomeInteger(nonneg=True) for i in range(5)]
##             + [annmodel.SomeBool()], s_gcref)
        self.malloc_varsize_clear_ptr = getfn(
            GCClass.malloc_varsize_clear.im_func,
            [s_gc, s_typeid16]
            + [annmodel.SomeInteger(nonneg=True) for i in range(4)], s_gcref)
        self.collect_ptr = getfn(GCClass.collect.im_func,
            [s_gc, annmodel.SomeInteger()], annmodel.s_None)
        self.can_move_ptr = getfn(GCClass.can_move.im_func,
                                  [s_gc, annmodel.SomeAddress()],
                                  annmodel.SomeBool())

        if hasattr(GCClass, 'shrink_array'):
            self.shrink_array_ptr = getfn(
                GCClass.shrink_array.im_func,
                [s_gc, annmodel.SomeAddress(),
                 annmodel.SomeInteger(nonneg=True)], annmodel.s_Bool)
        else:
            self.shrink_array_ptr = None

        if hasattr(GCClass, 'assume_young_pointers'):
            # xxx should really be a noop for gcs without generations
            self.assume_young_pointers_ptr = getfn(
                GCClass.assume_young_pointers.im_func,
                [s_gc, annmodel.SomeAddress()],
                annmodel.s_None)

        if hasattr(GCClass, 'heap_stats'):
            self.heap_stats_ptr = getfn(GCClass.heap_stats.im_func,
                    [s_gc], annmodel.SomePtr(lltype.Ptr(ARRAY_TYPEID_MAP)),
                    minimal_transform=False)
            self.get_member_index_ptr = getfn(
                GCClass.get_member_index.im_func,
                [s_gc, annmodel.SomeInteger(knowntype=llgroup.r_halfword)],
                annmodel.SomeInteger())

        if hasattr(GCClass, 'writebarrier_before_copy'):
            self.wb_before_copy_ptr = \
                    getfn(GCClass.writebarrier_before_copy.im_func,
                    [s_gc] + [annmodel.SomeAddress()] * 2 +
                    [annmodel.SomeInteger()] * 3, annmodel.SomeBool())
        elif GCClass.needs_write_barrier:
            raise NotImplementedError("GC needs write barrier, but does not provide writebarrier_before_copy functionality")

        # in some GCs we can inline the common case of
        # malloc_fixedsize(typeid, size, False, False, False)
        if getattr(GCClass, 'inline_simple_malloc', False):
            # make a copy of this function so that it gets annotated
            # independently and the constants are folded inside
            if malloc_fixedsize_meth is None:
                malloc_fast_meth = malloc_fixedsize_clear_meth
                self.malloc_fast_is_clearing = True
            else:
                malloc_fast_meth = malloc_fixedsize_meth
                self.malloc_fast_is_clearing = False
            malloc_fast = func_with_new_name(
                malloc_fast_meth,
                "malloc_fast")
            s_False = annmodel.SomeBool(); s_False.const = False
            self.malloc_fast_ptr = getfn(
                malloc_fast,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 s_False, s_False, s_False], s_gcref,
                inline = True)
        else:
            self.malloc_fast_ptr = None
        # in some GCs we can also inline the common case of
        # malloc_varsize(typeid, length, (3 constant sizes), True, False)
        if getattr(GCClass, 'inline_simple_malloc_varsize', False):
            # make a copy of this function so that it gets annotated
            # independently and the constants are folded inside
            malloc_varsize_clear_fast = func_with_new_name(
                GCClass.malloc_varsize_clear.im_func,
                "malloc_varsize_clear_fast")
            s_False = annmodel.SomeBool(); s_False.const = False
            self.malloc_varsize_clear_fast_ptr = getfn(
                malloc_varsize_clear_fast,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeInteger(nonneg=True),
                 annmodel.SomeInteger(nonneg=True)], s_gcref,
                inline = True)
        else:
            self.malloc_varsize_clear_fast_ptr = None

        if getattr(GCClass, 'malloc_varsize_nonmovable', False):
            malloc_nonmovable = func_with_new_name(
                GCClass.malloc_varsize_nonmovable.im_func,
                "malloc_varsize_nonmovable")
            self.malloc_varsize_nonmovable_ptr = getfn(
                malloc_nonmovable,
                [s_gc, s_typeid16,
                 annmodel.SomeInteger(nonneg=True)], s_gcref)
        else:
            self.malloc_varsize_nonmovable_ptr = None

        if getattr(GCClass, 'raw_malloc_memory_pressure', False):
            def raw_malloc_memory_pressure_varsize(length, itemsize):
                totalmem = length * itemsize
                if totalmem > 0:
                    gcdata.gc.raw_malloc_memory_pressure(totalmem)
                #else: probably an overflow -- the following rawmalloc
                #      will fail then
            def raw_malloc_memory_pressure(sizehint):
                gcdata.gc.raw_malloc_memory_pressure(sizehint)
            self.raw_malloc_memory_pressure_varsize_ptr = getfn(
                raw_malloc_memory_pressure_varsize,
                [annmodel.SomeInteger(), annmodel.SomeInteger()],
                annmodel.s_None, minimal_transform = False)
            self.raw_malloc_memory_pressure_ptr = getfn(
                raw_malloc_memory_pressure,
                [annmodel.SomeInteger()],
                annmodel.s_None, minimal_transform = False)

        self.identityhash_ptr = getfn(GCClass.identityhash.im_func,
                                      [s_gc, s_gcref],
                                      annmodel.SomeInteger(),
                                      minimal_transform=False)
        if getattr(GCClass, 'obtain_free_space', False):
            self.obtainfreespace_ptr = getfn(GCClass.obtain_free_space.im_func,
                                             [s_gc, annmodel.SomeInteger()],
                                             annmodel.SomeAddress())

        if GCClass.moving_gc:
            self.id_ptr = getfn(GCClass.id.im_func,
                                [s_gc, s_gcref], annmodel.SomeInteger(),
                                inline = False,
                                minimal_transform = False)
        else:
            self.id_ptr = None

        self.get_rpy_roots_ptr = getfn(inspector.get_rpy_roots,
                                       [s_gc],
                                       rgc.s_list_of_gcrefs(),
                                       minimal_transform=False)
        self.get_rpy_referents_ptr = getfn(inspector.get_rpy_referents,
                                           [s_gc, s_gcref],
                                           rgc.s_list_of_gcrefs(),
                                           minimal_transform=False)
        self.get_rpy_memory_usage_ptr = getfn(inspector.get_rpy_memory_usage,
                                              [s_gc, s_gcref],
                                              annmodel.SomeInteger(),
                                              minimal_transform=False)
        self.get_rpy_type_index_ptr = getfn(inspector.get_rpy_type_index,
                                            [s_gc, s_gcref],
                                            annmodel.SomeInteger(),
                                            minimal_transform=False)
        self.is_rpy_instance_ptr = getfn(inspector.is_rpy_instance,
                                         [s_gc, s_gcref],
                                         annmodel.SomeBool(),
                                         minimal_transform=False)
        self.dump_rpy_heap_ptr = getfn(inspector.dump_rpy_heap,
                                       [s_gc, annmodel.SomeInteger()],
                                       annmodel.s_Bool,
                                       minimal_transform=False)
        self.get_typeids_z_ptr = getfn(inspector.get_typeids_z,
                                       [s_gc],
                                       annmodel.SomePtr(
                                           lltype.Ptr(rgc.ARRAY_OF_CHAR)),
                                       minimal_transform=False)
        self.set_max_heap_size_ptr = getfn(GCClass.set_max_heap_size.im_func,
                                           [s_gc,
                                            annmodel.SomeInteger(nonneg=True)],
                                           annmodel.s_None)
        self.write_barrier_ptr = None
        self.write_barrier_from_array_ptr = None
        if GCClass.needs_write_barrier:
            self.write_barrier_ptr = getfn(GCClass.write_barrier.im_func,
                                           [s_gc,
                                            annmodel.SomeAddress(),
                                            annmodel.SomeAddress()],
                                           annmodel.s_None,
                                           inline=True)
            func = getattr(gcdata.gc, 'remember_young_pointer', None)
            if func is not None:
                # func should not be a bound method, but a real function
                assert isinstance(func, types.FunctionType)
                self.write_barrier_failing_case_ptr = getfn(func,
                                           [annmodel.SomeAddress(),
                                            annmodel.SomeAddress()],
                                           annmodel.s_None)
            func = getattr(GCClass, 'write_barrier_from_array', None)
            if func is not None:
                self.write_barrier_from_array_ptr = getfn(func.im_func,
                                           [s_gc,
                                            annmodel.SomeAddress(),
                                            annmodel.SomeAddress(),
                                            annmodel.SomeInteger()],
                                           annmodel.s_None,
                                           inline=True)
                func = getattr(gcdata.gc, 'remember_young_pointer_from_array3',
                               None)
                if func is not None:
                    # func should not be a bound method, but a real function
                    assert isinstance(func, types.FunctionType)
                    self.write_barrier_from_array_failing_case_ptr = \
                        getfn(func,
                              [annmodel.SomeAddress(),
                               annmodel.SomeInteger(),
                               annmodel.SomeAddress()],
                              annmodel.s_None)
        self.statistics_ptr = getfn(GCClass.statistics.im_func,
                                    [s_gc, annmodel.SomeInteger()],
                                    annmodel.SomeInteger())

        # thread support
        if translator.config.translation.continuation:
            root_walker.need_stacklet_support(self, getfn)
        if translator.config.translation.thread:
            root_walker.need_thread_support(self, getfn)

        self.layoutbuilder.encode_type_shapes_now()

        annhelper.finish()   # at this point, annotate all mix-level helpers
        annhelper.backend_optimize()

        self.collect_analyzer = CollectAnalyzer(self.translator)
        self.collect_analyzer.analyze_all()

        s_gc = self.translator.annotator.bookkeeper.valueoftype(GCClass)
        r_gc = self.translator.rtyper.getrepr(s_gc)
        self.c_const_gc = rmodel.inputconst(r_gc, self.gcdata.gc)
        s_gc_data = self.translator.annotator.bookkeeper.valueoftype(
            gctypelayout.GCData)
        r_gc_data = self.translator.rtyper.getrepr(s_gc_data)
        self.c_const_gcdata = rmodel.inputconst(r_gc_data, self.gcdata)
        self.malloc_zero_filled = GCClass.malloc_zero_filled

        HDR = self.HDR = self.gcdata.gc.gcheaderbuilder.HDR
        size_gc_header = self.gcdata.gc.gcheaderbuilder.size_gc_header
        vtableinfo = (HDR, size_gc_header, self.gcdata.gc.typeid_is_in_field)
        self.c_vtableinfo = rmodel.inputconst(lltype.Void, vtableinfo)
        tig = self.layoutbuilder.type_info_group._as_ptr()
        self.c_type_info_group = rmodel.inputconst(lltype.typeOf(tig), tig)
        sko = llmemory.sizeof(gcdata.TYPE_INFO)
        self.c_vtinfo_skip_offset = rmodel.inputconst(lltype.typeOf(sko), sko)
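
    # The gct_* methods below are the per-operation hooks of GCTransformer:
    # each receives a 'hop' describing one low-level operation and replaces it
    # in the graph, typically with a direct_call to one of the helper function
    # pointers prepared in __init__.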
    def build_root_walker(self):
        from pypy.rpython.memory.gctransform import shadowstack
        return shadowstack.ShadowStackRootWalker(self)

    def consider_constant(self, TYPE, value):
        self.layoutbuilder.consider_constant(TYPE, value, self.gcdata.gc)

    #def get_type_id(self, TYPE):
    #    this method is attached to the instance and redirects to
    #    layoutbuilder.get_type_id().

    def special_funcptr_for_type(self, TYPE):
        return self.layoutbuilder.special_funcptr_for_type(TYPE)

    def gc_header_for(self, obj, needs_hash=False):
        hdr = self.gcdata.gc.gcheaderbuilder.header_of_object(obj)
        HDR = self.HDR
        withhash, flag = self.gcdata.gc.withhash_flag_is_in_field
        x = getattr(hdr, withhash)
        TYPE = lltype.typeOf(x)
        x = lltype.cast_primitive(lltype.Signed, x)
        if needs_hash:
            x |= flag       # set the flag in the header
        else:
            x &= ~flag      # clear the flag in the header
        x = lltype.cast_primitive(TYPE, x)
        setattr(hdr, withhash, x)
        return hdr

    def get_hash_offset(self, T):
        type_id = self.get_type_id(T)
        assert not self.gcdata.q_is_varsize(type_id)
        return self.gcdata.q_fixed_size(type_id)
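
    # finish_tables() runs once the type-info table is complete: it closes the
    # llgroup, builds the array of static root addresses and the compressed
    # typeids list, and patches the gcdata fields that __init__ only filled in
    # with placeholder values.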
    def finish_tables(self):
        group = self.layoutbuilder.close_table()
        log.info("assigned %s typeids" % (len(group.members), ))
        log.info("added %s push/pop stack root instructions" % (
                     self.num_pushs, ))
        if self.write_barrier_ptr:
            log.info("inserted %s write barrier calls" % (
                         self.write_barrier_calls, ))
        if self.write_barrier_from_array_ptr:
            log.info("inserted %s write_barrier_from_array calls" % (
                         self.write_barrier_from_array_calls, ))

        # XXX because we call inputconst already in replace_malloc, we can't
        # modify the instance, we have to modify the 'rtyped instance'
        # instead.  horrors.  is there a better way?
        s_gcdata = self.translator.annotator.bookkeeper.immutablevalue(
            self.gcdata)
        r_gcdata = self.translator.rtyper.getrepr(s_gcdata)
        ll_instance = rmodel.inputconst(r_gcdata, self.gcdata).value

        addresses_of_static_ptrs = (
            self.layoutbuilder.addresses_of_static_ptrs_in_nongc +
            self.layoutbuilder.addresses_of_static_ptrs)
        log.info("found %s static roots" % (len(addresses_of_static_ptrs), ))
        ll_static_roots_inside = lltype.malloc(lltype.Array(llmemory.Address),
                                               len(addresses_of_static_ptrs),
                                               immortal=True)

        for i in range(len(addresses_of_static_ptrs)):
            ll_static_roots_inside[i] = addresses_of_static_ptrs[i]
        ll_instance.inst_static_root_start = llmemory.cast_ptr_to_adr(ll_static_roots_inside) + llmemory.ArrayItemsOffset(lltype.Array(llmemory.Address))
        ll_instance.inst_static_root_nongcend = ll_instance.inst_static_root_start + llmemory.sizeof(llmemory.Address) * len(self.layoutbuilder.addresses_of_static_ptrs_in_nongc)
        ll_instance.inst_static_root_end = ll_instance.inst_static_root_start + llmemory.sizeof(llmemory.Address) * len(addresses_of_static_ptrs)
        newgcdependencies = []
        newgcdependencies.append(ll_static_roots_inside)
        ll_instance.inst_max_type_id = len(group.members)
        typeids_z = self.write_typeid_list()
        ll_typeids_z = lltype.malloc(rgc.ARRAY_OF_CHAR,
                                     len(typeids_z),
                                     immortal=True)
        for i in range(len(typeids_z)):
            ll_typeids_z[i] = typeids_z[i]
        ll_instance.inst_typeids_z = llmemory.cast_ptr_to_adr(ll_typeids_z)
        newgcdependencies.append(ll_typeids_z)
        return newgcdependencies

    def get_finish_tables(self):
        # We must first make sure that the type_info_group's members
        # are all followed.  Do it repeatedly while new members show up.
        # Once it is really done, do finish_tables().
        seen = 0
        while seen < len(self.layoutbuilder.type_info_group.members):
            curtotal = len(self.layoutbuilder.type_info_group.members)
            yield self.layoutbuilder.type_info_group.members[seen:curtotal]
            seen = curtotal
        yield self.finish_tables()

    def write_typeid_list(self):
        """write out the list of type ids together with some info"""
        from pypy.tool.udir import udir
        # XXX not ideal since it is not per compilation, but per run
        # XXX argh argh, this only gives the member index but not the
        #     real typeid, which is a complete mess to obtain now...
        all_ids = self.layoutbuilder.id_of_type.items()
        all_ids = [(typeinfo.index, TYPE) for (TYPE, typeinfo) in all_ids]
        all_ids = dict(all_ids)
        f = udir.join("typeids.txt").open("w")
        for index in range(len(self.layoutbuilder.type_info_group.members)):
            f.write("member%-4d %s\n" % (index, all_ids.get(index, '?')))
        f.close()
        try:
            import zlib
            return zlib.compress(udir.join("typeids.txt").read(), 9)
        except ImportError:
            return ''

    def transform_graph(self, graph):
        func = getattr(graph, 'func', None)
        if func and getattr(func, '_gc_no_collect_', False):
            if self.collect_analyzer.analyze_direct_call(graph):
                raise Exception("'no_collect' function can trigger collection:"
                                " %s" % func)
        if self.write_barrier_ptr:
            self.clean_sets = (
                find_initializing_stores(self.collect_analyzer, graph))
            if self.gcdata.gc.can_optimize_clean_setarrayitems():
                self.clean_sets = self.clean_sets.union(
                    find_clean_setarrayitems(self.collect_analyzer, graph))
        super(FrameworkGCTransformer, self).transform_graph(graph)
        if self.write_barrier_ptr:
            self.clean_sets = None
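
    # Calls that may collect are bracketed with push_roots()/pop_roots(), so
    # the GC can find (and, for moving GCs, update) the pointers that are live
    # across the call; calls proven non-collecting by CollectAnalyzer are left
    # untouched.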
    def gct_direct_call(self, hop):
        if self.collect_analyzer.analyze(hop.spaceop):
            livevars = self.push_roots(hop)
            self.default(hop)
            self.pop_roots(hop, livevars)
        else:
            self.default(hop)
            if hop.spaceop.opname == "direct_call":
                self.mark_call_cannotcollect(hop, hop.spaceop.args[0])

    def mark_call_cannotcollect(self, hop, name):
        pass

    gct_indirect_call = gct_direct_call
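
    # gct_fv_gc_malloc() lowers a gc-flavored malloc: it picks the cheapest
    # suitable entry point (inlined fast path, zeroing variant, non-movable
    # varsize variant, or the generic one) and passes the precomputed type id,
    # size and finalizer flags as constants.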
    def gct_fv_gc_malloc(self, hop, flags, TYPE, *args):
        op = hop.spaceop
        flavor = flags['flavor']
        PTRTYPE = op.result.concretetype
        assert PTRTYPE.TO == TYPE
        type_id = self.get_type_id(TYPE)

        c_type_id = rmodel.inputconst(TYPE_ID, type_id)
        info = self.layoutbuilder.get_info(type_id)
        c_size = rmodel.inputconst(lltype.Signed, info.fixedsize)
        kind_and_fptr = self.special_funcptr_for_type(TYPE)
        has_finalizer = (kind_and_fptr is not None and
                         kind_and_fptr[0] == "finalizer")
        has_light_finalizer = (kind_and_fptr is not None and
                               kind_and_fptr[0] == "light_finalizer")
        if has_light_finalizer:
            has_finalizer = True
        c_has_finalizer = rmodel.inputconst(lltype.Bool, has_finalizer)
        c_has_light_finalizer = rmodel.inputconst(lltype.Bool,
                                                  has_light_finalizer)

        if not op.opname.endswith('_varsize') and not flags.get('varsize'):
            #malloc_ptr = self.malloc_fixedsize_ptr
            zero = flags.get('zero', False)
            if (self.malloc_fast_ptr is not None and
                not c_has_finalizer.value and
                (self.malloc_fast_is_clearing or not zero)):
                malloc_ptr = self.malloc_fast_ptr
            elif zero:
                malloc_ptr = self.malloc_fixedsize_clear_ptr
            else:
                malloc_ptr = self.malloc_fixedsize_ptr
            args = [self.c_const_gc, c_type_id, c_size,
                    c_has_finalizer, c_has_light_finalizer,
                    rmodel.inputconst(lltype.Bool, False)]
        else:
            assert not c_has_finalizer.value
            info_varsize = self.layoutbuilder.get_info_varsize(type_id)
            v_length = op.args[-1]
            c_ofstolength = rmodel.inputconst(lltype.Signed,
                                              info_varsize.ofstolength)
            c_varitemsize = rmodel.inputconst(lltype.Signed,
                                              info_varsize.varitemsize)
            if flags.get('nonmovable') and self.malloc_varsize_nonmovable_ptr:
                # we don't have tests for such cases, let's fail
                # explicitly
                malloc_ptr = self.malloc_varsize_nonmovable_ptr
                args = [self.c_const_gc, c_type_id, v_length]
            else:
                if self.malloc_varsize_clear_fast_ptr is not None:
                    malloc_ptr = self.malloc_varsize_clear_fast_ptr
                else:
                    malloc_ptr = self.malloc_varsize_clear_ptr
                args = [self.c_const_gc, c_type_id, v_length, c_size,
                        c_varitemsize, c_ofstolength]
        livevars = self.push_roots(hop)
        v_result = hop.genop("direct_call", [malloc_ptr] + args,
                             resulttype=llmemory.GCREF)
        self.pop_roots(hop, livevars)
        return v_result

    gct_fv_gc_malloc_varsize = gct_fv_gc_malloc
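
    # Roughly, after the rewrite above a fixed-size gc malloc in a graph ends
    # up as (illustrative sketch only):
    #
    #     <push live GC vars on the root stack>
    #     r = direct_call(malloc_fast_ptr, gc, c_type_id, c_size,
    #                     False, False, False)      # returns a GCREF
    #     <pop the root stack>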
    def gct_gc__collect(self, hop):
        op = hop.spaceop
        if len(op.args) == 1:
            v_gen = op.args[0]
        else:
            # pick a number larger than expected different gc gens :-)
            v_gen = rmodel.inputconst(lltype.Signed, 9)
        livevars = self.push_roots(hop)
        hop.genop("direct_call", [self.collect_ptr, self.c_const_gc, v_gen],
                  resultvar=op.result)
        self.pop_roots(hop, livevars)

    def gct_gc_can_move(self, hop):
        op = hop.spaceop
        v_addr = hop.genop('cast_ptr_to_adr',
                           [op.args[0]], resulttype=llmemory.Address)
        hop.genop("direct_call", [self.can_move_ptr, self.c_const_gc, v_addr],
                  resultvar=op.result)

    def gct_shrink_array(self, hop):
        if self.shrink_array_ptr is None:
            return GCTransformer.gct_shrink_array(self, hop)
        op = hop.spaceop
        v_addr = hop.genop('cast_ptr_to_adr',
                           [op.args[0]], resulttype=llmemory.Address)
        v_length = op.args[1]
        hop.genop("direct_call", [self.shrink_array_ptr, self.c_const_gc,
                                  v_addr, v_length],
                  resultvar=op.result)

    def gct_gc_assume_young_pointers(self, hop):
        if not hasattr(self, 'assume_young_pointers_ptr'):
            return
        op = hop.spaceop
        v_addr = op.args[0]
        if v_addr.concretetype != llmemory.Address:
            v_addr = hop.genop('cast_ptr_to_adr',
                               [v_addr], resulttype=llmemory.Address)
        hop.genop("direct_call", [self.assume_young_pointers_ptr,
                                  self.c_const_gc, v_addr])

    def gct_gc_heap_stats(self, hop):
        if not hasattr(self, 'heap_stats_ptr'):
            return GCTransformer.gct_gc_heap_stats(self, hop)
        op = hop.spaceop
        livevars = self.push_roots(hop)
        hop.genop("direct_call", [self.heap_stats_ptr, self.c_const_gc],
                  resultvar=op.result)
        self.pop_roots(hop, livevars)

    def gct_get_member_index(self, hop):
        op = hop.spaceop
        v_typeid = op.args[0]
        hop.genop("direct_call", [self.get_member_index_ptr, self.c_const_gc,
                                  v_typeid], resultvar=op.result)

    def _gc_adr_of_gc_attr(self, hop, attrname):
        if getattr(self.gcdata.gc, attrname, None) is None:
            raise NotImplementedError("gc_adr_of_%s only for generational gcs"
                                      % (attrname,))
        op = hop.spaceop
        ofs = llmemory.offsetof(self.c_const_gc.concretetype.TO,
                                'inst_' + attrname)
        c_ofs = rmodel.inputconst(lltype.Signed, ofs)
        v_gc_adr = hop.genop('cast_ptr_to_adr', [self.c_const_gc],
                             resulttype=llmemory.Address)
        hop.genop('adr_add', [v_gc_adr, c_ofs], resultvar=op.result)

    def gct_gc_adr_of_nursery_free(self, hop):
        self._gc_adr_of_gc_attr(hop, 'nursery_free')
    def gct_gc_adr_of_nursery_top(self, hop):
        self._gc_adr_of_gc_attr(hop, 'nursery_top')

    def _gc_adr_of_gcdata_attr(self, hop, attrname):
        op = hop.spaceop
        ofs = llmemory.offsetof(self.c_const_gcdata.concretetype.TO,
                                'inst_' + attrname)
        c_ofs = rmodel.inputconst(lltype.Signed, ofs)
        v_gcdata_adr = hop.genop('cast_ptr_to_adr', [self.c_const_gcdata],
                                 resulttype=llmemory.Address)
        hop.genop('adr_add', [v_gcdata_adr, c_ofs], resultvar=op.result)

    def gct_gc_adr_of_root_stack_base(self, hop):
        self._gc_adr_of_gcdata_attr(hop, 'root_stack_base')
    def gct_gc_adr_of_root_stack_top(self, hop):
        self._gc_adr_of_gcdata_attr(hop, 'root_stack_top')
    def gct_gc_shadowstackref_new(self, hop):
        op = hop.spaceop
        livevars = self.push_roots(hop)
        hop.genop("direct_call", [self.root_walker.gc_shadowstackref_new_ptr],
                  resultvar=op.result)
        self.pop_roots(hop, livevars)

    def gct_gc_shadowstackref_context(self, hop):
        op = hop.spaceop
        hop.genop("direct_call",
                  [self.root_walker.gc_shadowstackref_context_ptr, op.args[0]],
                  resultvar=op.result)

    def gct_gc_shadowstackref_destroy(self, hop):
        op = hop.spaceop
        hop.genop("direct_call",
                  [self.root_walker.gc_shadowstackref_destroy_ptr, op.args[0]])

    def gct_gc_save_current_state_away(self, hop):
        op = hop.spaceop
        hop.genop("direct_call",
                  [self.root_walker.gc_save_current_state_away_ptr,
                   op.args[0], op.args[1]])

    def gct_gc_forget_current_state(self, hop):
        hop.genop("direct_call",
                  [self.root_walker.gc_forget_current_state_ptr])

    def gct_gc_restore_state_from(self, hop):
        op = hop.spaceop
        hop.genop("direct_call",
                  [self.root_walker.gc_restore_state_from_ptr,
                   op.args[0]])

    def gct_gc_start_fresh_new_state(self, hop):
        hop.genop("direct_call",
                  [self.root_walker.gc_start_fresh_new_state_ptr])

    def gct_gc_x_swap_pool(self, hop):
        raise NotImplementedError("old operation deprecated")
    def gct_gc_x_clone(self, hop):
        raise NotImplementedError("old operation deprecated")
    def gct_gc_x_size_header(self, hop):
        raise NotImplementedError("old operation deprecated")

    def gct_do_malloc_fixedsize_clear(self, hop):
        # used by the JIT (see pypy.jit.backend.llsupport.gc)
        op = hop.spaceop
        [v_typeid, v_size,
         v_has_finalizer, v_has_light_finalizer, v_contains_weakptr] = op.args
        livevars = self.push_roots(hop)
        hop.genop("direct_call",
                  [self.malloc_fixedsize_clear_ptr, self.c_const_gc,
                   v_typeid, v_size,
                   v_has_finalizer, v_has_light_finalizer,
                   v_contains_weakptr],
                  resultvar=op.result)
        self.pop_roots(hop, livevars)

    def gct_do_malloc_varsize_clear(self, hop):
        # used by the JIT (see pypy.jit.backend.llsupport.gc)
        op = hop.spaceop
        [v_typeid, v_length, v_size, v_itemsize,
         v_offset_to_length] = op.args
        livevars = self.push_roots(hop)
        hop.genop("direct_call",
                  [self.malloc_varsize_clear_ptr, self.c_const_gc,
                   v_typeid, v_length, v_size, v_itemsize,
                   v_offset_to_length],
                  resultvar=op.result)
        self.pop_roots(hop, livevars)

    def gct_get_write_barrier_failing_case(self, hop):
        op = hop.spaceop
        hop.genop("same_as",
                  [self.write_barrier_failing_case_ptr],
                  resultvar=op.result)

    def gct_get_write_barrier_from_array_failing_case(self, hop):
        op = hop.spaceop
        v = getattr(self, 'write_barrier_from_array_failing_case_ptr',
                    lltype.nullptr(op.result.concretetype.TO))
        hop.genop("same_as", [v], resultvar=op.result)

    def gct_zero_gc_pointers_inside(self, hop):
        if not self.malloc_zero_filled:
            v_ob = hop.spaceop.args[0]
            TYPE = v_ob.concretetype.TO
            gen_zero_gc_pointers(TYPE, v_ob, hop.llops)

    def gct_gc_writebarrier_before_copy(self, hop):
        op = hop.spaceop
        if not hasattr(self, 'wb_before_copy_ptr'):
            # no write barrier needed in that case
            hop.genop("same_as",
                      [rmodel.inputconst(lltype.Bool, True)],
                      resultvar=op.result)
            return
        source_addr = hop.genop('cast_ptr_to_adr', [op.args[0]],
                                resulttype=llmemory.Address)
        dest_addr = hop.genop('cast_ptr_to_adr', [op.args[1]],
                              resulttype=llmemory.Address)
        hop.genop('direct_call', [self.wb_before_copy_ptr, self.c_const_gc,
                                  source_addr, dest_addr] + op.args[2:],
                  resultvar=op.result)
    def gct_weakref_create(self, hop):
        op = hop.spaceop
        type_id = self.get_type_id(WEAKREF)
        c_type_id = rmodel.inputconst(TYPE_ID, type_id)
        info = self.layoutbuilder.get_info(type_id)
        c_size = rmodel.inputconst(lltype.Signed, info.fixedsize)
        malloc_ptr = self.malloc_fixedsize_ptr
        c_false = rmodel.inputconst(lltype.Bool, False)
        c_has_weakptr = rmodel.inputconst(lltype.Bool, True)
        args = [self.c_const_gc, c_type_id, c_size,
                c_false, c_false, c_has_weakptr]
        # push and pop the current live variables *including* the argument
        # to the weakref_create operation, which must be kept alive and
        # moved if the GC needs to collect
        livevars = self.push_roots(hop, keep_current_args=True)
        v_result = hop.genop("direct_call", [malloc_ptr] + args,
                             resulttype=llmemory.GCREF)
        v_result = hop.genop("cast_opaque_ptr", [v_result],
                             resulttype=WEAKREFPTR)
        self.pop_roots(hop, livevars)
        # cast_ptr_to_adr must be done after malloc, as the GC pointer
        # might have moved just now.
        v_instance, = op.args
        v_addr = hop.genop("cast_ptr_to_adr", [v_instance],
                           resulttype=llmemory.Address)
        hop.genop("bare_setfield",
                  [v_result, rmodel.inputconst(lltype.Void, "weakptr"), v_addr])
        v_weakref = hop.genop("cast_ptr_to_weakrefptr", [v_result],
                              resulttype=llmemory.WeakRefPtr)
        hop.cast_result(v_weakref)

    def gct_weakref_deref(self, hop):
        v_wref, = hop.spaceop.args
        v_addr = hop.genop("direct_call",
                           [self.weakref_deref_ptr, v_wref],
                           resulttype=llmemory.Address)
        hop.cast_result(v_addr)

    def gct_gc_identityhash(self, hop):
        livevars = self.push_roots(hop)
        [v_ptr] = hop.spaceop.args
        v_ptr = hop.genop("cast_opaque_ptr", [v_ptr],
                          resulttype=llmemory.GCREF)
        hop.genop("direct_call",
                  [self.identityhash_ptr, self.c_const_gc, v_ptr],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_id(self, hop):
        if self.id_ptr is not None:
            livevars = self.push_roots(hop)
            [v_ptr] = hop.spaceop.args
            v_ptr = hop.genop("cast_opaque_ptr", [v_ptr],
                              resulttype=llmemory.GCREF)
            hop.genop("direct_call", [self.id_ptr, self.c_const_gc, v_ptr],
                      resultvar=hop.spaceop.result)
            self.pop_roots(hop, livevars)
        else:
            hop.rename('cast_ptr_to_int')     # works nicely for non-moving GCs

    def gct_gc_obtain_free_space(self, hop):
        livevars = self.push_roots(hop)
        [v_number] = hop.spaceop.args
        hop.genop("direct_call",
                  [self.obtainfreespace_ptr, self.c_const_gc, v_number],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_set_max_heap_size(self, hop):
        [v_size] = hop.spaceop.args
        hop.genop("direct_call", [self.set_max_heap_size_ptr,
                                  self.c_const_gc,
                                  v_size])
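
    # The gc_thread_* and fork-related operations below delegate to optional
    # hooks on the root walker; when a hook is absent the operation is simply
    # dropped (gc_thread_before_fork then falls back to returning NULL).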
    def gct_gc_thread_prepare(self, hop):
        pass   # no effect any more

    def gct_gc_thread_run(self, hop):
        assert self.translator.config.translation.thread
        if hasattr(self.root_walker, 'thread_run_ptr'):
            livevars = self.push_roots(hop)
            hop.genop("direct_call", [self.root_walker.thread_run_ptr])
            self.pop_roots(hop, livevars)

    def gct_gc_thread_start(self, hop):
        assert self.translator.config.translation.thread
        if hasattr(self.root_walker, 'thread_start_ptr'):
            # only with asmgcc.  Note that this is actually called after
            # the first gc_thread_run() in the new thread.
            hop.genop("direct_call", [self.root_walker.thread_start_ptr])

    def gct_gc_thread_die(self, hop):
        assert self.translator.config.translation.thread
        if hasattr(self.root_walker, 'thread_die_ptr'):
            livevars = self.push_roots(hop)
            hop.genop("direct_call", [self.root_walker.thread_die_ptr])
            self.pop_roots(hop, livevars)

    def gct_gc_thread_before_fork(self, hop):
        if (self.translator.config.translation.thread
            and hasattr(self.root_walker, 'thread_before_fork_ptr')):
            hop.genop("direct_call", [self.root_walker.thread_before_fork_ptr],
                      resultvar=hop.spaceop.result)
        else:
            c_null = rmodel.inputconst(llmemory.Address, llmemory.NULL)
            hop.genop("same_as", [c_null],
                      resultvar=hop.spaceop.result)

    def gct_gc_thread_after_fork(self, hop):
        if (self.translator.config.translation.thread
            and hasattr(self.root_walker, 'thread_after_fork_ptr')):
            livevars = self.push_roots(hop)
            hop.genop("direct_call", [self.root_walker.thread_after_fork_ptr]
                                     + hop.spaceop.args)
            self.pop_roots(hop, livevars)

    def gct_gc_get_type_info_group(self, hop):
        return hop.cast_result(self.c_type_info_group)
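
    # The remaining gct_gc_get_rpy_* / dump_rpy_heap / typeids_z operations
    # are thin wrappers around the inspector helpers registered in __init__:
    # push the roots, call the helper with the GC instance, store the result,
    # pop the roots.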
    def gct_gc_get_rpy_roots(self, hop):
        livevars = self.push_roots(hop)
        hop.genop("direct_call",
                  [self.get_rpy_roots_ptr, self.c_const_gc],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_get_rpy_referents(self, hop):
        livevars = self.push_roots(hop)
        [v_ptr] = hop.spaceop.args
        hop.genop("direct_call",
                  [self.get_rpy_referents_ptr, self.c_const_gc, v_ptr],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_get_rpy_memory_usage(self, hop):
        livevars = self.push_roots(hop)
        [v_ptr] = hop.spaceop.args
        hop.genop("direct_call",
                  [self.get_rpy_memory_usage_ptr, self.c_const_gc, v_ptr],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_get_rpy_type_index(self, hop):
        livevars = self.push_roots(hop)
        [v_ptr] = hop.spaceop.args
        hop.genop("direct_call",
                  [self.get_rpy_type_index_ptr, self.c_const_gc, v_ptr],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_is_rpy_instance(self, hop):
        livevars = self.push_roots(hop)
        [v_ptr] = hop.spaceop.args
        hop.genop("direct_call",
                  [self.is_rpy_instance_ptr, self.c_const_gc, v_ptr],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_dump_rpy_heap(self, hop):
        livevars = self.push_roots(hop)
        [v_fd] = hop.spaceop.args
        hop.genop("direct_call",
                  [self.dump_rpy_heap_ptr, self.c_const_gc, v_fd],
                  resultvar=hop.spaceop.result)
        self.pop_roots(hop, livevars)

    def gct_gc_typeids_z(self, hop):
        livevars = self.push_roots(hop)
        hop.genop("direct_call",
                  [self.get_typeids_z_ptr, self.c_const_gc]

(listing truncated here; the full file is 1385 lines)