PageRenderTime 55ms CodeModel.GetById 27ms RepoModel.GetById 1ms app.codeStats 0ms

/gfx/harfbuzz/src/hb-ot-layout-gsubgpos-private.hh

https://bitbucket.org/MeeGoAdmin/mozilla-central/
C++ Header | 981 lines | 755 code | 138 blank | 88 comment | 90 complexity | bc9ed7748bf6c4a8358a5ba2f3a5746b MD5 | raw file
Possible License(s): AGPL-1.0, MIT, BSD-3-Clause, Apache-2.0, LGPL-2.1, 0BSD, LGPL-3.0, MPL-2.0-no-copyleft-exception, GPL-2.0, JSON
  1. /*
  2. * Copyright (C) 2007,2008,2009,2010 Red Hat, Inc.
  3. * Copyright (C) 2010 Google, Inc.
  4. *
  5. * This is part of HarfBuzz, a text shaping library.
  6. *
  7. * Permission is hereby granted, without written agreement and without
  8. * license or royalty fees, to use, copy, modify, and distribute this
  9. * software and its documentation for any purpose, provided that the
  10. * above copyright notice and the following two paragraphs appear in
  11. * all copies of this software.
  12. *
  13. * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14. * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15. * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16. * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17. * DAMAGE.
  18. *
  19. * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20. * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21. * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
  22. * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23. * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24. *
  25. * Red Hat Author(s): Behdad Esfahbod
  26. * Google Author(s): Behdad Esfahbod
  27. */
  28. #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
  29. #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
  30. #include "hb-buffer-private.hh"
  31. #include "hb-ot-layout-gdef-private.hh"
  32. HB_BEGIN_DECLS
/* Per-glyph scratch slots in hb_glyph_info_t.var2, used to track which
 * ligature a glyph belongs to and its component index within it. */
  33. /* buffer var allocations */
  34. #define lig_id() var2.u16[0] /* unique ligature id */
  35. #define lig_comp() var2.u16[1] /* component number in the ligature (0 = base) */
  36. #ifndef HB_DEBUG_APPLY
  37. #define HB_DEBUG_APPLY (HB_DEBUG+0)
  38. #endif
/* Instantiates a scoped trace object for lookup application; active only
 * when HB_DEBUG_APPLY is non-zero. */
  39. #define TRACE_APPLY() \
  40. hb_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", HB_FUNC, this); \
  41. HB_BEGIN_DECLS
/* State threaded through every GSUB/GPOS lookup application: the layout
 * context, the buffer being shaped, the active lookup's mask and props,
 * and the length/nesting budgets used by contextual lookups. */
  42. struct hb_apply_context_t
  43. {
  44. unsigned int debug_depth;
  45. hb_ot_layout_context_t *layout;
  46. hb_buffer_t *buffer;
  47. hb_mask_t lookup_mask;
  48. unsigned int context_length;
  49. unsigned int nesting_level_left;
  50. unsigned int lookup_props;
  51. unsigned int property; /* property of first glyph */
/* Replaces the current glyph, resetting its cached properties first. */
  52. inline void replace_glyph (hb_codepoint_t glyph_index) const
  53. {
  54. clear_property ();
  55. buffer->replace_glyph (glyph_index);
  56. }
/* Replaces num_in glyphs with num_out glyphs supplied as big-endian
 * 16-bit glyph ids; resets the cached properties first. */
  57. inline void replace_glyphs_be16 (unsigned int num_in,
  58. unsigned int num_out,
  59. const uint16_t *glyph_data_be) const
  60. {
  61. clear_property ();
  62. buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
  63. }
/* Seeds the current glyph's cached class with a guessed value. */
  64. inline void guess_glyph_class (unsigned int klass)
  65. {
  66. /* XXX if ! has gdef */
  67. buffer->info[buffer->i].props_cache() = klass;
  68. }
  69. private:
/* Invalidates the current glyph's cached properties. */
  70. inline void clear_property (void) const
  71. {
  72. /* XXX if has gdef */
  73. buffer->info[buffer->i].props_cache() = 0;
  74. }
  75. };
/* match_func_t decides whether a glyph matches one rule value (a glyph id,
 * class, or coverage offset, depending on the format); apply_lookup_func_t
 * recursively applies a nested lookup by index. */
  76. typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
  77. typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
/* The pair of callbacks a contextual lookup runs with. */
  78. struct ContextFuncs
  79. {
  80. match_func_t match;
  81. apply_lookup_func_t apply;
  82. };
  83. static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
  84. {
  85. return glyph_id == value;
  86. }
  87. static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
  88. {
  89. const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  90. return class_def.get_class (glyph_id) == value;
  91. }
/* Matcher for (chain) context format 3: the rule value is an offset to a
 * Coverage table (relative to data, the table base); a glyph matches when
 * that Coverage covers it.  Note `data+coverage` uses the project's
 * overloaded offset-dereference idiom. */
  92. static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
  93. {
  94. const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  95. return (data+coverage) (glyph_id) != NOT_COVERED;
  96. }
/* Matches the input sequence of a (chain) contextual rule against the
 * buffer starting at the current position.  count includes the first glyph
 * (already matched via coverage/class), so input[] holds count-1 values
 * starting with the second glyph.  Glyphs flagged by lookup_props (e.g.
 * marks) are skipped.  On success, writes the total matched span length
 * (including skipped glyphs) to *context_length_out. */
  97. static inline bool match_input (hb_apply_context_t *c,
  98. unsigned int count, /* Including the first glyph (not matched) */
  99. const USHORT input[], /* Array of input values--start with second glyph */
  100. match_func_t match_func,
  101. const void *match_data,
  102. unsigned int *context_length_out)
  103. {
  104. unsigned int i, j;
/* Clamp the scan to both the buffer end and the caller's context budget. */
  105. unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
  106. if (unlikely (c->buffer->i + count > end))
  107. return false;
  108. for (i = 1, j = c->buffer->i + 1; i < count; i++, j++)
  109. {
  110. while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_props, NULL))
  111. {
/* Not enough room left for the remaining count-i glyphs. */
  112. if (unlikely (j + count - i == end))
  113. return false;
  114. j++;
  115. }
/* NOTE(review): likely() wraps the *failure* branch here; looks inverted
 * but matching the hint used by the sibling functions — confirm upstream. */
  116. if (likely (!match_func (c->buffer->info[j].codepoint, input[i - 1], match_data)))
  117. return false;
  118. }
  119. *context_length_out = j - c->buffer->i;
  120. return true;
  121. }
/* Matches the backtrack sequence of a chaining rule against already-output
 * glyphs, walking backwards from the end of the output buffer.  backtrack[]
 * is in reverse glyph order (closest glyph first), per OpenType.  Skippable
 * glyphs are passed over. */
  122. static inline bool match_backtrack (hb_apply_context_t *c,
  123. unsigned int count,
  124. const USHORT backtrack[],
  125. match_func_t match_func,
  126. const void *match_data)
  127. {
  128. if (unlikely (c->buffer->out_len < count))
  129. return false;
/* j walks backwards; unsigned, but guarded by the bounds check below. */
  130. for (unsigned int i = 0, j = c->buffer->out_len - 1; i < count; i++, j--)
  131. {
  132. while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->out_info[j], c->lookup_props, NULL))
  133. {
/* Fewer non-skipped glyphs remain than the count-i still needed. */
  134. if (unlikely (j + 1 == count - i))
  135. return false;
  136. j--;
  137. }
  138. if (likely (!match_func (c->buffer->out_info[j].codepoint, backtrack[i], match_data)))
  139. return false;
  140. }
  141. return true;
  142. }
/* Matches the lookahead sequence of a chaining rule against glyphs after
 * the input sequence.  offset is the length of the already-matched input
 * span (as returned by match_input), so scanning starts right past it.
 * Skippable glyphs are passed over. */
  143. static inline bool match_lookahead (hb_apply_context_t *c,
  144. unsigned int count,
  145. const USHORT lookahead[],
  146. match_func_t match_func,
  147. const void *match_data,
  148. unsigned int offset)
  149. {
  150. unsigned int i, j;
/* Clamp to both the buffer end and the caller's context budget. */
  151. unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
  152. if (unlikely (c->buffer->i + offset + count > end))
  153. return false;
  154. for (i = 0, j = c->buffer->i + offset; i < count; i++, j++)
  155. {
  156. while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_props, NULL))
  157. {
/* Not enough room left for the remaining count-i glyphs. */
  158. if (unlikely (j + count - i == end))
  159. return false;
  160. j++;
  161. }
  162. if (likely (!match_func (c->buffer->info[j].codepoint, lookahead[i], match_data)))
  163. return false;
  164. }
  165. return true;
  166. }
  167. HB_END_DECLS
/* Wire-format record pairing an input-sequence position with the lookup
 * to apply there (OpenType SequenceLookupRecord). */
  168. struct LookupRecord
  169. {
  170. inline bool sanitize (hb_sanitize_context_t *c) {
  171. TRACE_SANITIZE ();
  172. return c->check_struct (this);
  173. }
  174. USHORT sequenceIndex; /* Index into current glyph
  175. * sequence--first glyph = 0 */
  176. USHORT lookupListIndex; /* Lookup to apply to that
  177. * position--zero-based */
  178. public:
  179. DEFINE_SIZE_STATIC (4);
  180. };
  181. HB_BEGIN_DECLS
/* Walks the count matched glyphs and applies the nested lookups listed in
 * lookupRecord[] at their recorded sequence positions, consuming glyphs
 * (next_glyph) where no lookup applies.  Records must be in increasing
 * sequenceIndex order (see TODO below).  Returns true once the span is
 * consumed or the buffer/context end is reached. */
  182. static inline bool apply_lookup (hb_apply_context_t *c,
  183. unsigned int count, /* Including the first glyph */
  184. unsigned int lookupCount,
  185. const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
  186. apply_lookup_func_t apply_func)
  187. {
  188. unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
  189. if (unlikely (count == 0 || c->buffer->i + count > end))
  190. return false;
  191. /* TODO We don't support lookupRecord arrays that are not increasing:
  192. * Should be easy for in_place ones at least. */
  193. /* Note: If sublookup is reverse, i will underflow after the first loop
  194. * and we jump out of it. Not entirely disastrous. So we don't check
  195. * for reverse lookup here.
  196. */
  197. for (unsigned int i = 0; i < count; /* NOP */)
  198. {
/* Pass over skippable glyphs without counting them against i. */
  199. while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[c->buffer->i], c->lookup_props, NULL))
  200. {
  201. if (unlikely (c->buffer->i == end))
  202. return true;
  203. /* No lookup applied for this index */
  204. c->buffer->next_glyph ();
  205. }
/* A record targets the current sequence position: recurse into it. */
  206. if (lookupCount && i == lookupRecord->sequenceIndex)
  207. {
  208. unsigned int old_pos = c->buffer->i;
  209. /* Apply a lookup */
  210. bool done = apply_func (c, lookupRecord->lookupListIndex);
  211. lookupRecord++;
  212. lookupCount--;
  213. /* Err, this is wrong if the lookup jumped over some glyphs */
/* Advance i by however many buffer positions the sublookup consumed. */
  214. i += c->buffer->i - old_pos;
  215. if (unlikely (c->buffer->i == end))
  216. return true;
  217. if (!done)
  218. goto not_applied;
  219. }
  220. else
  221. {
  222. not_applied:
  223. /* No lookup applied for this index */
  224. c->buffer->next_glyph ();
  225. i++;
  226. }
  227. }
  228. return true;
  229. }
  230. HB_END_DECLS
  231. /* Contextual lookups */
/* Callbacks plus the single matcher payload (NULL, a ClassDef, or the
 * table base, depending on the context format). */
  232. struct ContextLookupContext
  233. {
  234. ContextFuncs funcs;
  235. const void *match_data;
  236. };
/* Core of a contextual (non-chaining) rule: match the input sequence, then
 * apply the nested lookups over the matched span.  A copy of the context
 * is used so apply_lookup runs with the narrowed context_length that
 * match_input computed, without disturbing the caller's context. */
  237. static inline bool context_lookup (hb_apply_context_t *c,
  238. unsigned int inputCount, /* Including the first glyph (not matched) */
  239. const USHORT input[], /* Array of input values--start with second glyph */
  240. unsigned int lookupCount,
  241. const LookupRecord lookupRecord[],
  242. ContextLookupContext &lookup_context)
  243. {
  244. hb_apply_context_t new_context = *c;
  245. return match_input (c,
  246. inputCount, input,
  247. lookup_context.funcs.match, lookup_context.match_data,
  248. &new_context.context_length)
  249. && apply_lookup (&new_context,
  250. inputCount,
  251. lookupCount, lookupRecord,
  252. lookup_context.funcs.apply);
  253. }
/* One contextual rule (wire format): an input glyph/class sequence followed
 * in memory by its LookupRecords.  Field order mirrors the binary table;
 * lookupRecordX is located by offset arithmetic, not by member access. */
  254. struct Rule
  255. {
  256. friend struct RuleSet;
  257. private:
  258. inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  259. {
  260. TRACE_APPLY ();
/* The records start right after the inputCount-1 input values. */
  261. const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
  262. return context_lookup (c,
  263. inputCount, input,
  264. lookupCount, lookupRecord,
  265. lookup_context);
  266. }
  267. public:
  268. inline bool sanitize (hb_sanitize_context_t *c) {
  269. TRACE_SANITIZE ();
  270. return inputCount.sanitize (c)
  271. && lookupCount.sanitize (c)
  272. && c->check_range (input,
  273. input[0].static_size * inputCount
  274. + lookupRecordX[0].static_size * lookupCount);
  275. }
  276. private:
  277. USHORT inputCount; /* Total number of glyphs in input
  278. * glyph sequence--includes the first
  279. * glyph */
  280. USHORT lookupCount; /* Number of LookupRecords */
  281. USHORT input[VAR]; /* Array of match inputs--start with
  282. * second glyph */
  283. LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
  284. * design order */
  285. public:
  286. DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
  287. };
/* A preference-ordered array of Rules for one coverage index or class;
 * the first rule that applies wins. */
  288. struct RuleSet
  289. {
  290. inline bool apply (hb_apply_context_t *c, ContextLookupContext &lookup_context) const
  291. {
  292. TRACE_APPLY ();
  293. unsigned int num_rules = rule.len;
  294. for (unsigned int i = 0; i < num_rules; i++)
  295. {
  296. if ((this+rule[i]).apply (c, lookup_context))
  297. return true;
  298. }
  299. return false;
  300. }
  301. inline bool sanitize (hb_sanitize_context_t *c) {
  302. TRACE_SANITIZE ();
  303. return rule.sanitize (c, this);
  304. }
  305. private:
  306. OffsetArrayOf<Rule>
  307. rule; /* Array of Rule tables
  308. * ordered by preference */
  309. public:
  310. DEFINE_SIZE_ARRAY (2, rule);
  311. };
/* Contextual lookup format 1 (simple glyph contexts): the first glyph is
 * looked up in Coverage, and its coverage index selects a RuleSet whose
 * rules match literal glyph ids. */
  312. struct ContextFormat1
  313. {
  314. friend struct Context;
  315. private:
  316. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  317. {
  318. TRACE_APPLY ();
  319. unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
  320. if (likely (index == NOT_COVERED))
  321. return false;
  322. const RuleSet &rule_set = this+ruleSet[index];
  323. struct ContextLookupContext lookup_context = {
  324. {match_glyph, apply_func},
  325. NULL
  326. };
  327. return rule_set.apply (c, lookup_context);
  328. }
  329. inline bool sanitize (hb_sanitize_context_t *c) {
  330. TRACE_SANITIZE ();
  331. return coverage.sanitize (c, this)
  332. && ruleSet.sanitize (c, this);
  333. }
  334. private:
  335. USHORT format; /* Format identifier--format = 1 */
  336. OffsetTo<Coverage>
  337. coverage; /* Offset to Coverage table--from
  338. * beginning of table */
  339. OffsetArrayOf<RuleSet>
  340. ruleSet; /* Array of RuleSet tables
  341. * ordered by Coverage Index */
  342. public:
  343. DEFINE_SIZE_ARRAY (6, ruleSet);
  344. };
/* Contextual lookup format 2 (class-based contexts): the first glyph must
 * be covered, then its ClassDef class selects a RuleSet whose rules match
 * glyph classes. */
  345. struct ContextFormat2
  346. {
  347. friend struct Context;
  348. private:
  349. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  350. {
  351. TRACE_APPLY ();
  352. unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
  353. if (likely (index == NOT_COVERED))
  354. return false;
  355. const ClassDef &class_def = this+classDef;
/* Re-index by class, not by coverage index, to pick the RuleSet. */
  356. index = class_def (c->buffer->info[c->buffer->i].codepoint);
  357. const RuleSet &rule_set = this+ruleSet[index];
  358. struct ContextLookupContext lookup_context = {
  359. {match_class, apply_func},
  360. &class_def
  361. };
  362. return rule_set.apply (c, lookup_context);
  363. }
  364. inline bool sanitize (hb_sanitize_context_t *c) {
  365. TRACE_SANITIZE ();
  366. return coverage.sanitize (c, this)
  367. && classDef.sanitize (c, this)
  368. && ruleSet.sanitize (c, this);
  369. }
  370. private:
  371. USHORT format; /* Format identifier--format = 2 */
  372. OffsetTo<Coverage>
  373. coverage; /* Offset to Coverage table--from
  374. * beginning of table */
  375. OffsetTo<ClassDef>
  376. classDef; /* Offset to glyph ClassDef table--from
  377. * beginning of table */
  378. OffsetArrayOf<RuleSet>
  379. ruleSet; /* Array of RuleSet tables
  380. * ordered by class */
  381. public:
  382. DEFINE_SIZE_ARRAY (8, ruleSet);
  383. };
/* Contextual lookup format 3 (coverage-based context): one rule only, where
 * each input position has its own Coverage table.  The LookupRecords sit in
 * memory right after the glyphCount coverage offsets and are located by
 * offset arithmetic. */
  384. struct ContextFormat3
  385. {
  386. friend struct Context;
  387. private:
  388. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  389. {
  390. TRACE_APPLY ();
  391. unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->i].codepoint);
  392. if (likely (index == NOT_COVERED))
  393. return false;
  394. const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
  395. struct ContextLookupContext lookup_context = {
  396. {match_coverage, apply_func},
  397. this
  398. };
/* coverage+1: first coverage already matched; the rest act as input[]. */
  399. return context_lookup (c,
  400. glyphCount, (const USHORT *) (coverage + 1),
  401. lookupCount, lookupRecord,
  402. lookup_context);
  403. }
  404. inline bool sanitize (hb_sanitize_context_t *c) {
  405. TRACE_SANITIZE ();
  406. if (!c->check_struct (this)) return false;
  407. unsigned int count = glyphCount;
  408. if (!c->check_array (coverage, coverage[0].static_size, count)) return false;
  409. for (unsigned int i = 0; i < count; i++)
  410. if (!coverage[i].sanitize (c, this)) return false;
  411. LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
  412. return c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount);
  413. }
  414. private:
  415. USHORT format; /* Format identifier--format = 3 */
  416. USHORT glyphCount; /* Number of glyphs in the input glyph
  417. * sequence */
  418. USHORT lookupCount; /* Number of LookupRecords */
  419. OffsetTo<Coverage>
  420. coverage[VAR]; /* Array of offsets to Coverage
  421. * table in glyph sequence order */
  422. LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
  423. * design order */
  424. public:
  425. DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
  426. };
/* Format dispatcher for contextual lookups: a union over the three wire
 * formats, discriminated by the leading USHORT. */
  427. struct Context
  428. {
  429. protected:
  430. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  431. {
  432. TRACE_APPLY ();
  433. switch (u.format) {
  434. case 1: return u.format1.apply (c, apply_func);
  435. case 2: return u.format2.apply (c, apply_func);
  436. case 3: return u.format3.apply (c, apply_func);
  437. default:return false;
  438. }
  439. }
  440. inline bool sanitize (hb_sanitize_context_t *c) {
  441. TRACE_SANITIZE ();
  442. if (!u.format.sanitize (c)) return false;
  443. switch (u.format) {
  444. case 1: return u.format1.sanitize (c);
  445. case 2: return u.format2.sanitize (c);
  446. case 3: return u.format3.sanitize (c);
/* Unknown formats are tolerated (treated as sane, never applied). */
  447. default:return true;
  448. }
  449. }
  450. private:
  451. union {
  452. USHORT format; /* Format identifier */
  453. ContextFormat1 format1;
  454. ContextFormat2 format2;
  455. ContextFormat3 format3;
  456. } u;
  457. };
  458. /* Chaining Contextual lookups */
/* Callbacks plus one matcher payload per sequence part:
 * [0] backtrack, [1] input, [2] lookahead. */
  459. struct ChainContextLookupContext
  460. {
  461. ContextFuncs funcs;
  462. const void *match_data[3];
  463. };
/* Core of a chaining contextual rule: cheap bounds pre-check, then match
 * backtrack, input, and lookahead in turn, and finally apply the nested
 * lookups over the matched input span.  The copied context carries the
 * narrowed context_length computed by match_input. */
  464. static inline bool chain_context_lookup (hb_apply_context_t *c,
  465. unsigned int backtrackCount,
  466. const USHORT backtrack[],
  467. unsigned int inputCount, /* Including the first glyph (not matched) */
  468. const USHORT input[], /* Array of input values--start with second glyph */
  469. unsigned int lookaheadCount,
  470. const USHORT lookahead[],
  471. unsigned int lookupCount,
  472. const LookupRecord lookupRecord[],
  473. ChainContextLookupContext &lookup_context)
  474. {
  475. /* First guess */
  476. if (unlikely (c->buffer->out_len < backtrackCount ||
  477. c->buffer->i + inputCount + lookaheadCount > c->buffer->len ||
  478. inputCount + lookaheadCount > c->context_length))
  479. return false;
  480. hb_apply_context_t new_context = *c;
  481. return match_backtrack (c,
  482. backtrackCount, backtrack,
  483. lookup_context.funcs.match, lookup_context.match_data[0])
  484. && match_input (c,
  485. inputCount, input,
  486. lookup_context.funcs.match, lookup_context.match_data[1],
  487. &new_context.context_length)
  488. && match_lookahead (c,
  489. lookaheadCount, lookahead,
  490. lookup_context.funcs.match, lookup_context.match_data[2],
  491. new_context.context_length)
  492. && apply_lookup (&new_context,
  493. inputCount,
  494. lookupCount, lookupRecord,
  495. lookup_context.funcs.apply);
  496. }
/* One chaining rule (wire format): four variable-length arrays laid out
 * back to back (backtrack, input, lookahead, lookup records); only the
 * first is a named member, the rest are reached via StructAfter. */
  497. struct ChainRule
  498. {
  499. friend struct ChainRuleSet;
  500. private:
  501. inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  502. {
  503. TRACE_APPLY ();
  504. const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
  505. const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
  506. const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
  507. return chain_context_lookup (c,
  508. backtrack.len, backtrack.array,
  509. input.len, input.array,
  510. lookahead.len, lookahead.array,
  511. lookup.len, lookup.array,
  512. lookup_context);
  513. }
  514. public:
/* Each trailing array can only be located once the previous one has been
 * validated, hence the sequential sanitize chain. */
  515. inline bool sanitize (hb_sanitize_context_t *c) {
  516. TRACE_SANITIZE ();
  517. if (!backtrack.sanitize (c)) return false;
  518. HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
  519. if (!input.sanitize (c)) return false;
  520. ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
  521. if (!lookahead.sanitize (c)) return false;
  522. ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
  523. return lookup.sanitize (c);
  524. }
  525. private:
  526. ArrayOf<USHORT>
  527. backtrack; /* Array of backtracking values
  528. * (to be matched before the input
  529. * sequence) */
  530. HeadlessArrayOf<USHORT>
  531. inputX; /* Array of input values (start with
  532. * second glyph) */
  533. ArrayOf<USHORT>
  534. lookaheadX; /* Array of lookahead values (to be
  535. * matched after the input sequence) */
  536. ArrayOf<LookupRecord>
  537. lookupX; /* Array of LookupRecords--in
  538. * design order */
  539. public:
  540. DEFINE_SIZE_MIN (8);
  541. };
/* A preference-ordered array of ChainRules; the first rule that applies
 * wins. */
  542. struct ChainRuleSet
  543. {
  544. inline bool apply (hb_apply_context_t *c, ChainContextLookupContext &lookup_context) const
  545. {
  546. TRACE_APPLY ();
  547. unsigned int num_rules = rule.len;
  548. for (unsigned int i = 0; i < num_rules; i++)
  549. {
  550. if ((this+rule[i]).apply (c, lookup_context))
  551. return true;
  552. }
  553. return false;
  554. }
  555. inline bool sanitize (hb_sanitize_context_t *c) {
  556. TRACE_SANITIZE ();
  557. return rule.sanitize (c, this);
  558. }
  559. private:
  560. OffsetArrayOf<ChainRule>
  561. rule; /* Array of ChainRule tables
  562. * ordered by preference */
  563. public:
  564. DEFINE_SIZE_ARRAY (2, rule);
  565. };
/* Chaining contextual lookup format 1 (simple glyph contexts): the first
 * glyph's coverage index selects a ChainRuleSet whose rules match literal
 * glyph ids in all three sequence parts. */
  566. struct ChainContextFormat1
  567. {
  568. friend struct ChainContext;
  569. private:
  570. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  571. {
  572. TRACE_APPLY ();
  573. unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
  574. if (likely (index == NOT_COVERED))
  575. return false;
  576. const ChainRuleSet &rule_set = this+ruleSet[index];
  577. struct ChainContextLookupContext lookup_context = {
  578. {match_glyph, apply_func},
  579. {NULL, NULL, NULL}
  580. };
  581. return rule_set.apply (c, lookup_context);
  582. }
  583. inline bool sanitize (hb_sanitize_context_t *c) {
  584. TRACE_SANITIZE ();
  585. return coverage.sanitize (c, this)
  586. && ruleSet.sanitize (c, this);
  587. }
  588. private:
  589. USHORT format; /* Format identifier--format = 1 */
  590. OffsetTo<Coverage>
  591. coverage; /* Offset to Coverage table--from
  592. * beginning of table */
  593. OffsetArrayOf<ChainRuleSet>
  594. ruleSet; /* Array of ChainRuleSet tables
  595. * ordered by Coverage Index */
  596. public:
  597. DEFINE_SIZE_ARRAY (6, ruleSet);
  598. };
/* Chaining contextual lookup format 2 (class-based contexts): separate
 * ClassDefs for backtrack, input, and lookahead; the first glyph's input
 * class selects the ChainRuleSet. */
  599. struct ChainContextFormat2
  600. {
  601. friend struct ChainContext;
  602. private:
  603. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  604. {
  605. TRACE_APPLY ();
  606. unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
  607. if (likely (index == NOT_COVERED))
  608. return false;
  609. const ClassDef &backtrack_class_def = this+backtrackClassDef;
  610. const ClassDef &input_class_def = this+inputClassDef;
  611. const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* Re-index by input class, not coverage index, to pick the rule set. */
  612. index = input_class_def (c->buffer->info[c->buffer->i].codepoint);
  613. const ChainRuleSet &rule_set = this+ruleSet[index];
  614. struct ChainContextLookupContext lookup_context = {
  615. {match_class, apply_func},
  616. {&backtrack_class_def,
  617. &input_class_def,
  618. &lookahead_class_def}
  619. };
  620. return rule_set.apply (c, lookup_context);
  621. }
  622. inline bool sanitize (hb_sanitize_context_t *c) {
  623. TRACE_SANITIZE ();
  624. return coverage.sanitize (c, this)
  625. && backtrackClassDef.sanitize (c, this)
  626. && inputClassDef.sanitize (c, this)
  627. && lookaheadClassDef.sanitize (c, this)
  628. && ruleSet.sanitize (c, this);
  629. }
  630. private:
  631. USHORT format; /* Format identifier--format = 2 */
  632. OffsetTo<Coverage>
  633. coverage; /* Offset to Coverage table--from
  634. * beginning of table */
  635. OffsetTo<ClassDef>
  636. backtrackClassDef; /* Offset to glyph ClassDef table
  637. * containing backtrack sequence
  638. * data--from beginning of table */
  639. OffsetTo<ClassDef>
  640. inputClassDef; /* Offset to glyph ClassDef
  641. * table containing input sequence
  642. * data--from beginning of table */
  643. OffsetTo<ClassDef>
  644. lookaheadClassDef; /* Offset to glyph ClassDef table
  645. * containing lookahead sequence
  646. * data--from beginning of table */
  647. OffsetArrayOf<ChainRuleSet>
  648. ruleSet; /* Array of ChainRuleSet tables
  649. * ordered by class */
  650. public:
  651. DEFINE_SIZE_ARRAY (12, ruleSet);
  652. };
/* Chaining contextual lookup format 3 (coverage-based): one rule with a
 * Coverage table per position in each of the three sequences, laid out
 * back to back and reached via StructAfter. */
  653. struct ChainContextFormat3
  654. {
  655. friend struct ChainContext;
  656. private:
  657. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  658. {
  659. TRACE_APPLY ();
  660. const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
/* First input coverage gates the rule. */
  661. unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->i].codepoint);
  662. if (likely (index == NOT_COVERED))
  663. return false;
  664. const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
  665. const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
  666. struct ChainContextLookupContext lookup_context = {
  667. {match_coverage, apply_func},
  668. {this, this, this}
  669. };
/* input.array + 1: first coverage already matched above. */
  670. return chain_context_lookup (c,
  671. backtrack.len, (const USHORT *) backtrack.array,
  672. input.len, (const USHORT *) input.array + 1,
  673. lookahead.len, (const USHORT *) lookahead.array,
  674. lookup.len, lookup.array,
  675. lookup_context);
  676. }
/* Arrays must be validated in order; each one's position depends on the
 * length of the previous. */
  677. inline bool sanitize (hb_sanitize_context_t *c) {
  678. TRACE_SANITIZE ();
  679. if (!backtrack.sanitize (c, this)) return false;
  680. OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
  681. if (!input.sanitize (c, this)) return false;
  682. OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
  683. if (!lookahead.sanitize (c, this)) return false;
  684. ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
  685. return lookup.sanitize (c);
  686. }
  687. private:
  688. USHORT format; /* Format identifier--format = 3 */
  689. OffsetArrayOf<Coverage>
  690. backtrack; /* Array of coverage tables
  691. * in backtracking sequence, in glyph
  692. * sequence order */
  693. OffsetArrayOf<Coverage>
  694. inputX ; /* Array of coverage
  695. * tables in input sequence, in glyph
  696. * sequence order */
  697. OffsetArrayOf<Coverage>
  698. lookaheadX; /* Array of coverage tables
  699. * in lookahead sequence, in glyph
  700. * sequence order */
  701. ArrayOf<LookupRecord>
  702. lookupX; /* Array of LookupRecords--in
  703. * design order */
  704. public:
  705. DEFINE_SIZE_MIN (10);
  706. };
/* Format dispatcher for chaining contextual lookups: a union over the
 * three wire formats, discriminated by the leading USHORT. */
  707. struct ChainContext
  708. {
  709. protected:
  710. inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
  711. {
  712. TRACE_APPLY ();
  713. switch (u.format) {
  714. case 1: return u.format1.apply (c, apply_func);
  715. case 2: return u.format2.apply (c, apply_func);
  716. case 3: return u.format3.apply (c, apply_func);
  717. default:return false;
  718. }
  719. }
  720. inline bool sanitize (hb_sanitize_context_t *c) {
  721. TRACE_SANITIZE ();
  722. if (!u.format.sanitize (c)) return false;
  723. switch (u.format) {
  724. case 1: return u.format1.sanitize (c);
  725. case 2: return u.format2.sanitize (c);
  726. case 3: return u.format3.sanitize (c);
/* Unknown formats are tolerated (treated as sane, never applied). */
  727. default:return true;
  728. }
  729. }
  730. private:
  731. union {
  732. USHORT format; /* Format identifier */
  733. ChainContextFormat1 format1;
  734. ChainContextFormat2 format2;
  735. ChainContextFormat3 format3;
  736. } u;
  737. };
/* Extension subtable format 1: indirection record giving the real lookup
 * type and a 32-bit offset to the actual subtable (lets large fonts place
 * subtables beyond 16-bit offset reach). */
  738. struct ExtensionFormat1
  739. {
  740. friend struct Extension;
  741. protected:
  742. inline unsigned int get_type (void) const { return extensionLookupType; }
  743. inline unsigned int get_offset (void) const { return extensionOffset; }
  744. inline bool sanitize (hb_sanitize_context_t *c) {
  745. TRACE_SANITIZE ();
  746. return c->check_struct (this);
  747. }
  748. private:
  749. USHORT format; /* Format identifier. Set to 1. */
  750. USHORT extensionLookupType; /* Lookup type of subtable referenced
  751. * by ExtensionOffset (i.e. the
  752. * extension subtable). */
  753. ULONG extensionOffset; /* Offset to the extension subtable,
  754. * of lookup type subtable. */
  755. public:
  756. DEFINE_SIZE_STATIC (8);
  757. };
/* Format dispatcher for Extension subtables; unknown formats report
 * type/offset 0. */
  758. struct Extension
  759. {
  760. inline unsigned int get_type (void) const
  761. {
  762. switch (u.format) {
  763. case 1: return u.format1.get_type ();
  764. default:return 0;
  765. }
  766. }
  767. inline unsigned int get_offset (void) const
  768. {
  769. switch (u.format) {
  770. case 1: return u.format1.get_offset ();
  771. default:return 0;
  772. }
  773. }
  774. inline bool sanitize (hb_sanitize_context_t *c) {
  775. TRACE_SANITIZE ();
  776. if (!u.format.sanitize (c)) return false;
  777. switch (u.format) {
  778. case 1: return u.format1.sanitize (c);
  779. default:return true;
  780. }
  781. }
  782. private:
  783. union {
  784. USHORT format; /* Format identifier */
  785. ExtensionFormat1 format1;
  786. } u;
  787. };
  788. /*
  789. * GSUB/GPOS Common
  790. */
/* Common header shared by the GSUB and GPOS tables: version plus offsets
 * to the ScriptList, FeatureList, and LookupList.  The accessors below
 * delegate to those three subtables via the offset-dereference idiom. */
  791. struct GSUBGPOS
  792. {
  793. static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
  794. static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
  795. inline unsigned int get_script_count (void) const
  796. { return (this+scriptList).len; }
  797. inline const Tag& get_script_tag (unsigned int i) const
  798. { return (this+scriptList).get_tag (i); }
  799. inline unsigned int get_script_tags (unsigned int start_offset,
  800. unsigned int *script_count /* IN/OUT */,
  801. hb_tag_t *script_tags /* OUT */) const
  802. { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  803. inline const Script& get_script (unsigned int i) const
  804. { return (this+scriptList)[i]; }
  805. inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  806. { return (this+scriptList).find_index (tag, index); }
  807. inline unsigned int get_feature_count (void) const
  808. { return (this+featureList).len; }
  809. inline const Tag& get_feature_tag (unsigned int i) const
  810. { return (this+featureList).get_tag (i); }
  811. inline unsigned int get_feature_tags (unsigned int start_offset,
  812. unsigned int *feature_count /* IN/OUT */,
  813. hb_tag_t *feature_tags /* OUT */) const
  814. { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  815. inline const Feature& get_feature (unsigned int i) const
  816. { return (this+featureList)[i]; }
  817. inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  818. { return (this+featureList).find_index (tag, index); }
  819. inline unsigned int get_lookup_count (void) const
  820. { return (this+lookupList).len; }
  821. inline const Lookup& get_lookup (unsigned int i) const
  822. { return (this+lookupList)[i]; }
/* Only major version 1 tables are accepted. */
  823. inline bool sanitize (hb_sanitize_context_t *c) {
  824. TRACE_SANITIZE ();
  825. return version.sanitize (c) && likely (version.major == 1)
  826. && scriptList.sanitize (c, this)
  827. && featureList.sanitize (c, this)
  828. && lookupList.sanitize (c, this);
  829. }
  830. protected:
  831. FixedVersion version; /* Version of the GSUB/GPOS table--initially set
  832. * to 0x00010000 */
  833. OffsetTo<ScriptList>
  834. scriptList; /* ScriptList table */
  835. OffsetTo<FeatureList>
  836. featureList; /* FeatureList table */
  837. OffsetTo<LookupList>
  838. lookupList; /* LookupList table */
  839. public:
  840. DEFINE_SIZE_STATIC (10);
  841. };
  842. HB_END_DECLS
  843. #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */