PageRenderTime 60ms CodeModel.GetById 26ms RepoModel.GetById 0ms app.codeStats 0ms

/src/harfbuzz-1.2.7/src/hb-ot-layout-gsub-table.hh

https://bitbucket.org/cegui/cegui-dependencies
C++ Header | 1354 lines | 1082 code | 188 blank | 84 comment | 169 complexity | 64fb2aa9e0dda9bb9cc8a2a46292044c MD5 | raw file
Possible License(s): BSD-2-Clause, LGPL-2.1, MPL-2.0-no-copyleft-exception, GPL-2.0, LGPL-3.0, Apache-2.0, BSD-3-Clause, LGPL-2.0, 0BSD, MIT
  1. /*
  2. * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
  3. * Copyright © 2010,2012,2013 Google, Inc.
  4. *
  5. * This is part of HarfBuzz, a text shaping library.
  6. *
  7. * Permission is hereby granted, without written agreement and without
  8. * license or royalty fees, to use, copy, modify, and distribute this
  9. * software and its documentation for any purpose, provided that the
  10. * above copyright notice and the following two paragraphs appear in
  11. * all copies of this software.
  12. *
  13. * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14. * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15. * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16. * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17. * DAMAGE.
  18. *
  19. * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20. * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21. * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
  22. * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23. * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24. *
  25. * Red Hat Author(s): Behdad Esfahbod
  26. * Google Author(s): Behdad Esfahbod
  27. */
  28. #ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
  29. #define HB_OT_LAYOUT_GSUB_TABLE_HH
  30. #include "hb-ot-layout-gsubgpos-private.hh"
  31. namespace OT {
/* SingleSubstFormat1: Single Substitution subtable, format 1.
 *
 * Replaces each covered glyph with (glyph + deltaGlyphID) modulo 65536.
 * This is the compact encoding used when every substitution pair shares
 * the same constant glyph-ID delta. */
struct SingleSubstFormat1
{
  /* Adds to c->glyphs every output glyph this subtable can produce from
   * glyphs already in the set (transitive-closure pass over the lookup). */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      hb_codepoint_t glyph_id = iter.get_glyph ();
      if (c->glyphs->has (glyph_id))
        c->glyphs->add ((glyph_id + deltaGlyphID) & 0xFFFFu);
    }
  }

  /* Records every possible input glyph (the coverage) and every possible
   * output glyph (input + delta, wrapped to 16 bits). */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      hb_codepoint_t glyph_id = iter.get_glyph ();
      c->input->add (glyph_id);
      c->output->add ((glyph_id + deltaGlyphID) & 0xFFFFu);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Applies only to a single-glyph sequence whose glyph is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Substitutes the current buffer glyph in place; returns false (no-op)
   * when the glyph is not in the coverage. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* According to the Adobe Annotated OpenType Suite, result is always
     * limited to 16bit. */
    glyph_id = (glyph_id + deltaGlyphID) & 0xFFFFu;
    c->replace_glyph (glyph_id);

    return_trace (true);
  }

  /* Serializes this subtable: a Coverage built from `glyphs` plus the
   * shared `delta` (the caller has verified all pairs share it). */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         unsigned int num_glyphs,
                         int delta)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
    deltaGlyphID.set (delta); /* TODO(serialize) overflow? */
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of Substitution table */
  SHORT         deltaGlyphID;   /* Add to original GlyphID to get
                                 * substitute GlyphID */
  public:
  DEFINE_SIZE_STATIC (6);
};
  101. struct SingleSubstFormat2
  102. {
  103. inline void closure (hb_closure_context_t *c) const
  104. {
  105. TRACE_CLOSURE (this);
  106. Coverage::Iter iter;
  107. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  108. if (c->glyphs->has (iter.get_glyph ()))
  109. c->glyphs->add (substitute[iter.get_coverage ()]);
  110. }
  111. }
  112. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  113. {
  114. TRACE_COLLECT_GLYPHS (this);
  115. Coverage::Iter iter;
  116. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  117. c->input->add (iter.get_glyph ());
  118. c->output->add (substitute[iter.get_coverage ()]);
  119. }
  120. }
  121. inline const Coverage &get_coverage (void) const
  122. {
  123. return this+coverage;
  124. }
  125. inline bool would_apply (hb_would_apply_context_t *c) const
  126. {
  127. TRACE_WOULD_APPLY (this);
  128. return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  129. }
  130. inline bool apply (hb_apply_context_t *c) const
  131. {
  132. TRACE_APPLY (this);
  133. hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
  134. unsigned int index = (this+coverage).get_coverage (glyph_id);
  135. if (likely (index == NOT_COVERED)) return_trace (false);
  136. if (unlikely (index >= substitute.len)) return_trace (false);
  137. glyph_id = substitute[index];
  138. c->replace_glyph (glyph_id);
  139. return_trace (true);
  140. }
  141. inline bool serialize (hb_serialize_context_t *c,
  142. Supplier<GlyphID> &glyphs,
  143. Supplier<GlyphID> &substitutes,
  144. unsigned int num_glyphs)
  145. {
  146. TRACE_SERIALIZE (this);
  147. if (unlikely (!c->extend_min (*this))) return_trace (false);
  148. if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return_trace (false);
  149. if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
  150. return_trace (true);
  151. }
  152. inline bool sanitize (hb_sanitize_context_t *c) const
  153. {
  154. TRACE_SANITIZE (this);
  155. return_trace (coverage.sanitize (c, this) && substitute.sanitize (c));
  156. }
  157. protected:
  158. USHORT format; /* Format identifier--format = 2 */
  159. OffsetTo<Coverage>
  160. coverage; /* Offset to Coverage table--from
  161. * beginning of Substitution table */
  162. ArrayOf<GlyphID>
  163. substitute; /* Array of substitute
  164. * GlyphIDs--ordered by Coverage Index */
  165. public:
  166. DEFINE_SIZE_ARRAY (6, substitute);
  167. };
  168. struct SingleSubst
  169. {
  170. inline bool serialize (hb_serialize_context_t *c,
  171. Supplier<GlyphID> &glyphs,
  172. Supplier<GlyphID> &substitutes,
  173. unsigned int num_glyphs)
  174. {
  175. TRACE_SERIALIZE (this);
  176. if (unlikely (!c->extend_min (u.format))) return_trace (false);
  177. unsigned int format = 2;
  178. int delta = 0;
  179. if (num_glyphs) {
  180. format = 1;
  181. /* TODO(serialize) check for wrap-around */
  182. delta = substitutes[0] - glyphs[0];
  183. for (unsigned int i = 1; i < num_glyphs; i++)
  184. if (delta != substitutes[i] - glyphs[i]) {
  185. format = 2;
  186. break;
  187. }
  188. }
  189. u.format.set (format);
  190. switch (u.format) {
  191. case 1: return_trace (u.format1.serialize (c, glyphs, num_glyphs, delta));
  192. case 2: return_trace (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
  193. default:return_trace (false);
  194. }
  195. }
  196. template <typename context_t>
  197. inline typename context_t::return_t dispatch (context_t *c) const
  198. {
  199. TRACE_DISPATCH (this, u.format);
  200. if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
  201. switch (u.format) {
  202. case 1: return_trace (c->dispatch (u.format1));
  203. case 2: return_trace (c->dispatch (u.format2));
  204. default:return_trace (c->default_return_value ());
  205. }
  206. }
  207. protected:
  208. union {
  209. USHORT format; /* Format identifier */
  210. SingleSubstFormat1 format1;
  211. SingleSubstFormat2 format2;
  212. } u;
  213. };
/* Sequence: the replacement glyph string for one covered input glyph;
 * the per-glyph payload of a MultipleSubst (one-to-many) substitution. */
struct Sequence
{
  /* Adds every replacement glyph to the closure set unconditionally;
   * the caller (MultipleSubstFormat1::closure) has already filtered by
   * coverage. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->glyphs->add (substitute[i]);
  }

  /* Records every replacement glyph as a possible output. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->output->add (substitute[i]);
  }

  /* Replaces the current glyph with the `substitute` glyph string.
   * A one-glyph sequence is done in place; longer sequences output each
   * component glyph and then skip past the original. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = substitute.len;

    /* TODO:
     * Testing shows that Uniscribe actually allows zero-len substitute,
     * which essentially deletes a glyph.  We don't allow for now.  It
     * can be confusing to the client since the cluster from the deleted
     * glyph won't be merged with any output cluster...  Also, currently
     * buffer->move_to() makes assumptions about this too.  Perhaps fix
     * in the future after figuring out what to do with the clusters.
     */
    if (unlikely (!count)) return_trace (false);

    /* Special-case to make it in-place and not consider this
     * as a "multiplied" substitution. */
    if (unlikely (count == 1))
    {
      c->replace_glyph (substitute.array[0]);
      return_trace (true);
    }

    /* NOTE(review): when the glyph being replaced is itself a ligature,
     * every output glyph is tagged BASE_GLYPH -- presumably so marks can
     * still attach to the pieces; confirm against glyph-props handling. */
    unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
                         HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;

    for (unsigned int i = 0; i < count; i++) {
      /* Stamp each output glyph with its component index so cluster /
       * attachment logic can tell the pieces apart. */
      _hb_glyph_info_set_lig_props_for_component (&c->buffer->cur(), i);
      c->output_glyph_for_component (substitute.array[i], klass);
    }
    c->buffer->skip_glyph ();

    return_trace (true);
  }

  /* Serializes the glyph string, consuming num_glyphs entries from the
   * supplier. */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!substitute.serialize (c, glyphs, num_glyphs))) return_trace (false);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (substitute.sanitize (c));
  }

  protected:
  ArrayOf<GlyphID>
                substitute;     /* String of GlyphIDs to substitute */
  public:
  DEFINE_SIZE_ARRAY (2, substitute);
};
/* MultipleSubstFormat1: Multiple Substitution subtable, format 1.
 *
 * Replaces one covered glyph with the glyph Sequence stored at the same
 * coverage index. */
struct MultipleSubstFormat1
{
  /* Closure pass: expand every covered glyph already in the set into its
   * replacement sequence. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        (this+sequence[iter.get_coverage ()]).closure (c);
    }
  }

  /* Records all covered glyphs as inputs and every sequence glyph as a
   * possible output. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = sequence.len;
    for (unsigned int i = 0; i < count; i++)
      (this+sequence[i]).collect_glyphs (c);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Applies only to a single-glyph sequence whose glyph is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Delegates the actual replacement to the matching Sequence. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    return_trace ((this+sequence[index]).apply (c));
  }

  /* Serializes one Sequence per input glyph.  substitute_len_list yields
   * each sequence's length; substitute_glyphs_list yields the flattened
   * replacement glyphs, consumed sequence by sequence. */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<unsigned int> &substitute_len_list,
                         unsigned int num_glyphs,
                         Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!sequence.serialize (c, num_glyphs))) return_trace (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      if (unlikely (!sequence[i].serialize (c, this).serialize (c,
                                                                substitute_glyphs_list,
                                                                substitute_len_list[i]))) return_trace (false);
    substitute_len_list.advance (num_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && sequence.sanitize (c, this));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of Substitution table */
  OffsetArrayOf<Sequence>
                sequence;       /* Array of Sequence tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, sequence);
};
  347. struct MultipleSubst
  348. {
  349. inline bool serialize (hb_serialize_context_t *c,
  350. Supplier<GlyphID> &glyphs,
  351. Supplier<unsigned int> &substitute_len_list,
  352. unsigned int num_glyphs,
  353. Supplier<GlyphID> &substitute_glyphs_list)
  354. {
  355. TRACE_SERIALIZE (this);
  356. if (unlikely (!c->extend_min (u.format))) return_trace (false);
  357. unsigned int format = 1;
  358. u.format.set (format);
  359. switch (u.format) {
  360. case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, num_glyphs, substitute_glyphs_list));
  361. default:return_trace (false);
  362. }
  363. }
  364. template <typename context_t>
  365. inline typename context_t::return_t dispatch (context_t *c) const
  366. {
  367. TRACE_DISPATCH (this, u.format);
  368. if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
  369. switch (u.format) {
  370. case 1: return_trace (c->dispatch (u.format1));
  371. default:return_trace (c->default_return_value ());
  372. }
  373. }
  374. protected:
  375. union {
  376. USHORT format; /* Format identifier */
  377. MultipleSubstFormat1 format1;
  378. } u;
  379. };
typedef ArrayOf<GlyphID> AlternateSet;  /* Array of alternate GlyphIDs--in
                                         * arbitrary order */

/* AlternateSubstFormat1: Alternate Substitution subtable, format 1.
 *
 * Offers a set of alternate glyphs for each covered glyph; the alternate
 * actually chosen is derived from the feature mask bits on the glyph
 * (see apply below). */
struct AlternateSubstFormat1
{
  /* Closure pass: for every covered glyph already in the set, add all of
   * its alternates. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ())) {
        const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
        unsigned int count = alt_set.len;
        for (unsigned int i = 0; i < count; i++)
          c->glyphs->add (alt_set[i]);
      }
    }
  }

  /* Records covered glyphs as inputs and all their alternates as
   * possible outputs. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
      unsigned int count = alt_set.len;
      for (unsigned int i = 0; i < count; i++)
        c->output->add (alt_set[i]);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Applies only to a single-glyph sequence whose glyph is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Replaces the current glyph with one of its alternates.  The alternate
   * number is carried in the glyph's mask bits belonging to this lookup:
   * the lookup's mask is shifted down to its lowest set bit to recover
   * the value. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const AlternateSet &alt_set = this+alternateSet[index];
    if (unlikely (!alt_set.len)) return_trace (false);

    hb_mask_t glyph_mask = c->buffer->cur().mask;
    hb_mask_t lookup_mask = c->lookup_mask;

    /* Note: This breaks badly if two features enabled this lookup together. */
    unsigned int shift = _hb_ctz (lookup_mask);
    unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);

    /* alt_index is 1-based: 0 means "no alternate requested", and values
     * past the set length are out of range. */
    if (unlikely (alt_index > alt_set.len || alt_index == 0)) return_trace (false);

    glyph_id = alt_set[alt_index - 1];

    c->replace_glyph (glyph_id);
    return_trace (true);
  }

  /* Serializes one AlternateSet per input glyph.  alternate_len_list
   * yields each set's size; alternate_glyphs_list yields the flattened
   * alternates, consumed set by set. */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &glyphs,
                         Supplier<unsigned int> &alternate_len_list,
                         unsigned int num_glyphs,
                         Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!alternateSet.serialize (c, num_glyphs))) return_trace (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      if (unlikely (!alternateSet[i].serialize (c, this).serialize (c,
                                                                    alternate_glyphs_list,
                                                                    alternate_len_list[i]))) return_trace (false);
    alternate_len_list.advance (num_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of Substitution table */
  OffsetArrayOf<AlternateSet>
                alternateSet;   /* Array of AlternateSet tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, alternateSet);
};
  469. struct AlternateSubst
  470. {
  471. inline bool serialize (hb_serialize_context_t *c,
  472. Supplier<GlyphID> &glyphs,
  473. Supplier<unsigned int> &alternate_len_list,
  474. unsigned int num_glyphs,
  475. Supplier<GlyphID> &alternate_glyphs_list)
  476. {
  477. TRACE_SERIALIZE (this);
  478. if (unlikely (!c->extend_min (u.format))) return_trace (false);
  479. unsigned int format = 1;
  480. u.format.set (format);
  481. switch (u.format) {
  482. case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, num_glyphs, alternate_glyphs_list));
  483. default:return_trace (false);
  484. }
  485. }
  486. template <typename context_t>
  487. inline typename context_t::return_t dispatch (context_t *c) const
  488. {
  489. TRACE_DISPATCH (this, u.format);
  490. if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
  491. switch (u.format) {
  492. case 1: return_trace (c->dispatch (u.format1));
  493. default:return_trace (c->default_return_value ());
  494. }
  495. }
  496. protected:
  497. union {
  498. USHORT format; /* Format identifier */
  499. AlternateSubstFormat1 format1;
  500. } u;
  501. };
/* Ligature: one candidate ligature -- the output glyph plus the component
 * glyphs it matches.  The first component is implied (it is the glyph
 * that selected this LigatureSet via coverage), so `component` stores
 * components starting from the second. */
struct Ligature
{
  /* Closure pass: the ligature glyph is reachable only if every trailing
   * component is already in the glyph set.  (The first component was
   * checked by the caller via coverage.) */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int count = component.len;
    for (unsigned int i = 1; i < count; i++)
      if (!c->glyphs->has (component[i]))
        return;
    c->glyphs->add (ligGlyph);
  }

  /* Records trailing components as inputs and the ligature glyph as
   * output. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int count = component.len;
    for (unsigned int i = 1; i < count; i++)
      c->input->add (component[i]);
    c->output->add (ligGlyph);
  }

  /* Exact-match test of the queried glyph sequence against this
   * ligature's components (index 0 was already matched by coverage). */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    if (c->len != component.len)
      return_trace (false);

    for (unsigned int i = 1; i < c->len; i++)
      if (likely (c->glyphs[i] != component[i]))
        return_trace (false);

    return_trace (true);
  }

  /* Matches the trailing components in the buffer (skippable glyphs may
   * intervene) and, on success, ligates the matched run into ligGlyph. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = component.len;

    if (unlikely (!count)) return_trace (false);

    /* Special-case to make it in-place and not consider this
     * as a "ligated" substitution. */
    if (unlikely (count == 1))
    {
      c->replace_glyph (ligGlyph);
      return_trace (true);
    }

    bool is_mark_ligature = false;
    unsigned int total_component_count = 0;

    unsigned int match_length = 0;
    unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];

    /* Match components [1..count) by exact glyph id; match_positions
     * receives the buffer indices of the matched glyphs. */
    if (likely (!match_input (c, count,
                              &component[1],
                              match_glyph,
                              NULL,
                              &match_length,
                              match_positions,
                              &is_mark_ligature,
                              &total_component_count)))
      return_trace (false);

    ligate_input (c,
                  count,
                  match_positions,
                  match_length,
                  ligGlyph,
                  is_mark_ligature,
                  total_component_count);

    return_trace (true);
  }

  /* Serializes this ligature: the output glyph plus num_components-1
   * trailing components consumed from `components`. */
  inline bool serialize (hb_serialize_context_t *c,
                         GlyphID ligature,
                         Supplier<GlyphID> &components, /* Starting from second */
                         unsigned int num_components /* Including first component */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    ligGlyph = ligature;
    if (unlikely (!component.serialize (c, components, num_components))) return_trace (false);
    return_trace (true);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (ligGlyph.sanitize (c) && component.sanitize (c));
  }

  protected:
  GlyphID       ligGlyph;       /* GlyphID of ligature to substitute */
  HeadlessArrayOf<GlyphID>
                component;      /* Array of component GlyphIDs--start
                                 * with the second component--ordered
                                 * in writing direction */
  public:
  DEFINE_SIZE_ARRAY (4, component);
};
  591. struct LigatureSet
  592. {
  593. inline void closure (hb_closure_context_t *c) const
  594. {
  595. TRACE_CLOSURE (this);
  596. unsigned int num_ligs = ligature.len;
  597. for (unsigned int i = 0; i < num_ligs; i++)
  598. (this+ligature[i]).closure (c);
  599. }
  600. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  601. {
  602. TRACE_COLLECT_GLYPHS (this);
  603. unsigned int num_ligs = ligature.len;
  604. for (unsigned int i = 0; i < num_ligs; i++)
  605. (this+ligature[i]).collect_glyphs (c);
  606. }
  607. inline bool would_apply (hb_would_apply_context_t *c) const
  608. {
  609. TRACE_WOULD_APPLY (this);
  610. unsigned int num_ligs = ligature.len;
  611. for (unsigned int i = 0; i < num_ligs; i++)
  612. {
  613. const Ligature &lig = this+ligature[i];
  614. if (lig.would_apply (c))
  615. return_trace (true);
  616. }
  617. return_trace (false);
  618. }
  619. inline bool apply (hb_apply_context_t *c) const
  620. {
  621. TRACE_APPLY (this);
  622. unsigned int num_ligs = ligature.len;
  623. for (unsigned int i = 0; i < num_ligs; i++)
  624. {
  625. const Ligature &lig = this+ligature[i];
  626. if (lig.apply (c)) return_trace (true);
  627. }
  628. return_trace (false);
  629. }
  630. inline bool serialize (hb_serialize_context_t *c,
  631. Supplier<GlyphID> &ligatures,
  632. Supplier<unsigned int> &component_count_list,
  633. unsigned int num_ligatures,
  634. Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  635. {
  636. TRACE_SERIALIZE (this);
  637. if (unlikely (!c->extend_min (*this))) return_trace (false);
  638. if (unlikely (!ligature.serialize (c, num_ligatures))) return_trace (false);
  639. for (unsigned int i = 0; i < num_ligatures; i++)
  640. if (unlikely (!ligature[i].serialize (c, this).serialize (c,
  641. ligatures[i],
  642. component_list,
  643. component_count_list[i]))) return_trace (false);
  644. ligatures.advance (num_ligatures);
  645. component_count_list.advance (num_ligatures);
  646. return_trace (true);
  647. }
  648. inline bool sanitize (hb_sanitize_context_t *c) const
  649. {
  650. TRACE_SANITIZE (this);
  651. return_trace (ligature.sanitize (c, this));
  652. }
  653. protected:
  654. OffsetArrayOf<Ligature>
  655. ligature; /* Array LigatureSet tables
  656. * ordered by preference */
  657. public:
  658. DEFINE_SIZE_ARRAY (2, ligature);
  659. };
/* LigatureSubstFormat1: Ligature Substitution subtable, format 1.
 *
 * The coverage indexes the ligatures' first glyphs; each covered glyph
 * maps to a LigatureSet holding all ligatures starting with it. */
struct LigatureSubstFormat1
{
  /* Closure pass: expand every covered glyph already in the set through
   * its LigatureSet. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        (this+ligatureSet[iter.get_coverage ()]).closure (c);
    }
  }

  /* Records covered first glyphs as inputs and delegates the rest of the
   * collection to each LigatureSet. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      (this+ligatureSet[iter.get_coverage ()]).collect_glyphs (c);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Checks the first query glyph against coverage, then asks the matching
   * LigatureSet whether any of its ligatures fits the whole sequence. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return_trace (lig_set.would_apply (c));
  }

  /* Delegates application to the LigatureSet of the current glyph. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;

    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return_trace (lig_set.apply (c));
  }

  /* Serializes one LigatureSet per first glyph.  The per-first-glyph
   * ligature counts, the ligature output glyphs, the component counts,
   * and the flattened components are each consumed from their supplier
   * streams in order. */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<GlyphID> &first_glyphs,
                         Supplier<unsigned int> &ligature_per_first_glyph_count_list,
                         unsigned int num_first_glyphs,
                         Supplier<GlyphID> &ligatures_list,
                         Supplier<unsigned int> &component_count_list,
                         Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!ligatureSet.serialize (c, num_first_glyphs))) return_trace (false);
    for (unsigned int i = 0; i < num_first_glyphs; i++)
      if (unlikely (!ligatureSet[i].serialize (c, this).serialize (c,
                                                                   ligatures_list,
                                                                   component_count_list,
                                                                   ligature_per_first_glyph_count_list[i],
                                                                   component_list))) return_trace (false);
    ligature_per_first_glyph_count_list.advance (num_first_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, first_glyphs, num_first_glyphs))) return_trace (false);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
  }

  protected:
  USHORT        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of Substitution table */
  OffsetArrayOf<LigatureSet>
                ligatureSet;    /* Array of LigatureSet tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ligatureSet);
};
  738. struct LigatureSubst
  739. {
  740. inline bool serialize (hb_serialize_context_t *c,
  741. Supplier<GlyphID> &first_glyphs,
  742. Supplier<unsigned int> &ligature_per_first_glyph_count_list,
  743. unsigned int num_first_glyphs,
  744. Supplier<GlyphID> &ligatures_list,
  745. Supplier<unsigned int> &component_count_list,
  746. Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  747. {
  748. TRACE_SERIALIZE (this);
  749. if (unlikely (!c->extend_min (u.format))) return_trace (false);
  750. unsigned int format = 1;
  751. u.format.set (format);
  752. switch (u.format) {
  753. case 1: return_trace (u.format1.serialize (c,
  754. first_glyphs,
  755. ligature_per_first_glyph_count_list,
  756. num_first_glyphs,
  757. ligatures_list,
  758. component_count_list,
  759. component_list));
  760. default:return_trace (false);
  761. }
  762. }
  763. template <typename context_t>
  764. inline typename context_t::return_t dispatch (context_t *c) const
  765. {
  766. TRACE_DISPATCH (this, u.format);
  767. if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
  768. switch (u.format) {
  769. case 1: return_trace (c->dispatch (u.format1));
  770. default:return_trace (c->default_return_value ());
  771. }
  772. }
  773. protected:
  774. union {
  775. USHORT format; /* Format identifier */
  776. LigatureSubstFormat1 format1;
  777. } u;
  778. };
/* GSUB lookup type 5: contextual substitution.  All behavior is shared
 * with GPOS via the common Context struct. */
struct ContextSubst : Context {};

/* GSUB lookup type 6: chaining contextual substitution; shares the
 * common ChainContext machinery. */
struct ChainContextSubst : ChainContext {};

/* GSUB lookup type 7: extension mechanism (32-bit offset to a subtable
 * of another lookup type). */
struct ExtensionSubst : Extension<ExtensionSubst>
{
  typedef struct SubstLookupSubTable LookupSubTable;
  /* Defined out-of-line (after SubstLookupSubTable is complete);
   * presumably reports whether the wrapped lookup is the
   * reverse-chaining type -- confirm at the definition site. */
  inline bool is_reverse (void) const;
};
/* GSUB Lookup Type 8, Format 1: Reverse Chaining Contextual Single
 * Substitution.  The lookahead and substitute arrays are NOT at fixed
 * offsets; they follow the variable-length backtrack array and are
 * located at runtime via StructAfter<> (hence the trailing-X member
 * names below). */
786. struct ReverseChainSingleSubstFormat1
787. {
/* Add to c->glyphs every substitute glyph that could be produced, but only
 * if every backtrack and lookahead coverage already intersects the set. */
788. inline void closure (hb_closure_context_t *c) const
789. {
790. TRACE_CLOSURE (this);
791. const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
792. unsigned int count;
793. count = backtrack.len;
794. for (unsigned int i = 0; i < count; i++)
/* If any context coverage is disjoint from the closure set, the rule can
 * never fire, so nothing is added. */
795. if (!(this+backtrack[i]).intersects (c->glyphs))
796. return;
797. count = lookahead.len;
798. for (unsigned int i = 0; i < count; i++)
799. if (!(this+lookahead[i]).intersects (c->glyphs))
800. return;
801. const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
802. Coverage::Iter iter;
/* substitute[] is parallel to the coverage table: coverage index i maps
 * to substitute[i]. */
803. for (iter.init (this+coverage); iter.more (); iter.next ()) {
804. if (c->glyphs->has (iter.get_glyph ()))
805. c->glyphs->add (substitute[iter.get_coverage ()]);
806. }
807. }
/* Report every glyph this subtable can match (input/before/after) or
 * output, into the respective sets on the context. */
808. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
809. {
810. TRACE_COLLECT_GLYPHS (this);
811. const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
812. unsigned int count;
813. (this+coverage).add_coverage (c->input);
814. count = backtrack.len;
815. for (unsigned int i = 0; i < count; i++)
816. (this+backtrack[i]).add_coverage (c->before);
817. count = lookahead.len;
818. for (unsigned int i = 0; i < count; i++)
819. (this+lookahead[i]).add_coverage (c->after);
820. const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
821. count = substitute.len;
822. for (unsigned int i = 0; i < count; i++)
823. c->output->add (substitute[i]);
824. }
825. inline const Coverage &get_coverage (void) const
826. {
827. return this+coverage;
828. }
/* A reverse sub would apply to a single glyph covered by the main
 * coverage (context is not consulted here). */
829. inline bool would_apply (hb_would_apply_context_t *c) const
830. {
831. TRACE_WOULD_APPLY (this);
832. return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
833. }
834. inline bool apply (hb_apply_context_t *c) const
835. {
836. TRACE_APPLY (this);
/* Reverse-chaining lookups may only run at the top level of the apply
 * loop; recursing into one from another lookup is rejected. */
837. if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
838. return_trace (false); /* No chaining to this type */
839. unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
840. if (likely (index == NOT_COVERED)) return_trace (false);
841. const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
842. const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
/* Both context halves are matched by coverage (match_coverage); only if
 * both succeed is the current glyph replaced in place. */
843. if (match_backtrack (c,
844. backtrack.len, (USHORT *) backtrack.array,
845. match_coverage, this) &&
846. match_lookahead (c,
847. lookahead.len, (USHORT *) lookahead.array,
848. match_coverage, this,
849. 1))
850. {
851. c->replace_glyph_inplace (substitute[index]);
852. /* Note: We DON'T decrease buffer->idx. The main loop does it
853. * for us. This is useful for preventing surprises if someone
854. * calls us through a Context lookup. */
855. return_trace (true);
856. }
857. return_trace (false);
858. }
/* Sanitization must proceed in memory order: each StructAfter<> location
 * is only valid once the preceding array's length has been validated. */
859. inline bool sanitize (hb_sanitize_context_t *c) const
860. {
861. TRACE_SANITIZE (this);
862. if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
863. return_trace (false);
864. const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
865. if (!lookahead.sanitize (c, this))
866. return_trace (false);
867. const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
868. return_trace (substitute.sanitize (c));
869. }
870. protected:
871. USHORT format; /* Format identifier--format = 1 */
872. OffsetTo<Coverage>
873. coverage; /* Offset to Coverage table--from
874. * beginning of table */
875. OffsetArrayOf<Coverage>
876. backtrack; /* Array of coverage tables
877. * in backtracking sequence, in glyph
878. * sequence order */
/* The trailing-X members are placeholders: their real location is past
 * the variable-length member before them, so they must be accessed via
 * StructAfter<>, never directly. */
879. OffsetArrayOf<Coverage>
880. lookaheadX; /* Array of coverage tables
881. * in lookahead sequence, in glyph
882. * sequence order */
883. ArrayOf<GlyphID>
884. substituteX; /* Array of substitute
885. * GlyphIDs--ordered by Coverage Index */
886. public:
887. DEFINE_SIZE_MIN (10);
888. };
/* GSUB Lookup Type 8 wrapper: dispatches on the format field.  Only
 * format 1 is defined by OpenType; unknown formats fall through to the
 * context's default return value. */
889. struct ReverseChainSingleSubst
890. {
891. template <typename context_t>
892. inline typename context_t::return_t dispatch (context_t *c) const
893. {
894. TRACE_DISPATCH (this, u.format);
/* may_dispatch() also sanitize-checks that u.format is readable. */
895. if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
896. switch (u.format) {
897. case 1: return_trace (c->dispatch (u.format1));
898. default:return_trace (c->default_return_value ());
899. }
900. }
901. protected:
902. union {
903. USHORT format; /* Format identifier */
904. ReverseChainSingleSubstFormat1 format1;
905. } u;
906. };
  907. /*
  908. * SubstLookup
  909. */
/* Union of all GSUB subtable types; dispatched by the lookup type carried
 * in the enclosing Lookup (not stored in the subtable itself). */
910. struct SubstLookupSubTable
911. {
912. friend struct SubstLookup;
/* Lookup type values as defined by the OpenType GSUB specification. */
913. enum Type {
914. Single = 1,
915. Multiple = 2,
916. Alternate = 3,
917. Ligature = 4,
918. Context = 5,
919. ChainContext = 6,
920. Extension = 7,
921. ReverseChainSingle = 8
922. };
/* Route the context to the member matching lookup_type; unknown types get
 * the context's default return value. */
923. template <typename context_t>
924. inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
925. {
926. TRACE_DISPATCH (this, lookup_type);
/* Checks that the leading format USHORT is at least readable before any
 * member-specific dispatch. */
927. if (unlikely (!c->may_dispatch (this, &u.sub_format))) return_trace (c->no_dispatch_return_value ());
928. switch (lookup_type) {
929. case Single: return_trace (u.single.dispatch (c));
930. case Multiple: return_trace (u.multiple.dispatch (c));
931. case Alternate: return_trace (u.alternate.dispatch (c));
932. case Ligature: return_trace (u.ligature.dispatch (c));
933. case Context: return_trace (u.context.dispatch (c));
934. case ChainContext: return_trace (u.chainContext.dispatch (c));
935. case Extension: return_trace (u.extension.dispatch (c));
936. case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c));
937. default: return_trace (c->default_return_value ());
938. }
939. }
940. protected:
941. union {
942. USHORT sub_format;
943. SingleSubst single;
944. MultipleSubst multiple;
945. AlternateSubst alternate;
946. LigatureSubst ligature;
947. ContextSubst context;
948. ChainContextSubst chainContext;
949. ExtensionSubst extension;
950. ReverseChainSingleSubst reverseChainContextSingle;
951. } u;
952. public:
953. DEFINE_SIZE_UNION (2, sub_format);
954. };
/* A GSUB lookup: the shared Lookup header plus GSUB-specific dispatch,
 * closure/collection entry points, and serialization helpers for the
 * simple lookup types. */
955. struct SubstLookup : Lookup
956. {
957. inline const SubstLookupSubTable& get_subtable (unsigned int i) const
958. { return Lookup::get_subtable<SubstLookupSubTable> (i); }
/* Type 8 is the only lookup type applied in reverse buffer order. */
959. inline static bool lookup_type_is_reverse (unsigned int lookup_type)
960. { return lookup_type == SubstLookupSubTable::ReverseChainSingle; }
961. inline bool is_reverse (void) const
962. {
963. unsigned int type = get_type ();
/* Extension lookups hide the real type inside the first subtable; the
 * sanitizer below guarantees all subtables share one type. */
964. if (unlikely (type == SubstLookupSubTable::Extension))
965. return CastR<ExtensionSubst> (get_subtable(0)).is_reverse ();
966. return lookup_type_is_reverse (type);
967. }
968. inline bool apply (hb_apply_context_t *c) const
969. {
970. TRACE_APPLY (this);
971. return_trace (dispatch (c));
972. }
/* Closure/collect install a recursion callback so nested lookups (via
 * Context/ChainContext) are followed. */
973. inline hb_closure_context_t::return_t closure (hb_closure_context_t *c) const
974. {
975. TRACE_CLOSURE (this);
976. c->set_recurse_func (dispatch_recurse_func<hb_closure_context_t>);
977. return_trace (dispatch (c));
978. }
979. inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
980. {
981. TRACE_COLLECT_GLYPHS (this);
982. c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
983. return_trace (dispatch (c));
984. }
985. template <typename set_t>
986. inline void add_coverage (set_t *glyphs) const
987. {
988. hb_add_coverage_context_t<set_t> c (glyphs);
989. dispatch (&c);
990. }
991. inline bool would_apply (hb_would_apply_context_t *c,
992. const hb_ot_layout_lookup_accelerator_t *accel) const
993. {
994. TRACE_WOULD_APPLY (this);
995. if (unlikely (!c->len)) return_trace (false);
/* Accelerator bloom-style pre-check: cheap rejection before dispatch. */
996. if (!accel->may_have (c->glyphs[0])) return_trace (false);
997. return_trace (dispatch (c));
998. }
/* Implemented out-of-class at the bottom of this file. */
999. static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);
1000. inline SubstLookupSubTable& serialize_subtable (hb_serialize_context_t *c,
1001. unsigned int i)
1002. { return get_subtables<SubstLookupSubTable> ()[i].serialize (c, this); }
/* serialize_* helpers: write a one-subtable lookup of the given type,
 * then delegate the subtable body to the matching Subst serializer. */
1003. inline bool serialize_single (hb_serialize_context_t *c,
1004. uint32_t lookup_props,
1005. Supplier<GlyphID> &glyphs,
1006. Supplier<GlyphID> &substitutes,
1007. unsigned int num_glyphs)
1008. {
1009. TRACE_SERIALIZE (this);
1010. if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Single, lookup_props, 1))) return_trace (false);
1011. return_trace (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes, num_glyphs));
1012. }
1013. inline bool serialize_multiple (hb_serialize_context_t *c,
1014. uint32_t lookup_props,
1015. Supplier<GlyphID> &glyphs,
1016. Supplier<unsigned int> &substitute_len_list,
1017. unsigned int num_glyphs,
1018. Supplier<GlyphID> &substitute_glyphs_list)
1019. {
1020. TRACE_SERIALIZE (this);
1021. if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Multiple, lookup_props, 1))) return_trace (false);
1022. return_trace (serialize_subtable (c, 0).u.multiple.serialize (c,
1023. glyphs,
1024. substitute_len_list,
1025. num_glyphs,
1026. substitute_glyphs_list));
1027. }
1028. inline bool serialize_alternate (hb_serialize_context_t *c,
1029. uint32_t lookup_props,
1030. Supplier<GlyphID> &glyphs,
1031. Supplier<unsigned int> &alternate_len_list,
1032. unsigned int num_glyphs,
1033. Supplier<GlyphID> &alternate_glyphs_list)
1034. {
1035. TRACE_SERIALIZE (this);
1036. if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Alternate, lookup_props, 1))) return_trace (false);
1037. return_trace (serialize_subtable (c, 0).u.alternate.serialize (c,
1038. glyphs,
1039. alternate_len_list,
1040. num_glyphs,
1041. alternate_glyphs_list));
1042. }
1043. inline bool serialize_ligature (hb_serialize_context_t *c,
1044. uint32_t lookup_props,
1045. Supplier<GlyphID> &first_glyphs,
1046. Supplier<unsigned int> &ligature_per_first_glyph_count_list,
1047. unsigned int num_first_glyphs,
1048. Supplier<GlyphID> &ligatures_list,
1049. Supplier<unsigned int> &component_count_list,
1050. Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
1051. {
1052. TRACE_SERIALIZE (this);
1053. if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Ligature, lookup_props, 1))) return_trace (false);
1054. return_trace (serialize_subtable (c, 0).u.ligature.serialize (c,
1055. first_glyphs,
1056. ligature_per_first_glyph_count_list,
1057. num_first_glyphs,
1058. ligatures_list,
1059. component_count_list,
1060. component_list));
1061. }
/* Implemented out-of-class at the bottom of this file (needs GSUB). */
1062. template <typename context_t>
1063. static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
1064. template <typename context_t>
1065. inline typename context_t::return_t dispatch (context_t *c) const
1066. { return Lookup::dispatch<SubstLookupSubTable> (c); }
1067. inline bool sanitize (hb_sanitize_context_t *c) const
1068. {
1069. TRACE_SANITIZE (this);
1070. if (unlikely (!Lookup::sanitize (c))) return_trace (false);
/* dispatch() under a sanitize context sanitizes every subtable. */
1071. if (unlikely (!dispatch (c))) return_trace (false);
1072. if (unlikely (get_type () == SubstLookupSubTable::Extension))
1073. {
1074. /* The spec says all subtables of an Extension lookup should
1075. * have the same type. This is specially important if one has
1076. * a reverse type! */
1077. unsigned int type = get_subtable (0).u.extension.get_type ();
1078. unsigned int count = get_subtable_count ();
1079. for (unsigned int i = 1; i < count; i++)
1080. if (get_subtable (i).u.extension.get_type () != type)
1081. return_trace (false);
1082. }
1083. return_trace (true);
1084. }
1085. };
1086. typedef OffsetListOf<SubstLookup> SubstLookupList;
1087. /*
1088. * GSUB -- The Glyph Substitution Table
1089. */
/* Top-level GSUB table: the shared GSUBGPOS header with the lookup list
 * re-typed as SubstLookups. */
1090. struct GSUB : GSUBGPOS
1091. {
1092. static const hb_tag_t tableTag = HB_OT_TAG_GSUB;
1093. inline const SubstLookup& get_lookup (unsigned int i) const
1094. { return CastR<SubstLookup> (GSUBGPOS::get_lookup (i)); }
/* Defined out-of-class below; prepares buffer glyph props before any
 * substitution lookups run. */
1095. static inline void substitute_start (hb_font_t *font, hb_buffer_t *buffer);
1096. inline bool sanitize (hb_sanitize_context_t *c) const
1097. {
1098. TRACE_SANITIZE (this);
/* GSUBGPOS::sanitize validated the generic header; re-check the lookup
 * list with its GSUB-specific element type. */
1099. if (unlikely (!GSUBGPOS::sanitize (c))) return_trace (false);
1100. const OffsetTo<SubstLookupList> &list = CastR<OffsetTo<SubstLookupList> > (lookupList);
1101. return_trace (list.sanitize (c, this));
1102. }
1103. public:
1104. DEFINE_SIZE_STATIC (10);
1105. };
/* Seed per-glyph GDEF properties (and clear lig-props/syllable) for every
 * glyph in the buffer before GSUB lookups are applied.  When the font's
 * GDEF supplies no props for a glyph, a base/mark class is synthesized
 * from the Unicode general category. */
1106. void
1107. GSUB::substitute_start (hb_font_t *font, hb_buffer_t *buffer)
1108. {
1109. _hb_buffer_assert_gsubgpos_vars (buffer);
1110. const GDEF &gdef = *hb_ot_layout_from_face (font->face)->gdef;
1111. unsigned int count = buffer->len;
1112. hb_glyph_info_t *info = buffer->info;
1113. for (unsigned int i = 0; i < count; i++)
1114. {
1115. unsigned int props = gdef.get_glyph_props (info[i].codepoint);
1116. if (!props)
1117. {
1118. /* Never mark default-ignorables as marks.
1119. * They won't get in the way of lookups anyway,
1120. * but having them as mark will cause them to be skipped
1121. * over if the lookup-flag says so, but at least for the
1122. * Mongolian variation selectors, looks like Uniscribe
1123. * marks them as non-mark. Some Mongolian fonts without
1124. * GDEF rely on this. Another notable character that
1125. * this applies to is COMBINING GRAPHEME JOINER. */
1126. props = (_hb_glyph_info_get_general_category (&info[i]) !=
1127. HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK ||
1128. _hb_glyph_info_is_default_ignorable (&info[i])) ?
1129. HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH :
1130. HB_OT_LAYOUT_GLYPH_PROPS_MARK;
1131. }
1132. _hb_glyph_info_set_glyph_props (&info[i], props);
1133. _hb_glyph_info_clear_lig_props (&info[i]);
/* Syllable values, if any, are left over from a previous shaping run;
 * reset them for this buffer. */
1134. buffer->info[i].syllable() = 0;
1135. }
1136. }
1137. /* Out-of-class implementation for methods recursing */
/* An Extension subtable is "reverse" iff its wrapped subtable's type is
 * ReverseChainSingle; nested Extensions are followed recursively. */
1138. /*static*/ inline bool ExtensionSubst::is_reverse (void) const
1139. {
1140. unsigned int type = get_type ();
1141. if (unlikely (type == SubstLookupSubTable::Extension))
1142. return CastR<ExtensionSubst> (get_subtable<LookupSubTable>()).is_reverse ();
1143. return SubstLookup::lookup_type_is_reverse (type);
1144. }
/* Recursion callback for closure/collect contexts: resolve lookup_index
 * through the face's GSUB table and dispatch into that lookup. */
1145. template <typename context_t>
1146. /*static*/ inline typename context_t::return_t SubstLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
1147. {
1148. const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
1149. const SubstLookup &l = gsub.get_lookup (lookup_index);
1150. return l.dispatch (c);
1151. }
/* Recursion callback for the apply context: run the referenced lookup
 * with its own lookup props/index installed, restoring the caller's
 * props/index afterwards so the outer lookup continues unaffected. */
1152. /*static*/ inline bool SubstLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
1153. {
1154. const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
1155. const SubstLookup &l = gsub.get_lookup (lookup_index);
/* Save caller state; the nested lookup runs with its own flags. */
1156. unsigned int saved_lookup_props = c->lookup_props;
1157. unsigned int saved_lookup_index = c->lookup_index;
1158. c->set_lookup_index (lookup_index);
1159. c->set_lookup_props (l.get_props ());
1160. bool ret = l.dispatch (c);
1161. c->set_lookup_index (saved_lookup_index);
1162. c->set_lookup_props (saved_lookup_props);
1163. return ret;
1164. }
  1165. } /* namespace OT */
  1166. #endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */