PageRenderTime 64ms CodeModel.GetById 29ms RepoModel.GetById 0ms app.codeStats 0ms

/gfx/harfbuzz/src/hb-ot-layout-gsub-table.hh

https://github.com/marcussaad/firefox
C++ Header | 1414 lines | 1153 code | 195 blank | 66 comment | 184 complexity | 95cfc2ae44eee90679a5af40f12fe86b MD5 | raw file
Possible License(s): JSON, LGPL-2.1, AGPL-1.0, MPL-2.0-no-copyleft-exception, MPL-2.0, BSD-3-Clause, LGPL-3.0, BSD-2-Clause, MIT, Apache-2.0, GPL-2.0, 0BSD
  1. /*
  2. * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
  3. * Copyright © 2010,2012 Google, Inc.
  4. *
  5. * This is part of HarfBuzz, a text shaping library.
  6. *
  7. * Permission is hereby granted, without written agreement and without
  8. * license or royalty fees, to use, copy, modify, and distribute this
  9. * software and its documentation for any purpose, provided that the
  10. * above copyright notice and the following two paragraphs appear in
  11. * all copies of this software.
  12. *
  13. * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14. * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15. * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16. * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17. * DAMAGE.
  18. *
  19. * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20. * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21. * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
  22. * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23. * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24. *
  25. * Red Hat Author(s): Behdad Esfahbod
  26. * Google Author(s): Behdad Esfahbod
  27. */
  28. #ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
  29. #define HB_OT_LAYOUT_GSUB_TABLE_HH
  30. #include "hb-ot-layout-gsubgpos-private.hh"
  31. namespace OT {
/* GSUB Lookup Type 1, Format 1: single substitution by constant delta.
 * Each covered glyph maps to (glyph + deltaGlyphID), truncated to 16 bits. */
struct SingleSubstFormat1
{
  /* Glyph closure: for every covered glyph already in c->glyphs,
   * add its substitute to the set. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      hb_codepoint_t glyph_id = iter.get_glyph ();
      if (c->glyphs->has (glyph_id))
        c->glyphs->add ((glyph_id + deltaGlyphID) & 0xFFFF);
    }
  }

  /* Collect every input glyph (the coverage) and every possible
   * output glyph (input + delta) into the collect context. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      hb_codepoint_t glyph_id = iter.get_glyph ();
      c->input->add (glyph_id);
      c->output->add ((glyph_id + deltaGlyphID) & 0xFFFF);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* True iff the (single-glyph) would-apply input is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Replace the current buffer glyph with glyph+delta if covered. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    /* According to the Adobe Annotated OpenType Suite, result is always
     * limited to 16bit. */
    glyph_id = (glyph_id + deltaGlyphID) & 0xFFFF;
    c->replace_glyph (glyph_id);

    return TRACE_RETURN (true);
  }

  /* Serialize this subtable: write the fixed header, the delta, and a
   * Coverage table built from `glyphs`.  Returns false on out-of-space. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 unsigned int num_glyphs,
			 int delta)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    deltaGlyphID.set (delta); /* TODO(serilaize) overflow? */
    return TRACE_RETURN (true);
  }

  /* Bounds-check the offset-to-coverage and the delta field. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of Substitution table */
  SHORT		deltaGlyphID;		/* Add to original GlyphID to get
					 * substitute GlyphID */
  public:
  DEFINE_SIZE_STATIC (6);
};
/* GSUB Lookup Type 1, Format 2: single substitution via explicit array.
 * The substitute for a glyph is looked up by its Coverage index. */
struct SingleSubstFormat2
{
  /* Glyph closure: add substitutes of covered glyphs already in the set. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
	c->glyphs->add (substitute[iter.get_coverage ()]);
    }
  }

  /* Collect all input (covered) glyphs and their substitutes. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      c->output->add (substitute[iter.get_coverage ()]);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* True iff the (single-glyph) would-apply input is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Replace the current glyph with substitute[coverage-index].
   * The index>=len check guards against malformed fonts where the
   * coverage table is longer than the substitute array. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (unlikely (index >= substitute.len)) return TRACE_RETURN (false);

    glyph_id = substitute[index];
    c->replace_glyph (glyph_id);

    return TRACE_RETURN (true);
  }

  /* Serialize: substitute array first (inline), then the Coverage table
   * (linked via offset).  Both arrays have num_glyphs entries. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 Supplier<GlyphID> &substitutes,
			 unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return TRACE_RETURN (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  /* Bounds-check the coverage offset and the substitute array. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && substitute.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of Substitution table */
  ArrayOf<GlyphID>
		substitute;		/* Array of substitute
					 * GlyphIDs--ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, substitute);
};
/* GSUB Lookup Type 1: single substitution.  Dispatches on the format
 * field of the underlying subtable (format 1 = delta, format 2 = array). */
struct SingleSubst
{
  /* Generic dispatcher: forwards the context's process() to the
   * concrete format struct; unknown formats yield the context's
   * default return value (safe no-op). */
  template <typename context_t>
  inline typename context_t::return_t process (context_t *c) const
  {
    TRACE_PROCESS (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->process (u.format1));
    case 2: return TRACE_RETURN (c->process (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  /* Serialize, picking the most compact format: if all pairs share the
   * same glyph delta, emit format 1, otherwise format 2.
   * Note: `delta` is only read when format == 1, which implies
   * num_glyphs > 0, so it is never used uninitialized. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 Supplier<GlyphID> &substitutes,
			 unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 2;
    int delta;
    if (num_glyphs) {
      format = 1;
      /* TODO(serialize) check for wrap-around */
      delta = substitutes[0] - glyphs[0];
      for (unsigned int i = 1; i < num_glyphs; i++)
	if (delta != substitutes[i] - glyphs[i]) {
	  format = 2;
	  break;
	}
    }
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs, delta));
    case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
    default:return TRACE_RETURN (false);
    }
  }

  /* Sanitize the format tag first, then the matching subtable.
   * Unknown formats are accepted (treated as no-op at apply time). */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  SingleSubstFormat1	format1;
  SingleSubstFormat2	format2;
  } u;
};
/* One output sequence for MultipleSubst: the string of glyphs that
 * replaces a single covered input glyph. */
struct Sequence
{
  /* Glyph closure: all glyphs in the sequence are reachable outputs. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->glyphs->add (substitute[i]);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->output->add (substitute[i]);
  }

  /* Expand the current glyph into the full sequence.
   * An empty sequence is rejected (spec-wise deleting a glyph this way
   * is not supported here).  When the input glyph was a ligature, the
   * outputs are emitted with base-glyph class — presumably so marks can
   * still attach; each output also records its component index. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (unlikely (!substitute.len)) return TRACE_RETURN (false);

    unsigned int klass = c->property & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE ? HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;
    unsigned int count = substitute.len;
    for (unsigned int i = 0; i < count; i++) {
      set_lig_props_for_component (c->buffer->cur(), i);
      c->output_glyph (substitute.array[i], klass);
    }
    c->buffer->skip_glyph ();

    return TRACE_RETURN (true);
  }

  /* Serialize the glyph string for this sequence. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!substitute.serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (substitute.sanitize (c));
  }

  protected:
  ArrayOf<GlyphID>
		substitute;		/* String of GlyphIDs to substitute */
  public:
  DEFINE_SIZE_ARRAY (2, substitute);
};
/* GSUB Lookup Type 2, Format 1: one-to-many substitution.
 * Each covered glyph maps (by Coverage index) to a Sequence of glyphs. */
struct MultipleSubstFormat1
{
  /* Glyph closure: recurse into the Sequence of each covered glyph
   * already in the closure set. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
	(this+sequence[iter.get_coverage ()]).closure (c);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = sequence.len;
    for (unsigned int i = 0; i < count; i++)
	(this+sequence[i]).collect_glyphs (c);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* True iff the (single-glyph) would-apply input is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Delegate to the Sequence selected by the coverage index. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+sequence[index]).apply (c));
  }

  /* Serialize: one Sequence per input glyph.  substitute_len_list[i]
   * gives the length of glyph i's sequence; substitute_glyphs_list is
   * the flattened glyph data consumed sequentially by each Sequence. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 Supplier<unsigned int> &substitute_len_list,
			 unsigned int num_glyphs,
			 Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!sequence.serialize (c, num_glyphs))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      if (unlikely (!sequence[i].serialize (c, this).serialize (c,
								substitute_glyphs_list,
								substitute_len_list[i]))) return TRACE_RETURN (false);
    substitute_len_list.advance (num_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  /* Bounds-check the coverage offset and every Sequence offset. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && sequence.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of Substitution table */
  OffsetArrayOf<Sequence>
		sequence;		/* Array of Sequence tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, sequence);
};
/* GSUB Lookup Type 2: multiple substitution.  Only format 1 exists. */
struct MultipleSubst
{
  /* Generic dispatcher; unknown formats yield the context's default. */
  template <typename context_t>
  inline typename context_t::return_t process (context_t *c) const
  {
    TRACE_PROCESS (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->process (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  /* Serialize always as format 1 (the only defined format). */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 Supplier<unsigned int> &substitute_len_list,
			 unsigned int num_glyphs,
			 Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 1;
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, substitute_len_list, num_glyphs, substitute_glyphs_list));
    default:return TRACE_RETURN (false);
    }
  }

  /* Sanitize the format tag, then the matching subtable; unknown
   * formats are accepted (no-op at apply time). */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  MultipleSubstFormat1	format1;
  } u;
};
typedef ArrayOf<GlyphID> AlternateSet;	/* Array of alternate GlyphIDs--in
					 * arbitrary order */

/* GSUB Lookup Type 3, Format 1: alternate substitution.
 * Each covered glyph maps to a set of alternates; the one actually used
 * is selected by the value encoded in the lookup mask (see apply). */
struct AlternateSubstFormat1
{
  /* Glyph closure: every alternate of a covered glyph already in the
   * set is a reachable output. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ())) {
	const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
	unsigned int count = alt_set.len;
	for (unsigned int i = 0; i < count; i++)
	  c->glyphs->add (alt_set[i]);
      }
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
      unsigned int count = alt_set.len;
      for (unsigned int i = 0; i < count; i++)
	c->output->add (alt_set[i]);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* True iff the (single-glyph) would-apply input is covered. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  /* Replace the current glyph with one of its alternates.  The 1-based
   * alternate index is extracted from the glyph's mask bits belonging
   * to this lookup (shifted down by the mask's trailing-zero count);
   * index 0 or out-of-range means "no substitution". */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;

    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const AlternateSet &alt_set = this+alternateSet[index];

    if (unlikely (!alt_set.len)) return TRACE_RETURN (false);

    hb_mask_t glyph_mask = c->buffer->cur().mask;
    hb_mask_t lookup_mask = c->lookup_mask;

    /* Note: This breaks badly if two features enabled this lookup together. */
    unsigned int shift = _hb_ctz (lookup_mask);
    unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);

    if (unlikely (alt_index > alt_set.len || alt_index == 0)) return TRACE_RETURN (false);

    glyph_id = alt_set[alt_index - 1];

    c->replace_glyph (glyph_id);

    return TRACE_RETURN (true);
  }

  /* Serialize: one AlternateSet per input glyph; alternate_len_list[i]
   * is glyph i's alternate count, alternate_glyphs_list the flattened
   * alternates consumed sequentially. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 Supplier<unsigned int> &alternate_len_list,
			 unsigned int num_glyphs,
			 Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!alternateSet.serialize (c, num_glyphs))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_glyphs; i++)
      if (unlikely (!alternateSet[i].serialize (c, this).serialize (c,
								    alternate_glyphs_list,
								    alternate_len_list[i]))) return TRACE_RETURN (false);
    alternate_len_list.advance (num_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  /* Bounds-check the coverage offset and every AlternateSet offset. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of Substitution table */
  OffsetArrayOf<AlternateSet>
		alternateSet;		/* Array of AlternateSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, alternateSet);
};
/* GSUB Lookup Type 3: alternate substitution.  Only format 1 exists. */
struct AlternateSubst
{
  /* Generic dispatcher; unknown formats yield the context's default. */
  template <typename context_t>
  inline typename context_t::return_t process (context_t *c) const
  {
    TRACE_PROCESS (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->process (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  /* Serialize always as format 1 (the only defined format). */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &glyphs,
			 Supplier<unsigned int> &alternate_len_list,
			 unsigned int num_glyphs,
			 Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 1;
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, alternate_len_list, num_glyphs, alternate_glyphs_list));
    default:return TRACE_RETURN (false);
    }
  }

  /* Sanitize the format tag, then the matching subtable; unknown
   * formats are accepted (no-op at apply time). */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  AlternateSubstFormat1	format1;
  } u;
};
/* One ligature rule: a component glyph string (headless — the first
 * component is implicit, matched by the LigatureSet's coverage entry)
 * and the ligature glyph that replaces the whole string. */
struct Ligature
{
  /* Glyph closure: the ligature glyph is reachable only if ALL trailing
   * components (indices 1..len-1) are already in the closure set. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int count = component.len;
    for (unsigned int i = 1; i < count; i++)
      if (!c->glyphs->has (component[i]))
        return;
    c->glyphs->add (ligGlyph);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int count = component.len;
    for (unsigned int i = 1; i < count; i++)
      c->input->add (component[i]);
    c->output->add (ligGlyph);
  }

  /* Exact-length, exact-glyph match of the would-apply input against
   * the component string (index 0 already matched via coverage). */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    if (c->len != component.len)
      return TRACE_RETURN (false);

    for (unsigned int i = 1; i < c->len; i++)
      if (likely (c->glyphs[i] != component[i]))
	return TRACE_RETURN (false);

    return TRACE_RETURN (true);
  }

  /* Try to form the ligature at the current buffer position: match the
   * trailing components (skipping per match_input's rules), then merge
   * the clusters spanned and emit ligGlyph via ligate_input. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = component.len;
    if (unlikely (count < 1)) return TRACE_RETURN (false);

    unsigned int end_offset = 0;
    bool is_mark_ligature = false;
    unsigned int total_component_count = 0;

    if (likely (!match_input (c, count,
			      &component[1],
			      match_glyph,
			      NULL,
			      &end_offset,
			      &is_mark_ligature,
			      &total_component_count)))
      return TRACE_RETURN (false);

    /* Deal, we are forming the ligature. */
    c->buffer->merge_clusters (c->buffer->idx, c->buffer->idx + end_offset);

    ligate_input (c,
		  count,
		  &component[1],
		  ligGlyph,
		  match_glyph,
		  NULL,
		  is_mark_ligature,
		  total_component_count);

    return TRACE_RETURN (true);
  }

  /* Serialize: the ligature glyph plus the headless component array
   * (num_components counts the implicit first component too). */
  inline bool serialize (hb_serialize_context_t *c,
			 GlyphID ligature,
			 Supplier<GlyphID> &components, /* Starting from second */
			 unsigned int num_components /* Including first component */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    ligGlyph = ligature;
    if (unlikely (!component.serialize (c, components, num_components))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ligGlyph.sanitize (c) && component.sanitize (c));
  }

  protected:
  GlyphID	ligGlyph;		/* GlyphID of ligature to substitute */
  HeadlessArrayOf<GlyphID>
		component;		/* Array of component GlyphIDs--start
					 * with the second component--ordered
					 * in writing direction */
  public:
  DEFINE_SIZE_ARRAY (4, component);
};
/* All ligature rules sharing the same first component, tried in order
 * of preference (first match wins). */
struct LigatureSet
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
      (this+ligature[i]).closure (c);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
      (this+ligature[i]).collect_glyphs (c);
  }

  /* True if any ligature in the set would apply to the input. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
    {
      const Ligature &lig = this+ligature[i];
      if (lig.would_apply (c))
        return TRACE_RETURN (true);
    }
    return TRACE_RETURN (false);
  }

  /* Try each ligature in preference order; first successful apply wins. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
    {
      const Ligature &lig = this+ligature[i];
      if (lig.apply (c)) return TRACE_RETURN (true);
    }

    return TRACE_RETURN (false);
  }

  /* Serialize: one Ligature per entry; component_count_list[i] gives
   * ligature i's component count, component_list the flattened trailing
   * components consumed sequentially. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &ligatures,
			 Supplier<unsigned int> &component_count_list,
			 unsigned int num_ligatures,
			 Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!ligature.serialize (c, num_ligatures))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_ligatures; i++)
      if (unlikely (!ligature[i].serialize (c, this).serialize (c,
								ligatures[i],
								component_list,
								component_count_list[i]))) return TRACE_RETURN (false);
    ligatures.advance (num_ligatures);
    component_count_list.advance (num_ligatures);
    return TRACE_RETURN (true);
  }

  /* Bounds-check every Ligature offset (which in turn checks each rule). */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ligature.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Ligature>
		ligature;		/* Array LigatureSet tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, ligature);
};
/* GSUB Lookup Type 4, Format 1: ligature substitution.
 * Coverage selects (by first component) a LigatureSet of candidate rules. */
struct LigatureSubstFormat1
{
  /* Glyph closure: recurse into the LigatureSet of each covered glyph
   * already in the closure set. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
	(this+ligatureSet[iter.get_coverage ()]).closure (c);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      c->input->add (iter.get_glyph ());
      (this+ligatureSet[iter.get_coverage ()]).collect_glyphs (c);
    }
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Check the first input glyph against coverage, then delegate the
   * full component match to the selected LigatureSet. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return TRACE_RETURN (lig_set.would_apply (c));
  }

  /* Delegate to the LigatureSet selected by the coverage index of the
   * current glyph (the ligature's first component). */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;

    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return TRACE_RETURN (lig_set.apply (c));
  }

  /* Serialize: one LigatureSet per first glyph.  The per-first-glyph
   * count list sizes each set; ligatures/components are flattened lists
   * consumed sequentially by the nested serializers. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &first_glyphs,
			 Supplier<unsigned int> &ligature_per_first_glyph_count_list,
			 unsigned int num_first_glyphs,
			 Supplier<GlyphID> &ligatures_list,
			 Supplier<unsigned int> &component_count_list,
			 Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
    if (unlikely (!ligatureSet.serialize (c, num_first_glyphs))) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < num_first_glyphs; i++)
      if (unlikely (!ligatureSet[i].serialize (c, this).serialize (c,
								   ligatures_list,
								   component_count_list,
								   ligature_per_first_glyph_count_list[i],
								   component_list))) return TRACE_RETURN (false);
    ligature_per_first_glyph_count_list.advance (num_first_glyphs);
    if (unlikely (!coverage.serialize (c, this).serialize (c, first_glyphs, num_first_glyphs))) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  /* Bounds-check the coverage offset and every LigatureSet offset. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of Substitution table */
  OffsetArrayOf<LigatureSet>
		ligatureSet;		/* Array LigatureSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ligatureSet);
};
/* GSUB Lookup Type 4: ligature substitution.  Only format 1 exists. */
struct LigatureSubst
{
  /* Generic dispatcher; unknown formats yield the context's default. */
  template <typename context_t>
  inline typename context_t::return_t process (context_t *c) const
  {
    TRACE_PROCESS (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->process (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  /* Serialize always as format 1 (the only defined format). */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<GlyphID> &first_glyphs,
			 Supplier<unsigned int> &ligature_per_first_glyph_count_list,
			 unsigned int num_first_glyphs,
			 Supplier<GlyphID> &ligatures_list,
			 Supplier<unsigned int> &component_count_list,
			 Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
    unsigned int format = 1;
    u.format.set (format);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
						      ligatures_list, component_count_list, component_list));
    default:return TRACE_RETURN (false);
    }
  }

  /* Sanitize the format tag, then the matching subtable; unknown
   * formats are accepted (no-op at apply time). */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  LigatureSubstFormat1	format1;
  } u;
};
/* GSUB Lookup Type 5: contextual substitution — reuses the shared
 * Context machinery from the common GSUB/GPOS layer. */
struct ContextSubst : Context {};

/* GSUB Lookup Type 6: chained contextual substitution — reuses the
 * shared ChainContext machinery. */
struct ChainContextSubst : ChainContext {};

/* GSUB Lookup Type 7: extension subtable wrapping another subtable
 * through a 32-bit offset. */
struct ExtensionSubst : Extension<ExtensionSubst>
{
  typedef struct SubstLookupSubTable LookupSubTable;
  /* Declared here; definition is not in view (presumably later in this
   * file, after SubstLookupSubTable is complete). */
  inline bool is_reverse (void) const;
};
/* Reverse Chaining Contextual Single Substitution, Format 1 (GSUB lookup
 * type 8).  Substitutes a single glyph in reverse (last-to-first) order,
 * conditioned on backtrack and lookahead coverage sequences.
 *
 * Memory layout note: only `format`, `coverage` and `backtrack` are at fixed
 * offsets.  The `lookahead` and `substitute` arrays follow the variable-length
 * `backtrack` array and must be reached with StructAfter<> — the `lookaheadX`
 * and `substituteX` members below are placeholders, never accessed directly. */
struct ReverseChainSingleSubstFormat1
{
  /* Glyph-closure: add each substitute whose input glyph is in c->glyphs,
   * but only if every backtrack and lookahead coverage intersects c->glyphs;
   * otherwise this subtable can never apply and contributes nothing. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int count;

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+backtrack[i]).intersects (c->glyphs))
        return;

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+lookahead[i]).intersects (c->glyphs))
        return;

    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        c->glyphs->add (substitute[iter.get_coverage ()]);
    }
  }

  /* Collect every glyph this subtable can touch: input coverage into
   * c->input, context coverages into c->before/c->after, and all possible
   * output glyphs into c->output. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int count;

    (this+coverage).add_coverage (c->input);

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      (this+backtrack[i]).add_coverage (c->before);

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      (this+lookahead[i]).add_coverage (c->after);

    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->output->add (substitute[i]);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Would-apply only checks the single input glyph against coverage;
   * context (backtrack/lookahead) is intentionally not consulted here. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Reverse-chaining subtables may only appear at the top nesting level:
     * no other lookup is allowed to recurse into this type. */
    if (unlikely (c->nesting_level_left != MAX_NESTING_LEVEL))
      return TRACE_RETURN (false); /* No chaining to this type */

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);

    if (match_backtrack (c,
                         backtrack.len, (USHORT *) backtrack.array,
                         match_coverage, this) &&
        match_lookahead (c,
                         lookahead.len, (USHORT *) lookahead.array,
                         match_coverage, this,
                         1))
    {
      c->replace_glyph_inplace (substitute[index]);
      c->buffer->idx--; /* Reverse! */
      return TRACE_RETURN (true);
    }

    return TRACE_RETURN (false);
  }

  /* Sanitize each variable-length array in layout order; each must be
   * validated before StructAfter<> may be used to locate the next one. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
      return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!lookahead.sanitize (c, this))
      return TRACE_RETURN (false);
    ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    return TRACE_RETURN (substitute.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<GlyphID>
		substituteX;		/* Array of substitute
					 * GlyphIDs--ordered by Coverage Index */
  public:
  DEFINE_SIZE_MIN (10);
};
  880. struct ReverseChainSingleSubst
  881. {
  882. template <typename context_t>
  883. inline typename context_t::return_t process (context_t *c) const
  884. {
  885. TRACE_PROCESS (this);
  886. switch (u.format) {
  887. case 1: return TRACE_RETURN (c->process (u.format1));
  888. default:return TRACE_RETURN (c->default_return_value ());
  889. }
  890. }
  891. inline bool sanitize (hb_sanitize_context_t *c) {
  892. TRACE_SANITIZE (this);
  893. if (!u.format.sanitize (c)) return TRACE_RETURN (false);
  894. switch (u.format) {
  895. case 1: return TRACE_RETURN (u.format1.sanitize (c));
  896. default:return TRACE_RETURN (true);
  897. }
  898. }
  899. protected:
  900. union {
  901. USHORT format; /* Format identifier */
  902. ReverseChainSingleSubstFormat1 format1;
  903. } u;
  904. };
  905. /*
  906. * SubstLookup
  907. */
/* Union of all GSUB subtable types.  The active member is selected by the
 * enclosing lookup's type, which is why process() and sanitize() take
 * lookup_type as a parameter instead of reading a discriminant of their own. */
struct SubstLookupSubTable
{
  friend struct SubstLookup;

  /* GSUB lookup types, per the OpenType specification. */
  enum Type {
    Single		= 1,
    Multiple		= 2,
    Alternate		= 3,
    Ligature		= 4,
    Context		= 5,
    ChainContext	= 6,
    Extension		= 7,
    ReverseChainSingle	= 8
  };

  /* Dispatch c->process() on the union member selected by lookup_type;
   * unknown types yield the context's default return value. */
  template <typename context_t>
  inline typename context_t::return_t process (context_t *c, unsigned int lookup_type) const
  {
    TRACE_PROCESS (this);
    switch (lookup_type) {
    case Single:		return TRACE_RETURN (u.single.process (c));
    case Multiple:		return TRACE_RETURN (u.multiple.process (c));
    case Alternate:		return TRACE_RETURN (u.alternate.process (c));
    case Ligature:		return TRACE_RETURN (u.ligature.process (c));
    case Context:		return TRACE_RETURN (u.context.process (c));
    case ChainContext:		return TRACE_RETURN (u.chainContext.process (c));
    case Extension:		return TRACE_RETURN (u.extension.process (c));
    case ReverseChainSingle:	return TRACE_RETURN (u.reverseChainContextSingle.process (c));
    default:			return TRACE_RETURN (c->default_return_value ());
    }
  }

  /* Sanitize the member selected by lookup_type.  The shared sub_format
   * field is checked first so each member's sanitize may read its format.
   * Unknown lookup types pass (treated as no-ops), not fail. */
  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:		return TRACE_RETURN (u.single.sanitize (c));
    case Multiple:		return TRACE_RETURN (u.multiple.sanitize (c));
    case Alternate:		return TRACE_RETURN (u.alternate.sanitize (c));
    case Ligature:		return TRACE_RETURN (u.ligature.sanitize (c));
    case Context:		return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:		return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:		return TRACE_RETURN (u.extension.sanitize (c));
    case ReverseChainSingle:	return TRACE_RETURN (u.reverseChainContextSingle.sanitize (c));
    default:			return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT	sub_format;	/* Every subtable starts with a format word. */
  } header;
  SingleSubst			single;
  MultipleSubst			multiple;
  AlternateSubst		alternate;
  LigatureSubst			ligature;
  ContextSubst			context;
  ChainContextSubst		chainContext;
  ExtensionSubst		extension;
  ReverseChainSingleSubst	reverseChainContextSingle;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};
/* A GSUB lookup: a typed list of SubstLookupSubTables plus the driver logic
 * that applies them to a buffer, computes closures, and serializes new
 * lookups for the simple subtable types. */
struct SubstLookup : Lookup
{
  inline const SubstLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable)[i]; }

  /* ReverseChainSingle is the only lookup type applied back-to-front. */
  inline static bool lookup_type_is_reverse (unsigned int lookup_type)
  { return lookup_type == SubstLookupSubTable::ReverseChainSingle; }

  inline bool is_reverse (void) const
  {
    unsigned int type = get_type ();
    /* For Extension lookups the real type lives in the wrapped subtable;
     * sanitize() guarantees all subtables agree, so checking [0] suffices. */
    if (unlikely (type == SubstLookupSubTable::Extension))
      return CastR<ExtensionSubst> (get_subtable(0)).is_reverse ();
    return lookup_type_is_reverse (type);
  }

  /* Run context c over each subtable in order, stopping early when the
   * context says the result r is final (e.g. a successful apply). */
  template <typename context_t>
  inline typename context_t::return_t process (context_t *c) const
  {
    TRACE_PROCESS (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).process (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  template <typename context_t>
  static inline typename context_t::return_t process_recurse_func (context_t *c, unsigned int lookup_index);

  inline hb_closure_context_t::return_t closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    /* Install the recursion callback so nested lookups referenced by
     * contextual subtables are followed during closure. */
    c->set_recurse_func (process_recurse_func<hb_closure_context_t>);
    return TRACE_RETURN (process (c));
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs_lookup (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (process_recurse_func<hb_collect_glyphs_context_t>);
    return TRACE_RETURN (process (c));
  }

  /* Union all subtable coverages into glyphs.  The `last` pointer skips
   * consecutive subtables that report the same Coverage object. */
  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).process (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  /* Fast pre-check with the digest (a Bloom-filter-like set) before running
   * the full would_apply machinery. */
  inline bool would_apply (hb_would_apply_context_t *c, const hb_set_digest_t *digest) const
  {
    TRACE_WOULD_APPLY (this);
    if (unlikely (!c->len))  return TRACE_RETURN (false);
    if (!digest->may_have (c->glyphs[0]))  return TRACE_RETURN (false);
      return TRACE_RETURN (process (c));
  }

  /* Try to apply this lookup at the buffer's current position only. */
  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props, &c->property))
      return TRACE_RETURN (false);
    return TRACE_RETURN (process (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  /* Apply this lookup across the whole buffer; returns true if anything
   * was substituted. */
  inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const
  {
    bool ret = false;

    if (unlikely (!c->buffer->len || !c->lookup_mask))
      return false;

    c->set_recurse_func (apply_recurse_func);
    c->set_lookup (*this);

    if (likely (!is_reverse ()))
    {
      /* in/out forward substitution */
      c->buffer->clear_output ();
      c->buffer->idx = 0;
      while (c->buffer->idx < c->buffer->len)
      {
        /* On success apply_once is responsible for advancing idx (writing
         * to the output buffer); on failure we copy the glyph through. */
        if ((c->buffer->cur().mask & c->lookup_mask) &&
            digest->may_have (c->buffer->cur().codepoint) &&
            apply_once (c))
          ret = true;
        else
          c->buffer->next_glyph ();
      }
      if (ret)
        c->buffer->swap_buffers ();
    }
    else
    {
      /* in-place backward substitution */
      c->buffer->remove_output ();
      c->buffer->idx = c->buffer->len - 1;
      do
      {
        /* A successful reverse apply decrements idx itself (see
         * ReverseChainSingleSubstFormat1::apply); otherwise step back. */
        if ((c->buffer->cur().mask & c->lookup_mask) &&
            digest->may_have (c->buffer->cur().codepoint) &&
            apply_once (c))
          ret = true;
        else
          c->buffer->idx--;
      }
      while ((int) c->buffer->idx >= 0);
    }

    return ret;
  }

  inline SubstLookupSubTable& serialize_subtable (hb_serialize_context_t *c,
						  unsigned int i)
  { return CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable)[i].serialize (c, this); }

  /* The serialize_* helpers below each emit a one-subtable lookup of the
   * corresponding type into the serialization context. */
  inline bool serialize_single (hb_serialize_context_t *c,
				uint32_t lookup_props,
				Supplier<GlyphID> &glyphs,
				Supplier<GlyphID> &substitutes,
				unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Single, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes, num_glyphs));
  }

  inline bool serialize_multiple (hb_serialize_context_t *c,
				  uint32_t lookup_props,
				  Supplier<GlyphID> &glyphs,
				  Supplier<unsigned int> &substitute_len_list,
				  unsigned int num_glyphs,
				  Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Multiple, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.multiple.serialize (c, glyphs, substitute_len_list, num_glyphs,
									 substitute_glyphs_list));
  }

  inline bool serialize_alternate (hb_serialize_context_t *c,
				   uint32_t lookup_props,
				   Supplier<GlyphID> &glyphs,
				   Supplier<unsigned int> &alternate_len_list,
				   unsigned int num_glyphs,
				   Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Alternate, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.alternate.serialize (c, glyphs, alternate_len_list, num_glyphs,
									  alternate_glyphs_list));
  }

  inline bool serialize_ligature (hb_serialize_context_t *c,
				  uint32_t lookup_props,
				  Supplier<GlyphID> &first_glyphs,
				  Supplier<unsigned int> &ligature_per_first_glyph_count_list,
				  unsigned int num_first_glyphs,
				  Supplier<GlyphID> &ligatures_list,
				  Supplier<unsigned int> &component_count_list,
				  Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Ligature, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.ligature.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
									 ligatures_list, component_count_list, component_list));
  }

  inline bool sanitize (hb_sanitize_context_t *c)
  {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<SubstLookupSubTable> &list = CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable);
    if (unlikely (!list.sanitize (c, this, get_type ()))) return TRACE_RETURN (false);

    if (unlikely (get_type () == SubstLookupSubTable::Extension))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type.  This is specially important if one has
       * a reverse type!
       *
       * We just check that they are all either forward, or reverse. */
      unsigned int type = get_subtable (0).u.extension.get_type ();
      unsigned int count = get_subtable_count ();
      for (unsigned int i = 1; i < count; i++)
        if (get_subtable (i).u.extension.get_type () != type)
          return TRACE_RETURN (false);
    }
    return TRACE_RETURN (true);
  }
};
/* Offset-list of substitution lookups, indexed by lookup index. */
typedef OffsetListOf<SubstLookup> SubstLookupList;
  1155. /*
  1156. * GSUB -- The Glyph Substitution Table
  1157. */
/* The GSUB table: shares its header layout (script/feature/lookup lists)
 * with GPOS via the GSUBGPOS base. */
struct GSUB : GSUBGPOS
{
  static const hb_tag_t Tag	= HB_OT_TAG_GSUB;

  inline const SubstLookup& get_lookup (unsigned int i) const
  { return CastR<SubstLookup> (GSUBGPOS::get_lookup (i)); }

  /* Buffer setup/teardown hooks around substitution; defined below. */
  static inline void substitute_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void substitute_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    /* Base sanitize validates the header; then check the lookups as
     * SubstLookups specifically. */
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<SubstLookupList> &list = CastR<OffsetTo<SubstLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};
/* Prepare a buffer for GSUB application: claim the per-glyph scratch vars
 * and seed each glyph's properties from the font's GDEF table. */
void
GSUB::substitute_start (hb_font_t *font, hb_buffer_t *buffer)
{
  HB_BUFFER_ALLOCATE_VAR (buffer, glyph_props);
  HB_BUFFER_ALLOCATE_VAR (buffer, lig_props);
  HB_BUFFER_ALLOCATE_VAR (buffer, syllable);

  const GDEF &gdef = *hb_ot_layout_from_face (font->face)->gdef;
  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++) {
    /* Reset ligature/syllable state; glyph class comes from GDEF. */
    buffer->info[i].lig_props() = buffer->info[i].syllable() = 0;
    buffer->info[i].glyph_props() = gdef.get_glyph_props (buffer->info[i].codepoint);
  }
}
/* Teardown counterpart of substitute_start; currently nothing to do
 * (the scratch vars stay allocated for the GPOS stage). */
void
GSUB::substitute_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
}
  1191. /* Out-of-class implementation for methods recursing */
  1192. inline bool ExtensionSubst::is_reverse (void) const
  1193. {
  1194. unsigned int type = get_type ();
  1195. if (unlikely (type == SubstLookupSubTable::Extension))
  1196. return CastR<ExtensionSubst> (get_subtable<SubstLookupSubTable>()).is_reverse ();
  1197. return SubstLookup::lookup_type_is_reverse (type);
  1198. }
  1199. template <typename context_t>
  1200. inline typename context_t::return_t SubstLookup::process_recurse_func (context_t *c, unsigned int lookup_index)
  1201. {
  1202. const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
  1203. const SubstLookup &l = gsub.get_lookup (lookup_index);
  1204. return l.process (c);
  1205. }
/* Recursion trampoline for apply: runs the referenced lookup once at the
 * current buffer position, saving and restoring the caller's lookup state
 * around the nested set_lookup()/apply_once(). */
inline bool SubstLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
  const SubstLookup &l = gsub.get_lookup (lookup_index);
  /* set_lookup() clobbers these; preserve them for the outer lookup. */
  unsigned int saved_lookup_props = c->lookup_props;
  unsigned int saved_property = c->property;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  c->property = saved_property;
  return ret;
}
  1218. } /* namespace OT */
  1219. #endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */