PageRenderTime 57ms CodeModel.GetById 22ms RepoModel.GetById 0ms app.codeStats 0ms

/source/libs/harfbuzz/harfbuzz-0.9.15/src/hb-ot-layout-gsub-table.hh

https://bitbucket.org/khaledhosny/xetex
C++ Header | 1412 lines | 1152 code | 196 blank | 64 comment | 184 complexity | f3331d3556c2fbc319e10092dc42f53a MD5 | raw file
Possible License(s): LGPL-2.1, LGPL-3.0, GPL-2.0, CPL-1.0
  1. /*
  2. * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
  3. * Copyright © 2010,2012 Google, Inc.
  4. *
  5. * This is part of HarfBuzz, a text shaping library.
  6. *
  7. * Permission is hereby granted, without written agreement and without
  8. * license or royalty fees, to use, copy, modify, and distribute this
  9. * software and its documentation for any purpose, provided that the
  10. * above copyright notice and the following two paragraphs appear in
  11. * all copies of this software.
  12. *
  13. * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14. * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15. * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16. * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17. * DAMAGE.
  18. *
  19. * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20. * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21. * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
  22. * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23. * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24. *
  25. * Red Hat Author(s): Behdad Esfahbod
  26. * Google Author(s): Behdad Esfahbod
  27. */
  28. #ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
  29. #define HB_OT_LAYOUT_GSUB_TABLE_HH
  30. #include "hb-ot-layout-gsubgpos-private.hh"
  31. namespace OT {
  32. struct SingleSubstFormat1
  33. {
  34. inline void closure (hb_closure_context_t *c) const
  35. {
  36. TRACE_CLOSURE (this);
  37. Coverage::Iter iter;
  38. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  39. hb_codepoint_t glyph_id = iter.get_glyph ();
  40. if (c->glyphs->has (glyph_id))
  41. c->glyphs->add ((glyph_id + deltaGlyphID) & 0xFFFF);
  42. }
  43. }
  44. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  45. {
  46. TRACE_COLLECT_GLYPHS (this);
  47. Coverage::Iter iter;
  48. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  49. hb_codepoint_t glyph_id = iter.get_glyph ();
  50. c->input->add (glyph_id);
  51. c->output->add ((glyph_id + deltaGlyphID) & 0xFFFF);
  52. }
  53. }
  54. inline const Coverage &get_coverage (void) const
  55. {
  56. return this+coverage;
  57. }
  58. inline bool would_apply (hb_would_apply_context_t *c) const
  59. {
  60. TRACE_WOULD_APPLY (this);
  61. return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  62. }
  63. inline bool apply (hb_apply_context_t *c) const
  64. {
  65. TRACE_APPLY (this);
  66. hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
  67. unsigned int index = (this+coverage).get_coverage (glyph_id);
  68. if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
  69. /* According to the Adobe Annotated OpenType Suite, result is always
  70. * limited to 16bit. */
  71. glyph_id = (glyph_id + deltaGlyphID) & 0xFFFF;
  72. c->replace_glyph (glyph_id);
  73. return TRACE_RETURN (true);
  74. }
  75. inline bool serialize (hb_serialize_context_t *c,
  76. Supplier<GlyphID> &glyphs,
  77. unsigned int num_glyphs,
  78. int delta)
  79. {
  80. TRACE_SERIALIZE (this);
  81. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  82. if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
  83. deltaGlyphID.set (delta); /* TODO(serilaize) overflow? */
  84. return TRACE_RETURN (true);
  85. }
  86. inline bool sanitize (hb_sanitize_context_t *c) {
  87. TRACE_SANITIZE (this);
  88. return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
  89. }
  90. protected:
  91. USHORT format; /* Format identifier--format = 1 */
  92. OffsetTo<Coverage>
  93. coverage; /* Offset to Coverage table--from
  94. * beginning of Substitution table */
  95. SHORT deltaGlyphID; /* Add to original GlyphID to get
  96. * substitute GlyphID */
  97. public:
  98. DEFINE_SIZE_STATIC (6);
  99. };
  100. struct SingleSubstFormat2
  101. {
  102. inline void closure (hb_closure_context_t *c) const
  103. {
  104. TRACE_CLOSURE (this);
  105. Coverage::Iter iter;
  106. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  107. if (c->glyphs->has (iter.get_glyph ()))
  108. c->glyphs->add (substitute[iter.get_coverage ()]);
  109. }
  110. }
  111. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  112. {
  113. TRACE_COLLECT_GLYPHS (this);
  114. Coverage::Iter iter;
  115. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  116. c->input->add (iter.get_glyph ());
  117. c->output->add (substitute[iter.get_coverage ()]);
  118. }
  119. }
  120. inline const Coverage &get_coverage (void) const
  121. {
  122. return this+coverage;
  123. }
  124. inline bool would_apply (hb_would_apply_context_t *c) const
  125. {
  126. TRACE_WOULD_APPLY (this);
  127. return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  128. }
  129. inline bool apply (hb_apply_context_t *c) const
  130. {
  131. TRACE_APPLY (this);
  132. hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
  133. unsigned int index = (this+coverage).get_coverage (glyph_id);
  134. if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
  135. if (unlikely (index >= substitute.len)) return TRACE_RETURN (false);
  136. glyph_id = substitute[index];
  137. c->replace_glyph (glyph_id);
  138. return TRACE_RETURN (true);
  139. }
  140. inline bool serialize (hb_serialize_context_t *c,
  141. Supplier<GlyphID> &glyphs,
  142. Supplier<GlyphID> &substitutes,
  143. unsigned int num_glyphs)
  144. {
  145. TRACE_SERIALIZE (this);
  146. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  147. if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return TRACE_RETURN (false);
  148. if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
  149. return TRACE_RETURN (true);
  150. }
  151. inline bool sanitize (hb_sanitize_context_t *c) {
  152. TRACE_SANITIZE (this);
  153. return TRACE_RETURN (coverage.sanitize (c, this) && substitute.sanitize (c));
  154. }
  155. protected:
  156. USHORT format; /* Format identifier--format = 2 */
  157. OffsetTo<Coverage>
  158. coverage; /* Offset to Coverage table--from
  159. * beginning of Substitution table */
  160. ArrayOf<GlyphID>
  161. substitute; /* Array of substitute
  162. * GlyphIDs--ordered by Coverage Index */
  163. public:
  164. DEFINE_SIZE_ARRAY (6, substitute);
  165. };
  166. struct SingleSubst
  167. {
  168. inline bool serialize (hb_serialize_context_t *c,
  169. Supplier<GlyphID> &glyphs,
  170. Supplier<GlyphID> &substitutes,
  171. unsigned int num_glyphs)
  172. {
  173. TRACE_SERIALIZE (this);
  174. if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
  175. unsigned int format = 2;
  176. int delta;
  177. if (num_glyphs) {
  178. format = 1;
  179. /* TODO(serialize) check for wrap-around */
  180. delta = substitutes[0] - glyphs[0];
  181. for (unsigned int i = 1; i < num_glyphs; i++)
  182. if (delta != substitutes[i] - glyphs[i]) {
  183. format = 2;
  184. break;
  185. }
  186. }
  187. u.format.set (format);
  188. switch (u.format) {
  189. case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs, delta));
  190. case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
  191. default:return TRACE_RETURN (false);
  192. }
  193. }
  194. template <typename context_t>
  195. inline typename context_t::return_t dispatch (context_t *c) const
  196. {
  197. TRACE_DISPATCH (this);
  198. switch (u.format) {
  199. case 1: return TRACE_RETURN (c->dispatch (u.format1));
  200. case 2: return TRACE_RETURN (c->dispatch (u.format2));
  201. default:return TRACE_RETURN (c->default_return_value ());
  202. }
  203. }
  204. inline bool sanitize (hb_sanitize_context_t *c) {
  205. TRACE_SANITIZE (this);
  206. if (!u.format.sanitize (c)) return TRACE_RETURN (false);
  207. switch (u.format) {
  208. case 1: return TRACE_RETURN (u.format1.sanitize (c));
  209. case 2: return TRACE_RETURN (u.format2.sanitize (c));
  210. default:return TRACE_RETURN (true);
  211. }
  212. }
  213. protected:
  214. union {
  215. USHORT format; /* Format identifier */
  216. SingleSubstFormat1 format1;
  217. SingleSubstFormat2 format2;
  218. } u;
  219. };
  220. struct Sequence
  221. {
  222. inline void closure (hb_closure_context_t *c) const
  223. {
  224. TRACE_CLOSURE (this);
  225. unsigned int count = substitute.len;
  226. for (unsigned int i = 0; i < count; i++)
  227. c->glyphs->add (substitute[i]);
  228. }
  229. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  230. {
  231. TRACE_COLLECT_GLYPHS (this);
  232. unsigned int count = substitute.len;
  233. for (unsigned int i = 0; i < count; i++)
  234. c->output->add (substitute[i]);
  235. }
  236. inline bool apply (hb_apply_context_t *c) const
  237. {
  238. TRACE_APPLY (this);
  239. if (unlikely (!substitute.len)) return TRACE_RETURN (false);
  240. unsigned int klass = c->buffer->cur().glyph_props() &
  241. HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE ? HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;
  242. unsigned int count = substitute.len;
  243. for (unsigned int i = 0; i < count; i++) {
  244. set_lig_props_for_component (c->buffer->cur(), i);
  245. c->output_glyph (substitute.array[i], klass);
  246. }
  247. c->buffer->skip_glyph ();
  248. return TRACE_RETURN (true);
  249. }
  250. inline bool serialize (hb_serialize_context_t *c,
  251. Supplier<GlyphID> &glyphs,
  252. unsigned int num_glyphs)
  253. {
  254. TRACE_SERIALIZE (this);
  255. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  256. if (unlikely (!substitute.serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
  257. return TRACE_RETURN (true);
  258. }
  259. inline bool sanitize (hb_sanitize_context_t *c) {
  260. TRACE_SANITIZE (this);
  261. return TRACE_RETURN (substitute.sanitize (c));
  262. }
  263. protected:
  264. ArrayOf<GlyphID>
  265. substitute; /* String of GlyphIDs to substitute */
  266. public:
  267. DEFINE_SIZE_ARRAY (2, substitute);
  268. };
  269. struct MultipleSubstFormat1
  270. {
  271. inline void closure (hb_closure_context_t *c) const
  272. {
  273. TRACE_CLOSURE (this);
  274. Coverage::Iter iter;
  275. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  276. if (c->glyphs->has (iter.get_glyph ()))
  277. (this+sequence[iter.get_coverage ()]).closure (c);
  278. }
  279. }
  280. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  281. {
  282. TRACE_COLLECT_GLYPHS (this);
  283. (this+coverage).add_coverage (c->input);
  284. unsigned int count = sequence.len;
  285. for (unsigned int i = 0; i < count; i++)
  286. (this+sequence[i]).collect_glyphs (c);
  287. }
  288. inline const Coverage &get_coverage (void) const
  289. {
  290. return this+coverage;
  291. }
  292. inline bool would_apply (hb_would_apply_context_t *c) const
  293. {
  294. TRACE_WOULD_APPLY (this);
  295. return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  296. }
  297. inline bool apply (hb_apply_context_t *c) const
  298. {
  299. TRACE_APPLY (this);
  300. unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
  301. if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
  302. return TRACE_RETURN ((this+sequence[index]).apply (c));
  303. }
  304. inline bool serialize (hb_serialize_context_t *c,
  305. Supplier<GlyphID> &glyphs,
  306. Supplier<unsigned int> &substitute_len_list,
  307. unsigned int num_glyphs,
  308. Supplier<GlyphID> &substitute_glyphs_list)
  309. {
  310. TRACE_SERIALIZE (this);
  311. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  312. if (unlikely (!sequence.serialize (c, num_glyphs))) return TRACE_RETURN (false);
  313. for (unsigned int i = 0; i < num_glyphs; i++)
  314. if (unlikely (!sequence[i].serialize (c, this).serialize (c,
  315. substitute_glyphs_list,
  316. substitute_len_list[i]))) return TRACE_RETURN (false);
  317. substitute_len_list.advance (num_glyphs);
  318. if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
  319. return TRACE_RETURN (true);
  320. }
  321. inline bool sanitize (hb_sanitize_context_t *c) {
  322. TRACE_SANITIZE (this);
  323. return TRACE_RETURN (coverage.sanitize (c, this) && sequence.sanitize (c, this));
  324. }
  325. protected:
  326. USHORT format; /* Format identifier--format = 1 */
  327. OffsetTo<Coverage>
  328. coverage; /* Offset to Coverage table--from
  329. * beginning of Substitution table */
  330. OffsetArrayOf<Sequence>
  331. sequence; /* Array of Sequence tables
  332. * ordered by Coverage Index */
  333. public:
  334. DEFINE_SIZE_ARRAY (6, sequence);
  335. };
  336. struct MultipleSubst
  337. {
  338. inline bool serialize (hb_serialize_context_t *c,
  339. Supplier<GlyphID> &glyphs,
  340. Supplier<unsigned int> &substitute_len_list,
  341. unsigned int num_glyphs,
  342. Supplier<GlyphID> &substitute_glyphs_list)
  343. {
  344. TRACE_SERIALIZE (this);
  345. if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
  346. unsigned int format = 1;
  347. u.format.set (format);
  348. switch (u.format) {
  349. case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, substitute_len_list, num_glyphs, substitute_glyphs_list));
  350. default:return TRACE_RETURN (false);
  351. }
  352. }
  353. template <typename context_t>
  354. inline typename context_t::return_t dispatch (context_t *c) const
  355. {
  356. TRACE_DISPATCH (this);
  357. switch (u.format) {
  358. case 1: return TRACE_RETURN (c->dispatch (u.format1));
  359. default:return TRACE_RETURN (c->default_return_value ());
  360. }
  361. }
  362. inline bool sanitize (hb_sanitize_context_t *c) {
  363. TRACE_SANITIZE (this);
  364. if (!u.format.sanitize (c)) return TRACE_RETURN (false);
  365. switch (u.format) {
  366. case 1: return TRACE_RETURN (u.format1.sanitize (c));
  367. default:return TRACE_RETURN (true);
  368. }
  369. }
  370. protected:
  371. union {
  372. USHORT format; /* Format identifier */
  373. MultipleSubstFormat1 format1;
  374. } u;
  375. };
  376. typedef ArrayOf<GlyphID> AlternateSet; /* Array of alternate GlyphIDs--in
  377. * arbitrary order */
  378. struct AlternateSubstFormat1
  379. {
  380. inline void closure (hb_closure_context_t *c) const
  381. {
  382. TRACE_CLOSURE (this);
  383. Coverage::Iter iter;
  384. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  385. if (c->glyphs->has (iter.get_glyph ())) {
  386. const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
  387. unsigned int count = alt_set.len;
  388. for (unsigned int i = 0; i < count; i++)
  389. c->glyphs->add (alt_set[i]);
  390. }
  391. }
  392. }
  393. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  394. {
  395. TRACE_COLLECT_GLYPHS (this);
  396. Coverage::Iter iter;
  397. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  398. c->input->add (iter.get_glyph ());
  399. const AlternateSet &alt_set = this+alternateSet[iter.get_coverage ()];
  400. unsigned int count = alt_set.len;
  401. for (unsigned int i = 0; i < count; i++)
  402. c->output->add (alt_set[i]);
  403. }
  404. }
  405. inline const Coverage &get_coverage (void) const
  406. {
  407. return this+coverage;
  408. }
  409. inline bool would_apply (hb_would_apply_context_t *c) const
  410. {
  411. TRACE_WOULD_APPLY (this);
  412. return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  413. }
  414. inline bool apply (hb_apply_context_t *c) const
  415. {
  416. TRACE_APPLY (this);
  417. hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
  418. unsigned int index = (this+coverage).get_coverage (glyph_id);
  419. if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
  420. const AlternateSet &alt_set = this+alternateSet[index];
  421. if (unlikely (!alt_set.len)) return TRACE_RETURN (false);
  422. hb_mask_t glyph_mask = c->buffer->cur().mask;
  423. hb_mask_t lookup_mask = c->lookup_mask;
  424. /* Note: This breaks badly if two features enabled this lookup together. */
  425. unsigned int shift = _hb_ctz (lookup_mask);
  426. unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
  427. if (unlikely (alt_index > alt_set.len || alt_index == 0)) return TRACE_RETURN (false);
  428. glyph_id = alt_set[alt_index - 1];
  429. c->replace_glyph (glyph_id);
  430. return TRACE_RETURN (true);
  431. }
  432. inline bool serialize (hb_serialize_context_t *c,
  433. Supplier<GlyphID> &glyphs,
  434. Supplier<unsigned int> &alternate_len_list,
  435. unsigned int num_glyphs,
  436. Supplier<GlyphID> &alternate_glyphs_list)
  437. {
  438. TRACE_SERIALIZE (this);
  439. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  440. if (unlikely (!alternateSet.serialize (c, num_glyphs))) return TRACE_RETURN (false);
  441. for (unsigned int i = 0; i < num_glyphs; i++)
  442. if (unlikely (!alternateSet[i].serialize (c, this).serialize (c,
  443. alternate_glyphs_list,
  444. alternate_len_list[i]))) return TRACE_RETURN (false);
  445. alternate_len_list.advance (num_glyphs);
  446. if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
  447. return TRACE_RETURN (true);
  448. }
  449. inline bool sanitize (hb_sanitize_context_t *c) {
  450. TRACE_SANITIZE (this);
  451. return TRACE_RETURN (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
  452. }
  453. protected:
  454. USHORT format; /* Format identifier--format = 1 */
  455. OffsetTo<Coverage>
  456. coverage; /* Offset to Coverage table--from
  457. * beginning of Substitution table */
  458. OffsetArrayOf<AlternateSet>
  459. alternateSet; /* Array of AlternateSet tables
  460. * ordered by Coverage Index */
  461. public:
  462. DEFINE_SIZE_ARRAY (6, alternateSet);
  463. };
  464. struct AlternateSubst
  465. {
  466. inline bool serialize (hb_serialize_context_t *c,
  467. Supplier<GlyphID> &glyphs,
  468. Supplier<unsigned int> &alternate_len_list,
  469. unsigned int num_glyphs,
  470. Supplier<GlyphID> &alternate_glyphs_list)
  471. {
  472. TRACE_SERIALIZE (this);
  473. if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
  474. unsigned int format = 1;
  475. u.format.set (format);
  476. switch (u.format) {
  477. case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, alternate_len_list, num_glyphs, alternate_glyphs_list));
  478. default:return TRACE_RETURN (false);
  479. }
  480. }
  481. template <typename context_t>
  482. inline typename context_t::return_t dispatch (context_t *c) const
  483. {
  484. TRACE_DISPATCH (this);
  485. switch (u.format) {
  486. case 1: return TRACE_RETURN (c->dispatch (u.format1));
  487. default:return TRACE_RETURN (c->default_return_value ());
  488. }
  489. }
  490. inline bool sanitize (hb_sanitize_context_t *c) {
  491. TRACE_SANITIZE (this);
  492. if (!u.format.sanitize (c)) return TRACE_RETURN (false);
  493. switch (u.format) {
  494. case 1: return TRACE_RETURN (u.format1.sanitize (c));
  495. default:return TRACE_RETURN (true);
  496. }
  497. }
  498. protected:
  499. union {
  500. USHORT format; /* Format identifier */
  501. AlternateSubstFormat1 format1;
  502. } u;
  503. };
  504. struct Ligature
  505. {
  506. inline void closure (hb_closure_context_t *c) const
  507. {
  508. TRACE_CLOSURE (this);
  509. unsigned int count = component.len;
  510. for (unsigned int i = 1; i < count; i++)
  511. if (!c->glyphs->has (component[i]))
  512. return;
  513. c->glyphs->add (ligGlyph);
  514. }
  515. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  516. {
  517. TRACE_COLLECT_GLYPHS (this);
  518. unsigned int count = component.len;
  519. for (unsigned int i = 1; i < count; i++)
  520. c->input->add (component[i]);
  521. c->output->add (ligGlyph);
  522. }
  523. inline bool would_apply (hb_would_apply_context_t *c) const
  524. {
  525. TRACE_WOULD_APPLY (this);
  526. if (c->len != component.len)
  527. return TRACE_RETURN (false);
  528. for (unsigned int i = 1; i < c->len; i++)
  529. if (likely (c->glyphs[i] != component[i]))
  530. return TRACE_RETURN (false);
  531. return TRACE_RETURN (true);
  532. }
  533. inline bool apply (hb_apply_context_t *c) const
  534. {
  535. TRACE_APPLY (this);
  536. unsigned int count = component.len;
  537. if (unlikely (count < 1)) return TRACE_RETURN (false);
  538. unsigned int end_offset = 0;
  539. bool is_mark_ligature = false;
  540. unsigned int total_component_count = 0;
  541. if (likely (!match_input (c, count,
  542. &component[1],
  543. match_glyph,
  544. NULL,
  545. &end_offset,
  546. &is_mark_ligature,
  547. &total_component_count)))
  548. return TRACE_RETURN (false);
  549. /* Deal, we are forming the ligature. */
  550. c->buffer->merge_clusters (c->buffer->idx, c->buffer->idx + end_offset);
  551. ligate_input (c,
  552. count,
  553. &component[1],
  554. match_glyph,
  555. NULL,
  556. ligGlyph,
  557. is_mark_ligature,
  558. total_component_count);
  559. return TRACE_RETURN (true);
  560. }
  561. inline bool serialize (hb_serialize_context_t *c,
  562. GlyphID ligature,
  563. Supplier<GlyphID> &components, /* Starting from second */
  564. unsigned int num_components /* Including first component */)
  565. {
  566. TRACE_SERIALIZE (this);
  567. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  568. ligGlyph = ligature;
  569. if (unlikely (!component.serialize (c, components, num_components))) return TRACE_RETURN (false);
  570. return TRACE_RETURN (true);
  571. }
  572. public:
  573. inline bool sanitize (hb_sanitize_context_t *c) {
  574. TRACE_SANITIZE (this);
  575. return TRACE_RETURN (ligGlyph.sanitize (c) && component.sanitize (c));
  576. }
  577. protected:
  578. GlyphID ligGlyph; /* GlyphID of ligature to substitute */
  579. HeadlessArrayOf<GlyphID>
  580. component; /* Array of component GlyphIDs--start
  581. * with the second component--ordered
  582. * in writing direction */
  583. public:
  584. DEFINE_SIZE_ARRAY (4, component);
  585. };
  586. struct LigatureSet
  587. {
  588. inline void closure (hb_closure_context_t *c) const
  589. {
  590. TRACE_CLOSURE (this);
  591. unsigned int num_ligs = ligature.len;
  592. for (unsigned int i = 0; i < num_ligs; i++)
  593. (this+ligature[i]).closure (c);
  594. }
  595. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  596. {
  597. TRACE_COLLECT_GLYPHS (this);
  598. unsigned int num_ligs = ligature.len;
  599. for (unsigned int i = 0; i < num_ligs; i++)
  600. (this+ligature[i]).collect_glyphs (c);
  601. }
  602. inline bool would_apply (hb_would_apply_context_t *c) const
  603. {
  604. TRACE_WOULD_APPLY (this);
  605. unsigned int num_ligs = ligature.len;
  606. for (unsigned int i = 0; i < num_ligs; i++)
  607. {
  608. const Ligature &lig = this+ligature[i];
  609. if (lig.would_apply (c))
  610. return TRACE_RETURN (true);
  611. }
  612. return TRACE_RETURN (false);
  613. }
  614. inline bool apply (hb_apply_context_t *c) const
  615. {
  616. TRACE_APPLY (this);
  617. unsigned int num_ligs = ligature.len;
  618. for (unsigned int i = 0; i < num_ligs; i++)
  619. {
  620. const Ligature &lig = this+ligature[i];
  621. if (lig.apply (c)) return TRACE_RETURN (true);
  622. }
  623. return TRACE_RETURN (false);
  624. }
  625. inline bool serialize (hb_serialize_context_t *c,
  626. Supplier<GlyphID> &ligatures,
  627. Supplier<unsigned int> &component_count_list,
  628. unsigned int num_ligatures,
  629. Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  630. {
  631. TRACE_SERIALIZE (this);
  632. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  633. if (unlikely (!ligature.serialize (c, num_ligatures))) return TRACE_RETURN (false);
  634. for (unsigned int i = 0; i < num_ligatures; i++)
  635. if (unlikely (!ligature[i].serialize (c, this).serialize (c,
  636. ligatures[i],
  637. component_list,
  638. component_count_list[i]))) return TRACE_RETURN (false);
  639. ligatures.advance (num_ligatures);
  640. component_count_list.advance (num_ligatures);
  641. return TRACE_RETURN (true);
  642. }
  643. inline bool sanitize (hb_sanitize_context_t *c) {
  644. TRACE_SANITIZE (this);
  645. return TRACE_RETURN (ligature.sanitize (c, this));
  646. }
  647. protected:
  648. OffsetArrayOf<Ligature>
  649. ligature; /* Array LigatureSet tables
  650. * ordered by preference */
  651. public:
  652. DEFINE_SIZE_ARRAY (2, ligature);
  653. };
  654. struct LigatureSubstFormat1
  655. {
  656. inline void closure (hb_closure_context_t *c) const
  657. {
  658. TRACE_CLOSURE (this);
  659. Coverage::Iter iter;
  660. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  661. if (c->glyphs->has (iter.get_glyph ()))
  662. (this+ligatureSet[iter.get_coverage ()]).closure (c);
  663. }
  664. }
  665. inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  666. {
  667. TRACE_COLLECT_GLYPHS (this);
  668. Coverage::Iter iter;
  669. for (iter.init (this+coverage); iter.more (); iter.next ()) {
  670. c->input->add (iter.get_glyph ());
  671. (this+ligatureSet[iter.get_coverage ()]).collect_glyphs (c);
  672. }
  673. }
  674. inline const Coverage &get_coverage (void) const
  675. {
  676. return this+coverage;
  677. }
  678. inline bool would_apply (hb_would_apply_context_t *c) const
  679. {
  680. TRACE_WOULD_APPLY (this);
  681. unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
  682. if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
  683. const LigatureSet &lig_set = this+ligatureSet[index];
  684. return TRACE_RETURN (lig_set.would_apply (c));
  685. }
  686. inline bool apply (hb_apply_context_t *c) const
  687. {
  688. TRACE_APPLY (this);
  689. hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
  690. unsigned int index = (this+coverage).get_coverage (glyph_id);
  691. if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
  692. const LigatureSet &lig_set = this+ligatureSet[index];
  693. return TRACE_RETURN (lig_set.apply (c));
  694. }
  695. inline bool serialize (hb_serialize_context_t *c,
  696. Supplier<GlyphID> &first_glyphs,
  697. Supplier<unsigned int> &ligature_per_first_glyph_count_list,
  698. unsigned int num_first_glyphs,
  699. Supplier<GlyphID> &ligatures_list,
  700. Supplier<unsigned int> &component_count_list,
  701. Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  702. {
  703. TRACE_SERIALIZE (this);
  704. if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
  705. if (unlikely (!ligatureSet.serialize (c, num_first_glyphs))) return TRACE_RETURN (false);
  706. for (unsigned int i = 0; i < num_first_glyphs; i++)
  707. if (unlikely (!ligatureSet[i].serialize (c, this).serialize (c,
  708. ligatures_list,
  709. component_count_list,
  710. ligature_per_first_glyph_count_list[i],
  711. component_list))) return TRACE_RETURN (false);
  712. ligature_per_first_glyph_count_list.advance (num_first_glyphs);
  713. if (unlikely (!coverage.serialize (c, this).serialize (c, first_glyphs, num_first_glyphs))) return TRACE_RETURN (false);
  714. return TRACE_RETURN (true);
  715. }
  716. inline bool sanitize (hb_sanitize_context_t *c) {
  717. TRACE_SANITIZE (this);
  718. return TRACE_RETURN (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
  719. }
  720. protected:
  721. USHORT format; /* Format identifier--format = 1 */
  722. OffsetTo<Coverage>
  723. coverage; /* Offset to Coverage table--from
  724. * beginning of Substitution table */
  725. OffsetArrayOf<LigatureSet>
  726. ligatureSet; /* Array LigatureSet tables
  727. * ordered by Coverage Index */
  728. public:
  729. DEFINE_SIZE_ARRAY (6, ligatureSet);
  730. };
  731. struct LigatureSubst
  732. {
  733. inline bool serialize (hb_serialize_context_t *c,
  734. Supplier<GlyphID> &first_glyphs,
  735. Supplier<unsigned int> &ligature_per_first_glyph_count_list,
  736. unsigned int num_first_glyphs,
  737. Supplier<GlyphID> &ligatures_list,
  738. Supplier<unsigned int> &component_count_list,
  739. Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  740. {
  741. TRACE_SERIALIZE (this);
  742. if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
  743. unsigned int format = 1;
  744. u.format.set (format);
  745. switch (u.format) {
  746. case 1: return TRACE_RETURN (u.format1.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
  747. ligatures_list, component_count_list, component_list));
  748. default:return TRACE_RETURN (false);
  749. }
  750. }
  751. template <typename context_t>
  752. inline typename context_t::return_t dispatch (context_t *c) const
  753. {
  754. TRACE_DISPATCH (this);
  755. switch (u.format) {
  756. case 1: return TRACE_RETURN (c->dispatch (u.format1));
  757. default:return TRACE_RETURN (c->default_return_value ());
  758. }
  759. }
  760. inline bool sanitize (hb_sanitize_context_t *c) {
  761. TRACE_SANITIZE (this);
  762. if (!u.format.sanitize (c)) return TRACE_RETURN (false);
  763. switch (u.format) {
  764. case 1: return TRACE_RETURN (u.format1.sanitize (c));
  765. default:return TRACE_RETURN (true);
  766. }
  767. }
  768. protected:
  769. union {
  770. USHORT format; /* Format identifier */
  771. LigatureSubstFormat1 format1;
  772. } u;
  773. };
  774. struct ContextSubst : Context {};
  775. struct ChainContextSubst : ChainContext {};
  776. struct ExtensionSubst : Extension<ExtensionSubst>
  777. {
  778. typedef struct SubstLookupSubTable LookupSubTable;
  779. inline bool is_reverse (void) const;
  780. };
/* GSUB LookupType 8 format 1: Reverse Chaining Contextual Single Substitution.
 * Replaces one glyph with one glyph, matching backtrack and lookahead coverage
 * context, and is applied to the buffer back-to-front.
 *
 * NOTE: this struct is memory-mapped onto font data.  The backtrack, lookahead
 * and substitute arrays are variable-sized and laid out consecutively, so only
 * `backtrack` has a fixed offset; the others are located with StructAfter. */
struct ReverseChainSingleSubstFormat1
{
  /* Add to c->glyphs every substitute glyph this subtable could produce,
   * but only if every backtrack/lookahead coverage intersects c->glyphs
   * (otherwise the context can never be satisfied). */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int count;

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+backtrack[i]).intersects (c->glyphs))
        return;

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+lookahead[i]).intersects (c->glyphs))
        return;

    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    Coverage::Iter iter;
    for (iter.init (this+coverage); iter.more (); iter.next ()) {
      if (c->glyphs->has (iter.get_glyph ()))
        c->glyphs->add (substitute[iter.get_coverage ()]);
    }
  }

  /* Unconditionally collect all glyphs referenced by this subtable into
   * the context's input/before/after/output sets. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int count;

    (this+coverage).add_coverage (c->input);

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      (this+backtrack[i]).add_coverage (c->before);

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      (this+lookahead[i]).add_coverage (c->after);

    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    count = substitute.len;
    for (unsigned int i = 0; i < count; i++)
      c->output->add (substitute[i]);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  /* Would-apply only checks the single input glyph against coverage;
   * backtrack/lookahead context is not consulted here. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);
    return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Reverse-chaining subtables may not be reached via recursion from
     * another lookup; reject unless we are at the top nesting level. */
    if (unlikely (c->nesting_level_left != MAX_NESTING_LEVEL))
      return TRACE_RETURN (false); /* No chaining to this type */

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);

    if (match_backtrack (c,
                         backtrack.len, (USHORT *) backtrack.array,
                         match_coverage, this) &&
        match_lookahead (c,
                         lookahead.len, (USHORT *) lookahead.array,
                         match_coverage, this,
                         1))
    {
      /* In-place replacement; then step backwards since this lookup
       * walks the buffer in reverse. */
      c->replace_glyph_inplace (substitute[index]);
      c->buffer->idx--; /* Reverse! */
      return TRACE_RETURN (true);
    }

    return TRACE_RETURN (false);
  }

  /* Sanitize in layout order: each trailing array can only be located
   * (via StructAfter) once the preceding one has been validated. */
  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
      return TRACE_RETURN (false);
    OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!lookahead.sanitize (c, this))
      return TRACE_RETURN (false);
    ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
    return TRACE_RETURN (substitute.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<GlyphID>
		substituteX;		/* Array of substitute
					 * GlyphIDs--ordered by Coverage Index */
  public:
  /* lookaheadX/substituteX are placeholders: their real offsets are
   * variable, hence the MIN size and the StructAfter accesses above. */
  DEFINE_SIZE_MIN (10);
};
/* Format-dispatch wrapper for LookupType 8.  Only format 1 is defined;
 * unknown formats are treated as no-ops (default_return_value / true). */
struct ReverseChainSingleSubst
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    /* The format tag must be readable before the union can be interpreted. */
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ReverseChainSingleSubstFormat1	format1;
  } u;
};
  906. /*
  907. * SubstLookup
  908. */
/* A single GSUB lookup subtable: a tagged union over all eight GSUB lookup
 * types.  The active member is NOT stored here — it is the parent lookup's
 * type, which callers must pass in to dispatch()/sanitize(). */
struct SubstLookupSubTable
{
  friend struct SubstLookup;

  /* GSUB lookup type values, per the OpenType GSUB specification. */
  enum Type {
    Single		= 1,
    Multiple		= 2,
    Alternate		= 3,
    Ligature		= 4,
    Context		= 5,
    ChainContext	= 6,
    Extension		= 7,
    ReverseChainSingle	= 8
  };

  /* Route the context operation to the union member selected by lookup_type.
   * Unknown types yield the context's default (no-op) result. */
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this);
    switch (lookup_type) {
    case Single:		return TRACE_RETURN (u.single.dispatch (c));
    case Multiple:		return TRACE_RETURN (u.multiple.dispatch (c));
    case Alternate:		return TRACE_RETURN (u.alternate.dispatch (c));
    case Ligature:		return TRACE_RETURN (u.ligature.dispatch (c));
    case Context:		return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext:		return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:		return TRACE_RETURN (u.extension.dispatch (c));
    case ReverseChainSingle:	return TRACE_RETURN (u.reverseChainContextSingle.dispatch (c));
    default:			return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    /* Validate the leading format USHORT first; every member starts with one. */
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:		return TRACE_RETURN (u.single.sanitize (c));
    case Multiple:		return TRACE_RETURN (u.multiple.sanitize (c));
    case Alternate:		return TRACE_RETURN (u.alternate.sanitize (c));
    case Ligature:		return TRACE_RETURN (u.ligature.sanitize (c));
    case Context:		return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:		return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:		return TRACE_RETURN (u.extension.sanitize (c));
    case ReverseChainSingle:	return TRACE_RETURN (u.reverseChainContextSingle.sanitize (c));
    default:			return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT	sub_format;
  } header;
  SingleSubst			single;
  MultipleSubst			multiple;
  AlternateSubst		alternate;
  LigatureSubst			ligature;
  ContextSubst			context;
  ChainContextSubst		chainContext;
  ExtensionSubst		extension;
  ReverseChainSingleSubst	reverseChainContextSingle;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};
/* A GSUB lookup: a typed view over the generic Lookup, providing glyph
 * substitution (closure, collection, would-apply, apply) plus serialization
 * helpers for building Single/Multiple/Alternate/Ligature lookups. */
struct SubstLookup : Lookup
{
  /* Typed accessor: reinterpret the generic subTable array as GSUB subtables. */
  inline const SubstLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable)[i]; }

  inline static bool lookup_type_is_reverse (unsigned int lookup_type)
  { return lookup_type == SubstLookupSubTable::ReverseChainSingle; }

  /* True if this lookup processes the buffer back-to-front.  Extension
   * lookups defer to their wrapped type (all subtables share one type —
   * enforced in sanitize() below — so checking subtable 0 suffices). */
  inline bool is_reverse (void) const
  {
    unsigned int type = get_type ();
    if (unlikely (type == SubstLookupSubTable::Extension))
      return CastR<ExtensionSubst> (get_subtable(0)).is_reverse ();
    return lookup_type_is_reverse (type);
  }

  /* Glyph closure: expand c->glyphs with everything this lookup can produce. */
  inline hb_closure_context_t::return_t closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    c->set_recurse_func (dispatch_recurse_func<hb_closure_context_t>);
    return TRACE_RETURN (dispatch (c));
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs_lookup (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
    return TRACE_RETURN (dispatch (c));
  }

  /* Union of all subtables' coverage into `glyphs`.  Consecutive subtables
   * may report the same Coverage object; `last` skips those duplicates. */
  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  inline bool would_apply (hb_would_apply_context_t *c, const hb_set_digest_t *digest) const
  {
    TRACE_WOULD_APPLY (this);
    if (unlikely (!c->len)) return TRACE_RETURN (false);
    /* Cheap bloom-filter style rejection before dispatching to subtables. */
    if (!digest->may_have (c->glyphs[0])) return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  /* Try to apply at the buffer's current position only. */
  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  /* Apply this lookup across the whole buffer.  Returns true if any
   * substitution happened. */
  inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const
  {
    bool ret = false;

    if (unlikely (!c->buffer->len || !c->lookup_mask))
      return false;

    c->set_recurse_func (apply_recurse_func);
    c->set_lookup (*this);

    if (likely (!is_reverse ()))
    {
      /* in/out forward substitution */
      c->buffer->clear_output ();
      c->buffer->idx = 0;
      while (c->buffer->idx < c->buffer->len)
      {
        /* On success apply_once is responsible for advancing idx;
         * on failure (or mask/digest rejection) copy the glyph through. */
        if ((c->buffer->cur().mask & c->lookup_mask) &&
            digest->may_have (c->buffer->cur().codepoint) &&
            apply_once (c))
          ret = true;
        else
          c->buffer->next_glyph ();
      }
      if (ret)
        c->buffer->swap_buffers ();
    }
    else
    {
      /* in-place backward substitution */
      c->buffer->remove_output ();
      c->buffer->idx = c->buffer->len - 1;
      do
      {
        /* A successful reverse substitution decrements idx itself
         * (see ReverseChainSingleSubstFormat1::apply). */
        if ((c->buffer->cur().mask & c->lookup_mask) &&
            digest->may_have (c->buffer->cur().codepoint) &&
            apply_once (c))
          ret = true;
        else
          c->buffer->idx--;
      }
      /* idx is unsigned; the cast detects the wrap past zero.  TODO confirm
       * idx width vs int on all targets. */
      while ((int) c->buffer->idx >= 0);
    }

    return ret;
  }

  inline SubstLookupSubTable& serialize_subtable (hb_serialize_context_t *c,
                                                  unsigned int i)
  { return CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable)[i].serialize (c, this); }

  /* Build a complete single-substitution lookup with one subtable. */
  inline bool serialize_single (hb_serialize_context_t *c,
                                uint32_t lookup_props,
                                Supplier<GlyphID> &glyphs,
                                Supplier<GlyphID> &substitutes,
                                unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Single, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes, num_glyphs));
  }

  /* Build a complete multiple-substitution (one-to-many) lookup. */
  inline bool serialize_multiple (hb_serialize_context_t *c,
                                  uint32_t lookup_props,
                                  Supplier<GlyphID> &glyphs,
                                  Supplier<unsigned int> &substitute_len_list,
                                  unsigned int num_glyphs,
                                  Supplier<GlyphID> &substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Multiple, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.multiple.serialize (c, glyphs, substitute_len_list, num_glyphs,
                                                                         substitute_glyphs_list));
  }

  /* Build a complete alternate-substitution lookup. */
  inline bool serialize_alternate (hb_serialize_context_t *c,
                                   uint32_t lookup_props,
                                   Supplier<GlyphID> &glyphs,
                                   Supplier<unsigned int> &alternate_len_list,
                                   unsigned int num_glyphs,
                                   Supplier<GlyphID> &alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Alternate, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.alternate.serialize (c, glyphs, alternate_len_list, num_glyphs,
                                                                          alternate_glyphs_list));
  }

  /* Build a complete ligature-substitution (many-to-one) lookup. */
  inline bool serialize_ligature (hb_serialize_context_t *c,
                                  uint32_t lookup_props,
                                  Supplier<GlyphID> &first_glyphs,
                                  Supplier<unsigned int> &ligature_per_first_glyph_count_list,
                                  unsigned int num_first_glyphs,
                                  Supplier<GlyphID> &ligatures_list,
                                  Supplier<unsigned int> &component_count_list,
                                  Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Ligature, lookup_props, 1))) return TRACE_RETURN (false);
    return TRACE_RETURN (serialize_subtable (c, 0).u.ligature.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
                                                                         ligatures_list, component_count_list, component_list));
  }

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  /* Run the context over each subtable until one asks to stop. */
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c)
  {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<SubstLookupSubTable> &list = CastR<OffsetArrayOf<SubstLookupSubTable> > (subTable);
    if (unlikely (!list.sanitize (c, this, get_type ()))) return TRACE_RETURN (false);

    if (unlikely (get_type () == SubstLookupSubTable::Extension))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type.  This is specially important if one has
       * a reverse type! */
      unsigned int type = get_subtable (0).u.extension.get_type ();
      unsigned int count = get_subtable_count ();
      for (unsigned int i = 1; i < count; i++)
        if (get_subtable (i).u.extension.get_type () != type)
          return TRACE_RETURN (false);
    }
    return TRACE_RETURN (true);
  }
};
/* Offset-indexed list of all substitution lookups in a GSUB table. */
typedef OffsetListOf<SubstLookup> SubstLookupList;
  1154. /*
  1155. * GSUB -- The Glyph Substitution Table
  1156. */
/* Top-level GSUB table: shares the header layout with GPOS (via GSUBGPOS)
 * and re-types the lookup list as substitution lookups. */
struct GSUB : GSUBGPOS
{
  static const hb_tag_t Tag	= HB_OT_TAG_GSUB;

  inline const SubstLookup& get_lookup (unsigned int i) const
  { return CastR<SubstLookup> (GSUBGPOS::get_lookup (i)); }

  /* Buffer setup/teardown hooks around a substitution pass; defined below. */
  static inline void substitute_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void substitute_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    /* Header first, then the lookup list it points to. */
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<SubstLookupList> &list = CastR<OffsetTo<SubstLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};
  1173. void
  1174. GSUB::substitute_start (hb_font_t *font, hb_buffer_t *buffer)
  1175. {
  1176. HB_BUFFER_ALLOCATE_VAR (buffer, glyph_props);
  1177. HB_BUFFER_ALLOCATE_VAR (buffer, lig_props);
  1178. HB_BUFFER_ALLOCATE_VAR (buffer, syllable);
  1179. const GDEF &gdef = *hb_ot_layout_from_face (font->face)->gdef;
  1180. unsigned int count = buffer->len;
  1181. for (unsigned int i = 0; i < count; i++) {
  1182. buffer->info[i].lig_props() = buffer->info[i].syllable() = 0;
  1183. buffer->info[i].glyph_props() = gdef.get_glyph_props (buffer->info[i].codepoint);
  1184. }
  1185. }
/* Counterpart to substitute_start.  Currently a no-op: the scratch
 * variables stay allocated for later pipeline stages. */
void
GSUB::substitute_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
}
  1190. /* Out-of-class implementation for methods recursing */
  1191. inline bool ExtensionSubst::is_reverse (void) const
  1192. {
  1193. unsigned int type = get_type ();
  1194. if (unlikely (type == SubstLookupSubTable::Extension))
  1195. return CastR<ExtensionSubst> (get_subtable<SubstLookupSubTable>()).is_reverse ();
  1196. return SubstLookup::lookup_type_is_reverse (type);
  1197. }
  1198. template <typename context_t>
  1199. inline typename context_t::return_t SubstLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
  1200. {
  1201. const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
  1202. const SubstLookup &l = gsub.get_lookup (lookup_index);
  1203. return l.dispatch (c);
  1204. }
  1205. inline bool SubstLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
  1206. {
  1207. const GSUB &gsub = *(hb_ot_layout_from_face (c->face)->gsub);
  1208. const SubstLookup &l = gsub.get_lookup (lookup_index);
  1209. unsigned int saved_lookup_props = c->lookup_props;
  1210. c->set_lookup (l);
  1211. bool ret = l.apply_once (c);
  1212. c->lookup_props = saved_lookup_props;
  1213. return ret;
  1214. }
  1215. } /* namespace OT */
  1216. #endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */