|
1 /* |
|
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. |
|
3 * Copyright © 2010,2012 Google, Inc. |
|
4 * |
|
5 * This is part of HarfBuzz, a text shaping library. |
|
6 * |
|
7 * Permission is hereby granted, without written agreement and without |
|
8 * license or royalty fees, to use, copy, modify, and distribute this |
|
9 * software and its documentation for any purpose, provided that the |
|
10 * above copyright notice and the following two paragraphs appear in |
|
11 * all copies of this software. |
|
12 * |
|
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
|
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
|
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
|
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
|
17 * DAMAGE. |
|
18 * |
|
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
|
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
|
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
|
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
|
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
|
24 * |
|
25 * Red Hat Author(s): Behdad Esfahbod |
|
26 * Google Author(s): Behdad Esfahbod |
|
27 */ |
|
28 |
|
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH |
|
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH |
|
31 |
|
32 #include "hb-buffer-private.hh" |
|
33 #include "hb-ot-layout-gdef-table.hh" |
|
34 #include "hb-set-private.hh" |
|
35 |
|
36 |
|
37 namespace OT { |
|
38 |
|
39 |
|
/* Debug level for the closure operation; overridable at build time. */
#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

/* Declares a scoped hb_auto_trace_t that logs entry/exit of the enclosing
 * function when HB_DEBUG_CLOSURE is enabled; expects a `c` context in scope. */
#define TRACE_CLOSURE(this) \
	hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");
|
48 |
|
49 struct hb_closure_context_t : |
|
50 hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE> |
|
51 { |
|
52 inline const char *get_name (void) { return "CLOSURE"; } |
|
53 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index); |
|
54 template <typename T> |
|
55 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; } |
|
56 static return_t default_return_value (void) { return HB_VOID; } |
|
57 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } |
|
58 return_t recurse (unsigned int lookup_index) |
|
59 { |
|
60 if (unlikely (nesting_level_left == 0 || !recurse_func)) |
|
61 return default_return_value (); |
|
62 |
|
63 nesting_level_left--; |
|
64 recurse_func (this, lookup_index); |
|
65 nesting_level_left++; |
|
66 return HB_VOID; |
|
67 } |
|
68 |
|
69 hb_face_t *face; |
|
70 hb_set_t *glyphs; |
|
71 recurse_func_t recurse_func; |
|
72 unsigned int nesting_level_left; |
|
73 unsigned int debug_depth; |
|
74 |
|
75 hb_closure_context_t (hb_face_t *face_, |
|
76 hb_set_t *glyphs_, |
|
77 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : |
|
78 face (face_), |
|
79 glyphs (glyphs_), |
|
80 recurse_func (NULL), |
|
81 nesting_level_left (nesting_level_left_), |
|
82 debug_depth (0) {} |
|
83 |
|
84 void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
|
85 }; |
|
86 |
|
87 |
|
88 |
|
/* Debug level for the would_apply operation; overridable at build time. */
#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif

/* Declares a scoped hb_auto_trace_t logging the glyph count of the query;
 * expects a `c` hb_would_apply_context_t in scope. */
#define TRACE_WOULD_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "%d glyphs", c->len);
|
97 |
|
98 struct hb_would_apply_context_t : |
|
99 hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY> |
|
100 { |
|
101 inline const char *get_name (void) { return "WOULD_APPLY"; } |
|
102 template <typename T> |
|
103 inline return_t dispatch (const T &obj) { return obj.would_apply (this); } |
|
104 static return_t default_return_value (void) { return false; } |
|
105 bool stop_sublookup_iteration (return_t r) const { return r; } |
|
106 |
|
107 hb_face_t *face; |
|
108 const hb_codepoint_t *glyphs; |
|
109 unsigned int len; |
|
110 bool zero_context; |
|
111 unsigned int debug_depth; |
|
112 |
|
113 hb_would_apply_context_t (hb_face_t *face_, |
|
114 const hb_codepoint_t *glyphs_, |
|
115 unsigned int len_, |
|
116 bool zero_context_) : |
|
117 face (face_), |
|
118 glyphs (glyphs_), |
|
119 len (len_), |
|
120 zero_context (zero_context_), |
|
121 debug_depth (0) {} |
|
122 }; |
|
123 |
|
124 |
|
125 |
|
/* Debug level for the collect_glyphs operation; overridable at build time. */
#ifndef HB_DEBUG_COLLECT_GLYPHS
#define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
#endif

/* Declares a scoped hb_auto_trace_t logging entry/exit; expects a `c`
 * hb_collect_glyphs_context_t in scope. */
#define TRACE_COLLECT_GLYPHS(this) \
	hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");
|
134 |
|
/* Dispatch context for collect_glyphs: walks lookups and accumulates the
 * glyphs a lookup may consult (before/input/after) and may emit (output)
 * into the caller-provided sets.  Any of the four sets may be the shared
 * empty set, which doubles as a "not requested" sink. */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  /* Recurse into a sub-lookup, collecting only its *output* glyphs; the
   * context sets are temporarily redirected to the empty sink meanwhile. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
     * past the previous check. For GSUB, we only want to collect the output
     * glyphs in the recursion. If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct. A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built. It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    /* Pointer identity check: `output` being the shared empty set means the
     * caller never asked for output glyphs. */
    if (output == hb_set_get_empty ())
      return HB_VOID;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups.has (lookup_index))
      return HB_VOID;

    /* Redirect before/input/after to the empty sink so the recursed lookup
     * contributes only to `output`; restored below. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    /* Memoize so the same lookup is not walked twice. */
    recursed_lookups.add (lookup_index);

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;             /* Glyphs matchable as backtrack context. */
  hb_set_t *input;              /* Glyphs matchable as input. */
  hb_set_t *after;              /* Glyphs matchable as lookahead context. */
  hb_set_t *output;             /* Glyphs the lookup may produce. */
  recurse_func_t recurse_func;
  hb_set_t recursed_lookups;    /* Lookup indices already visited. */
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t  *glyphs_before, /* OUT. May be NULL */
			       hb_set_t  *glyphs_input,  /* OUT. May be NULL */
			       hb_set_t  *glyphs_after,  /* OUT. May be NULL */
			       hb_set_t  *glyphs_output, /* OUT. May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
			      after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (NULL),
			      recursed_lookups (),
			      nesting_level_left (nesting_level_left_),
			      debug_depth (0)
  {
    recursed_lookups.init ();
  }
  ~hb_collect_glyphs_context_t (void)
  {
    recursed_lookups.fini ();
  }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
|
219 |
|
220 |
|
221 |
|
/* Debug level for the get_coverage dispatch; overridable at build time. */
#ifndef HB_DEBUG_GET_COVERAGE
#define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
#endif
|
225 |
|
226 /* XXX Can we remove this? */ |
|
227 |
|
228 template <typename set_t> |
|
229 struct hb_add_coverage_context_t : |
|
230 hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE> |
|
231 { |
|
232 inline const char *get_name (void) { return "GET_COVERAGE"; } |
|
233 typedef const Coverage &return_t; |
|
234 template <typename T> |
|
235 inline return_t dispatch (const T &obj) { return obj.get_coverage (); } |
|
236 static return_t default_return_value (void) { return Null(Coverage); } |
|
237 bool stop_sublookup_iteration (return_t r) const |
|
238 { |
|
239 r.add_coverage (set); |
|
240 return false; |
|
241 } |
|
242 |
|
243 hb_add_coverage_context_t (set_t *set_) : |
|
244 set (set_), |
|
245 debug_depth (0) {} |
|
246 |
|
247 set_t *set; |
|
248 unsigned int debug_depth; |
|
249 }; |
|
250 |
|
251 |
|
252 |
|
/* Debug level for lookup application; overridable at build time. */
#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

/* Declares a scoped hb_auto_trace_t logging the buffer position, current
 * glyph id and lookup index; expects a `c` hb_apply_context_t in scope. */
#define TRACE_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "idx %d gid %u lookup %d", \
	 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index);
|
262 |
|
/* Dispatch context carried through GSUB/GPOS lookup application: holds the
 * buffer being shaped, the active lookup's properties/mask, and the skipping
 * iterators used to match input and context glyphs. */
struct hb_apply_context_t :
       hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
{
  /* Decides, per glyph, whether it can participate in a match and whether
   * it may be skipped over, based on lookup props, mask, syllable and an
   * optional per-format match callback. */
  struct matcher_t
  {
    inline matcher_t (void) :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
	     syllable arg1(0),
#undef arg1
	     match_func (NULL),
	     match_data (NULL) {};

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

    inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    inline void set_mask (hb_mask_t mask_) { mask = mask_; }
    inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    inline void set_match_func (match_func_t match_func_,
				const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

    /* MATCH_NO if mask/syllable rule out the glyph; MATCH_YES/NO per the
     * match callback if one is set; MATCH_MAYBE when there is no callback. */
    inline may_match_t may_match (const hb_glyph_info_t &info,
				  const USHORT *glyph_data) const
    {
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

    /* SKIP_YES when lookup props exclude the glyph outright; SKIP_MAYBE for
     * default-ignorables (ZWNJ/ZWJ subject to the ignore_* flags); else
     * SKIP_NO. */
    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
	      const hb_glyph_info_t &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_fvs (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  /* Iterates the buffer forward (over info[]) or backward (over out_info[]),
   * skipping glyphs the matcher allows to be skipped, and matching the rest
   * against an expected glyph_data array. */
  struct skipping_iterator_t
  {
    inline void init (hb_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = NULL,
      matcher.set_match_func (NULL, NULL);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (context_match || c->table_index == 1);
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
      /* Context matching ignores the lookup mask. */
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
    inline void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    inline void set_match_func (matcher_t::match_func_t match_func_,
				const void *match_data_,
				const USHORT glyph_data[])
    {
      matcher.set_match_func (match_func_, match_data_);
      match_glyph_data = glyph_data;
    }

    /* Position the iterator at start_index_ expecting num_items_ more
     * matches; syllable matching engages only when starting at the cursor. */
    inline void reset (unsigned int start_index_,
		       unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    /* Un-consume the last matched item (pushes back one expected value). */
    inline void reject (void) { num_items++; match_glyph_data--; }

    /* Advance to the next matching glyph; false when no further match is
     * possible (hard mismatch or not enough glyphs left). */
    inline bool next (void)
    {
      assert (num_items > 0);
      while (idx + num_items < end)
      {
	idx++;
	const hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }
    /* Mirror of next(), walking backward over the already-output glyphs. */
    inline bool prev (void)
    {
      assert (num_items > 0);
      while (idx >= num_items)
      {
	idx--;
	const hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }

    unsigned int idx;           /* Current buffer position. */
    protected:
    hb_apply_context_t *c;
    matcher_t matcher;
    const USHORT *match_glyph_data;  /* Next expected value; NULL for context-free matching. */

    unsigned int num_items;     /* Matches still needed. */
    unsigned int end;           /* One past the last usable buffer index. */
  };


  inline const char *get_name (void) { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value (void) { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Apply a nested lookup, bounded by the remaining nesting budget. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, lookup_index);
    nesting_level_left++;
    return ret;
  }

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  bool auto_zwj;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  const GDEF &gdef;
  bool has_glyph_classes;
  const VariationStore &var_store;
  skipping_iterator_t iter_input, iter_context;
  unsigned int lookup_index;
  unsigned int debug_depth;


  hb_apply_context_t (unsigned int table_index_,
		      hb_font_t *font_,
		      hb_buffer_t *buffer_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (1),
			auto_zwj (true),
			recurse_func (NULL),
			nesting_level_left (HB_MAX_NESTING_LEVEL),
			lookup_props (0),
			gdef (*hb_ot_layout_from_face (face)->gdef),
			has_glyph_classes (gdef.has_glyph_classes ()),
			var_store (gdef.get_var_store ()),
			iter_input (),
			iter_context (),
			lookup_index ((unsigned int) -1),
			debug_depth (0) {}

  inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
  inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
  inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  /* Changing lookup props re-initializes both iterators, since the matcher
   * snapshots props/mask/ZW* flags at init time. */
  inline void set_lookup_props (unsigned int lookup_props_)
  {
    lookup_props = lookup_props_;
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  /* Whether a mark glyph passes the lookup's mark-filtering rules. */
  inline bool
  match_properties_mark (hb_codepoint_t glyph,
			 unsigned int glyph_props,
			 unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  /* Whether the glyph is eligible for this lookup at all, per LookupFlag. */
  inline bool
  check_glyph_property (const hb_glyph_info_t *info,
			unsigned int match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }

  /* Computes and stores the glyph-props for a glyph about to be written at
   * the cursor: preserves the relevant old bits, marks it SUBSTITUTED, and
   * takes the class from GDEF when available, else from class_guess. */
  inline void _set_glyph_props (hb_codepoint_t glyph_index,
			  unsigned int class_guess = 0,
			  bool ligature = false,
			  bool component = false) const
  {
    unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
			  HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
    add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitions. Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened. As such, clear MULTIPLIED bit.
       */
      add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
    else if (class_guess)
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
  }

  /* Substitute the current glyph and advance (via the buffer). */
  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->replace_glyph (glyph_index);
  }
  /* Substitute the current glyph without advancing the cursor. */
  inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  /* Substitute the current glyph with a ligature glyph. */
  inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
					   unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, true);
    buffer->replace_glyph (glyph_index);
  }
  /* Emit one expansion component (Multiple substitution output). */
  inline void output_glyph_for_component (hb_codepoint_t glyph_index,
					  unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, false, true);
    buffer->output_glyph (glyph_index);
  }
};
|
598 |
|
599 |
|
600 |
|
/* Callback signatures for the (Chain)Context matching machinery below.
 * `value` is interpreted per match type — glyph id, class value, or offset
 * to a Coverage table — with `data` as the companion table pointer. */
typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
|
604 |
|
/* Bundles the closure-time intersection callback for context lookups. */
struct ContextClosureFuncs
{
  intersects_func_t intersects;
};
|
/* Bundles the glyph-collection callback for context lookups. */
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
|
/* Bundles the apply-time match callback for context lookups. */
struct ContextApplyFuncs
{
  match_func_t match;
};
|
617 |
|
618 |
|
619 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) |
|
620 { |
|
621 return glyphs->has (value); |
|
622 } |
|
623 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data) |
|
624 { |
|
625 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
|
626 return class_def.intersects_class (glyphs, value); |
|
627 } |
|
628 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) |
|
629 { |
|
630 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; |
|
631 return (data+coverage).intersects (glyphs); |
|
632 } |
|
633 |
|
634 static inline bool intersects_array (hb_closure_context_t *c, |
|
635 unsigned int count, |
|
636 const USHORT values[], |
|
637 intersects_func_t intersects_func, |
|
638 const void *intersects_data) |
|
639 { |
|
640 for (unsigned int i = 0; i < count; i++) |
|
641 if (likely (!intersects_func (c->glyphs, values[i], intersects_data))) |
|
642 return false; |
|
643 return true; |
|
644 } |
|
645 |
|
646 |
|
647 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) |
|
648 { |
|
649 glyphs->add (value); |
|
650 } |
|
651 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data) |
|
652 { |
|
653 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
|
654 class_def.add_class (glyphs, value); |
|
655 } |
|
656 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) |
|
657 { |
|
658 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; |
|
659 (data+coverage).add_coverage (glyphs); |
|
660 } |
|
661 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED, |
|
662 hb_set_t *glyphs, |
|
663 unsigned int count, |
|
664 const USHORT values[], |
|
665 collect_glyphs_func_t collect_func, |
|
666 const void *collect_data) |
|
667 { |
|
668 for (unsigned int i = 0; i < count; i++) |
|
669 collect_func (glyphs, values[i], collect_data); |
|
670 } |
|
671 |
|
672 |
|
673 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED) |
|
674 { |
|
675 return glyph_id == value; |
|
676 } |
|
677 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data) |
|
678 { |
|
679 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
|
680 return class_def.get_class (glyph_id) == value; |
|
681 } |
|
682 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data) |
|
683 { |
|
684 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; |
|
685 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED; |
|
686 } |
|
687 |
|
688 static inline bool would_match_input (hb_would_apply_context_t *c, |
|
689 unsigned int count, /* Including the first glyph (not matched) */ |
|
690 const USHORT input[], /* Array of input values--start with second glyph */ |
|
691 match_func_t match_func, |
|
692 const void *match_data) |
|
693 { |
|
694 if (count != c->len) |
|
695 return false; |
|
696 |
|
697 for (unsigned int i = 1; i < count; i++) |
|
698 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data))) |
|
699 return false; |
|
700 |
|
701 return true; |
|
702 } |
|
/* Matches `count` glyphs — the current buffer glyph plus count-1 more read
 * through the input skipping-iterator — against input[] with match_func.
 * On success fills match_positions[0..count) with buffer indices, sets
 * *end_offset to the matched span length (in buffer positions), and
 * optionally reports whether all matched glyphs were marks and the total
 * ligature-component count (used by the ligation code below). */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset,
				unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
				bool *p_is_mark_ligature = NULL,
				unsigned int *p_total_component_count = NULL)
{
  TRACE_APPLY (NULL);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType... Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There is an exception to this: If a ligature tries ligating with marks that
   *   belong to it itself, go ahead, assuming that the font designer knows what
   *   they are doing (otherwise it can break Indic stuff when a matra wants to
   *   ligate with a conjunct...)
   */

  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp) {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them. */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
	return_trace (false);
    } else {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  /* Span length from the cursor through the last matched glyph. */
  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
|
/* Replaces the glyphs matched by match_input (at match_positions[0..count))
 * with the single glyph `lig_glyph`, re-assigning ligature ids/components of
 * the skipped glyphs in between and of any marks following the ligature. */
static inline bool ligate_input (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int match_length,
				 hb_codepoint_t lig_glyph,
				 bool is_mark_ligature,
				 unsigned int total_component_count)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, buffer->idx + match_length);

  /*
   * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id. This will allow it to attach to
   *   a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature. See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions! Now *that*'s tricky! And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font... See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
  unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (!is_mark_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  for (unsigned int i = 1; i < count; i++)
  {
    /* Copy through the glyphs skipped between matched components, updating
     * their ligature component to point into the new ligature. */
    while (buffer->idx < match_positions[i] && !buffer->in_error)
    {
      if (!is_mark_ligature) {
	unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
	if (this_comp == 0)
	  this_comp = last_num_components;
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (this_comp, last_num_components);
	  _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id) {
    /* Re-adjust components for any marks following. */
    for (unsigned int i = buffer->idx; i < buffer->len; i++) {
      if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
	unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
	if (!this_comp)
	  break;
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (this_comp, last_num_components);
	_hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
      } else
	break;
    }
  }
  return_trace (true);
}
|
884 |
|
885 static inline bool match_backtrack (hb_apply_context_t *c, |
|
886 unsigned int count, |
|
887 const USHORT backtrack[], |
|
888 match_func_t match_func, |
|
889 const void *match_data) |
|
890 { |
|
891 TRACE_APPLY (NULL); |
|
892 |
|
893 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; |
|
894 skippy_iter.reset (c->buffer->backtrack_len (), count); |
|
895 skippy_iter.set_match_func (match_func, match_data, backtrack); |
|
896 |
|
897 for (unsigned int i = 0; i < count; i++) |
|
898 if (!skippy_iter.prev ()) |
|
899 return_trace (false); |
|
900 |
|
901 return_trace (true); |
|
902 } |
|
903 |
|
904 static inline bool match_lookahead (hb_apply_context_t *c, |
|
905 unsigned int count, |
|
906 const USHORT lookahead[], |
|
907 match_func_t match_func, |
|
908 const void *match_data, |
|
909 unsigned int offset) |
|
910 { |
|
911 TRACE_APPLY (NULL); |
|
912 |
|
913 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; |
|
914 skippy_iter.reset (c->buffer->idx + offset - 1, count); |
|
915 skippy_iter.set_match_func (match_func, match_data, lookahead); |
|
916 |
|
917 for (unsigned int i = 0; i < count; i++) |
|
918 if (!skippy_iter.next ()) |
|
919 return_trace (false); |
|
920 |
|
921 return_trace (true); |
|
922 } |
|
923 |
|
924 |
|
925 |
|
/* A (sequence position, lookup index) pair stored inline in contextual
 * subtables: "apply lookup #lookupListIndex at matched position
 * #sequenceIndex".  Memory-mapped from the font file; layout must not
 * change. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Fixed-size struct with no offsets; a bounds check suffices. */
    return_trace (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};
|
941 |
|
942 |
|
943 template <typename context_t> |
|
944 static inline void recurse_lookups (context_t *c, |
|
945 unsigned int lookupCount, |
|
946 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */) |
|
947 { |
|
948 for (unsigned int i = 0; i < lookupCount; i++) |
|
949 c->recurse (lookupRecord[i].lookupListIndex); |
|
950 } |
|
951 |
|
/* Applies the LookupRecords of a matched contextual rule, one by one, at
 * their recorded match positions, fixing up those positions as nested
 * lookups grow or shrink the buffer.  The bookkeeping here is strictly
 * order-dependent; see inline notes. */
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_length)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;
  unsigned int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    /* delta may be negative; match_positions[] arithmetic relies on
     * unsigned wraparound cancelling out. */
    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    /* Don't recurse to ourself at same position.
     * Note that this test is too naive, it doesn't catch longer loops. */
    if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
      continue;

    buffer->move_to (match_positions[idx]);

    /* Measure total buffer length around the recursion to detect how many
     * glyphs the nested lookup added or removed. */
    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len. Adjust. */

    end = int (end) + delta;
    if (end <= match_positions[idx])
    {
      /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items, more than we have had matched.
       * Just never rewind end back and get out of here.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
      end = match_positions[idx];
      /* There can't be any further changes. */
      break;
    }

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      /* Would overflow the fixed-size match_positions[] array; bail out. */
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
	break;
    }
    else
    {
      /* NOTE: delta is negative. */
      delta = MAX (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
	     (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  buffer->move_to (end);

  return_trace (true);
}
|
1046 |
|
1047 |
|
1048 |
|
1049 /* Contextual lookups */ |
|
1050 |
|
/* Bundles the per-format intersection callback with its opaque data
 * (NULL, ClassDef*, or the subtable itself) for closure traversal. */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data;
};
|
1056 |
|
/* Bundles the per-format glyph-collection callback with its opaque data
 * for collect_glyphs traversal. */
struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};
|
1062 |
|
/* Bundles the per-format match callback (glyph/class/coverage) with its
 * opaque data for would_apply/apply. */
struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};
|
1068 |
|
1069 static inline void context_closure_lookup (hb_closure_context_t *c, |
|
1070 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1071 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1072 unsigned int lookupCount, |
|
1073 const LookupRecord lookupRecord[], |
|
1074 ContextClosureLookupContext &lookup_context) |
|
1075 { |
|
1076 if (intersects_array (c, |
|
1077 inputCount ? inputCount - 1 : 0, input, |
|
1078 lookup_context.funcs.intersects, lookup_context.intersects_data)) |
|
1079 recurse_lookups (c, |
|
1080 lookupCount, lookupRecord); |
|
1081 } |
|
1082 |
|
1083 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, |
|
1084 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1085 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1086 unsigned int lookupCount, |
|
1087 const LookupRecord lookupRecord[], |
|
1088 ContextCollectGlyphsLookupContext &lookup_context) |
|
1089 { |
|
1090 collect_array (c, c->input, |
|
1091 inputCount ? inputCount - 1 : 0, input, |
|
1092 lookup_context.funcs.collect, lookup_context.collect_data); |
|
1093 recurse_lookups (c, |
|
1094 lookupCount, lookupRecord); |
|
1095 } |
|
1096 |
|
1097 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c, |
|
1098 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1099 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1100 unsigned int lookupCount HB_UNUSED, |
|
1101 const LookupRecord lookupRecord[] HB_UNUSED, |
|
1102 ContextApplyLookupContext &lookup_context) |
|
1103 { |
|
1104 return would_match_input (c, |
|
1105 inputCount, input, |
|
1106 lookup_context.funcs.match, lookup_context.match_data); |
|
1107 } |
|
1108 static inline bool context_apply_lookup (hb_apply_context_t *c, |
|
1109 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1110 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1111 unsigned int lookupCount, |
|
1112 const LookupRecord lookupRecord[], |
|
1113 ContextApplyLookupContext &lookup_context) |
|
1114 { |
|
1115 unsigned int match_length = 0; |
|
1116 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH]; |
|
1117 return match_input (c, |
|
1118 inputCount, input, |
|
1119 lookup_context.funcs.match, lookup_context.match_data, |
|
1120 &match_length, match_positions) |
|
1121 && apply_lookup (c, |
|
1122 inputCount, match_positions, |
|
1123 lookupCount, lookupRecord, |
|
1124 match_length); |
|
1125 } |
|
1126 |
|
1127 struct Rule |
|
1128 { |
|
1129 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const |
|
1130 { |
|
1131 TRACE_CLOSURE (this); |
|
1132 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
|
1133 context_closure_lookup (c, |
|
1134 inputCount, inputZ, |
|
1135 lookupCount, lookupRecord, |
|
1136 lookup_context); |
|
1137 } |
|
1138 |
|
1139 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const |
|
1140 { |
|
1141 TRACE_COLLECT_GLYPHS (this); |
|
1142 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
|
1143 context_collect_glyphs_lookup (c, |
|
1144 inputCount, inputZ, |
|
1145 lookupCount, lookupRecord, |
|
1146 lookup_context); |
|
1147 } |
|
1148 |
|
1149 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
|
1150 { |
|
1151 TRACE_WOULD_APPLY (this); |
|
1152 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
|
1153 return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context)); |
|
1154 } |
|
1155 |
|
1156 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
|
1157 { |
|
1158 TRACE_APPLY (this); |
|
1159 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
|
1160 return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context)); |
|
1161 } |
|
1162 |
|
1163 public: |
|
1164 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1165 { |
|
1166 TRACE_SANITIZE (this); |
|
1167 return inputCount.sanitize (c) |
|
1168 && lookupCount.sanitize (c) |
|
1169 && c->check_range (inputZ, |
|
1170 inputZ[0].static_size * inputCount |
|
1171 + lookupRecordX[0].static_size * lookupCount); |
|
1172 } |
|
1173 |
|
1174 protected: |
|
1175 USHORT inputCount; /* Total number of glyphs in input |
|
1176 * glyph sequence--includes the first |
|
1177 * glyph */ |
|
1178 USHORT lookupCount; /* Number of LookupRecords */ |
|
1179 USHORT inputZ[VAR]; /* Array of match inputs--start with |
|
1180 * second glyph */ |
|
1181 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in |
|
1182 * design order */ |
|
1183 public: |
|
1184 DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX); |
|
1185 }; |
|
1186 |
|
1187 struct RuleSet |
|
1188 { |
|
1189 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const |
|
1190 { |
|
1191 TRACE_CLOSURE (this); |
|
1192 unsigned int num_rules = rule.len; |
|
1193 for (unsigned int i = 0; i < num_rules; i++) |
|
1194 (this+rule[i]).closure (c, lookup_context); |
|
1195 } |
|
1196 |
|
1197 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const |
|
1198 { |
|
1199 TRACE_COLLECT_GLYPHS (this); |
|
1200 unsigned int num_rules = rule.len; |
|
1201 for (unsigned int i = 0; i < num_rules; i++) |
|
1202 (this+rule[i]).collect_glyphs (c, lookup_context); |
|
1203 } |
|
1204 |
|
1205 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
|
1206 { |
|
1207 TRACE_WOULD_APPLY (this); |
|
1208 unsigned int num_rules = rule.len; |
|
1209 for (unsigned int i = 0; i < num_rules; i++) |
|
1210 { |
|
1211 if ((this+rule[i]).would_apply (c, lookup_context)) |
|
1212 return_trace (true); |
|
1213 } |
|
1214 return_trace (false); |
|
1215 } |
|
1216 |
|
1217 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
|
1218 { |
|
1219 TRACE_APPLY (this); |
|
1220 unsigned int num_rules = rule.len; |
|
1221 for (unsigned int i = 0; i < num_rules; i++) |
|
1222 { |
|
1223 if ((this+rule[i]).apply (c, lookup_context)) |
|
1224 return_trace (true); |
|
1225 } |
|
1226 return_trace (false); |
|
1227 } |
|
1228 |
|
1229 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1230 { |
|
1231 TRACE_SANITIZE (this); |
|
1232 return_trace (rule.sanitize (c, this)); |
|
1233 } |
|
1234 |
|
1235 protected: |
|
1236 OffsetArrayOf<Rule> |
|
1237 rule; /* Array of Rule tables |
|
1238 * ordered by preference */ |
|
1239 public: |
|
1240 DEFINE_SIZE_ARRAY (2, rule); |
|
1241 }; |
|
1242 |
|
1243 |
|
1244 struct ContextFormat1 |
|
1245 { |
|
1246 inline void closure (hb_closure_context_t *c) const |
|
1247 { |
|
1248 TRACE_CLOSURE (this); |
|
1249 |
|
1250 const Coverage &cov = (this+coverage); |
|
1251 |
|
1252 struct ContextClosureLookupContext lookup_context = { |
|
1253 {intersects_glyph}, |
|
1254 NULL |
|
1255 }; |
|
1256 |
|
1257 unsigned int count = ruleSet.len; |
|
1258 for (unsigned int i = 0; i < count; i++) |
|
1259 if (cov.intersects_coverage (c->glyphs, i)) { |
|
1260 const RuleSet &rule_set = this+ruleSet[i]; |
|
1261 rule_set.closure (c, lookup_context); |
|
1262 } |
|
1263 } |
|
1264 |
|
1265 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const |
|
1266 { |
|
1267 TRACE_COLLECT_GLYPHS (this); |
|
1268 (this+coverage).add_coverage (c->input); |
|
1269 |
|
1270 struct ContextCollectGlyphsLookupContext lookup_context = { |
|
1271 {collect_glyph}, |
|
1272 NULL |
|
1273 }; |
|
1274 |
|
1275 unsigned int count = ruleSet.len; |
|
1276 for (unsigned int i = 0; i < count; i++) |
|
1277 (this+ruleSet[i]).collect_glyphs (c, lookup_context); |
|
1278 } |
|
1279 |
|
1280 inline bool would_apply (hb_would_apply_context_t *c) const |
|
1281 { |
|
1282 TRACE_WOULD_APPLY (this); |
|
1283 |
|
1284 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; |
|
1285 struct ContextApplyLookupContext lookup_context = { |
|
1286 {match_glyph}, |
|
1287 NULL |
|
1288 }; |
|
1289 return_trace (rule_set.would_apply (c, lookup_context)); |
|
1290 } |
|
1291 |
|
1292 inline const Coverage &get_coverage (void) const |
|
1293 { |
|
1294 return this+coverage; |
|
1295 } |
|
1296 |
|
1297 inline bool apply (hb_apply_context_t *c) const |
|
1298 { |
|
1299 TRACE_APPLY (this); |
|
1300 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
|
1301 if (likely (index == NOT_COVERED)) |
|
1302 return_trace (false); |
|
1303 |
|
1304 const RuleSet &rule_set = this+ruleSet[index]; |
|
1305 struct ContextApplyLookupContext lookup_context = { |
|
1306 {match_glyph}, |
|
1307 NULL |
|
1308 }; |
|
1309 return_trace (rule_set.apply (c, lookup_context)); |
|
1310 } |
|
1311 |
|
1312 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1313 { |
|
1314 TRACE_SANITIZE (this); |
|
1315 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); |
|
1316 } |
|
1317 |
|
1318 protected: |
|
1319 USHORT format; /* Format identifier--format = 1 */ |
|
1320 OffsetTo<Coverage> |
|
1321 coverage; /* Offset to Coverage table--from |
|
1322 * beginning of table */ |
|
1323 OffsetArrayOf<RuleSet> |
|
1324 ruleSet; /* Array of RuleSet tables |
|
1325 * ordered by Coverage Index */ |
|
1326 public: |
|
1327 DEFINE_SIZE_ARRAY (6, ruleSet); |
|
1328 }; |
|
1329 |
|
1330 |
|
1331 struct ContextFormat2 |
|
1332 { |
|
1333 inline void closure (hb_closure_context_t *c) const |
|
1334 { |
|
1335 TRACE_CLOSURE (this); |
|
1336 if (!(this+coverage).intersects (c->glyphs)) |
|
1337 return; |
|
1338 |
|
1339 const ClassDef &class_def = this+classDef; |
|
1340 |
|
1341 struct ContextClosureLookupContext lookup_context = { |
|
1342 {intersects_class}, |
|
1343 &class_def |
|
1344 }; |
|
1345 |
|
1346 unsigned int count = ruleSet.len; |
|
1347 for (unsigned int i = 0; i < count; i++) |
|
1348 if (class_def.intersects_class (c->glyphs, i)) { |
|
1349 const RuleSet &rule_set = this+ruleSet[i]; |
|
1350 rule_set.closure (c, lookup_context); |
|
1351 } |
|
1352 } |
|
1353 |
|
1354 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const |
|
1355 { |
|
1356 TRACE_COLLECT_GLYPHS (this); |
|
1357 (this+coverage).add_coverage (c->input); |
|
1358 |
|
1359 const ClassDef &class_def = this+classDef; |
|
1360 struct ContextCollectGlyphsLookupContext lookup_context = { |
|
1361 {collect_class}, |
|
1362 &class_def |
|
1363 }; |
|
1364 |
|
1365 unsigned int count = ruleSet.len; |
|
1366 for (unsigned int i = 0; i < count; i++) |
|
1367 (this+ruleSet[i]).collect_glyphs (c, lookup_context); |
|
1368 } |
|
1369 |
|
1370 inline bool would_apply (hb_would_apply_context_t *c) const |
|
1371 { |
|
1372 TRACE_WOULD_APPLY (this); |
|
1373 |
|
1374 const ClassDef &class_def = this+classDef; |
|
1375 unsigned int index = class_def.get_class (c->glyphs[0]); |
|
1376 const RuleSet &rule_set = this+ruleSet[index]; |
|
1377 struct ContextApplyLookupContext lookup_context = { |
|
1378 {match_class}, |
|
1379 &class_def |
|
1380 }; |
|
1381 return_trace (rule_set.would_apply (c, lookup_context)); |
|
1382 } |
|
1383 |
|
1384 inline const Coverage &get_coverage (void) const |
|
1385 { |
|
1386 return this+coverage; |
|
1387 } |
|
1388 |
|
1389 inline bool apply (hb_apply_context_t *c) const |
|
1390 { |
|
1391 TRACE_APPLY (this); |
|
1392 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
|
1393 if (likely (index == NOT_COVERED)) return_trace (false); |
|
1394 |
|
1395 const ClassDef &class_def = this+classDef; |
|
1396 index = class_def.get_class (c->buffer->cur().codepoint); |
|
1397 const RuleSet &rule_set = this+ruleSet[index]; |
|
1398 struct ContextApplyLookupContext lookup_context = { |
|
1399 {match_class}, |
|
1400 &class_def |
|
1401 }; |
|
1402 return_trace (rule_set.apply (c, lookup_context)); |
|
1403 } |
|
1404 |
|
1405 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1406 { |
|
1407 TRACE_SANITIZE (this); |
|
1408 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this)); |
|
1409 } |
|
1410 |
|
1411 protected: |
|
1412 USHORT format; /* Format identifier--format = 2 */ |
|
1413 OffsetTo<Coverage> |
|
1414 coverage; /* Offset to Coverage table--from |
|
1415 * beginning of table */ |
|
1416 OffsetTo<ClassDef> |
|
1417 classDef; /* Offset to glyph ClassDef table--from |
|
1418 * beginning of table */ |
|
1419 OffsetArrayOf<RuleSet> |
|
1420 ruleSet; /* Array of RuleSet tables |
|
1421 * ordered by class */ |
|
1422 public: |
|
1423 DEFINE_SIZE_ARRAY (8, ruleSet); |
|
1424 }; |
|
1425 |
|
1426 |
|
/* Context lookup Format 3: a single rule given as one Coverage table per
 * input position, followed inline by the LookupRecords.  The sanitize()
 * order below is deliberate (glyphCount gates every coverageZ access);
 * do not reorder. */
struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    /* First-position coverage must intersect before the rule can fire. */
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    /* LookupRecords follow the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    /* coverageZ + 1: positions after the first; offsets reinterpreted as
     * the USHORT match values expected by the generic helper. */
    context_closure_lookup (c,
			    glyphCount, (const USHORT *) (coverageZ + 1),
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
				   glyphCount, (const USHORT *) (coverageZ + 1),
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverageZ[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    /* Only after the coverage offsets are proven in-bounds is it safe to
     * compute and range-check the trailing LookupRecord array. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverageZ[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
};
|
1520 |
|
/* Top-level Context (GSUB 5 / GPOS 7) subtable: a union of the three
 * formats, dispatched on the leading format field. */
struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    /* Verify the format field itself is readable before switching on it. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};
|
1544 |
|
1545 |
|
1546 /* Chaining Contextual lookups */ |
|
1547 |
|
/* Like ContextClosureLookupContext, but with separate opaque data for
 * the backtrack [0], input [1], and lookahead [2] sequences. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};
|
1553 |
|
/* Like ContextCollectGlyphsLookupContext, but with separate opaque data
 * for the backtrack [0], input [1], and lookahead [2] sequences. */
struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};
|
1559 |
|
/* Like ContextApplyLookupContext, but with separate opaque match data
 * for the backtrack [0], input [1], and lookahead [2] sequences. */
struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};
|
1565 |
|
1566 static inline void chain_context_closure_lookup (hb_closure_context_t *c, |
|
1567 unsigned int backtrackCount, |
|
1568 const USHORT backtrack[], |
|
1569 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1570 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1571 unsigned int lookaheadCount, |
|
1572 const USHORT lookahead[], |
|
1573 unsigned int lookupCount, |
|
1574 const LookupRecord lookupRecord[], |
|
1575 ChainContextClosureLookupContext &lookup_context) |
|
1576 { |
|
1577 if (intersects_array (c, |
|
1578 backtrackCount, backtrack, |
|
1579 lookup_context.funcs.intersects, lookup_context.intersects_data[0]) |
|
1580 && intersects_array (c, |
|
1581 inputCount ? inputCount - 1 : 0, input, |
|
1582 lookup_context.funcs.intersects, lookup_context.intersects_data[1]) |
|
1583 && intersects_array (c, |
|
1584 lookaheadCount, lookahead, |
|
1585 lookup_context.funcs.intersects, lookup_context.intersects_data[2])) |
|
1586 recurse_lookups (c, |
|
1587 lookupCount, lookupRecord); |
|
1588 } |
|
1589 |
|
1590 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, |
|
1591 unsigned int backtrackCount, |
|
1592 const USHORT backtrack[], |
|
1593 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1594 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1595 unsigned int lookaheadCount, |
|
1596 const USHORT lookahead[], |
|
1597 unsigned int lookupCount, |
|
1598 const LookupRecord lookupRecord[], |
|
1599 ChainContextCollectGlyphsLookupContext &lookup_context) |
|
1600 { |
|
1601 collect_array (c, c->before, |
|
1602 backtrackCount, backtrack, |
|
1603 lookup_context.funcs.collect, lookup_context.collect_data[0]); |
|
1604 collect_array (c, c->input, |
|
1605 inputCount ? inputCount - 1 : 0, input, |
|
1606 lookup_context.funcs.collect, lookup_context.collect_data[1]); |
|
1607 collect_array (c, c->after, |
|
1608 lookaheadCount, lookahead, |
|
1609 lookup_context.funcs.collect, lookup_context.collect_data[2]); |
|
1610 recurse_lookups (c, |
|
1611 lookupCount, lookupRecord); |
|
1612 } |
|
1613 |
|
1614 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c, |
|
1615 unsigned int backtrackCount, |
|
1616 const USHORT backtrack[] HB_UNUSED, |
|
1617 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1618 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1619 unsigned int lookaheadCount, |
|
1620 const USHORT lookahead[] HB_UNUSED, |
|
1621 unsigned int lookupCount HB_UNUSED, |
|
1622 const LookupRecord lookupRecord[] HB_UNUSED, |
|
1623 ChainContextApplyLookupContext &lookup_context) |
|
1624 { |
|
1625 return (c->zero_context ? !backtrackCount && !lookaheadCount : true) |
|
1626 && would_match_input (c, |
|
1627 inputCount, input, |
|
1628 lookup_context.funcs.match, lookup_context.match_data[1]); |
|
1629 } |
|
1630 |
|
1631 static inline bool chain_context_apply_lookup (hb_apply_context_t *c, |
|
1632 unsigned int backtrackCount, |
|
1633 const USHORT backtrack[], |
|
1634 unsigned int inputCount, /* Including the first glyph (not matched) */ |
|
1635 const USHORT input[], /* Array of input values--start with second glyph */ |
|
1636 unsigned int lookaheadCount, |
|
1637 const USHORT lookahead[], |
|
1638 unsigned int lookupCount, |
|
1639 const LookupRecord lookupRecord[], |
|
1640 ChainContextApplyLookupContext &lookup_context) |
|
1641 { |
|
1642 unsigned int match_length = 0; |
|
1643 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH]; |
|
1644 return match_input (c, |
|
1645 inputCount, input, |
|
1646 lookup_context.funcs.match, lookup_context.match_data[1], |
|
1647 &match_length, match_positions) |
|
1648 && match_backtrack (c, |
|
1649 backtrackCount, backtrack, |
|
1650 lookup_context.funcs.match, lookup_context.match_data[0]) |
|
1651 && match_lookahead (c, |
|
1652 lookaheadCount, lookahead, |
|
1653 lookup_context.funcs.match, lookup_context.match_data[2], |
|
1654 match_length) |
|
1655 && apply_lookup (c, |
|
1656 inputCount, match_positions, |
|
1657 lookupCount, lookupRecord, |
|
1658 match_length); |
|
1659 } |
|
1660 |
|
/* A single chaining-context rule: glyph sequences to match before
 * (backtrack), at (input; its first glyph is implied by the enclosing
 * table's coverage/class), and after (lookahead) the current position,
 * plus the lookup records to apply when all three match. */
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    /* The four variable-length arrays are laid out back-to-back in the
     * font data; only `backtrack` is a declared member, the rest are
     * located at runtime with StructAfter<> (hence the X-suffixed
     * placeholder members below, which are never accessed directly). */
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  /* Feeds the glyphs this rule can match into the collect context. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, backtrack.array,
					 input.len, input.array,
					 lookahead.len, lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  /* Returns whether this rule would match at the context's position,
   * without applying any lookups. */
  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
						    backtrack.len, backtrack.array,
						    input.len, input.array,
						    lookahead.len, lookahead.array, lookup.len,
						    lookup.array, lookup_context));
  }

  /* Matches the rule against the buffer and, on success, applies its
   * lookup records.  Returns whether it applied. */
  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, backtrack.array,
					      input.len, input.array,
					      lookahead.len, lookahead.array, lookup.len,
					      lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Each array must be validated before StructAfter<> may be used to
     * reach the next one, hence the early-out chain — do not reorder. */
    if (!backtrack.sanitize (c)) return_trace (false);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords (in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};
|
1746 |
|
1747 struct ChainRuleSet |
|
1748 { |
|
1749 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const |
|
1750 { |
|
1751 TRACE_CLOSURE (this); |
|
1752 unsigned int num_rules = rule.len; |
|
1753 for (unsigned int i = 0; i < num_rules; i++) |
|
1754 (this+rule[i]).closure (c, lookup_context); |
|
1755 } |
|
1756 |
|
1757 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const |
|
1758 { |
|
1759 TRACE_COLLECT_GLYPHS (this); |
|
1760 unsigned int num_rules = rule.len; |
|
1761 for (unsigned int i = 0; i < num_rules; i++) |
|
1762 (this+rule[i]).collect_glyphs (c, lookup_context); |
|
1763 } |
|
1764 |
|
1765 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const |
|
1766 { |
|
1767 TRACE_WOULD_APPLY (this); |
|
1768 unsigned int num_rules = rule.len; |
|
1769 for (unsigned int i = 0; i < num_rules; i++) |
|
1770 if ((this+rule[i]).would_apply (c, lookup_context)) |
|
1771 return_trace (true); |
|
1772 |
|
1773 return_trace (false); |
|
1774 } |
|
1775 |
|
1776 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const |
|
1777 { |
|
1778 TRACE_APPLY (this); |
|
1779 unsigned int num_rules = rule.len; |
|
1780 for (unsigned int i = 0; i < num_rules; i++) |
|
1781 if ((this+rule[i]).apply (c, lookup_context)) |
|
1782 return_trace (true); |
|
1783 |
|
1784 return_trace (false); |
|
1785 } |
|
1786 |
|
1787 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1788 { |
|
1789 TRACE_SANITIZE (this); |
|
1790 return_trace (rule.sanitize (c, this)); |
|
1791 } |
|
1792 |
|
1793 protected: |
|
1794 OffsetArrayOf<ChainRule> |
|
1795 rule; /* Array of ChainRule tables |
|
1796 * ordered by preference */ |
|
1797 public: |
|
1798 DEFINE_SIZE_ARRAY (2, rule); |
|
1799 }; |
|
1800 |
|
1801 struct ChainContextFormat1 |
|
1802 { |
|
1803 inline void closure (hb_closure_context_t *c) const |
|
1804 { |
|
1805 TRACE_CLOSURE (this); |
|
1806 const Coverage &cov = (this+coverage); |
|
1807 |
|
1808 struct ChainContextClosureLookupContext lookup_context = { |
|
1809 {intersects_glyph}, |
|
1810 {NULL, NULL, NULL} |
|
1811 }; |
|
1812 |
|
1813 unsigned int count = ruleSet.len; |
|
1814 for (unsigned int i = 0; i < count; i++) |
|
1815 if (cov.intersects_coverage (c->glyphs, i)) { |
|
1816 const ChainRuleSet &rule_set = this+ruleSet[i]; |
|
1817 rule_set.closure (c, lookup_context); |
|
1818 } |
|
1819 } |
|
1820 |
|
1821 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const |
|
1822 { |
|
1823 TRACE_COLLECT_GLYPHS (this); |
|
1824 (this+coverage).add_coverage (c->input); |
|
1825 |
|
1826 struct ChainContextCollectGlyphsLookupContext lookup_context = { |
|
1827 {collect_glyph}, |
|
1828 {NULL, NULL, NULL} |
|
1829 }; |
|
1830 |
|
1831 unsigned int count = ruleSet.len; |
|
1832 for (unsigned int i = 0; i < count; i++) |
|
1833 (this+ruleSet[i]).collect_glyphs (c, lookup_context); |
|
1834 } |
|
1835 |
|
1836 inline bool would_apply (hb_would_apply_context_t *c) const |
|
1837 { |
|
1838 TRACE_WOULD_APPLY (this); |
|
1839 |
|
1840 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; |
|
1841 struct ChainContextApplyLookupContext lookup_context = { |
|
1842 {match_glyph}, |
|
1843 {NULL, NULL, NULL} |
|
1844 }; |
|
1845 return_trace (rule_set.would_apply (c, lookup_context)); |
|
1846 } |
|
1847 |
|
1848 inline const Coverage &get_coverage (void) const |
|
1849 { |
|
1850 return this+coverage; |
|
1851 } |
|
1852 |
|
1853 inline bool apply (hb_apply_context_t *c) const |
|
1854 { |
|
1855 TRACE_APPLY (this); |
|
1856 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
|
1857 if (likely (index == NOT_COVERED)) return_trace (false); |
|
1858 |
|
1859 const ChainRuleSet &rule_set = this+ruleSet[index]; |
|
1860 struct ChainContextApplyLookupContext lookup_context = { |
|
1861 {match_glyph}, |
|
1862 {NULL, NULL, NULL} |
|
1863 }; |
|
1864 return_trace (rule_set.apply (c, lookup_context)); |
|
1865 } |
|
1866 |
|
1867 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1868 { |
|
1869 TRACE_SANITIZE (this); |
|
1870 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); |
|
1871 } |
|
1872 |
|
1873 protected: |
|
1874 USHORT format; /* Format identifier--format = 1 */ |
|
1875 OffsetTo<Coverage> |
|
1876 coverage; /* Offset to Coverage table--from |
|
1877 * beginning of table */ |
|
1878 OffsetArrayOf<ChainRuleSet> |
|
1879 ruleSet; /* Array of ChainRuleSet tables |
|
1880 * ordered by Coverage Index */ |
|
1881 public: |
|
1882 DEFINE_SIZE_ARRAY (6, ruleSet); |
|
1883 }; |
|
1884 |
|
1885 struct ChainContextFormat2 |
|
1886 { |
|
1887 inline void closure (hb_closure_context_t *c) const |
|
1888 { |
|
1889 TRACE_CLOSURE (this); |
|
1890 if (!(this+coverage).intersects (c->glyphs)) |
|
1891 return; |
|
1892 |
|
1893 const ClassDef &backtrack_class_def = this+backtrackClassDef; |
|
1894 const ClassDef &input_class_def = this+inputClassDef; |
|
1895 const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
|
1896 |
|
1897 struct ChainContextClosureLookupContext lookup_context = { |
|
1898 {intersects_class}, |
|
1899 {&backtrack_class_def, |
|
1900 &input_class_def, |
|
1901 &lookahead_class_def} |
|
1902 }; |
|
1903 |
|
1904 unsigned int count = ruleSet.len; |
|
1905 for (unsigned int i = 0; i < count; i++) |
|
1906 if (input_class_def.intersects_class (c->glyphs, i)) { |
|
1907 const ChainRuleSet &rule_set = this+ruleSet[i]; |
|
1908 rule_set.closure (c, lookup_context); |
|
1909 } |
|
1910 } |
|
1911 |
|
1912 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const |
|
1913 { |
|
1914 TRACE_COLLECT_GLYPHS (this); |
|
1915 (this+coverage).add_coverage (c->input); |
|
1916 |
|
1917 const ClassDef &backtrack_class_def = this+backtrackClassDef; |
|
1918 const ClassDef &input_class_def = this+inputClassDef; |
|
1919 const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
|
1920 |
|
1921 struct ChainContextCollectGlyphsLookupContext lookup_context = { |
|
1922 {collect_class}, |
|
1923 {&backtrack_class_def, |
|
1924 &input_class_def, |
|
1925 &lookahead_class_def} |
|
1926 }; |
|
1927 |
|
1928 unsigned int count = ruleSet.len; |
|
1929 for (unsigned int i = 0; i < count; i++) |
|
1930 (this+ruleSet[i]).collect_glyphs (c, lookup_context); |
|
1931 } |
|
1932 |
|
1933 inline bool would_apply (hb_would_apply_context_t *c) const |
|
1934 { |
|
1935 TRACE_WOULD_APPLY (this); |
|
1936 |
|
1937 const ClassDef &backtrack_class_def = this+backtrackClassDef; |
|
1938 const ClassDef &input_class_def = this+inputClassDef; |
|
1939 const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
|
1940 |
|
1941 unsigned int index = input_class_def.get_class (c->glyphs[0]); |
|
1942 const ChainRuleSet &rule_set = this+ruleSet[index]; |
|
1943 struct ChainContextApplyLookupContext lookup_context = { |
|
1944 {match_class}, |
|
1945 {&backtrack_class_def, |
|
1946 &input_class_def, |
|
1947 &lookahead_class_def} |
|
1948 }; |
|
1949 return_trace (rule_set.would_apply (c, lookup_context)); |
|
1950 } |
|
1951 |
|
1952 inline const Coverage &get_coverage (void) const |
|
1953 { |
|
1954 return this+coverage; |
|
1955 } |
|
1956 |
|
1957 inline bool apply (hb_apply_context_t *c) const |
|
1958 { |
|
1959 TRACE_APPLY (this); |
|
1960 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
|
1961 if (likely (index == NOT_COVERED)) return_trace (false); |
|
1962 |
|
1963 const ClassDef &backtrack_class_def = this+backtrackClassDef; |
|
1964 const ClassDef &input_class_def = this+inputClassDef; |
|
1965 const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
|
1966 |
|
1967 index = input_class_def.get_class (c->buffer->cur().codepoint); |
|
1968 const ChainRuleSet &rule_set = this+ruleSet[index]; |
|
1969 struct ChainContextApplyLookupContext lookup_context = { |
|
1970 {match_class}, |
|
1971 {&backtrack_class_def, |
|
1972 &input_class_def, |
|
1973 &lookahead_class_def} |
|
1974 }; |
|
1975 return_trace (rule_set.apply (c, lookup_context)); |
|
1976 } |
|
1977 |
|
1978 inline bool sanitize (hb_sanitize_context_t *c) const |
|
1979 { |
|
1980 TRACE_SANITIZE (this); |
|
1981 return_trace (coverage.sanitize (c, this) && |
|
1982 backtrackClassDef.sanitize (c, this) && |
|
1983 inputClassDef.sanitize (c, this) && |
|
1984 lookaheadClassDef.sanitize (c, this) && |
|
1985 ruleSet.sanitize (c, this)); |
|
1986 } |
|
1987 |
|
1988 protected: |
|
1989 USHORT format; /* Format identifier--format = 2 */ |
|
1990 OffsetTo<Coverage> |
|
1991 coverage; /* Offset to Coverage table--from |
|
1992 * beginning of table */ |
|
1993 OffsetTo<ClassDef> |
|
1994 backtrackClassDef; /* Offset to glyph ClassDef table |
|
1995 * containing backtrack sequence |
|
1996 * data--from beginning of table */ |
|
1997 OffsetTo<ClassDef> |
|
1998 inputClassDef; /* Offset to glyph ClassDef |
|
1999 * table containing input sequence |
|
2000 * data--from beginning of table */ |
|
2001 OffsetTo<ClassDef> |
|
2002 lookaheadClassDef; /* Offset to glyph ClassDef table |
|
2003 * containing lookahead sequence |
|
2004 * data--from beginning of table */ |
|
2005 OffsetArrayOf<ChainRuleSet> |
|
2006 ruleSet; /* Array of ChainRuleSet tables |
|
2007 * ordered by class */ |
|
2008 public: |
|
2009 DEFINE_SIZE_ARRAY (12, ruleSet); |
|
2010 }; |
|
2011 |
|
/* ChainContextFormat3: coverage-based chaining context.  Every position
 * in the backtrack / input / lookahead sequences has its own Coverage
 * table.  The four trailing arrays are laid out back-to-back; only
 * `backtrack` is a declared member, the rest are reached via
 * StructAfter<> (the X-suffixed members below are placeholders). */
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    /* The first input coverage doubles as this subtable's coverage. */
    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    /* The USHORT casts reinterpret the coverage-offset arrays as plain
     * value arrays; the per-position callback resolves each value as an
     * offset from `this` (passed via lookup_context).  `input.array + 1`
     * skips the first input coverage — matching treats the input as
     * headless, and position 0 was already checked above. */
    chain_context_closure_lookup (c,
				  backtrack.len, (const USHORT *) backtrack.array,
				  input.len, (const USHORT *) input.array + 1,
				  lookahead.len, (const USHORT *) lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, (const USHORT *) backtrack.array,
					 input.len, (const USHORT *) input.array + 1,
					 lookahead.len, (const USHORT *) lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  /* Returns whether the chain would match at the context's position,
   * without applying any lookups. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_would_apply_lookup (c,
						    backtrack.len, (const USHORT *) backtrack.array,
						    input.len, (const USHORT *) input.array + 1,
						    lookahead.len, (const USHORT *) lookahead.array,
						    lookup.len, lookup.array, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    /* Format 3 has no separate coverage member; the first input
     * coverage serves that role. */
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    return this+input[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, (const USHORT *) backtrack.array,
					      input.len, (const USHORT *) input.array + 1,
					      lookahead.len, (const USHORT *) lookahead.array,
					      lookup.len, lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Each array must be validated before StructAfter<> may be used to
     * reach the next one — do not reorder. */
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    /* input[0] is dereferenced unconditionally elsewhere, so an empty
     * input array must be rejected here. */
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords (in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};
|
2135 |
|
2136 struct ChainContext |
|
2137 { |
|
2138 template <typename context_t> |
|
2139 inline typename context_t::return_t dispatch (context_t *c) const |
|
2140 { |
|
2141 TRACE_DISPATCH (this, u.format); |
|
2142 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); |
|
2143 switch (u.format) { |
|
2144 case 1: return_trace (c->dispatch (u.format1)); |
|
2145 case 2: return_trace (c->dispatch (u.format2)); |
|
2146 case 3: return_trace (c->dispatch (u.format3)); |
|
2147 default:return_trace (c->default_return_value ()); |
|
2148 } |
|
2149 } |
|
2150 |
|
2151 protected: |
|
2152 union { |
|
2153 USHORT format; /* Format identifier */ |
|
2154 ChainContextFormat1 format1; |
|
2155 ChainContextFormat2 format2; |
|
2156 ChainContextFormat3 format3; |
|
2157 } u; |
|
2158 }; |
|
2159 |
|
2160 |
|
/* ExtensionFormat1: indirection record that lets a lookup reference a
 * subtable of a different lookup type through a 32-bit offset, escaping
 * the 16-bit offset range of regular subtables. */
template <typename T>
struct ExtensionFormat1
{
  /* Lookup type of the wrapped subtable. */
  inline unsigned int get_type (void) const { return extensionLookupType; }

  template <typename X>
  inline const X& get_subtable (void) const
  {
    unsigned int offset = extensionOffset;
    /* A zero offset would point back at this struct itself; treat it as
     * "no subtable" and return the shared Null object. */
    if (unlikely (!offset)) return Null(typename T::LookupSubTable);
    return StructAtOffset<typename T::LookupSubTable> (this, offset);
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    /* Forward to the wrapped subtable, passing along its real lookup
     * type so it dispatches as if it were a direct subtable. */
    return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
  }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Only the fixed-size header is checked here; presumably the wrapped
     * subtable is sanitized when dispatch() recurses into it — confirm
     * against the Lookup sanitizing path. */
    return_trace (c->check_struct (this) && extensionOffset != 0);
  }

  protected:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
|
2199 |
|
2200 template <typename T> |
|
2201 struct Extension |
|
2202 { |
|
2203 inline unsigned int get_type (void) const |
|
2204 { |
|
2205 switch (u.format) { |
|
2206 case 1: return u.format1.get_type (); |
|
2207 default:return 0; |
|
2208 } |
|
2209 } |
|
2210 template <typename X> |
|
2211 inline const X& get_subtable (void) const |
|
2212 { |
|
2213 switch (u.format) { |
|
2214 case 1: return u.format1.template get_subtable<typename T::LookupSubTable> (); |
|
2215 default:return Null(typename T::LookupSubTable); |
|
2216 } |
|
2217 } |
|
2218 |
|
2219 template <typename context_t> |
|
2220 inline typename context_t::return_t dispatch (context_t *c) const |
|
2221 { |
|
2222 TRACE_DISPATCH (this, u.format); |
|
2223 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); |
|
2224 switch (u.format) { |
|
2225 case 1: return_trace (u.format1.dispatch (c)); |
|
2226 default:return_trace (c->default_return_value ()); |
|
2227 } |
|
2228 } |
|
2229 |
|
2230 protected: |
|
2231 union { |
|
2232 USHORT format; /* Format identifier */ |
|
2233 ExtensionFormat1<T> format1; |
|
2234 } u; |
|
2235 }; |
|
2236 |
|
2237 |
|
2238 /* |
|
2239 * GSUB/GPOS Common |
|
2240 */ |
|
2241 |
|
/* Common top-level structure shared by the GSUB and GPOS tables: a
 * ScriptList, a FeatureList, a LookupList, and (since table version 1.1)
 * an optional FeatureVariations table. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  /* Script accessors: thin wrappers over the ScriptList. */
  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  /* Feature accessors: thin wrappers over the FeatureList.
   * get_feature_tag() maps the not-found sentinel to HB_TAG_NONE. */
  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  /* Lookup accessors: thin wrappers over the LookupList. */
  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  /* Finds the FeatureVariations record matching the given variation
   * coordinates.  Pre-1.1 tables have no featureVars field, so the
   * lookup is redirected to the shared Null object (which finds
   * nothing). */
  inline bool find_variations_index (const int *coords, unsigned int num_coords,
				     unsigned int *index) const
  { return (version.to_int () >= 0x00010001u ? this+featureVars : Null(FeatureVariations))
	   .find_index (coords, num_coords, index); }
  /* Returns the variation-substituted Feature if one exists for this
   * (feature, variations) pair; otherwise falls back to the default
   * Feature from the FeatureList. */
  inline const Feature& get_feature_variation (unsigned int feature_index,
					       unsigned int variations_index) const
  {
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
	version.to_int () >= 0x00010001u)
    {
      const Feature *feature = (this+featureVars).find_substitute (variations_index,
								   feature_index);
      if (feature)
	return *feature;
    }
    return get_feature (feature_index);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* featureVars is only present from version 1.1 on, so it is only
     * sanitized when the version says it exists. */
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  scriptList.sanitize (c, this) &&
		  featureList.sanitize (c, this) &&
		  lookupList.sanitize (c, this) &&
		  (version.to_int () < 0x00010001u || featureVars.sanitize (c, this)));
  }

  protected:
  FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  OffsetTo<ScriptList>
		scriptList;	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList;	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList;	/* LookupList table */
  OffsetTo<FeatureVariations, ULONG>
		featureVars;	/* Offset to Feature Variations
				 * table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (10);
};
|
2324 |
|
2325 |
|
2326 } /* namespace OT */ |
|
2327 |
|
2328 |
|
2329 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */ |