| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2007,2008,2009,2010 Red Hat, Inc. | 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. |
| 3 * Copyright (C) 2010 Google, Inc. | 3 * Copyright © 2010 Google, Inc. |
| 4 * | 4 * |
| 5 * This is part of HarfBuzz, a text shaping library. | 5 * This is part of HarfBuzz, a text shaping library. |
| 6 * | 6 * |
| 7 * Permission is hereby granted, without written agreement and without | 7 * Permission is hereby granted, without written agreement and without |
| 8 * license or royalty fees, to use, copy, modify, and distribute this | 8 * license or royalty fees, to use, copy, modify, and distribute this |
| 9 * software and its documentation for any purpose, provided that the | 9 * software and its documentation for any purpose, provided that the |
| 10 * above copyright notice and the following two paragraphs appear in | 10 * above copyright notice and the following two paragraphs appear in |
| 11 * all copies of this software. | 11 * all copies of this software. |
| 12 * | 12 * |
| 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR | 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
| 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES | 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
| 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN | 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
| 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH | 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
| 17 * DAMAGE. | 17 * DAMAGE. |
| 18 * | 18 * |
| 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, | 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
| 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND | 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
| 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS | 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
| 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO | 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
| 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. | 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
| 24 * | 24 * |
| 25 * Red Hat Author(s): Behdad Esfahbod | 25 * Red Hat Author(s): Behdad Esfahbod |
| 26 * Google Author(s): Behdad Esfahbod | 26 * Google Author(s): Behdad Esfahbod |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH | 29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH |
| 30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH | 30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH |
| 31 | 31 |
| 32 #include "hb-buffer-private.hh" | 32 #include "hb-buffer-private.hh" |
| 33 #include "hb-ot-layout-gdef-private.hh" | 33 #include "hb-ot-layout-gdef-table.hh" |
| 34 | |
| 35 HB_BEGIN_DECLS | |
| 36 | 34 |
| 37 | 35 |
| 38 /* buffer var allocations */ | 36 /* buffer var allocations */ |
| 39 #define lig_id() var2.u16[0] /* unique ligature id */ | 37 #define lig_id() var2.u8[2] /* unique ligature id */ |
| 40 #define lig_comp() var2.u16[1] /* component number in the ligature (0 = base) */ | 38 #define lig_comp() var2.u8[3] /* component number in the ligature (0 = base) */ |
| 39 |
| 40 static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) { |
| 41 uint8_t lig_id = buffer->next_serial (); |
| 42 if (unlikely (!lig_id)) lig_id = buffer->next_serial (); /* in case of overflow */ |
| 43 return lig_id; |
| 44 } |
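
The new allocate_lig_id() helper pairs with the lig_id()/lig_comp() buffer vars above: it draws from
hb_buffer_t::next_serial() and retries once on wrap-around so it never returns 0 (presumably keeping
newly allocated ids distinct from the default value of 0). A hypothetical sketch of a caller, not part
of this patch, assuming only the helper and the two macros shown here:

    /* Illustration only: tag the current glyph as the base of a new ligature. */
    static inline void
    start_ligature (hb_buffer_t *buffer)
    {
      uint8_t id = allocate_lig_id (buffer);      /* non-zero, even after serial overflow */
      buffer->info[buffer->idx].lig_id()   = id;  /* unique id shared by the pieces of the ligature */
      buffer->info[buffer->idx].lig_comp() = 0;   /* 0 = the ligature base itself */
    }
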
| 45 |
| 41 | 46 |
| 42 | 47 |
| 43 #ifndef HB_DEBUG_APPLY | 48 #ifndef HB_DEBUG_APPLY |
| 44 #define HB_DEBUG_APPLY (HB_DEBUG+0) | 49 #define HB_DEBUG_APPLY (HB_DEBUG+0) |
| 45 #endif | 50 #endif |
| 46 | 51 |
| 47 #define TRACE_APPLY() \ | 52 #define TRACE_APPLY() \ |
| 48 » hb_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", HB_FUNC, this); \ | 53 » hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, NULL, HB_FUNC); |
| 49 | 54 |
| 50 | 55 |
| 51 HB_BEGIN_DECLS | |
| 52 | 56 |
| 53 struct hb_apply_context_t | 57 struct hb_apply_context_t |
| 54 { | 58 { |
| 55 unsigned int debug_depth; | 59 unsigned int debug_depth; |
| 56 hb_ot_layout_context_t *layout; | 60 hb_font_t *font; |
| 61 hb_face_t *face; |
| 57 hb_buffer_t *buffer; | 62 hb_buffer_t *buffer; |
| 63 hb_direction_t direction; |
| 58 hb_mask_t lookup_mask; | 64 hb_mask_t lookup_mask; |
| 59 unsigned int context_length; | 65 unsigned int context_length; |
| 60 unsigned int nesting_level_left; | 66 unsigned int nesting_level_left; |
| 61 unsigned int lookup_props; | 67 unsigned int lookup_props; |
| 62 unsigned int property; /* property of first glyph */ | 68 unsigned int property; /* property of first glyph */ |
| 63 | 69 |
| 70 struct mark_skipping_forward_iterator_t |
| 71 { |
| 72 inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_, |
| 73 unsigned int start_index_, |
| 74 unsigned int num_items_) |
| 75 { |
| 76 c = c_; |
| 77 idx = start_index_; |
| 78 num_items = num_items_; |
| 79 end = MIN (c->buffer->len, c->buffer->idx + c->context_length); |
| 80 } |
| 81 inline bool has_no_chance (void) const |
| 82 { |
| 83 return unlikely (num_items && idx + num_items >= end); |
| 84 } |
| 85 inline bool next (unsigned int *property_out, |
| 86 unsigned int lookup_props) |
| 87 { |
| 88 assert (num_items > 0); |
| 89 do |
| 90 { |
| 91 if (has_no_chance ()) |
| 92 return false; |
| 93 idx++; |
| 94 } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[idx], lookup_props, property_out)); |
| 95 num_items--; |
| 96 return true; |
| 97 } |
| 98 inline bool next (unsigned int *property_out = NULL) |
| 99 { |
| 100 return next (property_out, c->lookup_props); |
| 101 } |
| 102 |
| 103 unsigned int idx; |
| 104 private: |
| 105 hb_apply_context_t *c; |
| 106 unsigned int num_items; |
| 107 unsigned int end; |
| 108 }; |
| 109 |
| 110 struct mark_skipping_backward_iterator_t |
| 111 { |
| 112 inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_, |
| 113 unsigned int start_index_, |
| 114 unsigned int num_items_) |
| 115 { |
| 116 c = c_; |
| 117 idx = start_index_; |
| 118 num_items = num_items_; |
| 119 } |
| 120 inline bool has_no_chance (void) const |
| 121 { |
| 122 return unlikely (idx < num_items); |
| 123 } |
| 124 inline bool prev (unsigned int *property_out, |
| 125 unsigned int lookup_props) |
| 126 { |
| 127 assert (num_items > 0); |
| 128 do |
| 129 { |
| 130 if (has_no_chance ()) |
| 131 return false; |
| 132 idx--; |
| 133 } while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[idx], lookup_props, property_out)); |
| 134 num_items--; |
| 135 return true; |
| 136 } |
| 137 inline bool prev (unsigned int *property_out = NULL) |
| 138 { |
| 139 return prev (property_out, c->lookup_props); |
| 140 } |
| 141 |
| 142 unsigned int idx; |
| 143 private: |
| 144 hb_apply_context_t *c; |
| 145 unsigned int num_items; |
| 146 }; |
| 147 |
| 148 inline bool should_mark_skip_current_glyph (void) const |
| 149 { |
| 150 return _hb_ot_layout_skip_mark (face, &buffer->info[buffer->idx], lookup_props, NULL); |
| 151 } |
| 152 |
| 153 |
| 64 | 154 |
| 65 inline void replace_glyph (hb_codepoint_t glyph_index) const | 155 inline void replace_glyph (hb_codepoint_t glyph_index) const |
| 66 { | 156 { |
| 67 clear_property (); | 157 clear_property (); |
| 68 buffer->replace_glyph (glyph_index); | 158 buffer->replace_glyph (glyph_index); |
| 69 } | 159 } |
| 70 inline void replace_glyphs_be16 (unsigned int num_in, | 160 inline void replace_glyphs_be16 (unsigned int num_in, |
| 71 unsigned int num_out, | 161 unsigned int num_out, |
| 72 const uint16_t *glyph_data_be) const | 162 const uint16_t *glyph_data_be) const |
| 73 { | 163 { |
| 74 clear_property (); | 164 clear_property (); |
| 75 buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be); | 165 buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be); |
| 76 } | 166 } |
| 77 | 167 |
| 78 inline void guess_glyph_class (unsigned int klass) | 168 inline void guess_glyph_class (unsigned int klass) |
| 79 { | 169 { |
| 80 /* XXX if ! has gdef */ | 170 /* XXX if ! has gdef */ |
| 81 buffer->info[buffer->i].props_cache() = klass; | 171 buffer->info[buffer->idx].props_cache() = klass; |
| 82 } | 172 } |
| 83 | 173 |
| 84 private: | 174 private: |
| 85 inline void clear_property (void) const | 175 inline void clear_property (void) const |
| 86 { | 176 { |
| 87 /* XXX if has gdef */ | 177 /* XXX if has gdef */ |
| 88 buffer->info[buffer->i].props_cache() = 0; | 178 buffer->info[buffer->idx].props_cache() = 0; |
| 89 } | 179 } |
| 90 }; | 180 }; |
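
The two mark_skipping_*_iterator_t structs above centralize the "skip glyphs ignored by the lookup
flags" loops that the match helpers below used to open-code. A minimal usage sketch (the helper name
is made up for illustration; it assumes only the members shown above):

    /* Illustration only: visit the `count - 1` non-skipped glyphs after the current one,
     * honouring c->lookup_props, bailing out early when not enough glyphs can remain. */
    static inline bool
    visit_following_glyphs (hb_apply_context_t *c, unsigned int count)
    {
      hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
      if (skippy_iter.has_no_chance ())
        return false;                             /* too close to the end of the buffer */
      for (unsigned int i = 1; i < count; i++)
      {
        if (!skippy_iter.next ())
          return false;                           /* ran out of glyphs */
        /* c->buffer->info[skippy_iter.idx] is the i-th following glyph that is not skipped */
      }
      return true;
    }

The backward variant walks the already-output glyphs in out_info the same way; match_backtrack below
seeds it with c->buffer->backtrack_len ().
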
| 91 | 181 |
| 92 | 182 |
| 93 | 183 |
| 94 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); | 184 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); |
| 95 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index); | 185 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index); |
| 96 | 186 |
| 97 struct ContextFuncs | 187 struct ContextFuncs |
| 98 { | 188 { |
| (...skipping 20 matching lines...) |
| 119 } | 209 } |
| 120 | 210 |
| 121 | 211 |
| 122 static inline bool match_input (hb_apply_context_t *c, | 212 static inline bool match_input (hb_apply_context_t *c, |
| 123 unsigned int count, /* Including the first glyph (not matched) */ | 213 unsigned int count, /* Including the first glyph (not matched) */ |
| 124 const USHORT input[], /* Array of input values--start with second glyph */ | 214 const USHORT input[], /* Array of input values--start with second glyph */ |
| 125 match_func_t match_func, | 215 match_func_t match_func, |
| 126 const void *match_data, | 216 const void *match_data, |
| 127 unsigned int *context_length_out) | 217 unsigned int *context_length_out) |
| 128 { | 218 { |
| 129 unsigned int i, j; | 219 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1); |
| 130 unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length); | 220 if (skippy_iter.has_no_chance ()) |
| 131 if (unlikely (c->buffer->i + count > end)) | |
| 132 return false; | 221 return false; |
| 133 | 222 |
| 134 for (i = 1, j = c->buffer->i + 1; i < count; i++, j++) | 223 for (unsigned int i = 1; i < count; i++) |
| 135 { | 224 { |
| 136 while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_props, NULL)) | 225 if (!skippy_iter.next ()) |
| 137 { | 226 return false; |
| 138 if (unlikely (j + count - i == end)) | |
| 139 » return false; | |
| 140 j++; | |
| 141 } | |
| 142 | 227 |
| 143 if (likely (!match_func (c->buffer->info[j].codepoint, input[i - 1], match_data))) | 228 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data))) |
| 144 return false; | 229 return false; |
| 145 } | 230 } |
| 146 | 231 |
| 147 *context_length_out = j - c->buffer->i; | 232 *context_length_out = skippy_iter.idx - c->buffer->idx + 1; |
| 148 | 233 |
| 149 return true; | 234 return true; |
| 150 } | 235 } |
| 151 | 236 |
| 152 static inline bool match_backtrack (hb_apply_context_t *c, | 237 static inline bool match_backtrack (hb_apply_context_t *c, |
| 153 unsigned int count, | 238 unsigned int count, |
| 154 const USHORT backtrack[], | 239 const USHORT backtrack[], |
| 155 match_func_t match_func, | 240 match_func_t match_func, |
| 156 const void *match_data) | 241 const void *match_data) |
| 157 { | 242 { |
| 158 if (unlikely (c->buffer->out_len < count)) | 243 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count); |
| 244 if (skippy_iter.has_no_chance ()) |
| 159 return false; | 245 return false; |
| 160 | 246 |
| 161 for (unsigned int i = 0, j = c->buffer->out_len - 1; i < count; i++, j--) | 247 for (unsigned int i = 0; i < count; i++) |
| 162 { | 248 { |
| 163 while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->out_info[j], c->lookup_props, NULL)) | 249 if (!skippy_iter.prev ()) |
| 164 { | 250 return false; |
| 165 if (unlikely (j + 1 == count - i)) | |
| 166 » return false; | |
| 167 j--; | |
| 168 } | |
| 169 | 251 |
| 170 if (likely (!match_func (c->buffer->out_info[j].codepoint, backtrack[i], match_data))) | 252 if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data))) |
| 171 return false; | 253 return false; |
| 172 } | 254 } |
| 173 | 255 |
| 174 return true; | 256 return true; |
| 175 } | 257 } |
| 176 | 258 |
| 177 static inline bool match_lookahead (hb_apply_context_t *c, | 259 static inline bool match_lookahead (hb_apply_context_t *c, |
| 178 unsigned int count, | 260 unsigned int count, |
| 179 const USHORT lookahead[], | 261 const USHORT lookahead[], |
| 180 match_func_t match_func, | 262 match_func_t match_func, |
| 181 const void *match_data, | 263 const void *match_data, |
| 182 unsigned int offset) | 264 unsigned int offset) |
| 183 { | 265 { |
| 184 unsigned int i, j; | 266 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count); |
| 185 unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length); | 267 if (skippy_iter.has_no_chance ()) |
| 186 if (unlikely (c->buffer->i + offset + count > end)) | |
| 187 return false; | 268 return false; |
| 188 | 269 |
| 189 for (i = 0, j = c->buffer->i + offset; i < count; i++, j++) | 270 for (unsigned int i = 0; i < count; i++) |
| 190 { | 271 { |
| 191 while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[j], c->lookup_props, NULL)) | 272 if (!skippy_iter.next ()) |
| 192 { | 273 return false; |
| 193 if (unlikely (j + count - i == end)) | |
| 194 » return false; | |
| 195 j++; | |
| 196 } | |
| 197 | 274 |
| 198 if (likely (!match_func (c->buffer->info[j].codepoint, lookahead[i], match_data))) | 275 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data))) |
| 199 return false; | 276 return false; |
| 200 } | 277 } |
| 201 | 278 |
| 202 return true; | 279 return true; |
| 203 } | 280 } |
| 204 | 281 |
| 205 HB_END_DECLS | |
| 206 | 282 |
| 207 | 283 |
| 208 struct LookupRecord | 284 struct LookupRecord |
| 209 { | 285 { |
| 210 inline bool sanitize (hb_sanitize_context_t *c) { | 286 inline bool sanitize (hb_sanitize_context_t *c) { |
| 211 TRACE_SANITIZE (); | 287 TRACE_SANITIZE (); |
| 212 return c->check_struct (this); | 288 return c->check_struct (this); |
| 213 } | 289 } |
| 214 | 290 |
| 215 USHORT sequenceIndex; /* Index into current glyph | 291 USHORT sequenceIndex; /* Index into current glyph |
| 216 * sequence--first glyph = 0 */ | 292 * sequence--first glyph = 0 */ |
| 217 USHORT lookupListIndex; /* Lookup to apply to that | 293 USHORT lookupListIndex; /* Lookup to apply to that |
| 218 * position--zero--based */ | 294 * position--zero--based */ |
| 219 public: | 295 public: |
| 220 DEFINE_SIZE_STATIC (4); | 296 DEFINE_SIZE_STATIC (4); |
| 221 }; | 297 }; |
| 222 | 298 |
| 223 | 299 |
| 224 HB_BEGIN_DECLS | |
| 225 | 300 |
| 226 static inline bool apply_lookup (hb_apply_context_t *c, | 301 static inline bool apply_lookup (hb_apply_context_t *c, |
| 227 unsigned int count, /* Including the first glyph */ | 302 unsigned int count, /* Including the first glyph */ |
| 228 unsigned int lookupCount, | 303 unsigned int lookupCount, |
| 229 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ | 304 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ |
| 230 apply_lookup_func_t apply_func) | 305 apply_lookup_func_t apply_func) |
| 231 { | 306 { |
| 232 unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length); | 307 unsigned int end = MIN (c->buffer->len, c->buffer->idx + c->context_length); |
| 233 if (unlikely (count == 0 || c->buffer->i + count > end)) | 308 if (unlikely (count == 0 || c->buffer->idx + count > end)) |
| 234 return false; | 309 return false; |
| 235 | 310 |
| 236 /* TODO We don't support lookupRecord arrays that are not increasing: | 311 /* TODO We don't support lookupRecord arrays that are not increasing: |
| 237 * Should be easy for in_place ones at least. */ | 312 * Should be easy for in_place ones at least. */ |
| 238 | 313 |
| 239 /* Note: If sublookup is reverse, it will underflow after the first loop | 314 /* Note: If sublookup is reverse, it will underflow after the first loop |
| 240 * and we jump out of it. Not entirely disastrous. So we don't check | 315 * and we jump out of it. Not entirely disastrous. So we don't check |
| 241 * for reverse lookup here. | 316 * for reverse lookup here. |
| 242 */ | 317 */ |
| 243 for (unsigned int i = 0; i < count; /* NOP */) | 318 for (unsigned int i = 0; i < count; /* NOP */) |
| 244 { | 319 { |
| 245 while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->info[c->buffer->i], c->lookup_props, NULL)) | 320 if (unlikely (c->buffer->idx == end)) |
| 321 return true; |
| 322 while (c->should_mark_skip_current_glyph ()) |
| 246 { | 323 { |
| 247 if (unlikely (c->buffer->i == end)) | |
| 248 return true; | |
| 249 /* No lookup applied for this index */ | 324 /* No lookup applied for this index */ |
| 250 c->buffer->next_glyph (); | 325 c->buffer->next_glyph (); |
| 326 if (unlikely (c->buffer->idx == end)) |
| 327 return true; |
| 251 } | 328 } |
| 252 | 329 |
| 253 if (lookupCount && i == lookupRecord->sequenceIndex) | 330 if (lookupCount && i == lookupRecord->sequenceIndex) |
| 254 { | 331 { |
| 255 unsigned int old_pos = c->buffer->i; | 332 unsigned int old_pos = c->buffer->idx; |
| 256 | 333 |
| 257 /* Apply a lookup */ | 334 /* Apply a lookup */ |
| 258 bool done = apply_func (c, lookupRecord->lookupListIndex); | 335 bool done = apply_func (c, lookupRecord->lookupListIndex); |
| 259 | 336 |
| 260 lookupRecord++; | 337 lookupRecord++; |
| 261 lookupCount--; | 338 lookupCount--; |
| 262 /* Err, this is wrong if the lookup jumped over some glyphs */ | 339 /* Err, this is wrong if the lookup jumped over some glyphs */ |
| 263 i += c->buffer->i - old_pos; | 340 i += c->buffer->idx - old_pos; |
| 264 if (unlikely (c->buffer->i == end)) | 341 if (unlikely (c->buffer->idx == end)) |
| 265 return true; | 342 return true; |
| 266 | 343 |
| 267 if (!done) | 344 if (!done) |
| 268 goto not_applied; | 345 goto not_applied; |
| 269 } | 346 } |
| 270 else | 347 else |
| 271 { | 348 { |
| 272 not_applied: | 349 not_applied: |
| 273 /* No lookup applied for this index */ | 350 /* No lookup applied for this index */ |
| 274 c->buffer->next_glyph (); | 351 c->buffer->next_glyph (); |
| 275 i++; | 352 i++; |
| 276 } | 353 } |
| 277 } | 354 } |
| 278 | 355 |
| 279 return true; | 356 return true; |
| 280 } | 357 } |
| 281 | 358 |
| 282 HB_END_DECLS | |
| 283 | 359 |
| 284 | 360 |
| 285 /* Contextual lookups */ | 361 /* Contextual lookups */ |
| 286 | 362 |
| 287 struct ContextLookupContext | 363 struct ContextLookupContext |
| 288 { | 364 { |
| 289 ContextFuncs funcs; | 365 ContextFuncs funcs; |
| 290 const void *match_data; | 366 const void *match_data; |
| 291 }; | 367 }; |
| 292 | 368 |
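
The lines elided just below hold the Rule/RuleSet plumbing that feeds ContextLookupContext into the
helpers above. The core pattern is roughly the following hedged sketch (built only from match_input
and apply_lookup as shown above, with an illustrative function name, not a verbatim copy of the
skipped code):

    /* Sketch: match `count` glyphs starting at the current one, then run the nested
     * lookups only inside the matched span by narrowing context_length on a copy. */
    static inline bool
    context_rule_apply (hb_apply_context_t *c,
                        unsigned int count, const USHORT input[],
                        unsigned int lookupCount, const LookupRecord lookupRecord[],
                        ContextLookupContext &lookup_context)
    {
      hb_apply_context_t new_c = *c;
      return match_input (c, count, input,
                          lookup_context.funcs.match, lookup_context.match_data,
                          &new_c.context_length)
          && apply_lookup (&new_c, count, lookupCount, lookupRecord,
                           lookup_context.funcs.apply);
    }
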
| (...skipping 83 matching lines...) |
| 376 | 452 |
| 377 | 453 |
| 378 struct ContextFormat1 | 454 struct ContextFormat1 |
| 379 { | 455 { |
| 380 friend struct Context; | 456 friend struct Context; |
| 381 | 457 |
| 382 private: | 458 private: |
| 383 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const | 459 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const |
| 384 { | 460 { |
| 385 TRACE_APPLY (); | 461 TRACE_APPLY (); |
| 386 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint); | 462 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint); |
| 387 if (likely (index == NOT_COVERED)) | 463 if (likely (index == NOT_COVERED)) |
| 388 return false; | 464 return false; |
| 389 | 465 |
| 390 const RuleSet &rule_set = this+ruleSet[index]; | 466 const RuleSet &rule_set = this+ruleSet[index]; |
| 391 struct ContextLookupContext lookup_context = { | 467 struct ContextLookupContext lookup_context = { |
| 392 {match_glyph, apply_func}, | 468 {match_glyph, apply_func}, |
| 393 NULL | 469 NULL |
| 394 }; | 470 }; |
| 395 return rule_set.apply (c, lookup_context); | 471 return rule_set.apply (c, lookup_context); |
| 396 } | 472 } |
| (...skipping 18 matching lines...) |
| 415 | 491 |
| 416 | 492 |
| 417 struct ContextFormat2 | 493 struct ContextFormat2 |
| 418 { | 494 { |
| 419 friend struct Context; | 495 friend struct Context; |
| 420 | 496 |
| 421 private: | 497 private: |
| 422 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const | 498 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const |
| 423 { | 499 { |
| 424 TRACE_APPLY (); | 500 TRACE_APPLY (); |
| 425 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint); | 501 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint); |
| 426 if (likely (index == NOT_COVERED)) | 502 if (likely (index == NOT_COVERED)) |
| 427 return false; | 503 return false; |
| 428 | 504 |
| 429 const ClassDef &class_def = this+classDef; | 505 const ClassDef &class_def = this+classDef; |
| 430 index = class_def (c->buffer->info[c->buffer->i].codepoint); | 506 index = class_def (c->buffer->info[c->buffer->idx].codepoint); |
| 431 const RuleSet &rule_set = this+ruleSet[index]; | 507 const RuleSet &rule_set = this+ruleSet[index]; |
| 432 struct ContextLookupContext lookup_context = { | 508 struct ContextLookupContext lookup_context = { |
| 433 {match_class, apply_func}, | 509 {match_class, apply_func}, |
| 434 &class_def | 510 &class_def |
| 435 }; | 511 }; |
| 436 return rule_set.apply (c, lookup_context); | 512 return rule_set.apply (c, lookup_context); |
| 437 } | 513 } |
| 438 | 514 |
| 439 inline bool sanitize (hb_sanitize_context_t *c) { | 515 inline bool sanitize (hb_sanitize_context_t *c) { |
| 440 TRACE_SANITIZE (); | 516 TRACE_SANITIZE (); |
| (...skipping 19 matching lines...) |
| 460 | 536 |
| 461 | 537 |
| 462 struct ContextFormat3 | 538 struct ContextFormat3 |
| 463 { | 539 { |
| 464 friend struct Context; | 540 friend struct Context; |
| 465 | 541 |
| 466 private: | 542 private: |
| 467 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const | 543 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const |
| 468 { | 544 { |
| 469 TRACE_APPLY (); | 545 TRACE_APPLY (); |
| 470 unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->i].codepoint); | 546 unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->idx].codepoint); |
| 471 if (likely (index == NOT_COVERED)) | 547 if (likely (index == NOT_COVERED)) |
| 472 return false; | 548 return false; |
| 473 | 549 |
| 474 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount); | 550 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount); |
| 475 struct ContextLookupContext lookup_context = { | 551 struct ContextLookupContext lookup_context = { |
| 476 {match_coverage, apply_func}, | 552 {match_coverage, apply_func}, |
| 477 this | 553 this |
| 478 }; | 554 }; |
| 479 return context_lookup (c, | 555 return context_lookup (c, |
| 480 glyphCount, (const USHORT *) (coverage + 1), | 556 glyphCount, (const USHORT *) (coverage + 1), |
| (...skipping 74 matching lines...) |
| 555 const USHORT backtrack[], | 631 const USHORT backtrack[], |
| 556 unsigned int inputCount, /* Including the first glyph (not matched) */ | 632 unsigned int inputCount, /* Including the first glyph (not matched) */ |
| 557 const USHORT input[], /* Array of input values--start with second glyph */ | 633 const USHORT input[], /* Array of input values--start with second glyph */ |
| 558 unsigned int lookaheadCount, | 634 unsigned int lookaheadCount, |
| 559 const USHORT lookahead[], | 635 const USHORT lookahead[], |
| 560 unsigned int lookupCount, | 636 unsigned int lookupCount, |
| 561 const LookupRecord lookupRecord[], | 637 const LookupRecord lookupRecord[], |
| 562 ChainContextLookupContext &lookup_context) | 638 ChainContextLookupContext &lookup_context) |
| 563 { | 639 { |
| 564 /* First guess */ | 640 /* First guess */ |
| 565 if (unlikely (c->buffer->out_len < backtrackCount || | 641 if (unlikely (c->buffer->backtrack_len () < backtrackCount || |
| 566 » » c->buffer->i + inputCount + lookaheadCount > c->buffer->len || | 642 » » c->buffer->idx + inputCount + lookaheadCount > c->buffer->len || |
| 567 inputCount + lookaheadCount > c->context_length)) | 643 inputCount + lookaheadCount > c->context_length)) |
| 568 return false; | 644 return false; |
| 569 | 645 |
| 570 hb_apply_context_t new_context = *c; | 646 hb_apply_context_t new_context = *c; |
| 571 return match_backtrack (c, | 647 return match_backtrack (c, |
| 572 backtrackCount, backtrack, | 648 backtrackCount, backtrack, |
| 573 lookup_context.funcs.match, lookup_context.match_data[0]) | 649 lookup_context.funcs.match, lookup_context.match_data[0]) |
| 574 && match_input (c, | 650 && match_input (c, |
| 575 inputCount, input, | 651 inputCount, input, |
| 576 lookup_context.funcs.match, lookup_context.match_data[1], | 652 lookup_context.funcs.match, lookup_context.match_data[1], |
| (...skipping 86 matching lines...) |
| 663 }; | 739 }; |
| 664 | 740 |
| 665 struct ChainContextFormat1 | 741 struct ChainContextFormat1 |
| 666 { | 742 { |
| 667 friend struct ChainContext; | 743 friend struct ChainContext; |
| 668 | 744 |
| 669 private: | 745 private: |
| 670 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const | 746 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const |
| 671 { | 747 { |
| 672 TRACE_APPLY (); | 748 TRACE_APPLY (); |
| 673 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint); | 749 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint); |
| 674 if (likely (index == NOT_COVERED)) | 750 if (likely (index == NOT_COVERED)) |
| 675 return false; | 751 return false; |
| 676 | 752 |
| 677 const ChainRuleSet &rule_set = this+ruleSet[index]; | 753 const ChainRuleSet &rule_set = this+ruleSet[index]; |
| 678 struct ChainContextLookupContext lookup_context = { | 754 struct ChainContextLookupContext lookup_context = { |
| 679 {match_glyph, apply_func}, | 755 {match_glyph, apply_func}, |
| 680 {NULL, NULL, NULL} | 756 {NULL, NULL, NULL} |
| 681 }; | 757 }; |
| 682 return rule_set.apply (c, lookup_context); | 758 return rule_set.apply (c, lookup_context); |
| 683 } | 759 } |
| (...skipping 17 matching lines...) |
| 701 }; | 777 }; |
| 702 | 778 |
| 703 struct ChainContextFormat2 | 779 struct ChainContextFormat2 |
| 704 { | 780 { |
| 705 friend struct ChainContext; | 781 friend struct ChainContext; |
| 706 | 782 |
| 707 private: | 783 private: |
| 708 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const | 784 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const |
| 709 { | 785 { |
| 710 TRACE_APPLY (); | 786 TRACE_APPLY (); |
| 711 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint); | 787 unsigned int index = (this+coverage) (c->buffer->info[c->buffer->idx].codepoint); |
| 712 if (likely (index == NOT_COVERED)) | 788 if (likely (index == NOT_COVERED)) |
| 713 return false; | 789 return false; |
| 714 | 790 |
| 715 const ClassDef &backtrack_class_def = this+backtrackClassDef; | 791 const ClassDef &backtrack_class_def = this+backtrackClassDef; |
| 716 const ClassDef &input_class_def = this+inputClassDef; | 792 const ClassDef &input_class_def = this+inputClassDef; |
| 717 const ClassDef &lookahead_class_def = this+lookaheadClassDef; | 793 const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
| 718 | 794 |
| 719 index = input_class_def (c->buffer->info[c->buffer->i].codepoint); | 795 index = input_class_def (c->buffer->info[c->buffer->idx].codepoint); |
| 720 const ChainRuleSet &rule_set = this+ruleSet[index]; | 796 const ChainRuleSet &rule_set = this+ruleSet[index]; |
| 721 struct ChainContextLookupContext lookup_context = { | 797 struct ChainContextLookupContext lookup_context = { |
| 722 {match_class, apply_func}, | 798 {match_class, apply_func}, |
| 723 {&backtrack_class_def, | 799 {&backtrack_class_def, |
| 724 &input_class_def, | 800 &input_class_def, |
| 725 &lookahead_class_def} | 801 &lookahead_class_def} |
| 726 }; | 802 }; |
| 727 return rule_set.apply (c, lookup_context); | 803 return rule_set.apply (c, lookup_context); |
| 728 } | 804 } |
| 729 | 805 |
| (...skipping 34 matching lines...) |
| 764 { | 840 { |
| 765 friend struct ChainContext; | 841 friend struct ChainContext; |
| 766 | 842 |
| 767 private: | 843 private: |
| 768 | 844 |
| 769 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const | 845 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const |
| 770 { | 846 { |
| 771 TRACE_APPLY (); | 847 TRACE_APPLY (); |
| 772 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); | 848 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); |
| 773 | 849 |
| 774 unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->i].codepoint); | 850 unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->idx].codepoint); |
| 775 if (likely (index == NOT_COVERED)) | 851 if (likely (index == NOT_COVERED)) |
| 776 return false; | 852 return false; |
| 777 | 853 |
| 778 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); | 854 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); |
| 779 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); | 855 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); |
| 780 struct ChainContextLookupContext lookup_context = { | 856 struct ChainContextLookupContext lookup_context = { |
| 781 {match_coverage, apply_func}, | 857 {match_coverage, apply_func}, |
| 782 {this, this, this} | 858 {this, this, this} |
| 783 }; | 859 }; |
| 784 return chain_context_lookup (c, | 860 return chain_context_lookup (c, |
| (...skipping 184 matching lines...) |
| 969 scriptList; /* ScriptList table */ | 1045 scriptList; /* ScriptList table */ |
| 970 OffsetTo<FeatureList> | 1046 OffsetTo<FeatureList> |
| 971 featureList; /* FeatureList table */ | 1047 featureList; /* FeatureList table */ |
| 972 OffsetTo<LookupList> | 1048 OffsetTo<LookupList> |
| 973 lookupList; /* LookupList table */ | 1049 lookupList; /* LookupList table */ |
| 974 public: | 1050 public: |
| 975 DEFINE_SIZE_STATIC (10); | 1051 DEFINE_SIZE_STATIC (10); |
| 976 }; | 1052 }; |
| 977 | 1053 |
| 978 | 1054 |
| 979 HB_END_DECLS | |
| 980 | 1055 |
| 981 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */ | 1056 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */ |