/*
 * Copyright © 2018 Adobe Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Adobe Author(s): Michiharu Ariza
 */
#ifndef HB_OT_CFF_COMMON_HH
#define HB_OT_CFF_COMMON_HH

#include "hb-open-type.hh"
#include "hb-bimap.hh"
#include "hb-ot-layout-common.hh"
#include "hb-cff-interp-dict-common.hh"
#include "hb-subset-plan.hh"

namespace CFF {

using namespace OT;

#define CFF_UNDEF_CODE 0xFFFFFFFF
using objidx_t = hb_serialize_context_t::objidx_t;
using whence_t = hb_serialize_context_t::whence_t;
template<typename Type>
static inline const Type& StructAtOffsetOrNull (const void *P, unsigned int offset)
{ return offset ? StructAtOffset<Type> (P, offset) : Null (Type); }
using str_buff_t = hb_vector_t<unsigned char>;
using str_buff_vec_t = hb_vector_t<str_buff_t>;
using glyph_to_sid_map_t = hb_vector_t<code_pair_t>;
struct length_f_t
{
  template <typename Iterable,
            hb_requires (hb_is_iterable (Iterable))>
  unsigned operator () (const Iterable &_) const { return hb_len (hb_iter (_)); }

  unsigned operator () (unsigned _) const { return _; }
}
HB_FUNCOBJ (length_f);
template <typename COUNT>
struct CFFIndex
{
  unsigned int offset_array_size () const
  { return offSize * (count + 1); }
  template <typename Iterable,
            hb_requires (hb_is_iterable (Iterable))>
  bool serialize (hb_serialize_context_t *c,
                  const Iterable &iterable,
                  const unsigned *p_data_size = nullptr,
                  unsigned min_off_size = 0)
  {
    TRACE_SERIALIZE (this);
    unsigned data_size;
    if (p_data_size)
      data_size = *p_data_size;
    else
      total_size (iterable, &data_size);

    auto it = hb_iter (iterable);
    if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false);
    unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
    if (unlikely (!ret)) return_trace (false);
    for (const auto &_ : +it)
    {
      unsigned len = _.length;
      if (!len) continue;
      hb_memcpy (ret, _.arrayZ, len);
      ret += len;
    }
    return_trace (true);
  }
  template <typename Iterator,
            hb_requires (hb_is_iterator (Iterator))>
  bool serialize_header (hb_serialize_context_t *c,
                         Iterator it,
                         unsigned data_size,
                         unsigned min_off_size = 0)
  {
    TRACE_SERIALIZE (this);

    unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;
    off_size = hb_max (min_off_size, off_size);

    /* serialize CFFIndex header */
    if (unlikely (!c->extend_min (this))) return_trace (false);
    this->count = hb_len (it);
    if (!this->count) return_trace (true);
    if (unlikely (!c->extend (this->offSize))) return_trace (false);
    this->offSize = off_size;
    if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false)))
      return_trace (false);

    /* serialize indices */
    unsigned int offset = 1;
    if (HB_OPTIMIZE_SIZE_VAL)
    {
      unsigned int i = 0;
      for (const auto &_ : +it)
      {
        set_offset_at (i++, offset);
        offset += length_f (_);
      }
      set_offset_at (i, offset);
    }
    else
      switch (off_size)
      {
        case 1:
        {
          HBUINT8 *p = (HBUINT8 *) offsets;
          for (const auto &_ : +it)
          {
            *p++ = offset;
            offset += length_f (_);
          }
          *p = offset;
          break;
        }
        case 2:
        {
          HBUINT16 *p = (HBUINT16 *) offsets;
          for (const auto &_ : +it)
          {
            *p++ = offset;
            offset += length_f (_);
          }
          *p = offset;
          break;
        }
        case 3:
        {
          HBUINT24 *p = (HBUINT24 *) offsets;
          for (const auto &_ : +it)
          {
            *p++ = offset;
            offset += length_f (_);
          }
          *p = offset;
          break;
        }
        case 4:
        {
          HBUINT32 *p = (HBUINT32 *) offsets;
          for (const auto &_ : +it)
          {
            *p++ = offset;
            offset += length_f (_);
          }
          *p = offset;
          break;
        }
        default:
          break;
      }

    assert (offset == data_size + 1);
    return_trace (true);
  }
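
  /* Note: off_size above is the minimal number of bytes needed to store
   * (data_size + 1), since the last offset equals data_size + 1.  Worked
   * example (illustrative, not from the source): data_size = 300 gives
   * hb_bit_storage (301) = 9 bits, so off_size = (9 + 7) / 8 = 2 bytes,
   * possibly bumped up to min_off_size. */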
  template <typename Iterable,
            hb_requires (hb_is_iterable (Iterable))>
  static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0)
  {
    auto it = + hb_iter (iterable);
    if (!it)
    {
      if (data_size) *data_size = 0;
      return COUNT::static_size;
    }

    unsigned total = 0;
    for (const auto &_ : +it)
      total += length_f (_);

    if (data_size) *data_size = total;

    unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
    off_size = hb_max (min_off_size, off_size);

    return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
  }
  void set_offset_at (unsigned int index, unsigned int offset)
  {
    assert (index <= count);

    unsigned int size = offSize;
    const HBUINT8 *p = offsets;
    switch (size)
    {
      case 1: ((HBUINT8  *) p)[index] = offset; break;
      case 2: ((HBUINT16 *) p)[index] = offset; break;
      case 3: ((HBUINT24 *) p)[index] = offset; break;
      case 4: ((HBUINT32 *) p)[index] = offset; break;
      default: return;
    }
  }
  unsigned int offset_at (unsigned int index) const
  {
    assert (index <= count);

    unsigned int size = offSize;
    const HBUINT8 *p = offsets;
    switch (size)
    {
      case 1: return ((HBUINT8  *) p)[index];
      case 2: return ((HBUINT16 *) p)[index];
      case 3: return ((HBUINT24 *) p)[index];
      case 4: return ((HBUINT32 *) p)[index];
      default: return 0;
    }
  }

  const unsigned char *data_base () const
  { return (const unsigned char *) this + min_size + offSize.static_size - 1 + offset_array_size (); }
  hb_ubytes_t operator [] (unsigned int index) const
  {
    if (unlikely (index >= count)) return hb_ubytes_t ();
    _hb_compiler_memory_r_barrier ();
    unsigned offset0 = offset_at (index);
    unsigned offset1 = offset_at (index + 1);
    if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
      return hb_ubytes_t ();
    return hb_ubytes_t (data_base () + offset0, offset1 - offset0);
  }
  unsigned int get_size () const
  {
    if (count)
      return min_size + offSize.static_size + offset_array_size () + (offset_at (count) - 1);
    return min_size; /* empty CFFIndex contains count only */
  }
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this) &&
                          (count == 0 || /* empty INDEX */
                           (count < count + 1u &&
                            c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
                            c->check_array (offsets, offSize, count + 1u) &&
                            c->check_array ((const HBUINT8 *) data_base (), 1, offset_at (count))))));
  }
  COUNT   count;   /* Number of object data.  Note there are (count + 1) offsets. */
  HBUINT8 offSize; /* The byte size of each offset in the offsets array. */
  HBUINT8 offsets[HB_VAR_ARRAY];
                   /* The array of (count + 1) offsets into the objects array (1-based). */
  /* HBUINT8 data[HB_VAR_ARRAY];  Object data */

  DEFINE_SIZE_MIN (COUNT::static_size);
};
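
/* Illustrative (hypothetical) read-side usage of the accessor above, assuming
 * a sanitized CFFIndex<HBUINT16> named `index`:
 *
 *   hb_ubytes_t obj = index[i];                      // bytes of the i'th object, or empty
 *   if (obj.length) process (obj.arrayZ, obj.length); // `process` is a placeholder
 *
 * operator[] returns an empty hb_ubytes_t for an out-of-range index or
 * inconsistent offsets. */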
/* Top Dict, Font Dict, Private Dict */
struct Dict : UnsizedByteStr
{
  template <typename DICTVAL, typename OP_SERIALIZER, typename ...Ts>
  bool serialize (hb_serialize_context_t *c,
                  const DICTVAL &dictval,
                  OP_SERIALIZER &opszr,
                  Ts&&... ds)
  {
    TRACE_SERIALIZE (this);
    for (unsigned int i = 0; i < dictval.get_count (); i++)
      if (unlikely (!opszr.serialize (c, dictval[i], std::forward<Ts> (ds)...)))
        return_trace (false);

    return_trace (true);
  }
  template <typename T, typename V>
  static bool serialize_int_op (hb_serialize_context_t *c, op_code_t op, V value, op_code_t intOp)
  {
    /* serialize the integer operand first */
    if (unlikely ((!serialize_int<T, V> (c, intOp, value))))
      return false;

    TRACE_SERIALIZE (this);
    /* serialize the opcode */
    HBUINT8 *p = c->allocate_size<HBUINT8> (OpCode_Size (op), false);
    if (unlikely (!p)) return_trace (false);
    if (Is_OpCode_ESC (op))
    {
      *p++ = OpCode_escape;
      op = Unmake_OpCode_ESC (op);
    }
    *p = op;
    return_trace (true);
  }
  template <typename V>
  static bool serialize_int4_op (hb_serialize_context_t *c, op_code_t op, V value)
  { return serialize_int_op<HBINT32> (c, op, value, OpCode_longintdict); }

  template <typename V>
  static bool serialize_int2_op (hb_serialize_context_t *c, op_code_t op, V value)
  { return serialize_int_op<HBINT16> (c, op, value, OpCode_shortint); }
  template <typename T, int int_op>
  static bool serialize_link_op (hb_serialize_context_t *c, op_code_t op, objidx_t link, whence_t whence)
  {
    /* The operand bytes follow the int_op prefix byte; the link is resolved into them. */
    T &ofs = *(T *) (c->head + OpCode_Size (int_op));
    if (unlikely (!serialize_int_op<T> (c, op, 0, int_op))) return false;
    c->add_link (ofs, link, whence);
    return true;
  }
  static bool serialize_link4_op (hb_serialize_context_t *c, op_code_t op, objidx_t link, whence_t whence = whence_t::Head)
  { return serialize_link_op<HBINT32, OpCode_longintdict> (c, op, link, whence); }

  static bool serialize_link2_op (hb_serialize_context_t *c, op_code_t op, objidx_t link, whence_t whence = whence_t::Head)
  { return serialize_link_op<HBINT16, OpCode_shortint> (c, op, link, whence); }
};
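
/* In CFF DICT data, operands are encoded before their operator.  Rough sketch
 * of what serialize_int2_op above emits for a 16-bit value (per the CFF spec;
 * byte values shown for illustration only):
 *
 *   28 hi lo <op>   // 28 = shortint prefix, then the big-endian value,
 *                   // then the 1- or 2-byte (escaped) operator
 *
 * serialize_link*_op emit a zero placeholder operand and register it with
 * hb_serialize_context_t::add_link so the offset is filled in at pack time. */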
struct TopDict : Dict {};
struct FontDict : Dict {};
struct PrivateDict : Dict {};
struct table_info_t
{
  void init () { offset = size = 0; link = 0; }

  unsigned int offset;
  unsigned int size;
  objidx_t     link;
};
template <typename COUNT>
struct FDArray : CFFIndex<COUNT>
{
  template <typename DICTVAL, typename INFO, typename Iterator, typename OP_SERIALIZER>
  bool serialize (hb_serialize_context_t *c,
                  Iterator it,
                  OP_SERIALIZER &opszr)
  {
    TRACE_SERIALIZE (this);

    /* serialize INDEX data */
    hb_vector_t<unsigned> sizes;
    if (it.is_random_access_iterator)
      sizes.alloc (hb_len (it));

    c->push ();
    char *data_base = c->head;
    + it
    | hb_map ([&] (const hb_pair_t<const DICTVAL&, const INFO&> &_)
              {
                FontDict *dict = c->start_embed<FontDict> ();
                dict->serialize (c, _.first, opszr, _.second);
                return c->head - (const char *) dict;
              })
    | hb_sink (sizes)
    ;
    unsigned data_size = c->head - data_base;
    c->pop_pack (false);

    if (unlikely (sizes.in_error ())) return_trace (false);

    /* It just happens that the above is packed right after the header below. */

    /* serialize INDEX header */
    return_trace (CFFIndex<COUNT>::serialize_header (c, hb_iter (sizes), data_size));
  }
};
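
/* FDSelect maps each glyph ID of a CIDFont to the index of the Font DICT
 * (in the FDArray above) that should be used to render it.  The structs
 * below cover format 0 (one byte per glyph) and format 3 (glyph ranges);
 * this summary follows the CFF spec and the accessors defined below. */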
struct FDSelect0
{
  bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this))))
      return_trace (false);
    if (unlikely (!c->check_array (fds, c->get_num_glyphs ())))
      return_trace (false);

    return_trace (true);
  }

  unsigned get_fd (hb_codepoint_t glyph) const
  { return fds[glyph]; }

  hb_pair_t<unsigned, hb_codepoint_t> get_fd_range (hb_codepoint_t glyph) const
  { return {fds[glyph], glyph + 1}; }

  unsigned int get_size (unsigned int num_glyphs) const
  { return HBUINT8::static_size * num_glyphs; }

  HBUINT8 fds[HB_VAR_ARRAY];

  DEFINE_SIZE_MIN (0);
};
template <typename GID_TYPE, typename FD_TYPE>
struct FDSelect3_4_Range
{
  bool sanitize (hb_sanitize_context_t *c, const void * /*nullptr*/, unsigned int fdcount) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  first < c->get_num_glyphs () && (fd < fdcount));
  }

  GID_TYPE first;
  FD_TYPE  fd;

  DEFINE_SIZE_STATIC (GID_TYPE::static_size + FD_TYPE::static_size);
};
template <typename GID_TYPE, typename FD_TYPE>
struct FDSelect3_4
{
  unsigned int get_size () const
  { return GID_TYPE::static_size * 2 + ranges.get_size (); }

  bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) &&
                    ranges.sanitize (c, nullptr, fdcount) &&
                    (nRanges () != 0) &&
                    ranges[0].first == 0)))
      return_trace (false);

    for (unsigned int i = 1; i < nRanges (); i++)
      if (unlikely (ranges[i - 1].first >= ranges[i].first))
        return_trace (false);

    if (unlikely (!(sentinel ().sanitize (c) &&
                    (sentinel () == c->get_num_glyphs ()))))
      return_trace (false);

    return_trace (true);
  }

  static int _cmp_range (const void *_key, const void *_item)
  {
    hb_codepoint_t glyph = * (hb_codepoint_t *) _key;
    FDSelect3_4_Range<GID_TYPE, FD_TYPE> *range = (FDSelect3_4_Range<GID_TYPE, FD_TYPE> *) _item;

    if (glyph < range[0].first) return -1;
    if (glyph < range[1].first) return 0;
    return +1;
  }

  unsigned get_fd (hb_codepoint_t glyph) const
  {
    auto *range = hb_bsearch (glyph, &ranges[0], nRanges () - 1, sizeof (ranges[0]), _cmp_range);
    return range ? range->fd : ranges[nRanges () - 1].fd;
  }

  hb_pair_t<unsigned, hb_codepoint_t> get_fd_range (hb_codepoint_t glyph) const
  {
    auto *range = hb_bsearch (glyph, &ranges[0], nRanges () - 1, sizeof (ranges[0]), _cmp_range);
    unsigned fd = range ? range->fd : ranges[nRanges () - 1].fd;
    hb_codepoint_t end = range ? range[1].first : ranges[nRanges () - 1].first;
    return {fd, end};
  }

  GID_TYPE       &nRanges ()        { return ranges.len; }
  GID_TYPE        nRanges () const  { return ranges.len; }
  GID_TYPE       &sentinel ()       { return StructAfter<GID_TYPE> (ranges[nRanges () - 1]); }
  const GID_TYPE &sentinel () const { return StructAfter<GID_TYPE> (ranges[nRanges () - 1]); }

  ArrayOf<FDSelect3_4_Range<GID_TYPE, FD_TYPE>, GID_TYPE> ranges;
  /* GID_TYPE sentinel */

  DEFINE_SIZE_ARRAY (GID_TYPE::static_size, ranges);
};
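
/* Illustrative format-3 mapping (example data, not from the source): with
 * ranges = { {first: 0, fd: 2}, {first: 10, fd: 5} } and sentinel = 20,
 * get_fd() returns 2 for glyphs 0..9 and 5 for glyphs 10..19, and
 * get_fd_range(3) returns {2, 10}, i.e. the fd plus one past the last glyph
 * of its range. */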
typedef FDSelect3_4<HBUINT16, HBUINT8> FDSelect3;
typedef FDSelect3_4_Range<HBUINT16, HBUINT8> FDSelect3_Range;
struct FDSelect
{
  bool serialize (hb_serialize_context_t *c, const FDSelect &src, unsigned int num_glyphs)
  {
    TRACE_SERIALIZE (this);
    unsigned int size = src.get_size (num_glyphs);
    FDSelect *dest = c->allocate_size<FDSelect> (size, false);
    if (unlikely (!dest)) return_trace (false);
    hb_memcpy (dest, &src, size);
    return_trace (true);
  }

  unsigned int get_size (unsigned int num_glyphs) const
  {
    switch (format)
    {
    case 0: return format.static_size + u.format0.get_size (num_glyphs);
    case 3: return format.static_size + u.format3.get_size ();
    default:return 0;
    }
  }

  unsigned get_fd (hb_codepoint_t glyph) const
  {
    if (this == &Null (FDSelect)) return 0;

    switch (format)
    {
    case 0: return u.format0.get_fd (glyph);
    case 3: return u.format3.get_fd (glyph);
    default:return 0;
    }
  }

  /* Returns pair of fd and one after last glyph in range. */
  hb_pair_t<unsigned, hb_codepoint_t> get_fd_range (hb_codepoint_t glyph) const
  {
    if (this == &Null (FDSelect)) return {0, 1};

    switch (format)
    {
    case 0: return u.format0.get_fd_range (glyph);
    case 3: return u.format3.get_fd_range (glyph);
    default:return {0, 1};
    }
  }

  bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this)))
      return_trace (false);

    switch (format)
    {
    case 0: return_trace (u.format0.sanitize (c, fdcount));
    case 3: return_trace (u.format3.sanitize (c, fdcount));
    default:return_trace (false);
    }
  }

  HBUINT8 format;
  union {
    FDSelect0 format0;
    FDSelect3 format3;
  } u;

  DEFINE_SIZE_MIN (1);
};
template <typename COUNT>
struct Subrs : CFFIndex<COUNT>
{
  typedef COUNT count_type;
  typedef CFFIndex<COUNT> SUPER;
};
} /* namespace CFF */

#endif /* HB_OT_CFF_COMMON_HH */