Diffstat (limited to 'Build/source/libs/harfbuzz/harfbuzz-src/src/hb-ot-layout-gpos-table.hh')
-rw-r--r--  Build/source/libs/harfbuzz/harfbuzz-src/src/hb-ot-layout-gpos-table.hh  535
1 file changed, 420 insertions(+), 115 deletions(-)
diff --git a/Build/source/libs/harfbuzz/harfbuzz-src/src/hb-ot-layout-gpos-table.hh b/Build/source/libs/harfbuzz/harfbuzz-src/src/hb-ot-layout-gpos-table.hh
index 024312d610c..d55d6ece8e5 100644
--- a/Build/source/libs/harfbuzz/harfbuzz-src/src/hb-ot-layout-gpos-table.hh
+++ b/Build/source/libs/harfbuzz/harfbuzz-src/src/hb-ot-layout-gpos-table.hh
@@ -34,6 +34,11 @@
namespace OT {
+struct MarkArray;
+static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage,
+ const MarkArray &mark_array,
+ const hb_set_t &glyphset,
+ hb_map_t* klass_mapping /* INOUT */);
/* buffer **position** var allocations */
#define attach_chain() var.i16[0] /* glyph to which this attaches to, relative to current glyphs; negative for going back, positive for forward. */
@@ -155,6 +160,22 @@ struct ValueFormat : HBUINT16
return ret;
}
+ void serialize_copy (hb_serialize_context_t *c, const void *base, const Value *values) const
+ {
+ unsigned int format = *this;
+ if (!format) return;
+
+ if (format & xPlacement) c->copy (*values++);
+ if (format & yPlacement) c->copy (*values++);
+ if (format & xAdvance) c->copy (*values++);
+ if (format & yAdvance) c->copy (*values++);
+
+ if (format & xPlaDevice) copy_device (c, base, values++);
+ if (format & yPlaDevice) copy_device (c, base, values++);
+ if (format & xAdvDevice) copy_device (c, base, values++);
+ if (format & yAdvDevice) copy_device (c, base, values++);
+ }
+
private:
bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
@@ -173,18 +194,41 @@ struct ValueFormat : HBUINT16
return true;
}
- HB_INTERNAL static OffsetTo<Device>& get_device (Value* value)
- { return *CastP<OffsetTo<Device>> (value); }
- HB_INTERNAL static const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
+ bool copy_device (hb_serialize_context_t *c, const void *base, const Value *src_value) const
+ {
+ Value *dst_value = c->copy (*src_value);
+
+ if (!dst_value) return false;
+ if (*dst_value == 0) return true;
+
+ *dst_value = 0;
+ c->push ();
+ if ((base + get_device (src_value)).copy (c))
+ {
+ c->add_link (*dst_value, c->pop_pack ());
+ return true;
+ }
+ else
+ {
+ c->pop_discard ();
+ return false;
+ }
+ }
+
+ static inline OffsetTo<Device>& get_device (Value* value)
+ {
+ return *static_cast<OffsetTo<Device> *> (value);
+ }
+ static inline const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
{
if (worked) *worked |= bool (*value);
- return *CastP<OffsetTo<Device>> (value);
+ return *static_cast<const OffsetTo<Device> *> (value);
}
- HB_INTERNAL static const HBINT16& get_short (const Value* value, bool *worked=nullptr)
+ static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
{
if (worked) *worked |= bool (*value);
- return *CastP<HBINT16> (value);
+ return *reinterpret_cast<const HBINT16 *> (value);
}
public:
@@ -237,9 +281,10 @@ struct ValueFormat : HBUINT16
};
template<typename Iterator>
-static inline void SinglePos_serialize (hb_serialize_context_t *c,
- Iterator it,
- ValueFormat valFormat);
+static void SinglePos_serialize (hb_serialize_context_t *c,
+ const void *src,
+ Iterator it,
+ ValueFormat valFormat);
struct AnchorFormat1
@@ -344,8 +389,8 @@ struct AnchorFormat3
auto *out = c->embed<AnchorFormat3> (this);
if (unlikely (!out)) return_trace (nullptr);
- out->xDeviceTable.serialize_copy (c, xDeviceTable, this, out);
- out->yDeviceTable.serialize_copy (c, yDeviceTable, this, out);
+ out->xDeviceTable.serialize_copy (c, xDeviceTable, this);
+ out->yDeviceTable.serialize_copy (c, yDeviceTable, this);
return_trace (out);
}
@@ -420,11 +465,33 @@ struct AnchorMatrix
unsigned int cols, bool *found) const
{
*found = false;
- if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
+ if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
*found = !matrixZ[row * cols + col].is_null ();
return this+matrixZ[row * cols + col];
}
+ template <typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ bool serialize (hb_serialize_context_t *c,
+ unsigned num_rows,
+ AnchorMatrix const *offset_matrix,
+ Iterator index_iter)
+ {
+ TRACE_SERIALIZE (this);
+ if (!index_iter.len ()) return_trace (false);
+ if (unlikely (!c->extend_min ((*this)))) return_trace (false);
+
+ this->rows = num_rows;
+ for (const unsigned i : index_iter)
+ {
+ auto *offset = c->embed (offset_matrix->matrixZ[i]);
+ if (!offset) return_trace (false);
+ offset->serialize_copy (c, offset_matrix->matrixZ[i], offset_matrix, c->to_bias (this));
+ }
+
+ return_trace (true);
+ }
+
bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
{
TRACE_SANITIZE (this);
@@ -438,7 +505,6 @@ struct AnchorMatrix
}
HBUINT16 rows; /* Number of rows */
- protected:
UnsizedArrayOf<OffsetTo<Anchor>>
matrixZ; /* Matrix of offsets to Anchor tables--
* from beginning of AnchorMatrix table */
@@ -451,12 +517,25 @@ struct MarkRecord
{
friend struct MarkArray;
+ unsigned get_class () const { return (unsigned) klass; }
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
}
+ MarkRecord *copy (hb_serialize_context_t *c, const void *base,
+ unsigned dst_bias, const hb_map_t *klass_mapping) const
+ {
+ TRACE_SERIALIZE (this);
+ auto *out = c->embed (this);
+ if (unlikely (!out)) return_trace (nullptr);
+
+ out->klass = klass_mapping->get (klass);
+ out->markAnchor.serialize_copy (c, markAnchor, base, dst_bias);
+ return_trace (out);
+ }
+
protected:
HBUINT16 klass; /* Class defined for this mark */
OffsetTo<Anchor>
@@ -502,6 +581,20 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
return_trace (true);
}
+ template<typename Iterator,
+ hb_requires (hb_is_source_of (Iterator, MarkRecord))>
+ bool serialize (hb_serialize_context_t *c,
+ const hb_map_t *klass_mapping,
+ const void *base,
+ Iterator it)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (*this))) return_trace (false);
+ if (unlikely (!c->check_assign (len, it.len ()))) return_trace (false);
+ c->copy_all (it, base, c->to_bias (this), klass_mapping);
+ return_trace (true);
+ }
+
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
@@ -517,8 +610,10 @@ struct SinglePosFormat1
bool intersects (const hb_set_t *glyphs) const
{ return (this+coverage).intersects (glyphs); }
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
- { if (unlikely (!(this+coverage).add_coverage (c->input))) return; }
+ { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
const Coverage &get_coverage () const { return this+coverage; }
@@ -538,14 +633,19 @@ struct SinglePosFormat1
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_serialize_context_t *c,
+ const void *src,
Iterator it,
ValueFormat valFormat)
{
- if (unlikely (!c->extend_min (*this))) return;
+ auto out = c->extend_min (*this);
+ if (unlikely (!out)) return;
if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
- for (const auto &_ : hb_second (*it))
- c->copy (_);
+ + it
+ | hb_map (hb_second)
+ | hb_apply ([&] (hb_array_t<const Value> _)
+ { valFormat.serialize_copy (c, src, &_); })
+ ;
auto glyphs =
+ it
@@ -569,7 +669,7 @@ struct SinglePosFormat1
;
bool ret = bool (it);
- SinglePos_serialize (c->serializer, it, valueFormat);
+ SinglePos_serialize (c->serializer, this, it, valueFormat);
return_trace (ret);
}
@@ -600,8 +700,10 @@ struct SinglePosFormat2
bool intersects (const hb_set_t *glyphs) const
{ return (this+coverage).intersects (glyphs); }
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
- { if (unlikely (!(this+coverage).add_coverage (c->input))) return; }
+ { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
const Coverage &get_coverage () const { return this+coverage; }
@@ -625,16 +727,20 @@ struct SinglePosFormat2
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_serialize_context_t *c,
+ const void *src,
Iterator it,
ValueFormat valFormat)
{
- if (unlikely (!c->extend_min (*this))) return;
+ auto out = c->extend_min (*this);
+ if (unlikely (!out)) return;
if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
if (unlikely (!c->check_assign (valueCount, it.len ()))) return;
- for (const auto iter : it)
- for (const auto &_ : iter.second)
- c->copy (_);
+ + it
+ | hb_map (hb_second)
+ | hb_apply ([&] (hb_array_t<const Value> _)
+ { valFormat.serialize_copy (c, src, &_); })
+ ;
auto glyphs =
+ it
@@ -665,7 +771,7 @@ struct SinglePosFormat2
;
bool ret = bool (it);
- SinglePos_serialize (c->serializer, it, valueFormat);
+ SinglePos_serialize (c->serializer, this, it, valueFormat);
return_trace (ret);
}
@@ -711,6 +817,7 @@ struct SinglePos
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_serialize_context_t *c,
+ const void *src,
Iterator glyph_val_iter_pairs,
ValueFormat valFormat)
{
@@ -721,9 +828,9 @@ struct SinglePos
u.format = format;
switch (u.format) {
- case 1: u.format1.serialize (c, glyph_val_iter_pairs, valFormat);
+ case 1: u.format1.serialize (c, src, glyph_val_iter_pairs, valFormat);
return;
- case 2: u.format2.serialize (c, glyph_val_iter_pairs, valFormat);
+ case 2: u.format2.serialize (c, src, glyph_val_iter_pairs, valFormat);
return;
default:return;
}
@@ -750,27 +857,42 @@ struct SinglePos
};
template<typename Iterator>
-static inline void
+static void
SinglePos_serialize (hb_serialize_context_t *c,
+ const void *src,
Iterator it,
ValueFormat valFormat)
-{ c->start_embed<SinglePos> ()->serialize (c, it, valFormat); }
+{ c->start_embed<SinglePos> ()->serialize (c, src, it, valFormat); }
struct PairValueRecord
{
friend struct PairSet;
+ int cmp (hb_codepoint_t k) const
+ { return secondGlyph.cmp (k); }
+
+ struct serialize_closure_t
+ {
+ const void *base;
+ const ValueFormat *valueFormats;
+ unsigned len1; /* valueFormats[0].get_len() */
+ const hb_map_t *glyph_map;
+ };
+
bool serialize (hb_serialize_context_t *c,
- unsigned length,
- const hb_map_t &glyph_map) const
+ serialize_closure_t *closure) const
{
TRACE_SERIALIZE (this);
auto *out = c->start_embed (*this);
if (unlikely (!c->extend_min (out))) return_trace (false);
-
- out->secondGlyph = glyph_map[secondGlyph];
- return_trace (c->copy (values, length));
+
+ out->secondGlyph = (*closure->glyph_map)[secondGlyph];
+
+ closure->valueFormats[0].serialize_copy (c, closure->base, &values[0]);
+ closure->valueFormats[1].serialize_copy (c, closure->base, &values[closure->len1]);
+
+ return_trace (true);
}
protected:
@@ -817,8 +939,8 @@ struct PairSet
}
bool apply (hb_ot_apply_context_t *c,
- const ValueFormat *valueFormats,
- unsigned int pos) const
+ const ValueFormat *valueFormats,
+ unsigned int pos) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
@@ -826,40 +948,26 @@ struct PairSet
unsigned int len2 = valueFormats[1].get_len ();
unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
- unsigned int count = len;
-
- /* Hand-coded bsearch. */
- if (unlikely (!count))
- return_trace (false);
- hb_codepoint_t x = buffer->info[pos].codepoint;
- int min = 0, max = (int) count - 1;
- while (min <= max)
+ const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
+ &firstPairValueRecord,
+ len,
+ record_size);
+ if (record)
{
- int mid = ((unsigned int) min + (unsigned int) max) / 2;
- const PairValueRecord *record = &StructAtOffset<PairValueRecord> (&firstPairValueRecord, record_size * mid);
- hb_codepoint_t mid_x = record->secondGlyph;
- if (x < mid_x)
- max = mid - 1;
- else if (x > mid_x)
- min = mid + 1;
- else
- {
- /* Note the intentional use of "|" instead of short-circuit "||". */
- if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
- valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
- buffer->unsafe_to_break (buffer->idx, pos + 1);
- if (len2)
- pos++;
- buffer->idx = pos;
- return_trace (true);
- }
+ /* Note the intentional use of "|" instead of short-circuit "||". */
+ if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
+ valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
+ buffer->unsafe_to_break (buffer->idx, pos + 1);
+ if (len2)
+ pos++;
+ buffer->idx = pos;
+ return_trace (true);
}
-
return_trace (false);
}
bool subset (hb_subset_context_t *c,
- const ValueFormat valueFormats[2]) const
+ const ValueFormat valueFormats[2]) const
{
TRACE_SUBSET (this);
auto snap = c->serializer->snapshot ();
@@ -875,12 +983,20 @@ struct PairSet
unsigned len2 = valueFormats[1].get_len ();
unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
+ PairValueRecord::serialize_closure_t closure =
+ {
+ this,
+ valueFormats,
+ len1,
+ &glyph_map
+ };
+
const PairValueRecord *record = &firstPairValueRecord;
unsigned count = len, num = 0;
for (unsigned i = 0; i < count; i++)
{
- if (!glyphset.has (record->secondGlyph)) continue;
- if (record->serialize (c->serializer, len1 + len2, glyph_map)) num++;
+ if (glyphset.has (record->secondGlyph)
+ && record->serialize (c->serializer, &closure)) num++;
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
@@ -891,7 +1007,6 @@ struct PairSet
struct sanitize_closure_t
{
- const void *base;
const ValueFormat *valueFormats;
unsigned int len1; /* valueFormats[0].get_len() */
unsigned int stride; /* 1 + len1 + len2 */
@@ -908,8 +1023,8 @@ struct PairSet
unsigned int count = len;
const PairValueRecord *record = &firstPairValueRecord;
- return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride) &&
- closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
+ return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
+ closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
}
protected:
@@ -935,9 +1050,11 @@ struct PairPosFormat1
;
}
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
- if (unlikely (!(this+coverage).add_coverage (c->input))) return;
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
unsigned int count = pairSet.len;
for (unsigned int i = 0; i < count; i++)
(this+pairSet[i]).collect_glyphs (c, valueFormat);
@@ -981,7 +1098,7 @@ struct PairPosFormat1
auto *o = out->pairSet.serialize_append (c->serializer);
if (unlikely (!o)) return false;
auto snap = c->serializer->snapshot ();
- bool ret = o->serialize_subset (c, _, this, out, valueFormat);
+ bool ret = o->serialize_subset (c, _, this, valueFormat);
if (!ret)
{
out->pairSet.pop ();
@@ -1011,7 +1128,6 @@ struct PairPosFormat1
unsigned int len2 = valueFormat[1].get_len ();
PairSet::sanitize_closure_t closure =
{
- this,
valueFormat,
len1,
1 + len1 + len2
@@ -1046,10 +1162,12 @@ struct PairPosFormat2
(this+classDef2).intersects (glyphs);
}
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
- if (unlikely (!(this+coverage).add_coverage (c->input))) return;
- if (unlikely (!(this+classDef2).add_coverage (c->input))) return;
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+coverage; }
@@ -1096,32 +1214,33 @@ struct PairPosFormat2
out->valueFormat2 = valueFormat2;
hb_map_t klass1_map;
- out->classDef1.serialize_subset (c, classDef1, this, out, &klass1_map);
+ out->classDef1.serialize_subset (c, classDef1, this, &klass1_map);
out->class1Count = klass1_map.get_population ();
hb_map_t klass2_map;
- out->classDef2.serialize_subset (c, classDef2, this, out, &klass2_map);
+ out->classDef2.serialize_subset (c, classDef2, this, &klass2_map);
out->class2Count = klass2_map.get_population ();
- unsigned record_len = valueFormat1.get_len () + valueFormat2.get_len ();
+ unsigned len1 = valueFormat1.get_len ();
+ unsigned len2 = valueFormat2.get_len ();
+ hb_range ((unsigned) class1Count)
| hb_filter (klass1_map)
| hb_apply ([&] (const unsigned class1_idx)
- {
- + hb_range ((unsigned) class2Count)
- | hb_filter (klass2_map)
- | hb_apply ([&] (const unsigned class2_idx)
- {
- unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_len;
- for (unsigned i = 0; i < record_len; i++)
- c->serializer->copy (values[idx+i]);
- })
- ;
- })
+ {
+ + hb_range ((unsigned) class2Count)
+ | hb_filter (klass2_map)
+ | hb_apply ([&] (const unsigned class2_idx)
+ {
+ unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
+ valueFormat1.serialize_copy (c->serializer, this, &values[idx]);
+ valueFormat2.serialize_copy (c->serializer, this, &values[idx + len1]);
+ })
+ ;
+ })
;
- const hb_set_t &glyphset = *c->plan->glyphset ();
+ const hb_set_t &glyphset = *c->plan->_glyphset_gsub;
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto it =
@@ -1217,16 +1336,14 @@ struct EntryExitRecord
return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
}
- EntryExitRecord* copy (hb_serialize_context_t *c,
- const void *src_base,
- const void *dst_base) const
+ EntryExitRecord* copy (hb_serialize_context_t *c, const void *base) const
{
TRACE_SERIALIZE (this);
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
- out->entryAnchor.serialize_copy (c, entryAnchor, src_base, dst_base);
- out->exitAnchor.serialize_copy (c, exitAnchor, src_base, dst_base);
+ out->entryAnchor.serialize_copy (c, entryAnchor, base);
+ out->exitAnchor.serialize_copy (c, exitAnchor, base);
return_trace (out);
}
@@ -1251,8 +1368,10 @@ struct CursivePosFormat1
bool intersects (const hb_set_t *glyphs) const
{ return (this+coverage).intersects (glyphs); }
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
- { if (unlikely (!(this+coverage).add_coverage (c->input))) return; }
+ { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
const Coverage &get_coverage () const { return this+coverage; }
@@ -1359,9 +1478,7 @@ struct CursivePosFormat1
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
- void serialize (hb_serialize_context_t *c,
- Iterator it,
- const void *src_base)
+ void serialize (hb_serialize_context_t *c, Iterator it, const void *base)
{
if (unlikely (!c->extend_min ((*this)))) return;
this->format = 1;
@@ -1369,7 +1486,7 @@ struct CursivePosFormat1
for (const EntryExitRecord& entry_record : + it
| hb_map (hb_second))
- c->copy (entry_record, src_base, this);
+ c->copy (entry_record, base);
auto glyphs =
+ it
@@ -1444,16 +1561,41 @@ typedef AnchorMatrix BaseArray; /* base-major--
* mark-minor--
* ordered by class--zero-based. */
+static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage,
+ const MarkArray &mark_array,
+ const hb_set_t &glyphset,
+ hb_map_t* klass_mapping /* INOUT */)
+{
+ hb_set_t orig_classes;
+
+ + hb_zip (mark_coverage, mark_array)
+ | hb_filter (glyphset, hb_first)
+ | hb_map (hb_second)
+ | hb_map (&MarkRecord::get_class)
+ | hb_sink (orig_classes)
+ ;
+
+ unsigned idx = 0;
+ for (auto klass : orig_classes.iter ())
+ {
+ if (klass_mapping->has (klass)) continue;
+ klass_mapping->set (klass, idx);
+ idx++;
+ }
+}
+
struct MarkBasePosFormat1
{
bool intersects (const hb_set_t *glyphs) const
{ return (this+markCoverage).intersects (glyphs) &&
(this+baseCoverage).intersects (glyphs); }
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
- if (unlikely (!(this+markCoverage).add_coverage (c->input))) return;
- if (unlikely (!(this+baseCoverage).add_coverage (c->input))) return;
+ if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+markCoverage; }
@@ -1501,8 +1643,70 @@ struct MarkBasePosFormat1
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
- // TODO(subset)
- return_trace (false);
+ const hb_set_t &glyphset = *c->plan->glyphset ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
+
+ if (!klass_mapping.get_population ()) return_trace (false);
+ out->classCount = klass_mapping.get_population ();
+
+ auto mark_iter =
+ + hb_zip (this+markCoverage, this+markArray)
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + mark_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->markCoverage.serialize (c->serializer, out)
+ .serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ out->markArray.serialize (c->serializer, out)
+ .serialize (c->serializer, &klass_mapping, &(this+markArray), + mark_iter
+ | hb_map (hb_second));
+
+ unsigned basecount = (this+baseArray).rows;
+ auto base_iter =
+ + hb_zip (this+baseCoverage, hb_range (basecount))
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ new_coverage.reset ();
+ + base_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->baseCoverage.serialize (c->serializer, out)
+ .serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ hb_sorted_vector_t<unsigned> base_indexes;
+ for (const unsigned row : + base_iter
+ | hb_map (hb_second))
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (base_indexes)
+ ;
+ }
+ out->baseArray.serialize (c->serializer, out)
+ .serialize (c->serializer, base_iter.len (), &(this+baseArray), base_indexes.iter ());
+
+ return_trace (true);
}
bool sanitize (hb_sanitize_context_t *c) const
@@ -1571,10 +1775,12 @@ struct MarkLigPosFormat1
{ return (this+markCoverage).intersects (glyphs) &&
(this+ligatureCoverage).intersects (glyphs); }
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
- if (unlikely (!(this+markCoverage).add_coverage (c->input))) return;
- if (unlikely (!(this+ligatureCoverage).add_coverage (c->input))) return;
+ if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+markCoverage; }
@@ -1691,10 +1897,12 @@ struct MarkMarkPosFormat1
{ return (this+mark1Coverage).intersects (glyphs) &&
(this+mark2Coverage).intersects (glyphs); }
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
- if (unlikely (!(this+mark1Coverage).add_coverage (c->input))) return;
- if (unlikely (!(this+mark2Coverage).add_coverage (c->input))) return;
+ if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+mark1Coverage; }
@@ -1746,8 +1954,70 @@ struct MarkMarkPosFormat1
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
- // TODO(subset)
- return_trace (false);
+ const hb_set_t &glyphset = *c->plan->glyphset ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
+
+ if (!klass_mapping.get_population ()) return_trace (false);
+ out->classCount = klass_mapping.get_population ();
+
+ auto mark1_iter =
+ + hb_zip (this+mark1Coverage, this+mark1Array)
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + mark1_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->mark1Coverage.serialize (c->serializer, out)
+ .serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ out->mark1Array.serialize (c->serializer, out)
+ .serialize (c->serializer, &klass_mapping, &(this+mark1Array), + mark1_iter
+ | hb_map (hb_second));
+//////
+ unsigned mark2count = (this+mark2Array).rows;
+ auto mark2_iter =
+ + hb_zip (this+mark2Coverage, hb_range (mark2count))
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ new_coverage.reset ();
+ + mark2_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->mark2Coverage.serialize (c->serializer, out)
+ .serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ hb_sorted_vector_t<unsigned> mark2_indexes;
+ for (const unsigned row : + mark2_iter
+ | hb_map (hb_second))
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (mark2_indexes)
+ ;
+ }
+ out->mark2Array.serialize (c->serializer, out)
+ .serialize (c->serializer, mark2_iter.len (), &(this+mark2Array), mark2_indexes.iter ());
+
+ return_trace (true);
}
bool sanitize (hb_sanitize_context_t *c) const
@@ -1853,6 +2123,12 @@ struct PosLookupSubTable
}
}
+ bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
+ {
+ hb_intersects_context_t c (glyphs);
+ return dispatch (&c, lookup_type);
+ }
+
protected:
union {
SinglePos single;
@@ -1897,18 +2173,37 @@ struct PosLookup : Lookup
hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
{ return dispatch (c); }
+ hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
+ {
+ if (c->is_lookup_visited (this_index))
+ return hb_closure_lookups_context_t::default_return_value ();
+
+ c->set_lookup_visited (this_index);
+ if (!intersects (c->glyphs))
+ {
+ c->set_lookup_inactive (this_index);
+ return hb_closure_lookups_context_t::default_return_value ();
+ }
+ c->set_recurse_func (dispatch_closure_lookups_recurse_func);
+
+ hb_closure_lookups_context_t::return_t ret = dispatch (c);
+ return ret;
+ }
+
template <typename set_t>
- void add_coverage (set_t *glyphs) const
+ void collect_coverage (set_t *glyphs) const
{
- hb_add_coverage_context_t<set_t> c (glyphs);
+ hb_collect_coverage_context_t<set_t> c (glyphs);
dispatch (&c);
}
- HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
+ static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
template <typename context_t>
static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
+ HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);
+
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{ return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
@@ -1930,14 +2225,17 @@ struct GPOS : GSUBGPOS
static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
const PosLookup& get_lookup (unsigned int i) const
- { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }
+ { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }
static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
bool subset (hb_subset_context_t *c) const
- { return GSUBGPOS::subset<PosLookup> (c); }
+ {
+ hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_features);
+ return GSUBGPOS::subset<PosLookup> (&l);
+ }
bool sanitize (hb_sanitize_context_t *c) const
{ return GSUBGPOS::sanitize<PosLookup> (c); }
@@ -2060,12 +2358,19 @@ struct GPOS_accelerator_t : GPOS::accelerator_t {};
#ifndef HB_NO_OT_LAYOUT
template <typename context_t>
-/*static*/ inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
+/*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
return l.dispatch (c);
}
-/*static*/ inline bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
+
+/*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
+{
+ const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index);
+ return l.closure_lookups (c, this_index);
+}
+
+/*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
{
const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
unsigned int saved_lookup_props = c->lookup_props;