Initial work on the ESM transformation

be5invis 2022-07-16 19:26:49 -07:00
parent 2472c9cff2
commit b8205a63aa
303 changed files with 1959 additions and 2450 deletions
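Every file touched below follows the same mechanical conversion: require() calls become import declarations with explicit .mjs specifiers, exports assignments become export declarations, and the "use strict" directive becomes redundant under ESM. A minimal before/after sketch of the pattern (function body elided; the specifiers are taken from the first file below):

// Before (CommonJS)
"use strict";
const { buildGlyphs } = require("../glyphs/index");
exports.buildFont = async function buildFont(argv, para) { /* ... */ };

// After (ESM): explicit .mjs specifier, export declaration, no directive needed
import { buildGlyphs } from "../glyphs/index.mjs";
export async function buildFont(argv, para) { /* ... */ }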

View file

@ -1,24 +1,18 @@
"use strict";
const { CreateEmptyFont } = require("./empty-font");
const { buildGlyphs } = require("../glyphs/index");
const { finalizeFont } = require("./finalize/index");
const { convertOtd } = require("./otd-conv/index");
const Caching = require("./caching/index");
const { buildOtl } = require("../otl/index");
const { assignFontNames } = require("../meta/naming");
const { copyFontMetrics } = require("../meta/aesthetics");
exports.buildFont = async function buildFont(argv, para) {
import { CreateEmptyFont } from "./empty-font.mjs";
import { buildGlyphs } from "../glyphs/index.mjs";
import { finalizeFont } from "./finalize/index.mjs";
import { convertOtd } from "./otd-conv/index.mjs";
import * as Caching from "./caching/index.mjs";
import { buildOtl } from "../otl/index.mjs";
import { assignFontNames } from "../meta/naming.mjs";
import { copyFontMetrics } from "../meta/aesthetics.mjs";
("use strict");
export async function buildFont(argv, para) {
const gs = buildGlyphs(para);
const baseFont = CreateEmptyFont(argv);
assignFontNames(para, baseFont);
copyFontMetrics(gs.fontMetrics, baseFont);
const otl = buildOtl(para, gs.glyphStore);
// Regulate
const excludeChars = new Set();
if (para.excludedCharRanges) {
@ -26,14 +20,12 @@ exports.buildFont = async function buildFont(argv, para) {
for (let p = start; p <= end; p++) excludeChars.add(p);
}
}
// Finalize (like geometry conversion)
const cache = await Caching.load(argv.iCache, argv.menu.version, argv.cacheFreshAgeKey);
const finalGs = finalizeFont(cache, para, gs.glyphStore, excludeChars, otl);
if (cache.isUpdated()) {
await Caching.save(argv.oCache, argv.menu.version, cache, true);
}
const font = convertOtd(baseFont, otl, finalGs);
return { font, glyphStore: finalGs, cacheUpdated: cache.isUpdated() };
};
}
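With buildFont now a named ESM export, call sites switch from require() to an import. A hedged sketch of such a caller follows; the module path is hypothetical and argv/para are placeholders that the real build driver prepares:

// Illustrative only: the actual call site lives in the build driver, not in this diff.
import { buildFont } from "./build-font/index.mjs"; // hypothetical path

const argv = { /* build options: iCache, oCache, menu, cacheFreshAgeKey, ... (placeholder) */ };
const para = { /* resolved font parameters (placeholder) */ };
const { font, glyphStore, cacheUpdated } = await buildFont(argv, para);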

View file

@ -1,19 +1,15 @@
"use strict";
const fs = require("fs");
const zlib = require("zlib");
const { encode, decode } = require("@msgpack/msgpack");
import fs from "fs";
import zlib from "zlib";
import { encode, decode } from "@msgpack/msgpack";
const Edition = 20;
const MAX_AGE = 16;
class GfEntry {
constructor(age, value) {
this.age = age;
this.value = value;
}
}
class Cache {
constructor(freshAgeKey) {
this.freshAgeKey = freshAgeKey;
@ -40,21 +36,18 @@ class Cache {
this.historyAgeKeys[0] === this.freshAgeKey
? this.historyAgeKeys
: [this.freshAgeKey, ...this.historyAgeKeys];
return {
version: version + "@" + Edition,
ageKeys: mergedAgeKeys,
gf: gfRep
};
}
isEmpty() {
return this.gf.size == 0;
}
isUpdated() {
return this.diff.size != 0;
}
// Geometry flattening conversion cache
getGF(k) {
const entry = this.gf.get(k);
@ -79,8 +72,7 @@ class Cache {
}
}
}
exports.load = async function (path, version, freshAgeKey) {
export const load = async function (path, version, freshAgeKey) {
let cache = new Cache(freshAgeKey);
if (path && fs.existsSync(path)) {
const buf = zlib.gunzipSync(await fs.promises.readFile(path));
@ -88,21 +80,19 @@ exports.load = async function (path, version, freshAgeKey) {
}
return cache;
};
exports.save = async function savePTCache(path, version, cache, diffOnly) {
export const save = async function savePTCache(path, version, cache, diffOnly) {
if (path) {
const buf = encode(cache.toRep(version, diffOnly));
const bufZip = zlib.gzipSync(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength));
await fs.promises.writeFile(path, bufZip);
}
};
exports.merge = async function (base, diff, version, freshAgeKey) {
const cacheDiff = await exports.load(diff, version, freshAgeKey);
export const merge = async function (base, diff, version, freshAgeKey) {
const cacheDiff = await load(diff, version, freshAgeKey);
if (!cacheDiff.isEmpty()) {
const cacheBase = await exports.load(base, version, freshAgeKey);
const cacheBase = await load(base, version, freshAgeKey);
cacheBase.merge(cacheDiff);
await exports.save(base, version, cacheBase, false);
await save(base, version, cacheBase, false);
}
if (fs.existsSync(diff)) await fs.promises.rm(diff);
};
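The caching module keeps its three entry points (load, save, merge) as named exports, so the namespace import used in the build-font file above keeps working. A minimal sketch of the load/save round trip, with placeholder paths and keys:

import * as Caching from "./caching/index.mjs";

const version = "0.0.0";   // placeholder version string
const freshAgeKey = "dev"; // placeholder age key
// Load a gzipped msgpack cache (an empty Cache is returned when the file is absent),
// let the build read and write geometry entries via getGF/saveGF, then persist only the diff.
const cache = await Caching.load("ipc-cache.mpz", version, freshAgeKey);
/* ... build steps that use the cache ... */
if (cache.isUpdated()) await Caching.save("ipc-cache-diff.mpz", version, cache, true);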

View file

@ -1,8 +1,6 @@
"use strict";
import { Ot } from "ot-builder";
const { Ot } = require("ot-builder");
exports.CreateEmptyFont = function (argv) {
export const CreateEmptyFont = function (argv) {
let font = {
head: new Ot.Head.Table(),
hhea: new Ot.MetricHead.Hhea(),

View file

@ -1,13 +1,4 @@
"use strict";
const { Radical } = require("../../support/gr");
exports.gcFont = function (glyphStore, excludedChars, otl, cfg) {
markSweepOtlLookups(otl.GSUB);
markSweepOtlLookups(otl.GPOS);
const sink = markGlyphs(glyphStore, excludedChars, otl, cfg);
return sweep(glyphStore, sink);
};
import { Radical } from "../../support/gr.mjs";
function markSweepOtlLookups(table) {
if (!table || !table.features || !table.lookups) return;
@ -65,7 +56,6 @@ function sweepFeatures(table, accessibleLookupsIds) {
}
table.features = features1;
}
function markGlyphs(glyphStore, excludedChars, otl, cfg) {
const sink = markGlyphsInitial(glyphStore, excludedChars);
while (markGlyphsStep(glyphStore, sink, otl, cfg));
@ -88,7 +78,6 @@ function markGlyphsInitial(glyphStore, excludedChars) {
}
function markGlyphsStep(glyphStore, sink, otl, cfg) {
const glyphCount = sink.size;
if (otl.GSUB) {
for (const l in otl.GSUB.lookups) {
const lookup = otl.GSUB.lookups[l];
@ -99,7 +88,6 @@ function markGlyphsStep(glyphStore, sink, otl, cfg) {
const glyphCount1 = sink.size;
return glyphCount1 > glyphCount;
}
function markGlyphsLookupImpl(sink, lookup, cfg) {
switch (lookup.type) {
case "gsub_single":
@ -116,7 +104,6 @@ function markGlyphsLookupImpl(sink, lookup, cfg) {
return markGlyphsGsubReverse(sink, lookup, cfg);
}
}
function markGlyphsGsubSingle(sink, lookup, cfg) {
const st = lookup.substitutions;
for (const k in st) if (sink.has(k) && st[k]) sink.add(st[k]);
@ -149,7 +136,12 @@ function markGlyphsGsubReverse(sink, lookup, cfg) {
}
}
}
function sweep(glyphStore, gnSet) {
return glyphStore.filterByName(gnSet);
}
export const gcFont = function (glyphStore, excludedChars, otl, cfg) {
markSweepOtlLookups(otl.GSUB);
markSweepOtlLookups(otl.GPOS);
const sink = markGlyphs(glyphStore, excludedChars, otl, cfg);
return sweep(glyphStore, sink);
};

View file

@ -1,19 +1,10 @@
"use strict";
const TypoGeom = require("typo-geom");
const Geom = require("../../support/geometry/index");
const { Point } = require("../../support/geometry/point");
const { Transform } = require("../../support/geometry/transform");
const CurveUtil = require("../../support/geometry/curve-util");
exports.finalizeGlyphs = function finalizeGlyphs(cache, para, glyphStore) {
const skew = Math.tan(((para.slopeAngle || 0) / 180) * Math.PI);
regulateGlyphStore(cache, skew, glyphStore);
return glyphStore;
};
import * as TypoGeom from "typo-geom";
import * as Geom from "../../support/geometry/index.mjs";
import { Point } from "../../support/geometry/point.mjs";
import { Transform } from "../../support/geometry/transform.mjs";
import * as CurveUtil from "../../support/geometry/curve-util.mjs";
///////////////////////////////////////////////////////////////////////////////////////////////////
function regulateGlyphStore(cache, skew, glyphStore) {
const compositeMemo = new Map();
for (const g of glyphStore.glyphs()) {
@ -26,24 +17,19 @@ function regulateGlyphStore(cache, skew, glyphStore) {
if (!compositeMemo.get(g)) flattenSimpleGlyph(cache, skew, g);
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////
function memoSet(memo, g, v) {
memo.set(g, v);
return v;
}
function regulateCompositeGlyph(glyphStore, memo, g) {
if (memo.has(g)) return memo.get(g);
let refs = g.geometry.asReferences();
if (!refs) return memoSet(memo, g, false);
for (const sr of refs) {
const gn = glyphStore.queryNameOf(sr.glyph);
if (!gn) return memoSet(memo, g, false);
}
// De-doppelganger
while (refs.length === 1 && regulateCompositeGlyph(glyphStore, memo, refs[0].glyph)) {
const sr = refs[0];
@ -54,10 +40,8 @@ function regulateCompositeGlyph(glyphStore, memo, g) {
}
refs = g.geometry.asReferences();
}
return memoSet(memo, g, true);
}
function flattenSimpleGlyph(cache, skew, g) {
const ck = Geom.hashGeometry(g.geometry);
const cached = cache.getGF(ck);
@ -78,9 +62,7 @@ function flattenSimpleGlyph(cache, skew, g) {
if (ck) cache.saveGF(ck, CurveUtil.shapeToRep(cs));
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////
class SimplifyGeometry extends Geom.GeometryBase {
constructor(g) {
super();
@ -120,7 +102,6 @@ class SimplifyGeometry extends Geom.GeometryBase {
return `SimplifyGeometry{${sTarget}}`;
}
}
class FairizedShapeSink {
constructor() {
this.contours = [];
@ -153,7 +134,6 @@ class FairizedShapeSink {
}
this.lineTo(x, y);
}
// Contour cleaning code
alignHVKnots(c0) {
const c = c0.slice(0);
@ -224,11 +204,9 @@ class FairizedShapeSink {
c.length = n;
lengthAfter = c.length;
} while (lengthAfter < lengthBefore);
return c;
}
}
// Disjoint set for coordinate alignment
class CoordinateAligner {
constructor(c, lens, lensSet) {
@ -268,12 +246,10 @@ class CoordinateAligner {
}
}
}
const GetX = z => z.x;
const SetX = (z, x) => (z.x = x);
const GetY = z => z.y;
const SetY = (z, y) => (z.y = y);
function isOccurrent(zFirst, zLast) {
return (
zFirst.type === Point.Type.Corner &&
@ -291,3 +267,8 @@ function aligned(a, b, c) {
function between(a, b, c) {
return (a <= b && b <= c) || (a >= b && b >= c);
}
export function finalizeGlyphs(cache, para, glyphStore) {
const skew = Math.tan(((para.slopeAngle || 0) / 180) * Math.PI);
regulateGlyphStore(cache, skew, glyphStore);
return glyphStore;
}

View file

@ -1,23 +1,11 @@
"use strict";
const { finalizeGlyphs } = require("./glyphs");
const { gcFont } = require("./gc");
const { Nwid, Wwid } = require("../../support/gr");
exports.finalizeFont = function (cache, para, glyphStore, excludedCodePoints, restFont) {
assignGrAndCodeRank(glyphStore, Nwid, Wwid);
assignSubRank(glyphStore);
glyphStore = gcFont(glyphStore, excludedCodePoints, restFont, {});
glyphStore = finalizeGlyphs(cache, para, glyphStore);
validateMonospace(para, glyphStore);
return glyphStore;
};
import { finalizeGlyphs } from "./glyphs.mjs";
import { gcFont } from "./gc.mjs";
import { Nwid, Wwid } from "../../support/gr.mjs";
function assignGrAndCodeRank(glyphStore, ...flatteners) {
for (const g of glyphStore.glyphs()) {
g.codeRank = 0xffffffff;
for (const c of glyphStore.flattenCodes(g, flatteners)) if (c < g.codeRank) g.codeRank = c;
g.grRank = 0;
for (let i = 0; i < flatteners.length; i++) if (flatteners[i].get(g)) g.grRank |= 1 << i;
}
@ -26,7 +14,6 @@ function assignSubRank(glyphStore) {
let sr = 0;
for (const g of glyphStore.glyphs()) g.subRank = sr++;
}
// In FontConfig, a font is considered "monospace" if and only if all non-combining characters
// (AW > 0) have the same width. We use this method to validate whether our "Fixed" subfamilies
// are properly built.
@ -43,3 +30,11 @@ function validateMonospace(para, glyphStore) {
throw new Error("Unreachable! Building monospace with more than 2 character widths");
}
}
export const finalizeFont = function (cache, para, glyphStore, excludedCodePoints, restFont) {
assignGrAndCodeRank(glyphStore, Nwid, Wwid);
assignSubRank(glyphStore);
glyphStore = gcFont(glyphStore, excludedCodePoints, restFont, {});
glyphStore = finalizeGlyphs(cache, para, glyphStore);
validateMonospace(para, glyphStore);
return glyphStore;
};

View file

@ -1,5 +1,4 @@
const { Joining, AnyCv, TieMark, Nwid, Wwid } = require("../../support/gr");
import { Joining, AnyCv, TieMark, Nwid, Wwid } from "../../support/gr.mjs";
const ApplePostNames = new Map([
/* spell-checker: disable */
[0xd, "nonmarkingreturn"],
@ -260,11 +259,9 @@ const ApplePostNames = new Map([
[0x111, "dcroat"]
/* spell-checker: enable */
]);
function byCode(gSrc, primaryUnicode, conflictSet) {
if (gSrc.glyphRank === 9999) return ".notdef";
if (gSrc.glyphRank === 9998) return ".null";
let preferredName = null;
if (primaryUnicode) {
preferredName =
@ -275,7 +272,6 @@ function byCode(gSrc, primaryUnicode, conflictSet) {
}
return preferredName;
}
function formatCodePointHex(u) {
return u.toString(16).padStart(4, "0").toUpperCase();
}
@ -326,14 +322,12 @@ function nameByGr(gr, gSrcBase, gOtBase, internalNameMap, conflictSet) {
}
return 0;
}
function byBuildOrder(rank, gSrc, gnOrig) {
if (!gnOrig) gnOrig = `.g${rank}`;
gnOrig = Joining.amendOtName(gnOrig, Joining.get(gSrc));
return gnOrig;
}
exports.byCode = byCode;
exports.bySpacing = bySpacing;
exports.byGr = byGr;
exports.byBuildOrder = byBuildOrder;
export { byCode };
export { bySpacing };
export { byGr };
export { byBuildOrder };

View file

@ -1,30 +1,7 @@
const { Ot } = require("ot-builder");
const { Point } = require("../../support/geometry/point");
const Gr = require("../../support/gr");
const { byCode, bySpacing, byGr, byBuildOrder } = require("./glyph-name");
exports.convertGlyphs = function convertGlyphs(gsOrig) {
const sortedEntries = Array.from(gsOrig.namedEntries(Gr.Nwid, Gr.Wwid)).sort(byRank);
const gs = new MappedGlyphStore();
const cmap = new Ot.Cmap.Table();
for (const [name, gSrc] of sortedEntries) {
gs.declare(name, gSrc);
const us = gsOrig.queryUnicodeOf(gSrc);
if (us) {
for (const u of us) {
if (!(isFinite(u - 0) && u)) continue;
cmap.unicode.set(u, gs.queryBySourceGlyph(gSrc));
gs.setPrimaryUnicode(gSrc, u);
}
}
}
for (const [name, gSrc] of sortedEntries) gs.fill(name, gSrc);
gs.fillOtGlyphNames();
return { glyphs: gs, cmap };
};
import { Ot } from "ot-builder";
import { Point } from "../../support/geometry/point.mjs";
import * as Gr from "../../support/gr.mjs";
import { byCode, bySpacing, byGr, byBuildOrder } from "./glyph-name.mjs";
function byRank([gna, a], [gnb, b]) {
return (
b.glyphRank - a.glyphRank ||
@ -33,7 +10,6 @@ function byRank([gna, a], [gnb, b]) {
a.subRank - b.subRank
);
}
class MappedGlyphStore {
constructor() {
this.m_nameMapping = new Map();
@ -48,14 +24,12 @@ class MappedGlyphStore {
setPrimaryUnicode(source, u) {
this.m_primaryUnicodeMapping.set(u, source);
}
queryBySourceGlyph(source) {
return this.m_mapping.get(source);
}
queryByName(name) {
return this.m_nameMapping.get(name);
}
decideOrder() {
const gs = Ot.ListGlyphStoreFactory.createStoreFromList([...this.m_mapping.values()]);
return gs.decideOrder();
@ -63,10 +37,8 @@ class MappedGlyphStore {
fill(name, source) {
const g = this.queryBySourceGlyph(source);
if (!g) throw new Error("Unreachable");
// Fill metrics
g.horizontal = { start: 0, end: source.advanceWidth };
// Fill Geometry
if (source.geometry.isEmpty()) return;
const rs = source.geometry.asReferences();
@ -81,13 +53,10 @@ class MappedGlyphStore {
let conflictSet = new Set();
let rev = new Map();
for (const [u, g] of this.m_primaryUnicodeMapping) rev.set(g, u);
const glyphsInBuildOrder = Array.from(this.m_mapping).sort(
([a], [b]) => a.subRank - b.subRank
);
for (const [gSrc, gOt] of glyphsInBuildOrder) gOt.name = undefined;
// Name by Unicode
for (const [gSrc, gOt] of glyphsInBuildOrder) {
gOt.name = byCode(gSrc, rev.get(gSrc), conflictSet);
@ -111,7 +80,6 @@ class MappedGlyphStore {
for (const [gSrc, gOt] of glyphsInBuildOrder) {
gOt.name = byBuildOrder(gSrc.subRank, gSrc, gOt.name);
}
// validate
{
let gnSet = new Set();
@ -121,7 +89,6 @@ class MappedGlyphStore {
}
}
}
fillReferences(g, rs) {
const gl = new Ot.Glyph.GeometryList();
for (const ref of rs) {
@ -152,3 +119,22 @@ class MappedGlyphStore {
g.geometry = cs;
}
}
export function convertGlyphs(gsOrig) {
const sortedEntries = Array.from(gsOrig.namedEntries(Gr.Nwid, Gr.Wwid)).sort(byRank);
const gs = new MappedGlyphStore();
const cmap = new Ot.Cmap.Table();
for (const [name, gSrc] of sortedEntries) {
gs.declare(name, gSrc);
const us = gsOrig.queryUnicodeOf(gSrc);
if (us) {
for (const u of us) {
if (!(isFinite(u - 0) && u)) continue;
cmap.unicode.set(u, gs.queryBySourceGlyph(gSrc));
gs.setPrimaryUnicode(gSrc, u);
}
}
}
for (const [name, gSrc] of sortedEntries) gs.fill(name, gSrc);
gs.fillOtGlyphNames();
return { glyphs: gs, cmap };
}

View file

@ -1,12 +1,9 @@
const { convertGlyphs } = require("./glyphs");
const { convertGsub, convertGpos, convertGdef } = require("./layout");
exports.convertOtd = function convertOtd(baseFont, otl, gs) {
import { convertGlyphs } from "./glyphs.mjs";
import { convertGsub, convertGpos, convertGdef } from "./layout.mjs";
export function convertOtd(baseFont, otl, gs) {
const { glyphs, cmap } = convertGlyphs(gs);
const gsub = convertGsub(otl.GSUB, glyphs);
const gpos = convertGpos(otl.GPOS, glyphs);
const gdef = convertGdef(otl.GDEF, glyphs);
return { ...baseFont, glyphs, cmap, gsub, gpos, gdef };
};
}

View file

@ -1,326 +0,0 @@
const { Ot } = require("ot-builder");
class LookupStore {
constructor(handlers, glyphs) {
this.glyphs = glyphs;
this.m_handlers = handlers;
this.m_mapping = new Map();
}
extract() {
return Array.from(this.m_mapping.values());
}
query(id) {
return this.m_mapping.get(id);
}
declare(id, otdLookup) {
if (this.m_mapping.has(id)) return;
const handler = this.m_handlers[otdLookup.type];
if (!handler) return;
this.m_mapping.set(id, handler.init());
}
fill(id, otdLookup) {
const dst = this.query(id);
const handler = this.m_handlers[otdLookup.type];
if (!dst || !handler) return;
if (otdLookup.subtables) throw new Error("Unreachable.");
handler.fill(dst, otdLookup, this);
}
}
const GsubSingleHandler = {
init() {
return new Ot.Gsub.Single();
},
fill(dst, src, store) {
const st = src.substitutions;
for (const k in st) {
const from = store.glyphs.queryByName(k);
const to = store.glyphs.queryByName(st[k]);
if (from && to) dst.mapping.set(from, to);
}
}
};
const GsubMultipleHandler = {
init() {
return new Ot.Gsub.Multiple();
},
fill(dst, src, store) {
const st = src.substitutions;
for (const k in st) {
const from = store.glyphs.queryByName(k);
const to = mapGlyphListAll(st[k], store);
if (!from || !to) continue;
dst.mapping.set(from, to);
}
}
};
const GsubAlternateHandler = {
init() {
return new Ot.Gsub.Alternate();
},
fill: GsubMultipleHandler.fill
};
const GsubLigatureHandler = {
init() {
return new Ot.Gsub.Ligature();
},
fill(dst, src, store) {
const st = src.substitutions;
for (const { from: _from, to: _to } of st) {
const to = store.glyphs.queryByName(_to);
const from = mapGlyphListAll(_from, store);
if (!from || !to) continue;
dst.mapping.push({ from, to });
}
}
};
const GsubChainingHandler = {
init() {
return new Ot.Gsub.Chaining();
},
fill(dst, src, store) {
out: for (const st of src.rules) {
const match = [];
for (const m of st.match) {
const m1 = mapGlyphListSome(m, store);
if (!m1) continue out;
match.push(new Set(m1));
}
const inputBegins = st.inputBegins;
const inputEnds = st.inputEnds;
const applications = [];
for (const ap of st.apply) {
const lookup = store.query(ap.lookup);
if (!lookup) continue out;
applications.push({ at: ap.at - inputBegins, apply: lookup });
}
dst.rules.push({ match, inputBegins, inputEnds, applications });
}
}
};
const GsubReverseHandler = {
init() {
return new Ot.Gsub.ReverseSub();
},
fill(dst, src, store) {
out: for (const st of src.rules) {
const match = [];
const doSubAt = st.inputIndex;
const replacement = new Map();
for (let j = 0; j < st.match.length; j++) {
{
const m1 = new Set();
for (let k = 0; k < st.match[j].length; k++) {
const gFrom = store.glyphs.queryByName(st.match[j][k]);
if (gFrom) m1.add(gFrom);
}
if (!m1.size) continue out;
match.push(m1);
}
if (j === doSubAt) {
for (let k = 0; k < st.match[j].length; k++) {
const gFrom = store.glyphs.queryByName(st.match[j][k]);
const gTo = store.glyphs.queryByName(st.to[k]);
if (!gFrom) continue;
if (gTo) {
replacement.set(gFrom, gTo);
} else {
replacement.set(gFrom, gFrom);
}
}
}
}
dst.rules.push({ match, doSubAt, replacement });
}
}
};
function mapGlyphListAll(gl, store) {
const out = [];
for (const item of gl) {
const fg = store.glyphs.queryByName(item);
if (!fg) return null;
out.push(fg);
}
return out;
}
function mapGlyphListSome(gl, store) {
const out = [];
for (const item of gl) {
const fg = store.glyphs.queryByName(item);
if (!fg) continue;
out.push(fg);
}
if (!out.length) return null;
return out;
}
const GsubHandlers = {
gsub_single: GsubSingleHandler,
gsub_multiple: GsubMultipleHandler,
gsub_alternate: GsubAlternateHandler,
gsub_ligature: GsubLigatureHandler,
gsub_chaining: GsubChainingHandler,
gsub_reverse: GsubReverseHandler
};
const GposMarkToBaseHandler = {
init() {
return new Ot.Gpos.MarkToBase();
},
fill(dst, src, store) {
const mm = collectClassMap(src.marks);
dst.marks = convertMarkRecords(src.marks, mm, store);
dst.bases = convertBaseRecords(src.bases, mm, store);
}
};
const GposMarkToMarkHandler = {
init() {
return new Ot.Gpos.MarkToMark();
},
fill(dst, src, store) {
const mm = collectClassMap(src.marks);
dst.marks = convertMarkRecords(src.marks, mm, store);
dst.baseMarks = convertBaseRecords(src.bases, mm, store);
}
};
function collectClassMap(marks) {
let n = 0;
const m = new Map();
for (const gn in marks) {
const mark = marks[gn];
if (!m.has(mark.class)) {
m.set(mark.class, n);
n++;
}
}
return m;
}
function convertMarkRecords(marks, mm, store) {
const out = new Map();
for (const gn in marks) {
const mark = marks[gn];
const g = store.glyphs.queryByName(gn);
if (!g) continue;
let markAnchors = [];
markAnchors[mm.get(mark.class)] = { x: mark.x, y: mark.y };
out.set(g, { markAnchors: markAnchors });
}
return out;
}
function convertBaseRecords(bases, mm, store) {
const out = new Map();
for (const gn in bases) {
const baseObj = bases[gn];
const g = store.glyphs.queryByName(gn);
if (!g) continue;
const baseArray = [];
for (const bkStr in baseObj) {
baseArray[mm.get(bkStr)] = baseObj[bkStr];
}
out.set(g, { baseAnchors: baseArray });
}
return out;
}
const GposHandlers = {
gpos_mark_to_base: GposMarkToBaseHandler,
gpos_mark_to_mark: GposMarkToMarkHandler
};
class FeatureStore {
constructor(lookups) {
this.lookupStore = lookups;
this.m_mapping = new Map();
}
extract() {
return Array.from(this.m_mapping.values());
}
query(id) {
return this.m_mapping.get(id);
}
fill(id, data) {
const tag = id.slice(0, 4);
const lookups = [];
for (const lid of data) {
const lookup = this.lookupStore.query(lid);
if (lookup) lookups.push(lookup);
}
this.m_mapping.set(id, { tag, lookups });
}
}
class ScriptLanguageStore {
constructor(features) {
this.featureStore = features;
this.m_scriptMapping = new Map();
}
extract() {
return this.m_scriptMapping;
}
fill(id, data) {
const scriptTag = id.slice(0, 4);
const languageTag = id.slice(5, 9).padEnd(4);
let sr = this.m_scriptMapping.get(scriptTag);
if (!sr) {
sr = { defaultLanguage: null, languages: new Map() };
this.m_scriptMapping.set(scriptTag, sr);
}
const lr = this.createLanguageRecord(data);
if (languageTag === "dflt" || languageTag === "DFLT") sr.defaultLanguage = lr;
else sr.languages.set(languageTag, lr);
}
createLanguageRecord(data) {
const features = [];
for (const fid of data.features) {
const feature = this.featureStore.query(fid);
if (feature) features.push(feature);
}
return {
requiredFeature: this.featureStore.query(data.requiredFeature) || null,
features: features
};
}
}
exports.convertGsub = ConvertGsubGposT(GsubHandlers, Ot.Gsub.Table);
exports.convertGpos = ConvertGsubGposT(GposHandlers, Ot.Gpos.Table);
function ConvertGsubGposT(handlers, T) {
return function (table, glyphs) {
if (!table) return null;
const ls = new LookupStore(handlers, glyphs);
if (table.lookups) {
if (table.lookupOrder) {
for (const l of table.lookupOrder) ls.declare(l, table.lookups[l]);
}
for (const l in table.lookups) ls.declare(l, table.lookups[l]);
for (const l in table.lookups) ls.fill(l, table.lookups[l]);
}
const fs = new FeatureStore(ls);
if (table.features) {
for (const f in table.features) fs.fill(f, table.features[f]);
}
const ss = new ScriptLanguageStore(fs);
if (table.languages) {
for (const sl in table.languages) ss.fill(sl, table.languages[sl]);
}
return new T(ss.extract(), fs.extract(), ls.extract());
};
}
exports.convertGdef = convertGdef;
function convertGdef(otdGdef, glyphs) {
const gdef = new Ot.Gdef.Table();
gdef.glyphClassDef = new Map();
for (const gn in otdGdef.glyphClassDef) {
const g = glyphs.queryByName(gn);
if (g) gdef.glyphClassDef.set(g, otdGdef.glyphClassDef[gn]);
}
return gdef;
}

View file

@ -0,0 +1,339 @@
import { Ot } from "ot-builder";
class LookupStore {
constructor(handlers, glyphs) {
this.glyphs = glyphs;
this.m_handlers = handlers;
this.m_mapping = new Map();
}
extract() {
return Array.from(this.m_mapping.values());
}
query(id) {
return this.m_mapping.get(id);
}
declare(id, otdLookup) {
if (this.m_mapping.has(id))
return;
const handler = this.m_handlers[otdLookup.type];
if (!handler)
return;
this.m_mapping.set(id, handler.init());
}
fill(id, otdLookup) {
const dst = this.query(id);
const handler = this.m_handlers[otdLookup.type];
if (!dst || !handler)
return;
if (otdLookup.subtables)
throw new Error("Unreachable.");
handler.fill(dst, otdLookup, this);
}
}
const GsubSingleHandler = {
init() {
return new Ot.Gsub.Single();
},
fill(dst, src, store) {
const st = src.substitutions;
for (const k in st) {
const from = store.glyphs.queryByName(k);
const to = store.glyphs.queryByName(st[k]);
if (from && to)
dst.mapping.set(from, to);
}
}
};
const GsubMultipleHandler = {
init() {
return new Ot.Gsub.Multiple();
},
fill(dst, src, store) {
const st = src.substitutions;
for (const k in st) {
const from = store.glyphs.queryByName(k);
const to = mapGlyphListAll(st[k], store);
if (!from || !to)
continue;
dst.mapping.set(from, to);
}
}
};
const GsubAlternateHandler = {
init() {
return new Ot.Gsub.Alternate();
},
fill: GsubMultipleHandler.fill
};
const GsubLigatureHandler = {
init() {
return new Ot.Gsub.Ligature();
},
fill(dst, src, store) {
const st = src.substitutions;
for (const { from: _from, to: _to } of st) {
const to = store.glyphs.queryByName(_to);
const from = mapGlyphListAll(_from, store);
if (!from || !to)
continue;
dst.mapping.push({ from, to });
}
}
};
const GsubChainingHandler = {
init() {
return new Ot.Gsub.Chaining();
},
fill(dst, src, store) {
out: for (const st of src.rules) {
const match = [];
for (const m of st.match) {
const m1 = mapGlyphListSome(m, store);
if (!m1)
continue out;
match.push(new Set(m1));
}
const inputBegins = st.inputBegins;
const inputEnds = st.inputEnds;
const applications = [];
for (const ap of st.apply) {
const lookup = store.query(ap.lookup);
if (!lookup)
continue out;
applications.push({ at: ap.at - inputBegins, apply: lookup });
}
dst.rules.push({ match, inputBegins, inputEnds, applications });
}
}
};
const GsubReverseHandler = {
init() {
return new Ot.Gsub.ReverseSub();
},
fill(dst, src, store) {
out: for (const st of src.rules) {
const match = [];
const doSubAt = st.inputIndex;
const replacement = new Map();
for (let j = 0; j < st.match.length; j++) {
{
const m1 = new Set();
for (let k = 0; k < st.match[j].length; k++) {
const gFrom = store.glyphs.queryByName(st.match[j][k]);
if (gFrom)
m1.add(gFrom);
}
if (!m1.size)
continue out;
match.push(m1);
}
if (j === doSubAt) {
for (let k = 0; k < st.match[j].length; k++) {
const gFrom = store.glyphs.queryByName(st.match[j][k]);
const gTo = store.glyphs.queryByName(st.to[k]);
if (!gFrom)
continue;
if (gTo) {
replacement.set(gFrom, gTo);
}
else {
replacement.set(gFrom, gFrom);
}
}
}
}
dst.rules.push({ match, doSubAt, replacement });
}
}
};
function mapGlyphListAll(gl, store) {
const out = [];
for (const item of gl) {
const fg = store.glyphs.queryByName(item);
if (!fg)
return null;
out.push(fg);
}
return out;
}
function mapGlyphListSome(gl, store) {
const out = [];
for (const item of gl) {
const fg = store.glyphs.queryByName(item);
if (!fg)
continue;
out.push(fg);
}
if (!out.length)
return null;
return out;
}
const GsubHandlers = {
gsub_single: GsubSingleHandler,
gsub_multiple: GsubMultipleHandler,
gsub_alternate: GsubAlternateHandler,
gsub_ligature: GsubLigatureHandler,
gsub_chaining: GsubChainingHandler,
gsub_reverse: GsubReverseHandler
};
const GposMarkToBaseHandler = {
init() {
return new Ot.Gpos.MarkToBase();
},
fill(dst, src, store) {
const mm = collectClassMap(src.marks);
dst.marks = convertMarkRecords(src.marks, mm, store);
dst.bases = convertBaseRecords(src.bases, mm, store);
}
};
const GposMarkToMarkHandler = {
init() {
return new Ot.Gpos.MarkToMark();
},
fill(dst, src, store) {
const mm = collectClassMap(src.marks);
dst.marks = convertMarkRecords(src.marks, mm, store);
dst.baseMarks = convertBaseRecords(src.bases, mm, store);
}
};
function collectClassMap(marks) {
let n = 0;
const m = new Map();
for (const gn in marks) {
const mark = marks[gn];
if (!m.has(mark.class)) {
m.set(mark.class, n);
n++;
}
}
return m;
}
function convertMarkRecords(marks, mm, store) {
const out = new Map();
for (const gn in marks) {
const mark = marks[gn];
const g = store.glyphs.queryByName(gn);
if (!g)
continue;
let markAnchors = [];
markAnchors[mm.get(mark.class)] = { x: mark.x, y: mark.y };
out.set(g, { markAnchors: markAnchors });
}
return out;
}
function convertBaseRecords(bases, mm, store) {
const out = new Map();
for (const gn in bases) {
const baseObj = bases[gn];
const g = store.glyphs.queryByName(gn);
if (!g)
continue;
const baseArray = [];
for (const bkStr in baseObj) {
baseArray[mm.get(bkStr)] = baseObj[bkStr];
}
out.set(g, { baseAnchors: baseArray });
}
return out;
}
const GposHandlers = {
gpos_mark_to_base: GposMarkToBaseHandler,
gpos_mark_to_mark: GposMarkToMarkHandler
};
class FeatureStore {
constructor(lookups) {
this.lookupStore = lookups;
this.m_mapping = new Map();
}
extract() {
return Array.from(this.m_mapping.values());
}
query(id) {
return this.m_mapping.get(id);
}
fill(id, data) {
const tag = id.slice(0, 4);
const lookups = [];
for (const lid of data) {
const lookup = this.lookupStore.query(lid);
if (lookup)
lookups.push(lookup);
}
this.m_mapping.set(id, { tag, lookups });
}
}
class ScriptLanguageStore {
constructor(features) {
this.featureStore = features;
this.m_scriptMapping = new Map();
}
extract() {
return this.m_scriptMapping;
}
fill(id, data) {
const scriptTag = id.slice(0, 4);
const languageTag = id.slice(5, 9).padEnd(4);
let sr = this.m_scriptMapping.get(scriptTag);
if (!sr) {
sr = { defaultLanguage: null, languages: new Map() };
this.m_scriptMapping.set(scriptTag, sr);
}
const lr = this.createLanguageRecord(data);
if (languageTag === "dflt" || languageTag === "DFLT")
sr.defaultLanguage = lr;
else
sr.languages.set(languageTag, lr);
}
createLanguageRecord(data) {
const features = [];
for (const fid of data.features) {
const feature = this.featureStore.query(fid);
if (feature)
features.push(feature);
}
return {
requiredFeature: this.featureStore.query(data.requiredFeature) || null,
features: features
};
}
}
function ConvertGsubGposT(handlers, T) {
return function (table, glyphs) {
if (!table)
return null;
const ls = new LookupStore(handlers, glyphs);
if (table.lookups) {
if (table.lookupOrder) {
for (const l of table.lookupOrder)
ls.declare(l, table.lookups[l]);
}
for (const l in table.lookups)
ls.declare(l, table.lookups[l]);
for (const l in table.lookups)
ls.fill(l, table.lookups[l]);
}
const fs = new FeatureStore(ls);
if (table.features) {
for (const f in table.features)
fs.fill(f, table.features[f]);
}
const ss = new ScriptLanguageStore(fs);
if (table.languages) {
for (const sl in table.languages)
ss.fill(sl, table.languages[sl]);
}
return new T(ss.extract(), fs.extract(), ls.extract());
};
}
function convertGdef(otdGdef, glyphs) {
const gdef = new Ot.Gdef.Table();
gdef.glyphClassDef = new Map();
for (const gn in otdGdef.glyphClassDef) {
const g = glyphs.queryByName(gn);
if (g)
gdef.glyphClassDef.set(g, otdGdef.glyphClassDef[gn]);
}
return gdef;
}
export const convertGsub = ConvertGsubGposT(GsubHandlers, Ot.Gsub.Table);
export const convertGpos = ConvertGsubGposT(GposHandlers, Ot.Gpos.Table);
export { convertGdef };