import/export cleanup
parent ead4159c55 · commit 82913e9d7a
48 changed files with 195 additions and 234 deletions
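The changes follow one pattern throughout: modules that used to assign a single default export (`module.exports = …`) now attach named exports to `exports`, and every `require` site destructures the name it needs. A minimal sketch of the pattern, condensed from the `point.js` hunks below (caller file and abbreviated paths are illustrative):

```js
// Before: a single default export, imported as the whole module.
// support/geometry/point.js
module.exports = Point;
// a consumer
const Point = require("./support/geometry/point");

// After: a named export, imported by destructuring.
// support/geometry/point.js
exports.Point = Point;
// a consumer
const { Point } = require("./support/geometry/point");
```

The PTL sources mirror the same move: `import 'path' as Name` becomes either `import [Name] from 'path'` (destructured) or `import as Name from 'path'` (whole-module), matching the two `require` forms on the JavaScript side.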
@@ -2277,12 +2277,14 @@ Alongside stylistic sets, Monospace Iosevka can also be configured to cherry-pic
<td rowspan="2"><code>cv87</code></td>
<td colspan="2"><img src="images/character-variant-cv87-1.png" width="64"/></td>
<td colspan="2"><img src="images/character-variant-cv87-2.png" width="64"/></td>
<td colspan="8"> </td>
<td colspan="2"><img src="images/character-variant-cv87-3.png" width="64"/></td>
<td colspan="6"> </td>
</tr>
<tr>
<td colspan="2">1</td>
<td colspan="2">2</td>
<td colspan="8"> </td>
<td colspan="2">3</td>
<td colspan="6"> </td>
</tr>
<tr>
<td rowspan="2"><code>cv88</code></td>

@@ -2920,7 +2922,7 @@ Subsection `variants` is used to configure character variants in the font. Prope
- Styles for `(`, `)`:
<table><tr><td rowspan="2" width="92"><img src="images/character-variant-cv86-1.png" width="64"/></td><td><code>paren = 'normal'</code>, <code>cv86 = 1</code></td></tr><tr><td>Parenthesis with normal contour</td></tr><tr><td rowspan="2" width="92"><img src="images/character-variant-cv86-2.png" width="64"/></td><td><code>paren = 'large-contour'</code>, <code>cv86 = 2</code></td></tr><tr><td>Parenthesis with larger contour, like that in Monaco</td></tr><tr><td rowspan="2" width="92"><img src="images/character-variant-cv86-3.png" width="64"/></td><td><code>paren = 'flat-arc'</code>, <code>cv86 = 3</code></td></tr><tr><td>Parenthesis with flat arc, like that in JetBrains Mono</td></tr></table>
- Styles for `{`, `}`:
<table><tr><td rowspan="2" width="92"><img src="images/character-variant-cv87-1.png" width="64"/></td><td><code>brace = 'straight'</code>, <code>cv87 = 1</code></td></tr><tr><td>More straight braces</td></tr><tr><td rowspan="2" width="92"><img src="images/character-variant-cv87-2.png" width="64"/></td><td><code>brace = 'curly'</code>, <code>cv87 = 2</code></td></tr><tr><td>More curly braces</td></tr></table>
<table><tr><td rowspan="2" width="92"><img src="images/character-variant-cv87-1.png" width="64"/></td><td><code>brace = 'straight'</code>, <code>cv87 = 1</code></td></tr><tr><td>More straight braces</td></tr><tr><td rowspan="2" width="92"><img src="images/character-variant-cv87-2.png" width="64"/></td><td><code>brace = 'curly'</code>, <code>cv87 = 2</code></td></tr><tr><td>More curly braces</td></tr><tr><td rowspan="2" width="92"><img src="images/character-variant-cv87-3.png" width="64"/></td><td><code>brace = 'curly-flat-boundary'</code>, <code>cv87 = 3</code></td></tr><tr><td>Curly braces with flat boundary shape</td></tr></table>
- Styles for `#`:
<table><tr><td rowspan="2" width="60"><img src="images/character-variant-cv88-1.png" width="32"/></td><td><code>number-sign = 'upright'</code>, <code>cv88 = 1</code></td></tr><tr><td>Number sign with vertical bars</td></tr><tr><td rowspan="2" width="60"><img src="images/character-variant-cv88-2.png" width="32"/></td><td><code>number-sign = 'slanted'</code>, <code>cv88 = 2</code></td></tr><tr><td>Number sign with slanted bars</td></tr><tr><td rowspan="2" width="60"><img src="images/character-variant-cv88-3.png" width="32"/></td><td><code>number-sign = 'upright-open'</code>, <code>cv88 = 3</code></td></tr><tr><td>Number sign with vertical bars and open inner</td></tr><tr><td rowspan="2" width="60"><img src="images/character-variant-cv88-4.png" width="32"/></td><td><code>number-sign = 'slanted-open'</code>, <code>cv88 = 4</code></td></tr><tr><td>Number sign with slanted bars and open inner</td></tr></table>
- Styles for `&`:
@@ -1,19 +1,19 @@
"use strict";

const EmptyFont = require("./empty-font");
const { CreateEmptyFont } = require("./empty-font");
const { buildGlyphs } = require("../glyphs/index");
const finalizeFont = require("./finalize/index");
const convertOtd = require("./otd-conv/index");
const { finalizeFont } = require("./finalize/index");
const { convertOtd } = require("./otd-conv/index");
const Caching = require("./caching/index");

const { buildOtl } = require("../otl/index");
const { assignFontNames } = require("../meta/naming");
const { copyFontMetrics } = require("../meta/aesthetics");

module.exports = async function (argv, para) {
exports.buildFont = async function buildFont(argv, para) {
const gs = buildGlyphs(para);

const baseFont = EmptyFont(argv);
const baseFont = CreateEmptyFont(argv);
assignFontNames(para, baseFont);
copyFontMetrics(gs.fontMetrics, baseFont);

@@ -2,7 +2,7 @@

const { Ot } = require("ot-builder");

module.exports = function (argv) {
exports.CreateEmptyFont = function (argv) {
let font = {
head: new Ot.Head.Table(),
hhea: new Ot.MetricHead.Hhea(),

@@ -2,7 +2,7 @@

const { Radical } = require("../../support/gr");

module.exports = function gcFont(glyphStore, excludedChars, otl, cfg) {
exports.gcFont = function (glyphStore, excludedChars, otl, cfg) {
markSweepOtlLookups(otl.GSUB);
markSweepOtlLookups(otl.GPOS);
const sink = markGlyphs(glyphStore, excludedChars, otl, cfg);

@@ -2,16 +2,15 @@

const TypoGeom = require("typo-geom");
const Geom = require("../../support/geometry/index");
const Point = require("../../support/geometry/point");
const Transform = require("../../support/geometry/transform");
const { Point } = require("../../support/geometry/point");
const { Transform } = require("../../support/geometry/transform");
const CurveUtil = require("../../support/geometry/curve-util");

module.exports = finalizeGlyphs;
function finalizeGlyphs(cache, para, glyphStore) {
exports.finalizeGlyphs = function finalizeGlyphs(cache, para, glyphStore) {
const skew = Math.tan(((para.slopeAngle || 0) / 180) * Math.PI);
regulateGlyphStore(cache, skew, glyphStore);
return glyphStore;
}
};

///////////////////////////////////////////////////////////////////////////////////////////////////

@@ -1,10 +1,10 @@
"use strict";

const finalizeGlyphs = require("./glyphs");
const gcFont = require("./gc");
const { finalizeGlyphs } = require("./glyphs");
const { gcFont } = require("./gc");
const { Nwid, Wwid } = require("../../support/gr");

module.exports = function finalizeFont(cache, para, glyphStore, excludedCodePoints, restFont) {
exports.finalizeFont = function (cache, para, glyphStore, excludedCodePoints, restFont) {
assignGrAndCodeRank(glyphStore, Nwid, Wwid);
assignSubRank(glyphStore);
glyphStore = gcFont(glyphStore, excludedCodePoints, restFont, {});
@@ -1,8 +1,39 @@
const { Ot } = require("ot-builder");
const Point = require("../../support/geometry/point");
const { Point } = require("../../support/geometry/point");
const Gr = require("../../support/gr");
const { byCode, bySpacing, byGr, byBuildOrder } = require("./glyph-name");

exports.convertGlyphs = function convertGlyphs(gsOrig) {
const sortedEntries = Array.from(gsOrig.namedEntries(Gr.Nwid, Gr.Wwid)).sort(byRank);

const gs = new MappedGlyphStore();
const cmap = new Ot.Cmap.Table();

for (const [name, gSrc] of sortedEntries) {
gs.declare(name, gSrc);
const us = gsOrig.queryUnicodeOf(gSrc);
if (us) {
for (const u of us) {
if (!(isFinite(u - 0) && u)) continue;
cmap.unicode.set(u, gs.queryBySourceGlyph(gSrc));
gs.setPrimaryUnicode(gSrc, u);
}
}
}
for (const [name, gSrc] of sortedEntries) gs.fill(name, gSrc);
gs.fillOtGlyphNames();
return { glyphs: gs, cmap };
};

function byRank([gna, a], [gnb, b]) {
return (
b.glyphRank - a.glyphRank ||
a.grRank - b.grRank ||
a.codeRank - b.codeRank ||
a.subRank - b.subRank
);
}

class MappedGlyphStore {
constructor() {
this.m_nameMapping = new Map();

@@ -121,35 +152,3 @@ class MappedGlyphStore {
g.geometry = cs;
}
}

module.exports = convertGlyphs;
function convertGlyphs(gsOrig) {
const sortedEntries = Array.from(gsOrig.namedEntries(Gr.Nwid, Gr.Wwid)).sort(byRank);

const gs = new MappedGlyphStore();
const cmap = new Ot.Cmap.Table();

for (const [name, gSrc] of sortedEntries) {
gs.declare(name, gSrc);
const us = gsOrig.queryUnicodeOf(gSrc);
if (us) {
for (const u of us) {
if (!(isFinite(u - 0) && u)) continue;
cmap.unicode.set(u, gs.queryBySourceGlyph(gSrc));
gs.setPrimaryUnicode(gSrc, u);
}
}
}
for (const [name, gSrc] of sortedEntries) gs.fill(name, gSrc);
gs.fillOtGlyphNames();
return { glyphs: gs, cmap };
}

function byRank([gna, a], [gnb, b]) {
return (
b.glyphRank - a.glyphRank ||
a.grRank - b.grRank ||
a.codeRank - b.codeRank ||
a.subRank - b.subRank
);
}

@@ -1,7 +1,7 @@
const convertGlyphs = require("./glyphs");
const { convertGlyphs } = require("./glyphs");
const { convertGsub, convertGpos, convertGdef } = require("./layout");

module.exports = function (baseFont, otl, gs) {
exports.convertOtd = function convertOtd(baseFont, otl, gs) {
const { glyphs, cmap } = convertGlyphs(gs);

const gsub = convertGsub(otl.GSUB, glyphs);
@@ -3,7 +3,7 @@ $$include '../../meta/macros.ptl'

import [Dotless AnyDerivingCv DotlessOrNot getGrTree CvDecompose CcmpDecompose RequireCcmpDecompose] from "../../support/gr"
import [fallback] from '../../support/utils'
import "../../meta/unicode-knowledge" as UnicodeKnowledge
import as UnicodeKnowledge from "../../meta/unicode-knowledge"

glyph-module

@@ -1,6 +1,6 @@
$$include '../../meta/macros.ptl'

import '../../support/geometry/transform' as Transform
import [Transform] from '../../support/geometry/transform'

import [mix linreg clamp fallback] from '../../support/utils'
import [Radical] from '../../support/gr'

@@ -1,9 +1,9 @@
import '../support/glyph/index' as Glyph
import '../support/glyph-store' as GlyphStore
import '../support/glyph-block' as GlyphBlock
import '../support/gr' as Gr
import '../kits/spiro-kit' as SpiroKit
import '../kits/boole-kit' as BooleKit
import [Glyph] from '../support/glyph/index'
import [GlyphStore] from '../support/glyph-store'
import [GlyphBlock] from '../support/glyph-block'
import as Gr from '../support/gr'
import as SpiroKit from '../kits/spiro-kit'
import as BooleKit from '../kits/boole-kit'
import [ DesignParameters ] from "../meta/aesthetics"

extern isFinite

@@ -1,7 +1,7 @@
$$include '../../../meta/macros.ptl'

import [mix barmixL linreg clamp fallback] from '../../../support/utils'
import '../../../support/geometry/point' as Point
import [Point] from '../../../support/geometry/point'

glyph-module

@@ -1,7 +1,7 @@
$$include '../../../meta/macros.ptl'

import [mix barmixL linreg clamp fallback] from '../../../support/utils'
import '../../../support/geometry/point' as Point
import [Point] from '../../../support/geometry/point'

glyph-module

@@ -2,7 +2,7 @@ $$include '../../../meta/macros.ptl'

import [mix linreg clamp fallback] from '../../../support/utils'
import [DesignParameters] from '../../../meta/aesthetics'
import '../../../support/geometry/point' as Point
import [Point] from '../../../support/geometry/point'

glyph-module

@@ -1,7 +1,7 @@
$$include '../../../meta/macros.ptl'

import [mix linreg clamp fallback] from '../../../support/utils'
import '../../../support/geometry/transform' as Transform
import [Transform] from '../../../support/geometry/transform'

glyph-module
@@ -8,15 +8,15 @@ const { encode } = require("@msgpack/msgpack");
const { FontIo } = require("ot-builder");
const Toml = require("@iarna/toml");

const BuildFont = require("./gen/build-font.js");
const { buildFont } = require("./gen/build-font.js");
const Parameters = require("./support/parameters");
const VariantData = require("./support/variant-data");
const ApplyLigationData = require("./support/ligation-data");
const { applyLigationData } = require("./support/ligation-data");
const { createGrDisplaySheet } = require("./support/gr");

module.exports = async function main(argv) {
const paraT = await getParameters();
const { font, glyphStore } = await BuildFont(argv, paraT(argv));
const { font, glyphStore } = await buildFont(argv, paraT(argv));
if (argv.oCharMap) await saveCharMap(argv, glyphStore);
if (argv.o) await saveTTF(argv, font);
};

@@ -45,7 +45,7 @@ async function getParameters() {
function createParaImpl(argv) {
let para = Parameters.init(deepClone(parametersData), argv);
VariantData.apply(deepClone(rawVariantsData), para, argv);
ApplyLigationData(deepClone(rawLigationData), para, argv);
applyLigationData(deepClone(rawLigationData), para, argv);

if (argv.excludedCharRanges) para.excludedCharRanges = argv.excludedCharRanges;
if (argv.compatibilityLigatures) para.compLig = argv.compatibilityLigatures;
@@ -1,7 +1,7 @@
"use strict";

const TypoGeom = require("typo-geom");
const { BooleanGeometry, TransformedGeometry } = require("../support/geometry");
const { BooleanGeometry, TransformedGeometry } = require("../support/geometry/index");

exports.SetupBuilders = function ({ Glyph, GlobalTransform }) {
function impl(operator, operands) {

@@ -1,6 +1,6 @@
import '../support/geometry/point' as Point
import '../support/geometry/transform' as Transform
import '../support/geometry/anchor' as Anchor
import [Point] from '../support/geometry/point'
import [Transform] from '../support/geometry/transform'
import [Anchor] from '../support/geometry/anchor'
import [mix linreg clamp fallback] from '../support/utils'

# Parameter generation

@@ -1,4 +1,4 @@
import 'semver' as semver
import as semver from 'semver'
import [Ot] from "ot-builder"
extern Buffer

@@ -1,5 +1,5 @@
import '../support/glyph' as Glyph
import '../support/geometry/transform' as Transform
import [Glyph] from '../support/glyph'
import [Transform] from '../support/geometry/transform'

define GDEF_SIMPLE 1
define GDEF_LIGATURE 2

@@ -1,6 +1,6 @@
import [add-common-feature add-feature add-lookup add-feature-lookup ChainRuleBuilder BeginLookupBlock EndLookupBlock UkMapToLookup UkMap2ToLookup] from "./table-util"
import [AnyCv Dotless TieMark TieGlyph CcmpDecompose] from "../support/gr"
import "../meta/unicode-knowledge" as UnicodeKnowledge
import as UnicodeKnowledge from "../meta/unicode-knowledge"

extern Set

@@ -1,6 +1,5 @@
import 'toposort' as toposort
import '../support/glyph/index' as Glyph
import '../support/gr' as Gr
import as toposort from 'toposort'
import as Gr from '../support/gr'

import [CreateEmptyTable finalizeTable MoveBackUtilityLookups] from "./table-util"

@@ -1,5 +1,5 @@
import as toposort from 'toposort'
import [AnyCv] from "../support/gr"
import 'toposort' as toposort

export : define [CreateEmptyTable] {.languages {.} .features {.} .lookups {.} .lookupDep {}}
extern Map
@@ -1,6 +1,6 @@
"use strict";

module.exports = class Anchor {
exports.Anchor = class Anchor {
constructor(x, y) {
this.x = x;
this.y = y;

@@ -1,8 +1,8 @@
"use strict";

const TypoGeom = require("typo-geom");
const Point = require("./point");
const Transform = require("./transform");
const { Point } = require("./point");
const { Transform } = require("./transform");

exports.SPIRO_PRECISION = 1 / 2;
exports.OCCURRENT_PRECISION = 1 / 16;

@@ -4,11 +4,11 @@ const crypto = require("crypto");
const TypoGeom = require("typo-geom");
const SpiroJs = require("spiro");

const Point = require("./point");
const Transform = require("./transform");
const CurveUtil = require("./curve-util");
const { SpiroExpander } = require("./spiro-expand");
const Format = require("../util/formatter");
const { Point } = require("./point");
const { Transform } = require("./transform");
const { SpiroExpander } = require("./spiro-expand");

class GeometryBase {
asContours() {

@@ -60,4 +60,4 @@ Point.Type = {
Quadratic: 3
};

module.exports = Point;
exports.Point = Point;

@@ -1,6 +1,6 @@
"use strict";

module.exports = class Transform {
exports.Transform = class Transform {
constructor(xx, yx, xy, yy, x, y) {
this.xx = xx;
this.yx = yx;

@@ -1,6 +1,6 @@
"use strict";

class GlyphBlock {
exports.GlyphBlock = class GlyphBlock {
constructor(capture, blockName, body) {
this.capture = capture;
this.blockName = blockName;

@@ -26,6 +26,4 @@ class GlyphBlock {
for (const f of pendingApplications) f();
return this.exports;
}
}

module.exports = GlyphBlock;
};

@@ -1,6 +1,6 @@
"use strict";

class GlyphStore {
exports.GlyphStore = class GlyphStore {
constructor() {
this.nameForward = new Map();
this.nameBackward = new Map();

@@ -116,6 +116,4 @@ class GlyphStore {
}
return gs1;
}
}

module.exports = GlyphStore;
};

@@ -1,11 +1,11 @@
"use strict";

const Transform = require("../geometry/transform");
const Point = require("../geometry/point");
const Anchor = require("../geometry/anchor");
const { Transform } = require("../geometry/transform");
const { Point } = require("../geometry/point");
const { Anchor } = require("../geometry/anchor");
const Geom = require("../geometry");

module.exports = class Glyph {
exports.Glyph = class Glyph {
constructor(_identifier) {
this._m_identifier = _identifier;
@@ -2,7 +2,7 @@

const Parameters = require("./parameters");

module.exports = function applyLigationData(data, para, argv) {
exports.applyLigationData = function (data, para, argv) {
const defaultBuildup = {};

const hives = {};

@@ -1,6 +1,6 @@
"use strict";

const monotonicInterpolate = require("./util/monotonic-interpolate");
const { monotonicInterpolate } = require("./util/monotonic-interpolate");

exports.init = initPara;
function initPara(data, argv) {

@@ -1,6 +1,6 @@
"use strict";

module.exports = function (xs, ys) {
exports.monotonicInterpolate = function (xs, ys) {
let i,
length = xs.length;

@@ -16,7 +16,7 @@
"fs-extra": "^10.0.0",
"ot-builder": "^1.1.0",
"otb-ttc-bundle": "^1.1.0",
"patel": "^0.35.0",
"patel": "^0.35.1",
"semver": "^7.3.5",
"spiro": "^3.0.0",
"stylus": "^0.54.8",
@@ -2,20 +2,13 @@

const fs = require("fs-extra");
const path = require("path");
const parseVariantsData = require("../export-data/variants-data");
const parseLigationData = require("../export-data/ligation-data");
const getCharMapAndSupportedLanguageList = require("../export-data/supported-languages");
const execMain = require("../shared/execMain");

const charMapPath = process.argv[2];
const charMapItalicPath = process.argv[3];
const charMapObliquePath = process.argv[4];

execMain(main);
const { parseVariantsData } = require("../export-data/variants-data");
const { parseLigationData } = require("../export-data/ligation-data");
const { getCharMapAndSupportedLanguageList } = require("../export-data/supported-languages");

///////////////////////////////////////////////////////////////////////////////////////////////////

async function main() {
module.exports = async function main(argv) {
const readmePath = path.resolve(__dirname, "../../README.md");
let readme = await fs.readFile(readmePath, "utf-8");
readme = (await processSsOt()).apply(readme);

@@ -26,10 +19,10 @@ async function main() {
readme = (await processLigSetPreDef()).apply(readme);
readme = (await processLigSetOt(1, g => g.tag === "calt")).apply(readme);
readme = (await processLigSetOt(2, g => g.tag !== "calt")).apply(readme);
readme = (await processLangList()).apply(readme);
readme = (await processLangList(argv)).apply(readme);
readme = (await processPrivateBuildPlans()).apply(readme);
await fs.writeFile(readmePath, readme);
}
};

async function processSsOt() {
const variantsData = await parseVariantsData();

@@ -309,11 +302,11 @@ async function processLigSetOt(index, fn) {
return md;
}

async function processLangList() {
async function processLangList(argv) {
const cl = await getCharMapAndSupportedLanguageList(
charMapPath,
charMapItalicPath,
charMapObliquePath
argv.charMapPath,
argv.charMapItalicPath,
argv.charMapObliquePath
);
const md = new MdCol("Section-Language-List");
md.log(`${cl.languages.length} Supported Languages: \n`);
@@ -1,4 +1,4 @@
module.exports = [
exports.blockData = [
[[0x0000, 0x007f], "Basic Latin"],
[[0x0080, 0x00ff], "Latin-1 Supplement"],
[[0x0100, 0x017f], "Latin Extended-A"],

@@ -1,9 +1,9 @@
const blockData = require("./block-data");
const { blockData } = require("./block-data");
const ucdNames = require("@unicode/unicode-13.0.0/Names");
const ugc = require("@unicode/unicode-13.0.0/General_Category");

// eslint-disable-next-line complexity
module.exports = function (covUpright, covItalic, covOblique) {
exports.gatherCoverageData = function (covUpright, covItalic, covOblique) {
const result = [];
for (const [[lchBlockStart, lchBlockEnd], block] of blockData) {
let blockResults = [];
@@ -1,32 +1,23 @@
"use strict";

const fs = require("fs-extra");
const parseVariantsData = require("./variants-data");
const parseLigationData = require("./ligation-data");
const getCharMapAndSupportedLanguageList = require("./supported-languages");
const execMain = require("../shared/execMain");
const { parseVariantsData } = require("./variants-data");
const { parseLigationData } = require("./ligation-data");
const { getCharMapAndSupportedLanguageList } = require("./supported-languages");

const version = require("../../package.json").version;

const charMapPath = process.argv[2];
const charMapItalicPath = process.argv[3];
const charMapObliquePath = process.argv[4];
const exportPathMeta = process.argv[5];
const exportPathCov = process.argv[6];

execMain(main);

/////////////////////////////////////////////////////////////////////////////////////////////////////
async function main() {
///////////////////////////////////////////////////////////////////////////////////////////////////
module.exports = async function main(argv) {
const variantsData = await parseVariantsData();
const ligationData = await parseLigationData();
const cl = await getCharMapAndSupportedLanguageList(
charMapPath,
charMapItalicPath,
charMapObliquePath
argv.charMapPath,
argv.charMapItalicPath,
argv.charMapObliquePath
);
await fs.writeJson(
exportPathMeta,
argv.exportPathMeta,
{
version,
variantsData,

@@ -38,5 +29,5 @@ async function main() {
},
{ spaces: 2 }
);
await fs.writeJson(exportPathCov, { version, ...cl }, { spaces: 2 });
}
await fs.writeJson(argv.exportPathCov, { version, ...cl }, { spaces: 2 });
};

@@ -188,7 +188,7 @@ const ligationSamplesNarrow = [
]
];

module.exports = async function getLigationData() {
exports.parseLigationData = async function () {
const ligToml = await fs.readFile(
path.join(__dirname, "../../params/ligation-set.toml"),
"utf8"
@@ -5,15 +5,15 @@ const fs = require("fs-extra");
const zlib = require("zlib");
const { decode } = require("@msgpack/msgpack");

const gatherCov = require("./coverage-export/gather-coverage-data");
const { gatherCoverageData } = require("./coverage-export/gather-coverage-data");

// List all the languages that Iosevka supports, but cannot inferred from CLDR data.
const overrideSupportedLanguages = [];

module.exports = async function (charMapPath, charMapItalicPath, charMapObliquePath) {
const charMap = await readMpCharMap(charMapPath);
const charMapItalic = await readMpCharMap(charMapItalicPath);
const charMapOblique = await readMpCharMap(charMapObliquePath);
exports.getCharMapAndSupportedLanguageList = async function (cmpUpright, cmpItalic, cmpOblique) {
const charMap = await readMpCharMap(cmpUpright);
const charMapItalic = await readMpCharMap(cmpItalic);
const charMapOblique = await readMpCharMap(cmpOblique);

const rawCoverage = getRawCoverage(charMap);
const rawCoverageItalic = getRawCoverage(charMapItalic);

@@ -24,7 +24,7 @@ module.exports = async function (charMapPath, charMapItalicPath, charMapObliqueP
glyphCount: charMap.length,
codePointCount: rawCoverage.size
},
unicodeCoverage: gatherCov(rawCoverage, rawCoverageItalic, rawCoverageOblique),
unicodeCoverage: gatherCoverageData(rawCoverage, rawCoverageItalic, rawCoverageOblique),
languages: Array.from(getSupportedLanguageSet(rawCoverage)).sort()
};
};

@@ -6,7 +6,7 @@ const toml = require("@iarna/toml");

const VariantDataParser = require("../../font-src/support/variant-data");

module.exports = async function () {
exports.parseVariantsData = async function () {
const variantsToml = await fs.readFile(
path.join(__dirname, "../../params/variants.toml"),
"utf8"
@@ -3,22 +3,17 @@
const path = require("path");
const fs = require("fs-extra");
const semver = require("semver");
const execMain = require("../shared/execMain");

const ChangeFileDir = path.join(__dirname, "../../changes");
const ModifiedSinceVersion = "2.x";
const Version = process.argv[2];
const outputPath = process.argv[3];

execMain(main);

///////////////////////////////////////////////////////////////////////////////////////////////////

async function main() {
module.exports = async function main(argv) {
const out = new Output();
await GenerateChangeList(out);
await fs.writeFile(outputPath, out.buffer);
}
await GenerateChangeList(argv, out);
await fs.writeFile(argv.outputPath, out.buffer);
};

class Output {
constructor() {

@@ -29,14 +24,14 @@ class Output {
}
}

async function GenerateChangeList(out) {
async function GenerateChangeList(argv, out) {
const changeFiles = await fs.readdir(ChangeFileDir);
const fragments = new Map();
for (const file of changeFiles) {
const filePath = path.join(ChangeFileDir, file);
const fileParts = path.parse(filePath);
if (fileParts.ext !== ".md") continue;
if (!semver.valid(fileParts.name) || semver.lt(Version, fileParts.name)) continue;
if (!semver.valid(fileParts.name) || semver.lt(argv.version, fileParts.name)) continue;
fragments.set(fileParts.name, await fs.readFile(filePath, "utf8"));
}
const sortedFragments = Array.from(fragments).sort((a, b) => semver.compare(b[0], a[0]));
@@ -3,27 +3,21 @@
const Path = require("path");
const Fs = require("fs-extra");
const SemVer = require("semver");
const execMain = require("../shared/execMain");

const ChangeFileDir = Path.join(__dirname, "../../changes");
const Version = process.argv[2];
const releasePackagesJsonPath = process.argv[3];
const outputPath = process.argv[4];

execMain(main);

///////////////////////////////////////////////////////////////////////////////////////////////////

async function main() {
module.exports = async function main(argv) {
const out = new Output();

await GenerateChangeList(out);
await GenerateChangeList(argv, out);
await CopyMarkdown(out, "packages-desc.md");
await GeneratePackageList(out);
await GeneratePackageList(argv, out);

await Fs.ensureDir(Path.join(__dirname, `../../release-archives/`));
await Fs.writeFile(outputPath, out.buffer);
}
await Fs.writeFile(argv.outputPath, out.buffer);
};

class Output {
constructor() {

@@ -48,14 +42,14 @@ async function CopyMarkdown(out, name) {
///////////////////////////////////////////////////////////////////////////////////////////////////
// CHANGE LIST

async function GenerateChangeList(out) {
async function GenerateChangeList(argv, out) {
const changeFiles = await Fs.readdir(ChangeFileDir);
const fragments = new Map();
for (const file of changeFiles) {
const filePath = Path.join(ChangeFileDir, file);
const fileParts = Path.parse(filePath);
if (fileParts.ext !== ".md") continue;
if (!SemVer.valid(fileParts.name) || SemVer.lt(Version, fileParts.name)) continue;
if (!SemVer.valid(fileParts.name) || SemVer.lt(argv.version, fileParts.name)) continue;
fragments.set(fileParts.name, await Fs.readFile(filePath, "utf8"));
}

@@ -85,19 +79,21 @@ const Spacings = {
"quasi-proportional": ["Default", false]
};

const imagePrefix = `https://raw.githubusercontent.com/be5invis/Iosevka/v${Version}/images`;
const ImagePrefixNoVersion = `https://raw.githubusercontent.com/be5invis/Iosevka`;
const DownloadLinkPrefixNoVersion = `https://github.com/be5invis/Iosevka/releases/download`;

async function GeneratePackageList(out) {
const pkgShapesData = await Fs.readJson(releasePackagesJsonPath);
const DownloadLinkPrefix = `https://github.com/be5invis/Iosevka/releases/download/v${Version}`;
async function GeneratePackageList(argv, out) {
const imagePrefix = `${ImagePrefixNoVersion}/v${argv.version}/images`;
const pkgShapesData = await Fs.readJson(argv.releasePackagesJsonPath);
const DownloadLinkPrefix = `${DownloadLinkPrefixNoVersion}/v${argv.version}`;

out.log(`<table>`);
for (let [groupID, gr] of Object.entries(pkgShapesData)) {
const prime = gr.subGroups[groupID];

const familyName = buildName("\u00a0", ...prime.family.split(" "));
const sTtcName = buildName("-", "super-ttc", groupID, Version);
const ttcName = buildName("-", "ttc", groupID, Version);
const sTtcName = buildName("-", "super-ttc", groupID, argv.version);
const ttcName = buildName("-", "ttc", groupID, argv.version);
const sTtcLink = `${DownloadLinkPrefix}/${sTtcName}.zip`;
const ttcLink = `${DownloadLinkPrefix}/${ttcName}.zip`;

@@ -128,7 +124,7 @@ async function GeneratePackageList(out) {
for (const [subGroupID, subGr] of Object.entries(gr.subGroups)) {
const [spacingDesc, ligation] = Spacings[subGr.spacing];
const createLink = (label, prefix) => {
const fileName = buildName("-", prefix, subGroupID, Version);
const fileName = buildName("-", prefix, subGroupID, argv.version);
const downloadLink = `${DownloadLinkPrefix}/${fileName}.zip`;
return `<b><a href="${downloadLink}">${label}</a></b>`;
};
@@ -3,23 +3,16 @@
const ejs = require("ejs");
const fs = require("fs-extra");
const path = require("path");
const parseVariantsData = require("../export-data/variants-data");
const getLigationData = require("../export-data/ligation-data");
const execMain = require("../shared/execMain");
const { parseVariantsData } = require("../export-data/variants-data");
const { parseLigationData } = require("../export-data/ligation-data");

const inputPath = process.argv[2];
const outputPath = process.argv[3];
const outputDataPath = process.argv[4];
///////////////////////////////////////////////////////////////////////////////////////////////////

execMain(main);

/////////////////////////////////////////////////////////////////////////////////////////////////////

async function main() {
module.exports = async function main(argv) {
const weightGrades = [100, 200, 300, 400, 500, 600, 700, 800, 900];
const templatePath = path.join(inputPath, "index.ejs");
const templatePath = path.join(argv.inputPath, "index.ejs");
const variationData = await await parseVariantsData();
const ligationData = await getLigationData();
const ligationData = await parseLigationData();
const html = await ejs.renderFile(templatePath, {
...variationData,
ligation: ligationData,

@@ -32,7 +25,7 @@ async function main() {
.replace(/\n/g, "<br/>");
}
});
await fs.writeFile(outputPath, html);
await fs.writeFile(argv.outputPath, html);

let readmeSnapshotTasks = [
{ el: "#languages", name: "languages" },

@@ -108,7 +101,7 @@ async function main() {
}

await fs.writeJson(
outputDataPath,
argv.outputDataPath,
{
readmeSnapshotTasks,
ligationSamples: ligationData.samples,

@@ -116,4 +109,4 @@ async function main() {
},
{ spaces: " " }
);
}
};
@@ -1,12 +0,0 @@
"use strict";

module.exports = function (main) {
setTimeout(
() =>
main().catch(e => {
console.error(e);
process.exit(1);
}),
0
);
};

@@ -3,12 +3,18 @@
const path = require("path");
const fs = require("fs-extra");
const semver = require("semver");
const execMain = require("../shared/execMain");

const ChangeFileDir = path.join(__dirname, "../../changes");
const PackageJsonPath = path.join(__dirname, "../../package.json");

execMain(main);
setTimeout(
() =>
main().catch(e => {
console.error(e);
process.exit(1);
}),
0
);

///////////////////////////////////////////////////////////////////////////////////////////////////
verdafile.js
@@ -11,7 +11,7 @@ const semver = require("semver");

const { task, file, oracle, computed, phony } = build.ruleTypes;
const { de, fu, sfu, ofu } = build.rules;
const { run, cd, cp, rm, fail, echo, silently } = build.actions;
const { run, node, cd, cp, rm, fail, echo, silently } = build.actions;
const { FileList } = build.predefinedFuncs;

module.exports = build;

@@ -599,15 +599,13 @@ const PagesDataExport = task(`pages:data-export`, async t => {
BuildCM("iosevka", "iosevka-italic"),
BuildCM("iosevka", "iosevka-oblique")
);
await run(
`node`,
`utility/export-data/index`,
cm.full,
cmi.full,
cmo.full,
Path.resolve(pagesDir, "shared/data-import/raw/metadata.json"),
Path.resolve(pagesDir, "shared/data-import/raw/coverage.json")
);
await node(`utility/export-data/index`, {
charMapPath: cm.full,
charMapItalicPath: cmi.full,
charMapObliquePath: cmo.full,
exportPathMeta: Path.resolve(pagesDir, "shared/data-import/raw/metadata.json"),
exportPathCov: Path.resolve(pagesDir, "shared/data-import/raw/coverage.json")
});
});

const PagesFontExport = task.group(`pages:font-export`, async (target, gr) => {

@@ -692,14 +690,16 @@ const SnapShotHtml = file(`${SNAPSHOT_TMP}/index.html`, async (target, out) => {
BuildCM("iosevka", "iosevka-italic"),
BuildCM("iosevka", "iosevka-oblique")
);
await run(
`node`,
`utility/generate-snapshot-page/index.js`,
"snapshot-src/templates",
out.full,
`${out.dir}/${out.name}.data.json`
);
await run(`node`, `utility/amend-readme/index`, cm.full, cmi.full, cmo.full);
await node(`utility/generate-snapshot-page/index`, {
inputPath: "snapshot-src/templates",
outputPath: out.full,
outputDataPath: `${out.dir}/${out.name}.data.json`
});
await node(`utility/amend-readme/index`, {
charMapPath: cm.full,
charMapItalicPath: cmi.full,
charMapObliquePath: cmo.full
});
});
const SnapShotStatic = file.make(
x => `${SNAPSHOT_TMP}/${x}`,

@@ -738,7 +738,11 @@ const ReleaseNotesFile = file.make(
await t.need(UtilScripts, de(ARCHIVE_DIR));
const [changeFiles, rpFiles] = await t.need(ChangeFileList(), ReleaseNotePackagesFile);
await t.need(changeFiles.map(fu));
await run("node", "utility/generate-release-note/index", version, rpFiles.full, out.full);
await node("utility/generate-release-note/index", {
version,
releasePackagesJsonPath: rpFiles.full,
outputPath: out.full
});
}
);
const ReleaseNotePackagesFile = file(`${BUILD}/release-packages.json`, async (t, out) => {

@@ -773,7 +777,7 @@ const ChangeLogFile = file(`CHANGELOG.md`, async (t, out) => {
await t.need(UtilScripts, de(ARCHIVE_DIR));
const [changeFiles] = await t.need(ChangeFileList());
await t.need(changeFiles.map(fu));
await run("node", "utility/generate-change-log/index", version, out.full);
await node("utility/generate-change-log/index", { version, outputPath: out.full });
});
const ChangeFileList = oracle.make(
() => `release:change-file-list`,
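The `verdafile.js` hunks above show the companion change on the build side: utility scripts no longer read positional `process.argv` slots and self-start through `shared/execMain`; each now exports an async `main(argv)` that takes a named-options object, which the new `node` build action supplies. Outside the verda build, such an entry point could be driven as in the sketch below (a hypothetical standalone driver with illustrative paths, not a file in this repository):

```js
// drive-amend-readme.js -- hypothetical driver; mirrors the option names verdafile.js passes.
const amendReadme = require("./utility/amend-readme/index");

amendReadme({
	charMapPath: "build/char-map-upright",       // illustrative paths; the build
	charMapItalicPath: "build/char-map-italic",  // normally passes cm.full etc.
	charMapObliquePath: "build/char-map-oblique"
}).catch(e => {
	console.error(e);
	process.exit(1);
});
```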