Refactor [AdviceStroke] and other weight control functions to make dense letters less outstanding. (#2586)

* Refactor the logic of AdviceStroke.
* More cleanup + optimize memory use of geometry cache.
* Notes
This commit is contained in:
parent
0e7579e4bf
commit
9e4d1621d2
6 changed files with 169 additions and 42 deletions
@@ -1,3 +1,4 @@
+* Make dense letters less outstanding (#2581).
 * Optimize glyphs for Armenian Capital Ini (`U+053B`), Ken (`U+053F`), and Vew (`U+054E`).
 * Remove bottom-right serif from Armenian Capital Now (`U+0546`).
 * Remove top-right serif from Armenian Lower Ben (`U+0562`).
@@ -76,24 +76,26 @@ export : define [calculateMetrics para] : begin
     define SmallArchDepth para.smallArchDepth

+    # Weight Control
+    # We will estimate darkness using lower-case 'e'
     define DarknessMockWidth : if (Width < HalfUPM) (HalfUPM * [Math.sqrt : Width / HalfUPM]) Width
-    define DarknessMockWidth2 : HalfUPM * [Math.sqrt : Width / HalfUPM]
-    define [BaseFillRate con] : 1 / 2 + para.stroke / ([Math.max HalfUPM DarknessMockWidth2] - SB * 2)
-    define [StrokeWeightControlSigmoid x] : Math.tanh x
-
+    define BaseFillRate : Math.min 0.95 : 2 * HVContrast * para.stroke / (DarknessMockWidth - SB * 2)
+    define WeightControlFactor : [Math.atanh BaseFillRate] / BaseFillRate
+    define [StrokeWeightControlSigmoid x] : Math.tanh : WeightControlFactor * x

     define [AdviceStrokeInSpace availSpace contrast crowdedness mul] : begin
-        local adjCrowdedness : crowdedness * [Math.sqrt contrast]
-        local fillRateScalar : [StrokeWeightControlSigmoid adjCrowdedness] / adjCrowdedness
-        return : Math.min para.stroke (mul * fillRateScalar * [BaseFillRate contrast] * availSpace)
+        local nonAdjustedFillRate : crowdedness * contrast * para.stroke / availSpace
+        local adjustedFillRate : StrokeWeightControlSigmoid nonAdjustedFillRate
+        local strokeWidthScalar : Math.min 1 (mul * adjustedFillRate / nonAdjustedFillRate)
+        return : para.stroke * strokeWidthScalar
     define [AdviceStroke crowdedness div mul] : begin
         local spaceH : DarknessMockWidth * [fallback div 1] - SB * 2
-        AdviceStrokeInSpace spaceH HVContrast crowdedness [fallback mul 1]
-    define [AdviceStroke2 cowX cowY refH div] : begin
+        return : AdviceStrokeInSpace spaceH HVContrast crowdedness [fallback mul 1]
+    define [AdviceStroke2 crwX crwY refH div] : begin
         local spaceH : DarknessMockWidth * [fallback div 1] - SB * 2
-        local spaceV : refH - SB * 2
+        local spaceV refH
         return : Math.min
-            AdviceStrokeInSpace spaceH HVContrast cowX 1
-            AdviceStrokeInSpace spaceV 1 cowY 1
+            AdviceStrokeInSpace spaceH HVContrast crwX 1
+            AdviceStrokeInSpace spaceV 1 crwY 1

     define Stroke : AdviceStroke 2
     define DotSize : fallback para.dotSize Stroke
@@ -183,7 +185,7 @@ export : define [calculateMetrics para] : begin
     define OverlayStroke : AdviceStroke 3.75
     define OperatorStroke : AdviceStroke 2.75
     define GeometryStroke : AdviceStroke 4
-    define ShoulderFine : Math.min (Stroke * para.shoulderFineMin) [AdviceStroke 16]
+    define ShoulderFine : Math.min (Stroke * para.shoulderFineMin) [AdviceStroke 24]

     define [AdviceGlottalStopArchDepth y sign] : begin
         return : ((y - Stroke) * 0.24 + Stroke * 0.625) + sign * TanSlope * SmoothAdjust
@@ -395,8 +397,6 @@ export : define DesignParameters : object
     braceCurlyM1 0.6
     braceCurlyM2 0.45
     braceOvershoot 0.02
-    # Crosdedness
-    lllcrowdedness (3 + 1 / 3)
     # Bar position
     hBarPos 0.525
     eBarPos 0.5
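Editor's note: as I read the hunk at @@ -76,24 +76,26 @@ above, the new model measures darkness as a fill rate. The mock lowercase 'e' defines a reference fill rate, `WeightControlFactor` is chosen so that the sigmoid reproduces that reference exactly, and a denser glyph then gets its stroke thinned just enough that its apparent fill rate stays below 1. A minimal JavaScript re-expression of that logic follows; the metric values (`stroke`, `width`, `sb`, `hvContrast`) are invented for the demo and are not Iosevka's actual defaults.

// Editorial sketch (not part of the commit): the new weight-control math in JavaScript.
const stroke = 108;      // full stroke width (stands in for para.stroke)
const width = 600;       // advance width of the mock lowercase 'e'
const sb = 60;           // side bearing (SB)
const hvContrast = 1;    // horizontal/vertical stroke contrast

// Reference darkness: fill rate of the mock 'e' drawn at full stroke width.
const baseFillRate = Math.min(0.95, (2 * hvContrast * stroke) / (width - sb * 2));
// Calibrate tanh so that it recovers baseFillRate at the reference point.
const weightControlFactor = Math.atanh(baseFillRate) / baseFillRate;

function adviceStrokeInSpace(availSpace, contrast, crowdedness, mul = 1) {
    // Fill rate the glyph would reach if every stroke kept the full width.
    const nonAdjustedFillRate = (crowdedness * contrast * stroke) / availSpace;
    // tanh compresses high fill rates to stay below 1; low ones pass through.
    const adjustedFillRate = Math.tanh(weightControlFactor * nonAdjustedFillRate);
    const strokeWidthScalar = Math.min(1, (mul * adjustedFillRate) / nonAdjustedFillRate);
    return stroke * strokeWidthScalar;
}

// A glyph at the reference crowdedness keeps the full stroke; a denser glyph is
// thinned smoothly, so it no longer looks darker than the reference 'e'.
console.log(adviceStrokeInSpace(width - sb * 2, hvContrast, 2)); // ~108 (unchanged)
console.log(adviceStrokeInSpace(width - sb * 2, hvContrast, 5)); // ~80 (thinned)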
@@ -2,17 +2,19 @@ import fs from "fs";
 import { setTimeout } from "node:timers/promises";
 import zlib from "zlib";

 import * as CurveUtil from "@iosevka/geometry/curve-util";
+import * as ContourSetEncoding from "@iosevka/geometry/encoding";
 import { encode, decode } from "@msgpack/msgpack";

-const Edition = 50;
+const Edition = 60;
 const MAX_AGE = 16;

 class GfEntry {
-    constructor(age, value) {
+    constructor(age, valueBuffer) {
         this.age = age;
-        this.value = value;
+        this.valueBuffer = valueBuffer;
     }
 }

 class Cache {
     constructor(freshAgeKey) {
         this.freshAgeKey = freshAgeKey;
@@ -25,15 +27,16 @@ class Cache {
         this.historyAgeKeys = rep.ageKeys.slice(0, MAX_AGE);
         const ageKeySet = new Set(this.historyAgeKeys);
         for (const [k, e] of Object.entries(rep.gf)) {
-            if (ageKeySet.has(e.age))
-                this.gf.set(k, new GfEntry(e.age, CurveUtil.repToShape(e.value)));
+            if (ageKeySet.has(e.age)) {
+                this.gf.set(k, new GfEntry(e.age, Buffer.from(e.buf, "base64")));
+            }
         }
     }
     toRep(version, diffOnly) {
         let gfRep = {};
         for (const [k, e] of this.gf) {
             if (!diffOnly || this.diff.has(k)) {
-                gfRep[k] = { age: e.age, value: e.value };
+                gfRep[k] = { age: e.age, buf: e.valueBuffer.toString("base64") };
             }
         }
         const mergedAgeKeys =
@@ -56,7 +59,7 @@ class Cache {
     getGF(k) {
         const entry = this.gf.get(k);
         if (!entry) return undefined;
-        else return entry.value;
+        else return ContourSetEncoding.decode(entry.valueBuffer);
     }
     refreshGF(k) {
         const entry = this.gf.get(k);
@@ -66,10 +69,12 @@ class Cache {
             entry.age = this.freshAgeKey;
         }
     }
-    saveGF(k, v) {
-        this.gf.set(k, new GfEntry(this.freshAgeKey, v));
+    saveGF(k, cs) {
+        const buf = ContourSetEncoding.encode(cs);
+        this.gf.set(k, new GfEntry(this.freshAgeKey, buf));
         this.diff.add(k);
     }
+    // Merging
     merge(other) {
         for (const [k, e] of other.gf) {
             this.gf.set(k, e);
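Editor's note: taken together, the cache hunks above switch `GfEntry` from holding live contour-set objects to holding the compact `Buffer` produced by the new `@iosevka/geometry/encoding` module; decoding happens lazily in `getGF`, and the persisted rep stores each buffer as base64. A stripped-down sketch of that pattern follows; `MiniGeometryCache` is illustrative only and omits the real `Cache` class's age and diff bookkeeping, and it assumes the `@iosevka/geometry` package is resolvable.

// Editorial sketch (not part of the commit): store encoded, decode on read.
import * as ContourSetEncoding from "@iosevka/geometry/encoding";

class MiniGeometryCache {
    constructor() {
        this.gf = new Map();
    }
    saveGF(key, contourSet) {
        // Keep the compact binary form instead of the live Point object graph.
        this.gf.set(key, ContourSetEncoding.encode(contourSet));
    }
    getGF(key) {
        const buf = this.gf.get(key);
        if (!buf) return undefined;
        // Rebuild the contour set only when a caller actually asks for it.
        return ContourSetEncoding.decode(buf);
    }
    toRep() {
        // For persistence, each buffer round-trips as a base64 string, mirroring
        // the `buf` field written by toRep in the diff above.
        const rep = {};
        for (const [key, buf] of this.gf) rep[key] = buf.toString("base64");
        return rep;
    }
}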
@@ -12,7 +12,8 @@
         "./transform": "./src/transform.mjs",
         "./spiro-control": "./src/spiro-control.mjs",
         "./spiro-expand": "./src/spiro-expand.mjs",
-        "./spiro-pen-expand": "./src/spiro-pen-expand.mjs"
+        "./spiro-pen-expand": "./src/spiro-pen-expand.mjs",
+        "./encoding": "./src/encoding.mjs"
     },
     "dependencies": {
         "@iosevka/util": "32.0.2",
@@ -4,16 +4,6 @@ import * as TypoGeom from "typo-geom";
 import { Point, Vec2 } from "./point.mjs";
 import { Transform } from "./transform.mjs";

-function contourToRep(contour) {
-    let c = [];
-    for (const z of contour) c.push({ type: z.type, x: z.x, y: z.y });
-    return c;
-}
-function repToContour(contourRep) {
-    let c = [];
-    for (const z of contourRep) c.push(Point.fromXY(z.type, z.x, z.y));
-    return c;
-}
 function convertContourToArcs(contour) {
     if (!contour || !contour.length) return [];
     const newContour = [];
@@ -115,12 +105,6 @@ export class OffsetCurve {
 export function convertShapeToArcs(shape) {
     return shape.map(convertContourToArcs);
 }
-export function shapeToRep(shape) {
-    return shape.map(contourToRep);
-}
-export function repToShape(shapeRep) {
-    return shapeRep.map(repToContour);
-}

 export class BezToContoursSink {
     constructor(gizmo) {
packages/geometry/src/encoding.mjs  (new file, 136 lines)

@@ -0,0 +1,136 @@
import { Point } from "./point.mjs";

/// A generic buffer writer helper class
class BufferWriter {
    constructor() {
        this.buffer = Buffer.alloc(0x1000);
        this.capacity = 0x1000;
        this.length = 0;
    }
    grow(sizeToAdd) {
        let newCapacity = this.capacity;
        while (newCapacity < this.length + sizeToAdd) newCapacity *= 2;
        if (newCapacity > this.capacity) {
            let newBuffer = Buffer.alloc(newCapacity);
            this.buffer.copy(newBuffer);
            this.buffer = newBuffer;
            this.capacity = newCapacity;
        }
    }

    writeUInt8(value) {
        this.grow(1);
        this.buffer.writeUInt8(value, this.length);
        this.length += 1;
    }
    writeUInt32(value) {
        this.grow(4);
        this.buffer.writeUInt32LE(value, this.length);
        this.length += 4;
    }
    writeFloat64(value) {
        this.grow(8);
        this.buffer.writeDoubleLE(value, this.length);
        this.length += 8;
    }

    getResult() {
        return this.buffer.subarray(0, this.length);
    }
}

/// A generic buffer reader helper class
class BufferReader {
    constructor(buffer) {
        this.buffer = buffer;
        this.cursor = 0;
    }

    nextUInt8() {
        return this.buffer.readUInt8(this.cursor++);
    }
    nextUInt32() {
        let value = this.buffer.readUInt32LE(this.cursor);
        this.cursor += 4;
        return value;
    }
    nextFloat64() {
        let value = this.buffer.readDoubleLE(this.cursor);
        this.cursor += 8;
        return value;
    }
}

/// Encode a contour set to a buffer
/// Encoding schema:
///  - 4 bytes: number of contours
///  - 4 bytes: total number of points
///  - 4 bytes per contour: number of points in each contour
///  - N bytes: point types, each point type is a byte
///  - 16 bytes per point: x and y coordinates, each coordinate is a float64
export function encode(cs) {
    let totalPoints = 0;
    let contourPointCounts = [];
    for (const contour of cs) {
        totalPoints += contour.length;
        contourPointCounts.push(contour.length);
    }

    const writer = new BufferWriter();

    // Write the header
    writer.writeUInt32(cs.length);
    writer.writeUInt32(totalPoints);
    for (const count of contourPointCounts) writer.writeUInt32(count);

    // Write the points' type
    for (const contour of cs) {
        for (const z of contour) {
            writer.writeUInt8(z.type);
        }
    }

    // Write the points' coordinates
    for (const contour of cs) {
        for (const z of contour) {
            writer.writeFloat64(z.x);
            writer.writeFloat64(z.y);
        }
    }

    return writer.getResult();
}

/// Decode a contour set from a buffer
export function decode(buf) {
    const reader = new BufferReader(buf);
    const numContours = reader.nextUInt32();
    const numPoints = reader.nextUInt32();
    const contourPointCounts = [];
    for (let i = 0; i < numContours; i++) {
        contourPointCounts.push(reader.nextUInt32());
    }

    // Read the points' type, set up the contour set
    const cs = [];
    for (let i = 0; i < numContours; i++) {
        const contour = [];
        for (let j = 0; j < contourPointCounts[i]; j++) {
            const type = reader.nextUInt8();
            contour.push(Point.fromXY(type, 0, 0));
        }
        cs.push(contour);
    }

    // Read the points' coordinates, set the coordinates properly
    for (let i = 0; i < numContours; i++) {
        const contour = cs[i];
        for (let j = 0; j < contourPointCounts[i]; j++) {
            const z = contour[j];
            z.x = reader.nextFloat64();
            z.y = reader.nextFloat64();
        }
    }

    return cs;
}
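Editor's note: a hypothetical round trip through the encoder above, to make the byte layout concrete. The point type value `0` is a placeholder rather than one of Iosevka's real point-type constants, and the relative imports assume the snippet sits next to `encoding.mjs`.

// Editorial sketch (not part of the commit): encode/decode round trip.
import { Point } from "./point.mjs";
import { encode, decode } from "./encoding.mjs";

const contourSet = [
    [Point.fromXY(0, 0, 0), Point.fromXY(0, 100, 0), Point.fromXY(0, 100, 700)],
    [Point.fromXY(0, 200, 350), Point.fromXY(0, 260, 350)]
];

const buf = encode(contourSet);
// 4 (contour count) + 4 (total points) + 2 * 4 (per-contour counts)
//   + 5 * 1 (point types) + 5 * 16 (coordinates) = 101 bytes
console.log(buf.length); // 101
const restored = decode(buf);
console.log(restored.length, restored[0].length); // 2 3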