Deps cleanup

be5invis 2022-06-27 20:24:04 -07:00
parent 274239b10b
commit 76e258ef77
15 changed files with 873 additions and 872 deletions

View file

@ -1,6 +1,6 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const zlib = require("zlib");
const { encode, decode } = require("@msgpack/msgpack");
@ -83,7 +83,7 @@ class Cache {
exports.load = async function (path, version, freshAgeKey) {
let cache = new Cache(freshAgeKey);
if (path && fs.existsSync(path)) {
const buf = zlib.gunzipSync(await fs.readFile(path));
const buf = zlib.gunzipSync(await fs.promises.readFile(path));
cache.loadRep(version, decode(buf));
}
return cache;
@ -93,7 +93,7 @@ exports.save = async function savePTCache(path, version, cache, diffOnly) {
if (path) {
const buf = encode(cache.toRep(version, diffOnly));
const bufZip = zlib.gzipSync(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength));
await fs.writeFile(path, bufZip);
await fs.promises.writeFile(path, bufZip);
}
};
@ -104,5 +104,5 @@ exports.merge = async function (base, diff, version, freshAgeKey) {
cacheBase.merge(cacheDiff);
await exports.save(base, version, cacheBase, false);
}
if (fs.existsSync(diff)) await fs.rm(diff);
if (fs.existsSync(diff)) await fs.promises.rm(diff);
};
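The change in this file is representative of the whole commit: fs-extra's promisified helpers are swapped for Node's built-in fs.promises API with no intended change in behaviour. A minimal sketch of the correspondence, assuming only the built-in fs module (the demo function and its somePath argument are illustrative, not part of the commit):

const fs = require("fs");

async function demo(somePath) {
	// fs-extra promisified these calls automatically; plain fs exposes them under fs.promises.
	const data = await fs.promises.readFile(somePath, "utf-8"); // was fs.readFile via fs-extra
	await fs.promises.writeFile(somePath + ".out", data);       // was fs.writeFile via fs-extra
	const entries = await fs.promises.readdir(".");             // was fs.readdir via fs-extra
	await fs.promises.rm(somePath + ".out");                    // was fs.rm via fs-extra
	return entries;
}

One caveat: fs.promises.rm was only added in Node 14.14, which is newer than the "node": ">=12.16.0" floor declared in the package.json below, so the cache-merge path above quietly assumes a more recent runtime than the engines field states.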

View file

@ -1,6 +1,6 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const path = require("path");
const zlib = require("zlib");
const { encode } = require("@msgpack/msgpack");
@ -78,7 +78,7 @@ async function getParameters() {
async function tryParseToml(str) {
try {
return Toml.parse(await fs.readFile(str, "utf-8"));
return Toml.parse(await fs.promises.readFile(str, "utf-8"));
} catch (e) {
throw new Error(
`Failed to parse configuration file ${str}.\nPlease validate whether there's syntax error.\n${e}`
@ -110,5 +110,5 @@ async function saveCharMap(argv, glyphStore) {
...createGrDisplaySheet(glyphStore, gn)
]);
}
await fs.writeFile(argv.oCharMap, zlib.gzipSync(encode(charMap)));
await fs.promises.writeFile(argv.oCharMap, zlib.gzipSync(encode(charMap)));
}

package-lock.json (generated, 1554 changed lines)

File diff suppressed because it is too large.

View file

@ -1,36 +1,35 @@
{
"name": "iosevka",
"version": "15.5.2",
"main": "./font-src/index.js",
"scripts": {
"build": "node utility/ensure-verda-exists && verda -f verdafile.js",
"bump-ver": "node utility/update-package-json-version/index",
"clean": "node utility/ensure-verda-exists && verda -f verdafile.js clean"
},
"dependencies": {
"@iarna/toml": "^2.2.5",
"@msgpack/msgpack": "^2.7.2",
"@unicode/unicode-14.0.0": "^1.2.1",
"cldr": "^7.2.0",
"fs-extra": "^10.1.0",
"ot-builder": "^1.5.2",
"otb-ttc-bundle": "^1.5.2",
"patel": "^0.37.1",
"semver": "^7.3.7",
"spiro": "^3.0.0",
"toposort": "^2.0.2",
"typo-geom": "^0.12.1",
"uuid": "^8.3.2",
"verda": "^1.6.0",
"wawoff2": "^2.0.1"
},
"devDependencies": {
"eslint": "^8.17.0",
"eslint-config-prettier": "^8.5.0",
"prettier": "^2.6.2",
"which": "^2.0.2"
},
"engines": {
"node": ">=12.16.0"
}
"name": "iosevka",
"version": "15.5.2",
"main": "./font-src/index.js",
"scripts": {
"build": "node utility/ensure-verda-exists && verda -f verdafile.js",
"bump-ver": "node utility/update-package-json-version/index",
"clean": "node utility/ensure-verda-exists && verda -f verdafile.js clean"
},
"dependencies": {
"@iarna/toml": "^2.2.5",
"@msgpack/msgpack": "^2.7.2",
"ot-builder": "^1.5.3",
"otb-ttc-bundle": "^1.5.3",
"semver": "^7.3.7",
"spiro": "^3.0.0",
"toposort": "^2.0.2",
"typo-geom": "^0.12.1",
"uuid": "^8.3.2",
"wawoff2": "^2.0.1"
},
"devDependencies": {
"@unicode/unicode-14.0.0": "^1.2.2",
"cldr": "^7.2.0",
"eslint": "^8.18.0",
"eslint-config-prettier": "^8.5.0",
"patel": "^0.37.1",
"prettier": "^2.7.1",
"verda": "^1.6.0",
"which": "^2.0.2"
},
"engines": {
"node": ">=12.16.0"
}
}

View file

@ -1,6 +1,6 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const path = require("path");
const { parseVariantsData } = require("../export-data/variants-data");
const { parseLigationData } = require("../export-data/ligation-data");
@ -9,7 +9,7 @@ const { getCharMapAndSupportedLanguageList } = require("../export-data/supported
///////////////////////////////////////////////////////////////////////////////////////////////////
module.exports = async function main(argv) {
let readme = await fs.readFile(argv.mdFilePath, "utf-8");
let readme = await fs.promises.readFile(argv.mdFilePath, "utf-8");
const dirs = {
images: path.posix.relative(path.dirname(argv.mdFilePath), "images")
};
@ -23,7 +23,7 @@ module.exports = async function main(argv) {
readme = (await processLigSetOt(dirs, 2, g => g.tag !== "calt")).apply(readme);
readme = (await processLangList(argv)).apply(readme);
readme = (await processPrivateBuildPlans()).apply(readme);
await fs.writeFile(argv.mdFilePath, readme);
await fs.promises.writeFile(argv.mdFilePath, readme);
};
async function processSsOt(dirs) {
@ -105,7 +105,7 @@ async function processSsStyles() {
const variantsData = await parseVariantsData();
const md = new MdCol("Section-Stylistic-Sets");
const headerPath = path.resolve(__dirname, "fragments/description-stylistic-sets.md");
md.log(await fs.readFile(headerPath, "utf-8"));
md.log(await fs.promises.readFile(headerPath, "utf-8"));
for (const gr of variantsData.composites) {
if (!gr.rank) continue;
md.log(` - \`${gr.tag}\`: Set character variant to “${gr.description}”.`);
@ -117,7 +117,7 @@ async function processCherryPickingStyles(dirs) {
const md = new MdCol("Section-Cherry-Picking-Styles");
const headerPath = path.resolve(__dirname, "fragments/description-cheery-picking-styles.md");
md.log(await fs.readFile(headerPath, "utf-8"));
md.log(await fs.promises.readFile(headerPath, "utf-8"));
for (const cv of [...variantsData.specials, ...variantsData.primes]) {
if (!cv.tag && !cv.isSpecial) continue;
@ -188,7 +188,7 @@ function escapeHtml(s) {
async function processPrivateBuildPlans() {
const md = new MdCol("Section-Private-Build-Plan-Sample");
const tomlPath = path.resolve(__dirname, "../../private-build-plans.sample.toml");
const toml = await fs.readFile(tomlPath, "utf-8");
const toml = await fs.promises.readFile(tomlPath, "utf-8");
md.log("```toml\n" + toml + "```");
return md;
}
@ -253,7 +253,7 @@ async function processLigSetCherryPicking() {
__dirname,
"fragments/description-cherry-picking-ligation-sets.md"
);
md.log(await fs.readFile(headerPath, "utf-8"));
md.log(await fs.promises.readFile(headerPath, "utf-8"));
for (const gr in ligData.cherry) {
md.log(` - \`${gr}\`: ${ligData.cherry[gr].desc}.`);
@ -265,7 +265,7 @@ async function processLigSetPreDef() {
const ligData = await parseLigationData();
const md = new MdCol("Section-Predefined-Ligation-Sets");
const headerPath = path.resolve(__dirname, "fragments/description-predefined-ligation-sets.md");
md.log(await fs.readFile(headerPath, "utf-8"));
md.log(await fs.promises.readFile(headerPath, "utf-8"));
for (const gr in ligData.rawSets) {
const readmeDesc =
ligData.rawSets[gr].readmeDesc ||

View file

@ -1,6 +1,6 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const { parseVariantsData } = require("./variants-data");
const { parseLigationData } = require("./ligation-data");
const { getCharMapAndSupportedLanguageList } = require("./supported-languages");
@ -16,18 +16,23 @@ module.exports = async function main(argv) {
argv.charMapItalicPath,
argv.charMapObliquePath
);
await fs.writeJson(
await fs.promises.writeFile(
argv.exportPathMeta,
{
version,
variantsData,
ligationData: {
cherry: ligationData.cherry,
samplesNarrow: ligationData.samplesNarrow,
nonMergeSets: ligationData.nonMergeSets
}
},
{ spaces: 2 }
JSON.stringify(
{
version,
variantsData,
ligationData: {
cherry: ligationData.cherry,
samplesNarrow: ligationData.samplesNarrow,
nonMergeSets: ligationData.nonMergeSets
}
},
{ spaces: 2 }
)
);
await fs.promises.writeFile(
argv.exportPathCov,
JSON.stringify({ version, ...cl }, { spaces: 2 })
);
await fs.writeJson(argv.exportPathCov, { version, ...cl }, { spaces: 2 });
};
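A side effect of replacing fs-extra's writeJson here: the second parameter of JSON.stringify is a replacer (a function or an array of keys), so the { spaces: 2 } object carried over from the writeJson options is silently ignored and both exports are now written without indentation. A hypothetical adjustment that would keep the old pretty-printed output (not part of the commit; metaData stands in for the object literal above) passes null as the replacer and 2 as the space argument:

await fs.promises.writeFile(
	argv.exportPathMeta,
	JSON.stringify(metaData, null, 2) // null replacer, 2-space indentation
);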

View file

@ -1,6 +1,6 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const path = require("path");
const toml = require("@iarna/toml");
@ -189,7 +189,7 @@ const ligationSamplesNarrow = [
];
exports.parseLigationData = async function () {
const ligToml = await fs.readFile(
const ligToml = await fs.promises.readFile(
path.join(__dirname, "../../params/ligation-set.toml"),
"utf8"
);

View file

@ -1,7 +1,7 @@
"use strict";
const cldr = require("cldr");
const fs = require("fs-extra");
const fs = require("fs");
const zlib = require("zlib");
const { decode } = require("@msgpack/msgpack");
@ -30,7 +30,7 @@ exports.getCharMapAndSupportedLanguageList = async function (cmpUpright, cmpItal
};
async function readMpCharMap(p) {
return decode(zlib.gunzipSync(await fs.readFile(p)));
return decode(zlib.gunzipSync(await fs.promises.readFile(p)));
}
function getSupportedLanguageSet(rawCoverage) {

View file

@ -1,13 +1,13 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const path = require("path");
const toml = require("@iarna/toml");
const VariantDataParser = require("../../font-src/support/variant-data");
exports.parseVariantsData = async function () {
const variantsToml = await fs.readFile(
const variantsToml = await fs.promises.readFile(
path.join(__dirname, "../../params/variants.toml"),
"utf8"
);

View file

@ -1,7 +1,7 @@
"use strict";
const path = require("path");
const fs = require("fs-extra");
const fs = require("fs");
const semver = require("semver");
const ChangeFileDir = path.join(__dirname, "../../changes");
@ -25,14 +25,14 @@ class Output {
}
async function GenerateChangeList(argv, out) {
const changeFiles = await fs.readdir(ChangeFileDir);
const changeFiles = await fs.promises.readdir(ChangeFileDir);
const fragments = new Map();
for (const file of changeFiles) {
const filePath = path.join(ChangeFileDir, file);
const fileParts = path.parse(filePath);
if (fileParts.ext !== ".md") continue;
if (!semver.valid(fileParts.name) || semver.lt(argv.version, fileParts.name)) continue;
fragments.set(fileParts.name, await fs.readFile(filePath, "utf8"));
fragments.set(fileParts.name, await fs.promises.readFile(filePath, "utf8"));
}
const sortedFragments = Array.from(fragments).sort((a, b) => semver.compare(b[0], a[0]));

View file

@ -1,10 +1,10 @@
"use strict";
const Path = require("path");
const Fs = require("fs-extra");
const path = require("path");
const fs = require("fs");
const SemVer = require("semver");
const ChangeFileDir = Path.join(__dirname, "../../changes");
const ChangeFileDir = path.join(__dirname, "../../changes");
///////////////////////////////////////////////////////////////////////////////////////////////////
@ -23,8 +23,7 @@ module.exports = async function main(argv) {
`</table>`
);
await Fs.ensureDir(Path.join(__dirname, `../../release-archives/`));
await Fs.writeFile(argv.outputPath, out.buffer);
await fs.promises.writeFile(argv.outputPath, out.buffer);
};
class Output {
@ -40,8 +39,8 @@ class Output {
// Copy Markdown
async function CopyMarkdown(out, name) {
const content = await Fs.readFile(
Path.resolve(__dirname, `release-note-fragments/${name}`),
const content = await fs.promises.readFile(
path.resolve(__dirname, `release-note-fragments/${name}`),
"utf8"
);
out.log(content);
@ -51,14 +50,14 @@ async function CopyMarkdown(out, name) {
// CHANGE LIST
async function GenerateChangeList(argv, out) {
const changeFiles = await Fs.readdir(ChangeFileDir);
const changeFiles = await fs.promises.readdir(ChangeFileDir);
const fragments = new Map();
for (const file of changeFiles) {
const filePath = Path.join(ChangeFileDir, file);
const fileParts = Path.parse(filePath);
const filePath = path.join(ChangeFileDir, file);
const fileParts = path.parse(filePath);
if (fileParts.ext !== ".md") continue;
if (!SemVer.valid(fileParts.name) || SemVer.lt(argv.version, fileParts.name)) continue;
fragments.set(fileParts.name, await Fs.readFile(filePath, "utf8"));
fragments.set(fileParts.name, await fs.promises.readFile(filePath, "utf8"));
}
const sortedFragments = Array.from(fragments).sort((a, b) => SemVer.compare(b[0], a[0]));
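Note that the Fs.ensureDir call for release-archives/ is dropped in this hunk without a native replacement, so the script now assumes that directory already exists when it writes the output. If creation is still wanted, a hypothetical one-line equivalent (not part of the commit) is the built-in mkdir with recursive: true, which, like ensureDir, succeeds when the directory is already present:

await fs.promises.mkdir(path.join(__dirname, "../../release-archives/"), { recursive: true });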

View file

@ -1,7 +1,7 @@
"use strict";
const Path = require("path");
const Fs = require("fs-extra");
const path = require("path");
const fs = require("fs");
///////////////////////////////////////////////////////////////////////////////////////////////////
@ -12,8 +12,7 @@ module.exports = async function main(argv) {
await CopyMarkdown(out, "packages-desc.md");
await GeneratePackageList(argv, out);
await Fs.ensureDir(Path.join(__dirname, `../../release-archives/`));
await Fs.writeFile(argv.outputPath, out.buffer);
await fs.promises.writeFile(argv.outputPath, out.buffer);
};
class Output {
@ -29,8 +28,8 @@ class Output {
// Copy Markdown
async function CopyMarkdown(out, name) {
const content = await Fs.readFile(
Path.resolve(__dirname, `release-note-fragments/${name}`),
const content = await fs.promises.readFile(
path.resolve(__dirname, `release-note-fragments/${name}`),
"utf8"
);
out.log(content);
@ -52,7 +51,7 @@ const DownloadLinkPrefixNoVersion = `https://github.com/be5invis/Iosevka/release
async function GeneratePackageList(argv, out) {
const imagePrefix = `${ImagePrefixNoVersion}/v${argv.version}/images`;
const pkgShapesData = await Fs.readJson(argv.releasePackagesJsonPath);
const pkgShapesData = await fs.promises.readJson(argv.releasePackagesJsonPath);
const DownloadLinkPrefix = `${DownloadLinkPrefixNoVersion}/v${argv.version}`;
out.log(`<table>`);
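The same ensureDir removal happens in this file as well, but the readJson line deserves a closer look: Node's built-in fs.promises has no readJson method (it was an fs-extra helper), so fs.promises.readJson(...) would throw a TypeError when this package-list generator runs. A hypothetical native replacement (not part of the commit) reads the file and parses it explicitly:

const pkgShapesData = JSON.parse(
	await fs.promises.readFile(argv.releasePackagesJsonPath, "utf8")
);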

View file

@ -16,7 +16,6 @@ const { parseVariantsData } = require("../export-data/variants-data");
const { parseLigationData } = require("../export-data/ligation-data");
module.exports = async function main(argv) {
const weightGrades = [100, 200, 300, 400, 500, 600, 700, 800, 900];
const variantsData = await parseVariantsData();
const ligationData = await parseLigationData();

View file

@ -1,10 +1,10 @@
"use strict";
const fs = require("fs-extra");
const fs = require("fs");
const wawoff = require("wawoff2");
module.exports = async function (from, to) {
const input = await fs.readFile(from);
const input = await fs.promises.readFile(from);
const out = await wawoff.compress(input);
await fs.writeFile(to, out);
await fs.promises.writeFile(to, out);
};

View file

@ -1,7 +1,7 @@
"use strict";
const path = require("path");
const fs = require("fs-extra");
const fs = require("fs");
const semver = require("semver");
const ChangeFileDir = path.join(__dirname, "../../changes");
@ -26,7 +26,7 @@ async function main() {
}
async function GetLatestVersion() {
const changeFiles = await fs.readdir(ChangeFileDir);
const changeFiles = await fs.promises.readdir(ChangeFileDir);
const versions = new Set();
for (const file of changeFiles) {
const filePath = path.join(ChangeFileDir, file);