Format all files with prettier

Andy Balaam
2022-12-15 11:00:58 +00:00
parent 040344eeab
commit 0faac52dae
67 changed files with 1760 additions and 1793 deletions
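The Prettier configuration itself is not part of this diff, but the reformatted lines point to its settings: strings switch to double quotes, single arrow-function parameters gain parentheses, trailing commas appear in multi-line call arguments, and lines wrap at roughly 120 characters. A config consistent with those changes (an inference from the diff, not the repository's actual file) would look something like the sketch below, typically applied with a run such as `prettier --write .`:

```json
{
    "printWidth": 120,
    "singleQuote": false,
    "arrowParens": "always",
    "trailingComma": "all"
}
```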


@@ -20,16 +20,16 @@ function errCheck(err?: Error): void {
}
const I18N_BASE_PATH = "src/i18n/strings/";
const INCLUDE_LANGS = fs.readdirSync(I18N_BASE_PATH).filter(fn => fn.endsWith(".json"));
const INCLUDE_LANGS = fs.readdirSync(I18N_BASE_PATH).filter((fn) => fn.endsWith(".json"));
// Ensure lib, lib/i18n and lib/i18n/strings all exist
fs.mkdirSync('lib/i18n/strings', { recursive: true });
fs.mkdirSync("lib/i18n/strings", { recursive: true });
type Translations = Record<string, Record<string, string> | string>;
function genLangFile(file: string, dest: string): void {
const inTrs: Record<string, string> = {};
[file].forEach(function(f) {
[file].forEach(function (f) {
if (fs.existsSync(f)) {
try {
Object.assign(inTrs, JSON.parse(fs.readFileSync(f).toString()));
@@ -68,7 +68,7 @@ function weblateToCounterpart(inTrs: Record<string, string>): Translations {
const outTrs: Translations = {};
for (const key of Object.keys(inTrs)) {
const keyParts = key.split('|', 2);
const keyParts = key.split("|", 2);
if (keyParts.length === 2) {
let obj = outTrs[keyParts[0]];
if (obj === undefined) {
@@ -77,7 +77,7 @@ function weblateToCounterpart(inTrs: Record<string, string>): Translations {
// This is a transitional edge case if a string went from singular to pluralised and both still remain
// in the translation json file. Use the singular translation as `other` and merge pluralisation atop.
obj = outTrs[keyParts[0]] = {
"other": inTrs[key],
other: inTrs[key],
};
console.warn("Found entry in i18n file in both singular and pluralised form", keyParts[0]);
}
@@ -108,10 +108,7 @@ function watchLanguage(file: string, dest: string): void {
}, 500);
};
chokidar.watch(file)
.on('add', makeLang)
.on('change', makeLang)
.on('error', errCheck);
chokidar.watch(file).on("add", makeLang).on("change", makeLang).on("error", errCheck);
}
// language resources
@@ -121,5 +118,5 @@ INCLUDE_LANGS.forEach((file): void => {
}, {});
if (watch) {
INCLUDE_LANGS.forEach(file => watchLanguage(I18N_BASE_PATH + file, I18N_DEST));
INCLUDE_LANGS.forEach((file) => watchLanguage(I18N_BASE_PATH + file, I18N_DEST));
}


@@ -1,7 +1,7 @@
const fsProm = require('fs').promises;
const path = require('path');
const fsProm = require("fs").promises;
const path = require("path");
exports.default = async function(context) {
exports.default = async function (context) {
const { electronPlatformName, appOutDir } = context;
// Squirrel windows will try to relaunch the app using an executable of the same name as
@@ -9,7 +9,7 @@ exports.default = async function(context) {
// We add a fake Riot.exe that it can run which runs the real one.
// This also gets signed automatically, presumably because electron-build just looks for all
// exe files and signs them all...
if (electronPlatformName === 'win32') {
await fsProm.copyFile('build/rebrand_stub/rebrand_stub.exe', path.join(appOutDir, "Riot.exe"));
if (electronPlatformName === "win32") {
await fsProm.copyFile("build/rebrand_stub/rebrand_stub.exe", path.join(appOutDir, "Riot.exe"));
}
};


@@ -1,11 +1,11 @@
const { notarize } = require('@electron/notarize');
const { notarize } = require("@electron/notarize");
let warned = false;
exports.default = async function(context) {
exports.default = async function (context) {
const { electronPlatformName, appOutDir } = context;
const appId = context.packager.info.appInfo.id;
if (electronPlatformName === 'darwin') {
if (electronPlatformName === "darwin") {
const appName = context.packager.appInfo.productFilename;
const keychainProfile = process.env.NOTARIZE_KEYCHAIN_PROFILE;


@@ -1,4 +1,4 @@
const { execFile } = require('child_process');
const { execFile } = require("child_process");
// Loosely based on computeSignToolArgs from app-builder-lib/src/codeSign/windowsCodeSign.ts
function computeSignToolArgs(options, keyContainer) {
@@ -8,15 +8,15 @@ function computeSignToolArgs(options, keyContainer) {
const timestampingServiceUrl = options.options.timeStampServer || "http://timestamp.digicert.com";
args.push(
options.isNest || options.hash === "sha256" ? "/tr" : "/t",
options.isNest || options.hash === "sha256" ? (
options.options.rfc3161TimeStampServer || "http://timestamp.comodoca.com/rfc3161"
) : timestampingServiceUrl,
options.isNest || options.hash === "sha256"
? options.options.rfc3161TimeStampServer || "http://timestamp.comodoca.com/rfc3161"
: timestampingServiceUrl,
);
}
args.push('/kc', keyContainer);
args.push("/kc", keyContainer);
// To use the hardware token (this should probably be less hardcoded)
args.push('/csp', 'eToken Base Cryptographic Provider');
args.push("/csp", "eToken Base Cryptographic Provider");
// The certificate file. Somehow this appears to be the only way to specify
// the cert that works. If you specify the subject name or hash, it will
// say it can't associate the private key to the certificate.
@@ -24,7 +24,7 @@ function computeSignToolArgs(options, keyContainer) {
// so we don't have to hard-code this here
// fwiw https://stackoverflow.com/questions/17927895/automate-extended-validation-ev-code-signing
// is about the most useful resource on automating code signing...
args.push('/f', 'element.io\\New_Vector_Ltd.pem');
args.push("/f", "element.io\\New_Vector_Ltd.pem");
if (options.hash !== "sha1") {
args.push("/fd", options.hash);
@@ -35,7 +35,7 @@ function computeSignToolArgs(options, keyContainer) {
// msi does not support dual-signing
if (options.isNest) {
args.push("/as");
args.push("/as");
}
// https://github.com/electron-userland/electron-builder/issues/2875#issuecomment-387233610
@@ -47,15 +47,15 @@ function computeSignToolArgs(options, keyContainer) {
}
let warned = false;
exports.default = async function(options) {
exports.default = async function (options) {
const keyContainer = process.env.SIGNING_KEY_CONTAINER;
if (keyContainer === undefined) {
if (!warned) {
console.warn(
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
"! Skipping Windows signing. !\n" +
"! SIGNING_KEY_CONTAINER not defined. !\n" +
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!",
"! Skipping Windows signing. !\n" +
"! SIGNING_KEY_CONTAINER not defined. !\n" +
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!",
);
warned = true;
}
@@ -63,9 +63,9 @@ exports.default = async function(options) {
}
return new Promise((resolve, reject) => {
const args = ['sign'].concat(computeSignToolArgs(options, keyContainer));
const args = ["sign"].concat(computeSignToolArgs(options, keyContainer));
execFile('signtool', args, {}, (error, stdout) => {
execFile("signtool", args, {}, (error, stdout) => {
if (error) {
console.error("signtool failed with code " + error);
reject("signtool failed with code " + error);


@@ -14,7 +14,7 @@ import { setPackageVersion } from "./set-version";
const PUB_KEY_URL = "https://packages.riot.im/element-release-key.asc";
const PACKAGE_URL_PREFIX = "https://github.com/vector-im/element-web/releases/download/";
const DEVELOP_TGZ_URL = "https://develop.element.io/develop.tar.gz";
const ASAR_PATH = 'webapp.asar';
const ASAR_PATH = "webapp.asar";
async function downloadToFile(url: string, filename: string): Promise<void> {
console.log("Downloading " + url + "...");
@@ -35,7 +35,7 @@ async function downloadToFile(url: string, filename: string): Promise<void> {
async function verifyFile(filename: string): Promise<void> {
return new Promise<void>((resolve, reject) => {
childProcess.execFile('gpg', ['--verify', filename + '.asc', filename], (error) => {
childProcess.execFile("gpg", ["--verify", filename + ".asc", filename], (error) => {
if (error) {
reject(error);
} else {
@@ -48,8 +48,8 @@ async function verifyFile(filename: string): Promise<void> {
async function main(): Promise<number | undefined> {
let verify = true;
let importkey = false;
let pkgDir = 'packages';
let deployDir = 'deploys';
let pkgDir = "packages";
let deployDir = "deploys";
let cfgDir: string | undefined;
let targetVersion: string | undefined;
let filename: string | undefined;
@@ -58,22 +58,22 @@ async function main(): Promise<number | undefined> {
while (process.argv.length > 2) {
switch (process.argv[2]) {
case '--noverify':
case "--noverify":
verify = false;
break;
case '--importkey':
case "--importkey":
importkey = true;
break;
case '--packages':
case "--packages":
process.argv.shift();
pkgDir = process.argv[2];
break;
case '--deploys':
case "--deploys":
process.argv.shift();
deployDir = process.argv[2];
break;
case '--cfgdir':
case '-d':
case "--cfgdir":
case "-d":
process.argv.shift();
cfgDir = process.argv[2];
break;
@@ -84,13 +84,13 @@ async function main(): Promise<number | undefined> {
}
if (targetVersion === undefined) {
targetVersion = 'v' + riotDesktopPackageJson.version;
} else if (targetVersion !== 'develop') {
targetVersion = "v" + riotDesktopPackageJson.version;
} else if (targetVersion !== "develop") {
setVersion = true; // version was specified
}
if (targetVersion === 'develop') {
filename = 'develop.tar.gz';
if (targetVersion === "develop") {
filename = "develop.tar.gz";
url = DEVELOP_TGZ_URL;
verify = false; // develop builds aren't signed
} else if (targetVersion.includes("://")) {
@@ -99,11 +99,11 @@ async function main(): Promise<number | undefined> {
verify = false; // manually verified
} else {
filename = `element-${targetVersion}.tar.gz`;
url = PACKAGE_URL_PREFIX + targetVersion + '/' + filename;
url = PACKAGE_URL_PREFIX + targetVersion + "/" + filename;
}
const haveGpg = await new Promise<boolean>((resolve) => {
childProcess.execFile('gpg', ['--version'], (error) => {
childProcess.execFile("gpg", ["--version"], (error) => {
resolve(!error);
});
});
@@ -115,7 +115,7 @@ async function main(): Promise<number | undefined> {
}
await new Promise<boolean>((resolve) => {
const gpgProc = childProcess.execFile('gpg', ['--import'], (error) => {
const gpgProc = childProcess.execFile("gpg", ["--import"], (error) => {
if (error) {
console.log("Failed to import key", error);
} else {
@@ -123,7 +123,7 @@ async function main(): Promise<number | undefined> {
}
resolve(!error);
});
fetch(PUB_KEY_URL).then(resp => {
fetch(PUB_KEY_URL).then((resp) => {
stream.pipeline(resp.body, gpgProc.stdin!);
});
});
@@ -143,13 +143,12 @@ async function main(): Promise<number | undefined> {
}
let haveDeploy = false;
let expectedDeployDir = path.join(deployDir, path.basename(filename).replace(/\.tar\.gz/, ''));
let expectedDeployDir = path.join(deployDir, path.basename(filename).replace(/\.tar\.gz/, ""));
try {
await fs.opendir(expectedDeployDir);
console.log(expectedDeployDir + "already exists");
haveDeploy = true;
} catch (e) {
}
} catch (e) {}
if (!haveDeploy) {
const outPath = path.join(pkgDir, filename);
@@ -167,11 +166,11 @@ async function main(): Promise<number | undefined> {
if (verify) {
try {
await fs.stat(outPath+'.asc');
await fs.stat(outPath + ".asc");
console.log("Already have " + filename + ".asc: not redownloading");
} catch (e) {
try {
await downloadToFile(url + '.asc', outPath + '.asc');
await downloadToFile(url + ".asc", outPath + ".asc");
} catch (e) {
console.log("Failed to download " + url, e);
return 1;
@@ -192,7 +191,7 @@ async function main(): Promise<number | undefined> {
await tar.x({
file: outPath,
cwd: deployDir,
onentry: entry => {
onentry: (entry) => {
// Find the appropriate extraction path, only needed for `develop` where the dir name is unknown
if (entry.type === "Directory" && !path.join(deployDir, entry.path).startsWith(expectedDeployDir)) {
expectedDeployDir = path.join(deployDir, entry.path);
@@ -205,13 +204,12 @@ async function main(): Promise<number | undefined> {
await fs.stat(ASAR_PATH);
console.log(ASAR_PATH + " already present: removing");
await fs.unlink(ASAR_PATH);
} catch (e) {
}
} catch (e) {}
if (cfgDir.length) {
const configJsonSource = path.join(cfgDir, 'config.json');
const configJsonDest = path.join(expectedDeployDir, 'config.json');
console.log(configJsonSource + ' -> ' + configJsonDest);
const configJsonSource = path.join(cfgDir, "config.json");
const configJsonDest = path.join(expectedDeployDir, "config.json");
console.log(configJsonSource + " -> " + configJsonDest);
await fs.copyFile(configJsonSource, configJsonDest);
} else {
console.log("Skipping config file");
@@ -229,9 +227,11 @@ async function main(): Promise<number | undefined> {
console.log("Done!");
}
main().then((ret) => {
process.exit(ret);
}).catch(e => {
console.error(e);
process.exit(1);
});
main()
.then((ret) => {
process.exit(ret);
})
.catch((e) => {
console.error(e);
process.exit(1);
});


@@ -84,21 +84,21 @@ function humanFileSize(bytes: number, si = false, dp = 1): string {
const thresh = si ? 1000 : 1024;
if (Math.abs(bytes) < thresh) {
return bytes + ' B';
return bytes + " B";
}
const units = si
? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
? ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
: ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"];
let u = -1;
const r = 10**dp;
const r = 10 ** dp;
do {
bytes /= thresh;
++u;
} while (Math.round(Math.abs(bytes) * r) / r >= thresh && u < units.length - 1);
return bytes.toFixed(dp) + ' ' + units[u];
return bytes.toFixed(dp) + " " + units[u];
}
const dateTimeOptions: Intl.DateTimeFormatOptions = {
@@ -122,7 +122,8 @@ function indexLayout(prefix: string, files: _Object[], dirs: string[]): string {
}
for (const file of files) {
if (!file.Key ||
if (
!file.Key ||
HIDDEN_FILES.includes(`/${file.Key}`) ||
HIDDEN_FILES.includes(file.Key.slice(file.Key.lastIndexOf("/") + 1))
) {
@@ -143,11 +144,15 @@ function indexLayout(prefix: string, files: _Object[], dirs: string[]): string {
</tr>
</thead>
<tbody>
${rows.map(([link, name, size, date]) => `<tr>
${rows
.map(
([link, name, size, date]) => `<tr>
<td class="link"><a href="${link}">${name}</a></td>
<td class="size">${size ? humanFileSize(size) : "-"}</td>
<td class="date">${date?.toLocaleString("en-GB", dateTimeOptions) ?? "-"}</td>
</tr>`).join("")}
</tr>`,
)
.join("")}
</tbody>
</table>
`);
@@ -166,17 +171,20 @@ async function generateIndex(Prefix: string): Promise<{
const listResponse = await client.send(command);
const files = listResponse.Contents ?? [];
const dirs = listResponse.CommonPrefixes
?.map(p => p.Prefix?.slice(Prefix.length).split("/", 2)[0])
.filter(Boolean) as string[] ?? [];
const dirs =
(listResponse.CommonPrefixes?.map((p) => p.Prefix?.slice(Prefix.length).split("/", 2)[0]).filter(
Boolean,
) as string[]) ?? [];
const Body = indexLayout(Prefix, files, dirs);
await client.send(new PutObjectCommand({
Body,
Bucket,
ContentType: "text/html",
Key: Prefix + "index.html",
}));
await client.send(
new PutObjectCommand({
Body,
Bucket,
ContentType: "text/html",
Key: Prefix + "index.html",
}),
);
return { files, dirs };
}


@@ -1,40 +1,40 @@
hak
===
# hak
This tool builds native dependencies for element-desktop. Here follows some very minimal
documentation for it.
Goals:
* Must build compiled native node modules in a shippable state
(ie. only dynamically linked against libraries that will be on the
target system, all unnecessary files removed).
* Must be able to build any native module, no matter what build system
it uses (electron-rebuild is supposed to do this job but only works
for modules that use gyp).
- Must build compiled native node modules in a shippable state
(ie. only dynamically linked against libraries that will be on the
target system, all unnecessary files removed).
- Must be able to build any native module, no matter what build system
it uses (electron-rebuild is supposed to do this job but only works
for modules that use gyp).
It's also loosely designed to be a general tool and agnostic to what it's
actually building. It's used here to build modules for the electron app
but should work equally well for building modules for normal node.
Running
=======
# Running
Hak is invoked with a command and a dependency, eg. `yarn run hak fetch matrix-seshat`.
If no dependencies are given, hak runs the command on all dependencies.
Files
=====
# Files
There are a lot of files involved:
* scripts/hak/... - The tool itself
* hak/[dependency] - Files provided by the app that tell hak how to build each of its native dependencies.
Contains a hak.json file and also some script files, each of which must be referenced in hak.json.
* .hak/ - Files generated by hak in the course of doing its job. Includes the dependency module itself and
any of the native dependency's native dependencies.
* .hak/[dependency]/build - An extracted copy of the dependency's node module used to build it.
* .hak/[dependency]/out - Another extracted copy of the dependency, this one contains only what will be shipped.
- scripts/hak/... - The tool itself
- hak/[dependency] - Files provided by the app that tell hak how to build each of its native dependencies.
Contains a hak.json file and also some script files, each of which must be referenced in hak.json.
- .hak/ - Files generated by hak in the course of doing its job. Includes the dependency module itself and
any of the native dependency's native dependencies.
- .hak/[dependency]/build - An extracted copy of the dependency's node module used to build it.
- .hak/[dependency]/out - Another extracted copy of the dependency, this one contains only what will be shipped.
# Workings
Workings
========
Hak works around native node modules that try to fetch or build their native component in
the npm 'install' phase - modules that do this will typically end up with native components
targeted to the build platform and the node that npm/yarn is using, which is no good for an
@@ -49,33 +49,34 @@ This also means that the dependencies cannot be listed in `dependencies` or
try to fetch their native parts. Instead, they are listed in `hakDependencies` which
hak reads to install them for you.
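As a concrete illustration, a `hakDependencies` section in `package.json` might look like the following; `matrix-seshat` is the module this README already uses as its example, and the version range here is purely illustrative:

```json
{
    "hakDependencies": {
        "matrix-seshat": "^2.0.0"
    }
}
```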
Hak will *not* install dependencies for the copy of the module it links into your
Hak will _not_ install dependencies for the copy of the module it links into your
project, so if your native module has javascript dependencies that are actually needed at
runtime (and not just to fetch / build the native parts), it won't work.
Hak will generate a `.yarnrc` in the project directory to set the link directory to its
own in the .hak directory (unless one already exists, in which case this is your problem).
Lifecycle
=========
Hak is divided into lifecycle stages, in order:
* fetch - Download and extract the source of the dependency
* link - Link the copy of the dependency into your node_modules directory
* fetchDeps - Fetch & extract any native dependencies required to build the module.
* build - The Good Stuff. Configure and build any native dependencies, then the module itself.
* copy - Copy the built artifact from the module build directory to the module output directory.
# Lifecycle
Hak is divided into lifecycle stages, in order:
- fetch - Download and extract the source of the dependency
- link - Link the copy of the dependency into your node_modules directory
- fetchDeps - Fetch & extract any native dependencies required to build the module.
- build - The Good Stuff. Configure and build any native dependencies, then the module itself.
- copy - Copy the built artifact from the module build directory to the module output directory.
# hak.json
hak.json
========
The scripts section contains scripts used for lifecycle stages that need them (fetch, fetchDeps, build).
It also contains 'prune' and 'copy' which are globs of files to delete from the output module directory
and copy over from the module build directory to the output module directory, respectively.
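For illustration, a `hak.json` matching that description could look like the sketch below. The script names are the ones the hak CLI in this commit accepts (check, fetch, fetchDeps, build); the file names and globs are hypothetical:

```json
{
    "scripts": {
        "fetch": "fetch.ts",
        "fetchDeps": "fetchDeps.ts",
        "build": "build.ts"
    },
    "prune": "build_scripts/**",
    "copy": "lib/**"
}
```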
Shortcomings
============
# Shortcomings
Hak doesn't know about dependencies between lifecycle stages, ie. it doesn't know that you need to
'fetch' and 'fetchDeps' before you can 'build', etc. You get to run each individually, and remember
the right order.
There is also a *lot* of duplication in the command execution: we should abstract away
There is also a _lot_ of duplication in the command execution: we should abstract away
some of the boilerplate required to run commands & so forth.


@@ -14,11 +14,11 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from 'path';
import rimraf from 'rimraf';
import path from "path";
import rimraf from "rimraf";
import { DependencyInfo } from './dep';
import HakEnv from './hakEnv';
import { DependencyInfo } from "./dep";
import HakEnv from "./hakEnv";
export default async function clean(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
await new Promise<void>((resolve, reject) => {
@@ -32,7 +32,7 @@ export default async function clean(hakEnv: HakEnv, moduleInfo: DependencyInfo):
});
await new Promise<void>((resolve, reject) => {
rimraf(path.join(hakEnv.dotHakDir, 'links', moduleInfo.name), (err?: Error | null) => {
rimraf(path.join(hakEnv.dotHakDir, "links", moduleInfo.name), (err?: Error | null) => {
if (err) {
reject(err);
} else {
@@ -42,7 +42,7 @@ export default async function clean(hakEnv: HakEnv, moduleInfo: DependencyInfo):
});
await new Promise<void>((resolve, reject) => {
rimraf(path.join(hakEnv.projectRoot, 'node_modules', moduleInfo.name), (err?: Error | null) => {
rimraf(path.join(hakEnv.projectRoot, "node_modules", moduleInfo.name), (err?: Error | null) => {
if (err) {
reject(err);
} else {


@@ -14,15 +14,15 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from 'path';
import fsProm from 'fs/promises';
import childProcess from 'child_process';
import rimraf from 'rimraf';
import glob from 'glob';
import mkdirp from 'mkdirp';
import path from "path";
import fsProm from "fs/promises";
import childProcess from "child_process";
import rimraf from "rimraf";
import glob from "glob";
import mkdirp from "mkdirp";
import HakEnv from './hakEnv';
import { DependencyInfo } from './dep';
import HakEnv from "./hakEnv";
import { DependencyInfo } from "./dep";
export default async function copy(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
if (moduleInfo.cfg.prune) {
@@ -34,7 +34,7 @@ export default async function copy(hakEnv: HakEnv, moduleInfo: DependencyInfo):
await mkdirp(moduleInfo.moduleOutDir);
process.chdir(moduleInfo.moduleOutDir);
await new Promise<void>((resolve, reject) => {
rimraf(moduleInfo.cfg.prune, {}, err => {
rimraf(moduleInfo.cfg.prune, {}, (err) => {
err ? reject(err) : resolve();
});
});
@@ -48,46 +48,44 @@ export default async function copy(hakEnv: HakEnv, moduleInfo: DependencyInfo):
// is the same as moduleBuildDirs[0], so we're just listing the contents
// of the first one.
const files = await new Promise<string[]>((resolve, reject) => {
glob(moduleInfo.cfg.copy, {
nosort: true,
silent: true,
cwd: moduleInfo.moduleBuildDir,
}, (err, files) => {
err ? reject(err) : resolve(files);
});
glob(
moduleInfo.cfg.copy,
{
nosort: true,
silent: true,
cwd: moduleInfo.moduleBuildDir,
},
(err, files) => {
err ? reject(err) : resolve(files);
},
);
});
if (moduleInfo.moduleBuildDirs.length > 1) {
if (!hakEnv.isMac()) {
console.error(
"You asked me to copy multiple targets but I've only been taught " +
"how to do that on macOS.",
"You asked me to copy multiple targets but I've only been taught " + "how to do that on macOS.",
);
throw new Error("Can't copy multiple targets on this platform");
}
for (const f of files) {
const components = moduleInfo.moduleBuildDirs.map(dir => path.join(dir, f));
const components = moduleInfo.moduleBuildDirs.map((dir) => path.join(dir, f));
const dst = path.join(moduleInfo.moduleOutDir, f);
await mkdirp(path.dirname(dst));
await new Promise<void>((resolve, reject) => {
childProcess.execFile('lipo',
['-create', '-output', dst, ...components], (err) => {
if (err) {
reject(err);
} else {
resolve();
}
},
);
childProcess.execFile("lipo", ["-create", "-output", dst, ...components], (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
} else {
console.log(
"Copying files from " +
moduleInfo.moduleBuildDir + " to " + moduleInfo.moduleOutDir,
);
console.log("Copying files from " + moduleInfo.moduleBuildDir + " to " + moduleInfo.moduleOutDir);
for (const f of files) {
console.log("\t" + f);
const src = path.join(moduleInfo.moduleBuildDir, f);


@@ -28,5 +28,5 @@ export interface DependencyInfo {
moduleOutDir: string;
nodeModuleBinDir: string;
depPrefix: string;
scripts: Record<string, (hakEnv: HakEnv, moduleInfo: DependencyInfo) => Promise<void> >;
scripts: Record<string, (hakEnv: HakEnv, moduleInfo: DependencyInfo) => Promise<void>>;
}


@@ -14,12 +14,12 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import fsProm from 'fs/promises';
import childProcess from 'child_process';
import pacote from 'pacote';
import fsProm from "fs/promises";
import childProcess from "child_process";
import pacote from "pacote";
import HakEnv from './hakEnv';
import { DependencyInfo } from './dep';
import HakEnv from "./hakEnv";
import { DependencyInfo } from "./dep";
export default async function fetch(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
let haveModuleBuildDir;
@@ -41,15 +41,11 @@ export default async function fetch(hakEnv: HakEnv, moduleInfo: DependencyInfo):
console.log("Running yarn install in " + moduleInfo.moduleBuildDir);
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(
hakEnv.isWin() ? 'yarn.cmd' : 'yarn',
['install', '--ignore-scripts'],
{
stdio: 'inherit',
cwd: moduleInfo.moduleBuildDir,
},
);
proc.on('exit', code => {
const proc = childProcess.spawn(hakEnv.isWin() ? "yarn.cmd" : "yarn", ["install", "--ignore-scripts"], {
stdio: "inherit",
cwd: moduleInfo.moduleBuildDir,
});
proc.on("exit", (code) => {
code ? reject(code) : resolve();
});
});


@@ -14,10 +14,10 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import mkdirp from 'mkdirp';
import mkdirp from "mkdirp";
import { DependencyInfo } from './dep';
import HakEnv from './hakEnv';
import { DependencyInfo } from "./dep";
import HakEnv from "./hakEnv";
export default async function fetchDeps(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
await mkdirp(moduleInfo.moduleDotHakDir);


@@ -14,16 +14,16 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from 'path';
import os from 'os';
import path from "path";
import os from "os";
import nodePreGypVersioning from "node-pre-gyp/lib/util/versioning";
import { getElectronVersion } from "app-builder-lib/out/electron/electronVersion";
import { Arch, Target, TARGETS, getHost, isHostId, TargetId } from './target';
import { Arch, Target, TARGETS, getHost, isHostId, TargetId } from "./target";
async function getRuntime(projectRoot: string): Promise<string> {
const electronVersion = await getElectronVersion(projectRoot);
return electronVersion ? 'electron' : 'node-webkit';
return electronVersion ? "electron" : "node-webkit";
}
async function getRuntimeVersion(projectRoot: string): Promise<string> {
@@ -48,7 +48,7 @@ export default class HakEnv {
throw new Error(`Unknown target ${targetId}!`);
}
this.target = target;
this.dotHakDir = path.join(this.projectRoot, '.hak');
this.dotHakDir = path.join(this.projectRoot, ".hak");
}
public async init(): Promise<void> {
@@ -62,7 +62,7 @@ export default class HakEnv {
// {node_abi}-{platform}-{arch}
public getNodeTriple(): string {
return this.getRuntimeAbi() + '-' + this.target.platform + '-' + this.target.arch;
return this.getRuntimeAbi() + "-" + this.target.platform + "-" + this.target.arch;
}
public getTargetId(): TargetId {
@@ -70,15 +70,15 @@ export default class HakEnv {
}
public isWin(): boolean {
return this.target.platform === 'win32';
return this.target.platform === "win32";
}
public isMac(): boolean {
return this.target.platform === 'darwin';
return this.target.platform === "darwin";
}
public isLinux(): boolean {
return this.target.platform === 'linux';
return this.target.platform === "linux";
}
public getTargetArch(): Arch {
@@ -93,7 +93,7 @@ export default class HakEnv {
return Object.assign({}, process.env, {
npm_config_arch: this.target.arch,
npm_config_target_arch: this.target.arch,
npm_config_disturl: 'https://electronjs.org/headers',
npm_config_disturl: "https://electronjs.org/headers",
npm_config_runtime: this.runtime,
npm_config_target: this.runtimeVersion,
npm_config_build_from_source: true,
@@ -102,7 +102,7 @@ export default class HakEnv {
}
public wantsStaticSqlCipherUnix(): boolean {
return this.isMac() || process.env.SQLCIPHER_STATIC == '1';
return this.isMac() || process.env.SQLCIPHER_STATIC == "1";
}
public wantsStaticSqlCipher(): boolean {


@@ -14,42 +14,27 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from 'path';
import findNpmPrefix from 'find-npm-prefix';
import path from "path";
import findNpmPrefix from "find-npm-prefix";
import HakEnv from './hakEnv';
import { TargetId } from './target';
import { DependencyInfo } from './dep';
import HakEnv from "./hakEnv";
import { TargetId } from "./target";
import { DependencyInfo } from "./dep";
const GENERALCOMMANDS = [
'target',
];
const GENERALCOMMANDS = ["target"];
// These can only be run on specific modules
const MODULECOMMANDS = [
'check',
'fetch',
'link',
'fetchDeps',
'build',
'copy',
'clean',
];
const MODULECOMMANDS = ["check", "fetch", "link", "fetchDeps", "build", "copy", "clean"];
// Shortcuts for multiple commands at once (useful for building universal binaries
// because you can run the fetch/fetchDeps/build for each arch and then copy/link once)
const METACOMMANDS: Record<string, string[]> = {
'fetchandbuild': ['check', 'fetch', 'fetchDeps', 'build'],
'copyandlink': ['copy', 'link'],
fetchandbuild: ["check", "fetch", "fetchDeps", "build"],
copyandlink: ["copy", "link"],
};
// Scripts valid in a hak.json 'scripts' section
const HAKSCRIPTS = [
'check',
'fetch',
'fetchDeps',
'build',
];
const HAKSCRIPTS = ["check", "fetch", "fetchDeps", "build"];
async function main(): Promise<void> {
const prefix = await findNpmPrefix(process.cwd());
@@ -65,11 +50,12 @@ async function main(): Promise<void> {
// Apply `--target <target>` option if specified
// Can be specified multiple times for the copy command to bundle
// multiple archs into a single universal output module)
while (true) { // eslint-disable-line no-constant-condition
const targetIndex = process.argv.indexOf('--target');
for (;;) {
// eslint-disable-line no-constant-condition
const targetIndex = process.argv.indexOf("--target");
if (targetIndex === -1) break;
if ((targetIndex + 1) >= process.argv.length) {
if (targetIndex + 1 >= process.argv.length) {
console.error("--target option specified without a target");
process.exit(1);
}
@@ -77,7 +63,7 @@ async function main(): Promise<void> {
targetIds.push(process.argv.splice(targetIndex, 2)[1] as TargetId);
}
const hakEnvs = targetIds.map(tid => new HakEnv(prefix, tid));
const hakEnvs = targetIds.map((tid) => new HakEnv(prefix, tid));
if (hakEnvs.length == 0) hakEnvs.push(new HakEnv(prefix, null));
for (const h of hakEnvs) {
await h.init();
@@ -89,7 +75,7 @@ async function main(): Promise<void> {
const hakDepsCfg = packageJson.hakDependencies || {};
for (const dep of Object.keys(hakDepsCfg)) {
const hakJsonPath = path.join(prefix, 'hak', dep, 'hak.json');
const hakJsonPath = path.join(prefix, "hak", dep, "hak.json");
let hakJson: Record<string, any>;
try {
hakJson = await require(hakJsonPath);
@@ -102,20 +88,20 @@ async function main(): Promise<void> {
name: dep,
version: hakDepsCfg[dep],
cfg: hakJson,
moduleHakDir: path.join(prefix, 'hak', dep),
moduleHakDir: path.join(prefix, "hak", dep),
moduleDotHakDir: path.join(hakEnv.dotHakDir, dep),
moduleTargetDotHakDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId()),
moduleBuildDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), 'build'),
moduleBuildDirs: hakEnvs.map(h => path.join(h.dotHakDir, dep, h.getTargetId(), 'build')),
moduleOutDir: path.join(hakEnv.dotHakDir, 'hakModules', dep),
nodeModuleBinDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), 'build', 'node_modules', '.bin'),
depPrefix: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), 'opt'),
moduleBuildDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), "build"),
moduleBuildDirs: hakEnvs.map((h) => path.join(h.dotHakDir, dep, h.getTargetId(), "build")),
moduleOutDir: path.join(hakEnv.dotHakDir, "hakModules", dep),
nodeModuleBinDir: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), "build", "node_modules", ".bin"),
depPrefix: path.join(hakEnv.dotHakDir, dep, hakEnv.getTargetId(), "opt"),
scripts: {},
};
for (const s of HAKSCRIPTS) {
if (hakJson.scripts && hakJson.scripts[s]) {
const scriptModule = await import(path.join(prefix, 'hak', dep, hakJson.scripts[s]));
const scriptModule = await import(path.join(prefix, "hak", dep, hakJson.scripts[s]));
if (scriptModule.__esModule) {
deps[dep].scripts[s] = scriptModule.default;
} else {
@@ -127,14 +113,14 @@ async function main(): Promise<void> {
let cmds: string[];
if (process.argv.length < 3) {
cmds = ['check', 'fetch', 'fetchDeps', 'build', 'copy', 'link'];
cmds = ["check", "fetch", "fetchDeps", "build", "copy", "link"];
} else if (METACOMMANDS[process.argv[2]]) {
cmds = METACOMMANDS[process.argv[2]];
} else {
cmds = [process.argv[2]];
}
if (hakEnvs.length > 1 && cmds.some(c => !['copy', 'link'].includes(c))) {
if (hakEnvs.length > 1 && cmds.some((c) => !["copy", "link"].includes(c))) {
// We allow link here too for convenience because it's completely arch independent
console.error("Multiple targets only supported with the copy command");
return;
@@ -145,7 +131,7 @@ async function main(): Promise<void> {
for (const cmd of cmds) {
if (GENERALCOMMANDS.includes(cmd)) {
if (cmd === 'target') {
if (cmd === "target") {
console.log(hakEnv.getNodeTriple());
}
return;
@@ -160,15 +146,12 @@ async function main(): Promise<void> {
process.exit(1);
}
const cmdFunc = (await import('./' + cmd)).default;
const cmdFunc = (await import("./" + cmd)).default;
for (const mod of modules) {
const depInfo = deps[mod];
if (depInfo === undefined) {
console.log(
"Module " + mod + " not found - is it in hakDependencies " +
"in your package.json?",
);
console.log("Module " + mod + " not found - is it in hakDependencies " + "in your package.json?");
process.exit(1);
}
console.log("hak " + cmd + ": " + mod);
@@ -177,7 +160,7 @@ async function main(): Promise<void> {
}
}
main().catch(err => {
main().catch((err) => {
console.error(err);
process.exit(1);
});


@@ -14,16 +14,16 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import path from 'path';
import os from 'os';
import fsProm from 'fs/promises';
import childProcess from 'child_process';
import path from "path";
import os from "os";
import fsProm from "fs/promises";
import childProcess from "child_process";
import HakEnv from './hakEnv';
import { DependencyInfo } from './dep';
import HakEnv from "./hakEnv";
import { DependencyInfo } from "./dep";
export default async function link(hakEnv: HakEnv, moduleInfo: DependencyInfo): Promise<void> {
const yarnrc = path.join(hakEnv.projectRoot, '.yarnrc');
const yarnrc = path.join(hakEnv.projectRoot, ".yarnrc");
// this is fairly terrible but it's reasonably clunky to either parse a yarnrc
// properly or get yarn to do it, so this will probably suffice for now.
// We just check to see if there is a local .yarnrc at all, and assume that
@@ -43,28 +43,28 @@ export default async function link(hakEnv: HakEnv, moduleInfo: DependencyInfo):
// (ie. Windows absolute paths) but strings in quotes get parsed as
// JSON so need to be valid JSON encoded strings (ie. have the
// backslashes escaped). JSON.stringify will add quotes and escape.
'--link-folder ' + JSON.stringify(path.join(hakEnv.dotHakDir, 'links')) + os.EOL,
"--link-folder " + JSON.stringify(path.join(hakEnv.dotHakDir, "links")) + os.EOL,
);
}
const yarnCmd = 'yarn' + (hakEnv.isWin() ? '.cmd' : '');
const yarnCmd = "yarn" + (hakEnv.isWin() ? ".cmd" : "");
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(yarnCmd, ['link'], {
const proc = childProcess.spawn(yarnCmd, ["link"], {
cwd: moduleInfo.moduleOutDir,
stdio: 'inherit',
stdio: "inherit",
});
proc.on('exit', code => {
proc.on("exit", (code) => {
code ? reject(code) : resolve();
});
});
await new Promise<void>((resolve, reject) => {
const proc = childProcess.spawn(yarnCmd, ['link', moduleInfo.name], {
const proc = childProcess.spawn(yarnCmd, ["link", moduleInfo.name], {
cwd: hakEnv.projectRoot,
stdio: 'inherit',
stdio: "inherit",
});
proc.on('exit', code => {
proc.on("exit", (code) => {
code ? reject(code) : resolve();
});
});


@@ -20,29 +20,29 @@ import { GLIBC, MUSL, family as processLibC } from "detect-libc";
// details in a single string.
// See https://doc.rust-lang.org/rustc/platform-support.html.
export type TargetId =
'aarch64-apple-darwin' |
'x86_64-apple-darwin' |
'universal-apple-darwin' |
'i686-pc-windows-msvc' |
'x86_64-pc-windows-msvc' |
'i686-unknown-linux-musl' |
'i686-unknown-linux-gnu' |
'x86_64-unknown-linux-musl' |
'x86_64-unknown-linux-gnu' |
'aarch64-unknown-linux-musl' |
'aarch64-unknown-linux-gnu' |
'powerpc64le-unknown-linux-musl' |
'powerpc64le-unknown-linux-gnu';
| "aarch64-apple-darwin"
| "x86_64-apple-darwin"
| "universal-apple-darwin"
| "i686-pc-windows-msvc"
| "x86_64-pc-windows-msvc"
| "i686-unknown-linux-musl"
| "i686-unknown-linux-gnu"
| "x86_64-unknown-linux-musl"
| "x86_64-unknown-linux-gnu"
| "aarch64-unknown-linux-musl"
| "aarch64-unknown-linux-gnu"
| "powerpc64le-unknown-linux-musl"
| "powerpc64le-unknown-linux-gnu";
// Values are expected to match those used in `process.platform`.
export type Platform = 'darwin' | 'linux' | 'win32';
export type Platform = "darwin" | "linux" | "win32";
// Values are expected to match those used in `process.arch`.
export type Arch = 'arm64' | 'ia32' | 'x64' | 'ppc64' | 'universal';
export type Arch = "arm64" | "ia32" | "x64" | "ppc64" | "universal";
// Values are expected to match those used by Visual Studio's `vcvarsall.bat`.
// See https://docs.microsoft.com/cpp/build/building-on-the-command-line?view=msvc-160#vcvarsall-syntax
export type VcVarsArch = 'amd64' | 'arm64' | 'x86';
export type VcVarsArch = "amd64" | "arm64" | "x86";
export type Target = {
id: TargetId;
@@ -51,140 +51,135 @@ export type Target = {
};
export type WindowsTarget = Target & {
platform: 'win32';
platform: "win32";
vcVarsArch: VcVarsArch;
};
export type LinuxTarget = Target & {
platform: 'linux';
platform: "linux";
libC: typeof processLibC;
};
export type UniversalTarget = Target & {
arch: 'universal';
arch: "universal";
subtargets: Target[];
};
const aarch64AppleDarwin: Target = {
id: 'aarch64-apple-darwin',
platform: 'darwin',
arch: 'arm64',
id: "aarch64-apple-darwin",
platform: "darwin",
arch: "arm64",
};
const x8664AppleDarwin: Target = {
id: 'x86_64-apple-darwin',
platform: 'darwin',
arch: 'x64',
id: "x86_64-apple-darwin",
platform: "darwin",
arch: "x64",
};
const universalAppleDarwin: UniversalTarget = {
id: 'universal-apple-darwin',
platform: 'darwin',
arch: 'universal',
subtargets: [
aarch64AppleDarwin,
x8664AppleDarwin,
],
id: "universal-apple-darwin",
platform: "darwin",
arch: "universal",
subtargets: [aarch64AppleDarwin, x8664AppleDarwin],
};
const i686PcWindowsMsvc: WindowsTarget = {
id: 'i686-pc-windows-msvc',
platform: 'win32',
arch: 'ia32',
vcVarsArch: 'x86',
id: "i686-pc-windows-msvc",
platform: "win32",
arch: "ia32",
vcVarsArch: "x86",
};
const x8664PcWindowsMsvc: WindowsTarget = {
id: 'x86_64-pc-windows-msvc',
platform: 'win32',
arch: 'x64',
vcVarsArch: 'amd64',
id: "x86_64-pc-windows-msvc",
platform: "win32",
arch: "x64",
vcVarsArch: "amd64",
};
const x8664UnknownLinuxGnu: LinuxTarget = {
id: 'x86_64-unknown-linux-gnu',
platform: 'linux',
arch: 'x64',
id: "x86_64-unknown-linux-gnu",
platform: "linux",
arch: "x64",
libC: GLIBC,
};
const x8664UnknownLinuxMusl: LinuxTarget = {
id: 'x86_64-unknown-linux-musl',
platform: 'linux',
arch: 'x64',
id: "x86_64-unknown-linux-musl",
platform: "linux",
arch: "x64",
libC: MUSL,
};
const i686UnknownLinuxGnu: LinuxTarget = {
id: 'i686-unknown-linux-gnu',
platform: 'linux',
arch: 'ia32',
id: "i686-unknown-linux-gnu",
platform: "linux",
arch: "ia32",
libC: GLIBC,
};
const i686UnknownLinuxMusl: LinuxTarget = {
id: 'i686-unknown-linux-musl',
platform: 'linux',
arch: 'ia32',
id: "i686-unknown-linux-musl",
platform: "linux",
arch: "ia32",
libC: MUSL,
};
const aarch64UnknownLinuxGnu: LinuxTarget = {
id: 'aarch64-unknown-linux-gnu',
platform: 'linux',
arch: 'arm64',
id: "aarch64-unknown-linux-gnu",
platform: "linux",
arch: "arm64",
libC: GLIBC,
};
const aarch64UnknownLinuxMusl: LinuxTarget = {
id: 'aarch64-unknown-linux-musl',
platform: 'linux',
arch: 'arm64',
id: "aarch64-unknown-linux-musl",
platform: "linux",
arch: "arm64",
libC: MUSL,
};
const powerpc64leUnknownLinuxGnu: LinuxTarget = {
id: 'powerpc64le-unknown-linux-gnu',
platform: 'linux',
arch: 'ppc64',
id: "powerpc64le-unknown-linux-gnu",
platform: "linux",
arch: "ppc64",
libC: GLIBC,
};
const powerpc64leUnknownLinuxMusl: LinuxTarget = {
id: 'powerpc64le-unknown-linux-musl',
platform: 'linux',
arch: 'ppc64',
id: "powerpc64le-unknown-linux-musl",
platform: "linux",
arch: "ppc64",
libC: MUSL,
};
export const TARGETS: Record<TargetId, Target> = {
// macOS
'aarch64-apple-darwin': aarch64AppleDarwin,
'x86_64-apple-darwin': x8664AppleDarwin,
'universal-apple-darwin': universalAppleDarwin,
"aarch64-apple-darwin": aarch64AppleDarwin,
"x86_64-apple-darwin": x8664AppleDarwin,
"universal-apple-darwin": universalAppleDarwin,
// Windows
'i686-pc-windows-msvc': i686PcWindowsMsvc,
'x86_64-pc-windows-msvc': x8664PcWindowsMsvc,
"i686-pc-windows-msvc": i686PcWindowsMsvc,
"x86_64-pc-windows-msvc": x8664PcWindowsMsvc,
// Linux
'i686-unknown-linux-musl': i686UnknownLinuxMusl,
'i686-unknown-linux-gnu': i686UnknownLinuxGnu,
'x86_64-unknown-linux-musl': x8664UnknownLinuxMusl,
'x86_64-unknown-linux-gnu': x8664UnknownLinuxGnu,
'aarch64-unknown-linux-musl': aarch64UnknownLinuxMusl,
'aarch64-unknown-linux-gnu': aarch64UnknownLinuxGnu,
'powerpc64le-unknown-linux-musl': powerpc64leUnknownLinuxMusl,
'powerpc64le-unknown-linux-gnu': powerpc64leUnknownLinuxGnu,
"i686-unknown-linux-musl": i686UnknownLinuxMusl,
"i686-unknown-linux-gnu": i686UnknownLinuxGnu,
"x86_64-unknown-linux-musl": x8664UnknownLinuxMusl,
"x86_64-unknown-linux-gnu": x8664UnknownLinuxGnu,
"aarch64-unknown-linux-musl": aarch64UnknownLinuxMusl,
"aarch64-unknown-linux-gnu": aarch64UnknownLinuxGnu,
"powerpc64le-unknown-linux-musl": powerpc64leUnknownLinuxMusl,
"powerpc64le-unknown-linux-gnu": powerpc64leUnknownLinuxGnu,
};
export function getHost(): Target | undefined {
return Object.values(TARGETS).find(target => (
target.platform === process.platform &&
target.arch === process.arch &&
(
process.platform !== 'linux' ||
(target as LinuxTarget).libC === processLibC
)
));
return Object.values(TARGETS).find(
(target) =>
target.platform === process.platform &&
target.arch === process.arch &&
(process.platform !== "linux" || (target as LinuxTarget).libC === processLibC),
);
}
export function isHostId(id: TargetId): boolean {


@@ -11,31 +11,35 @@ import * as childProcess from "child_process";
export async function versionFromAsar(): Promise<string> {
try {
await fs.stat('webapp.asar');
await fs.stat("webapp.asar");
} catch (e) {
throw new Error("No 'webapp.asar' found. Run 'yarn run fetch'");
}
return asar.extractFile('webapp.asar', 'version').toString().trim();
return asar.extractFile("webapp.asar", "version").toString().trim();
}
export async function setPackageVersion(ver: string): Promise<void> {
// set version in package.json: electron-builder will use this to populate
// all the various version fields
await new Promise<void>((resolve, reject) => {
childProcess.execFile(process.platform === 'win32' ? 'yarn.cmd' : 'yarn', [
'version',
'-s',
'--no-git-tag-version', // This also means "don't commit to git" as it turns out
'--new-version',
ver,
], (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
childProcess.execFile(
process.platform === "win32" ? "yarn.cmd" : "yarn",
[
"version",
"-s",
"--no-git-tag-version", // This also means "don't commit to git" as it turns out
"--new-version",
ver,
],
(err) => {
if (err) {
reject(err);
} else {
resolve();
}
},
);
});
}
@@ -49,10 +53,12 @@ async function main(args: string[]): Promise<number> {
}
if (require.main === module) {
main(process.argv.slice(2)).then((ret) => {
process.exit(ret);
}).catch(e => {
console.error(e);
process.exit(1);
});
main(process.argv.slice(2))
.then((ret) => {
process.exit(ret);
})
.catch((e) => {
console.error(e);
process.exit(1);
});
}


@@ -1,21 +1,16 @@
{
"compilerOptions": {
"resolveJsonModule": true,
"moduleResolution": "node",
"esModuleInterop": true,
"target": "es2017",
"module": "commonjs",
"sourceMap": false,
"strict": true,
"lib": [
"es2019",
"dom"
]
},
"include": [
"./**/*.ts"
],
"ts-node": {
"transpileOnly": true
}
"compilerOptions": {
"resolveJsonModule": true,
"moduleResolution": "node",
"esModuleInterop": true,
"target": "es2017",
"module": "commonjs",
"sourceMap": false,
"strict": true,
"lib": ["es2019", "dom"]
},
"include": ["./**/*.ts"],
"ts-node": {
"transpileOnly": true
}
}