Mirror of https://github.com/game-ci/unity-builder.git, synced 2025-07-04 12:25:19 -04:00

Streamline code styles (#384)

* feat: streamline code styles
* feat: spacing for comments and return statements
* chore: enforce camelcase
* fix: remove npm lock file
* fix: add integrity test
* fix: remove logfile
* chore: update node in test workflow

parent 4be5d2ddf4
commit 5ae03dfef6
@@ -1,10 +1,15 @@
{
"plugins": ["jest", "@typescript-eslint", "prettier", "unicorn"],
"extends": ["plugin:unicorn/recommended", "plugin:github/recommended", "prettier"],
"extends": ["plugin:unicorn/recommended", "plugin:github/recommended", "plugin:prettier/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module"
"ecmaVersion": 2020,
"sourceType": "module",
"extraFileExtensions": [".mjs"],
"ecmaFeatures": {
"impliedStrict": true
},
"project": "./tsconfig.json"
},
"env": {
"node": true,
@@ -12,9 +17,44 @@
"jest/globals": true
},
"rules": {
// Error out for code formatting errors
"prettier/prettier": "error",
"import/no-extraneous-dependencies": 0,
// Namespaces or sometimes needed
"import/no-namespace": "off",
"no-undef": "off" // TODO: REMOVE THIS LINE WHEN UPDATING ESLINT RULES
// Properly format comments
"spaced-comment": ["error", "always"],
"lines-around-comment": [
"error",
{
"beforeBlockComment": true,
"beforeLineComment": true,
"allowBlockStart": true,
"allowObjectStart": true,
"allowArrayStart": true,
"allowClassStart": true,
"ignorePattern": "pragma|ts-ignore"
}
],
// Mandatory spacing
"padding-line-between-statements": [
"error",
{ "blankLine": "always", "prev": "*", "next": "return" },
{ "blankLine": "always", "prev": "directive", "next": "*" },
{ "blankLine": "any", "prev": "directive", "next": "directive" }
],
// Enforce camelCase
"camelcase": "error",
// Allow forOfStatements
"no-restricted-syntax": ["error", "ForInStatement", "LabeledStatement", "WithStatement"],
// Continue is viable in forOf loops in generators
"no-continue": "off",
// From experience, named exports are almost always desired. I got tired of this rule
"import/prefer-default-export": "off",
// Unused vars are useful to keep method signatures consistent and documented
"@typescript-eslint/no-unused-vars": "off",
// For this project only use kebab-case
"unicorn/filename-case": ["error", { "cases": { "kebabCase": true } }],
// Allow Array.from(set) mitigate TS2569 which would require '--downlevelIteration'
"unicorn/prefer-spread": "off"
}
}
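For illustration only (not part of this commit), a small TypeScript function that satisfies the streamlined rules above: spaced comments, a blank line before return statements, camelCase identifiers, and Array.from(set) instead of spread syntax:

// Builds a stable label from a set of platform names (illustrative example, not repository code)
function buildLabel(platformNames: Set<string>): string {
  // Array.from avoids TS2569 without enabling --downlevelIteration
  const sortedNames = Array.from(platformNames).sort();

  return sortedNames.join('-');
}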
6  .github/workflows/integrity-check.yml  vendored
@@ -12,10 +12,10 @@ jobs:
name: Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 12.x
node-version: '16'
- run: yarn
- run: yarn lint
- run: yarn test --coverage
@@ -1,13 +0,0 @@
Cloud Runner platform selected AWS
Cloud Runner is running in custom job mode
AWS Region: eu-west-2
Parsing build steps:
- name: 'step 1'
image: 'alpine'
commands: 'printenv'
secrets:
- name: 'testSecretName'
value: 'testSecretValue'

game-ci stack does not exist (["game-ci-github-automation-424-linux64-a9hz-cleanup","game-ci-github-automation-423-linux64-v34g-cleanup","game-ci-github-automation-423-linux64-v34g","game-ci-github-automation-422-linux64-7x6i-cleanup","game-ci-github-automation-422-linux64-7x6i","game-ci-github-automation-414-linux64-j21p-cleanup","game-ci-github-automation-414-linux64-j21p","game-ci-github-automation-413-linux64-tcih-cleanup","game-ci-github-automation-413-linux64-tcih","game-ci-github-automation-411-linux64-0s69-cleanup","game-ci-github-automation-411-linux64-0s69","game-ci-github-automation-410-linux64-1tli-cleanup","game-ci-github-automation-410-linux64-1tli","game-ci-github-automation-408-linux64-8pbw-cleanup","game-ci-github-automation-408-linux64-8pbw","game-ci-github-automation-407-linux64-21un-cleanup","game-ci-github-automation-407-linux64-21un","game-ci-github-automation-406-linux64-dizb-cleanup","game-ci-github-automation-406-linux64-dizb","game-ci-github-automation-405-linux64-9xj5-cleanup","game-ci-github-automation-405-linux64-9xj5","game-ci-github-automation-402-linux64-0bym-cleanup","game-ci-github-automation-402-linux64-0bym","game-ci-github-automation-400-linux64-arqv-cleanup","game-ci-github-automation-400-linux64-arqv","game-ci-github-automation-399-linux64-utkt-cleanup","game-ci-github-automation-399-linux64-utkt","game-ci-github-automation-397-linux64-xwfu-cleanup","game-ci-github-automation-397-linux64-xwfu","game-ci-github-automation-396-linux64-2g3q-cleanup","game-ci-github-automation-396-linux64-2g3q","game-ci-github-automation","game-ci-stack-integration-tests-390-linux64-mcdw-cleanup","game-ci-stack-integration-tests-390-linux64-mcdw","game-ci-stack-integration-tests-391-linux64-2arq-cleanup","game-ci-stack-integration-tests-391-linux64-2arq","game-ci-stack-integration-tests-390-linux64-awd0-cleanup","game-ci-stack-integration-tests-390-linux64-awd0","game-ci-stack-integration-tests"])
created stack (version: eedce7440581ab2e8a80cee59e34ed64)
BIN  dist/index.js  generated  vendored
Binary file not shown.

BIN  dist/index.js.map  generated  vendored
Binary file not shown.

6956  package-lock.json  generated
File diff suppressed because it is too large.
@@ -7,7 +7,7 @@
"author": "Webber <webber@takken.io>",
"license": "MIT",
"scripts": {
"prepare": "lefthook install",
"prepare": "lefthook install && npx husky uninstall -y",
"build": "yarn && tsc && ncc build lib --source-map --license licenses.txt",
"lint": "prettier --check \"src/**/*.{js,ts}\" && eslint src/**/*.ts",
"format": "prettier --write \"src/**/*.{js,ts}\"",
@@ -1,5 +1,5 @@
import * as core from '@actions/core';
import { Action, BuildParameters, Cache, Docker, ImageTag, Output, CloudRunner } from './model';
import { Action, BuildParameters, Cache, CloudRunner, Docker, ImageTag, Output } from './model';
import { Cli } from './model/cli/cli';
import MacBuilder from './model/mac-builder';
import PlatformSetup from './model/platform-setup';
@@ -7,6 +7,7 @@ async function runMain() {
try {
if (Cli.InitCliMode()) {
await Cli.RunCli();

return;
}
Action.checkCompatibility();
9  src/integrity.test.ts  Normal file
@@ -0,0 +1,9 @@
import { stat } from 'fs/promises';

describe('Integrity tests', () => {
describe('package-lock.json', () => {
it('does not exist', async () => {
await expect(stat(`${process.cwd()}/package-lock.json`)).rejects.toThrowError();
});
});
});
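The new test asserts that package-lock.json is absent (the project is yarn-based). As an aside, and purely as a sketch under the assumption that a yarn.lock is kept at the repository root, the same Jest pattern can also assert that a file must exist:

import { stat } from 'fs/promises';

describe('Integrity tests', () => {
  it('yarn.lock exists', async () => {
    // stat() resolves only when the file is present
    await expect(stat(`${process.cwd()}/yarn.lock`)).resolves.toBeDefined();
  });
});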
@@ -6,12 +6,14 @@ export default class AndroidVersioning {
if (!inputVersionCode) {
return AndroidVersioning.versionToVersionCode(version);
}

return inputVersionCode;
}

static versionToVersionCode(version) {
if (version === 'none') {
core.info(`Versioning strategy is set to ${version}, so android version code should not be applied.`);

return 0;
}

@@ -19,6 +21,7 @@ export default class AndroidVersioning {

if (!parsedVersion) {
core.warning(`Could not parse "${version}" to semver, defaulting android version code to 1`);

return 1;
}

@@ -32,11 +35,13 @@ export default class AndroidVersioning {
);
}
core.info(`Using android versionCode ${versionCode}`);

return versionCode;
}

static determineSdkManagerParameters(targetSdkVersion) {
const parsedVersion = Number.parseInt(targetSdkVersion.slice(-2), 10);

return Number.isNaN(parsedVersion) ? '' : `platforms;android-${parsedVersion}`;
}
}
@@ -77,7 +77,7 @@ class BuildParameters {
// ---
let unitySerial = '';
if (!process.env.UNITY_SERIAL && Input.githubInputEnabled && Cli.options === undefined) {
//No serial was present so it is a personal license that we need to convert
// No serial was present, so it is a personal license that we need to convert
if (!process.env.UNITY_LICENSE) {
throw new Error(`Missing Unity License File and no Serial was found. If this
is a personal license, make sure to follow the activation
@@ -167,6 +167,7 @@ class BuildParameters {
throw new Error(`License File was corrupted, unable to locate serial`);
}
const endIndex = license.indexOf(endKey, startIndex);

// Slice off the first 4 characters as they are garbage values
return Buffer.from(license.slice(startIndex, endIndex), 'base64').toString('binary').slice(4);
}
@@ -21,6 +21,7 @@ export class CliFunctionsRepository {
if (results === undefined || results.length === 0) {
throw new Error(`no CLI mode found for ${key}`);
}

return results;
}
@@ -22,6 +22,7 @@ export class Cli {
if (Cli.options && alternativeKey && Cli.options[alternativeKey] !== undefined) {
return Cli.options[alternativeKey];
}

return;
}

@@ -49,6 +50,7 @@ export class Cli {
program.option('--artifactName <artifactName>', 'caching artifact name');
program.parse(process.argv);
Cli.options = program.opts();

return Cli.isCliMode;
}

@@ -61,6 +63,7 @@ export class Cli {
const results = CliFunctionsRepository.GetCliFunctions(Cli.options.mode);
CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
Cli.options.versioning = 'None';

return await results.target[results.propertyKey]();
}

@@ -88,6 +91,7 @@ export class Cli {
public static async CLIBuild(): Promise<string> {
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);

return await CloudRunner.run(buildParameter, baseImage.toString());
}
}
@@ -16,7 +16,7 @@ describe('Cloud Runner', () => {
const testSecretValue = 'testSecretValue';
if (Input.cloudRunnerTests) {
it('All build parameters sent to cloud runner as env vars', async () => {
// build parameters
// Build parameters
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
@@ -32,13 +32,16 @@ describe('Cloud Runner', () => {
`,
};
Input.githubInputEnabled = false;
// setup parameters

// Setup parameters
const buildParameter = await BuildParameters.create();
Input.githubInputEnabled = true;
const baseImage = new ImageTag(buildParameter);
// run the job

// Run the job
const file = await CloudRunner.run(buildParameter, baseImage.toString());
// assert results

// Assert results
expect(file).toContain(JSON.stringify(buildParameter));
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
@@ -89,7 +92,7 @@ describe('Cloud Runner', () => {
}, 1000000);
}
it('Local cloud runner returns commands', async () => {
// build parameters
// Build parameters
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
@@ -106,16 +109,18 @@ describe('Cloud Runner', () => {
`,
};
Input.githubInputEnabled = false;
// setup parameters

// Setup parameters
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
// run the job

// Run the job
await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
Input.githubInputEnabled = true;
delete Cli.options;
}, 1000000);
it('Test cloud runner returns commands', async () => {
// build parameters
// Build parameters
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
@@ -124,10 +129,12 @@ describe('Cloud Runner', () => {
targetPlatform: 'StandaloneLinux64',
};
Input.githubInputEnabled = false;
// setup parameters

// Setup parameters
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
// run the job

// Run the job
await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
Input.githubInputEnabled = true;
delete Cli.options;
@@ -80,6 +80,7 @@ class CloudRunner {
);
CloudRunnerLogger.log(`Cleanup complete`);
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();

return output;
} catch (error) {
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
@@ -29,6 +29,7 @@ export class AWSCloudFormationTemplates {
public static insertAtTemplate(template, insertionKey, insertion) {
const index = template.search(insertionKey) + insertionKey.length + '\n'.length;
template = [template.slice(0, index), insertion, template.slice(index)].join('');

return template;
}
@@ -112,14 +112,7 @@ export class AWSJobStack {
CloudRunnerLogger.log('Creating cloud runner job');
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
} catch (error) {
await AWSError.handleStackCreationFailure(
error,
CF,
taskDefStackName,
//taskDefCloudFormation,
//parameters,
//secrets,
);
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
throw error;
}
@@ -64,6 +64,7 @@ class AWSTaskRunner {
const wasSuccessful = exitCode === 0 || (exitCode === undefined && taskData.lastStatus === 'RUNNING');
if (wasSuccessful) {
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);

return output;
} else {
if (taskData.stoppedReason === 'Essential container in task exited' && exitCode === 1) {
@@ -135,6 +136,7 @@ class AWSTaskRunner {
output,
));
}

return output;
}

@@ -152,6 +154,7 @@ class AWSTaskRunner {
.promise();
iterator = records.NextShardIterator || '';
({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));

return { iterator, shouldReadLogs, output };
}

@@ -170,6 +173,7 @@ class AWSTaskRunner {
}
CloudRunnerLogger.log(`## Status of job: ${taskData.lastStatus}`);
}

return { timestamp, shouldReadLogs };
}

@@ -208,6 +212,7 @@ class AWSTaskRunner {
}
}
}

return { shouldReadLogs, output };
}
@@ -79,6 +79,7 @@ class AWSBuildEnvironment implements ProviderInterface {
if (postRunTaskTimeMs !== undefined)
CloudRunnerLogger.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
}

return output;
}
@@ -77,7 +77,7 @@ class Kubernetes implements ProviderInterface {
secrets: CloudRunnerSecret[],
): Promise<string> {
try {
// setup
// Setup
this.buildGuid = buildGuid;
this.secretName = `build-credentials-${buildGuid}`;
this.jobName = `unity-builder-job-${buildGuid}`;
@@ -98,7 +98,7 @@ class Kubernetes implements ProviderInterface {
k8s,
);

//run
// Run
const jobResult = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
CloudRunnerLogger.log(`Creating build job ${JSON.stringify(jobResult.body.metadata, undefined, 4)}`);

@@ -131,6 +131,7 @@ class Kubernetes implements ProviderInterface {
}
}
await this.cleanupTaskResources();

return output;
} catch (error) {
CloudRunnerLogger.log('Running job failed');
@@ -163,6 +164,7 @@ class Kubernetes implements ProviderInterface {
async () => {
const jobBody = (await this.kubeClientBatch.readNamespacedJob(this.jobName, this.namespace)).body;
const podBody = (await this.kubeClient.readNamespacedPod(this.podName, this.namespace)).body;

return (jobBody === null || jobBody.status?.active === 0) && podBody === null;
},
{
@@ -195,6 +197,7 @@ class Kubernetes implements ProviderInterface {
if (pod === undefined) {
throw new Error("pod with job-name label doesn't exist");
}

return pod;
}
}
@@ -117,6 +117,7 @@ class KubernetesJobSpecFactory {
const environmentVariable = new V1EnvVar();
environmentVariable.name = x.name;
environmentVariable.value = x.value;

return environmentVariable;
}),
...secrets.map((x) => {
@@ -127,6 +128,7 @@ class KubernetesJobSpecFactory {
const environmentVariable = new V1EnvVar();
environmentVariable.name = x.EnvironmentVariable;
environmentVariable.valueFrom = secret;

return environmentVariable;
}),
],
@@ -155,6 +157,7 @@ class KubernetesJobSpecFactory {
},
},
};

return job;
}
}
@@ -21,6 +21,7 @@ class KubernetesSecret {
for (const buildSecret of secrets) {
secret.data[buildSecret.ParameterKey] = base64.encode(buildSecret.ParameterValue);
}

return kubeClient.createNamespacedSecret(namespace, secret);
}
}
@@ -10,6 +10,7 @@ class KubernetesServiceAccount {
name: serviceAccountName,
};
serviceAccount.automountServiceAccountToken = false;

return kubeClient.createNamespacedServiceAccount(namespace, serviceAccount);
}
}
@@ -4,6 +4,7 @@ import * as k8s from '@kubernetes/client-node';
import BuildParameters from '../../../build-parameters';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import YAML from 'yaml';
import { IncomingMessage } from 'http';

class KubernetesStorage {
public static async createPersistentVolumeClaim(
@@ -15,6 +16,7 @@ class KubernetesStorage {
if (buildParameters.kubeVolume) {
CloudRunnerLogger.log(buildParameters.kubeVolume);
pvcName = buildParameters.kubeVolume;

return;
}
const pvcList = (await kubeClient.listNamespacedPersistentVolumeClaim(namespace)).body.items.map(
@@ -27,6 +29,7 @@ class KubernetesStorage {
if (!buildParameters.isCliMode) {
core.setOutput('volume', pvcName);
}

return;
}
CloudRunnerLogger.log(`Creating PVC ${pvcName} (does not exist)`);
@@ -96,11 +99,12 @@ class KubernetesStorage {
YAML.parse(process.env.K8s_STORAGE_PVC_SPEC);
}
const result = await kubeClient.createNamespacedPersistentVolumeClaim(namespace, pvc);

return result;
}

private static async handleResult(
result: { response: import('http').IncomingMessage; body: k8s.V1PersistentVolumeClaim },
result: { response: IncomingMessage; body: k8s.V1PersistentVolumeClaim },
kubeClient: k8s.CoreV1Api,
namespace: string,
pvcName: string,
@@ -73,6 +73,7 @@ class KubernetesTaskRunner {
throw error;
}
CloudRunnerLogger.log('end of log stream');

return output;
}

@@ -90,6 +91,7 @@ class KubernetesTaskRunner {
}`,
);
if (success || phase !== 'Pending') return true;

return false;
},
{
@@ -97,6 +99,7 @@ class KubernetesTaskRunner {
intervalBetweenAttempts: 15000,
},
);

return success;
}
}
@@ -42,6 +42,7 @@ class LocalDockerCloudRunner implements ProviderInterface {
): Promise<string> {
CloudRunnerLogger.log(buildGuid);
CloudRunnerLogger.log(commands);

return CloudRunnerSystem.Run(commands, false, false);
}
}
@@ -42,6 +42,7 @@ class LocalCloudRunner implements ProviderInterface {
CloudRunnerLogger.log(image);
CloudRunnerLogger.log(buildGuid);
CloudRunnerLogger.log(commands);

return await CloudRunnerSystem.Run(commands);
}
}
@@ -41,6 +41,7 @@ class TestCloudRunner implements ProviderInterface {
CloudRunnerLogger.log(image);
CloudRunnerLogger.log(buildGuid);
CloudRunnerLogger.log(commands);

return await new Promise((result) => {
result(commands);
});
@@ -26,19 +26,19 @@ describe('Cloud Runner Caching', () => {
const buildParameter = await BuildParameters.create();
CloudRunner.buildParameters = buildParameter;

// create test folder
// Create test folder
const testFolder = path.resolve(__dirname, Cli.options.cacheKey);
fs.mkdirSync(testFolder);

// crate cache folder
// Create cache folder
const cacheFolder = path.resolve(__dirname, `cache-${Cli.options.cacheKey}`);
fs.mkdirSync(cacheFolder);

// add test has file to test folders
// Add test file to test folders
fs.writeFileSync(path.resolve(testFolder, 'test.txt'), Cli.options.cacheKey);
await Caching.PushToCache(cacheFolder, testFolder, `${Cli.options.cacheKey}`);

// delete test folder
// Delete test folder
fs.rmdirSync(testFolder, { recursive: true });
await Caching.PullFromCache(
cacheFolder.replace(/\\/g, `/`),
@@ -49,7 +49,7 @@ describe('Cloud Runner Caching', () => {
await CloudRunnerSystem.Run(`tree ${testFolder}`);
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);

// compare validity to original hash
// Compare validity to original hash
expect(fs.readFileSync(path.resolve(testFolder, 'test.txt'), { encoding: 'utf8' }).toString()).toContain(
Cli.options.cacheKey,
);
@@ -65,6 +65,7 @@ export class Caching {
[path.resolve(sourceFolder, '..'), cacheFolder, cacheArtifactName],
1,
);

return format.replace(/{(\d+)}/g, function (match, number) {
return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
});
@@ -116,6 +117,7 @@ export class Caching {
[path.resolve(destinationFolder, '..'), cacheFolder, cacheArtifactName],
1,
);

return format.replace(/{(\d+)}/g, function (match, number) {
return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
});
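For context (not part of the commit), the replace pattern used above is a small positional formatter; a standalone sketch of the same idea, with the function name chosen here purely for illustration:

// Replaces {0}, {1}, ... with the matching entry from arguments_, leaving unmatched tokens intact
function formatTemplate(format: string, arguments_: string[]): string {
  return format.replace(/{(\d+)}/g, (match: string, index: string) => {
    const replacement = arguments_[Number(index)];

    return replacement !== undefined ? replacement : match;
  });
}

// formatTemplate('{0}-{1}', ['a', 'b']) returns 'a-b'; formatTemplate('{2}', ['a']) returns '{2}'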
@@ -30,6 +30,7 @@ export class CloudRunnerBuildCommandProcessor {
throw error;
}
}

return output.filter((x) => x.step !== undefined && x.hook !== undefined && x.hook.length > 0);
}
}
@@ -4,7 +4,7 @@ import { CloudRunner } from '../..';
export class CloudRunnerFolders {
public static readonly repositoryFolder = 'repo';

// only the following paths that do not start a path.join with another "Full" suffixed property need to start with an absolute /
// Only the following paths that do not start a path.join with another "Full" suffixed property need to start with an absolute /

public static get uniqueCloudRunnerJobFolderAbsolute(): string {
return path.join(`/`, CloudRunnerFolders.buildVolumeFolder, CloudRunner.buildParameters.buildGuid);
@@ -4,6 +4,7 @@ import CloudRunnerConstants from './cloud-runner-constants';
class CloudRunnerNamespace {
static generateGuid(runNumber: string | number, platform: string) {
const nanoid = customAlphabet(CloudRunnerConstants.alphabet, 4);

return `${runNumber}-${platform.toLowerCase().replace('standalone', '')}-${nanoid()}`;
}
}
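As a side note (not part of the diff), customAlphabet from nanoid builds a generator for short fixed-length suffixes; a sketch with an assumed alphabet (the repository takes it from CloudRunnerConstants):

import { customAlphabet } from 'nanoid';

// Assumed alphabet for illustration only
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz0123456789', 4);

// Produces ids like '424-linux64-a9hz' for run 424 on StandaloneLinux64
const guid = `${424}-${'StandaloneLinux64'.toLowerCase().replace('standalone', '')}-${nanoid()}`;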
@@ -5,6 +5,7 @@ const formatFunction = (value, arguments_) => {
for (const element of arguments_) {
value = value.replace(`{${element.key}}`, element.value);
}

return value;
};

@@ -22,6 +23,7 @@ class CloudRunnerQueryOverride {
) {
return CloudRunnerQueryOverride.queryOverrides[alternativeKey];
}

return;
}

@@ -31,6 +33,7 @@ class CloudRunnerQueryOverride {
const doesInclude =
Input.readInputFromOverrideList().split(',').includes(query) ||
Input.readInputFromOverrideList().split(',').includes(Input.ToEnvVarFormat(query));

return doesInclude ? true : false;
} else {
return true;
@@ -8,6 +8,7 @@ export class CloudRunnerSystem {
RemoteClientLogger.log(element);
}
}

return await new Promise<string>((promise, throwError) => {
let output = '';
const child = exec(command, (error, stdout, stderr) => {
@@ -10,6 +10,7 @@ class DependencyOverrideService {
return false;
}
}

return true;
}
public static async TryStartDependencies() {
@@ -22,6 +22,7 @@ export class LfsHashing {
.replace(' .lfs-assets-guid', '')
.replace(/\n/g, ``),
};

return lfsHashes;
} catch (error) {
throw error;
@@ -34,6 +35,7 @@ export class LfsHashing {
.replace(/\n/g, '')
.split(` `)[0];
process.chdir(startPath);

return result;
}
@@ -40,8 +40,10 @@ export class TaskParameterSerializer {
array = array.map((x) => {
x.name = Input.ToEnvVarFormat(x.name);
x.value = `${x.value}`;

return x;
});

return array;
}

@@ -54,6 +56,7 @@ export class TaskParameterSerializer {
});
}
array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunner.buildParameters) });

return array;
}

@@ -67,6 +70,7 @@ export class TaskParameterSerializer {
});
}
}

return array;
}

@@ -86,6 +90,7 @@ export class TaskParameterSerializer {
};
}),
);

return array;
}
private static getValue(key) {
@@ -104,6 +109,7 @@ export class TaskParameterSerializer {
ParameterValue: value,
});
}

return array;
}
}
@@ -69,6 +69,7 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
(x) => x.step.includes(`build`),
);
const builderPath = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`).replace(/\\/g, `/`);

return `apt-get update > /dev/null
apt-get install -y zip tree npm git-lfs jq unzip git > /dev/null
npm install -g n > /dev/null
@@ -98,6 +99,7 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
const linuxCacheFolder = CloudRunnerFolders.cacheFolderFull.replace(/\\/g, `/`);
const distFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist');
const ubuntuPlatformsFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', 'platforms', 'ubuntu');

return `echo "game ci cloud runner init"
mkdir -p ${`${CloudRunnerFolders.projectBuildFolderAbsolute}/build`.replace(/\\/g, `/`)}
cd ${CloudRunnerFolders.projectPathAbsolute}
@@ -25,6 +25,7 @@ export class CustomWorkflow {
EnvironmentVariable: Input.ToEnvVarFormat(x.name),
ParameterValue: x.value,
};

return secret;
});
output += await CloudRunner.Provider.runTask(
@@ -37,6 +38,7 @@ export class CustomWorkflow {
[...CloudRunner.defaultSecrets, ...stepSecrets],
);
}

return output;
} catch (error) {
throw error;
@@ -18,6 +18,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
if (CloudRunner.buildParameters.customJob !== '') {
return await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.customJob);
}

return await new BuildAutomationWorkflow().run(
new CloudRunnerStepState(baseImage, CloudRunner.cloudRunnerEnvironmentVariables, CloudRunner.defaultSecrets),
);
@@ -46,6 +46,7 @@ class Docker {

static getWindowsCommand(image: any, parameters: any): string {
const { workspace, actionFolder, unitySerial, gitPrivateToken } = parameters;

return `docker run \
--workdir /github/workspace \
--rm \
@@ -21,6 +21,7 @@ class ImageEnvironmentFactory {

string += `--env ${p.name}="${p.value}" `;
}

return string;
}
public static getEnvironmentVariables(parameters: BuildParameters) {
@@ -65,6 +66,7 @@ class ImageEnvironmentFactory {
{ name: 'RUNNER_WORKSPACE', value: process.env.RUNNER_WORKSPACE },
];
if (parameters.sshAgent) environmentVariables.push({ name: 'SSH_AUTH_SOCK', value: '/ssh-agent' });

return environmentVariables;
}
}
@@ -35,7 +35,7 @@ class ImageTag {
this.imagePlatformPrefix = ImageTag.getImagePlatformPrefixes(
isCloudRunnerLocal ? process.platform : cloudRunnerBuilderPlatform,
);
this.imageRollingVersion = 1; // will automatically roll to the latest non-breaking version.
this.imageRollingVersion = 1; // Will automatically roll to the latest non-breaking version.
}

static get versionPattern() {
@@ -75,6 +75,7 @@ class ImageTag {
ImageTag.targetPlatformSuffixes;

const [major, minor] = version.split('.').map((digit) => Number(digit));

// @see: https://docs.unity3d.com/ScriptReference/BuildTarget.html
switch (platform) {
case Platform.types.StandaloneOSX:
@@ -91,12 +92,14 @@ class ImageTag {
If you are trying to build for windows-mono, please use a Linux based OS.`);
}
}

return windows;
case Platform.types.StandaloneLinux64: {
// Unity versions before 2019.3 do not support il2cpp
if (major >= 2020 || (major === 2019 && minor >= 3)) {
return linuxIl2cpp;
}

return linux;
}
case Platform.types.iOS:
@@ -109,6 +112,7 @@ class ImageTag {
if (process.platform !== 'win32') {
throw new Error(`WSAPlayer can only be built on a windows base OS`);
}

return wsaPlayer;
case Platform.types.PS4:
return windows;
@@ -118,9 +122,11 @@ class ImageTag {
if (process.platform !== 'win32') {
throw new Error(`tvOS can only be built on a windows base OS`);
}

return tvos;
case Platform.types.Switch:
return windows;

// Unsupported
case Platform.types.Lumin:
return windows;
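For reference (not part of the diff), the version gate in the StandaloneLinux64 branch can be read as a small standalone predicate; the function name here is assumed purely for illustration:

// Unity 2019.3 and newer support il2cpp on Linux, so those versions get the linuxIl2cpp suffix
function supportsLinuxIl2cpp(version: string): boolean {
  const [major, minor] = version.split('.').map((digit) => Number(digit));

  return major >= 2020 || (major === 2019 && minor >= 3);
}

// supportsLinuxIl2cpp('2019.4.40f1') returns true; supportsLinuxIl2cpp('2018.4.36f1') returns false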
@@ -9,11 +9,13 @@ export class GitRepoReader {
const value = (await CloudRunnerSystem.Run(`git remote -v`, false, true)).replace(/ /g, ``);
CloudRunnerLogger.log(`value ${value}`);
assert(value.includes('github.com'));

return value.split('github.com/')[1].split('.git')[0];
}

public static async GetBranch() {
assert(fs.existsSync(`.git`));

return (await CloudRunnerSystem.Run(`git branch --show-current`, false, true))
.split('\n')[0]
.replace(/ /g, ``)
@@ -8,12 +8,14 @@ export class GithubCliReader {
if (authStatus.includes('You are not logged') || authStatus === '') {
return '';
}

return (await CloudRunnerSystem.Run(`gh auth status -t`, false, true))
.split(`Token: `)[1]
.replace(/ /g, '')
.replace(/\n/g, '');
} catch (error: any) {
core.info(error || 'Failed to get github auth token from gh cli');

return '';
}
}
@@ -4,5 +4,6 @@ import YAML from 'yaml';

export function ReadLicense() {
const pipelineFile = path.join(__dirname, `.github`, `workflows`, `cloud-runner-k8s-pipeline.yml`);

return fs.existsSync(pipelineFile) ? YAML.parse(fs.readFileSync(pipelineFile, 'utf8')).env.UNITY_LICENSE : '';
}
@@ -25,7 +25,7 @@ class Input {
}
const alternativeQuery = Input.ToEnvVarFormat(query);

// query input sources
// Query input sources
if (Cli.query(query, alternativeQuery)) {
return Cli.query(query, alternativeQuery);
}
@@ -69,6 +69,7 @@ class Input {
if (Input.cloudRunnerCluster !== 'local') {
return 'linux';
}

return;
}

@@ -117,7 +118,7 @@ class Input {
}

static get buildMethod() {
return Input.getInput('buildMethod') || ''; // processed in docker file
return Input.getInput('buildMethod') || ''; // Processed in docker file
}

static get customParameters() {
@@ -228,6 +229,7 @@ class Input {
if (Cli.isCliMode) {
return Input.getInput('cloudRunnerCluster') || 'aws';
}

return Input.getInput('cloudRunnerCluster') || 'local';
}

@@ -275,6 +277,7 @@ class Input {
if (input.toUpperCase() === input) {
return input;
}

return input
.replace(/([A-Z])/g, ' $1')
.trim()
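The ToEnvVarFormat hunk above is cut off mid-chain; purely as an assumption for illustration (not the repository's exact code), a conversion of this shape usually finishes by joining the split words with underscores and upper-casing them:

// Illustrative sketch: camelCase to ENV_VAR_FORMAT (assumed completion of the truncated chain)
function toEnvVarFormat(input: string): string {
  if (input.toUpperCase() === input) {
    return input;
  }

  return input
    .replace(/([A-Z])/g, ' $1')
    .trim()
    .replace(/ /g, '_')
    .toUpperCase();
}

// toEnvVarFormat('cloudRunnerTests') returns 'CLOUD_RUNNER_TESTS'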
@@ -1,5 +1,5 @@
import { BuildParameters } from '.';
import { SetupWindows, SetupMac } from './platform-setup/';
import { SetupMac, SetupWindows } from './platform-setup/';
import ValidateWindows from './platform-validation/validate-windows';

class PlatformSetup {
@@ -12,7 +12,8 @@ class PlatformSetup {
case 'darwin':
await SetupMac.setup(buildParameters, actionFolder);
break;
//Add other baseOS's here

// Add other baseOS's here
}
}
}
@@ -13,8 +13,9 @@ class SetupWindows {
if (!fs.existsSync('c:/regkeys')) {
fs.mkdirSync('c:/regkeys');
}

// These all need the Windows 10 SDK
switch (targetPlatform) {
//These all need the Windows 10 SDK
case 'StandaloneWindows':
case 'StandaloneWindows64':
case 'WSAPlayer':
@@ -32,7 +32,7 @@ class ValidateWindows {
}

private static checkForWin10SDK() {
//Check for Windows 10 SDK on runner
// Check for Windows 10 SDK on runner
const windows10SDKPathExists = fs.existsSync('C:/Program Files (x86)/Windows Kits');
if (!windows10SDKPathExists) {
throw new Error(`Windows 10 SDK not found in default location. Make sure
@@ -42,7 +42,7 @@ class ValidateWindows {
}

private static checkForVisualStudio() {
//Note: When upgrading to Server 2022, we will need to move to just "program files" since VS will be 64-bit
// Note: When upgrading to Server 2022, we will need to move to just "program files" since VS will be 64-bit
const visualStudioInstallPathExists = fs.existsSync('C:/Program Files (x86)/Microsoft Visual Studio');
const visualStudioDataPathExists = fs.existsSync('C:/ProgramData/Microsoft/VisualStudio');
@@ -17,12 +17,14 @@ class Platform {
XboxOne: 'XboxOne',
tvOS: 'tvOS',
Switch: 'Switch',

// Unsupported
Lumin: 'Lumin',
BJM: 'BJM',
Stadia: 'Stadia',
Facebook: 'Facebook',
NoTarget: 'NoTarget',

// Test specific
Test: 'Test',
};
@@ -34,6 +34,7 @@ describe('System', () => {
it('outputs info', async () => {
execSpy.mockImplementationOnce(async (input, _, options) => {
options?.listeners?.stdout?.(Buffer.from(input, 'utf8'));

return 0;
});
@@ -10,6 +10,7 @@ export default class UnityVersioning {
if (unityVersion === 'auto') {
return UnityVersioning.read(projectPath);
}

return unityVersion;
}

@@ -18,6 +19,7 @@ export default class UnityVersioning {
if (!fs.existsSync(filePath)) {
throw new Error(`Project settings file not found at "${filePath}". Have you correctly set the projectPath?`);
}

return UnityVersioning.parse(fs.readFileSync(filePath, 'utf8'));
}

@@ -26,6 +28,7 @@ export default class UnityVersioning {
if (!matches || matches.length === 0) {
throw new Error(`Failed to parse version from "${projectVersionTxt}".`);
}

return matches[0];
}
}
@@ -110,6 +110,7 @@ describe('Versioning', () => {

expect(logDiffSpy).toHaveBeenCalledTimes(1);
expect(gitSpy).toHaveBeenCalledTimes(1);

// Todo - this no longer works since typescript
// const issuedCommand = System.run.mock.calls[0][2].input.toString();
// expect(issuedCommand.indexOf('diff')).toBeGreaterThan(-1);
@@ -137,6 +138,7 @@ describe('Versioning', () => {

test.each(['v0', 'v0.1', 'v0.1.2', 'v0.1-2', 'v0.1-2-g'])('does not like %s', (description) => {
expect(Versioning.descriptionRegex1.test(description)).toBeFalsy();

// Also, never expect without the v to work for any of these cases.
expect(Versioning.descriptionRegex1.test(description?.slice(1))).toBeFalsy();
});
@@ -130,6 +130,7 @@ export default class Versioning {
if (!(await this.hasAnyVersionTags())) {
const version = `0.0.${await this.getTotalNumberOfCommits()}`;
core.info(`Generated version ${version} (no version tags found).`);

return version;
}

@@ -148,6 +149,7 @@ export default class Versioning {

const version = `0.0.${await this.getTotalNumberOfCommits()}`;
core.info(`Generated version ${version} (semantic version couldn't be determined).`);

return version;
}

@@ -203,6 +205,7 @@ export default class Versioning {
core.warning(
`Failed to parse git describe output or version can not be determined through: "${description}".`,
);

return false;
}
}
@@ -9,5 +9,5 @@
"noImplicitAny": false /* Re-enable after fixing compatibility */ /* Raise error on expressions and declarations with an implied 'any' type. */,
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
"exclude": ["node_modules", "dist"]
}