Cloud Runner v2 (#310)

Frostebite 2022-02-01 02:31:20 +00:00 committed by GitHub
parent 4e38a84fe7
commit 03ae77dc7c
90 changed files with 4253 additions and 1700 deletions


@ -1,60 +0,0 @@
name: AWS
on:
push: { branches: [aws, remote-builder/refactor] }
env:
AWS_REGION: 'eu-west-1'
jobs:
buildForAllPlatforms:
name: AWS Fargate Build
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
projectPath:
- test-project
unityVersion:
# - 2019.2.11f1
- 2019.3.15f1
targetPlatform:
#- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
#- StandaloneWindows64 # Build a Windows 64-bit standalone.
- StandaloneLinux64 # Build a Linux 64-bit standalone.
#- iOS # Build an iOS player.
#- Android # Build an Android .apk.
#- WebGL # WebGL.
# - StandaloneWindows # Build a Windows standalone.
# - WSAPlayer # Build an Windows Store Apps player.
# - PS4 # Build a PS4 Standalone.
# - XboxOne # Build a Xbox One Standalone.
# - tvOS # Build to Apple's tvOS platform.
# - Switch # Build a Nintendo Switch player
# steps
steps:
- name: Checkout (default)
uses: actions/checkout@v2
if: github.event.event_type != 'pull_request_target'
with:
lfs: true
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-2
- uses: ./
id: aws-fargate-unity-build
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: eu-west-2
with:
remoteBuildCluster: aws
projectPath: ${{ matrix.projectPath }}
unityVersion: ${{ matrix.unityVersion }}
targetPlatform: ${{ matrix.targetPlatform }}
githubToken: ${{ secrets.GITHUB_TOKEN }}


@ -0,0 +1,111 @@
name: Cloud Runner - AWS Tests
on:
push: { branches: [main, aws, remote-builder/unified-providers] }
env:
GKE_ZONE: 'us-central1'
GKE_REGION: 'us-central1'
GKE_PROJECT: 'unitykubernetesbuilder'
GKE_CLUSTER: 'unity-builder-cluster'
GCP_LOGGING: true
GCP_PROJECT: unitykubernetesbuilder
AWS_REGION: eu-west-2
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: eu-west-2
AWS_BASE_STACK_NAME: game-ci-github-pipelines
CLOUD_RUNNER_BRANCH: remote-builder/unified-providers
CLOUD_RUNNER_TESTS: true
DEBUG: true
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
jobs:
buildForAllPlatforms:
name: AWS Fargate Build
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
projectPath:
- test-project
unityVersion:
# - 2019.2.11f1
- 2019.3.15f1
targetPlatform:
#- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
- StandaloneWindows64 # Build a Windows 64-bit standalone.
- StandaloneLinux64 # Build a Linux 64-bit standalone.
#- iOS # Build an iOS player.
#- Android # Build an Android .apk.
#- WebGL # WebGL.
# - StandaloneWindows # Build a Windows standalone.
# - WSAPlayer # Build an Windows Store Apps player.
# - PS4 # Build a PS4 Standalone.
# - XboxOne # Build a Xbox One Standalone.
# - tvOS # Build to Apple's tvOS platform.
# - Switch # Build a Nintendo Switch player
# steps
steps:
- name: Checkout (default)
uses: actions/checkout@v2
if: github.event.event_type != 'pull_request_target'
with:
lfs: true
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-2
- run: yarn
- run: yarn run cli --help
- run: yarn run test-i-aws
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
PROJECT_PATH: ${{ matrix.projectPath }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
- uses: ./
id: aws-fargate-unity-build
timeout-minutes: 25
with:
cloudRunnerCluster: aws
versioning: None
projectPath: ${{ matrix.projectPath }}
unityVersion: ${{ matrix.unityVersion }}
targetPlatform: ${{ matrix.targetPlatform }}
githubToken: ${{ secrets.GITHUB_TOKEN }}
postBuildSteps: |
- name: upload
image: amazon/aws-cli
commands: |
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
aws configure set region $AWS_DEFAULT_REGION --profile default
aws s3 ls
aws s3 ls game-ci-test-storage
ls /data/cache/$BRANCH
echo "/data/cache/$BRANCH/build-$BUILD_GUID.zip s3://game-ci-test-storage/$BRANCH/$BUILD_FILE"
aws s3 cp /data/cache/$BRANCH/build-$BUILD_GUID.zip s3://game-ci-test-storage/$BRANCH/build-$BUILD_GUID.zip
aws s3 cp /data/cache/$BRANCH s3://game-ci-test-storage/$BRANCH/$BUILD_GUID
secrets:
- name: awsAccessKeyId
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
- name: awsSecretAccessKey
value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: awsDefaultRegion
value: eu-west-2
- run: |
aws s3 cp s3://game-ci-test-storage/${{ steps.aws-fargate-unity-build.outputs.BRANCH }}/build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.zip build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.zip
ls
###########################
# Upload #
###########################
# download from cloud storage
- uses: actions/upload-artifact@v2
with:
name: AWS Build (${{ matrix.targetPlatform }})
path: build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.zip
retention-days: 14


@ -0,0 +1,125 @@
name: Cloud Runner - K8s Tests
on:
push: { branches: [remote-builder/k8s, remote-builder/unified-providers] }
# push: { branches: [main] }
# pull_request:
# paths-ignore:
# - '.github/**'
env:
GKE_ZONE: 'us-central1'
GKE_REGION: 'us-central1'
GKE_PROJECT: 'unitykubernetesbuilder'
GKE_CLUSTER: 'game-ci-github-pipelines'
GCP_LOGGING: true
GCP_PROJECT: unitykubernetesbuilder
GCP_LOG_FILE: ${{ github.workspace }}/cloud-runner-logs.txt
AWS_REGION: eu-west-2
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: eu-west-2
AWS_BASE_STACK_NAME: game-ci-github-pipelines
CLOUD_RUNNER_BRANCH: remote-builder/unified-providers
CLOUD_RUNNER_TESTS: true
DEBUG: true
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
jobs:
k8sBuilds:
name: K8s (GKE Autopilot) build for ${{ matrix.targetPlatform }} on version ${{ matrix.unityVersion }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
unityVersion:
# - 2019.2.11f1
- 2019.3.15f1
targetPlatform:
# - StandaloneWindows64
- StandaloneLinux64
steps:
###########################
# Checkout #
###########################
- uses: actions/checkout@v2
if: github.event.event_type != 'pull_request_target'
with:
lfs: true
###########################
# Setup #
###########################
- uses: google-github-actions/setup-gcloud@master
with:
version: '288.0.0'
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
- name: Get GKE cluster credentials
run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
###########################
# Cloud Runner Test Suite #
###########################
- uses: actions/setup-node@v2
with:
node-version: 12.x
- run: yarn
- run: yarn run cli --help
- name: Cloud Runner Test Suite
run: yarn run test-i-k8s --detectOpenHandles --forceExit
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
PROJECT_PATH: ${{ matrix.projectPath }}
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
KUBE_CONFIG: ${{ steps.read-base64.outputs.base64 }}
unityVersion: ${{ matrix.unityVersion }}
###########################
# Cloud Runner Build Test #
###########################
- name: Cloud Runner Build Test
uses: ./
id: k8s-unity-build
timeout-minutes: 30
with:
cloudRunnerCluster: k8s
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
targetPlatform: ${{ matrix.targetPlatform }}
kubeConfig: ${{ steps.read-base64.outputs.base64 }}
githubToken: ${{ secrets.GITHUB_TOKEN }}
projectPath: test-project
unityVersion: ${{ matrix.unityVersion }}
versioning: None
postBuildSteps: |
- name: upload
image: amazon/aws-cli
commands: |
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
aws configure set region $AWS_DEFAULT_REGION --profile default
aws s3 ls
aws s3 ls game-ci-test-storage
ls /data/cache/$BRANCH
echo "/data/cache/$BRANCH/build-$BUILD_GUID.zip s3://game-ci-test-storage/$BRANCH/$BUILD_FILE"
aws s3 cp /data/cache/$BRANCH/build-$BUILD_GUID.zip s3://game-ci-test-storage/$BRANCH/build-$BUILD_GUID.zip
secrets:
- name: awsAccessKeyId
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
- name: awsSecretAccessKey
value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: awsDefaultRegion
value: eu-west-2
- run: |
aws s3 cp s3://game-ci-test-storage/${{ steps.k8s-unity-build.outputs.BRANCH }}/build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.zip build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.zip
ls
###########################
# Upload #
###########################
# download from cloud storage
- uses: actions/upload-artifact@v2
with:
name: K8s Build (${{ matrix.targetPlatform }})
path: build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.zip
retention-days: 14


@ -1,80 +0,0 @@
name: Kubernetes
on:
workflow_dispatch: {}
# push: { branches: [main] }
# pull_request:
# paths-ignore:
# - '.github/**'
env:
GKE_ZONE: 'us-central1-c'
GKE_REGION: 'us-central1'
GKE_PROJECT: 'unitykubernetesbuilder'
GKE_CLUSTER: 'unity-builder-cluster'
UNITY_LICENSE: "<?xml version=\"1.0\" encoding=\"UTF-8\"?><root>\n <License id=\"Terms\">\n <MachineBindings>\n <Binding Key=\"1\" Value=\"576562626572264761624c65526f7578\"/>\n <Binding Key=\"2\" Value=\"576562626572264761624c65526f7578\"/>\n </MachineBindings>\n <MachineID Value=\"D7nTUnjNAmtsUMcnoyrqkgIbYdM=\"/>\n <SerialHash Value=\"2033b8ac3e6faa3742ca9f0bfae44d18f2a96b80\"/>\n <Features>\n <Feature Value=\"33\"/>\n <Feature Value=\"1\"/>\n <Feature Value=\"12\"/>\n <Feature Value=\"2\"/>\n <Feature Value=\"24\"/>\n <Feature Value=\"3\"/>\n <Feature Value=\"36\"/>\n <Feature Value=\"17\"/>\n <Feature Value=\"19\"/>\n <Feature Value=\"62\"/>\n </Features>\n <DeveloperData Value=\"AQAAAEY0LUJHUlgtWEQ0RS1aQ1dWLUM1SlctR0RIQg==\"/>\n <SerialMasked Value=\"F4-BGRX-XD4E-ZCWV-C5JW-XXXX\"/>\n <StartDate Value=\"2021-02-08T00:00:00\"/>\n <UpdateDate Value=\"2021-02-09T00:34:57\"/>\n <InitialActivationDate Value=\"2021-02-08T00:34:56\"/>\n <LicenseVersion Value=\"6.x\"/>\n <ClientProvidedVersion Value=\"2018.4.30f1\"/>\n <AlwaysOnline Value=\"false\"/>\n <Entitlements>\n <Entitlement Ns=\"unity_editor\" Tag=\"UnityPersonal\" Type=\"EDITOR\" ValidTo=\"9999-12-31T00:00:00\"/>\n <Entitlement Ns=\"unity_editor\" Tag=\"DarkSkin\" Type=\"EDITOR_FEATURE\" ValidTo=\"9999-12-31T00:00:00\"/>\n </Entitlements>\n </License>\n<Signature xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><SignedInfo><CanonicalizationMethod Algorithm=\"http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments\"/><SignatureMethod Algorithm=\"http://www.w3.org/2000/09/xmldsig#rsa-sha1\"/><Reference URI=\"#Terms\"><Transforms><Transform Algorithm=\"http://www.w3.org/2000/09/xmldsig#enveloped-signature\"/></Transforms><DigestMethod Algorithm=\"http://www.w3.org/2000/09/xmldsig#sha1\"/><DigestValue>m0Db8UK+ktnOLJBtHybkfetpcKo=</DigestValue></Reference></SignedInfo><SignatureValue>o/pUbSQAukz7+ZYAWhnA0AJbIlyyCPL7bKVEM2lVqbrXt7cyey+umkCXamuOgsWPVUKBMkXtMH8L\n5etLmD0getWIhTGhzOnDCk+gtIPfL4jMo9tkEuOCROQAXCci23VFscKcrkB+3X6h4wEOtA2APhOY\nB+wvC794o8/82ffjP79aVAi57rp3Wmzx+9pe9yMwoJuljAy2sc2tIMgdQGWVmOGBpQm3JqsidyzI\nJWG2kjnc7pDXK9pwYzXoKiqUqqrut90d+kQqRyv7MSZXR50HFqD/LI69h68b7P8Bjo3bPXOhNXGR\n9YCoemH6EkfCJxp2gIjzjWW+l2Hj2EsFQi8YXw==</SignatureValue></Signature></root>"
jobs:
k8sBuilds:
name: K8s build for ${{ matrix.targetPlatform }} on version ${{ matrix.unityVersion }}
runs-on: ubuntu-latest
continue-on-error: true
strategy:
fail-fast: false
matrix:
targetPlatform:
- StandaloneLinux64
- StandaloneWindows64
steps:
###########################
# Checkout #
###########################
- uses: actions/checkout@v2
with:
lfs: true
###########################
# Spin up #
###########################
- uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
with:
version: '288.0.0'
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
- run: ./dist/bootstrapper/ApplyClusterAndAcquireLock.sh ${{ env.GKE_PROJECT }} ${{ env.GKE_CLUSTER }} ${{ env.GKE_ZONE }}
###########################
# Build #
###########################
- uses: frostebite/File-To-Base64@master
id: read-base64
with:
filePath: ~/.kube/config
- uses: ./
id: k8s-unity-build
with:
targetPlatform: ${{ matrix.targetPlatform }}
kubeConfig: ${{ steps.read-base64.outputs.base64 }}
githubToken: ${{ secrets.GITHUB_TOKEN }}
projectPath: test-project
unityVersion: 2019.3.15f1
###########################
# Upload #
###########################
- uses: frostebite/K8s-Download-Volume@master
with:
kubeConfig: ${{ steps.read-base64.outputs.base64 }}
volume: ${{ steps.k8s-unity-build.outputs.volume }}
sourcePath: repo/build/
- uses: actions/upload-artifact@v2
with:
name: Kubernetes Build (${{ matrix.targetPlatform }})
path: k8s-volume-download
retention-days: 14
###########################
# Spin down #
###########################
- run: ./dist/bootstrapper/ReleaseLockAndAttemptShutdown.sh ${{ env.GKE_PROJECT }} ${{ env.GKE_CLUSTER }} ${{ env.GKE_ZONE }}
if: ${{ always() }}

.gitignore (1 line added)

@ -3,3 +3,4 @@ node_modules
coverage/
lib/
.vsconfig
yarn-error.log

.vscode/settings.json (new file, 32 lines)

@ -0,0 +1,32 @@
{
"god.tsconfig": "./tsconfig.json",
"yaml.customTags": [
"!And",
"!And sequence",
"!If",
"!If sequence",
"!Not",
"!Not sequence",
"!Equals",
"!Equals sequence",
"!Or",
"!Or sequence",
"!FindInMap",
"!FindInMap sequence",
"!Base64",
"!Join",
"!Join sequence",
"!Cidr",
"!Ref",
"!Sub",
"!Sub sequence",
"!GetAtt",
"!GetAZs",
"!ImportValue",
"!ImportValue sequence",
"!Select",
"!Select sequence",
"!Split",
"!Split sequence"
]
}


@ -22,6 +22,18 @@ inputs:
required: false
default: ''
description: 'Name of the build.'
postBuildSteps:
required: false
default: ''
    description: 'Run a post-build job after the build (in YAML format, with the keys: image, secrets (an array of name/value objects) and commands (a multi-line command string)).'
preBuildSteps:
required: false
default: ''
    description: 'Run a pre-build job after the repository setup but before the build job (in YAML format, with the keys: image, secrets (an array of name/value objects) and commands (a multi-line command string)).'
customJob:
required: false
default: ''
    description: 'Run a custom job instead of the standard build automation for Cloud Runner (in YAML format, with the keys: image, secrets (an array of name/value objects) and commands (a multi-line command string)).'
buildsPath:
required: false
default: ''
@ -30,7 +42,7 @@ inputs:
required: false
default: ''
description: 'Path to a Namespace.Class.StaticMethod to run to perform the build.'
remoteBuildCluster:
cloudRunnerCluster:
default: 'local'
required: false
description: 'Either local, k8s or aws can be used to run builds on a remote cluster. Additional parameters must be configured.'
@ -46,12 +58,12 @@ inputs:
default: ''
required: false
description: 'Supply a Persistent Volume Claim name to use for the Unity build.'
remoteBuildMemory:
default: '800M'
cloudRunnerMemory:
default: '750M'
required: false
    description: 'Amount of memory to assign to the remote build container'
remoteBuildCpu:
default: '0.25'
cloudRunnerCpu:
default: '1.0'
required: false
    description: 'Amount of CPU time to assign to the remote build container'
kubeVolumeSize:
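
The postBuildSteps, preBuildSteps and customJob inputs above all take a list of steps written in YAML. A minimal sketch of how such a value could be parsed and typed, assuming the yaml package that this commit adds to package.json; the CustomStep interface and the literal step below are illustrative only, not taken from the source:

import YAML from 'yaml';

// Expected shape of a single step, per the input descriptions above
// (the interface name is hypothetical).
interface CustomStep {
  name: string;
  image: string;
  commands: string;
  secrets: { name: string; value: string }[];
}

const postBuildSteps = `
- name: upload
  image: amazon/aws-cli
  commands: |
    aws s3 ls
  secrets:
    - name: awsAccessKeyId
      value: example-value
`;

// YAML.parse returns plain objects; the cast documents the expected shape.
const steps = YAML.parse(postBuildSteps) as CustomStep[];
console.log(steps[0].image); // "amazon/aws-cli"

The AWS and K8s test workflows earlier in this commit pass postBuildSteps values of this shape.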

cloud-runner-logs (new file, 13 lines)

@ -0,0 +1,13 @@
Cloud Runner platform selected AWS
Cloud Runner is running in custom job mode
AWS Region: eu-west-2
Parsing build steps:
- name: 'step 1'
image: 'alpine'
commands: 'printenv'
secrets:
- name: 'testSecretName'
value: 'testSecretValue'
game-ci stack does not exist (["game-ci-github-automation-424-linux64-a9hz-cleanup","game-ci-github-automation-423-linux64-v34g-cleanup","game-ci-github-automation-423-linux64-v34g","game-ci-github-automation-422-linux64-7x6i-cleanup","game-ci-github-automation-422-linux64-7x6i","game-ci-github-automation-414-linux64-j21p-cleanup","game-ci-github-automation-414-linux64-j21p","game-ci-github-automation-413-linux64-tcih-cleanup","game-ci-github-automation-413-linux64-tcih","game-ci-github-automation-411-linux64-0s69-cleanup","game-ci-github-automation-411-linux64-0s69","game-ci-github-automation-410-linux64-1tli-cleanup","game-ci-github-automation-410-linux64-1tli","game-ci-github-automation-408-linux64-8pbw-cleanup","game-ci-github-automation-408-linux64-8pbw","game-ci-github-automation-407-linux64-21un-cleanup","game-ci-github-automation-407-linux64-21un","game-ci-github-automation-406-linux64-dizb-cleanup","game-ci-github-automation-406-linux64-dizb","game-ci-github-automation-405-linux64-9xj5-cleanup","game-ci-github-automation-405-linux64-9xj5","game-ci-github-automation-402-linux64-0bym-cleanup","game-ci-github-automation-402-linux64-0bym","game-ci-github-automation-400-linux64-arqv-cleanup","game-ci-github-automation-400-linux64-arqv","game-ci-github-automation-399-linux64-utkt-cleanup","game-ci-github-automation-399-linux64-utkt","game-ci-github-automation-397-linux64-xwfu-cleanup","game-ci-github-automation-397-linux64-xwfu","game-ci-github-automation-396-linux64-2g3q-cleanup","game-ci-github-automation-396-linux64-2g3q","game-ci-github-automation","game-ci-stack-integration-tests-390-linux64-mcdw-cleanup","game-ci-stack-integration-tests-390-linux64-mcdw","game-ci-stack-integration-tests-391-linux64-2arq-cleanup","game-ci-stack-integration-tests-391-linux64-2arq","game-ci-stack-integration-tests-390-linux64-awd0-cleanup","game-ci-stack-integration-tests-390-linux64-awd0","game-ci-stack-integration-tests"])
created stack (version: eedce7440581ab2e8a80cee59e34ed64)


@ -1,69 +0,0 @@
#!/bin/sh
# This creates a GKE Cluster
# - Will wait for any deletion to complete on a cluster with the same name before creating
# - Will wait for completion before continuing
# - If the script is run concurrently multiple times, only one cluster will be created, all instances will wait for availability
# Requires GCP Cloud SDK
# Installs retry https://github.com/kadwanev/retry
GKE_PROJECT=$1
GKE_CLUSTER=$2
GKE_ZONE=$3
# may update this to avoid repeated install, drop me a comment if needed
sudo sh -c "curl https://raw.githubusercontent.com/kadwanev/retry/master/retry -o /usr/local/bin/retry && chmod +x /usr/local/bin/retry"
attempts=0
while [ $attempts -le 1 ]
do
retry -s 15 -t 20 -v '
STATUS=$(gcloud container clusters list --format="json" --project $GKE_PROJECT |
jq "
.[] |
{name: .name, status: .status} |
select(.name == \"$GKE_CLUSTER\")
" |
jq ".status")
if [ "$STATUS" == "\"STOPPING\"" ]; then echo "Cluster stopping waiting for completion" && exit 1; fi
exit 0
'
cluster=$(gcloud container clusters list --project $GKE_PROJECT --format="json" | jq '.[] | select(.name == "${GKE_CLUSTER}")')
if [ -z "$cluster" ];
then
echo "No clusters found for \"$GKE_CLUSTER\" in project \"$GKE_CLUSTER\" in zone \"$GKE_ZONE\""
# you may not need this, it installs GCP beta for additional command line options
gcloud components install beta -q
# replace this line with whatever type of cluster you want to create
gcloud beta container --project $GKE_PROJECT clusters create $GKE_CLUSTER --zone $GKE_ZONE --no-enable-basic-auth --cluster-version "1.15.12-gke.2" --machine-type "custom-1-3072" --image-type "COS" --disk-type "pd-standard" --disk-size "15" --metadata disable-legacy-endpoints=true --scopes "https://www.googleapis.com/auth/devstorage.read_only","https://www.googleapis.com/auth/logging.write","https://www.googleapis.com/auth/monitoring","https://www.googleapis.com/auth/servicecontrol","https://www.googleapis.com/auth/service.management.readonly","https://www.googleapis.com/auth/trace.append" --num-nodes "1" --enable-stackdriver-kubernetes --enable-ip-alias --default-max-pods-per-node "110" --enable-autoscaling --min-nodes "0" --max-nodes "3" --no-enable-master-authorized-networks --addons HorizontalPodAutoscaling,HttpLoadBalancing --enable-autoupgrade --enable-autorepair --max-surge-upgrade 1 --max-unavailable-upgrade 0
fi;
retry -s 15 -t 20 -v '
STATUS=$(gcloud container clusters list --format="json" --project $GKE_PROJECT |
jq "
.[] |
{name: .name, status: .status} |
select(.name == \"$GKE_CLUSTER\")
" |
jq ".status")
if [ "$STATUS" == "\"PROVISIONING\"" ]; then echo "Cluster provisioning waiting for available" && exit 1; fi
exit 0
'
echo "Cluster is available"
gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
kubectl version
NSID=$(cat /proc/sys/kernel/random/uuid)
echo "::set-env name=NSID::"$NSID
{
cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: Namespace
metadata:
name: ns-unity-builder-$NSID
labels:
app: unity-builder
EOF
} && exit 0
attempts=$(($attempts+1))
done


@ -1,13 +0,0 @@
kubectl delete ns ns-unity-builder-$NSID
# do any unity-builder namespaces remain?
namespaceCount=$(kubectl get ns --output json | jq ".items | .[] | select(.metadata.labels.app == \"unity-builder\") | select(.status.phase != \"TERMINATING\")" | jq -s "length")
echo $namespaceCount
if [ "$namespaceCount" != "0" ]
then
echo "let next cluster delete"
exit 0
else
echo "delete cluster"
retry -s 15 -t 5 -v 'gcloud container clusters delete $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT --quiet'
fi


@ -7,6 +7,9 @@ Parameters:
Type: String
Default: development
Description: "Your deployment environment: DEV, QA , PROD"
Version:
Type: String
Description: "hash of template"
# ContainerPort:
# Type: Number
@ -231,10 +234,6 @@ Resources:
Statement:
- Effect: Allow
Action:
# Allow upload to S3
- 's3:GetObject'
- 's3:GetObjectVersion'
- 's3:PutObject'
# Allow the use of secret manager
- 'secretsmanager:GetSecretValue'
@ -250,7 +249,7 @@ Resources:
- 'logs:CreateLogStream'
- 'logs:PutLogEvents'
Resource: '*'
DeleteCFNLambdaExecutionRole:
Type: "AWS::IAM::Role"
Properties:
@ -348,12 +347,6 @@ Resources:
SecurityGroups:
- !Ref EFSServerSecurityGroup
S3Bucket:
Type: 'AWS::S3::Bucket'
DeletionPolicy: Retain
Properties:
BucketName: game-ci-storage
@ -388,7 +381,7 @@ Outputs:
Value: !GetAtt 'ECSTaskExecutionRole.Arn'
Export:
Name: !Sub ${EnvironmentName}:ECSTaskExecutionRole
DeleteCFNLambdaExecutionRole:
Description: Lambda execution role for cleaning up cloud formations
Value: !GetAtt 'DeleteCFNLambdaExecutionRole.Arn'


@ -18,7 +18,7 @@ Parameters:
Type: String
Default: development
Description: 'Your deployment environment: DEV, QA , PROD'
BUILDID:
BUILDGUID:
Type: String
Default: ''
StackName:
@ -34,16 +34,16 @@ Resources:
DeleteCFNLambda:
Type: "AWS::Lambda::Function"
Properties:
FunctionName: !Join [ "", [ 'DeleteCFNLambda', !Ref BUILDID ] ]
FunctionName: !Join [ "", [ 'DeleteCFNLambda', !Ref BUILDGUID ] ]
Code:
ZipFile: |
import boto3
import os
import json
stack_name = os.environ['stackName']
delete_stack_name = os.environ['deleteStackName']
def delete_cfn(stack_name):
try:
cfn = boto3.resource('cloudformation')
@ -51,8 +51,8 @@ Resources:
stack.delete()
return "SUCCESS"
except:
return "ERROR"
return "ERROR"
def handler(event, context):
print("Received event:")
print(json.dumps(event))
@ -66,7 +66,7 @@ Resources:
Handler: "index.handler"
Runtime: "python3.6"
Timeout: "5"
Role:
Role:
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
DeleteStackEventRule:
DependsOn:
@ -74,27 +74,27 @@ Resources:
- GenerateCronExpression
Type: "AWS::Events::Rule"
Properties:
Name: !Join [ "", [ 'DeleteStackEventRule', !Ref BUILDID ] ]
Name: !Join [ "", [ 'DeleteStackEventRule', !Ref BUILDGUID ] ]
Description: Delete stack event
ScheduleExpression: !GetAtt GenerateCronExpression.cron_exp
State: "ENABLED"
Targets:
-
Targets:
-
Arn: !GetAtt DeleteCFNLambda.Arn
Id: 'DeleteCFNLambda'
PermissionForDeleteCFNLambda:
Id: 'DeleteCFNLambda'
PermissionForDeleteCFNLambda:
Type: "AWS::Lambda::Permission"
DependsOn:
- DeleteStackEventRule
Properties:
FunctionName: !Join [ "", [ 'DeleteCFNLambda', !Ref BUILDID ] ]
Properties:
FunctionName: !Join [ "", [ 'DeleteCFNLambda', !Ref BUILDGUID ] ]
Action: "lambda:InvokeFunction"
Principal: "events.amazonaws.com"
SourceArn: !GetAtt DeleteStackEventRule.Arn
GenerateCronExpLambda:
Type: "AWS::Lambda::Function"
Properties:
FunctionName: !Join [ "", [ 'GenerateCronExpressionLambda', !Ref BUILDID ] ]
FunctionName: !Join [ "", [ 'GenerateCronExpressionLambda', !Ref BUILDGUID ] ]
Code:
ZipFile: |
from datetime import datetime, timedelta
@ -102,7 +102,7 @@ Resources:
import logging
import json
import cfnresponse
def deletion_time(ttl):
delete_at_time = datetime.now() + timedelta(minutes=int(ttl))
hh = delete_at_time.hour
@ -113,7 +113,7 @@ Resources:
# minutes hours day month day-of-week year
cron_exp = "cron({} {} {} {} ? {})".format(mm, hh, dd, month, yyyy)
return cron_exp
def handler(event, context):
print('Received event: %s' % json.dumps(event))
status = cfnresponse.SUCCESS
@ -132,12 +132,12 @@ Resources:
Handler: "index.handler"
Runtime: "python3.6"
Timeout: "5"
Role:
Role:
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
GenerateCronExpression:
Type: "Custom::GenerateCronExpression"
Version: "1.0"
Properties:
Name: !Join [ "", [ 'GenerateCronExpression', !Ref BUILDID ] ]
Name: !Join [ "", [ 'GenerateCronExpression', !Ref BUILDGUID ] ]
ServiceToken: !GetAtt GenerateCronExpLambda.Arn
ttl: !Ref 'TTL'


@ -30,7 +30,7 @@ Parameters:
Type: Number
Default: 2048
Description: How much memory in megabytes to give the container
BUILDID:
BUILDGUID:
Type: String
Default: ''
Command:
@ -47,7 +47,7 @@ Parameters:
Default: ''
Description: >-
(Optional) An IAM role to give the service's containers if the code within
needs to access other AWS resources like S3 buckets, DynamoDB tables, etc
needs to access other AWS resources
EFSMountDirectory:
Type: String
Default: '/efsdata'
@ -98,7 +98,7 @@ Resources:
Metadata:
'AWS::CloudFormation::Designer':
id: c6f18447-b879-4696-8873-f981b2cedd2b
# template secrets p2 - secret
TaskDefinition:

dist/index.js (generated; binary file not shown)

dist/index.js.map (generated; binary file not shown)

dist/licenses.txt (generated; binary file not shown)

dist/platforms/ubuntu/entrypoint.sh (mode changed: Normal file → Executable file)

dist/platforms/ubuntu/steps/activate.sh (mode changed: Normal file → Executable file)

dist/platforms/ubuntu/steps/build.sh (mode changed: Normal file → Executable file)

dist/platforms/ubuntu/steps/return_license.sh (mode changed: Normal file → Executable file)

dist/platforms/ubuntu/steps/set_gitcredential.sh (mode changed: Normal file → Executable file)


@ -11,26 +11,41 @@
"build": "tsc && ncc build lib --source-map --license licenses.txt",
"lint": "prettier --check \"src/**/*.{js,ts}\" && eslint src/**/*.ts",
"format": "prettier --write \"src/**/*.{js,ts}\"",
"prepare": "husky install",
"cli": "yarn ts-node src/index.ts -m cli",
"cli-aws": "cross-env cloudRunnerCluster=aws yarn run test-cli",
"cli-k8s": "cross-env cloudRunnerCluster=k8s yarn run test-cli",
"test-cli": "cross-env cloudRunnerTests=true yarn ts-node src/index.ts -m cli --projectPath test-project",
"test": "jest",
"prepare": "husky install"
"test-i": "yarn run test-i-aws && yarn run test-i-k8s",
"test-i-f": "yarn run test-i-aws && yarn run test-i-k8s && yarn run cli-k8s && yarn run cli-aws",
"test-i-aws": "cross-env cloudRunnerTests=true cloudRunnerCluster=aws yarn test -i -t \"cloud runner\"",
"test-i-k8s": "cross-env cloudRunnerTests=true cloudRunnerCluster=k8s yarn test -i -t \"cloud runner\""
},
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.4",
"@actions/github": "^2.2.0",
"@kubernetes/client-node": "^0.14.3",
"@octokit/core": "^3.5.1",
"async-wait-until": "^2.0.7",
"aws-sdk": "^2.812.0",
"base-64": "^1.0.0",
"commander": "^8.3.0",
"commander-ts": "^0.2.0",
"kubernetes-client": "^9.0.0",
"nanoid": "^3.1.31",
"semver": "^7.3.5"
"reflect-metadata": "^0.1.13",
"semver": "^7.3.5",
"yaml": "^1.10.2",
"nanoid": "^3.1.31"
},
"devDependencies": {
"@types/jest": "^26.0.15",
"@types/jest": "^27.0.3",
"@types/node": "^14.14.9",
"@types/semver": "^7.3.5",
"@typescript-eslint/parser": "^4.8.1",
"@vercel/ncc": "^0.25.1",
"cross-env": "^7.0.3",
"eslint": "^7.17.0",
"eslint-config-prettier": "^8.1.0",
"eslint-plugin-github": "^4.1.1",
@ -43,7 +58,8 @@
"js-yaml": "^3.14.0",
"lint-staged": "^12.1.2",
"prettier": "^2.2.1",
"ts-jest": "^26.4.4",
"ts-jest": "^26.4.2",
"ts-node": "^10.4.0",
"typescript": "^4.1.3"
},
"lint-staged": {

scripts/cleanupGCPResources.sh (new executable file, 5 lines)

@ -0,0 +1,5 @@
kubectl delete job $(kubectl get jobs -o custom-columns=:.metadata.name)
kubectl delete cronjob $(kubectl get cronjobs -o custom-columns=:.metadata.name)
kubectl delete pod $(kubectl get pods -o custom-columns=:.metadata.name)
kubectl delete pvc $(kubectl get pvc -o custom-columns=:.metadata.name)
kubectl delete secret $(kubectl get secrets -o custom-columns=:.metadata.name)


@ -1,8 +1,8 @@
import * as core from '@actions/core';
import { Action, BuildParameters, Cache, Docker, ImageTag, Kubernetes, Output, RemoteBuilder } from './model';
import { Action, BuildParameters, Cache, Docker, ImageTag, Output, CloudRunner } from './model';
import { CLI } from './model/cli/cli';
import PlatformSetup from './model/platform-setup';
async function run() {
async function runMain() {
try {
Action.checkCompatibility();
Cache.verify();
@ -11,33 +11,28 @@ async function run() {
const buildParameters = await BuildParameters.create();
const baseImage = new ImageTag(buildParameters);
let builtImage;
switch (buildParameters.remoteBuildCluster) {
case 'k8s':
core.info('Building with Kubernetes');
await Kubernetes.runBuildJob(buildParameters, baseImage);
break;
case 'aws':
core.info('Building with AWS');
await RemoteBuilder.build(buildParameters, baseImage);
break;
// default and local case
default:
core.info('Building locally');
PlatformSetup.setup(buildParameters);
builtImage = await Docker.build({ path: actionFolder, dockerfile, baseImage });
await Docker.run(builtImage, { workspace, ...buildParameters });
break;
if (
buildParameters.cloudRunnerCluster &&
buildParameters.cloudRunnerCluster !== '' &&
buildParameters.cloudRunnerCluster !== 'local'
) {
await CloudRunner.run(buildParameters, baseImage.toString());
} else {
core.info('Building locally');
PlatformSetup.setup(buildParameters);
const builtImage = await Docker.build({ path: actionFolder, dockerfile, baseImage });
await Docker.run(builtImage, { workspace, ...buildParameters });
}
// Set output
await Output.setBuildVersion(buildParameters.buildVersion);
} catch (error) {
core.setFailed(error.message);
core.setFailed((error as Error).message);
}
}
run();
const options = CLI.SetupCli();
if (CLI.isCliMode(options)) {
CLI.RunCli(options);
} else {
runMain();
}


@ -42,15 +42,13 @@ describe('BuildParameters', () => {
it('returns the android version code with provided input', async () => {
const mockValue = '42';
jest.spyOn(Input, 'androidVersionCode', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidVersionCode: mockValue }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: mockValue }));
});
it('returns the android version code from version by default', async () => {
const mockValue = '';
jest.spyOn(Input, 'androidVersionCode', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: 1003037 }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: 1003037 }));
});
it('determines the android sdk manager parameters only once', async () => {
@ -61,19 +59,19 @@ describe('BuildParameters', () => {
it('returns the platform', async () => {
const mockValue = 'somePlatform';
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ platform: mockValue }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ platform: mockValue }));
});
it('returns the project path', async () => {
const mockValue = 'path/to/project';
jest.spyOn(Input, 'projectPath', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ projectPath: mockValue }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ projectPath: mockValue }));
});
it('returns the build name', async () => {
const mockValue = 'someBuildName';
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildName: mockValue }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildName: mockValue }));
});
it('returns the build path', async () => {
@ -82,15 +80,13 @@ describe('BuildParameters', () => {
const expectedBuildPath = `${mockPath}/${mockPlatform}`;
jest.spyOn(Input, 'buildsPath', 'get').mockReturnValue(mockPath);
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockPlatform);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ buildPath: expectedBuildPath }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildPath: expectedBuildPath }));
});
it('returns the build file', async () => {
const mockValue = 'someBuildName';
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildFile: mockValue }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildFile: mockValue }));
});
test.each([Platform.types.StandaloneWindows, Platform.types.StandaloneWindows64])(
@ -98,7 +94,7 @@ describe('BuildParameters', () => {
async (targetPlatform) => {
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
await expect(BuildParameters.create()).resolves.toEqual(
expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ buildFile: `${targetPlatform}.exe` }),
);
},
@ -108,7 +104,7 @@ describe('BuildParameters', () => {
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
jest.spyOn(Input, 'androidAppBundle', 'get').mockReturnValue(false);
await expect(BuildParameters.create()).resolves.toEqual(
expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ buildFile: `${targetPlatform}.apk` }),
);
});
@ -117,7 +113,7 @@ describe('BuildParameters', () => {
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
jest.spyOn(Input, 'androidAppBundle', 'get').mockReturnValue(true);
await expect(BuildParameters.create()).resolves.toEqual(
expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ buildFile: `${targetPlatform}.aab` }),
);
});
@ -125,53 +121,43 @@ describe('BuildParameters', () => {
it('returns the build method', async () => {
const mockValue = 'Namespace.ClassName.BuildMethod';
jest.spyOn(Input, 'buildMethod', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildMethod: mockValue }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildMethod: mockValue }));
});
it('returns the android keystore name', async () => {
const mockValue = 'keystore.keystore';
jest.spyOn(Input, 'androidKeystoreName', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidKeystoreName: mockValue }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystoreName: mockValue }));
});
it('returns the android keystore base64-encoded content', async () => {
const mockValue = 'secret';
jest.spyOn(Input, 'androidKeystoreBase64', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidKeystoreBase64: mockValue }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystoreBase64: mockValue }));
});
it('returns the android keystore pass', async () => {
const mockValue = 'secret';
jest.spyOn(Input, 'androidKeystorePass', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidKeystorePass: mockValue }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystorePass: mockValue }));
});
it('returns the android keyalias name', async () => {
const mockValue = 'secret';
jest.spyOn(Input, 'androidKeyaliasName', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidKeyaliasName: mockValue }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeyaliasName: mockValue }));
});
it('returns the android keyalias pass', async () => {
const mockValue = 'secret';
jest.spyOn(Input, 'androidKeyaliasPass', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidKeyaliasPass: mockValue }),
);
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeyaliasPass: mockValue }));
});
it('returns the android target sdk version', async () => {
const mockValue = 'AndroidApiLevelAuto';
jest.spyOn(Input, 'androidTargetSdkVersion', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(
expect(BuildParameters.create()).resolves.toEqual(
expect.objectContaining({ androidTargetSdkVersion: mockValue }),
);
});
@ -179,7 +165,7 @@ describe('BuildParameters', () => {
it('returns the custom parameters', async () => {
const mockValue = '-profile SomeProfile -someBoolean -someValue exampleValue';
jest.spyOn(Input, 'customParameters', 'get').mockReturnValue(mockValue);
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ customParameters: mockValue }));
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ customParameters: mockValue }));
});
});
});


@ -1,5 +1,8 @@
import { customAlphabet } from 'nanoid';
import * as core from '@actions/core';
import AndroidVersioning from './android-versioning';
import CloudRunnerConstants from './cloud-runner/services/cloud-runner-constants';
import CloudRunnerNamespace from './cloud-runner/services/cloud-runner-namespace';
import Input from './input';
import Platform from './platform';
import UnityVersioning from './unity-versioning';
@ -27,17 +30,29 @@ class BuildParameters {
public androidSdkManagerParameters!: string;
public customParameters!: string;
public sshAgent!: string;
public cloudRunnerCluster!: string;
public awsBaseStackName!: string;
public gitPrivateToken!: string;
public remoteBuildCluster!: string;
public awsStackName!: string;
public kubeConfig!: string;
public githubToken!: string;
public remoteBuildMemory!: string;
public remoteBuildCpu!: string;
public cloudRunnerMemory!: string;
public cloudRunnerCpu!: string;
public kubeVolumeSize!: string;
public kubeVolume!: string;
public chownFilesTo!: string;
public postBuildSteps!: string;
public preBuildSteps!: string;
public customJob!: string;
public runNumber!: string;
public branch!: string;
public githubRepo!: string;
public gitSha!: string;
public logId!: string;
public buildGuid!: string;
static async create(): Promise<BuildParameters> {
const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
@ -87,16 +102,27 @@ class BuildParameters {
androidSdkManagerParameters,
customParameters: Input.customParameters,
sshAgent: Input.sshAgent,
gitPrivateToken: Input.gitPrivateToken,
gitPrivateToken: await Input.gitPrivateToken(),
chownFilesTo: Input.chownFilesTo,
remoteBuildCluster: Input.remoteBuildCluster,
awsStackName: Input.awsStackName,
cloudRunnerCluster: Input.cloudRunnerCluster,
awsBaseStackName: Input.awsBaseStackName,
kubeConfig: Input.kubeConfig,
githubToken: Input.githubToken,
remoteBuildMemory: Input.remoteBuildMemory,
remoteBuildCpu: Input.remoteBuildCpu,
githubToken: await Input.githubToken(),
cloudRunnerMemory: Input.cloudRunnerMemory,
cloudRunnerCpu: Input.cloudRunnerCpu,
kubeVolumeSize: Input.kubeVolumeSize,
kubeVolume: Input.kubeVolume,
postBuildSteps: Input.postBuildSteps,
preBuildSteps: Input.preBuildSteps,
customJob: Input.customJob,
runNumber: Input.runNumber,
branch: await Input.branch(),
githubRepo: await Input.githubRepo(),
remoteBuildCluster: Input.cloudRunnerCluster,
awsStackName: Input.awsBaseStackName,
gitSha: Input.gitSha,
logId: customAlphabet(CloudRunnerConstants.alphabet, 9)(),
buildGuid: CloudRunnerNamespace.generateBuildName(Input.runNumber, Input.targetPlatform),
};
}


@ -0,0 +1,23 @@
const targets = new Array();
export function CliFunction(key: string, description: string) {
return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
targets.push({
target,
propertyKey,
descriptor,
key,
description,
});
};
}
export function GetCliFunctions(key) {
return targets.find((x) => x.key === key);
}
export function GetAllCliModes() {
return targets.map((x) => {
return {
key: x.key,
description: x.description,
};
});
}
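
A hypothetical usage sketch of this decorator registry (the ExampleCommands class and the hello key are invented for illustration; the real modes, such as cli and remote-cli, are registered in cli.ts below). It assumes the repository's experimentalDecorators setting:

import { CliFunction, GetCliFunctions, GetAllCliModes } from './cli-decorator';

class ExampleCommands {
  // The decorator pushes { target, propertyKey, descriptor, key, description }
  // into the registry when the class is defined.
  @CliFunction('hello', 'prints a greeting')
  static hello() {
    console.log('hello from the CLI registry');
  }
}

// Resolve the entry by key and invoke it, mirroring what CLI.RunCli does.
const entry = GetCliFunctions('hello');
entry.target[entry.propertyKey]();
console.log(entry.target === ExampleCommands); // true: target is the class for static methods
console.log(GetAllCliModes()); // [{ key: 'hello', description: 'prints a greeting' }]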

src/model/cli/cli.ts (new file, 88 lines)

@ -0,0 +1,88 @@
import { Command } from 'commander-ts';
import { BuildParameters, CloudRunner, ImageTag, Input } from '..';
import * as core from '@actions/core';
import { ActionYamlReader } from '../input-readers/action-yaml';
import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
import { CliFunction, GetAllCliModes, GetCliFunctions } from './cli-decorator';
import { RemoteClientLogger } from './remote-client/remote-client-services/remote-client-logger';
import { CloudRunnerState } from '../cloud-runner/state/cloud-runner-state';
import { SetupCloudRunnerRepository } from './remote-client/setup-cloud-runner-repository';
import * as SDK from 'aws-sdk';
export class CLI {
static async RunCli(options: any): Promise<void> {
Input.githubInputEnabled = false;
const results = GetCliFunctions(options.mode);
if (results === undefined || results.length === 0) {
throw new Error('no CLI mode found');
}
CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
options.versioning = 'None';
Input.cliOptions = options;
return await results.target[results.propertyKey]();
}
static isCliMode(options: any) {
return options.mode !== undefined && options.mode !== '';
}
public static SetupCli() {
const program = new Command();
program.version('0.0.1');
const properties = Object.getOwnPropertyNames(Input);
core.info(`\n`);
core.info(`INPUT:`);
const actionYamlReader: ActionYamlReader = new ActionYamlReader();
for (const element of properties) {
program.option(`--${element} <${element}>`, actionYamlReader.GetActionYamlValue(element));
if (Input[element] !== undefined && Input[element] !== '' && typeof Input[element] !== `function`) {
core.info(`${element} ${Input[element]}`);
}
}
core.info(`\n`);
program.option(
'-m, --mode <mode>',
GetAllCliModes()
.map((x) => `${x.key} (${x.description})`)
.join(` | `),
);
program.parse(process.argv);
return program.opts();
}
@CliFunction(`cli`, `runs a cloud runner build`)
public static async CLIBuild(): Promise<string> {
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
return await CloudRunner.run(buildParameter, baseImage.toString());
}
@CliFunction(`remote-cli`, `sets up a repository, usually before a game-ci build`)
static async runRemoteClientJob() {
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
RemoteClientLogger.log(`Build Params:
${JSON.stringify(buildParameter, undefined, 4)}
`);
CloudRunnerState.setup(buildParameter);
await SetupCloudRunnerRepository.run();
}
@CliFunction(`cach-push`, `push to cache`)
static async cachePush() {}
@CliFunction(`cach-pull`, `pull from cache`)
static async cachePull() {}
@CliFunction(`garbage-collect-aws`, `garbage collect aws`)
static async garbageCollectAws() {
process.env.AWS_REGION = Input.region;
const CF = new SDK.CloudFormation();
const stacks = await CF.listStacks().promise();
CloudRunnerLogger.log(JSON.stringify(stacks, undefined, 4));
}
}


@ -0,0 +1,117 @@
import { assert } from 'console';
import fs from 'fs';
import path from 'path';
import { Input } from '../../..';
import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
import { CloudRunnerSystem } from './cloud-runner-system';
import { LFSHashing } from './lfs-hashing';
import { RemoteClientLogger } from './remote-client-logger';
export class Caching {
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheKey: string) {
const startPath = process.cwd();
try {
if (!fs.existsSync(cacheFolder)) {
await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
}
process.chdir(path.resolve(sourceFolder, '..'));
if (Input.cloudRunnerTests) {
CloudRunnerLogger.log(
`Hashed cache folder ${await LFSHashing.hashAllFiles(sourceFolder)} ${sourceFolder} ${path.basename(
sourceFolder,
)}`,
);
}
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`ls ${path.basename(sourceFolder)}`);
}
await CloudRunnerSystem.Run(`zip ${cacheKey}.zip ${path.basename(sourceFolder)}`);
assert(fs.existsSync(`${cacheKey}.zip`), 'cache zip exists');
assert(fs.existsSync(path.basename(sourceFolder)), 'source folder exists');
await CloudRunnerSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
RemoteClientLogger.log(`moved ${cacheKey}.zip to ${cacheFolder}`);
assert(fs.existsSync(`${path.join(cacheFolder, cacheKey)}.zip`), 'cache zip exists inside cache folder');
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`ls ${cacheFolder}`);
}
} catch (error) {
process.chdir(`${startPath}`);
throw error;
}
process.chdir(`${startPath}`);
}
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheKey: string = ``) {
const startPath = process.cwd();
RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
try {
if (!fs.existsSync(cacheFolder)) {
fs.mkdirSync(cacheFolder);
}
if (!fs.existsSync(destinationFolder)) {
fs.mkdirSync(destinationFolder);
}
const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
.replace(/\n/g, ``)
.replace('.zip', '');
process.chdir(cacheFolder);
const cacheSelection = cacheKey !== `` && fs.existsSync(`${cacheKey}.zip`) ? cacheKey : latestInBranch;
await CloudRunnerLogger.log(`cache key ${cacheKey} selection ${cacheSelection}`);
if (fs.existsSync(`${cacheSelection}.zip`)) {
const resultsFolder = `results${CloudRunnerState.buildParams.buildGuid}`;
await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`tree ${destinationFolder}`);
}
RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.zip`);
assert(`${fs.existsSync(destinationFolder)}`);
assert(`${fs.existsSync(`${cacheSelection}.zip`)}`);
const fullResultsFolder = path.join(cacheFolder, resultsFolder);
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
}
await CloudRunnerSystem.Run(`unzip ${cacheSelection}.zip -d ${path.basename(resultsFolder)}`);
RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
assert(`${fs.existsSync(fullResultsFolder)}`);
const destinationParentFolder = path.resolve(destinationFolder, '..');
if (fs.existsSync(destinationFolder)) {
fs.rmSync(destinationFolder, { recursive: true, force: true });
}
await CloudRunnerSystem.Run(
`mv "${fullResultsFolder}/${path.basename(destinationFolder)}" "${destinationParentFolder}"`,
);
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`tree ${destinationParentFolder}`);
}
} else {
RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
if (cacheSelection !== ``) {
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
}
RemoteClientLogger.logWarning(`cache item ${cacheKey}.zip doesn't exist ${destinationFolder}`);
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
}
}
} catch (error) {
process.chdir(`${startPath}`);
throw error;
}
process.chdir(`${startPath}`);
}
public static handleCachePurging() {
if (process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined) {
RemoteClientLogger.log(`purging ${CloudRunnerState.purgeRemoteCaching}`);
fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
}
}
}


@ -0,0 +1,37 @@
import { exec } from 'child_process';
import { RemoteClientLogger } from './remote-client-logger';
export class CloudRunnerSystem {
public static async Run(command: string, suppressError = false) {
for (const element of command.split(`\n`)) {
RemoteClientLogger.log(element);
}
return await new Promise<string>((promise) => {
let output = '';
const child = exec(command, (error, stdout, stderr) => {
if (error && !suppressError) {
throw error;
}
if (stderr) {
const diagnosticOutput = `${stderr.toString()}`;
RemoteClientLogger.logCliDiagnostic(diagnosticOutput);
output += diagnosticOutput;
return;
}
const outputChunk = `${stdout}`;
output += outputChunk;
});
child.on('close', function (code) {
RemoteClientLogger.log(`[Exit code ${code}]`);
if (code !== 0 && !suppressError) {
throw new Error(output);
}
const outputLines = output.split(`\n`);
for (const element of outputLines) {
RemoteClientLogger.log(element);
}
promise(output);
});
});
}
}


@ -0,0 +1,42 @@
import path from 'path';
import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
import { CloudRunnerSystem } from './cloud-runner-system';
import fs from 'fs';
import { assert } from 'console';
import { Input } from '../../..';
import { RemoteClientLogger } from './remote-client-logger';
export class LFSHashing {
public static async createLFSHashFiles() {
try {
await CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
await CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
assert(fs.existsSync(`.lfs-assets-guid-sum`));
assert(fs.existsSync(`.lfs-assets-guid`));
const lfsHashes = {
lfsGuid: fs
.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8')
.replace(/\n/g, ``),
lfsGuidSum: fs
.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8')
.replace(/\n/g, ``),
};
if (Input.cloudRunnerTests) {
RemoteClientLogger.log(lfsHashes.lfsGuid);
RemoteClientLogger.log(lfsHashes.lfsGuidSum);
}
return lfsHashes;
} catch (error) {
throw error;
}
}
public static async hashAllFiles(folder: string) {
const startPath = process.cwd();
process.chdir(folder);
const result = await (await CloudRunnerSystem.Run(`find -type f -exec md5sum "{}" + | sort | md5sum`))
.replace(/\n/g, '')
.split(` `)[0];
process.chdir(startPath);
return result;
}
}


@ -0,0 +1,19 @@
import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
export class RemoteClientLogger {
public static log(message: string) {
CloudRunnerLogger.log(`[Client] ${message}`);
}
public static logCliError(message: string) {
CloudRunnerLogger.log(`[Client][Error] ${message}`);
}
public static logCliDiagnostic(message: string) {
CloudRunnerLogger.log(`[Client][Diagnostic] ${message}`);
}
public static logWarning(message) {
CloudRunnerLogger.logWarning(message);
}
}


@ -0,0 +1,81 @@
import fs from 'fs';
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
import { Caching } from './remote-client-services/caching';
import { LFSHashing } from './remote-client-services/lfs-hashing';
import { CloudRunnerSystem } from './remote-client-services/cloud-runner-system';
import { Input } from '../..';
import { RemoteClientLogger } from './remote-client-services/remote-client-logger';
import path from 'path';
import { assert } from 'console';
export class SetupCloudRunnerRepository {
public static async run() {
try {
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.cacheFolderFull}`);
process.chdir(CloudRunnerState.repoPathFull);
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`ls -lh`);
await CloudRunnerSystem.Run(`tree`);
}
await SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
if (Input.cloudRunnerTests) {
await CloudRunnerSystem.Run(`ls -lh`);
await CloudRunnerSystem.Run(`tree`);
}
const lfsHashes = await LFSHashing.createLFSHashFiles();
if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
}
await Caching.PullFromCache(
CloudRunnerState.lfsCacheFolderFull,
CloudRunnerState.lfsDirectoryFull,
`${lfsHashes.lfsGuid}`,
);
await SetupCloudRunnerRepository.pullLatestLFS();
await Caching.PushToCache(
CloudRunnerState.lfsCacheFolderFull,
CloudRunnerState.lfsDirectoryFull,
`${lfsHashes.lfsGuid}`,
);
await Caching.PullFromCache(CloudRunnerState.libraryCacheFolderFull, CloudRunnerState.libraryFolderFull);
Caching.handleCachePurging();
} catch (error) {
throw error;
}
}
private static async cloneRepoWithoutLFSFiles() {
try {
process.chdir(`${CloudRunnerState.repoPathFull}`);
RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
RemoteClientLogger.log(`Cloning the repository being built:`);
await CloudRunnerSystem.Run(`git lfs install --skip-smudge`);
await CloudRunnerSystem.Run(
`git clone ${CloudRunnerState.targetBuildRepoUrl} ${path.resolve(
`..`,
path.basename(CloudRunnerState.repoPathFull),
)}`,
);
assert(fs.existsSync(`.git`));
RemoteClientLogger.log(`${CloudRunnerState.buildParams.branch}`);
await CloudRunnerSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
assert(fs.existsSync(path.join(`.git`, `lfs`)), 'LFS folder should not exist before caching');
RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
} catch (error) {
throw error;
}
}
private static async pullLatestLFS() {
await CloudRunnerSystem.Run(`ls -lh ${CloudRunnerState.lfsDirectoryFull}/..`);
process.chdir(CloudRunnerState.repoPathFull);
await CloudRunnerSystem.Run(`git lfs pull`);
RemoteClientLogger.log(`pulled latest LFS files`);
assert(fs.existsSync(CloudRunnerState.lfsDirectoryFull));
await CloudRunnerSystem.Run(`ls -lh ${CloudRunnerState.lfsDirectoryFull}/..`);
}
}


@ -0,0 +1,106 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import * as core from '@actions/core';
import * as SDK from 'aws-sdk';
import * as fs from 'fs';
import path from 'path';
const crypto = require('crypto');
export class AWSBaseStack {
constructor(baseStackName: string) {
this.baseStackName = baseStackName;
}
private baseStackName: string;
async setupBaseStack(CF: SDK.CloudFormation) {
const baseStackName = this.baseStackName;
const baseStack = fs.readFileSync(path.join(__dirname, 'cloud-formations', 'base-setup.yml'), 'utf8');
// Cloud Formation Input
const describeStackInput: SDK.CloudFormation.DescribeStacksInput = {
StackName: baseStackName,
};
const parametersWithoutHash: SDK.CloudFormation.Parameter[] = [
{ ParameterKey: 'EnvironmentName', ParameterValue: baseStackName },
];
const parametersHash = crypto
.createHash('md5')
.update(baseStack + JSON.stringify(parametersWithoutHash))
.digest('hex');
const parameters: SDK.CloudFormation.Parameter[] = [
...parametersWithoutHash,
...[{ ParameterKey: 'Version', ParameterValue: parametersHash }],
];
const updateInput: SDK.CloudFormation.UpdateStackInput = {
StackName: baseStackName,
TemplateBody: baseStack,
Parameters: parameters,
Capabilities: ['CAPABILITY_IAM'],
};
const createStackInput: SDK.CloudFormation.CreateStackInput = {
StackName: baseStackName,
TemplateBody: baseStack,
Parameters: parameters,
Capabilities: ['CAPABILITY_IAM'],
};
const stacks = await CF.listStacks({
StackStatusFilter: ['UPDATE_COMPLETE', 'CREATE_COMPLETE', 'ROLLBACK_COMPLETE'],
}).promise();
const stackNames = stacks.StackSummaries?.map((x) => x.StackName) || [];
    const stackExists: boolean = stackNames.includes(baseStackName);
const describeStack = async () => {
return await CF.describeStacks(describeStackInput).promise();
};
try {
if (!stackExists) {
CloudRunnerLogger.log(`${baseStackName} stack does not exist (${JSON.stringify(stackNames)})`);
await CF.createStack(createStackInput).promise();
CloudRunnerLogger.log(`created stack (version: ${parametersHash})`);
}
const CFState = await describeStack();
let stack = CFState.Stacks?.[0];
if (!stack) {
throw new Error(`Base stack doesn't exist, even after creation, stackExists check: ${stackExists}`);
}
const stackVersion = stack.Parameters?.find((x) => x.ParameterKey === 'Version')?.ParameterValue;
if (stack.StackStatus === 'CREATE_IN_PROGRESS') {
await CF.waitFor('stackCreateComplete', describeStackInput).promise();
}
if (stackExists) {
CloudRunnerLogger.log(`Base stack exists (version: ${stackVersion}, local version: ${parametersHash})`);
if (parametersHash !== stackVersion) {
CloudRunnerLogger.log(`Attempting update of base stack`);
try {
await CF.updateStack(updateInput).promise();
} catch (error: any) {
if (error['message'].includes('No updates are to be performed')) {
CloudRunnerLogger.log(`No updates are to be performed`);
} else {
CloudRunnerLogger.log(`Update Failed (Stack name: ${baseStackName})`);
CloudRunnerLogger.log(error['message']);
}
CloudRunnerLogger.log(`Continuing...`);
}
} else {
CloudRunnerLogger.log(`No update required`);
}
stack = (await describeStack()).Stacks?.[0];
if (!stack) {
throw new Error(
`Base stack doesn't exist, even after updating and creation, stackExists check: ${stackExists}`,
);
}
if (stack.StackStatus === 'UPDATE_IN_PROGRESS') {
await CF.waitFor('stackUpdateComplete', describeStackInput).promise();
}
}
CloudRunnerLogger.log('base stack is now ready');
} catch (error) {
core.error(JSON.stringify(await describeStack(), undefined, 4));
throw error;
}
}
}
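For reference, a minimal sketch of the versioning scheme setupBaseStack relies on above: the MD5 of the template body plus the non-hash parameters is stored on the stack as the 'Version' parameter, and an update is only attempted when a freshly computed hash differs from the stored one. The helper name below is illustrative, not part of the change set.

import * as crypto from 'crypto';

// Computes the same 'Version' value that setupBaseStack stores on the base stack.
function templateVersion(templateBody: string, parameters: { ParameterKey: string; ParameterValue: string }[]): string {
  return crypto
    .createHash('md5')
    .update(templateBody + JSON.stringify(parameters))
    .digest('hex');
}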

View File

@ -0,0 +1,16 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import * as SDK from 'aws-sdk';
import * as core from '@actions/core';
import { Input } from '../..';
export class AWSError {
static async handleStackCreationFailure(error: any, CF: SDK.CloudFormation, taskDefStackName: string) {
CloudRunnerLogger.log('aws error: ');
core.error(JSON.stringify(error, undefined, 4));
if (Input.cloudRunnerTests) {
CloudRunnerLogger.log('Getting events and resources for task stack');
const events = (await CF.describeStackEvents({ StackName: taskDefStackName }).promise()).StackEvents;
CloudRunnerLogger.log(JSON.stringify(events, undefined, 4));
}
}
}

View File

@ -0,0 +1,141 @@
import * as SDK from 'aws-sdk';
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { AWSTemplates } from './aws-templates';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { AWSError } from './aws-error';
export class AWSJobStack {
private baseStackName: string;
constructor(baseStackName: string) {
this.baseStackName = baseStackName;
}
public async setupCloudFormations(
CF: SDK.CloudFormation,
buildGuid: string,
image: string,
entrypoint: string[],
commands: string,
mountdir: string,
workingdir: string,
secrets: CloudRunnerSecret[],
): Promise<CloudRunnerAWSTaskDef> {
const taskDefStackName = `${this.baseStackName}-${buildGuid}`;
let taskDefCloudFormation = AWSTemplates.readTaskCloudFormationTemplate();
for (const secret of secrets) {
secret.ParameterKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${secret.ParameterKey.replace(
/[^\dA-Za-z]/g,
'',
)}`;
      if (typeof secret.ParameterValue === 'number') {
secret.ParameterValue = `${secret.ParameterValue}`;
}
if (!secret.ParameterValue || secret.ParameterValue === '') {
secrets = secrets.filter((x) => x !== secret);
continue;
}
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
taskDefCloudFormation,
'p1 - input',
AWSTemplates.getParameterTemplate(secret.ParameterKey),
);
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
taskDefCloudFormation,
'p2 - secret',
AWSTemplates.getSecretTemplate(`${secret.ParameterKey}`),
);
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
taskDefCloudFormation,
'p3 - container def',
AWSTemplates.getSecretDefinitionTemplate(secret.EnvironmentVariable, secret.ParameterKey),
);
}
const secretsMappedToCloudFormationParameters = secrets.map((x) => {
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
});
const parameters = [
{
ParameterKey: 'EnvironmentName',
ParameterValue: this.baseStackName,
},
{
ParameterKey: 'ImageUrl',
ParameterValue: image,
},
{
ParameterKey: 'ServiceName',
ParameterValue: taskDefStackName,
},
{
ParameterKey: 'Command',
ParameterValue: 'echo "this template should be overwritten when running a task"',
},
{
ParameterKey: 'EntryPoint',
ParameterValue: entrypoint.join(','),
},
{
ParameterKey: 'WorkingDirectory',
ParameterValue: workingdir,
},
{
ParameterKey: 'EFSMountDirectory',
ParameterValue: mountdir,
},
...secretsMappedToCloudFormationParameters,
];
let previousStackExists = true;
while (previousStackExists) {
previousStackExists = false;
const stacks = await CF.listStacks().promise();
if (!stacks.StackSummaries) {
        throw new Error('Failed to get stacks');
}
for (let index = 0; index < stacks.StackSummaries.length; index++) {
const element = stacks.StackSummaries[index];
if (element.StackName === taskDefStackName && element.StackStatus !== 'DELETE_COMPLETE') {
previousStackExists = true;
CloudRunnerLogger.log(`Previous stack still exists: ${JSON.stringify(element)}`);
}
}
}
try {
await CF.createStack({
StackName: taskDefStackName,
TemplateBody: taskDefCloudFormation,
Capabilities: ['CAPABILITY_IAM'],
Parameters: parameters,
}).promise();
CloudRunnerLogger.log('Creating cloud runner job');
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
} catch (error) {
await AWSError.handleStackCreationFailure(
error,
CF,
taskDefStackName,
//taskDefCloudFormation,
//parameters,
//secrets,
);
throw error;
}
const taskDefResources = (
await CF.describeStackResources({
StackName: taskDefStackName,
}).promise()
).StackResources;
const baseResources = (await CF.describeStackResources({ StackName: this.baseStackName }).promise()).StackResources;
return {
taskDefStackName,
taskDefCloudFormation,
taskDefResources,
baseResources,
};
}
}
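For context, the loop above rewrites each secret's ParameterKey into a valid CloudFormation parameter name: the build GUID is prefixed and every non-alphanumeric character is stripped. A small worked example with hypothetical values (not part of the change set):

// Hypothetical inputs
const buildGuid = '0-standalonelinux64-abcd';
const parameterKey = 'UNITY_LICENSE';
// Same transformation as setupCloudFormations applies above
const cloudFormationKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${parameterKey.replace(/[^\dA-Za-z]/g, '')}`;
// => '0standalonelinux64abcdUNITYLICENSE'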

View File

@ -0,0 +1,228 @@
import * as AWS from 'aws-sdk';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import * as core from '@actions/core';
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
import * as zlib from 'zlib';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { Input } from '../..';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
class AWSTaskRunner {
static async runTask(
taskDef: CloudRunnerAWSTaskDef,
ECS: AWS.ECS,
CF: AWS.CloudFormation,
environment: CloudRunnerEnvironmentVariable[],
buildGuid: string,
commands: string,
) {
const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
const taskDefinition =
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
const SubnetOne =
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetOne')?.PhysicalResourceId || '';
const SubnetTwo =
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetTwo')?.PhysicalResourceId || '';
const ContainerSecurityGroup =
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ContainerSecurityGroup')?.PhysicalResourceId || '';
const streamName =
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';
const task = await ECS.runTask({
cluster,
taskDefinition,
platformVersion: '1.4.0',
overrides: {
containerOverrides: [
{
name: taskDef.taskDefStackName,
environment,
command: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(commands, CloudRunnerState.buildParams)],
},
],
},
launchType: 'FARGATE',
networkConfiguration: {
awsvpcConfiguration: {
subnets: [SubnetOne, SubnetTwo],
assignPublicIp: 'ENABLED',
securityGroups: [ContainerSecurityGroup],
},
},
}).promise();
CloudRunnerLogger.log('Cloud runner job is starting');
const taskArn = task.tasks?.[0].taskArn || '';
try {
await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
} catch (error_) {
const error = error_ as Error;
await new Promise((resolve) => setTimeout(resolve, 3000));
CloudRunnerLogger.log(
`Cloud runner job has ended ${
(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].lastStatus
}`,
);
core.setFailed(error);
core.error(error);
}
CloudRunnerLogger.log(`Cloud runner job is running`);
const output = await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].exitCode;
CloudRunnerLogger.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {
core.error(
`job failed with exit code ${exitCode} ${JSON.stringify(
await ECS.describeTasks({ tasks: [taskArn], cluster }).promise(),
undefined,
4,
)}`,
);
throw new Error(`job failed with exit code ${exitCode}`);
} else {
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
return output;
}
}
static async describeTasks(ECS: AWS.ECS, clusterName: string, taskArn: string) {
const tasks = await ECS.describeTasks({
cluster: clusterName,
tasks: [taskArn],
}).promise();
if (tasks.tasks?.[0]) {
return tasks.tasks?.[0];
} else {
throw new Error('No task found');
}
}
static async streamLogsUntilTaskStops(
ECS: AWS.ECS,
CF: AWS.CloudFormation,
taskDef: CloudRunnerAWSTaskDef,
clusterName: string,
taskArn: string,
kinesisStreamName: string,
) {
const kinesis = new AWS.Kinesis();
const stream = await AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
let iterator = await AWSTaskRunner.getLogIterator(kinesis, stream);
CloudRunnerLogger.log(
`Cloud runner job status is ${(await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn))?.lastStatus}`,
);
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${CF.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
CloudRunnerLogger.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let shouldReadLogs = true;
let timestamp: number = 0;
let output = '';
while (shouldReadLogs) {
await new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
({ iterator, shouldReadLogs, output } = await AWSTaskRunner.handleLogStreamIteration(
kinesis,
iterator,
shouldReadLogs,
taskDef,
output,
));
}
return output;
}
private static async handleLogStreamIteration(
kinesis: AWS.Kinesis,
iterator: string,
shouldReadLogs: boolean,
taskDef: CloudRunnerAWSTaskDef,
output: string,
) {
const records = await kinesis
.getRecords({
ShardIterator: iterator,
})
.promise();
iterator = records.NextShardIterator || '';
({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
return { iterator, shouldReadLogs, output };
}
private static checkStreamingShouldContinue(taskData: AWS.ECS.Task, timestamp: number, shouldReadLogs: boolean) {
if (taskData?.lastStatus !== 'RUNNING') {
if (timestamp === 0) {
CloudRunnerLogger.log('## Cloud runner job stopped, streaming end of logs');
timestamp = Date.now();
}
if (timestamp !== 0 && Date.now() - timestamp > 30000) {
CloudRunnerLogger.log('## Cloud runner status is not RUNNING for 30 seconds, last query for logs');
shouldReadLogs = false;
}
CloudRunnerLogger.log(`## Status of job: ${taskData.lastStatus}`);
}
return { timestamp, shouldReadLogs };
}
private static logRecords(
records,
iterator: string,
taskDef: CloudRunnerAWSTaskDef,
shouldReadLogs: boolean,
output: string,
) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(
zlib.gunzipSync(Buffer.from(records.Records[index].Data as string, 'base64')).toString('utf8'),
);
if (json.messageType === 'DATA_MESSAGE') {
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
let message = json.logEvents[logEventsIndex].message;
if (json.logEvents[logEventsIndex].message.includes(`---${CloudRunnerState.buildParams.logId}`)) {
CloudRunnerLogger.log('End of log transmission received');
shouldReadLogs = false;
} else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
if (Input.cloudRunnerTests) {
output += message;
}
CloudRunnerLogger.log(message);
}
}
}
}
return { shouldReadLogs, output };
}
private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
return await kinesis
.describeStream({
StreamName: kinesisStreamName,
})
.promise();
}
private static async getLogIterator(kinesis: AWS.Kinesis, stream) {
return (
(
await kinesis
.getShardIterator({
ShardIteratorType: 'TRIM_HORIZON',
StreamName: stream.StreamDescription.StreamName,
ShardId: stream.StreamDescription.Shards[0].ShardId,
})
.promise()
).ShardIterator || ''
);
}
}
export default AWSTaskRunner;
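A note on the decoding in logRecords above: each Kinesis record is treated as a gzipped, base64-encoded CloudWatch Logs subscription payload, which is why messageType and logEvents are the fields read. A minimal sketch of that single step; the payload shape is an assumption drawn from the fields accessed above.

import * as zlib from 'zlib';

// Decodes one Kinesis record the same way logRecords does.
function decodeRecord(data: string): { messageType: string; logEvents: { message: string }[] } {
  return JSON.parse(zlib.gunzipSync(Buffer.from(data, 'base64')).toString('utf8'));
}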

View File

@ -0,0 +1,38 @@
import * as fs from 'fs';
export class AWSTemplates {
public static getParameterTemplate(p1) {
return `
${p1}:
Type: String
Default: ''
`;
}
public static getSecretTemplate(p1) {
return `
${p1}Secret:
Type: AWS::SecretsManager::Secret
Properties:
Name: '${p1}'
SecretString: !Ref ${p1}
`;
}
public static getSecretDefinitionTemplate(p1, p2) {
return `
- Name: '${p1}'
ValueFrom: !Ref ${p2}Secret
`;
}
public static insertAtTemplate(template, insertionKey, insertion) {
const index = template.search(insertionKey) + insertionKey.length + '\n'.length;
template = [template.slice(0, index), insertion, template.slice(index)].join('');
return template;
}
public static readTaskCloudFormationTemplate(): string {
return fs.readFileSync(`${__dirname}/cloud-formations/task-def-formation.yml`, 'utf8');
}
}
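These helpers splice parameter, secret, and container-definition snippets into the task CloudFormation template at the '# template secrets p1/p2/p3' comment markers that appear in task-def-formation.yml below; each snippet lands on the line after its marker. A minimal usage sketch, mirroring what AWSJobStack does above (the key names are hypothetical):

let template = AWSTemplates.readTaskCloudFormationTemplate();
template = AWSTemplates.insertAtTemplate(template, 'p1 - input', AWSTemplates.getParameterTemplate('UNITYLICENSE'));
template = AWSTemplates.insertAtTemplate(template, 'p2 - secret', AWSTemplates.getSecretTemplate('UNITYLICENSE'));
template = AWSTemplates.insertAtTemplate(
  template,
  'p3 - container def',
  AWSTemplates.getSecretDefinitionTemplate('UNITY_LICENSE', 'UNITYLICENSE'),
);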

View File

@ -0,0 +1,391 @@
AWSTemplateFormatVersion: '2010-09-09'
Description: AWS Fargate cluster that can span public and private subnets. Supports
public facing load balancers, private internal load balancers, and
both internal and external service discovery namespaces.
Parameters:
EnvironmentName:
Type: String
Default: development
    Description: 'Your deployment environment: DEV, QA, PROD'
Version:
Type: String
Description: 'hash of template'
# ContainerPort:
# Type: Number
# Default: 80
# Description: What port number the application inside the docker container is binding to
Mappings:
# Hard values for the subnet masks. These masks define
# the range of internal IP addresses that can be assigned.
  # The VPC can have all IPs from 10.0.0.0 to 10.0.255.255
  # There are two public subnets, which cover the ranges:
  #
  # 10.0.0.0 - 10.0.0.255
  # 10.0.1.0 - 10.0.1.255
SubnetConfig:
VPC:
CIDR: '10.0.0.0/16'
PublicOne:
CIDR: '10.0.0.0/24'
PublicTwo:
CIDR: '10.0.1.0/24'
Resources:
# VPC in which containers will be networked.
  # It has two public subnets.
  # We distribute the subnets across the first two available
  # availability zones in the region, for high availability.
VPC:
Type: AWS::EC2::VPC
Properties:
EnableDnsSupport: true
EnableDnsHostnames: true
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
EFSServerSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupName: 'efs-server-endpoints'
      GroupDescription: Which client IP addresses are allowed to access the EFS server
VpcId: !Ref 'VPC'
SecurityGroupIngress:
- IpProtocol: tcp
FromPort: 2049
ToPort: 2049
SourceSecurityGroupId: !Ref ContainerSecurityGroup
#CidrIp: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
# A security group for the containers we will run in Fargate.
# Rules are added to this security group based on what ingress you
# add for the cluster.
ContainerSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupName: 'task security group'
GroupDescription: Access to the Fargate containers
VpcId: !Ref 'VPC'
# SecurityGroupIngress:
# - IpProtocol: tcp
# FromPort: !Ref ContainerPort
# ToPort: !Ref ContainerPort
# CidrIp: 0.0.0.0/0
SecurityGroupEgress:
- IpProtocol: -1
FromPort: 2049
ToPort: 2049
CidrIp: '0.0.0.0/0'
# Two public subnets, where containers can have public IP addresses
PublicSubnetOne:
Type: AWS::EC2::Subnet
Properties:
AvailabilityZone: !Select
- 0
- Fn::GetAZs: !Ref 'AWS::Region'
VpcId: !Ref 'VPC'
CidrBlock: !FindInMap ['SubnetConfig', 'PublicOne', 'CIDR']
# MapPublicIpOnLaunch: true
PublicSubnetTwo:
Type: AWS::EC2::Subnet
Properties:
AvailabilityZone: !Select
- 1
- Fn::GetAZs: !Ref 'AWS::Region'
VpcId: !Ref 'VPC'
CidrBlock: !FindInMap ['SubnetConfig', 'PublicTwo', 'CIDR']
# MapPublicIpOnLaunch: true
# Setup networking resources for the public subnets. Containers
# in the public subnets have public IP addresses and the routing table
# sends network traffic via the internet gateway.
InternetGateway:
Type: AWS::EC2::InternetGateway
GatewayAttachement:
Type: AWS::EC2::VPCGatewayAttachment
Properties:
VpcId: !Ref 'VPC'
InternetGatewayId: !Ref 'InternetGateway'
  # Attaching an Internet Gateway to a route table makes it public.
PublicRouteTable:
Type: AWS::EC2::RouteTable
Properties:
VpcId: !Ref 'VPC'
PublicRoute:
Type: AWS::EC2::Route
DependsOn: GatewayAttachement
Properties:
RouteTableId: !Ref 'PublicRouteTable'
DestinationCidrBlock: '0.0.0.0/0'
GatewayId: !Ref 'InternetGateway'
# Attaching a public route table makes a subnet public.
PublicSubnetOneRouteTableAssociation:
Type: AWS::EC2::SubnetRouteTableAssociation
Properties:
SubnetId: !Ref PublicSubnetOne
RouteTableId: !Ref PublicRouteTable
PublicSubnetTwoRouteTableAssociation:
Type: AWS::EC2::SubnetRouteTableAssociation
Properties:
SubnetId: !Ref PublicSubnetTwo
RouteTableId: !Ref PublicRouteTable
# ECS Resources
ECSCluster:
Type: AWS::ECS::Cluster
  # A role used to allow AWS Autoscaling to inspect stats and adjust scalable targets
# on your AWS account
AutoscalingRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Effect: Allow
Principal:
Service: [application-autoscaling.amazonaws.com]
Action: ['sts:AssumeRole']
Path: /
Policies:
- PolicyName: service-autoscaling
PolicyDocument:
Statement:
- Effect: Allow
Action:
- 'application-autoscaling:*'
- 'cloudwatch:DescribeAlarms'
- 'cloudwatch:PutMetricAlarm'
- 'ecs:DescribeServices'
- 'ecs:UpdateService'
Resource: '*'
# This is an IAM role which authorizes ECS to manage resources on your
# account on your behalf, such as updating your load balancer with the
# details of where your containers are, so that traffic can reach your
# containers.
ECSRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Effect: Allow
Principal:
Service: [ecs.amazonaws.com]
Action: ['sts:AssumeRole']
Path: /
Policies:
- PolicyName: ecs-service
PolicyDocument:
Statement:
- Effect: Allow
Action:
# Rules which allow ECS to attach network interfaces to instances
# on your behalf in order for awsvpc networking mode to work right
- 'ec2:AttachNetworkInterface'
- 'ec2:CreateNetworkInterface'
- 'ec2:CreateNetworkInterfacePermission'
- 'ec2:DeleteNetworkInterface'
- 'ec2:DeleteNetworkInterfacePermission'
- 'ec2:Describe*'
- 'ec2:DetachNetworkInterface'
# Rules which allow ECS to update load balancers on your behalf
                  # with the information about how to send traffic to your containers
- 'elasticloadbalancing:DeregisterInstancesFromLoadBalancer'
- 'elasticloadbalancing:DeregisterTargets'
- 'elasticloadbalancing:Describe*'
- 'elasticloadbalancing:RegisterInstancesWithLoadBalancer'
- 'elasticloadbalancing:RegisterTargets'
Resource: '*'
# This is a role which is used by the ECS tasks themselves.
ECSTaskExecutionRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Effect: Allow
Principal:
Service: [ecs-tasks.amazonaws.com]
Action: ['sts:AssumeRole']
Path: /
Policies:
- PolicyName: AmazonECSTaskExecutionRolePolicy
PolicyDocument:
Statement:
- Effect: Allow
Action:
# Allow the use of secret manager
- 'secretsmanager:GetSecretValue'
- 'kms:Decrypt'
# Allow the ECS Tasks to download images from ECR
- 'ecr:GetAuthorizationToken'
- 'ecr:BatchCheckLayerAvailability'
- 'ecr:GetDownloadUrlForLayer'
- 'ecr:BatchGetImage'
# Allow the ECS tasks to upload logs to CloudWatch
- 'logs:CreateLogStream'
- 'logs:PutLogEvents'
Resource: '*'
DeleteCFNLambdaExecutionRole:
Type: 'AWS::IAM::Role'
Properties:
AssumeRolePolicyDocument:
Version: '2012-10-17'
Statement:
- Effect: 'Allow'
Principal:
Service: ['lambda.amazonaws.com']
Action: 'sts:AssumeRole'
Path: '/'
Policies:
- PolicyName: DeleteCFNLambdaExecutionRole
PolicyDocument:
Version: '2012-10-17'
Statement:
- Effect: 'Allow'
Action:
- 'logs:CreateLogGroup'
- 'logs:CreateLogStream'
- 'logs:PutLogEvents'
Resource: 'arn:aws:logs:*:*:*'
- Effect: 'Allow'
Action:
- 'cloudformation:DeleteStack'
- 'kinesis:DeleteStream'
- 'secretsmanager:DeleteSecret'
- 'kinesis:DescribeStreamSummary'
- 'logs:DeleteLogGroup'
- 'logs:DeleteSubscriptionFilter'
- 'ecs:DeregisterTaskDefinition'
- 'lambda:DeleteFunction'
- 'lambda:InvokeFunction'
- 'events:RemoveTargets'
- 'events:DeleteRule'
- 'lambda:RemovePermission'
Resource: '*'
### cloud watch to kinesis role
CloudWatchIAMRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Effect: Allow
Principal:
Service: [logs.amazonaws.com]
Action: ['sts:AssumeRole']
Path: /
Policies:
- PolicyName: service-autoscaling
PolicyDocument:
Statement:
- Effect: Allow
Action:
- 'kinesis:PutRecord'
Resource: '*'
#####################EFS#####################
EfsFileStorage:
Type: 'AWS::EFS::FileSystem'
Properties:
BackupPolicy:
Status: ENABLED
PerformanceMode: maxIO
Encrypted: false
FileSystemPolicy:
Version: '2012-10-17'
Statement:
- Effect: 'Allow'
Action:
- 'elasticfilesystem:ClientMount'
- 'elasticfilesystem:ClientWrite'
- 'elasticfilesystem:ClientRootAccess'
Principal:
AWS: '*'
MountTargetResource1:
Type: AWS::EFS::MountTarget
Properties:
FileSystemId: !Ref EfsFileStorage
SubnetId: !Ref PublicSubnetOne
SecurityGroups:
- !Ref EFSServerSecurityGroup
MountTargetResource2:
Type: AWS::EFS::MountTarget
Properties:
FileSystemId: !Ref EfsFileStorage
SubnetId: !Ref PublicSubnetTwo
SecurityGroups:
- !Ref EFSServerSecurityGroup
Outputs:
EfsFileStorageId:
    Description: 'The ID of the EFS file system used for build data.'
Value: !Ref EfsFileStorage
Export:
Name: !Sub ${EnvironmentName}:EfsFileStorageId
ClusterName:
Description: The name of the ECS cluster
Value: !Ref 'ECSCluster'
Export:
Name: !Sub ${EnvironmentName}:ClusterName
AutoscalingRole:
Description: The ARN of the role used for autoscaling
Value: !GetAtt 'AutoscalingRole.Arn'
Export:
Name: !Sub ${EnvironmentName}:AutoscalingRole
ECSRole:
Description: The ARN of the ECS role
Value: !GetAtt 'ECSRole.Arn'
Export:
Name: !Sub ${EnvironmentName}:ECSRole
ECSTaskExecutionRole:
    Description: The ARN of the ECS task execution role
Value: !GetAtt 'ECSTaskExecutionRole.Arn'
Export:
Name: !Sub ${EnvironmentName}:ECSTaskExecutionRole
DeleteCFNLambdaExecutionRole:
    Description: Lambda execution role for cleaning up CloudFormation stacks
Value: !GetAtt 'DeleteCFNLambdaExecutionRole.Arn'
Export:
Name: !Sub ${EnvironmentName}:DeleteCFNLambdaExecutionRole
CloudWatchIAMRole:
Description: The ARN of the CloudWatch role for subscription filter
Value: !GetAtt 'CloudWatchIAMRole.Arn'
Export:
Name: !Sub ${EnvironmentName}:CloudWatchIAMRole
VpcId:
Description: The ID of the VPC that this stack is deployed in
Value: !Ref 'VPC'
Export:
Name: !Sub ${EnvironmentName}:VpcId
PublicSubnetOne:
Description: Public subnet one
Value: !Ref 'PublicSubnetOne'
Export:
Name: !Sub ${EnvironmentName}:PublicSubnetOne
PublicSubnetTwo:
Description: Public subnet two
Value: !Ref 'PublicSubnetTwo'
Export:
Name: !Sub ${EnvironmentName}:PublicSubnetTwo
ContainerSecurityGroup:
Description: A security group used to allow Fargate containers to receive traffic
Value: !Ref 'ContainerSecurityGroup'
Export:
Name: !Sub ${EnvironmentName}:ContainerSecurityGroup

View File

@ -0,0 +1,221 @@
AWSTemplateFormatVersion: 2010-09-09
Description: >-
AWS Fargate cluster that can span public and private subnets. Supports public
facing load balancers, private internal load balancers, and both internal and
external service discovery namespaces.
Parameters:
EnvironmentName:
Type: String
Default: development
    Description: 'Your deployment environment: DEV, QA, PROD'
ServiceName:
Type: String
Default: example
Description: A name for the service
ImageUrl:
Type: String
Default: nginx
Description: >-
The url of a docker image that contains the application process that will
handle the traffic for this service
ContainerPort:
Type: Number
Default: 80
Description: What port number the application inside the docker container is binding to
ContainerCpu:
Type: Number
Default: 1024
Description: How much CPU to give the container. 1024 is 1 CPU
ContainerMemory:
Type: Number
Default: 2048
Description: How much memory in megabytes to give the container
BUILDGUID:
Type: String
Default: ''
Command:
Type: String
Default: 'ls'
EntryPoint:
Type: String
Default: '/bin/sh'
WorkingDirectory:
Type: String
Default: '/efsdata/'
Role:
Type: String
Default: ''
Description: >-
(Optional) An IAM role to give the service's containers if the code within
needs to access other AWS resources like S3 buckets, DynamoDB tables, etc
EFSMountDirectory:
Type: String
Default: '/efsdata'
# template secrets p1 - input
Mappings:
SubnetConfig:
VPC:
CIDR: 10.0.0.0/16
PublicOne:
CIDR: 10.0.0.0/24
PublicTwo:
CIDR: 10.0.1.0/24
Conditions:
HasCustomRole: !Not
- !Equals
- Ref: Role
- ''
Resources:
LogGroup:
Type: 'AWS::Logs::LogGroup'
Properties:
LogGroupName: !Ref ServiceName
Metadata:
'AWS::CloudFormation::Designer':
id: aece53ae-b82d-4267-bc16-ed964b05db27
SubscriptionFilter:
Type: 'AWS::Logs::SubscriptionFilter'
Properties:
FilterPattern: ''
RoleArn:
'Fn::ImportValue': !Sub '${EnvironmentName}:CloudWatchIAMRole'
LogGroupName: !Ref ServiceName
DestinationArn:
'Fn::GetAtt':
- KinesisStream
- Arn
Metadata:
'AWS::CloudFormation::Designer':
id: 7f809e91-9e5d-4678-98c1-c5085956c480
DependsOn:
- LogGroup
- KinesisStream
KinesisStream:
Type: 'AWS::Kinesis::Stream'
Properties:
Name: !Ref ServiceName
ShardCount: 1
Metadata:
'AWS::CloudFormation::Designer':
id: c6f18447-b879-4696-8873-f981b2cedd2b
# template secrets p2 - secret
TaskDefinition:
Type: 'AWS::ECS::TaskDefinition'
Properties:
Family: !Ref ServiceName
Cpu: !Ref ContainerCpu
Memory: !Ref ContainerMemory
NetworkMode: awsvpc
Volumes:
- Name: efs-data
EFSVolumeConfiguration:
FilesystemId:
'Fn::ImportValue': !Sub '${EnvironmentName}:EfsFileStorageId'
TransitEncryption: ENABLED
RequiresCompatibilities:
- FARGATE
ExecutionRoleArn:
'Fn::ImportValue': !Sub '${EnvironmentName}:ECSTaskExecutionRole'
TaskRoleArn:
'Fn::If':
- HasCustomRole
- !Ref Role
- !Ref 'AWS::NoValue'
ContainerDefinitions:
- Name: !Ref ServiceName
Cpu: !Ref ContainerCpu
Memory: !Ref ContainerMemory
Image: !Ref ImageUrl
EntryPoint:
Fn::Split:
- ','
- !Ref EntryPoint
Command:
Fn::Split:
- ','
- !Ref Command
WorkingDirectory: !Ref WorkingDirectory
Environment:
- Name: ALLOW_EMPTY_PASSWORD
Value: 'yes'
# template - env vars
MountPoints:
- SourceVolume: efs-data
ContainerPath: !Ref EFSMountDirectory
ReadOnly: false
Secrets:
# template secrets p3 - container def
LogConfiguration:
LogDriver: awslogs
Options:
awslogs-group: !Ref ServiceName
awslogs-region: !Ref 'AWS::Region'
awslogs-stream-prefix: !Ref ServiceName
Metadata:
'AWS::CloudFormation::Designer':
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
DependsOn:
- LogGroup
Metadata:
'AWS::CloudFormation::Designer':
dabb0116-abe0-48a6-a8af-cf9111c879a5:
size:
width: 60
height: 60
position:
x: 270
'y': 90
z: 1
embeds: []
dependson:
- aece53ae-b82d-4267-bc16-ed964b05db27
c6f18447-b879-4696-8873-f981b2cedd2b:
size:
width: 60
height: 60
position:
x: 270
'y': 210
z: 1
embeds: []
7f809e91-9e5d-4678-98c1-c5085956c480:
size:
width: 60
height: 60
position:
x: 60
'y': 300
z: 1
embeds: []
dependson:
- aece53ae-b82d-4267-bc16-ed964b05db27
- c6f18447-b879-4696-8873-f981b2cedd2b
aece53ae-b82d-4267-bc16-ed964b05db27:
size:
width: 150
height: 150
position:
x: 60
'y': 90
z: 1
embeds: []
4d2da56c-3643-46b8-aaee-e46e19f95fcc:
source:
id: 7f809e91-9e5d-4678-98c1-c5085956c480
target:
id: aece53ae-b82d-4267-bc16-ed964b05db27
z: 11
14eb957b-f094-4653-93c4-77b2f851953c:
source:
id: 7f809e91-9e5d-4678-98c1-c5085956c480
target:
id: c6f18447-b879-4696-8873-f981b2cedd2b
z: 12
85c57444-e5bb-4230-bc85-e545cd4558f6:
source:
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
target:
id: aece53ae-b82d-4267-bc16-ed964b05db27
z: 13

View File

@ -1,12 +1,9 @@
import * as AWS from 'aws-sdk';
class RemoteBuilderTaskDef {
class CloudRunnerAWSTaskDef {
public taskDefStackName!: string;
public taskDefCloudFormation!: string;
public taskDefStackNameTTL!: string;
public ttlCloudFormation!: string;
public taskDefResources: AWS.CloudFormation.StackResources | undefined;
public baseResources: AWS.CloudFormation.StackResources | undefined;
public logid!: string;
}
export default RemoteBuilderTaskDef;
export default CloudRunnerAWSTaskDef;

View File

@ -0,0 +1,98 @@
import * as SDK from 'aws-sdk';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
import AWSTaskRunner from './aws-task-runner';
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
import BuildParameters from '../../build-parameters';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { AWSJobStack } from './aws-job-stack';
import { AWSBaseStack } from './aws-base-stack';
import { Input } from '../..';
class AWSBuildEnvironment implements CloudRunnerProviderInterface {
private baseStackName: string;
constructor(buildParameters: BuildParameters) {
this.baseStackName = buildParameters.awsBaseStackName;
}
async cleanupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
buildParameters: BuildParameters,
// eslint-disable-next-line no-unused-vars
branchName: string,
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {}
async setupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
buildParameters: BuildParameters,
// eslint-disable-next-line no-unused-vars
branchName: string,
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {}
async runTask(
buildGuid: string,
image: string,
commands: string,
mountdir: string,
workingdir: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
): Promise<string> {
process.env.AWS_REGION = Input.region;
const ECS = new SDK.ECS();
const CF = new SDK.CloudFormation();
CloudRunnerLogger.log(`AWS Region: ${CF.config.region}`);
const entrypoint = ['/bin/sh'];
const startTimeMs = Date.now();
await new AWSBaseStack(this.baseStackName).setupBaseStack(CF);
const taskDef = await new AWSJobStack(this.baseStackName).setupCloudFormations(
CF,
buildGuid,
image,
entrypoint,
commands,
mountdir,
workingdir,
secrets,
);
let postRunTaskTimeMs;
let output = '';
try {
const postSetupStacksTimeMs = Date.now();
CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
output = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
postRunTaskTimeMs = Date.now();
CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
} finally {
await this.cleanupResources(CF, taskDef);
const postCleanupTimeMs = Date.now();
if (postRunTaskTimeMs !== undefined)
CloudRunnerLogger.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
}
return output;
}
async cleanupResources(CF: SDK.CloudFormation, taskDef: CloudRunnerAWSTaskDef) {
CloudRunnerLogger.log('Cleanup starting');
await CF.deleteStack({
StackName: taskDef.taskDefStackName,
}).promise();
await CF.waitFor('stackDeleteComplete', {
StackName: taskDef.taskDefStackName,
}).promise();
CloudRunnerLogger.log(`Deleted Stack: ${taskDef.taskDefStackName}`);
CloudRunnerLogger.log('Cleanup complete');
}
}
export default AWSBuildEnvironment;

View File

@ -0,0 +1,3 @@
export class CloudRunnerStatics {
public static readonly logPrefix = `Cloud-Runner-System`;
}

View File

@ -0,0 +1,50 @@
import { BuildParameters, ImageTag } from '..';
import CloudRunner from './cloud-runner';
import Input from '../input';
import { CloudRunnerStatics } from './cloud-runner-statics';
import { TaskParameterSerializer } from './services/task-parameter-serializer';
import UnityVersioning from '../unity-versioning';
describe('Cloud Runner', () => {
it('responds', () => {});
});
describe('Cloud Runner', () => {
const testSecretName = 'testSecretName';
const testSecretValue = 'testSecretValue';
if (Input.cloudRunnerTests) {
it('All build parameters sent to cloud runner as env vars', async () => {
Input.cliOptions = {
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.read('test-project'),
customJob: `
- name: 'step 1'
image: 'alpine'
commands: 'printenv'
secrets:
- name: '${testSecretName}'
value: '${testSecretValue}'
`,
};
Input.githubInputEnabled = false;
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
const file = await CloudRunner.run(buildParameter, baseImage.toString());
expect(file).toContain(JSON.stringify(buildParameter));
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
const newLinePurgedFile = file
.replace(/\s+/g, '')
.replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
for (const element of environmentVariables) {
if (element.value !== undefined && typeof element.value !== 'function') {
if (typeof element.value === `string`) {
element.value = element.value.replace(/\s+/g, '');
}
expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
}
}
Input.githubInputEnabled = true;
}, 1000000);
}
});

View File

@ -0,0 +1,72 @@
import AWSBuildPlatform from './aws';
import { BuildParameters } from '..';
import { CloudRunnerState } from './state/cloud-runner-state';
import Kubernetes from './k8s';
import CloudRunnerLogger from './services/cloud-runner-logger';
import { CloudRunnerStepState } from './state/cloud-runner-step-state';
import { WorkflowCompositionRoot } from './workflows/workflow-composition-root';
import { CloudRunnerError } from './error/cloud-runner-error';
import { TaskParameterSerializer } from './services/task-parameter-serializer';
import * as core from '@actions/core';
class CloudRunner {
private static setup(buildParameters: BuildParameters) {
CloudRunnerLogger.setup();
CloudRunnerState.setup(buildParameters);
CloudRunner.setupBuildPlatform();
const parameters = TaskParameterSerializer.readBuildEnvironmentVariables();
for (const element of parameters) {
core.setOutput(element.name, element.value);
}
}
private static setupBuildPlatform() {
switch (CloudRunnerState.buildParams.cloudRunnerCluster) {
case 'k8s':
CloudRunnerLogger.log('Cloud Runner platform selected Kubernetes');
CloudRunnerState.CloudRunnerProviderPlatform = new Kubernetes(CloudRunnerState.buildParams);
break;
default:
case 'aws':
CloudRunnerLogger.log('Cloud Runner platform selected AWS');
CloudRunnerState.CloudRunnerProviderPlatform = new AWSBuildPlatform(CloudRunnerState.buildParams);
break;
}
}
static async run(buildParameters: BuildParameters, baseImage: string) {
CloudRunner.setup(buildParameters);
try {
core.startGroup('Setup remote runner');
await CloudRunnerState.CloudRunnerProviderPlatform.setupSharedResources(
CloudRunnerState.buildParams.buildGuid,
CloudRunnerState.buildParams,
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
core.endGroup();
const output = await new WorkflowCompositionRoot().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
CloudRunnerState.defaultSecrets,
),
);
core.startGroup('Cleanup');
await CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(
CloudRunnerState.buildParams.buildGuid,
CloudRunnerState.buildParams,
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
CloudRunnerLogger.log(`Cleanup complete`);
core.endGroup();
return output;
} catch (error) {
core.endGroup();
await CloudRunnerError.handleException(error);
throw error;
}
}
}
export default CloudRunner;
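For orientation, the run entry point is driven the same way the test earlier in this change drives it; a minimal sketch (the wrapper function is illustrative):

import { BuildParameters, ImageTag } from '..';
import CloudRunner from './cloud-runner';

async function runCloudBuild(): Promise<string> {
  const buildParameters = await BuildParameters.create(); // resolves action inputs / CLI options
  const baseImage = new ImageTag(buildParameters); // Unity editor image tag for this build
  return CloudRunner.run(buildParameters, baseImage.toString());
}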

View File

@ -0,0 +1,16 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CloudRunnerState } from '../state/cloud-runner-state';
import * as core from '@actions/core';
export class CloudRunnerError {
public static async handleException(error: unknown) {
CloudRunnerLogger.error(JSON.stringify(error, undefined, 4));
core.setFailed('Cloud Runner failed');
await CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(
CloudRunnerState.buildParams.buildGuid,
CloudRunnerState.buildParams,
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
}
}

View File

@ -0,0 +1,197 @@
import * as k8s from '@kubernetes/client-node';
import { BuildParameters, Output } from '../..';
import * as core from '@actions/core';
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import KubernetesStorage from './kubernetes-storage';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import KubernetesTaskRunner from './kubernetes-task-runner';
import KubernetesSecret from './kubernetes-secret';
import waitUntil from 'async-wait-until';
import KubernetesJobSpecFactory from './kubernetes-job-spec-factory';
import KubernetesServiceAccount from './kubernetes-service-account';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CoreV1Api } from '@kubernetes/client-node';
class Kubernetes implements CloudRunnerProviderInterface {
private kubeConfig: k8s.KubeConfig;
private kubeClient: k8s.CoreV1Api;
private kubeClientBatch: k8s.BatchV1Api;
private buildGuid: string = '';
private buildParameters: BuildParameters;
private pvcName: string = '';
private secretName: string = '';
private jobName: string = '';
private namespace: string;
private podName: string = '';
private containerName: string = '';
private cleanupCronJobName: string = '';
private serviceAccountName: string = '';
constructor(buildParameters: BuildParameters) {
this.kubeConfig = new k8s.KubeConfig();
this.kubeConfig.loadFromDefault();
this.kubeClient = this.kubeConfig.makeApiClient(k8s.CoreV1Api);
this.kubeClientBatch = this.kubeConfig.makeApiClient(k8s.BatchV1Api);
CloudRunnerLogger.log('Loaded default Kubernetes configuration for this environment');
this.namespace = 'default';
this.buildParameters = buildParameters;
}
public async setupSharedResources(
buildGuid: string,
buildParameters: BuildParameters,
// eslint-disable-next-line no-unused-vars
branchName: string,
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
try {
this.pvcName = `unity-builder-pvc-${buildGuid}`;
this.cleanupCronJobName = `unity-builder-cronjob-${buildGuid}`;
this.serviceAccountName = `service-account-${buildGuid}`;
await KubernetesStorage.createPersistentVolumeClaim(
buildParameters,
this.pvcName,
this.kubeClient,
this.namespace,
);
await KubernetesServiceAccount.createServiceAccount(this.serviceAccountName, this.namespace, this.kubeClient);
} catch (error) {
throw error;
}
}
async runTask(
buildGuid: string,
image: string,
commands: string,
mountdir: string,
workingdir: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
): Promise<string> {
try {
// setup
this.buildGuid = buildGuid;
this.secretName = `build-credentials-${buildGuid}`;
this.jobName = `unity-builder-job-${buildGuid}`;
this.containerName = `main`;
await KubernetesSecret.createSecret(secrets, this.secretName, this.namespace, this.kubeClient);
const jobSpec = KubernetesJobSpecFactory.getJobSpec(
commands,
image,
mountdir,
workingdir,
environment,
secrets,
this.buildGuid,
this.buildParameters,
this.secretName,
this.pvcName,
this.jobName,
k8s,
);
//run
const jobResult = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
CloudRunnerLogger.log(`Creating build job ${JSON.stringify(jobResult.body.metadata, undefined, 4)}`);
await new Promise((promise) => setTimeout(promise, 5000));
CloudRunnerLogger.log('Job created');
this.setPodNameAndContainerName(await Kubernetes.findPodFromJob(this.kubeClient, this.jobName, this.namespace));
CloudRunnerLogger.log('Watching pod until running');
let output = '';
// eslint-disable-next-line no-constant-condition
while (true) {
try {
await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
CloudRunnerLogger.log('Pod running, streaming logs');
output = await KubernetesTaskRunner.runTask(
this.kubeConfig,
this.kubeClient,
this.jobName,
this.podName,
'main',
this.namespace,
CloudRunnerLogger.log,
);
break;
} catch (error: any) {
if (error.message.includes(`HTTP`)) {
continue;
} else {
throw error;
}
}
}
await this.cleanupTaskResources();
return output;
} catch (error) {
CloudRunnerLogger.log('Running job failed');
core.error(JSON.stringify(error, undefined, 4));
await this.cleanupTaskResources();
throw error;
}
}
setPodNameAndContainerName(pod: k8s.V1Pod) {
this.podName = pod.metadata?.name || '';
this.containerName = pod.status?.containerStatuses?.[0].name || '';
}
async cleanupTaskResources() {
CloudRunnerLogger.log('cleaning up');
try {
await this.kubeClientBatch.deleteNamespacedJob(this.jobName, this.namespace);
await this.kubeClient.deleteNamespacedPod(this.podName, this.namespace);
await this.kubeClient.deleteNamespacedSecret(this.secretName, this.namespace);
await new Promise((promise) => setTimeout(promise, 5000));
} catch (error) {
CloudRunnerLogger.log('Failed to cleanup, error:');
core.error(JSON.stringify(error, undefined, 4));
CloudRunnerLogger.log('Abandoning cleanup, build error:');
throw error;
}
try {
await waitUntil(
async () => {
const jobBody = (await this.kubeClientBatch.readNamespacedJob(this.jobName, this.namespace)).body;
const podBody = (await this.kubeClient.readNamespacedPod(this.podName, this.namespace)).body;
return (jobBody === null || jobBody.status?.active === 0) && podBody === null;
},
{
timeout: 500000,
intervalBetweenAttempts: 15000,
},
);
// eslint-disable-next-line no-empty
} catch {}
}
async cleanupSharedResources(
buildGuid: string,
buildParameters: BuildParameters,
// eslint-disable-next-line no-unused-vars
branchName: string,
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
CloudRunnerLogger.log(`deleting PVC`);
await this.kubeClient.deleteNamespacedPersistentVolumeClaim(this.pvcName, this.namespace);
await Output.setBuildVersion(buildParameters.buildVersion);
// eslint-disable-next-line unicorn/no-process-exit
process.exit();
}
static async findPodFromJob(kubeClient: CoreV1Api, jobName: string, namespace: string) {
const namespacedPods = await kubeClient.listNamespacedPod(namespace);
const pod = namespacedPods.body.items.find((x) => x.metadata?.labels?.['job-name'] === jobName);
if (pod === undefined) {
throw new Error("pod with job-name label doesn't exist");
}
return pod;
}
}
export default Kubernetes;

View File

@ -0,0 +1,161 @@
import { V1EnvVar, V1EnvVarSource, V1SecretKeySelector } from '@kubernetes/client-node';
import BuildParameters from '../../build-parameters';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
class KubernetesJobSpecFactory {
static getJobSpec(
command: string,
image: string,
mountdir: string,
workingDirectory: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
buildGuid: string,
buildParameters: BuildParameters,
secretName,
pvcName,
jobName,
k8s,
) {
environment.push(
...[
{
name: 'GITHUB_SHA',
value: buildGuid,
},
{
name: 'GITHUB_WORKSPACE',
value: '/data/repo',
},
{
name: 'PROJECT_PATH',
value: buildParameters.projectPath,
},
{
name: 'BUILD_PATH',
value: buildParameters.buildPath,
},
{
name: 'BUILD_FILE',
value: buildParameters.buildFile,
},
{
name: 'BUILD_NAME',
value: buildParameters.buildName,
},
{
name: 'BUILD_METHOD',
value: buildParameters.buildMethod,
},
{
name: 'CUSTOM_PARAMETERS',
value: buildParameters.customParameters,
},
{
name: 'CHOWN_FILES_TO',
value: buildParameters.chownFilesTo,
},
{
name: 'BUILD_TARGET',
value: buildParameters.platform,
},
{
name: 'ANDROID_VERSION_CODE',
value: buildParameters.androidVersionCode.toString(),
},
{
name: 'ANDROID_KEYSTORE_NAME',
value: buildParameters.androidKeystoreName,
},
{
name: 'ANDROID_KEYALIAS_NAME',
value: buildParameters.androidKeyaliasName,
},
],
);
const job = new k8s.V1Job();
job.apiVersion = 'batch/v1';
job.kind = 'Job';
job.metadata = {
name: jobName,
labels: {
app: 'unity-builder',
buildGuid,
},
};
job.spec = {
backoffLimit: 0,
template: {
spec: {
volumes: [
{
name: 'build-mount',
persistentVolumeClaim: {
claimName: pvcName,
},
},
],
containers: [
{
name: 'main',
image,
command: ['/bin/sh'],
args: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(command, CloudRunnerState.buildParams)],
workingDir: `${workingDirectory}`,
resources: {
requests: {
memory: buildParameters.cloudRunnerMemory,
cpu: buildParameters.cloudRunnerCpu,
},
},
env: [
...environment.map((x) => {
const environmentVariable = new V1EnvVar();
environmentVariable.name = x.name;
environmentVariable.value = x.value;
return environmentVariable;
}),
...secrets.map((x) => {
const secret = new V1EnvVarSource();
secret.secretKeyRef = new V1SecretKeySelector();
secret.secretKeyRef.key = x.ParameterKey;
secret.secretKeyRef.name = secretName;
const environmentVariable = new V1EnvVar();
environmentVariable.name = x.EnvironmentVariable;
environmentVariable.valueFrom = secret;
return environmentVariable;
}),
],
volumeMounts: [
{
name: 'build-mount',
mountPath: `/${mountdir}`,
},
],
lifecycle: {
preStop: {
exec: {
command: [
                      '/bin/bash',
'-c',
`cd /data/builder/action/steps;
chmod +x /return_license.sh;
/return_license.sh;`,
],
},
},
},
},
],
restartPolicy: 'Never',
},
},
};
return job;
}
}
export default KubernetesJobSpecFactory;

View File

@ -0,0 +1,28 @@
import { CoreV1Api } from '@kubernetes/client-node';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import * as k8s from '@kubernetes/client-node';
const base64 = require('base-64');
class KubernetesSecret {
static async createSecret(
secrets: CloudRunnerSecret[],
secretName: string,
namespace: string,
kubeClient: CoreV1Api,
) {
const secret = new k8s.V1Secret();
secret.apiVersion = 'v1';
secret.kind = 'Secret';
secret.type = 'Opaque';
secret.metadata = {
name: secretName,
};
secret.data = {};
for (const buildSecret of secrets) {
secret.data[buildSecret.ParameterKey] = base64.encode(buildSecret.ParameterValue);
}
return kubeClient.createNamespacedSecret(namespace, secret);
}
}
export default KubernetesSecret;

View File

@ -0,0 +1,17 @@
import { CoreV1Api } from '@kubernetes/client-node';
import * as k8s from '@kubernetes/client-node';
class KubernetesServiceAccount {
static async createServiceAccount(serviceAccountName: string, namespace: string, kubeClient: CoreV1Api) {
const serviceAccount = new k8s.V1ServiceAccount();
serviceAccount.apiVersion = 'v1';
serviceAccount.kind = 'ServiceAccount';
serviceAccount.metadata = {
name: serviceAccountName,
};
serviceAccount.automountServiceAccountToken = false;
return kubeClient.createNamespacedServiceAccount(namespace, serviceAccount);
}
}
export default KubernetesServiceAccount;

View File

@ -0,0 +1,114 @@
import waitUntil from 'async-wait-until';
import * as core from '@actions/core';
import * as k8s from '@kubernetes/client-node';
import BuildParameters from '../../build-parameters';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import YAML from 'yaml';
class KubernetesStorage {
public static async createPersistentVolumeClaim(
buildParameters: BuildParameters,
pvcName: string,
kubeClient: k8s.CoreV1Api,
namespace: string,
) {
if (buildParameters.kubeVolume) {
CloudRunnerLogger.log(buildParameters.kubeVolume);
pvcName = buildParameters.kubeVolume;
return;
}
const pvcList = (await kubeClient.listNamespacedPersistentVolumeClaim(namespace)).body.items.map(
(x) => x.metadata?.name,
);
CloudRunnerLogger.log(`Current PVCs in namespace ${namespace}`);
CloudRunnerLogger.log(JSON.stringify(pvcList, undefined, 4));
if (pvcList.includes(pvcName)) {
CloudRunnerLogger.log(`pvc ${pvcName} already exists`);
core.setOutput('volume', pvcName);
return;
}
CloudRunnerLogger.log(`Creating PVC ${pvcName} (does not exist)`);
const result = await KubernetesStorage.createPVC(pvcName, buildParameters, kubeClient, namespace);
await KubernetesStorage.handleResult(result, kubeClient, namespace, pvcName);
}
public static async getPVCPhase(kubeClient: k8s.CoreV1Api, name: string, namespace: string) {
try {
return (await kubeClient.readNamespacedPersistentVolumeClaim(name, namespace)).body.status?.phase;
} catch (error) {
core.error('Failed to get PVC phase');
core.error(JSON.stringify(error, undefined, 4));
throw error;
}
}
public static async watchUntilPVCNotPending(kubeClient: k8s.CoreV1Api, name: string, namespace: string) {
try {
      CloudRunnerLogger.log(`Watching PVC ${name} in namespace ${namespace} until it is no longer Pending`);
CloudRunnerLogger.log(`${await this.getPVCPhase(kubeClient, name, namespace)}`);
await waitUntil(
async () => {
return (await this.getPVCPhase(kubeClient, name, namespace)) !== 'Pending';
},
{
timeout: 500000,
intervalBetweenAttempts: 15000,
},
);
} catch (error: any) {
core.error('Failed to watch PVC');
core.error(error.toString());
core.error(
`PVC Body: ${JSON.stringify(
(await kubeClient.readNamespacedPersistentVolumeClaim(name, namespace)).body,
undefined,
4,
)}`,
);
throw error;
}
}
private static async createPVC(
pvcName: string,
buildParameters: BuildParameters,
kubeClient: k8s.CoreV1Api,
namespace: string,
) {
const pvc = new k8s.V1PersistentVolumeClaim();
pvc.apiVersion = 'v1';
pvc.kind = 'PersistentVolumeClaim';
pvc.metadata = {
name: pvcName,
};
pvc.spec = {
accessModes: ['ReadWriteOnce'],
storageClassName: process.env.K8s_STORAGE_CLASS || 'standard',
resources: {
requests: {
storage: buildParameters.kubeVolumeSize,
},
},
};
    // Note: this currently only validates that the custom PVC spec parses as YAML;
    // the parsed value is not applied to the claim created below.
    if (process.env.K8s_STORAGE_PVC_SPEC) {
      YAML.parse(process.env.K8s_STORAGE_PVC_SPEC);
    }
const result = await kubeClient.createNamespacedPersistentVolumeClaim(namespace, pvc);
return result;
}
private static async handleResult(
result: { response: import('http').IncomingMessage; body: k8s.V1PersistentVolumeClaim },
kubeClient: k8s.CoreV1Api,
namespace: string,
pvcName: string,
) {
const name = result.body.metadata?.name || '';
CloudRunnerLogger.log(`PVC ${name} created`);
await this.watchUntilPVCNotPending(kubeClient, name, namespace);
CloudRunnerLogger.log(`PVC ${name} is ready and not pending`);
core.setOutput('volume', pvcName);
}
}
export default KubernetesStorage;

View File

@ -0,0 +1,104 @@
import { CoreV1Api, KubeConfig, Log } from '@kubernetes/client-node';
import { Writable } from 'stream';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import * as core from '@actions/core';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import waitUntil from 'async-wait-until';
import { Input } from '../..';
class KubernetesTaskRunner {
static async runTask(
kubeConfig: KubeConfig,
kubeClient: CoreV1Api,
jobName: string,
podName: string,
containerName: string,
namespace: string,
logCallback: any,
) {
CloudRunnerLogger.log(`Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace}`);
const stream = new Writable();
let output = '';
let didStreamAnyLogs: boolean = false;
stream._write = (chunk, encoding, next) => {
didStreamAnyLogs = true;
      let message = chunk.toString().trimEnd();
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
if (Input.cloudRunnerTests) {
output += message;
}
logCallback(message);
next();
};
const logOptions = {
follow: true,
pretty: false,
previous: false,
};
try {
const resultError = await new Promise((resolve) =>
new Log(kubeConfig).log(namespace, podName, containerName, stream, resolve, logOptions),
);
stream.destroy();
if (resultError) {
throw resultError;
}
if (!didStreamAnyLogs) {
core.error('Failed to stream any logs, listing namespace events, check for an error with the container');
core.error(
JSON.stringify(
{
events: (await kubeClient.listNamespacedEvent(namespace)).body.items
.filter((x) => {
return x.involvedObject.name === podName || x.involvedObject.name === jobName;
})
.map((x) => {
return {
type: x.involvedObject.kind,
name: x.involvedObject.name,
message: x.message,
};
}),
},
undefined,
4,
),
);
throw new Error(`No logs streamed from k8s`);
}
} catch (error) {
if (stream) {
stream.destroy();
}
throw error;
}
CloudRunnerLogger.log('end of log stream');
return output;
}
static async watchUntilPodRunning(kubeClient: CoreV1Api, podName: string, namespace: string) {
let success: boolean = false;
CloudRunnerLogger.log(`Watching ${podName} ${namespace}`);
await waitUntil(
async () => {
const status = await kubeClient.readNamespacedPodStatus(podName, namespace);
const phase = status?.body.status?.phase;
success = phase === 'Running';
CloudRunnerLogger.log(
`${status.body.status?.phase} ${status.body.status?.conditions?.[0].reason || ''} ${
status.body.status?.conditions?.[0].message || ''
}`,
);
if (success || phase !== 'Pending') return true;
return false;
},
{
timeout: 2000000,
intervalBetweenAttempts: 15000,
},
);
return success;
}
}
export default KubernetesTaskRunner;

View File

@ -0,0 +1,40 @@
import { BuildParameters, Input } from '../..';
import YAML from 'yaml';
import CloudRunnerSecret from './cloud-runner-secret';
export class CloudRunnerBuildCommandProcessor {
public static ProcessCommands(commands: string, buildParameters: BuildParameters): string {
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`all`));
return `echo "---"
echo "start cloud runner init"
${Input.cloudRunnerTests ? '' : '#'} printenv
echo "start cloud runner job"
${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
${commands}
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
echo "end of cloud runner job
---${buildParameters.logId}"
`;
}
public static getHooks(): Hook[] {
const experimentHooks = process.env.EXPERIMENTAL_HOOKS;
let output = new Array<Hook>();
if (experimentHooks && experimentHooks !== '') {
try {
output = YAML.parse(experimentHooks);
} catch (error) {
throw error;
}
}
return output.filter((x) => x.step !== undefined && x.hook !== undefined && x.hook.length > 0);
}
}
export class Hook {
public commands;
public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
public name;
public hook!: string[];
public step!: string[];
}
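For clarity, getHooks() above parses the EXPERIMENTAL_HOOKS environment variable as YAML into Hook objects, and ProcessCommands then filters them by step ('all') and hook ('before'/'after'). A minimal sketch of the shape that implies; the hook names and commands are hypothetical:

process.env.EXPERIMENTAL_HOOKS = `
- name: before-job-hook
  hook:
    - before
  step:
    - all
  commands: echo "runs before the job's commands"
- name: after-job-hook
  hook:
    - after
  step:
    - all
  commands: echo "runs after the job's commands"
`;
const hooks = CloudRunnerBuildCommandProcessor.getHooks(); // both hooks pass the step/hook filters used above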

View File

@ -0,0 +1,4 @@
class CloudRunnerConstants {
static alphabet = '0123456789abcdefghijklmnopqrstuvwxyz';
}
export default CloudRunnerConstants;

View File

@ -0,0 +1,5 @@
class CloudRunnerEnvironmentVariable {
public name!: string;
public value!: string;
}
export default CloudRunnerEnvironmentVariable;

View File

@ -0,0 +1,47 @@
import * as core from '@actions/core';
class CloudRunnerLogger {
private static timestamp: number;
private static globalTimestamp: number;
public static setup() {
this.timestamp = this.createTimestamp();
this.globalTimestamp = this.timestamp;
}
public static log(message: string) {
core.info(message);
}
public static logWarning(message: string) {
core.warning(message);
}
public static logLine(message: string) {
core.info(`${message}\n`);
}
public static error(message: string) {
core.error(message);
}
public static logWithTime(message: string) {
const newTimestamp = this.createTimestamp();
core.info(
`${message} (Since previous: ${this.calculateTimeDiff(
newTimestamp,
this.timestamp,
)}, Total time: ${this.calculateTimeDiff(newTimestamp, this.globalTimestamp)})`,
);
this.timestamp = newTimestamp;
}
private static calculateTimeDiff(x: number, y: number) {
return Math.floor((x - y) / 1000);
}
private static createTimestamp() {
return Date.now();
}
}
export default CloudRunnerLogger;

View File

@ -0,0 +1,10 @@
import { customAlphabet } from 'nanoid';
import CloudRunnerConstants from './cloud-runner-constants';
class CloudRunnerNamespace {
static generateBuildName(runNumber: string | number, platform: string) {
const nanoid = customAlphabet(CloudRunnerConstants.alphabet, 4);
return `${runNumber}-${platform.toLowerCase().replace('standalone', '')}-${nanoid()}`;
}
}
export default CloudRunnerNamespace;
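
A usage sketch (the 4-character suffix is random, values illustrative):

const buildName = CloudRunnerNamespace.generateBuildName(42, 'StandaloneLinux64');
// e.g. '42-linux64-a3f9': 'standalone' is stripped from the lower-cased platform and a nanoid suffix from the 0-9a-z alphabet is appended.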

View File

@ -0,0 +1,42 @@
import BuildParameters from '../../build-parameters';
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
import CloudRunnerSecret from './cloud-runner-secret';
export interface CloudRunnerProviderInterface {
cleanupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
buildParameters: BuildParameters,
// eslint-disable-next-line no-unused-vars
branchName: string,
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
);
setupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
buildParameters: BuildParameters,
// eslint-disable-next-line no-unused-vars
branchName: string,
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
);
runTask(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
image: string,
// eslint-disable-next-line no-unused-vars
commands: string,
// eslint-disable-next-line no-unused-vars
mountdir: string,
// eslint-disable-next-line no-unused-vars
workingdir: string,
// eslint-disable-next-line no-unused-vars
environment: CloudRunnerEnvironmentVariable[],
// eslint-disable-next-line no-unused-vars
secrets: CloudRunnerSecret[],
): Promise<string>;
}
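
To illustrate the contract, here is a hypothetical no-op provider, not part of this commit, that satisfies CloudRunnerProviderInterface; the class name is made up, the import paths assume it sits beside the interface, and the project's no-unused-vars lint rule may require the same eslint-disable comments used above:

import BuildParameters from '../../build-parameters';
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
import CloudRunnerSecret from './cloud-runner-secret';
import { CloudRunnerProviderInterface } from './cloud-runner-provider-interface';
export class NoOpCloudRunnerProvider implements CloudRunnerProviderInterface {
  async setupSharedResources(
    buildGuid: string,
    buildParameters: BuildParameters,
    branchName: string,
    defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
    // Nothing to provision for a no-op provider.
  }
  async cleanupSharedResources(
    buildGuid: string,
    buildParameters: BuildParameters,
    branchName: string,
    defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
    // Nothing to tear down.
  }
  async runTask(
    buildGuid: string,
    image: string,
    commands: string,
    mountdir: string,
    workingdir: string,
    environment: CloudRunnerEnvironmentVariable[],
    secrets: CloudRunnerSecret[],
  ): Promise<string> {
    // A real provider schedules `commands` inside `image` with the mount, environment and secrets, then returns the streamed log output.
    return `no-op: skipped ${image} for build ${buildGuid}`;
  }
}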

View File

@ -1,6 +1,6 @@
class RemoteBuilderSecret {
class CloudRunnerSecret {
public ParameterKey!: string;
public EnvironmentVariable!: string;
public ParameterValue!: string;
}
export default RemoteBuilderSecret;
export default CloudRunnerSecret;

View File

@ -0,0 +1,85 @@
import { Input } from '../..';
import ImageEnvironmentFactory from '../../image-environment-factory';
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerBuildCommandProcessor } from './cloud-runner-build-command-process';
export class TaskParameterSerializer {
public static readBuildEnvironmentVariables(): CloudRunnerEnvironmentVariable[] {
TaskParameterSerializer.setupDefaultSecrets();
return [
{
name: 'ContainerMemory',
value: CloudRunnerState.buildParams.cloudRunnerMemory,
},
{
name: 'ContainerCpu',
value: CloudRunnerState.buildParams.cloudRunnerCpu,
},
{
name: 'BUILD_TARGET',
value: CloudRunnerState.buildParams.platform,
},
...TaskParameterSerializer.serializeBuildParamsAndInput,
];
}
private static get serializeBuildParamsAndInput() {
let array = new Array();
array = TaskParameterSerializer.readBuildParameters(array);
array = TaskParameterSerializer.readInput(array);
const configurableHooks = CloudRunnerBuildCommandProcessor.getHooks();
const secrets = configurableHooks.map((x) => x.secrets).filter((x) => x !== undefined && x.length > 0);
if (secrets.length > 0) {
// eslint-disable-next-line unicorn/no-array-reduce
array.push(secrets.reduce((x, y) => [...x, ...y]));
}
array = array.filter(
(x) => x.value !== undefined && x.name !== '0' && x.value !== '' && x.name !== 'prototype' && x.name !== 'length',
);
array = array.map((x) => {
x.name = Input.ToEnvVarFormat(x.name);
x.value = `${x.value}`;
return x;
});
return array;
}
private static readBuildParameters(array: any[]) {
const keys = Object.keys(CloudRunnerState.buildParams);
for (const element of keys) {
array.push({
name: element,
value: CloudRunnerState.buildParams[element],
});
}
array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunnerState.buildParams) });
return array;
}
private static readInput(array: any[]) {
const input = Object.getOwnPropertyNames(Input);
for (const element of input) {
if (typeof Input[element] !== 'function' && array.filter((x) => x.name === element).length === 0) {
array.push({
name: element,
value: `${Input[element]}`,
});
}
}
return array;
}
private static setupDefaultSecrets() {
if (CloudRunnerState.defaultSecrets === undefined)
CloudRunnerState.defaultSecrets = ImageEnvironmentFactory.getEnvironmentVariables(
CloudRunnerState.buildParams,
).map((x) => {
return {
ParameterKey: x.name,
EnvironmentVariable: x.name,
ParameterValue: x.value,
};
});
}
}
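
For orientation, the kind of array readBuildEnvironmentVariables() produces (names are real, values illustrative):

const environment = TaskParameterSerializer.readBuildEnvironmentVariables();
// e.g.
//   { name: 'ContainerMemory', value: '750M' }
//   { name: 'ContainerCpu', value: '1.0' }
//   { name: 'BUILD_TARGET', value: 'StandaloneLinux64' }
//   ...plus every non-empty BuildParameters field and Input getter, with names passed through Input.ToEnvVarFormat.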

View File

@ -0,0 +1,81 @@
import path from 'path';
import { BuildParameters } from '../..';
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
import CloudRunnerSecret from '../services/cloud-runner-secret';
export class CloudRunnerState {
public static CloudRunnerProviderPlatform: CloudRunnerProviderInterface;
public static buildParams: BuildParameters;
public static defaultSecrets: CloudRunnerSecret[];
public static readonly repositoryFolder = 'repo';
// Only paths that are not built with path.join onto another "Full"-suffixed property need to start from the absolute root '/'.
public static get buildPathFull(): string {
return path.join(`/`, CloudRunnerState.buildVolumeFolder, CloudRunnerState.buildParams.buildGuid);
}
public static get cacheFolderFull(): string {
return path.join(
'/',
CloudRunnerState.buildVolumeFolder,
CloudRunnerState.cacheFolder,
CloudRunnerState.branchName,
);
}
static setup(buildParameters: BuildParameters) {
CloudRunnerState.buildParams = buildParameters;
}
public static get branchName(): string {
return CloudRunnerState.buildParams.branch;
}
public static get builderPathFull(): string {
return path.join(CloudRunnerState.buildPathFull, `builder`);
}
public static get repoPathFull(): string {
return path.join(CloudRunnerState.buildPathFull, CloudRunnerState.repositoryFolder);
}
public static get projectPathFull(): string {
return path.join(CloudRunnerState.repoPathFull, CloudRunnerState.buildParams.projectPath);
}
public static get libraryFolderFull(): string {
return path.join(CloudRunnerState.projectPathFull, `Library`);
}
public static get lfsDirectoryFull(): string {
return path.join(CloudRunnerState.repoPathFull, `.git`, `lfs`);
}
public static get purgeRemoteCaching(): boolean {
return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
}
public static get lfsCacheFolderFull() {
return path.join(CloudRunnerState.cacheFolderFull, `lfs`);
}
public static get libraryCacheFolderFull() {
return path.join(CloudRunnerState.cacheFolderFull, `Library`);
}
public static get unityBuilderRepoUrl(): string {
return `https://${CloudRunnerState.buildParams.githubToken}@github.com/game-ci/unity-builder.git`;
}
public static get targetBuildRepoUrl(): string {
return `https://${CloudRunnerState.buildParams.githubToken}@github.com/${CloudRunnerState.buildParams.githubRepo}.git`;
}
public static get buildVolumeFolder() {
return 'data';
}
public static get cacheFolder() {
return 'cache';
}
}
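
A worked example of how the path getters compose (the buildGuid, branch and projectPath values are illustrative):

// With buildVolumeFolder 'data', buildGuid '42-linux64-a3f9', branch 'main' and projectPath 'test-project':
//   CloudRunnerState.buildPathFull   -> '/data/42-linux64-a3f9'
//   CloudRunnerState.repoPathFull    -> '/data/42-linux64-a3f9/repo'
//   CloudRunnerState.projectPathFull -> '/data/42-linux64-a3f9/repo/test-project'
//   CloudRunnerState.cacheFolderFull -> '/data/cache/main'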

View File

@ -0,0 +1,13 @@
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerSecret from '../services/cloud-runner-secret';
export class CloudRunnerStepState {
public image: string;
public environment: CloudRunnerEnvironmentVariable[];
public secrets: CloudRunnerSecret[];
constructor(image: string, environmentVariables: CloudRunnerEnvironmentVariable[], secrets: CloudRunnerSecret[]) {
this.image = image;
this.environment = environmentVariables;
this.secrets = secrets;
}
}

View File

@ -0,0 +1,77 @@
import path from 'path';
import { Input } from '../..';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { StepInterface } from './step-interface';
export class BuildStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
return await BuildStep.BuildStep(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,
);
}
private static async BuildStep(
image: string,
environmentVariables: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
) {
CloudRunnerLogger.logLine(` `);
CloudRunnerLogger.logLine('Starting step 2/2 (build the Unity project)');
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`setup`));
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
`${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
export GITHUB_WORKSPACE="${CloudRunnerState.repoPathFull}"
cp -r "${path
.join(CloudRunnerState.builderPathFull, 'dist', 'default-build-script')
.replace(/\\/g, `/`)}" "/UnityBuilderAction"
cp -r "${path
.join(CloudRunnerState.builderPathFull, 'dist', 'platforms', 'ubuntu', 'entrypoint.sh')
.replace(/\\/g, `/`)}" "/entrypoint.sh"
cp -r "${path
.join(CloudRunnerState.builderPathFull, 'dist', 'platforms', 'ubuntu', 'steps')
.replace(/\\/g, `/`)}" "/steps"
chmod -R +x "/entrypoint.sh"
chmod -R +x "/steps"
/entrypoint.sh
apt-get update
apt-get install -y -q zip tree
cd "${CloudRunnerState.libraryFolderFull.replace(/\\/g, `/`)}/.."
zip -r "lib-${CloudRunnerState.buildParams.buildGuid}.zip" "Library"
mv "lib-${CloudRunnerState.buildParams.buildGuid}.zip" "${CloudRunnerState.cacheFolderFull.replace(
/\\/g,
`/`,
)}/Library"
cd "${CloudRunnerState.repoPathFull.replace(/\\/g, `/`)}"
${Input.cloudRunnerTests ? '' : '#'} tree -lh
zip -r "build-${CloudRunnerState.buildParams.buildGuid}.zip" "build"
${Input.cloudRunnerTests ? '' : '#'} tree -lh
${Input.cloudRunnerTests ? '' : '#'} tree -lh "${CloudRunnerState.cacheFolderFull.replace(/\\/g, `/`)}"
mv "build-${CloudRunnerState.buildParams.buildGuid}.zip" "${CloudRunnerState.cacheFolderFull.replace(
/\\/g,
`/`,
)}"
chmod +x ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)}
node ${path
.join(CloudRunnerState.builderPathFull, 'dist', `index.js`)
.replace(/\\/g, `/`)} -m cache-push "Library" "lib-${
CloudRunnerState.buildParams.buildGuid
}.zip" "${CloudRunnerState.cacheFolderFull.replace(/\\/g, `/`)}/Library"
${Input.cloudRunnerTests ? '' : '#'} tree -lh "${CloudRunnerState.cacheFolderFull}"
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
`,
`/${CloudRunnerState.buildVolumeFolder}`,
`/${CloudRunnerState.projectPathFull}`,
environmentVariables,
secrets,
);
}
}

View File

@ -0,0 +1,59 @@
import path from 'path';
import { Input } from '../..';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { StepInterface } from './step-interface';
export class SetupStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
return await SetupStep.downloadRepository(
  cloudRunnerStepState.image,
  cloudRunnerStepState.environment,
  cloudRunnerStepState.secrets,
);
}
private static async downloadRepository(
image: string,
environmentVariables: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
) {
try {
CloudRunnerLogger.log(` `);
CloudRunnerLogger.logLine('Starting step 1/2 (setup game files from repository)');
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`setup`));
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
`apk update -q
apk add git-lfs jq tree zip unzip nodejs -q
${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
mkdir -p ${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}
git clone -q -b ${CloudRunnerState.branchName} ${
CloudRunnerState.unityBuilderRepoUrl
} "${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}"
${Input.cloudRunnerTests ? '' : '#'} tree ${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}
chmod +x ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)}
node ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)} -m remote-cli
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
`,
`/${CloudRunnerState.buildVolumeFolder}`,
`/${CloudRunnerState.buildVolumeFolder}/`,
environmentVariables,
secrets,
);
} catch (error) {
CloudRunnerLogger.logLine(`Failed to download repository (step 1/2)`);
throw error;
}
}
}

View File

@ -0,0 +1,8 @@
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
export interface StepInterface {
run(
// eslint-disable-next-line no-unused-vars
cloudRunnerStepState: CloudRunnerStepState,
);
}

View File

@ -0,0 +1,68 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { BuildStep } from '../steps/build-step';
import { SetupStep } from '../steps/setup-step';
import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
import * as core from '@actions/core';
export class BuildAutomationWorkflow implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
return await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
}
private static async standardBuildAutomation(baseImage: any) {
CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);
core.startGroup('pre build steps');
let output = '';
if (CloudRunnerState.buildParams.preBuildSteps !== '') {
output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
}
core.endGroup();
CloudRunnerLogger.logWithTime('Configurable pre build step(s) time');
core.startGroup('setup');
output += await new SetupStep().run(
new CloudRunnerStepState(
'alpine/git',
TaskParameterSerializer.readBuildEnvironmentVariables(),
CloudRunnerState.defaultSecrets,
),
);
core.endGroup();
CloudRunnerLogger.logWithTime('Download repository step time');
core.startGroup('build');
output += await new BuildStep().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
CloudRunnerState.defaultSecrets,
),
);
core.endGroup();
CloudRunnerLogger.logWithTime('Build time');
core.startGroup('post build steps');
if (CloudRunnerState.buildParams.postBuildSteps !== '') {
output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
}
core.endGroup();
CloudRunnerLogger.logWithTime('Configurable post build step(s) time');
CloudRunnerLogger.log(`Cloud Runner finished running standard build automation`);
return output;
}
}

View File

@ -0,0 +1,46 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
import YAML from 'yaml';
import { Input } from '../..';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
export class CustomWorkflow {
public static async runCustomJob(buildSteps) {
CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
if (Input.cloudRunnerTests) {
CloudRunnerLogger.log(`Parsing build steps: ${buildSteps}`);
}
try {
buildSteps = YAML.parse(buildSteps);
let output = '';
for (const step of buildSteps) {
const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
const secret: CloudRunnerSecret = {
ParameterKey: x.name,
EnvironmentVariable: Input.ToEnvVarFormat(x.name),
ParameterValue: x.value,
};
return secret;
});
output += await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
step['image'],
step['commands'],
`/${CloudRunnerState.buildVolumeFolder}`,
`/${CloudRunnerState.buildVolumeFolder}/`,
TaskParameterSerializer.readBuildEnvironmentVariables(),
[...CloudRunnerState.defaultSecrets, ...stepSecrets],
);
}
return output;
} catch (error) {
CloudRunnerLogger.log(`failed to parse a custom job "${buildSteps}"`);
throw error;
}
}
}
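
A sketch of the YAML shape runCustomJob() expects for preBuildSteps, postBuildSteps and customJob; the step name, image, command and secret below are illustrative:

const exampleCustomJob = `
- name: print-environment
  image: ubuntu
  commands: printenv
  secrets:
    - name: mySecret
      value: example-value
`;
// await CustomWorkflow.runCustomJob(exampleCustomJob);
// Each step is sent to CloudRunnerState.CloudRunnerProviderPlatform.runTask with the default secrets plus its own.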

View File

@ -0,0 +1,42 @@
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
import { BuildAutomationWorkflow } from './build-automation-workflow';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import { SetupStep } from '../steps/setup-step';
export class WorkflowCompositionRoot implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
return await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
}
private static async runJob(baseImage: any) {
if (CloudRunnerState.buildParams.customJob === `setup`) {
return await new SetupStep().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
CloudRunnerState.defaultSecrets,
),
);
} else if (CloudRunnerState.buildParams.customJob !== '') {
return await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
}
return await new BuildAutomationWorkflow().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
CloudRunnerState.defaultSecrets,
),
);
}
}

View File

@ -0,0 +1,8 @@
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
export interface WorkflowInterface {
run(
// eslint-disable-next-line no-unused-vars
cloudRunnerStepState: CloudRunnerStepState,
);
}

View File

@ -1,5 +1,6 @@
import { exec } from '@actions/exec';
import ImageTag from './image-tag';
import ImageEnvironmentFactory from './image-environment-factory';
class Docker {
static async build(buildParameters, silent = false) {
@ -18,31 +19,7 @@ class Docker {
}
static async run(image, parameters, silent = false) {
const {
version,
workspace,
unitySerial,
runnerTempPath,
platform,
projectPath,
buildName,
buildPath,
buildFile,
buildMethod,
buildVersion,
androidVersionCode,
androidKeystoreName,
androidKeystoreBase64,
androidKeystorePass,
androidKeyaliasName,
androidKeyaliasPass,
androidTargetSdkVersion,
androidSdkManagerParameters,
customParameters,
sshAgent,
gitPrivateToken,
chownFilesTo,
} = parameters;
const { workspace, unitySerial, runnerTempPath, sshAgent } = parameters;
const baseOsSpecificArguments = this.getBaseOsSpecificArguments(
process.platform,
@ -55,45 +32,7 @@ class Docker {
const runCommand = `docker run \
--workdir /github/workspace \
--rm \
--env UNITY_LICENSE \
--env UNITY_LICENSE_FILE \
--env UNITY_EMAIL \
--env UNITY_PASSWORD \
--env UNITY_VERSION="${version}" \
--env USYM_UPLOAD_AUTH_TOKEN \
--env PROJECT_PATH="${projectPath}" \
--env BUILD_TARGET="${platform}" \
--env BUILD_NAME="${buildName}" \
--env BUILD_PATH="${buildPath}" \
--env BUILD_FILE="${buildFile}" \
--env BUILD_METHOD="${buildMethod}" \
--env VERSION="${buildVersion}" \
--env ANDROID_VERSION_CODE="${androidVersionCode}" \
--env ANDROID_KEYSTORE_NAME="${androidKeystoreName}" \
--env ANDROID_KEYSTORE_BASE64="${androidKeystoreBase64}" \
--env ANDROID_KEYSTORE_PASS="${androidKeystorePass}" \
--env ANDROID_KEYALIAS_NAME="${androidKeyaliasName}" \
--env ANDROID_KEYALIAS_PASS="${androidKeyaliasPass}" \
--env ANDROID_TARGET_SDK_VERSION="${androidTargetSdkVersion}" \
--env ANDROID_SDK_MANAGER_PARAMETERS="${androidSdkManagerParameters}" \
--env CUSTOM_PARAMETERS="${customParameters}" \
--env CHOWN_FILES_TO="${chownFilesTo}" \
--env GITHUB_REF \
--env GITHUB_SHA \
--env GITHUB_REPOSITORY \
--env GITHUB_ACTOR \
--env GITHUB_WORKFLOW \
--env GITHUB_HEAD_REF \
--env GITHUB_BASE_REF \
--env GITHUB_EVENT_NAME \
--env GITHUB_WORKSPACE=/github/workspace \
--env GITHUB_ACTION \
--env GITHUB_EVENT_PATH \
--env RUNNER_OS \
--env RUNNER_TOOL_CACHE \
--env RUNNER_TEMP \
--env RUNNER_WORKSPACE \
--env GIT_PRIVATE_TOKEN="${gitPrivateToken}" \
${ImageEnvironmentFactory.getEnvVarString(parameters)} \
${baseOsSpecificArguments} \
${image}`;

View File

@ -0,0 +1,70 @@
import BuildParameters from './build-parameters';
import { ReadLicense } from './input-readers/test-license-reader';
class Parameter {
public name;
public value;
}
class ImageEnvironmentFactory {
public static getEnvVarString(parameters) {
const environmentVariables = ImageEnvironmentFactory.getEnvironmentVariables(parameters);
let string = '';
for (const p of environmentVariables) {
if (p.value === '' || p.value === undefined) {
continue;
}
if (p.value.toString().includes(`\n`)) {
string += `--env ${p.name} `;
continue;
}
string += `--env ${p.name}="${p.value}" `;
}
return string;
}
public static getEnvironmentVariables(parameters: BuildParameters) {
const environmentVariables: Parameter[] = [
{ name: 'UNITY_LICENSE', value: process.env.UNITY_LICENSE || ReadLicense() },
{ name: 'UNITY_LICENSE_FILE', value: process.env.UNITY_LICENSE_FILE },
{ name: 'UNITY_EMAIL', value: process.env.UNITY_EMAIL },
{ name: 'UNITY_PASSWORD', value: process.env.UNITY_PASSWORD },
{ name: 'UNITY_SERIAL', value: parameters.unitySerial },
{ name: 'UNITY_VERSION', value: parameters.version },
{ name: 'USYM_UPLOAD_AUTH_TOKEN', value: process.env.USYM_UPLOAD_AUTH_TOKEN },
{ name: 'PROJECT_PATH', value: parameters.projectPath },
{ name: 'BUILD_TARGET', value: parameters.platform },
{ name: 'BUILD_NAME', value: parameters.buildName },
{ name: 'BUILD_PATH', value: parameters.buildPath },
{ name: 'BUILD_FILE', value: parameters.buildFile },
{ name: 'BUILD_METHOD', value: parameters.buildMethod },
{ name: 'VERSION', value: parameters.buildVersion },
{ name: 'ANDROID_VERSION_CODE', value: parameters.androidVersionCode },
{ name: 'ANDROID_KEYSTORE_NAME', value: parameters.androidKeystoreName },
{ name: 'ANDROID_KEYSTORE_BASE64', value: parameters.androidKeystoreBase64 },
{ name: 'ANDROID_KEYSTORE_PASS', value: parameters.androidKeystorePass },
{ name: 'ANDROID_KEYALIAS_NAME', value: parameters.androidKeyaliasName },
{ name: 'ANDROID_KEYALIAS_PASS', value: parameters.androidKeyaliasPass },
{ name: 'CUSTOM_PARAMETERS', value: parameters.customParameters },
{ name: 'CHOWN_FILES_TO', value: parameters.chownFilesTo },
{ name: 'GITHUB_REF', value: process.env.GITHUB_REF },
{ name: 'GITHUB_SHA', value: process.env.GITHUB_SHA },
{ name: 'GITHUB_REPOSITORY', value: process.env.GITHUB_REPOSITORY },
{ name: 'GITHUB_ACTOR', value: process.env.GITHUB_ACTOR },
{ name: 'GITHUB_WORKFLOW', value: process.env.GITHUB_WORKFLOW },
{ name: 'GITHUB_HEAD_REF', value: process.env.GITHUB_HEAD_REF },
{ name: 'GITHUB_BASE_REF', value: process.env.GITHUB_BASE_REF },
{ name: 'GITHUB_EVENT_NAME', value: process.env.GITHUB_EVENT_NAME },
{ name: 'GITHUB_WORKSPACE', value: '/github/workspace' },
{ name: 'GITHUB_ACTION', value: process.env.GITHUB_ACTION },
{ name: 'GITHUB_EVENT_PATH', value: process.env.GITHUB_EVENT_PATH },
{ name: 'RUNNER_OS', value: process.env.RUNNER_OS },
{ name: 'RUNNER_TOOL_CACHE', value: process.env.RUNNER_TOOL_CACHE },
{ name: 'RUNNER_TEMP', value: process.env.RUNNER_TEMP },
{ name: 'RUNNER_WORKSPACE', value: process.env.RUNNER_WORKSPACE },
];
if (parameters.sshAgent) environmentVariables.push({ name: 'SSH_AUTH_SOCK', value: '/ssh-agent' });
return environmentVariables;
}
}
export default ImageEnvironmentFactory;
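
How getEnvVarString() renders that list (names real, values illustrative): empty or undefined values are skipped, multi-line values are passed by name only so Docker inherits them from the host environment, and everything else is inlined:

// { name: 'BUILD_TARGET', value: 'StandaloneLinux64' } -> --env BUILD_TARGET="StandaloneLinux64"
// { name: 'UNITY_LICENSE', value: '<multi-line xml>' } -> --env UNITY_LICENSE
// { name: 'UNITY_EMAIL', value: '' }                   -> (omitted)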

View File

@ -9,8 +9,7 @@ import Platform from './platform';
import Project from './project';
import Unity from './unity';
import Versioning from './versioning';
import Kubernetes from './kubernetes';
import RemoteBuilder from './remote-builder/remote-builder';
import CloudRunner from './cloud-runner/cloud-runner';
export {
Action,
@ -24,6 +23,5 @@ export {
Project,
Unity,
Versioning,
Kubernetes,
RemoteBuilder,
CloudRunner as CloudRunner,
};

View File

@ -0,0 +1,17 @@
import fs from 'fs';
import path from 'path';
import YAML from 'yaml';
export class ActionYamlReader {
private actionYamlParsed: any;
public constructor() {
let filename = `action.yml`;
if (!fs.existsSync(filename)) {
filename = path.join(__dirname, `..`, filename);
}
this.actionYamlParsed = YAML.parse(fs.readFileSync(filename).toString());
}
public GetActionYamlValue(key: string) {
return this.actionYamlParsed.inputs[key]?.description || 'No description found in action.yml';
}
}
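
A usage sketch (the 'targetPlatform' key is an assumption; any input declared in action.yml works):

const reader = new ActionYamlReader();
const description = reader.GetActionYamlValue('targetPlatform');
// Falls back to 'No description found in action.yml' when the key has no description.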

View File

@ -0,0 +1,8 @@
import { GitRepoReader } from './git-repo';
describe(`git repo tests`, () => {
it(`Branch value parsed from CLI to not contain illegal characters`, async () => {
expect(await GitRepoReader.GetBranch()).not.toContain(`\n`);
expect(await GitRepoReader.GetBranch()).not.toContain(` `);
});
});

View File

@ -0,0 +1,20 @@
import { assert } from 'console';
import System from '../system';
import fs from 'fs';
import { CloudRunnerSystem } from '../cli/remote-client/remote-client-services/cloud-runner-system';
export class GitRepoReader {
static GetSha() {
return '';
}
public static async GetRemote() {
return (await CloudRunnerSystem.Run(`git remote -v`))
.split(' ')[1]
.split('https://github.com/')[1]
.split('.git')[0];
}
public static async GetBranch() {
assert(fs.existsSync(`.git`));
return (await System.run(`git branch`, [], {}, false)).split('*')[1].split(`\n`)[0].replace(/ /g, ``);
}
}

View File

@ -0,0 +1,9 @@
import { GithubCliReader } from './github-cli';
import * as core from '@actions/core';
describe(`github cli`, () => {
it(`returns`, async () => {
const token = await GithubCliReader.GetGitHubAuthToken();
core.info(token);
});
});

View File

@ -0,0 +1,20 @@
import { CloudRunnerSystem } from '../cli/remote-client/remote-client-services/cloud-runner-system';
import * as core from '@actions/core';
export class GithubCliReader {
static async GetGitHubAuthToken() {
try {
const authStatus = await CloudRunnerSystem.Run(`gh auth status`, true);
if (authStatus.includes('You are not logged') || authStatus === '') {
return '';
}
return (await CloudRunnerSystem.Run(`gh auth status -t`))
.split(`Token: `)[1]
.replace(/ /g, '')
.replace(/\n/g, '');
} catch (error: any) {
core.info(error || 'Failed to get github auth token from gh cli');
return '';
}
}
}

View File

@ -0,0 +1,8 @@
import path from 'path';
import fs from 'fs';
import YAML from 'yaml';
export function ReadLicense() {
const pipelineFile = path.join(__dirname, `.github`, `workflows`, `cloud-runner-k8s-pipeline.yml`);
return fs.existsSync(pipelineFile) ? YAML.parse(fs.readFileSync(pipelineFile, 'utf8')).env.UNITY_LICENSE : '';
}

View File

@ -48,7 +48,7 @@ describe('Input', () => {
describe('projectPath', () => {
it('returns the default value', () => {
expect(Input.projectPath).toStrictEqual('.');
expect(Input.projectPath).toStrictEqual('test-project');
});
it('takes input from the users workflow', () => {

View File

@ -1,3 +1,7 @@
import fs from 'fs';
import path from 'path';
import { GitRepoReader } from './input-readers/git-repo';
import { GithubCliReader } from './input-readers/github-cli';
import Platform from './platform';
const core = require('@actions/core');
@ -8,71 +12,134 @@ const core = require('@actions/core');
* Note that input is always passed as a string, even booleans.
*/
class Input {
public static cliOptions;
public static githubInputEnabled: boolean = true;
// Also enables debug logging for cloud runner.
static get cloudRunnerTests(): boolean {
return Input.getInput(`cloudRunnerTests`) || Input.getInput(`CloudRunnerTests`) || false;
}
private static getInput(query) {
const coreInput = core.getInput(query);
if (Input.githubInputEnabled && coreInput && coreInput !== '') {
return coreInput;
}
return Input.cliOptions !== undefined && Input.cliOptions[query] !== undefined
? Input.cliOptions[query]
: process.env[query] !== undefined
? process.env[query]
: process.env[Input.ToEnvVarFormat(query)]
? process.env[Input.ToEnvVarFormat(query)]
: '';
}
static get region(): string {
return Input.getInput('region') || 'eu-west-2';
}
static async githubRepo() {
return (
Input.getInput('GITHUB_REPOSITORY') ||
Input.getInput('GITHUB_REPO') ||
(await GitRepoReader.GetRemote()) ||
'game-ci/unity-builder'
);
}
static async branch() {
if (await GitRepoReader.GetBranch()) {
return await GitRepoReader.GetBranch();
} else if (Input.getInput(`GITHUB_REF`)) {
return Input.getInput(`GITHUB_REF`).replace('refs/', '').replace(`heads/`, '');
} else if (Input.getInput('branch')) {
return Input.getInput('branch');
} else {
return 'main';
}
}
static get gitSha() {
if (Input.getInput(`GITHUB_SHA`)) {
return Input.getInput(`GITHUB_SHA`);
} else if (Input.getInput(`GitSHA`)) {
return Input.getInput(`GitSHA`);
} else if (GitRepoReader.GetSha()) {
return GitRepoReader.GetSha();
}
}
static get runNumber() {
return Input.getInput('GITHUB_RUN_NUMBER') || '0';
}
static get unityVersion() {
return core.getInput('unityVersion') || 'auto';
return Input.getInput('unityVersion') || 'auto';
}
static get customImage() {
return core.getInput('customImage');
return Input.getInput('customImage');
}
static get targetPlatform() {
return core.getInput('targetPlatform') || Platform.default;
return Input.getInput('targetPlatform') || Platform.default;
}
static get projectPath() {
const rawProjectPath = core.getInput('projectPath') || '.';
const input = Input.getInput('projectPath');
const rawProjectPath = input
? input
: fs.existsSync(path.join('test-project', 'ProjectSettings', 'ProjectVersion.txt')) &&
!fs.existsSync(path.join('ProjectSettings', 'ProjectVersion.txt'))
? 'test-project'
: '.';
return rawProjectPath.replace(/\/$/, '');
}
static get buildName() {
return core.getInput('buildName') || this.targetPlatform;
return Input.getInput('buildName') || this.targetPlatform;
}
static get buildsPath() {
return core.getInput('buildsPath') || 'build';
return Input.getInput('buildsPath') || 'build';
}
static get buildMethod() {
return core.getInput('buildMethod'); // processed in docker file
return Input.getInput('buildMethod') || ''; // processed in docker file
}
static get versioningStrategy() {
return core.getInput('versioning') || 'Semantic';
return Input.getInput('versioning') || 'Semantic';
}
static get specifiedVersion() {
return core.getInput('version') || '';
return Input.getInput('version') || '';
}
static get androidVersionCode() {
return core.getInput('androidVersionCode');
return Input.getInput('androidVersionCode');
}
static get androidAppBundle() {
const input = core.getInput('androidAppBundle') || false;
const input = Input.getInput('androidAppBundle') || false;
return input === 'true';
}
static get androidKeystoreName() {
return core.getInput('androidKeystoreName') || '';
return Input.getInput('androidKeystoreName') || '';
}
static get androidKeystoreBase64() {
return core.getInput('androidKeystoreBase64') || '';
return Input.getInput('androidKeystoreBase64') || '';
}
static get androidKeystorePass() {
return core.getInput('androidKeystorePass') || '';
return Input.getInput('androidKeystorePass') || '';
}
static get androidKeyaliasName() {
return core.getInput('androidKeyaliasName') || '';
return Input.getInput('androidKeyaliasName') || '';
}
static get androidKeyaliasPass() {
return core.getInput('androidKeyaliasPass') || '';
return Input.getInput('androidKeyaliasPass') || '';
}
static get androidTargetSdkVersion() {
@ -80,57 +147,77 @@ class Input {
}
static get allowDirtyBuild() {
const input = core.getInput('allowDirtyBuild') || false;
const input = Input.getInput('allowDirtyBuild') || false;
return input === 'true';
}
static get customParameters() {
return core.getInput('customParameters') || '';
return Input.getInput('customParameters') || '';
}
static get sshAgent() {
return core.getInput('sshAgent') || '';
return Input.getInput('sshAgent') || '';
}
static get gitPrivateToken() {
return core.getInput('gitPrivateToken') || '';
static async githubToken() {
return Input.getInput('githubToken') || (await GithubCliReader.GetGitHubAuthToken()) || '';
}
static async gitPrivateToken() {
return core.getInput('gitPrivateToken') || (await Input.githubToken());
}
static get chownFilesTo() {
return core.getInput('chownFilesTo') || '';
return Input.getInput('chownFilesTo') || '';
}
static get remoteBuildCluster() {
return core.getInput('remoteBuildCluster') || '';
static get postBuildSteps() {
return Input.getInput('postBuildSteps') || '';
}
static get awsStackName() {
return core.getInput('awsStackName') || '';
static get preBuildSteps() {
return Input.getInput('preBuildSteps') || '';
}
static get customJob() {
return Input.getInput('customJob') || '';
}
static get cloudRunnerCluster() {
return Input.getInput('cloudRunnerCluster') || '';
}
static get awsBaseStackName() {
return Input.getInput('awsBaseStackName') || 'game-ci';
}
static get kubeConfig() {
return core.getInput('kubeConfig') || '';
return Input.getInput('kubeConfig') || '';
}
static get githubToken() {
return core.getInput('githubToken') || '';
static get cloudRunnerMemory() {
return Input.getInput('cloudRunnerMemory') || '750M';
}
static get remoteBuildMemory() {
return core.getInput('remoteBuildMemory') || '800M';
}
static get remoteBuildCpu() {
return core.getInput('remoteBuildCpu') || '0.25';
static get cloudRunnerCpu() {
return Input.getInput('cloudRunnerCpu') || '1.0';
}
static get kubeVolumeSize() {
return core.getInput('kubeVolumeSize') || '5Gi';
return Input.getInput('kubeVolumeSize') || '5Gi';
}
static get kubeVolume() {
return core.getInput('kubeVolume') || '';
return Input.getInput('kubeVolume') || '';
}
public static ToEnvVarFormat(input: string) {
return input
.replace(/([A-Z])/g, ' $1')
.trim()
.toUpperCase()
.replace(/ /g, '_');
}
}
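
A worked example of ToEnvVarFormat, which the cloud runner uses to map camelCase input names onto environment variable names:

// Input.ToEnvVarFormat('cloudRunnerMemory') === 'CLOUD_RUNNER_MEMORY'
// Each capital letter gets a leading space, the result is upper-cased, then spaces become underscores.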

View File

@ -1,354 +0,0 @@
// @ts-ignore
import { Client, KubeConfig } from 'kubernetes-client';
import Request from 'kubernetes-client/backends/request';
const core = require('@actions/core');
const base64 = require('base-64');
const pollInterval = 10000;
class Kubernetes {
private static kubeClient: any;
private static buildId: string;
private static buildParameters: any;
private static baseImage: any;
private static pvcName: string;
private static secretName: string;
private static jobName: string;
private static namespace: string;
static async runBuildJob(buildParameters, baseImage) {
const kubeconfig = new KubeConfig();
kubeconfig.loadFromString(base64.decode(buildParameters.kubeConfig));
const backend = new Request({ kubeconfig });
const kubeClient = new Client(backend);
await kubeClient.loadSpec();
const buildId = Kubernetes.uuidv4();
const pvcName = `unity-builder-pvc-${buildId}`;
const secretName = `build-credentials-${buildId}`;
const jobName = `unity-builder-job-${buildId}`;
const namespace = 'default';
this.kubeClient = kubeClient;
this.buildId = buildId;
this.buildParameters = buildParameters;
this.baseImage = baseImage;
this.pvcName = pvcName;
this.secretName = secretName;
this.jobName = jobName;
this.namespace = namespace;
await Kubernetes.createSecret();
await Kubernetes.createPersistentVolumeClaim();
await Kubernetes.scheduleBuildJob();
await Kubernetes.watchBuildJobUntilFinished();
await Kubernetes.cleanup();
core.setOutput('volume', pvcName);
}
static async createSecret() {
const secretManifest = {
apiVersion: 'v1',
kind: 'Secret',
metadata: {
name: this.secretName,
},
type: 'Opaque',
data: {
GITHUB_TOKEN: base64.encode(this.buildParameters.githubToken),
UNITY_LICENSE: base64.encode(process.env.UNITY_LICENSE),
ANDROID_KEYSTORE_BASE64: base64.encode(this.buildParameters.androidKeystoreBase64),
ANDROID_KEYSTORE_PASS: base64.encode(this.buildParameters.androidKeystorePass),
ANDROID_KEYALIAS_PASS: base64.encode(this.buildParameters.androidKeyaliasPass),
},
};
await this.kubeClient.api.v1.namespaces(this.namespace).secrets.post({ body: secretManifest });
}
static async createPersistentVolumeClaim() {
if (this.buildParameters.kubeVolume) {
core.info(this.buildParameters.kubeVolume);
this.pvcName = this.buildParameters.kubeVolume;
return;
}
const pvcManifest = {
apiVersion: 'v1',
kind: 'PersistentVolumeClaim',
metadata: {
name: this.pvcName,
},
spec: {
accessModes: ['ReadWriteOnce'],
volumeMode: 'Filesystem',
resources: {
requests: {
storage: this.buildParameters.kubeVolumeSize,
},
},
},
};
await this.kubeClient.api.v1.namespaces(this.namespace).persistentvolumeclaims.post({ body: pvcManifest });
core.info('Persistent Volume created, waiting for ready state...');
await Kubernetes.watchPersistentVolumeClaimUntilReady();
core.info('Persistent Volume ready for claims');
}
static async watchPersistentVolumeClaimUntilReady() {
await new Promise((resolve) => setTimeout(resolve, pollInterval));
const queryResult = await this.kubeClient.api.v1
.namespaces(this.namespace)
.persistentvolumeclaims(this.pvcName)
.get();
if (queryResult.body.status.phase === 'Pending') {
await Kubernetes.watchPersistentVolumeClaimUntilReady();
}
}
static async scheduleBuildJob() {
core.info('Creating build job');
const jobManifest = {
apiVersion: 'batch/v1',
kind: 'Job',
metadata: {
name: this.jobName,
labels: {
app: 'unity-builder',
},
},
spec: {
template: {
backoffLimit: 1,
spec: {
volumes: [
{
name: 'data',
persistentVolumeClaim: {
claimName: this.pvcName,
},
},
{
name: 'credentials',
secret: {
secretName: this.secretName,
},
},
],
initContainers: [
{
name: 'clone',
image: 'alpine/git',
command: [
'/bin/sh',
'-c',
`apk update;
apk add git-lfs;
export GITHUB_TOKEN=$(cat /credentials/GITHUB_TOKEN);
cd /data;
git clone https://github.com/${process.env.GITHUB_REPOSITORY}.git repo;
git clone https://github.com/webbertakken/unity-builder.git builder;
cd repo;
git checkout $GITHUB_SHA;
ls`,
],
volumeMounts: [
{
name: 'data',
mountPath: '/data',
},
{
name: 'credentials',
mountPath: '/credentials',
readOnly: true,
},
],
env: [
{
name: 'GITHUB_SHA',
value: this.buildId,
},
],
},
],
containers: [
{
name: 'main',
image: `${this.baseImage.toString()}`,
command: [
'bin/bash',
'-c',
`for f in ./credentials/*; do export $(basename $f)="$(cat $f)"; done
cp -r /data/builder/action/default-build-script /UnityBuilderAction
cp -r /data/builder/action/entrypoint.sh /entrypoint.sh
cp -r /data/builder/action/steps /steps
chmod -R +x /entrypoint.sh;
chmod -R +x /steps;
/entrypoint.sh;
`,
],
resources: {
requests: {
memory: this.buildParameters.kubeContainerMemory,
cpu: this.buildParameters.kubeContainerCPU,
},
},
env: [
{
name: 'GITHUB_WORKSPACE',
value: '/data/repo',
},
{
name: 'PROJECT_PATH',
value: this.buildParameters.projectPath,
},
{
name: 'BUILD_PATH',
value: this.buildParameters.buildPath,
},
{
name: 'BUILD_FILE',
value: this.buildParameters.buildFile,
},
{
name: 'BUILD_NAME',
value: this.buildParameters.buildName,
},
{
name: 'BUILD_METHOD',
value: this.buildParameters.buildMethod,
},
{
name: 'CUSTOM_PARAMETERS',
value: this.buildParameters.customParameters,
},
{
name: 'CHOWN_FILES_TO',
value: this.buildParameters.chownFilesTo,
},
{
name: 'BUILD_TARGET',
value: this.buildParameters.platform,
},
{
name: 'ANDROID_VERSION_CODE',
value: this.buildParameters.androidVersionCode.toString(),
},
{
name: 'ANDROID_KEYSTORE_NAME',
value: this.buildParameters.androidKeystoreName,
},
{
name: 'ANDROID_KEYALIAS_NAME',
value: this.buildParameters.androidKeyaliasName,
},
],
volumeMounts: [
{
name: 'data',
mountPath: '/data',
},
{
name: 'credentials',
mountPath: '/credentials',
readOnly: true,
},
],
lifeCycle: {
preStop: {
exec: {
command: [
'bin/bash',
'-c',
`cd /data/builder/action/steps;
chmod +x /return_license.sh;
/return_license.sh;`,
],
},
},
},
},
],
restartPolicy: 'Never',
},
},
},
};
await this.kubeClient.apis.batch.v1.namespaces(this.namespace).jobs.post({ body: jobManifest });
core.info('Job created');
}
static async watchBuildJobUntilFinished() {
let podname;
let ready = false;
while (!ready) {
await new Promise((resolve) => setTimeout(resolve, pollInterval));
const pods = await this.kubeClient.api.v1.namespaces(this.namespace).pods.get();
for (let index = 0; index < pods.body.items.length; index++) {
const element = pods.body.items[index];
if (element.metadata.labels['job-name'] === this.jobName && element.status.phase !== 'Pending') {
core.info('Pod no longer pending');
if (element.status.phase === 'Failure') {
core.error('Kubernetes job failed');
} else {
ready = true;
podname = element.metadata.name;
}
}
}
}
core.info(`Watching build job ${podname}`);
let logQueryTime;
let complete = false;
while (!complete) {
await new Promise((resolve) => setTimeout(resolve, pollInterval));
const podStatus = await this.kubeClient.api.v1.namespaces(this.namespace).pod(podname).get();
if (podStatus.body.status.phase !== 'Running') {
complete = true;
}
const logs = await this.kubeClient.api.v1
.namespaces(this.namespace)
.pod(podname)
.log.get({
qs: {
sinceTime: logQueryTime,
timestamps: true,
},
});
if (logs.body !== undefined) {
const arrayOfLines = logs.body.match(/[^\n\r]+/g).reverse();
for (const element of arrayOfLines) {
const [time, ...line] = element.split(' ');
if (time !== logQueryTime) {
core.info(line.join(' '));
} else {
break;
}
}
if (podStatus.body.status.phase === 'Failed') {
throw new Error('Kubernetes job failed');
}
logQueryTime = arrayOfLines[0].split(' ')[0];
}
}
}
static async cleanup() {
await this.kubeClient.apis.batch.v1.namespaces(this.namespace).jobs(this.jobName).delete();
await this.kubeClient.api.v1.namespaces(this.namespace).secrets(this.secretName).delete();
}
static uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
const r = Math.trunc(Math.random() * 16);
const v = c === 'x' ? r : (r & 0x3) | 0x8;
return v.toString(16);
});
}
}
export default Kubernetes;

View File

@ -1,253 +0,0 @@
import * as SDK from 'aws-sdk';
import { customAlphabet } from 'nanoid';
import RemoteBuilderSecret from './remote-builder-secret';
import RemoteBuilderEnvironmentVariable from './remote-builder-environment-variable';
import * as fs from 'fs';
import * as core from '@actions/core';
import RemoteBuilderTaskDef from './remote-builder-task-def';
import RemoteBuilderConstants from './remote-builder-constants';
import AWSBuildRunner from './aws-build-runner';
class AWSBuildEnvironment {
static async runBuild(
buildId: string,
stackName: string,
image: string,
commands: string[],
mountdir: string,
workingdir: string,
environment: RemoteBuilderEnvironmentVariable[],
secrets: RemoteBuilderSecret[],
) {
const ECS = new SDK.ECS();
const CF = new SDK.CloudFormation();
const entrypoint = ['/bin/sh'];
const taskDef = await this.setupCloudFormations(
CF,
buildId,
stackName,
image,
entrypoint,
commands,
mountdir,
workingdir,
secrets,
);
try {
await AWSBuildRunner.runTask(taskDef, ECS, CF, environment, buildId);
} finally {
await this.cleanupResources(CF, taskDef);
}
}
// static async setupPlatformResources() {
// throw new Error('Method not implemented.');
// }
static getParameterTemplate(p1) {
return `
${p1}:
Type: String
Default: ''
`;
}
static getSecretTemplate(p1) {
return `
${p1}Secret:
Type: AWS::SecretsManager::Secret
Properties:
Name: !Join [ "", [ '${p1}', !Ref BUILDID ] ]
SecretString: !Ref ${p1}
`;
}
static getSecretDefinitionTemplate(p1, p2) {
return `
- Name: '${p1}'
ValueFrom: !Ref ${p2}Secret
`;
}
static insertAtTemplate(template, insertionKey, insertion) {
const index = template.search(insertionKey) + insertionKey.length + '\n'.length;
template = [template.slice(0, index), insertion, template.slice(index)].join('');
return template;
}
static async setupCloudFormations(
CF: SDK.CloudFormation,
buildUid: string,
stackName: string,
image: string,
entrypoint: string[],
commands: string[],
mountdir: string,
workingdir: string,
secrets: RemoteBuilderSecret[],
): Promise<RemoteBuilderTaskDef> {
const logid = customAlphabet(RemoteBuilderConstants.alphabet, 9)();
commands[1] += `
echo "${logid}"
`;
const taskDefStackName = `${stackName}-${buildUid}`;
let taskDefCloudFormation = this.readTaskCloudFormationTemplate();
const cleanupTaskDefStackName = `${taskDefStackName}-cleanup`;
const cleanupCloudFormation = fs.readFileSync(`${__dirname}/cloud-formations/cloudformation-stack-ttl.yml`, 'utf8');
try {
for (const secret of secrets) {
taskDefCloudFormation = this.insertAtTemplate(
taskDefCloudFormation,
'p1 - input',
this.getParameterTemplate(secret.ParameterKey.replace(/[^\dA-Za-z]/g, '')),
);
taskDefCloudFormation = this.insertAtTemplate(
taskDefCloudFormation,
'p2 - secret',
this.getSecretTemplate(secret.ParameterKey.replace(/[^\dA-Za-z]/g, '')),
);
taskDefCloudFormation = this.insertAtTemplate(
taskDefCloudFormation,
'p3 - container def',
this.getSecretDefinitionTemplate(secret.EnvironmentVariable, secret.ParameterKey.replace(/[^\dA-Za-z]/g, '')),
);
}
const mappedSecrets = secrets.map((x) => {
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
});
await CF.createStack({
StackName: taskDefStackName,
TemplateBody: taskDefCloudFormation,
Parameters: [
{
ParameterKey: 'ImageUrl',
ParameterValue: image,
},
{
ParameterKey: 'ServiceName',
ParameterValue: taskDefStackName,
},
{
ParameterKey: 'Command',
ParameterValue: commands.join(','),
},
{
ParameterKey: 'EntryPoint',
ParameterValue: entrypoint.join(','),
},
{
ParameterKey: 'WorkingDirectory',
ParameterValue: workingdir,
},
{
ParameterKey: 'EFSMountDirectory',
ParameterValue: mountdir,
},
{
ParameterKey: 'BUILDID',
ParameterValue: buildUid,
},
...mappedSecrets,
],
}).promise();
core.info('Creating worker cluster...');
await CF.createStack({
StackName: cleanupTaskDefStackName,
TemplateBody: cleanupCloudFormation,
Capabilities: ['CAPABILITY_IAM'],
Parameters: [
{
ParameterKey: 'StackName',
ParameterValue: taskDefStackName,
},
{
ParameterKey: 'DeleteStackName',
ParameterValue: cleanupTaskDefStackName,
},
{
ParameterKey: 'TTL',
ParameterValue: '100',
},
{
ParameterKey: 'BUILDID',
ParameterValue: buildUid,
},
],
}).promise();
core.info('Creating cleanup cluster...');
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
} catch (error) {
await AWSBuildEnvironment.handleStackCreationFailure(error, CF, taskDefStackName, taskDefCloudFormation, secrets);
throw error;
}
const taskDefResources = (
await CF.describeStackResources({
StackName: taskDefStackName,
}).promise()
).StackResources;
const baseResources = (await CF.describeStackResources({ StackName: stackName }).promise()).StackResources;
// In the future we should offer a parameter to choose if you want the guaranteed shutdown.
core.info('Worker cluster created successfully (skipping wait for cleanup cluster to be ready)');
return {
taskDefStackName,
taskDefCloudFormation,
taskDefStackNameTTL: cleanupTaskDefStackName,
ttlCloudFormation: cleanupCloudFormation,
taskDefResources,
baseResources,
logid,
};
}
private static async handleStackCreationFailure(
error: any,
CF: SDK.CloudFormation,
taskDefStackName: string,
taskDefCloudFormation: string,
secrets: RemoteBuilderSecret[],
) {
core.info(JSON.stringify(secrets, undefined, 4));
core.info(taskDefCloudFormation);
const events = (await CF.describeStackEvents({ StackName: taskDefStackName }).promise()).StackEvents;
const resources = (await CF.describeStackResources({ StackName: taskDefStackName }).promise()).StackResources;
core.info(JSON.stringify(events, undefined, 4));
core.info(JSON.stringify(resources, undefined, 4));
core.error(error);
}
static readTaskCloudFormationTemplate(): string {
return fs.readFileSync(`${__dirname}/cloud-formations/task-def-formation.yml`, 'utf8');
}
static async cleanupResources(CF: SDK.CloudFormation, taskDef: RemoteBuilderTaskDef) {
core.info('Cleanup starting');
await CF.deleteStack({
StackName: taskDef.taskDefStackName,
}).promise();
await CF.deleteStack({
StackName: taskDef.taskDefStackNameTTL,
}).promise();
await CF.waitFor('stackDeleteComplete', {
StackName: taskDef.taskDefStackName,
}).promise();
// Currently too slow and causes too much waiting
await CF.waitFor('stackDeleteComplete', {
StackName: taskDef.taskDefStackNameTTL,
}).promise();
core.info('Cleanup complete');
}
}
export default AWSBuildEnvironment;

View File

@ -1,165 +0,0 @@
import * as AWS from 'aws-sdk';
import RemoteBuilderEnvironmentVariable from './remote-builder-environment-variable';
import * as core from '@actions/core';
import RemoteBuilderTaskDef from './remote-builder-task-def';
import * as zlib from 'zlib';
class AWSBuildRunner {
static async runTask(
taskDef: RemoteBuilderTaskDef,
ECS: AWS.ECS,
CF: AWS.CloudFormation,
environment: RemoteBuilderEnvironmentVariable[],
buildUid: string,
) {
const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
const taskDefinition =
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
const SubnetOne =
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetOne')?.PhysicalResourceId || '';
const SubnetTwo =
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetTwo')?.PhysicalResourceId || '';
const ContainerSecurityGroup =
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ContainerSecurityGroup')?.PhysicalResourceId || '';
const streamName =
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';
const task = await ECS.runTask({
cluster,
taskDefinition,
platformVersion: '1.4.0',
overrides: {
containerOverrides: [
{
name: taskDef.taskDefStackName,
environment: [...environment, { name: 'BUILDID', value: buildUid }],
},
],
},
launchType: 'FARGATE',
networkConfiguration: {
awsvpcConfiguration: {
subnets: [SubnetOne, SubnetTwo],
assignPublicIp: 'ENABLED',
securityGroups: [ContainerSecurityGroup],
},
},
}).promise();
core.info('Task is starting on worker cluster');
const taskArn = task.tasks?.[0].taskArn || '';
try {
await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
} catch (error) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const describeTasks = await ECS.describeTasks({
tasks: [taskArn],
cluster,
}).promise();
core.info(`Task has ended ${describeTasks.tasks?.[0].containers?.[0].lastStatus}`);
core.setFailed(error);
core.error(error);
}
core.info(`Task is running on worker cluster`);
await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
await ECS.waitFor('tasksStopped', { cluster, tasks: [taskArn] }).promise();
const exitCode = (
await ECS.describeTasks({
tasks: [taskArn],
cluster,
}).promise()
).tasks?.[0].containers?.[0].exitCode;
if (exitCode !== 0) {
core.error(`job failed with exit code ${exitCode}`);
throw new Error(`job failed with exit code ${exitCode}`);
} else {
core.info(`Task has finished successfully`);
}
}
static async streamLogsUntilTaskStops(
ECS: AWS.ECS,
CF: AWS.CloudFormation,
taskDef: RemoteBuilderTaskDef,
clusterName: string,
taskArn: string,
kinesisStreamName: string,
) {
// watching logs
const kinesis = new AWS.Kinesis();
const getTaskData = async () => {
const tasks = await ECS.describeTasks({
cluster: clusterName,
tasks: [taskArn],
}).promise();
return tasks.tasks?.[0];
};
const stream = await kinesis
.describeStream({
StreamName: kinesisStreamName,
})
.promise();
let iterator =
(
await kinesis
.getShardIterator({
ShardIteratorType: 'TRIM_HORIZON',
StreamName: stream.StreamDescription.StreamName,
ShardId: stream.StreamDescription.Shards[0].ShardId,
})
.promise()
).ShardIterator || '';
await CF.waitFor('stackCreateComplete', { StackName: taskDef.taskDefStackNameTTL }).promise();
core.info(`Task status is ${(await getTaskData())?.lastStatus}`);
const logBaseUrl = `https://${AWS.config.region}.console.aws.amazon.com/cloudwatch/home?region=${AWS.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
core.info(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let readingLogs = true;
let timestamp: number = 0;
while (readingLogs) {
await new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = await getTaskData();
if (taskData?.lastStatus !== 'RUNNING') {
if (timestamp === 0) {
core.info('Task stopped, streaming end of logs');
timestamp = Date.now();
}
if (timestamp !== 0 && Date.now() - timestamp < 30000) {
core.info('Task status is not RUNNING for 30 seconds, last query for logs');
readingLogs = false;
}
}
const records = await kinesis
.getRecords({
ShardIterator: iterator,
})
.promise();
iterator = records.NextShardIterator || '';
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(
zlib.gunzipSync(Buffer.from(records.Records[index].Data as string, 'base64')).toString('utf8'),
);
if (json.messageType === 'DATA_MESSAGE') {
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
if (json.logEvents[logEventsIndex].message.includes(taskDef.logid)) {
core.info('End of task logs');
readingLogs = false;
} else {
core.info(json.logEvents[logEventsIndex].message);
}
}
}
}
}
}
}
}
export default AWSBuildRunner;

View File

@ -1,4 +0,0 @@
class RemoteBuilderConstants {
static alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
}
export default RemoteBuilderConstants;

View File

@ -1,5 +0,0 @@
class RemoteBuilderEnvironmentVariable {
public name!: string;
public value!: string;
}
export default RemoteBuilderEnvironmentVariable;

View File

@ -1,419 +0,0 @@
import { customAlphabet } from 'nanoid';
import AWSBuildPlatform from './aws-build-platform';
import * as core from '@actions/core';
import RemoteBuilderConstants from './remote-builder-constants';
import { BuildParameters } from '..';
const repositoryDirectoryName = 'repo';
const efsDirectoryName = 'data';
const cacheDirectoryName = 'cache';
class RemoteBuilder {
static SteamDeploy: boolean = false;
static async build(buildParameters: BuildParameters, baseImage) {
try {
this.SteamDeploy = process.env.STEAM_DEPLOY !== undefined || false;
const nanoid = customAlphabet(RemoteBuilderConstants.alphabet, 4);
const buildUid = `${process.env.GITHUB_RUN_NUMBER}-${buildParameters.platform
.replace('Standalone', '')
.replace('standalone', '')}-${nanoid()}`;
const defaultBranchName =
process.env.GITHUB_REF?.split('/')
.filter((x) => {
x = x[0].toUpperCase() + x.slice(1);
return x;
})
.join('') || '';
const branchName =
process.env.REMOTE_BUILDER_CACHE !== undefined ? process.env.REMOTE_BUILDER_CACHE : defaultBranchName;
const token: string = buildParameters.githubToken;
const defaultSecretsArray = [
{
ParameterKey: 'GithubToken',
EnvironmentVariable: 'GITHUB_TOKEN',
ParameterValue: token,
},
];
await RemoteBuilder.SetupStep(buildUid, buildParameters, branchName, defaultSecretsArray);
await RemoteBuilder.BuildStep(buildUid, buildParameters, baseImage, defaultSecretsArray);
await RemoteBuilder.CompressionStep(buildUid, buildParameters, branchName, defaultSecretsArray);
await RemoteBuilder.UploadArtifacts(buildUid, buildParameters, branchName, defaultSecretsArray);
if (this.SteamDeploy) {
await RemoteBuilder.DeployToSteam(buildUid, buildParameters, defaultSecretsArray);
}
} catch (error) {
core.setFailed(error);
core.error(error);
}
}
private static async SetupStep(
buildUid: string,
buildParameters: BuildParameters,
branchName: string | undefined,
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
core.info('Starting step 1/4 (clone and restore cache)');
await AWSBuildPlatform.runBuild(
buildUid,
buildParameters.awsStackName,
'alpine/git',
[
'-c',
`apk update;
apk add unzip;
apk add git-lfs;
apk add jq;
# Get source repo for project to be built and game-ci repo for utilities
git clone https://${buildParameters.githubToken}@github.com/${
process.env.GITHUB_REPOSITORY
}.git ${buildUid}/${repositoryDirectoryName} -q
git clone https://${buildParameters.githubToken}@github.com/game-ci/unity-builder.git ${buildUid}/builder -q
git clone https://${buildParameters.githubToken}@github.com/game-ci/steam-deploy.git ${buildUid}/steam -q
cd /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/
git checkout $GITHUB_SHA
cd /${efsDirectoryName}/
# Look for usable cache
if [ ! -d ${cacheDirectoryName} ]; then
mkdir ${cacheDirectoryName}
fi
cd ${cacheDirectoryName}
if [ ! -d "${branchName}" ]; then
mkdir "${branchName}"
fi
cd "${branchName}"
echo ''
echo "Cached Libraries for ${branchName} from previous builds:"
ls
echo ''
ls "/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}"
libDir="/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}/Library"
if [ -d "$libDir" ]; then
rm -r "$libDir"
echo "Setup .gitignore to ignore Library folder and remove it from builds"
fi
echo 'Checking cache'
# Restore cache
latest=$(ls -t | head -1)
if [ ! -z "$latest" ]; then
echo "Library cache exists from build $latest from ${branchName}"
echo 'Creating empty Library folder for cache'
mkdir $libDir
unzip -q $latest -d $libDir
# purge cache
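# (the rm below is emitted commented out unless PURGE_REMOTE_BUILDER_CACHE is set)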
${process.env.PURGE_REMOTE_BUILDER_CACHE === undefined ? '#' : ''} rm -r $libDir
else
echo 'Cache does not exist'
fi
# Print out important directories
echo ''
echo 'Repo:'
ls /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/
echo ''
echo 'Project:'
ls /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}
echo ''
echo 'Library:'
ls /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}/Library/
echo ''
`,
],
`/${efsDirectoryName}`,
`/${efsDirectoryName}/`,
[
{
name: 'GITHUB_SHA',
value: process.env.GITHUB_SHA || '',
},
],
defaultSecretsArray,
);
}
private static async BuildStep(
buildUid: string,
buildParameters: BuildParameters,
baseImage: any,
defaultSecretsArray: any[],
) {
const buildSecrets = new Array();
buildSecrets.push(...defaultSecretsArray);
if (process.env.UNITY_LICENSE)
buildSecrets.push({
ParameterKey: 'UnityLicense',
EnvironmentVariable: 'UNITY_LICENSE',
ParameterValue: process.env.UNITY_LICENSE,
});
if (process.env.UNITY_EMAIL)
buildSecrets.push({
ParameterKey: 'UnityEmail',
EnvironmentVariable: 'UNITY_EMAIL',
ParameterValue: process.env.UNITY_EMAIL,
});
if (process.env.UNITY_PASSWORD)
buildSecrets.push({
ParameterKey: 'UnityPassword',
EnvironmentVariable: 'UNITY_PASSWORD',
ParameterValue: process.env.UNITY_PASSWORD,
});
if (process.env.UNITY_SERIAL)
buildSecrets.push({
ParameterKey: 'UnitySerial',
EnvironmentVariable: 'UNITY_SERIAL',
ParameterValue: process.env.UNITY_SERIAL,
});
if (buildParameters.androidKeystoreBase64)
buildSecrets.push({
ParameterKey: 'AndroidKeystoreBase64',
EnvironmentVariable: 'ANDROID_KEYSTORE_BASE64',
ParameterValue: buildParameters.androidKeystoreBase64,
});
if (buildParameters.androidKeystorePass)
buildSecrets.push({
ParameterKey: 'AndroidKeystorePass',
EnvironmentVariable: 'ANDROID_KEYSTORE_PASS',
ParameterValue: buildParameters.androidKeystorePass,
});
if (buildParameters.androidKeyaliasPass)
buildSecrets.push({
ParameterKey: 'AndroidKeyAliasPass',
EnvironmentVariable: 'ANDROID_KEYALIAS_PASS',
ParameterValue: buildParameters.androidKeyaliasPass,
});
core.info('Starting step 2/4 (build Unity project)');
await AWSBuildPlatform.runBuild(
buildUid,
buildParameters.awsStackName,
baseImage.toString(),
[
'-c',
`
cp -r /${efsDirectoryName}/${buildUid}/builder/dist/default-build-script/ /UnityBuilderAction;
cp -r /${efsDirectoryName}/${buildUid}/builder/dist/entrypoint.sh /entrypoint.sh;
cp -r /${efsDirectoryName}/${buildUid}/builder/dist/steps/ /steps;
chmod -R +x /entrypoint.sh;
chmod -R +x /steps;
/entrypoint.sh;
`,
],
`/${efsDirectoryName}`,
`/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/`,
[
{
name: 'ContainerMemory',
value: buildParameters.remoteBuildMemory,
},
{
name: 'ContainerCpu',
value: buildParameters.remoteBuildCpu,
},
{
name: 'GITHUB_WORKSPACE',
value: `/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/`,
},
{
name: 'PROJECT_PATH',
value: buildParameters.projectPath,
},
{
name: 'BUILD_PATH',
value: buildParameters.buildPath,
},
{
name: 'BUILD_FILE',
value: buildParameters.buildFile,
},
{
name: 'BUILD_NAME',
value: buildParameters.buildName,
},
{
name: 'BUILD_METHOD',
value: buildParameters.buildMethod,
},
{
name: 'CUSTOM_PARAMETERS',
value: buildParameters.customParameters,
},
{
name: 'BUILD_TARGET',
value: buildParameters.platform,
},
{
name: 'ANDROID_VERSION_CODE',
value: buildParameters.androidVersionCode.toString(),
},
{
name: 'ANDROID_KEYSTORE_NAME',
value: buildParameters.androidKeystoreName,
},
{
name: 'ANDROID_KEYALIAS_NAME',
value: buildParameters.androidKeyaliasName,
},
],
buildSecrets,
);
}
private static async CompressionStep(
buildUid: string,
buildParameters: BuildParameters,
branchName: string | undefined,
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
core.info('Starting step 3/4 (build compression)');
// Cleanup
await AWSBuildPlatform.runBuild(
buildUid,
buildParameters.awsStackName,
'alpine',
[
'-c',
`
apk update
apk add zip
cd Library
zip -r lib-${buildUid}.zip .*
mv lib-${buildUid}.zip /${efsDirectoryName}/${cacheDirectoryName}/${branchName}/lib-${buildUid}.zip
cd ../../
zip -r build-${buildUid}.zip ${buildParameters.buildPath}/*
mv build-${buildUid}.zip /${efsDirectoryName}/${buildUid}/build-${buildUid}.zip
`,
],
`/${efsDirectoryName}`,
`/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}`,
[
{
name: 'GITHUB_SHA',
value: process.env.GITHUB_SHA || '',
},
],
defaultSecretsArray,
);
core.info('Compression step complete');
}
private static async UploadArtifacts(
buildUid: string,
buildParameters: BuildParameters,
branchName: string | undefined,
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
core.info('Starting step 4/4 (upload build to S3)');
await AWSBuildPlatform.runBuild(
buildUid,
buildParameters.awsStackName,
'amazon/aws-cli',
[
'-c',
`
aws s3 cp ${buildUid}/build-${buildUid}.zip s3://game-ci-storage/
# no need to upload Library cache for now
# aws s3 cp /${efsDirectoryName}/${cacheDirectoryName}/${branchName}/lib-${buildUid}.zip s3://game-ci-storage/
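# keep the build folder on EFS when a Steam deploy follows; otherwise clean it up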
${this.SteamDeploy ? '#' : ''} rm -r ${buildUid}
`,
],
`/${efsDirectoryName}`,
`/${efsDirectoryName}/`,
[
{
name: 'GITHUB_SHA',
value: process.env.GITHUB_SHA || '',
},
{
name: 'AWS_DEFAULT_REGION',
value: process.env.AWS_DEFAULT_REGION || '',
},
],
[
{
ParameterKey: 'AWSAccessKeyID',
EnvironmentVariable: 'AWS_ACCESS_KEY_ID',
ParameterValue: process.env.AWS_ACCESS_KEY_ID || '',
},
{
ParameterKey: 'AWSSecretAccessKey',
EnvironmentVariable: 'AWS_SECRET_ACCESS_KEY',
ParameterValue: process.env.AWS_SECRET_ACCESS_KEY || '',
},
...defaultSecretsArray,
],
);
}
private static async DeployToSteam(
buildUid: string,
buildParameters: BuildParameters,
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
core.info('Starting steam deployment');
await AWSBuildPlatform.runBuild(
buildUid,
buildParameters.awsStackName,
'cm2network/steamcmd:root',
[
'-c',
`
ls
ls /
cp -r /${efsDirectoryName}/${buildUid}/steam/action/entrypoint.sh /entrypoint.sh;
cp -r /${efsDirectoryName}/${buildUid}/steam/action/steps/ /steps;
chmod -R +x /entrypoint.sh;
chmod -R +x /steps;
/entrypoint.sh;
rm -r /${efsDirectoryName}/${buildUid}
`,
],
`/${efsDirectoryName}`,
`/${efsDirectoryName}/${buildUid}/steam/action/`,
[
{
name: 'GITHUB_SHA',
value: process.env.GITHUB_SHA || '',
},
],
[
{
EnvironmentVariable: 'INPUT_APPID',
ParameterKey: 'appId',
ParameterValue: process.env.APP_ID || '',
},
{
EnvironmentVariable: 'INPUT_BUILDDESCRIPTION',
ParameterKey: 'buildDescription',
ParameterValue: process.env.BUILD_DESCRIPTION || '',
},
{
EnvironmentVariable: 'INPUT_ROOTPATH',
ParameterKey: 'rootPath',
ParameterValue: process.env.ROOT_PATH || '',
},
{
EnvironmentVariable: 'INPUT_RELEASEBRANCH',
ParameterKey: 'releaseBranch',
ParameterValue: process.env.RELEASE_BRANCH || '',
},
{
EnvironmentVariable: 'INPUT_LOCALCONTENTSERVER',
ParameterKey: 'localContentServer',
ParameterValue: process.env.LOCAL_CONTENT_SERVER || '',
},
{
EnvironmentVariable: 'INPUT_PREVIEWENABLED',
ParameterKey: 'previewEnabled',
ParameterValue: process.env.PREVIEW_ENABLED || '',
},
...defaultSecretsArray,
],
);
}
}
export default RemoteBuilder;

View File

@ -2,7 +2,7 @@ import * as core from '@actions/core';
import { exec } from '@actions/exec';
class System {
static async run(command, arguments_: any = [], options = {}) {
static async run(command, arguments_: any = [], options = {}, shouldLog = true) {
let result = '';
let error = '';
let debug = '';
@ -20,15 +20,15 @@ class System {
};
const showOutput = () => {
if (debug !== '') {
if (debug !== '' && shouldLog) {
core.debug(debug);
}
if (result !== '') {
if (result !== '' && shouldLog) {
core.info(result);
}
if (error !== '') {
if (error !== '' && shouldLog) {
core.warning(error);
}
};

BIN
test-project/Assets/LFS_Test_File.jpg (Stored with Git LFS) Executable file

Binary file not shown.

View File

@ -1,12 +1,13 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": false, /* Re-enable after fixing compatibility */ /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
"experimentalDecorators": true,
"target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
"outDir": "./lib" /* Redirect output structure to the directory. */,
"rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,
"strict": true /* Enable all strict type-checking options. */,
"noImplicitAny": false /* Re-enable after fixing compatibility */ /* Raise error on expressions and declarations with an implied 'any' type. */,
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}

yarn.lock (528 changes)
View File

@ -344,6 +344,18 @@
exec-sh "^0.3.2"
minimist "^1.2.0"
"@cspotcode/source-map-consumer@0.8.0":
version "0.8.0"
resolved "https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b"
integrity sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg==
"@cspotcode/source-map-support@0.7.0":
version "0.7.0"
resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz#4789840aa859e46d2f3173727ab707c66bf344f5"
integrity sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA==
dependencies:
"@cspotcode/source-map-consumer" "0.8.0"
"@eslint/eslintrc@^0.4.0":
version "0.4.0"
resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.0.tgz"
@ -546,6 +558,17 @@
"@types/yargs" "^15.0.0"
chalk "^4.0.0"
"@jest/types@^27.4.2":
version "27.4.2"
resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.4.2.tgz#96536ebd34da6392c2b7c7737d693885b5dd44a5"
integrity sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==
dependencies:
"@types/istanbul-lib-coverage" "^2.0.0"
"@types/istanbul-reports" "^3.0.0"
"@types/node" "*"
"@types/yargs" "^16.0.0"
chalk "^4.0.0"
"@kubernetes/client-node@0.10.2":
version "0.10.2"
resolved "https://registry.npmjs.org/@kubernetes/client-node/-/client-node-0.10.2.tgz"
@ -566,6 +589,34 @@
underscore "^1.9.1"
ws "^6.1.0"
"@kubernetes/client-node@^0.14.3":
version "0.14.3"
resolved "https://registry.yarnpkg.com/@kubernetes/client-node/-/client-node-0.14.3.tgz#5ed9b88873419080547f22cb74eb502bf6671fca"
integrity sha512-9hHGDNm2JEFQcRTpDxVoAVr0fowU+JH/l5atCXY9VXwvFM18pW5wr2LzLP+Q2Rh+uQv7Moz4gEjEKSCgVKykEQ==
dependencies:
"@types/js-yaml" "^3.12.1"
"@types/node" "^10.12.0"
"@types/request" "^2.47.1"
"@types/stream-buffers" "^3.0.3"
"@types/tar" "^4.0.3"
"@types/underscore" "^1.8.9"
"@types/ws" "^6.0.1"
byline "^5.0.0"
execa "1.0.0"
isomorphic-ws "^4.0.1"
js-yaml "^3.13.1"
jsonpath-plus "^0.19.0"
openid-client "^4.1.1"
request "^2.88.0"
rfc4648 "^1.3.0"
shelljs "^0.8.2"
stream-buffers "^3.0.2"
tar "^6.0.2"
tmp-promise "^3.0.2"
tslib "^1.9.3"
underscore "^1.9.1"
ws "^7.3.1"
"@nodelib/fs.scandir@2.1.4":
version "2.1.4"
resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz"
@ -775,6 +826,11 @@
resolved "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz"
integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==
"@sindresorhus/is@^4.0.0":
version "4.0.1"
resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.0.1.tgz#d26729db850fa327b7cacc5522252194404226f5"
integrity sha512-Qm9hBEBu18wt1PO2flE7LPb30BHMQt1eQgbV76YntdNk73XZGpn3izvGTYxbGgzXKgbCjiia0uxTd3aTNQrY/g==
"@sinonjs/commons@^1.7.0":
version "1.8.3"
resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz"
@ -796,6 +852,33 @@
dependencies:
defer-to-connect "^1.0.1"
"@szmarczak/http-timer@^4.0.5":
version "4.0.5"
resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.5.tgz#bfbd50211e9dfa51ba07da58a14cdfd333205152"
integrity sha512-PyRA9sm1Yayuj5OIoJ1hGt2YISX45w9WcFbh6ddT0Z/0yaFxOtGLInr4jUfU1EAFVs0Yfyfev4RNwBlUaHdlDQ==
dependencies:
defer-to-connect "^2.0.0"
"@tsconfig/node10@^1.0.7":
version "1.0.8"
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9"
integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==
"@tsconfig/node12@^1.0.7":
version "1.0.9"
resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.9.tgz#62c1f6dee2ebd9aead80dc3afa56810e58e1a04c"
integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==
"@tsconfig/node14@^1.0.0":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.1.tgz#95f2d167ffb9b8d2068b0b235302fafd4df711f2"
integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==
"@tsconfig/node16@^1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e"
integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==
"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.7":
version "7.1.14"
resolved "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.14.tgz"
@ -829,15 +912,25 @@
dependencies:
"@babel/types" "^7.3.0"
"@types/cacheable-request@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.1.tgz#5d22f3dded1fd3a84c0bbeb5039a7419c2c91976"
integrity sha512-ykFq2zmBGOCbpIXtoVbz4SKY5QriWPh3AjyU4G74RYbtt5yOc5OfaY75ftjg7mikMOla1CTGpX3lLbuJh8DTrQ==
dependencies:
"@types/http-cache-semantics" "*"
"@types/keyv" "*"
"@types/node" "*"
"@types/responselike" "*"
"@types/caseless@*":
version "0.12.2"
resolved "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz"
integrity sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==
"@types/got@^9.6.9":
version "9.6.11"
resolved "https://registry.npmjs.org/@types/got/-/got-9.6.11.tgz"
integrity sha512-dr3IiDNg5TDesGyuwTrN77E1Cd7DCdmCFtEfSGqr83jMMtcwhf/SGPbN2goY4JUWQfvxwY56+e5tjfi+oXeSdA==
version "9.6.12"
resolved "https://registry.yarnpkg.com/@types/got/-/got-9.6.12.tgz#fd42a6e1f5f64cd6bb422279b08c30bb5a15a56f"
integrity sha512-X4pj/HGHbXVLqTpKjA2ahI4rV/nNBc9mGO2I/0CgAra+F2dKgMXnENv2SRpemScBzBAI4vMelIVYViQxlSE6xA==
dependencies:
"@types/node" "*"
"@types/tough-cookie" "*"
@ -850,6 +943,11 @@
dependencies:
"@types/node" "*"
"@types/http-cache-semantics@*":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.0.tgz#9140779736aa2655635ee756e2467d787cfe8a2a"
integrity sha512-c3Xy026kOF7QOTn00hbIllV1dLR9hG9NkSrLQgCVs8NF6sBU+VGWjD3wLPhmh1TYAc7ugCFsvHYMN4VcBN1U1A==
"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1":
version "2.0.3"
resolved "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz"
@ -869,13 +967,13 @@
dependencies:
"@types/istanbul-lib-report" "*"
"@types/jest@^26.0.15":
version "26.0.22"
resolved "https://registry.npmjs.org/@types/jest/-/jest-26.0.22.tgz"
integrity sha512-eeWwWjlqxvBxc4oQdkueW5OF/gtfSceKk4OnOAGlUSwS/liBRtZppbJuz1YkgbrbfGOoeBHun9fOvXnjNwrSOw==
"@types/jest@^27.0.3":
version "27.0.3"
resolved "https://registry.yarnpkg.com/@types/jest/-/jest-27.0.3.tgz#0cf9dfe9009e467f70a342f0f94ead19842a783a"
integrity sha512-cmmwv9t7gBYt7hNKH5Spu7Kuu/DotGa+Ff+JGRKZ4db5eh8PnKS4LuebJ3YLUoyOyIHraTGyULn23YtEAm0VSg==
dependencies:
jest-diff "^26.0.0"
pretty-format "^26.0.0"
jest-diff "^27.0.0"
pretty-format "^27.0.0"
"@types/js-yaml@^3.12.1":
version "3.12.6"
@ -892,6 +990,20 @@
resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz"
integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4=
"@types/keyv@*":
version "3.1.1"
resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.1.tgz#e45a45324fca9dab716ab1230ee249c9fb52cfa7"
integrity sha512-MPtoySlAZQ37VoLaPcTHCu1RWJ4llDkULYZIzOYxlhxBqYPB0RsRlmMU0R6tahtFe27mIdkHV+551ZWV4PLmVw==
dependencies:
"@types/node" "*"
"@types/minipass@*":
version "2.2.0"
resolved "https://registry.yarnpkg.com/@types/minipass/-/minipass-2.2.0.tgz#51ad404e8eb1fa961f75ec61205796807b6f9651"
integrity sha512-wuzZksN4w4kyfoOv/dlpov4NOunwutLA/q7uc00xU02ZyUY+aoM5PWIXEKBMnm0NHd4a+N71BMjq+x7+2Af1fg==
dependencies:
"@types/node" "*"
"@types/node@*", "@types/node@>= 8", "@types/node@^14.14.9":
version "14.14.41"
resolved "https://registry.npmjs.org/@types/node/-/node-14.14.41.tgz"
@ -922,6 +1034,13 @@
"@types/tough-cookie" "*"
form-data "^2.5.0"
"@types/responselike@*", "@types/responselike@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29"
integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==
dependencies:
"@types/node" "*"
"@types/semver@^7.3.5":
version "7.3.9"
resolved "https://registry.npmjs.org/@types/semver/-/semver-7.3.9.tgz"
@ -932,6 +1051,21 @@
resolved "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.0.tgz"
integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==
"@types/stream-buffers@^3.0.3":
version "3.0.3"
resolved "https://registry.yarnpkg.com/@types/stream-buffers/-/stream-buffers-3.0.3.tgz#34e565bf64e3e4bdeee23fd4aa58d4636014a02b"
integrity sha512-NeFeX7YfFZDYsCfbuaOmFQ0OjSmHreKBpp7MQ4alWQBHeh2USLsj7qyMyn9t82kjqIX516CR/5SRHnARduRtbQ==
dependencies:
"@types/node" "*"
"@types/tar@^4.0.3":
version "4.0.4"
resolved "https://registry.yarnpkg.com/@types/tar/-/tar-4.0.4.tgz#d680de60855e7778a51c672b755869a3b8d2889f"
integrity sha512-0Xv+xcmkTsOZdIF4yCnd7RkOOyfyqPaqJ7RZFKnwdxfDbkN3eAAE9sHl8zJFqBz4VhxolW9EErbjR1oyH7jK2A==
dependencies:
"@types/minipass" "*"
"@types/node" "*"
"@types/tough-cookie@*":
version "4.0.0"
resolved "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.0.tgz"
@ -961,6 +1095,13 @@
dependencies:
"@types/yargs-parser" "*"
"@types/yargs@^16.0.0":
version "16.0.4"
resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977"
integrity sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==
dependencies:
"@types/yargs-parser" "*"
"@typescript-eslint/eslint-plugin@^4.20.0":
version "4.22.0"
resolved "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.22.0.tgz"
@ -1059,6 +1200,11 @@ acorn-walk@^7.1.1:
resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz"
integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
acorn-walk@^8.1.1:
version "8.2.0"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
acorn@^7.1.1, acorn@^7.4.0:
version "7.4.1"
resolved "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz"
@ -1069,7 +1215,12 @@ acorn@^8.1.0:
resolved "https://registry.npmjs.org/acorn/-/acorn-8.1.1.tgz"
integrity sha512-xYiIVjNuqtKXMxlRMDc6mZUhXehod4a3gbZ1qRlM7icK4EbxUFNLhWoPblCvFtB2Y9CIqHP3CF/rdxLItaQv8g==
aggregate-error@^3.0.0:
acorn@^8.4.1:
version "8.7.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf"
integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==
aggregate-error@^3.0.0, aggregate-error@^3.1.0:
version "3.1.0"
resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz"
integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==
@ -1109,7 +1260,7 @@ ansi-escapes@^4.2.1, ansi-escapes@^4.3.0:
dependencies:
type-fest "^0.21.3"
ansi-regex@^5.0.0:
ansi-regex@^5.0.0, ansi-regex@^5.0.1:
version "5.0.1"
resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"
integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
@ -1133,6 +1284,11 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0:
dependencies:
color-convert "^2.0.1"
ansi-styles@^5.0.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b"
integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==
ansi-styles@^6.0.0:
version "6.1.0"
resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.1.0.tgz"
@ -1154,6 +1310,11 @@ anymatch@^3.0.3:
normalize-path "^3.0.0"
picomatch "^2.0.4"
arg@^4.1.0:
version "4.1.3"
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz"
@ -1233,6 +1394,11 @@ async-limiter@~1.0.0:
resolved "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz"
integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==
async-wait-until@^2.0.7:
version "2.0.7"
resolved "https://registry.yarnpkg.com/async-wait-until/-/async-wait-until-2.0.7.tgz#ed4ccfe076850105c1de555381630b9fad882f5d"
integrity sha512-SjHxM2f5ev4o87gYppr8HmWPjOHw06Pg5KZvkSl6FMqa3TTHzDGIWCZx61XWjxO5ArPcZBuJYbAa809FNyx3QQ==
asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz"
@ -1336,7 +1502,7 @@ babel-preset-jest@^26.6.2:
balanced-match@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
base-64@^1.0.0:
@ -1386,7 +1552,7 @@ before-after-hook@^2.2.0:
brace-expansion@^1.1.7:
version "1.1.11"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz"
integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
dependencies:
balanced-match "^1.0.0"
@ -1450,7 +1616,12 @@ btoa-lite@^1.0.0:
resolved "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz"
integrity sha1-M3dm2hWAEhD92VbCLpxokaudAzc=
buffer-from@1.x, buffer-from@^1.0.0:
buffer-from@1.x:
version "1.1.2"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
buffer-from@^1.0.0:
version "1.1.1"
resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
@ -1464,6 +1635,11 @@ buffer@4.9.2:
ieee754 "^1.1.4"
isarray "^1.0.0"
byline@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1"
integrity sha1-dBxSFkaOrcRXsDQQEYrXfejB3bE=
cache-base@^1.0.1:
version "1.0.1"
resolved "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz"
@ -1479,6 +1655,11 @@ cache-base@^1.0.1:
union-value "^1.0.0"
unset-value "^1.0.0"
cacheable-lookup@^5.0.3:
version "5.0.4"
resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005"
integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==
cacheable-request@^6.0.0:
version "6.1.0"
resolved "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz"
@ -1492,6 +1673,19 @@ cacheable-request@^6.0.0:
normalize-url "^4.1.0"
responselike "^1.0.2"
cacheable-request@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.1.tgz#062031c2856232782ed694a257fa35da93942a58"
integrity sha512-lt0mJ6YAnsrBErpTMWeu5kl/tg9xMAWjavYTN6VQXM1A/teBITuNcccXsCxF0tDQQJf9DfAaX5O4e0zp0KlfZw==
dependencies:
clone-response "^1.0.2"
get-stream "^5.1.0"
http-cache-semantics "^4.0.0"
keyv "^4.0.0"
lowercase-keys "^2.0.0"
normalize-url "^4.1.0"
responselike "^2.0.0"
call-bind@^1.0.0, call-bind@^1.0.2:
version "1.0.2"
resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz"
@ -1554,6 +1748,11 @@ char-regex@^1.0.2:
resolved "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz"
integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
chownr@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece"
integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==
ci-info@^2.0.0:
version "2.0.0"
resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz"
@ -1684,6 +1883,18 @@ combined-stream@^1.0.6, combined-stream@~1.0.6:
dependencies:
delayed-stream "~1.0.0"
commander-ts@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/commander-ts/-/commander-ts-0.2.0.tgz#14391337c1c725399cdfca5717da8e4fd0688eda"
integrity sha512-9XaUF3/3nmVtkDmAkijjhgEcwrMwKewaAJtN+GyTJBG3CJ5DfGB/JsXCVZcBW6SZB+QqiJxbK3e2/62tweMO8g==
dependencies:
commander "^7.2.0"
commander@^7.2.0:
version "7.2.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7"
integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==
commander@^8.3.0:
version "8.3.0"
resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz"
@ -1696,7 +1907,7 @@ component-emitter@^1.2.1:
concat-map@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"
integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
contains-path@^0.1.0:
@ -1721,6 +1932,18 @@ core-util-is@1.0.2:
resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
create-require@^1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
cross-env@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
integrity sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==
dependencies:
cross-spawn "^7.0.1"
cross-spawn@^6.0.0:
version "6.0.5"
resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz"
@ -1732,7 +1955,7 @@ cross-spawn@^6.0.0:
shebang-command "^1.2.0"
which "^1.2.9"
cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
version "7.0.3"
resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
@ -1810,6 +2033,13 @@ decompress-response@^3.3.0:
dependencies:
mimic-response "^1.0.0"
decompress-response@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc"
integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==
dependencies:
mimic-response "^3.1.0"
dedent@^0.7.0:
version "0.7.0"
resolved "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz"
@ -1830,6 +2060,11 @@ defer-to-connect@^1.0.1:
resolved "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz"
integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==
defer-to-connect@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587"
integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==
define-properties@^1.1.3:
version "1.1.3"
resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz"
@ -1884,6 +2119,16 @@ diff-sequences@^26.6.2:
resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-26.6.2.tgz"
integrity sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q==
diff-sequences@^27.4.0:
version "27.4.0"
resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.4.0.tgz#d783920ad8d06ec718a060d00196dfef25b132a5"
integrity sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==
diff@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
dir-glob@^3.0.1:
version "3.0.1"
resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz"
@ -2258,7 +2503,7 @@ exec-sh@^0.3.2:
resolved "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz"
integrity sha512-nQn+hI3yp+oD0huYhKwvYI32+JFeq+XkNcD1GAo3Y/MjxsfVGmrrzrnzjWiNY6f+pUCP440fThsFh5gZrRAU/w==
execa@^1.0.0:
execa@1.0.0, execa@^1.0.0:
version "1.0.0"
resolved "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz"
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
@ -2508,9 +2753,16 @@ fragment-cache@^0.2.1:
dependencies:
map-cache "^0.2.2"
fs-minipass@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==
dependencies:
minipass "^3.0.0"
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz"
integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
fsevents@^2.1.2:
@ -2520,7 +2772,7 @@ fsevents@^2.1.2:
function-bind@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz"
integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
functional-red-black-tree@^1.0.1:
@ -2591,9 +2843,9 @@ glob-parent@^5.0.0, glob-parent@^5.1.0:
is-glob "^4.0.1"
glob@^7.0.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4:
version "7.2.0"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023"
integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==
version "7.1.6"
resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz"
integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
@ -2633,6 +2885,23 @@ globby@^11.0.1:
merge2 "^1.3.0"
slash "^3.0.0"
got@^11.8.0:
version "11.8.2"
resolved "https://registry.yarnpkg.com/got/-/got-11.8.2.tgz#7abb3959ea28c31f3576f1576c1effce23f33599"
integrity sha512-D0QywKgIe30ODs+fm8wMZiAcZjypcCodPNuMz5H9Mny7RJ+IjJ10BdmGW7OM7fHXP+O7r6ZwapQ/YQmMSvB0UQ==
dependencies:
"@sindresorhus/is" "^4.0.0"
"@szmarczak/http-timer" "^4.0.5"
"@types/cacheable-request" "^6.0.1"
"@types/responselike" "^1.0.0"
cacheable-lookup "^5.0.3"
cacheable-request "^7.0.1"
decompress-response "^6.0.0"
http2-wrapper "^1.0.0-beta.5.2"
lowercase-keys "^2.0.0"
p-cancelable "^2.0.0"
responselike "^2.0.0"
got@^9.6.0:
version "9.6.0"
resolved "https://registry.npmjs.org/got/-/got-9.6.0.tgz"
@ -2726,7 +2995,7 @@ has-values@^1.0.0:
has@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz"
integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
dependencies:
function-bind "^1.1.1"
@ -2762,6 +3031,14 @@ http-signature@~1.2.0:
jsprim "^1.2.2"
sshpk "^1.7.0"
http2-wrapper@^1.0.0-beta.5.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d"
integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==
dependencies:
quick-lru "^5.1.1"
resolve-alpn "^1.0.0"
human-signals@^1.1.1:
version "1.1.1"
resolved "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz"
@ -2837,7 +3114,7 @@ indent-string@^4.0.0:
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz"
integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
dependencies:
once "^1.3.0"
@ -2845,12 +3122,12 @@ inflight@^1.0.4:
inherits@2:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
interpret@^1.0.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e"
resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz"
integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==
is-accessor-descriptor@^0.1.6:
@ -3239,7 +3516,7 @@ jest-config@^26.6.3:
micromatch "^4.0.2"
pretty-format "^26.6.2"
jest-diff@^26.0.0, jest-diff@^26.6.2:
jest-diff@^26.6.2:
version "26.6.2"
resolved "https://registry.npmjs.org/jest-diff/-/jest-diff-26.6.2.tgz"
integrity sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA==
@ -3249,6 +3526,16 @@ jest-diff@^26.0.0, jest-diff@^26.6.2:
jest-get-type "^26.3.0"
pretty-format "^26.6.2"
jest-diff@^27.0.0:
version "27.4.2"
resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.4.2.tgz#786b2a5211d854f848e2dcc1e324448e9481f36f"
integrity sha512-ujc9ToyUZDh9KcqvQDkk/gkbf6zSaeEg9AiBxtttXW59H/AcqEYp1ciXAtJp+jXWva5nAf/ePtSsgWwE5mqp4Q==
dependencies:
chalk "^4.0.0"
diff-sequences "^27.4.0"
jest-get-type "^27.4.0"
pretty-format "^27.4.2"
jest-docblock@^26.0.0:
version "26.0.0"
resolved "https://registry.npmjs.org/jest-docblock/-/jest-docblock-26.0.0.tgz"
@ -3297,6 +3584,11 @@ jest-get-type@^26.3.0:
resolved "https://registry.npmjs.org/jest-get-type/-/jest-get-type-26.3.0.tgz"
integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig==
jest-get-type@^27.4.0:
version "27.4.0"
resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.4.0.tgz#7503d2663fffa431638337b3998d39c5e928e9b5"
integrity sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==
jest-haste-map@^26.6.2:
version "26.6.2"
resolved "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-26.6.2.tgz"
@ -3572,6 +3864,13 @@ jose@^1.27.1:
dependencies:
"@panva/asn1.js" "^1.0.0"
jose@^2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/jose/-/jose-2.0.5.tgz#29746a18d9fff7dcf9d5d2a6f62cb0c7cd27abd3"
integrity sha512-BAiDNeDKTMgk4tvD0BbxJ8xHEHBZgpeRZ1zGPPsitSyMgjoMWiLGYAE7H7NpP5h0lPppQajQs871E8NHUrzVPA==
dependencies:
"@panva/asn1.js" "^1.0.0"
js-tokens@^4.0.0:
version "4.0.0"
resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz"
@ -3632,6 +3931,11 @@ json-buffer@3.0.0:
resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz"
integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
json-buffer@3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
json-parse-even-better-errors@^2.3.0:
version "2.3.1"
resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz"
@ -3703,6 +4007,13 @@ keyv@^3.0.0:
dependencies:
json-buffer "3.0.0"
keyv@^4.0.0:
version "4.0.3"
resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.0.3.tgz#4f3aa98de254803cafcd2896734108daa35e4254"
integrity sha512-zdGa2TOpSZPq5mU6iowDARnMBZgtCqJ11dJROFi6tg6kTn4nuUdU09lFyLFSaHrWqpIJ+EBq4E8/Dc0Vx5vLdA==
dependencies:
json-buffer "3.0.1"
kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
version "3.2.2"
resolved "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz"
@ -3915,7 +4226,7 @@ make-dir@^3.0.0:
dependencies:
semver "^6.0.0"
make-error@1.x, make-error@^1.3.6:
make-error@1.x, make-error@^1.1.1, make-error@^1.3.6:
version "1.3.6"
resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz"
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
@ -3998,9 +4309,14 @@ mimic-response@^1.0.0, mimic-response@^1.0.1:
resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz"
integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
mimic-response@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9"
integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==
minimatch@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz"
integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
dependencies:
brace-expansion "^1.1.7"
@ -4010,6 +4326,21 @@ minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5:
resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz"
integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
minipass@^3.0.0:
version "3.1.3"
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd"
integrity sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==
dependencies:
yallist "^4.0.0"
minizlib@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==
dependencies:
minipass "^3.0.0"
yallist "^4.0.0"
mixin-deep@^1.2.0:
version "1.3.2"
resolved "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz"
@ -4018,7 +4349,7 @@ mixin-deep@^1.2.0:
for-in "^1.0.2"
is-extendable "^1.0.1"
mkdirp@1.x:
mkdirp@1.x, mkdirp@^1.0.3:
version "1.0.4"
resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz"
integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
@ -4218,7 +4549,7 @@ octokit-pagination-methods@^1.1.0:
resolved "https://registry.npmjs.org/octokit-pagination-methods/-/octokit-pagination-methods-1.1.0.tgz"
integrity sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ==
oidc-token-hash@^5.0.0:
oidc-token-hash@^5.0.0, oidc-token-hash@^5.0.1:
version "5.0.1"
resolved "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.1.tgz"
integrity sha512-EvoOtz6FIEBzE+9q253HsLCVRiK/0doEJ2HCvvqMQb3dHZrP3WlJKYtJ55CRTw4jmYomzH4wkPuCj/I3ZvpKxQ==
@ -4252,6 +4583,19 @@ openid-client@^3.14.0:
oidc-token-hash "^5.0.0"
p-any "^3.0.0"
openid-client@^4.1.1:
version "4.7.3"
resolved "https://registry.yarnpkg.com/openid-client/-/openid-client-4.7.3.tgz#ea4f6f9ff6203dfbbe3d9bb4415d5dce751b0a70"
integrity sha512-YLwZQLSjo3gdSVxw/G25ddoRp9oCpXkREZXssmenlejZQPsnTq+yQtFUcBmC7u3VVkx+gwqXZF7X0CtAAJrRRg==
dependencies:
aggregate-error "^3.1.0"
got "^11.8.0"
jose "^2.0.5"
lru-cache "^6.0.0"
make-error "^1.3.6"
object-hash "^2.0.1"
oidc-token-hash "^5.0.1"
optionator@^0.8.1:
version "0.8.3"
resolved "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz"
@ -4411,7 +4755,7 @@ path-exists@^4.0.0:
path-is-absolute@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz"
integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
path-key@^2.0.0, path-key@^2.0.1:
@ -4426,7 +4770,7 @@ path-key@^3.0.0, path-key@^3.1.0:
path-parse@^1.0.6, path-parse@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz"
integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
path-type@^2.0.0:
@ -4514,7 +4858,7 @@ prettier@^2.2.1:
resolved "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz"
integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==
pretty-format@^26.0.0, pretty-format@^26.6.2:
pretty-format@^26.6.2:
version "26.6.2"
resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-26.6.2.tgz"
integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==
@ -4524,6 +4868,16 @@ pretty-format@^26.0.0, pretty-format@^26.6.2:
ansi-styles "^4.0.0"
react-is "^17.0.1"
pretty-format@^27.0.0, pretty-format@^27.4.2:
version "27.4.2"
resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.4.2.tgz#e4ce92ad66c3888423d332b40477c87d1dac1fb8"
integrity sha512-p0wNtJ9oLuvgOQDEIZ9zQjZffK7KtyR6Si0jnXULIDwrlNF8Cuir3AZP0hHv0jmKuNN/edOnbMjnzd4uTcmWiw==
dependencies:
"@jest/types" "^27.4.2"
ansi-regex "^5.0.1"
ansi-styles "^5.0.0"
react-is "^17.0.1"
progress@^2.0.0:
version "2.0.3"
resolved "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz"
@ -4582,6 +4936,11 @@ queue-microtask@^1.2.2:
resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"
integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
quick-lru@^5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932"
integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==
react-is@^17.0.1:
version "17.0.2"
resolved "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz"
@ -4625,11 +4984,16 @@ read-pkg@^5.2.0:
rechoir@^0.6.2:
version "0.6.2"
resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"
resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz"
integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=
dependencies:
resolve "^1.1.6"
reflect-metadata@^0.1.13:
version "0.1.13"
resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.13.tgz#67ae3ca57c972a2aa1642b10fe363fe32d49dc08"
integrity sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==
regex-not@^1.0.0, regex-not@^1.0.2:
version "1.0.2"
resolved "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz"
@ -4725,6 +5089,11 @@ reserved-words@^0.1.2:
resolved "https://registry.npmjs.org/reserved-words/-/reserved-words-0.1.2.tgz"
integrity sha1-AKCUD5jNUBrqqsMWQR2a3FKzGrE=
resolve-alpn@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.1.2.tgz#30b60cfbb0c0b8dc897940fe13fe255afcdd4d28"
integrity sha512-8OyfzhAtA32LVUsJSke3auIyINcwdh5l3cvYKdKO0nvsYSKuiLfTM5i78PJswFPT8y6cPW+L1v6/hE95chcpDA==
resolve-cwd@^3.0.0:
version "3.0.0"
resolved "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz"
@ -4771,6 +5140,13 @@ responselike@^1.0.2:
dependencies:
lowercase-keys "^1.0.0"
responselike@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.0.tgz#26391bcc3174f750f9a79eacc40a12a5c42d7723"
integrity sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==
dependencies:
lowercase-keys "^2.0.0"
restore-cursor@^3.1.0:
version "3.1.0"
resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz"
@ -4789,6 +5165,11 @@ reusify@^1.0.4:
resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz"
integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
rfc4648@^1.3.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/rfc4648/-/rfc4648-1.4.0.tgz#c75b2856ad2e2d588b6ddb985d556f1f7f2a2abd"
integrity sha512-3qIzGhHlMHA6PoT6+cdPKZ+ZqtxkIvg8DZGKA5z6PQ33/uuhoJ+Ws/D/J9rXW6gXodgH8QYlz2UCl+sdUDmNIg==
rfdc@^1.3.0:
version "1.3.0"
resolved "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz"
@ -4815,7 +5196,7 @@ run-parallel@^1.1.9:
rxjs@^7.4.0:
version "7.4.0"
resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.4.0.tgz"
resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.4.0.tgz#a12a44d7eebf016f5ff2441b87f28c9a51cebc68"
integrity sha512-7SQDi7xeTMCJpqViXh8gL/lebcwlp3d831F05+9B44A4B0WfsEwUQHR64gsH1kvJ+Ep/J9K2+n1hVl1CsGN23w==
dependencies:
tslib "~2.1.0"
@ -4938,9 +5319,9 @@ shebang-regex@^3.0.0:
integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
shelljs@^0.8.2:
version "0.8.5"
resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c"
integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==
version "0.8.4"
resolved "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz"
integrity sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==
dependencies:
glob "^7.0.0"
interpret "^1.0.0"
@ -5143,6 +5524,11 @@ stealthy-require@^1.1.1:
resolved "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz"
integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=
stream-buffers@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/stream-buffers/-/stream-buffers-3.0.2.tgz#5249005a8d5c2d00b3a32e6e0a6ea209dc4f3521"
integrity sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ==
string-argv@^0.3.1:
version "0.3.1"
resolved "https://registry.npmjs.org/string-argv/-/string-argv-0.3.1.tgz"
@ -5295,6 +5681,18 @@ table@^6.0.4:
slice-ansi "^4.0.0"
string-width "^4.2.0"
tar@^6.0.2:
version "6.1.0"
resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.0.tgz#d1724e9bcc04b977b18d5c573b333a2207229a83"
integrity sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA==
dependencies:
chownr "^2.0.0"
fs-minipass "^2.0.0"
minipass "^3.0.0"
minizlib "^2.1.1"
mkdirp "^1.0.3"
yallist "^4.0.0"
terminal-link@^2.0.0:
version "2.1.1"
resolved "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz"
@ -5327,6 +5725,20 @@ through@^2.3.8:
resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz"
integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
tmp-promise@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/tmp-promise/-/tmp-promise-3.0.2.tgz#6e933782abff8b00c3119d63589ca1fb9caaa62a"
integrity sha512-OyCLAKU1HzBjL6Ev3gxUeraJNlbNingmi8IrHHEsYH8LTmEuhvYfqvhn2F/je+mjf4N58UmZ96OMEy1JanSCpA==
dependencies:
tmp "^0.2.0"
tmp@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14"
integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==
dependencies:
rimraf "^3.0.0"
tmpl@1.0.x:
version "1.0.5"
resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz"
@ -5403,10 +5815,10 @@ tr46@~0.0.3:
resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=
ts-jest@^26.4.4:
version "26.5.5"
resolved "https://registry.npmjs.org/ts-jest/-/ts-jest-26.5.5.tgz"
integrity sha512-7tP4m+silwt1NHqzNRAPjW1BswnAhopTdc2K3HEkRZjF0ZG2F/e/ypVH0xiZIMfItFtD3CX0XFbwPzp9fIEUVg==
ts-jest@^26.4.2:
version "26.5.6"
resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-26.5.6.tgz#c32e0746425274e1dfe333f43cd3c800e014ec35"
integrity sha512-rua+rCP8DxpA8b4DQD/6X2HQS8Zy/xzViVYfEs2OQu68tkCuKLV0Md8pmX55+W24uRIyAsf/BajRfxOs+R2MKA==
dependencies:
bs-logger "0.x"
buffer-from "1.x"
@ -5419,6 +5831,24 @@ ts-jest@^26.4.4:
semver "7.x"
yargs-parser "20.x"
ts-node@^10.4.0:
version "10.4.0"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.4.0.tgz#680f88945885f4e6cf450e7f0d6223dd404895f7"
integrity sha512-g0FlPvvCXSIO1JDF6S232P5jPYqBkRL9qly81ZgAOSU7rwI0stphCgd2kLiCrU9DjQCrJMWEqcNSjQL02s6d8A==
dependencies:
"@cspotcode/source-map-support" "0.7.0"
"@tsconfig/node10" "^1.0.7"
"@tsconfig/node12" "^1.0.7"
"@tsconfig/node14" "^1.0.0"
"@tsconfig/node16" "^1.0.2"
acorn "^8.4.1"
acorn-walk "^8.1.1"
arg "^4.1.0"
create-require "^1.1.0"
diff "^4.0.1"
make-error "^1.1.1"
yn "3.1.1"
tsconfig-paths@^3.9.0:
version "3.9.0"
resolved "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz"
@ -5774,7 +6204,7 @@ wrap-ansi@^7.0.0:
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
write-file-atomic@^3.0.0:
@ -5799,6 +6229,11 @@ ws@^7.2.3, ws@^7.4.4:
resolved "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz"
integrity sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==
ws@^7.3.1:
version "7.4.5"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1"
integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g==
xml-name-validator@^3.0.0:
version "3.0.0"
resolved "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz"
@ -5866,3 +6301,8 @@ yargs@^15.4.1:
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^18.1.2"
yn@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==