Cloud runner preview (#419)
parent 17faf202fb
commit f226bef5af
@@ -1,7 +1,7 @@
name: Cloud Runner
on:
push: { branches: [cloud-runner-develop, main] }
push: { branches: [cloud-runner-develop, cloud-runner-preview, main] }
# push: { branches: [main] }
# pull_request:
# paths-ignore:
@@ -29,6 +29,8 @@ jobs:
awsBuild:
name: AWS Fargate Build
if: github.event.pull_request.draft == false
env:
CLOUD_RUNNER_CLUSTER: aws
runs-on: ubuntu-latest
strategy:
fail-fast: false
@@ -96,7 +98,7 @@ jobs:
aws s3 ls game-ci-test-storage
ls /data/cache/$CACHE_KEY
ls /data/cache/$CACHE_KEY/build
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar.lz4 s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar.lz4
secrets:
- name: awsAccessKeyId
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
@@ -105,7 +107,7 @@ jobs:
- name: awsDefaultRegion
value: eu-west-2
- run: |
aws s3 cp s3://game-ci-test-storage/${{ steps.aws-fargate-unity-build.outputs.CACHE_KEY }}/build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar
aws s3 cp s3://game-ci-test-storage/${{ steps.aws-fargate-unity-build.outputs.CACHE_KEY }}/build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar.lz4 build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar.lz4
ls
- run: yarn run cli -m aws-list-all
###########################
@@ -115,11 +117,13 @@ jobs:
- uses: actions/upload-artifact@v2
with:
name: AWS Build (${{ matrix.targetPlatform }})
path: build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar
path: build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar.lz4
retention-days: 14
k8sBuilds:
name: K8s (GKE Autopilot) build for ${{ matrix.targetPlatform }} on version ${{ matrix.unityVersion }}
runs-on: ubuntu-latest
env:
CLOUD_RUNNER_CLUSTER: k8s
strategy:
fail-fast: false
matrix:
@@ -196,7 +200,7 @@ jobs:
aws s3 ls
aws s3 ls game-ci-test-storage
ls /data/cache/$CACHE_KEY
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar.lz4 s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar.lz4
secrets:
- name: awsAccessKeyId
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
@@ -205,7 +209,7 @@ jobs:
- name: awsDefaultRegion
value: eu-west-2
- run: |
aws s3 cp s3://game-ci-test-storage/${{ steps.k8s-unity-build.outputs.CACHE_KEY }}/build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar
aws s3 cp s3://game-ci-test-storage/${{ steps.k8s-unity-build.outputs.CACHE_KEY }}/build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar.lz4 build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar.lz4
ls
###########################
# Upload #
@@ -214,5 +218,5 @@ jobs:
- uses: actions/upload-artifact@v2
with:
name: K8s Build (${{ matrix.targetPlatform }})
path: build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar
path: build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar.lz4
retention-days: 14

action.yml
@@ -122,14 +122,6 @@ inputs:
default: ''
required: false
description: 'Amount of memory to assign the remote build container'
cachePushOverrideCommand:
default: ''
required: false
description: 'A command run every time a file is pushed to cache, formatted with input file path and remote cache path'
cachePullOverrideCommand:
default: ''
required: false
description: 'A command run every time before a file is being pulled from cache, formatted with request cache file and destination path'
readInputFromOverrideList:
default: ''
required: false
@@ -158,14 +150,10 @@ inputs:
default: ''
required: false
description: 'Cache key to indicate bucket for cache'
checkDependencyHealthOverride:
default: ''
watchToEnd:
default: 'true'
required: false
description: 'Use to specify a way to check depdency services health to enable resilient self-starting jobs'
startDependenciesOverride:
default: ''
required: false
description: 'Use to specify a way to start depdency services health to enable resilient self-starting jobs'
description: 'Watch build to end or leave unattended: true/false'
outputs:
volume:
description: 'The Persistent Volume (PV) where the build artifacts have been stored by Kubernetes'

@@ -10,7 +10,10 @@ namespace UnityBuilderAction.Input
{
EditorUserBuildSettings.buildAppBundle = options["customBuildPath"].EndsWith(".aab");
if (options.TryGetValue("androidKeystoreName", out string keystoreName) && !string.IsNullOrEmpty(keystoreName))
{
PlayerSettings.Android.useCustomKeystore = true;
PlayerSettings.Android.keystoreName = keystoreName;
}
if (options.TryGetValue("androidKeystorePass", out string keystorePass) && !string.IsNullOrEmpty(keystorePass))
PlayerSettings.Android.keystorePass = keystorePass;
if (options.TryGetValue("androidKeyaliasName", out string keyaliasName) && !string.IsNullOrEmpty(keyaliasName))

File diff suppressed because it is too large
File diff suppressed because one or more lines are too long

@@ -18,6 +18,7 @@ module.exports = {
transform: {
'^.+\\.ts$': 'ts-jest',
},
autoRun: false,
// Indicates whether each individual test should be reported during the run
verbose: true,

@@ -9,9 +9,6 @@ extends: {}
pre-push:
parallel: true
commands:
packages-audit:
tags: security
run: yarn audit
pre-commit:
parallel: true

@@ -12,9 +12,9 @@
"lint": "prettier --check \"src/**/*.{js,ts}\" && eslint src/**/*.ts",
"format": "prettier --write \"src/**/*.{js,ts}\"",
"cli": "yarn ts-node src/index.ts -m cli",
"gcp-secrets-tests": "cross-env cloudRunnerCluster=aws cloudRunnerTests=true readInputOverrideCommand=\"gcloud secrets versions access 1 --secret=\"{0}\"\" populateOverride=true readInputFromOverrideList=UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD yarn test -i -t \"cloud runner\"",
"gcp-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"gcloud secrets versions access 1 --secret=\"{0}\"\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
"aws-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"aws secretsmanager get-secret-value --secret-id {0}\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
"gcp-secrets-tests": "cross-env cloudRunnerCluster=aws cloudRunnerTests=true readInputOverrideCommand=\"gcp-secret-manager\" populateOverride=true readInputFromOverrideList=UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD yarn test -i -t \"cloud runner caching\"",
"gcp-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"gcp-secret-manager\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
"aws-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"aws-secret-manager\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
"cli-aws": "cross-env cloudRunnerCluster=aws yarn run test-cli",
"cli-k8s": "cross-env cloudRunnerCluster=k8s yarn run test-cli",
"test-cli": "cross-env cloudRunnerTests=true yarn ts-node src/index.ts -m cli --projectPath test-project",

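The scripts above replace the literal override commands with the shorthands "gcp-secret-manager" and "aws-secret-manager". A minimal sketch (not part of the diff) of how those shorthands are expanded, mirroring the readInputOverrideCommand logic added in cloud-runner-options.ts later in this commit; the standalone function name here is illustrative only:

```ts
// Illustrative sketch; mirrors CloudRunnerOptions.readInputOverrideCommand below.
function expandReadInputOverrideCommand(value: string): string {
  if (value === 'gcp-secret-manager') {
    // '{0}' is substituted with the secret name when the command is run.
    return 'gcloud secrets versions access 1 --secret="{0}"';
  }
  if (value === 'aws-secret-manager') {
    return 'aws secretsmanager get-secret-value --secret-id {0}';
  }
  return value || '';
}
```
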
@@ -9,6 +9,8 @@ import Versioning from './versioning';
import { GitRepoReader } from './input-readers/git-repo';
import { GithubCliReader } from './input-readers/github-cli';
import { Cli } from './cli/cli';
import GitHub from './github';
import CloudRunnerOptions from './cloud-runner/cloud-runner-options';
class BuildParameters {
public editorVersion!: string;
@@ -44,12 +46,8 @@ class BuildParameters {
public kubeStorageClass!: string;
public chownFilesTo!: string;
public customJobHooks!: string;
public cachePushOverrideCommand!: string;
public cachePullOverrideCommand!: string;
public readInputFromOverrideList!: string;
public readInputOverrideCommand!: string;
public checkDependencyHealthOverride!: string;
public startDependenciesOverride!: string;
public cacheKey!: string;
public postBuildSteps!: string;
@@ -76,7 +74,7 @@ class BuildParameters {
// Todo - Don't use process.env directly, that's what the input model class is for.
// ---
let unitySerial = '';
if (!process.env.UNITY_SERIAL && Input.githubInputEnabled) {
if (!process.env.UNITY_SERIAL && GitHub.githubInputEnabled) {
// No serial was present, so it is a personal license that we need to convert
if (!process.env.UNITY_LICENSE) {
throw new Error(`Missing Unity License File and no Serial was found. If this
@@ -93,7 +91,6 @@ class BuildParameters {
editorVersion,
customImage: Input.customImage,
unitySerial,
runnerTempPath: process.env.RUNNER_TEMP,
targetPlatform: Input.targetPlatform,
projectPath: Input.projectPath,
@@ -114,36 +111,32 @@ class BuildParameters {
sshAgent: Input.sshAgent,
gitPrivateToken: Input.gitPrivateToken || (await GithubCliReader.GetGitHubAuthToken()),
chownFilesTo: Input.chownFilesTo,
cloudRunnerCluster: Input.cloudRunnerCluster,
cloudRunnerBuilderPlatform: Input.cloudRunnerBuilderPlatform,
awsBaseStackName: Input.awsBaseStackName,
kubeConfig: Input.kubeConfig,
cloudRunnerMemory: Input.cloudRunnerMemory,
cloudRunnerCpu: Input.cloudRunnerCpu,
kubeVolumeSize: Input.kubeVolumeSize,
kubeVolume: Input.kubeVolume,
postBuildSteps: Input.postBuildSteps,
preBuildSteps: Input.preBuildSteps,
customJob: Input.customJob,
cloudRunnerCluster: CloudRunnerOptions.cloudRunnerCluster,
cloudRunnerBuilderPlatform: CloudRunnerOptions.cloudRunnerBuilderPlatform,
awsBaseStackName: CloudRunnerOptions.awsBaseStackName,
kubeConfig: CloudRunnerOptions.kubeConfig,
cloudRunnerMemory: CloudRunnerOptions.cloudRunnerMemory,
cloudRunnerCpu: CloudRunnerOptions.cloudRunnerCpu,
kubeVolumeSize: CloudRunnerOptions.kubeVolumeSize,
kubeVolume: CloudRunnerOptions.kubeVolume,
postBuildSteps: CloudRunnerOptions.postBuildSteps,
preBuildSteps: CloudRunnerOptions.preBuildSteps,
customJob: CloudRunnerOptions.customJob,
runNumber: Input.runNumber,
branch: Input.branch.replace('/head', '') || (await GitRepoReader.GetBranch()),
cloudRunnerBranch: Input.cloudRunnerBranch.split('/').reverse()[0],
cloudRunnerIntegrationTests: Input.cloudRunnerTests,
cloudRunnerBranch: CloudRunnerOptions.cloudRunnerBranch.split('/').reverse()[0],
cloudRunnerIntegrationTests: CloudRunnerOptions.cloudRunnerTests,
githubRepo: Input.githubRepo || (await GitRepoReader.GetRemote()) || 'game-ci/unity-builder',
isCliMode: Cli.isCliMode,
awsStackName: Input.awsBaseStackName,
awsStackName: CloudRunnerOptions.awsBaseStackName,
gitSha: Input.gitSha,
logId: customAlphabet(CloudRunnerConstants.alphabet, 9)(),
buildGuid: CloudRunnerBuildGuid.generateGuid(Input.runNumber, Input.targetPlatform),
customJobHooks: Input.customJobHooks(),
cachePullOverrideCommand: Input.cachePullOverrideCommand(),
cachePushOverrideCommand: Input.cachePushOverrideCommand(),
readInputOverrideCommand: Input.readInputOverrideCommand(),
readInputFromOverrideList: Input.readInputFromOverrideList(),
kubeStorageClass: Input.kubeStorageClass,
checkDependencyHealthOverride: Input.checkDependencyHealthOverride,
startDependenciesOverride: Input.startDependenciesOverride,
cacheKey: Input.cacheKey,
customJobHooks: CloudRunnerOptions.customJobHooks(),
readInputOverrideCommand: CloudRunnerOptions.readInputOverrideCommand(),
readInputFromOverrideList: CloudRunnerOptions.readInputFromOverrideList(),
kubeStorageClass: CloudRunnerOptions.kubeStorageClass,
cacheKey: CloudRunnerOptions.cacheKey,
};
}

@@ -9,6 +9,8 @@ import { AwsCliCommands } from '../cloud-runner/providers/aws/commands/aws-cli-c
import { Caching } from '../cloud-runner/remote-client/caching';
import { LfsHashing } from '../cloud-runner/services/lfs-hashing';
import { RemoteClient } from '../cloud-runner/remote-client';
import CloudRunnerOptionsReader from '../cloud-runner/services/cloud-runner-options-reader';
import GitHub from '../github';
export class Cli {
public static options;
@@ -33,7 +35,8 @@ export class Cli {
CliFunctionsRepository.PushCliFunctionSource(RemoteClient);
const program = new Command();
program.version('0.0.1');
const properties = Object.getOwnPropertyNames(Input);
const properties = CloudRunnerOptionsReader.GetProperties();
const actionYamlReader: ActionYamlReader = new ActionYamlReader();
for (const element of properties) {
program.option(`--${element} <${element}>`, actionYamlReader.GetActionYamlValue(element));
@@ -48,6 +51,7 @@ export class Cli {
program.option('--cachePushFrom <cachePushFrom>', 'cache push from source folder');
program.option('--cachePushTo <cachePushTo>', 'cache push to caching folder');
program.option('--artifactName <artifactName>', 'caching artifact name');
program.option('--select <select>', 'select a particular resource');
program.parse(process.argv);
Cli.options = program.opts();
@@ -55,23 +59,31 @@ export class Cli {
}
static async RunCli(): Promise<void> {
Input.githubInputEnabled = false;
GitHub.githubInputEnabled = false;
if (Cli.options['populateOverride'] === `true`) {
await CloudRunnerQueryOverride.PopulateQueryOverrideInput();
}
if (Cli.options['logInput']) {
Cli.logInput();
}
const results = CliFunctionsRepository.GetCliFunctions(Cli.options.mode);
CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
Cli.options.versioning = 'None';
return await results.target[results.propertyKey]();
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
CloudRunnerLogger.log(`Build Params:
${JSON.stringify(buildParameter, undefined, 4)}
`);
CloudRunner.buildParameters = buildParameter;
return await results.target[results.propertyKey](Cli.options);
}
@CliFunction(`print-input`, `prints all input`)
private static logInput() {
core.info(`\n`);
core.info(`INPUT:`);
const properties = Object.getOwnPropertyNames(Input);
const properties = CloudRunnerOptionsReader.GetProperties();
for (const element of properties) {
if (
Input[element] !== undefined &&

@@ -0,0 +1,3 @@
export class SharedWorkspaceLocking {}
export default SharedWorkspaceLocking;

@@ -0,0 +1,179 @@
import { Cli } from '../cli/cli';
import CloudRunnerQueryOverride from './services/cloud-runner-query-override';
import GitHub from '../github';
const core = require('@actions/core');
class CloudRunnerOptions {
public static getInput(query) {
if (GitHub.githubInputEnabled) {
const coreInput = core.getInput(query);
if (coreInput && coreInput !== '') {
return coreInput;
}
}
const alternativeQuery = CloudRunnerOptions.ToEnvVarFormat(query);
// Query input sources
if (Cli.query(query, alternativeQuery)) {
return Cli.query(query, alternativeQuery);
}
if (CloudRunnerQueryOverride.query(query, alternativeQuery)) {
return CloudRunnerQueryOverride.query(query, alternativeQuery);
}
if (process.env[query] !== undefined) {
return process.env[query];
}
if (alternativeQuery !== query && process.env[alternativeQuery] !== undefined) {
return process.env[alternativeQuery];
}
return;
}
static get region(): string {
return CloudRunnerOptions.getInput('region') || 'eu-west-2';
}
static get githubRepo() {
return CloudRunnerOptions.getInput('GITHUB_REPOSITORY') || CloudRunnerOptions.getInput('GITHUB_REPO') || undefined;
}
static get branch() {
if (CloudRunnerOptions.getInput(`GITHUB_REF`)) {
return CloudRunnerOptions.getInput(`GITHUB_REF`).replace('refs/', '').replace(`head/`, '').replace(`heads/`, '');
} else if (CloudRunnerOptions.getInput('branch')) {
return CloudRunnerOptions.getInput('branch');
} else {
return '';
}
}
static get cloudRunnerBuilderPlatform() {
const input = CloudRunnerOptions.getInput('cloudRunnerBuilderPlatform');
if (input) {
return input;
}
if (CloudRunnerOptions.cloudRunnerCluster !== 'local') {
return 'linux';
}
return;
}
static get gitSha() {
if (CloudRunnerOptions.getInput(`GITHUB_SHA`)) {
return CloudRunnerOptions.getInput(`GITHUB_SHA`);
} else if (CloudRunnerOptions.getInput(`GitSHA`)) {
return CloudRunnerOptions.getInput(`GitSHA`);
}
}
static get customJob() {
return CloudRunnerOptions.getInput('customJob') || '';
}
static customJobHooks() {
return CloudRunnerOptions.getInput('customJobHooks') || '';
}
static readInputFromOverrideList() {
return CloudRunnerOptions.getInput('readInputFromOverrideList') || '';
}
static readInputOverrideCommand() {
const value = CloudRunnerOptions.getInput('readInputOverrideCommand');
if (value === 'gcp-secret-manager') {
return 'gcloud secrets versions access 1 --secret="{0}"';
} else if (value === 'aws-secret-manager') {
return 'aws secretsmanager get-secret-value --secret-id {0}';
}
return value || '';
}
static get cloudRunnerBranch() {
return CloudRunnerOptions.getInput('cloudRunnerBranch') || 'cloud-runner-develop';
}
static get postBuildSteps() {
return CloudRunnerOptions.getInput('postBuildSteps') || '';
}
static get preBuildSteps() {
return CloudRunnerOptions.getInput('preBuildSteps') || '';
}
static get awsBaseStackName() {
return CloudRunnerOptions.getInput('awsBaseStackName') || 'game-ci';
}
static get cloudRunnerCluster() {
if (Cli.isCliMode) {
return CloudRunnerOptions.getInput('cloudRunnerCluster') || 'aws';
}
return CloudRunnerOptions.getInput('cloudRunnerCluster') || 'local';
}
static get cloudRunnerCpu() {
return CloudRunnerOptions.getInput('cloudRunnerCpu');
}
static get cloudRunnerMemory() {
return CloudRunnerOptions.getInput('cloudRunnerMemory');
}
static get kubeConfig() {
return CloudRunnerOptions.getInput('kubeConfig') || '';
}
static get kubeVolume() {
return CloudRunnerOptions.getInput('kubeVolume') || '';
}
static get kubeVolumeSize() {
return CloudRunnerOptions.getInput('kubeVolumeSize') || '5Gi';
}
static get kubeStorageClass(): string {
return CloudRunnerOptions.getInput('kubeStorageClass') || '';
}
static get cacheKey(): string {
return CloudRunnerOptions.getInput('cacheKey') || CloudRunnerOptions.branch;
}
static get cloudRunnerTests(): boolean {
return CloudRunnerOptions.getInput(`cloudRunnerTests`) || false;
}
static get watchCloudRunnerToEnd(): boolean {
const input = CloudRunnerOptions.getInput(`watchToEnd`);
return !input || input === 'true';
}
public static get retainWorkspaces(): boolean {
return CloudRunnerOptions.getInput(`retainWorkspaces`) || false;
}
static get retainWorkspacesMax(): number {
return Number(CloudRunnerOptions.getInput(`retainWorkspacesMax`)) || 5;
}
public static ToEnvVarFormat(input: string) {
if (input.toUpperCase() === input) {
return input;
}
return input
.replace(/([A-Z])/g, ' $1')
.trim()
.toUpperCase()
.replace(/ /g, '_');
}
}
export default CloudRunnerOptions;

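A short usage sketch (not part of the diff) of the ToEnvVarFormat helper defined above, which getInput uses to derive an env-var style fallback key when an option is not supplied as a GitHub action input; the import path is assumed:

```ts
import CloudRunnerOptions from './cloud-runner-options';

// camelCase option names map to SCREAMING_SNAKE_CASE environment keys.
CloudRunnerOptions.ToEnvVarFormat('cloudRunnerCluster'); // 'CLOUD_RUNNER_CLUSTER'
CloudRunnerOptions.ToEnvVarFormat('cacheKey');           // 'CACHE_KEY'

// Already upper-case names are returned unchanged.
CloudRunnerOptions.ToEnvVarFormat('UNITY_SERIAL');       // 'UNITY_SERIAL'
```
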
@@ -1,142 +0,0 @@
import { BuildParameters, ImageTag } from '..';
import CloudRunner from './cloud-runner';
import Input from '../input';
import { CloudRunnerStatics } from './cloud-runner-statics';
import { TaskParameterSerializer } from './services/task-parameter-serializer';
import UnityVersioning from '../unity-versioning';
import { Cli } from '../cli/cli';
import CloudRunnerLogger from './services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';
describe('Cloud Runner', () => {
it('responds', () => {});
});
describe('Cloud Runner', () => {
const testSecretName = 'testSecretName';
const testSecretValue = 'testSecretValue';
if (Input.cloudRunnerTests) {
it('All build parameters sent to cloud runner as env vars', async () => {
// Build parameters
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.read('test-project'),
targetPlatform: 'StandaloneLinux64',
customJob: `
- name: 'step 1'
image: 'alpine'
commands: 'printenv'
secrets:
- name: '${testSecretName}'
value: '${testSecretValue}'
`,
};
Input.githubInputEnabled = false;
// Setup parameters
const buildParameter = await BuildParameters.create();
Input.githubInputEnabled = true;
const baseImage = new ImageTag(buildParameter);
// Run the job
const file = await CloudRunner.run(buildParameter, baseImage.toString());
// Assert results
expect(file).toContain(JSON.stringify(buildParameter));
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
const newLinePurgedFile = file
.replace(/\s+/g, '')
.replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
for (const element of environmentVariables) {
if (element.value !== undefined && typeof element.value !== 'function') {
if (typeof element.value === `string`) {
element.value = element.value.replace(/\s+/g, '');
}
CloudRunnerLogger.log(`checking input/build param ${element.name} ${element.value}`);
}
}
for (const element of environmentVariables) {
if (element.value !== undefined && typeof element.value !== 'function') {
expect(newLinePurgedFile).toContain(`${element.name}`);
expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
}
}
delete Cli.options;
}, 1000000);
it('Run one build it should not use cache, run subsequent build which should use cache', async () => {
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
targetPlatform: 'StandaloneLinux64',
cacheKey: `test-case-${uuidv4()}`,
};
Input.githubInputEnabled = false;
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
const results = await CloudRunner.run(buildParameter, baseImage.toString());
const libraryString = 'Rebuilding Library because the asset database could not be found!';
const buildSucceededString = 'Build succeeded';
expect(results).toContain(libraryString);
expect(results).toContain(buildSucceededString);
CloudRunnerLogger.log(`run 1 succeeded`);
const buildParameter2 = await BuildParameters.create();
const baseImage2 = new ImageTag(buildParameter2);
const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
CloudRunnerLogger.log(`run 2 succeeded`);
expect(results2).toContain(buildSucceededString);
expect(results2).toEqual(expect.not.stringContaining(libraryString));
Input.githubInputEnabled = true;
delete Cli.options;
}, 1000000);
}
it('Local cloud runner returns commands', async () => {
// Build parameters
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.read('test-project'),
cloudRunnerCluster: 'local-system',
targetPlatform: 'StandaloneLinux64',
customJob: `
- name: 'step 1'
image: 'alpine'
commands: 'dir'
secrets:
- name: '${testSecretName}'
value: '${testSecretValue}'
`,
};
Input.githubInputEnabled = false;
// Setup parameters
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
// Run the job
await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
Input.githubInputEnabled = true;
delete Cli.options;
}, 1000000);
it('Test cloud runner returns commands', async () => {
// Build parameters
Cli.options = {
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.read('test-project'),
cloudRunnerCluster: 'test',
targetPlatform: 'StandaloneLinux64',
};
Input.githubInputEnabled = false;
// Setup parameters
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
// Run the job
await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
Input.githubInputEnabled = true;
delete Cli.options;
}, 1000000);
});

@@ -13,19 +13,21 @@ import CloudRunnerEnvironmentVariable from './services/cloud-runner-environment-
import TestCloudRunner from './providers/test';
import LocalCloudRunner from './providers/local';
import LocalDockerCloudRunner from './providers/local-docker';
import GitHub from '../github';
class CloudRunner {
public static Provider: ProviderInterface;
static buildParameters: BuildParameters;
public static defaultSecrets: CloudRunnerSecret[];
public static cloudRunnerEnvironmentVariables: CloudRunnerEnvironmentVariable[];
private static setup(buildParameters: BuildParameters) {
public static setup(buildParameters: BuildParameters) {
CloudRunnerLogger.setup();
CloudRunner.buildParameters = buildParameters;
CloudRunner.setupBuildPlatform();
CloudRunner.defaultSecrets = TaskParameterSerializer.readDefaultSecrets();
CloudRunner.cloudRunnerEnvironmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
if (!buildParameters.isCliMode) {
CloudRunner.cloudRunnerEnvironmentVariables =
TaskParameterSerializer.readBuildEnvironmentVariables(buildParameters);
if (!buildParameters.isCliMode && GitHub.githubInputEnabled) {
const buildParameterPropertyNames = Object.getOwnPropertyNames(buildParameters);
for (const element of CloudRunner.cloudRunnerEnvironmentVariables) {
core.setOutput(Input.ToEnvVarFormat(element.name), element.value);
@@ -48,12 +50,12 @@ class CloudRunner {
case 'test':
CloudRunner.Provider = new TestCloudRunner();
break;
case 'local-system':
CloudRunner.Provider = new LocalCloudRunner();
break;
case 'local-docker':
CloudRunner.Provider = new LocalDockerCloudRunner();
break;
default:
CloudRunner.Provider = new LocalCloudRunner();
break;
}
}

@@ -6,6 +6,7 @@ export class CloudRunnerError {
public static async handleException(error: unknown) {
CloudRunnerLogger.error(JSON.stringify(error, undefined, 4));
core.setFailed('Cloud Runner failed');
if (CloudRunner.Provider !== undefined) {
await CloudRunner.Provider.cleanup(
CloudRunner.buildParameters.buildGuid,
CloudRunner.buildParameters,
@@ -14,3 +15,4 @@ export class CloudRunnerError {
);
}
}
}

@@ -69,6 +69,7 @@ export class AWSJobStack {
const secretsMappedToCloudFormationParameters = secrets.map((x) => {
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
});
const logGroupName = `${this.baseStackName}/${taskDefStackName}`;
const parameters = [
{
ParameterKey: 'EnvironmentName',
@@ -82,6 +83,10 @@ export class AWSJobStack {
ParameterKey: 'ServiceName',
ParameterValue: taskDefStackName,
},
{
ParameterKey: 'LogGroupName',
ParameterValue: logGroupName,
},
{
ParameterKey: 'Command',
ParameterValue: 'echo "this template should be overwritten when running a task"',

@@ -6,18 +6,18 @@ import * as zlib from 'zlib';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import { Input } from '../../..';
import CloudRunner from '../../cloud-runner';
import { CloudRunnerBuildCommandProcessor } from '../../services/cloud-runner-build-command-process';
import { CloudRunnerCustomHooks } from '../../services/cloud-runner-custom-hooks';
import { FollowLogStreamService } from '../../services/follow-log-stream-service';
import CloudRunnerOptions from '../../cloud-runner-options';
class AWSTaskRunner {
public static ECS: AWS.ECS;
public static Kinesis: AWS.Kinesis;
static async runTask(
taskDef: CloudRunnerAWSTaskDef,
ECS: AWS.ECS,
CF: AWS.CloudFormation,
environment: CloudRunnerEnvironmentVariable[],
buildGuid: string,
commands: string,
) {
): Promise<{ output: string; shouldCleanup: boolean }> {
const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
const taskDefinition =
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
@@ -30,7 +30,7 @@ class AWSTaskRunner {
const streamName =
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';
const task = await ECS.runTask({
const task = await AWSTaskRunner.ECS.runTask({
cluster,
taskDefinition,
platformVersion: '1.4.0',
@@ -39,7 +39,7 @@ class AWSTaskRunner {
{
name: taskDef.taskDefStackName,
environment,
command: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(commands, CloudRunner.buildParameters)],
command: ['-c', CloudRunnerCustomHooks.ApplyHooksToCommands(commands, CloudRunner.buildParameters)],
},
],
},
@@ -54,19 +54,18 @@ class AWSTaskRunner {
}).promise();
const taskArn = task.tasks?.[0].taskArn || '';
CloudRunnerLogger.log('Cloud runner job is starting');
await AWSTaskRunner.waitUntilTaskRunning(ECS, taskArn, cluster);
await AWSTaskRunner.waitUntilTaskRunning(taskArn, cluster);
CloudRunnerLogger.log(
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn))?.lastStatus}`,
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(cluster, taskArn))?.lastStatus}`,
);
const { output, shouldCleanup } = await this.streamLogsUntilTaskStops(
ECS,
CF,
taskDef,
cluster,
taskArn,
streamName,
);
const taskData = await AWSTaskRunner.describeTasks(ECS, cluster, taskArn);
if (!CloudRunnerOptions.watchCloudRunnerToEnd) {
const shouldCleanup: boolean = false;
const output: string = '';
return { output, shouldCleanup };
}
const { output, shouldCleanup } = await this.streamLogsUntilTaskStops(cluster, taskArn, streamName);
const taskData = await AWSTaskRunner.describeTasks(cluster, taskArn);
const exitCode = taskData.containers?.[0].exitCode;
const wasSuccessful = exitCode === 0 || (exitCode === undefined && taskData.lastStatus === 'RUNNING');
if (wasSuccessful) {
@@ -85,15 +84,15 @@ class AWSTaskRunner {
}
}
private static async waitUntilTaskRunning(ECS: AWS.ECS, taskArn: string, cluster: string) {
private static async waitUntilTaskRunning(taskArn: string, cluster: string) {
try {
await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
await AWSTaskRunner.ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
} catch (error_) {
const error = error_ as Error;
await new Promise((resolve) => setTimeout(resolve, 3000));
CloudRunnerLogger.log(
`Cloud runner job has ended ${
(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].lastStatus
(await AWSTaskRunner.describeTasks(cluster, taskArn)).containers?.[0].lastStatus
}`,
);
@@ -102,8 +101,8 @@ class AWSTaskRunner {
}
}
static async describeTasks(ECS: AWS.ECS, clusterName: string, taskArn: string) {
const tasks = await ECS.describeTasks({
static async describeTasks(clusterName: string, taskArn: string) {
const tasks = await AWSTaskRunner.ECS.describeTasks({
cluster: clusterName,
tasks: [taskArn],
}).promise();
@@ -114,17 +113,9 @@ class AWSTaskRunner {
}
}
static async streamLogsUntilTaskStops(
ECS: AWS.ECS,
CF: AWS.CloudFormation,
taskDef: CloudRunnerAWSTaskDef,
clusterName: string,
taskArn: string,
kinesisStreamName: string,
) {
const kinesis = new AWS.Kinesis();
const stream = await AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
let iterator = await AWSTaskRunner.getLogIterator(kinesis, stream);
static async streamLogsUntilTaskStops(clusterName: string, taskArn: string, kinesisStreamName: string) {
const stream = await AWSTaskRunner.getLogStream(kinesisStreamName);
let iterator = await AWSTaskRunner.getLogIterator(stream);
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${Input.region}#logsV2:log-groups/log-group/${CloudRunner.buildParameters.awsBaseStackName}-${CloudRunner.buildParameters.buildGuid}`;
CloudRunnerLogger.log(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`);
@@ -134,13 +125,11 @@ class AWSTaskRunner {
let output = '';
while (shouldReadLogs) {
await new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
const taskData = await AWSTaskRunner.describeTasks(clusterName, taskArn);
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
({ iterator, shouldReadLogs, output, shouldCleanup } = await AWSTaskRunner.handleLogStreamIteration(
kinesis,
iterator,
shouldReadLogs,
taskDef,
output,
shouldCleanup,
));
@@ -150,23 +139,18 @@ class AWSTaskRunner {
}
private static async handleLogStreamIteration(
kinesis: AWS.Kinesis,
iterator: string,
shouldReadLogs: boolean,
taskDef: CloudRunnerAWSTaskDef,
output: string,
shouldCleanup: boolean,
) {
const records = await kinesis
.getRecords({
const records = await AWSTaskRunner.Kinesis.getRecords({
ShardIterator: iterator,
})
.promise();
}).promise();
iterator = records.NextShardIterator || '';
({ shouldReadLogs, output, shouldCleanup } = AWSTaskRunner.logRecords(
records,
iterator,
taskDef,
shouldReadLogs,
output,
shouldCleanup,
@@ -197,7 +181,6 @@ class AWSTaskRunner {
private static logRecords(
records,
iterator: string,
taskDef: CloudRunnerAWSTaskDef,
shouldReadLogs: boolean,
output: string,
shouldCleanup: boolean,
@@ -224,24 +207,20 @@ class AWSTaskRunner {
return { shouldReadLogs, output, shouldCleanup };
}
private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
return await kinesis
.describeStream({
private static async getLogStream(kinesisStreamName: string) {
return await AWSTaskRunner.Kinesis.describeStream({
StreamName: kinesisStreamName,
})
.promise();
}).promise();
}
private static async getLogIterator(kinesis: AWS.Kinesis, stream) {
private static async getLogIterator(stream) {
return (
(
await kinesis
.getShardIterator({
await AWSTaskRunner.Kinesis.getShardIterator({
ShardIteratorType: 'TRIM_HORIZON',
StreamName: stream.StreamDescription.StreamName,
ShardId: stream.StreamDescription.Shards[0].ShardId,
})
.promise()
}).promise()
).ShardIterator || ''
);
}

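The hunks above move the ECS and Kinesis clients onto static members of AWSTaskRunner instead of threading them through every call. A hedged sketch (not part of the diff) of the resulting call pattern, mirroring the provider wiring shown further down in this commit; the import path and the three-argument runTask signature are taken from that later hunk:

```ts
import * as SDK from 'aws-sdk';
import AWSTaskRunner from './aws-task-runner'; // path assumed for illustration

async function runBuildTask(taskDef, environment, commands) {
  // Inject the SDK clients once; runTask now reads them from the static members.
  AWSTaskRunner.ECS = new SDK.ECS();
  AWSTaskRunner.Kinesis = new SDK.Kinesis();

  return AWSTaskRunner.runTask(taskDef, environment, commands);
}
```
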
@@ -11,6 +11,10 @@ Parameters:
Type: String
Default: example
Description: A name for the service
LogGroupName:
Type: String
Default: example
Description: A name for the log groups in cloud watch
ImageUrl:
Type: String
Default: nginx
@@ -68,7 +72,7 @@ Resources:
LogGroup:
Type: 'AWS::Logs::LogGroup'
Properties:
LogGroupName: !Ref ServiceName
LogGroupName: !Ref LogGroupName
Metadata:
'AWS::CloudFormation::Designer':
id: aece53ae-b82d-4267-bc16-ed964b05db27
@@ -78,7 +82,7 @@ Resources:
FilterPattern: ''
RoleArn:
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:CloudWatchIAMRole'
LogGroupName: !Ref ServiceName
LogGroupName: !Ref LogGroupName
DestinationArn:
'Fn::GetAtt':
- KinesisStream
@@ -147,7 +151,7 @@ Resources:
LogConfiguration:
LogDriver: awslogs
Options:
awslogs-group: !Ref ServiceName
awslogs-group: !Ref LogGroupName
awslogs-region: !Ref 'AWS::Region'
awslogs-stream-prefix: !Ref ServiceName
DependsOn:

@@ -1,181 +1,62 @@
import AWS from 'aws-sdk';
import { CliFunction } from '../../../../cli/cli-functions-repository';
import Input from '../../../../input';
import CloudRunnerLogger from '../../../services/cloud-runner-logger';
import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
import { TaskService } from '../services/task-service';
import { GarbageCollectionService } from '../services/garbage-collection-service';
import { TertiaryResourcesService } from '../services/tertiary-resources-service';
export class AwsCliCommands {
@CliFunction(`aws-list-all`, `List all resources`)
static async awsListAll() {
await AwsCliCommands.awsListStacks(undefined, true);
public static async awsListAll() {
await AwsCliCommands.awsListStacks();
await AwsCliCommands.awsListTasks();
await AwsCliCommands.awsListLogGroups(undefined, true);
await AwsCliCommands.awsListLogGroups();
}
@CliFunction(`aws-garbage-collect-list`, `garbage collect aws resources not in use !WIP!`)
static async garbageCollectAws() {
await AwsCliCommands.cleanup(false);
await GarbageCollectionService.cleanup(false);
}
@CliFunction(`aws-garbage-collect-all`, `garbage collect aws resources regardless of whether they are in use`)
static async garbageCollectAwsAll() {
await AwsCliCommands.cleanup(true);
await GarbageCollectionService.cleanup(true);
}
@CliFunction(
`aws-garbage-collect-all-1d-older`,
`garbage collect aws resources created more than 1d ago (ignore if they are in use)`,
)
static async garbageCollectAwsAllOlderThanOneDay() {
await AwsCliCommands.cleanup(true, true);
await GarbageCollectionService.cleanup(true, true);
}
static isOlderThan1day(date: any) {
const ageDate = new Date(date.getTime() - Date.now());
return ageDate.getDay() > 0;
}
@CliFunction(`aws-list-stacks`, `List stacks`)
static async awsListStacks(perResultCallback: any = false, verbose: boolean = false) {
process.env.AWS_REGION = Input.region;
const CF = new AWS.CloudFormation();
const stacks =
(await CF.listStacks().promise()).StackSummaries?.filter(
(_x) => _x.StackStatus !== 'DELETE_COMPLETE', // &&
// _x.TemplateDescription === TaskDefinitionFormation.description.replace('\n', ''),
) || [];
CloudRunnerLogger.log(`Stacks ${stacks.length}`);
for (const element of stacks) {
const ageDate = new Date(element.CreationTime.getTime() - Date.now());
if (verbose)
CloudRunnerLogger.log(
`Task Stack ${element.StackName} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
);
if (perResultCallback) await perResultCallback(element);
}
const baseStacks =
(await CF.listStacks().promise()).StackSummaries?.filter(
(_x) =>
_x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription === BaseStackFormation.baseStackDecription,
) || [];
CloudRunnerLogger.log(`Base Stacks ${baseStacks.length}`);
for (const element of baseStacks) {
const ageDate = new Date(element.CreationTime.getTime() - Date.now());
if (verbose)
CloudRunnerLogger.log(
`Base Stack ${
element.StackName
} - Age D${ageDate.getHours()} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
);
if (perResultCallback) await perResultCallback(element);
}
if (stacks === undefined) {
return;
}
static async awsListStacks(perResultCallback: any = false) {
return TaskService.awsListStacks(perResultCallback);
}
@CliFunction(`aws-list-tasks`, `List tasks`)
static async awsListTasks(perResultCallback: any = false) {
process.env.AWS_REGION = Input.region;
const ecs = new AWS.ECS();
const clusters = (await ecs.listClusters().promise()).clusterArns || [];
CloudRunnerLogger.log(`Clusters ${clusters.length}`);
for (const element of clusters) {
const input: AWS.ECS.ListTasksRequest = {
cluster: element,
};
return TaskService.awsListJobs(perResultCallback);
}
const list = (await ecs.listTasks(input).promise()).taskArns || [];
if (list.length > 0) {
const describeInput: AWS.ECS.DescribeTasksRequest = { tasks: list, cluster: element };
const describeList = (await ecs.describeTasks(describeInput).promise()).tasks || [];
if (describeList === []) {
continue;
}
CloudRunnerLogger.log(`Tasks ${describeList.length}`);
for (const taskElement of describeList) {
if (taskElement === undefined) {
continue;
}
taskElement.overrides = {};
taskElement.attachments = [];
if (taskElement.createdAt === undefined) {
CloudRunnerLogger.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
continue;
}
if (perResultCallback) await perResultCallback(taskElement, element);
}
}
}
}
@CliFunction(`aws-list-log-groups`, `List tasks`)
static async awsListLogGroups(perResultCallback: any = false, verbose: boolean = false) {
process.env.AWS_REGION = Input.region;
const ecs = new AWS.CloudWatchLogs();
let logStreamInput: AWS.CloudWatchLogs.DescribeLogGroupsRequest = {
/* logGroupNamePrefix: 'game-ci' */
};
let logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
const logGroups = logGroupsDescribe.logGroups || [];
while (logGroupsDescribe.nextToken) {
logStreamInput = { /* logGroupNamePrefix: 'game-ci',*/ nextToken: logGroupsDescribe.nextToken };
logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
logGroups.push(...(logGroupsDescribe?.logGroups || []));
static async awsListLogGroups(perResultCallback: any = false) {
await TertiaryResourcesService.AwsListLogGroups(perResultCallback);
}
CloudRunnerLogger.log(`Log Groups ${logGroups.length}`);
for (const element of logGroups) {
if (element.creationTime === undefined) {
CloudRunnerLogger.log(`Skipping ${element.logGroupName} no createdAt date`);
continue;
}
const ageDate = new Date(new Date(element.creationTime).getTime() - Date.now());
if (verbose)
CloudRunnerLogger.log(
`Log Group Name ${
element.logGroupName
} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()} - 1d old ${AwsCliCommands.isOlderThan1day(
new Date(element.creationTime),
)}`,
);
if (perResultCallback) await perResultCallback(element, element);
}
@CliFunction(`aws-list-jobs`, `List tasks`)
public static async awsListJobs(perResultCallback: any = false) {
return TaskService.awsListJobs(perResultCallback);
}
private static async cleanup(deleteResources = false, OneDayOlderOnly: boolean = false) {
process.env.AWS_REGION = Input.region;
const CF = new AWS.CloudFormation();
const ecs = new AWS.ECS();
const cwl = new AWS.CloudWatchLogs();
const taskDefinitionsInUse = new Array();
await AwsCliCommands.awsListTasks(async (taskElement, element) => {
taskDefinitionsInUse.push(taskElement.taskDefinitionArn);
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(taskElement.CreatedAt))) {
CloudRunnerLogger.log(`Stopping task ${taskElement.containers?.[0].name}`);
await ecs.stopTask({ task: taskElement.taskArn || '', cluster: element }).promise();
@CliFunction(`list-tasks`, `List tasks`)
static async listTasks(perResultCallback: any = false) {
return TaskService.awsListJobs(perResultCallback);
}
});
await AwsCliCommands.awsListStacks(async (element) => {
if (
(await CF.describeStackResources({ StackName: element.StackName }).promise()).StackResources?.some(
(x) => x.ResourceType === 'AWS::ECS::TaskDefinition' && taskDefinitionsInUse.includes(x.PhysicalResourceId),
)
) {
CloudRunnerLogger.log(`Skipping ${element.StackName} - active task was running not deleting`);
return;
@CliFunction(`watch`, `List tasks`)
static async watchTasks() {
return TaskService.watch();
}
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(element.CreationTime))) {
if (element.StackName === 'game-ci' || element.TemplateDescription === 'Game-CI base stack') {
CloudRunnerLogger.log(`Skipping ${element.StackName} ignore list`);
return;
}
CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
const deleteStackInput: AWS.CloudFormation.DeleteStackInput = { StackName: element.StackName };
await CF.deleteStack(deleteStackInput).promise();
}
});
await AwsCliCommands.awsListLogGroups(async (element) => {
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(new Date(element.createdAt)))) {
CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
await cwl.deleteLogGroup({ logGroupName: element.logGroupName || '' }).promise();
}
});
@CliFunction(`describe-resource`, `desribe tasks`)
static async describe(options) {
// return CloudRunner.Provider.inspect();
return await TaskService.awsDescribeJob(options.select);
}
}

@@ -9,6 +9,9 @@ import CloudRunnerLogger from '../../services/cloud-runner-logger';
import { AWSJobStack } from './aws-job-stack';
import { AWSBaseStack } from './aws-base-stack';
import { Input } from '../../..';
import { AwsCliCommands } from './commands/aws-cli-commands';
import { TertiaryResourcesService } from './services/tertiary-resources-service';
import { TaskService } from './services/task-service';
class AWSBuildEnvironment implements ProviderInterface {
private baseStackName: string;
@@ -16,6 +19,34 @@ class AWSBuildEnvironment implements ProviderInterface {
constructor(buildParameters: BuildParameters) {
this.baseStackName = buildParameters.awsBaseStackName;
}
async inspect(): Promise<string> {
return await TaskService.awsDescribeJob('');
}
watch(): Promise<string> {
throw new Error('Method not implemented.');
}
async listOtherResources(): Promise<string> {
await TertiaryResourcesService.AwsListLogGroups();
return '';
}
garbageCollect(
// eslint-disable-next-line no-unused-vars
filter: string,
// eslint-disable-next-line no-unused-vars
previewOnly: boolean,
): Promise<string> {
throw new Error('Method not implemented.');
}
async listResources() {
await AwsCliCommands.awsListAll();
return '';
}
async cleanup(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
@@ -49,6 +80,8 @@ class AWSBuildEnvironment implements ProviderInterface {
process.env.AWS_REGION = Input.region;
const ECS = new SDK.ECS();
const CF = new SDK.CloudFormation();
AWSTaskRunner.ECS = ECS;
AWSTaskRunner.Kinesis = new SDK.Kinesis();
CloudRunnerLogger.log(`AWS Region: ${CF.config.region}`);
const entrypoint = ['/bin/sh'];
const startTimeMs = Date.now();
@@ -69,7 +102,7 @@ class AWSBuildEnvironment implements ProviderInterface {
try {
const postSetupStacksTimeMs = Date.now();
CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
const { output, shouldCleanup } = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
const { output, shouldCleanup } = await AWSTaskRunner.runTask(taskDef, environment, commands);
postRunTaskTimeMs = Date.now();
CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
if (shouldCleanup) {

@@ -0,0 +1,73 @@
import AWS from 'aws-sdk';
import { CliFunction } from '../../../../cli/cli-functions-repository';
import Input from '../../../../input';
import CloudRunnerLogger from '../../../services/cloud-runner-logger';
import { AwsCliCommands } from '../commands/aws-cli-commands';

export class GarbageCollectionService {
  @CliFunction(`aws-garbage-collect-list`, `garbage collect aws resources not in use !WIP!`)
  static async garbageCollectAws() {
    await GarbageCollectionService.cleanup(false);
  }
  @CliFunction(`aws-garbage-collect-all`, `garbage collect aws resources regardless of whether they are in use`)
  static async garbageCollectAwsAll() {
    await GarbageCollectionService.cleanup(true);
  }
  @CliFunction(
    `aws-garbage-collect-all-1d-older`,
    `garbage collect aws resources created more than 1d ago (ignore if they are in use)`,
  )
  static async garbageCollectAwsAllOlderThanOneDay() {
    await GarbageCollectionService.cleanup(true, true);
  }
  static isOlderThan1day(date: any) {
    const ageDate = new Date(date.getTime() - Date.now());

    return ageDate.getDay() > 0;
  }

  public static async cleanup(deleteResources = false, OneDayOlderOnly: boolean = false) {
    process.env.AWS_REGION = Input.region;
    const CF = new AWS.CloudFormation();
    const ecs = new AWS.ECS();
    const cwl = new AWS.CloudWatchLogs();
    const taskDefinitionsInUse = new Array();
    await AwsCliCommands.awsListTasks(async (taskElement, element) => {
      taskDefinitionsInUse.push(taskElement.taskDefinitionArn);
      if (deleteResources && (!OneDayOlderOnly || GarbageCollectionService.isOlderThan1day(taskElement.CreatedAt))) {
        CloudRunnerLogger.log(`Stopping task ${taskElement.containers?.[0].name}`);
        await ecs.stopTask({ task: taskElement.taskArn || '', cluster: element }).promise();
      }
    });
    await AwsCliCommands.awsListStacks(async (element) => {
      if (
        (await CF.describeStackResources({ StackName: element.StackName }).promise()).StackResources?.some(
          (x) => x.ResourceType === 'AWS::ECS::TaskDefinition' && taskDefinitionsInUse.includes(x.PhysicalResourceId),
        )
      ) {
        CloudRunnerLogger.log(`Skipping ${element.StackName} - active task was running not deleting`);

        return;
      }
      if (deleteResources && (!OneDayOlderOnly || GarbageCollectionService.isOlderThan1day(element.CreationTime))) {
        if (element.StackName === 'game-ci' || element.TemplateDescription === 'Game-CI base stack') {
          CloudRunnerLogger.log(`Skipping ${element.StackName} ignore list`);

          return;
        }
        CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
        const deleteStackInput: AWS.CloudFormation.DeleteStackInput = { StackName: element.StackName };
        await CF.deleteStack(deleteStackInput).promise();
      }
    });
    await AwsCliCommands.awsListLogGroups(async (element) => {
      if (
        deleteResources &&
        (!OneDayOlderOnly || GarbageCollectionService.isOlderThan1day(new Date(element.createdAt)))
      ) {
        CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
        await cwl.deleteLogGroup({ logGroupName: element.logGroupName || '' }).promise();
      }
    });
  }
}
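Note on the age check above: `isOlderThan1day` builds a `Date` from a millisecond delta and then calls `getDay()`, which returns the day of the week rather than an elapsed-day count, so the check may not behave as its name suggests. A minimal sketch of a direct elapsed-time comparison (illustrative only, not part of this commit):

    // Hypothetical variant: true when `date` lies more than 24 hours in the past.
    static isOlderThan1day(date: Date): boolean {
      const oneDayMs = 24 * 60 * 60 * 1000;
      return Date.now() - date.getTime() > oneDayMs;
    }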
@@ -0,0 +1,126 @@
import AWS from 'aws-sdk';
import Input from '../../../../input';
import CloudRunnerLogger from '../../../services/cloud-runner-logger';
import { BaseStackFormation } from '../cloud-formations/base-stack-formation';

export class TaskService {
  static watch() {
    throw new Error('Method not implemented.');
  }
  public static async awsListStacks(perResultCallback: any = false) {
    process.env.AWS_REGION = Input.region;
    const CF = new AWS.CloudFormation();
    const stacks =
      (await CF.listStacks().promise()).StackSummaries?.filter(
        (_x) => _x.StackStatus !== 'DELETE_COMPLETE', // &&
        // _x.TemplateDescription === TaskDefinitionFormation.description.replace('\n', ''),
      ) || [];
    CloudRunnerLogger.log(`Stacks ${stacks.length}`);
    for (const element of stacks) {
      const ageDate: Date = new Date(Date.now() - element.CreationTime.getTime());

      // if (verbose)
      CloudRunnerLogger.log(
        `Task Stack ${element.StackName} - Age D${Math.floor(
          ageDate.getHours() / 24,
        )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
      );
      if (perResultCallback) await perResultCallback(element);
    }
    const baseStacks =
      (await CF.listStacks().promise()).StackSummaries?.filter(
        (_x) =>
          _x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription === BaseStackFormation.baseStackDecription,
      ) || [];
    CloudRunnerLogger.log(`Base Stacks ${baseStacks.length}`);
    for (const element of baseStacks) {
      const ageDate: Date = new Date(Date.now() - element.CreationTime.getTime());

      // if (verbose)
      CloudRunnerLogger.log(
        `Task Stack ${element.StackName} - Age D${Math.floor(
          ageDate.getHours() / 24,
        )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
      );
      if (perResultCallback) await perResultCallback(element);
    }
    if (stacks === undefined) {
      return;
    }
  }
  public static async awsListTasks(perResultCallback: any = false) {
    process.env.AWS_REGION = Input.region;
    const ecs = new AWS.ECS();
    const clusters = (await ecs.listClusters().promise()).clusterArns || [];
    CloudRunnerLogger.log(`Clusters ${clusters.length}`);
    for (const element of clusters) {
      const input: AWS.ECS.ListTasksRequest = {
        cluster: element,
      };

      const list = (await ecs.listTasks(input).promise()).taskArns || [];
      if (list.length > 0) {
        const describeInput: AWS.ECS.DescribeTasksRequest = { tasks: list, cluster: element };
        const describeList = (await ecs.describeTasks(describeInput).promise()).tasks || [];
        if (describeList.length === 0) {
          continue;
        }
        CloudRunnerLogger.log(`Tasks ${describeList.length}`);
        for (const taskElement of describeList) {
          if (taskElement === undefined) {
            continue;
          }
          taskElement.overrides = {};
          taskElement.attachments = [];
          if (taskElement.createdAt === undefined) {
            CloudRunnerLogger.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
            continue;
          }
          if (perResultCallback) await perResultCallback(taskElement, element);
        }
      }
    }
  }
  public static async awsListJobs(perResultCallback: any = false) {
    process.env.AWS_REGION = Input.region;
    const CF = new AWS.CloudFormation();
    const stacks =
      (await CF.listStacks().promise()).StackSummaries?.filter(
        (_x) =>
          _x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription !== BaseStackFormation.baseStackDecription,
      ) || [];
    CloudRunnerLogger.log(`Stacks ${stacks.length}`);
    for (const element of stacks) {
      const ageDate: Date = new Date(Date.now() - element.CreationTime.getTime());

      // if (verbose)
      CloudRunnerLogger.log(
        `Task Stack ${element.StackName} - Age D${Math.floor(
          ageDate.getHours() / 24,
        )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
      );
      if (perResultCallback) await perResultCallback(element);
    }
  }
  public static async awsDescribeJob(job: string) {
    process.env.AWS_REGION = Input.region;
    const CF = new AWS.CloudFormation();
    const stack = (await CF.listStacks().promise()).StackSummaries?.find((_x) => _x.StackName === job) || undefined;
    const stackInfo = (await CF.describeStackResources({ StackName: job }).promise()) || undefined;
    const stackInfo2 = (await CF.describeStacks({ StackName: job }).promise()) || undefined;
    if (stack === undefined) {
      throw new Error('stack not defined');
    }
    const ageDate: Date = new Date(Date.now() - stack.CreationTime.getTime());
    const message = `
    Task Stack ${stack.StackName}
    Age D${Math.floor(ageDate.getHours() / 24)} H${ageDate.getHours()} M${ageDate.getMinutes()}
    ${JSON.stringify(stack, undefined, 4)}
    ${JSON.stringify(stackInfo, undefined, 4)}
    ${JSON.stringify(stackInfo2, undefined, 4)}
    `;
    CloudRunnerLogger.log(message);

    return message;
  }
}
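These listing helpers are callback-driven rather than returning arrays; a minimal usage sketch (assumes AWS credentials and `Input.region` are configured, names illustrative):

    // Log every task ARN the garbage collector would consider, grouped by cluster.
    await TaskService.awsListTasks(async (task, clusterArn) => {
      CloudRunnerLogger.log(`${clusterArn}: ${task.taskArn}`);
    });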
@@ -0,0 +1,37 @@
import AWS from 'aws-sdk';
import Input from '../../../../input';
import CloudRunnerLogger from '../../../services/cloud-runner-logger';

export class TertiaryResourcesService {
  public static async AwsListLogGroups(perResultCallback: any = false) {
    process.env.AWS_REGION = Input.region;
    const ecs = new AWS.CloudWatchLogs();
    let logStreamInput: AWS.CloudWatchLogs.DescribeLogGroupsRequest = {
      /* logGroupNamePrefix: 'game-ci' */
    };
    let logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
    const logGroups = logGroupsDescribe.logGroups || [];
    while (logGroupsDescribe.nextToken) {
      logStreamInput = { /* logGroupNamePrefix: 'game-ci',*/ nextToken: logGroupsDescribe.nextToken };
      logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
      logGroups.push(...(logGroupsDescribe?.logGroups || []));
    }

    CloudRunnerLogger.log(`Log Groups ${logGroups.length}`);
    for (const element of logGroups) {
      if (element.creationTime === undefined) {
        CloudRunnerLogger.log(`Skipping ${element.logGroupName} no createdAt date`);
        continue;
      }
      const ageDate: Date = new Date(Date.now() - element.creationTime);

      // if (verbose)
      CloudRunnerLogger.log(
        `Task Stack ${element.logGroupName} - Age D${Math.floor(
          ageDate.getHours() / 24,
        )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
      );
      if (perResultCallback) await perResultCallback(element, element);
    }
  }
}
@@ -12,7 +12,6 @@ import KubernetesJobSpecFactory from './kubernetes-job-spec-factory';
import KubernetesServiceAccount from './kubernetes-service-account';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import { CoreV1Api } from '@kubernetes/client-node';
import DependencyOverrideService from '../../services/depdency-override-service';

class Kubernetes implements ProviderInterface {
  private kubeConfig: k8s.KubeConfig;

@@ -39,6 +38,23 @@ class Kubernetes implements ProviderInterface {
    this.namespace = 'default';
    this.buildParameters = buildParameters;
  }
  inspect(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  watch(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  listResources(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  garbageCollect(
    // eslint-disable-next-line no-unused-vars
    filter: string,
    // eslint-disable-next-line no-unused-vars
    previewOnly: boolean,
  ): Promise<string> {
    throw new Error('Method not implemented.');
  }
  public async setup(
    buildGuid: string,
    buildParameters: BuildParameters,

@@ -51,9 +67,6 @@
    this.pvcName = `unity-builder-pvc-${buildGuid}`;
    this.cleanupCronJobName = `unity-builder-cronjob-${buildGuid}`;
    this.serviceAccountName = `service-account-${buildGuid}`;
    if (await DependencyOverrideService.CheckHealth()) {
      await DependencyOverrideService.TryStartDependencies();
    }
    await KubernetesStorage.createPersistentVolumeClaim(
      buildParameters,
      this.pvcName,
@@ -1,6 +1,6 @@
import { V1EnvVar, V1EnvVarSource, V1SecretKeySelector } from '@kubernetes/client-node';
import BuildParameters from '../../../build-parameters';
import { CloudRunnerBuildCommandProcessor } from '../../services/cloud-runner-build-command-process';
import { CloudRunnerCustomHooks } from '../../services/cloud-runner-custom-hooks';
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
import CloudRunnerSecret from '../../services/cloud-runner-secret';
import CloudRunner from '../../cloud-runner';

@@ -103,7 +103,7 @@ class KubernetesJobSpecFactory {
            name: 'main',
            image,
            command: ['/bin/sh'],
            args: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(command, CloudRunner.buildParameters)],
            args: ['-c', CloudRunnerCustomHooks.ApplyHooksToCommands(command, CloudRunner.buildParameters)],

            workingDir: `${workingDirectory}`,
            resources: {
@@ -6,6 +6,23 @@ import { ProviderInterface } from '../provider-interface';
import CloudRunnerSecret from '../../services/cloud-runner-secret';

class LocalDockerCloudRunner implements ProviderInterface {
  inspect(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  watch(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  listResources(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  garbageCollect(
    // eslint-disable-next-line no-unused-vars
    filter: string,
    // eslint-disable-next-line no-unused-vars
    previewOnly: boolean,
  ): Promise<string> {
    throw new Error('Method not implemented.');
  }
  cleanup(
    // eslint-disable-next-line no-unused-vars
    buildGuid: string,
@@ -6,6 +6,23 @@ import { ProviderInterface } from '../provider-interface';
import CloudRunnerSecret from '../../services/cloud-runner-secret';

class LocalCloudRunner implements ProviderInterface {
  inspect(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  watch(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  listResources(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  garbageCollect(
    // eslint-disable-next-line no-unused-vars
    filter: string,
    // eslint-disable-next-line no-unused-vars
    previewOnly: boolean,
  ): Promise<string> {
    throw new Error('Method not implemented.');
  }
  cleanup(
    // eslint-disable-next-line no-unused-vars
    buildGuid: string,
@@ -39,4 +39,13 @@ export interface ProviderInterface {
    // eslint-disable-next-line no-unused-vars
    secrets: CloudRunnerSecret[],
  ): Promise<string>;
  listResources(): Promise<string>;
  garbageCollect(
    // eslint-disable-next-line no-unused-vars
    filter: string,
    // eslint-disable-next-line no-unused-vars
    previewOnly: boolean,
  ): Promise<string>;
  inspect(): Promise<string>;
  watch(): Promise<string>;
}
@@ -5,6 +5,23 @@ import { ProviderInterface } from '../provider-interface';
import CloudRunnerSecret from '../../services/cloud-runner-secret';

class TestCloudRunner implements ProviderInterface {
  inspect(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  watch(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  listResources(): Promise<string> {
    throw new Error('Method not implemented.');
  }
  garbageCollect(
    // eslint-disable-next-line no-unused-vars
    filter: string,
    // eslint-disable-next-line no-unused-vars
    previewOnly: boolean,
  ): Promise<string> {
    throw new Error('Method not implemented.');
  }
  cleanup(
    // eslint-disable-next-line no-unused-vars
    buildGuid: string,
@@ -2,12 +2,12 @@ import fs from 'fs';
import path from 'path';
import BuildParameters from '../../build-parameters';
import { Cli } from '../../cli/cli';
import Input from '../../input';
import UnityVersioning from '../../unity-versioning';
import CloudRunner from '../cloud-runner';
import { CloudRunnerSystem } from '../services/cloud-runner-system';
import { Caching } from './caching';
import { v4 as uuidv4 } from 'uuid';
import GitHub from '../../github';

describe('Cloud Runner Caching', () => {
  it('responds', () => {});

@@ -22,7 +22,7 @@ describe('Cloud Runner Caching', () => {
      targetPlatform: 'StandaloneLinux64',
      cacheKey: `test-case-${uuidv4()}`,
    };
    Input.githubInputEnabled = false;
    GitHub.githubInputEnabled = false;
    const buildParameter = await BuildParameters.create();
    CloudRunner.buildParameters = buildParameter;

@@ -56,7 +56,7 @@ describe('Cloud Runner Caching', () => {
    fs.rmdirSync(testFolder, { recursive: true });
    fs.rmdirSync(cacheFolder, { recursive: true });

    Input.githubInputEnabled = true;
    GitHub.githubInputEnabled = true;
    delete Cli.options;
  }, 1000000);
}
@@ -59,27 +59,18 @@ export class Caching {
        )}`,
      );
    }
    // eslint-disable-next-line func-style
    const formatFunction = function (format: string) {
      const arguments_ = Array.prototype.slice.call(
        [path.resolve(sourceFolder, '..'), cacheFolder, cacheArtifactName],
        1,
    await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar.lz4 ${path.basename(sourceFolder)}`);
    await CloudRunnerSystem.Run(`du ${cacheArtifactName}.tar.lz4`);
    const contents = await fs.promises.readdir(path.basename(sourceFolder));
    CloudRunnerLogger.log(
      `There is ${contents.length} files/dir in the source folder ${path.basename(sourceFolder)}`,
    );

      return format.replace(/{(\d+)}/g, function (match, number) {
        return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
      });
    };
    await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar ${path.basename(sourceFolder)}`);
    assert(await fileExists(`${cacheArtifactName}.tar`), 'cache archive exists');
    assert(await fileExists(`${cacheArtifactName}.tar.lz4`), 'cache archive exists');
    assert(await fileExists(path.basename(sourceFolder)), 'source folder exists');
    if (CloudRunner.buildParameters.cachePushOverrideCommand) {
      await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePushOverrideCommand));
    }
    await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar ${cacheFolder}`);
    await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar.lz4 ${cacheFolder}`);
    RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
    assert(
      await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar`),
      await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar.lz4`),
      'cache archive exists inside cache folder',
    );
  } catch (error) {

@@ -101,38 +92,24 @@ export class Caching {
      await fs.promises.mkdir(destinationFolder);
    }

    const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar$ | head -1`))
    const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar.lz4$ | head -1`))
      .replace(/\n/g, ``)
      .replace('.tar', '');
      .replace('.tar.lz4', '');

    process.chdir(cacheFolder);

    const cacheSelection =
      cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar`)) ? cacheArtifactName : latestInBranch;
      cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar.lz4`))
        ? cacheArtifactName
        : latestInBranch;
    await CloudRunnerLogger.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);

    // eslint-disable-next-line func-style
    const formatFunction = function (format: string) {
      const arguments_ = Array.prototype.slice.call(
        [path.resolve(destinationFolder, '..'), cacheFolder, cacheArtifactName],
        1,
      );

      return format.replace(/{(\d+)}/g, function (match, number) {
        return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
      });
    };

    if (CloudRunner.buildParameters.cachePullOverrideCommand) {
      await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePullOverrideCommand));
    }

    if (await fileExists(`${cacheSelection}.tar`)) {
    if (await fileExists(`${cacheSelection}.tar.lz4`)) {
      const resultsFolder = `results${CloudRunner.buildParameters.buildGuid}`;
      await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
      RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar`);
      RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar.lz4`);
      const fullResultsFolder = path.join(cacheFolder, resultsFolder);
      await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar -C ${fullResultsFolder}`);
      await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar.lz4 -C ${fullResultsFolder}`);
      RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
      assert(await fileExists(fullResultsFolder), `cache extraction results folder exists`);
      const destinationParentFolder = path.resolve(destinationFolder, '..');

@@ -152,15 +129,15 @@ export class Caching {
    } else {
      RemoteClientLogger.logWarning(`cache item ${cacheArtifactName} doesn't exist ${destinationFolder}`);
      if (cacheSelection !== ``) {
        RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar doesn't exist ${destinationFolder}`);
        RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar.lz4 doesn't exist ${destinationFolder}`);
        throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
      }
    }
  } catch (error) {
    process.chdir(`${startPath}`);
    process.chdir(startPath);
    throw error;
  }
  process.chdir(`${startPath}`);
  process.chdir(startPath);
}

public static async handleCachePurging() {
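A side note on the `.tar.lz4` names introduced here: `tar -cf` on its own does not apply LZ4 compression, it only names the archive with that extension. If actual compression is intended, one hedged variant (assuming the `lz4` binary is available inside the build container) would route the archive through tar's compress-program option:

    // Sketch only: produce a genuinely LZ4-compressed archive rather than a plain tar with an .lz4 suffix.
    await CloudRunnerSystem.Run(
      `tar --use-compress-program=lz4 -cf ${cacheArtifactName}.tar.lz4 ${path.basename(sourceFolder)}`,
    );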
@@ -13,30 +13,38 @@ import { CloudRunnerSystem } from '../services/cloud-runner-system';
export class RemoteClient {
  public static async bootstrapRepository() {
    try {
      await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
      await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.repoPathAbsolute}`);
      await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.cacheFolderFull}`);
      process.chdir(CloudRunnerFolders.repoPathAbsolute);
      await CloudRunnerSystem.Run(
        `mkdir -p ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`,
      );
      await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}`);
      await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.cacheFolderFull)}`);
      process.chdir(CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute));
      await RemoteClient.cloneRepoWithoutLFSFiles();
      await RemoteClient.sizeOfFolder('repo before lfs cache pull', CloudRunnerFolders.repoPathAbsolute);
      await RemoteClient.sizeOfFolder(
        'repo before lfs cache pull',
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute),
      );
      const lfsHashes = await LfsHashing.createLFSHashFiles();
      if (fs.existsSync(CloudRunnerFolders.libraryFolderAbsolute)) {
        RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
      }
      await Caching.PullFromCache(
        CloudRunnerFolders.lfsCacheFolderFull,
        CloudRunnerFolders.lfsFolderAbsolute,
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsCacheFolderFull),
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsFolderAbsolute),
        `${lfsHashes.lfsGuidSum}`,
      );
      await RemoteClient.sizeOfFolder('repo after lfs cache pull', CloudRunnerFolders.repoPathAbsolute);
      await RemoteClient.pullLatestLFS();
      await RemoteClient.sizeOfFolder('repo before lfs git pull', CloudRunnerFolders.repoPathAbsolute);
      await Caching.PushToCache(
        CloudRunnerFolders.lfsCacheFolderFull,
        CloudRunnerFolders.lfsFolderAbsolute,
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsCacheFolderFull),
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsFolderAbsolute),
        `${lfsHashes.lfsGuidSum}`,
      );
      await Caching.PullFromCache(CloudRunnerFolders.libraryCacheFolderFull, CloudRunnerFolders.libraryFolderAbsolute);
      await Caching.PullFromCache(
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryCacheFolderFull),
        CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryFolderAbsolute),
      );
      await RemoteClient.sizeOfFolder('repo after library cache pull', CloudRunnerFolders.repoPathAbsolute);
      await Caching.handleCachePurging();
    } catch (error) {

@@ -70,7 +78,7 @@ export class RemoteClient {
      RemoteClientLogger.log(`${CloudRunner.buildParameters.branch}`);
      await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.branch}`);
      assert(fs.existsSync(path.join(`.git`, `lfs`)), 'LFS folder should not exist before caching');
      RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
      RemoteClientLogger.log(`Checked out ${CloudRunner.buildParameters.branch}`);
    } catch (error) {
      throw error;
    }

@@ -87,11 +95,6 @@ export class RemoteClient {

  @CliFunction(`remote-cli`, `sets up a repository, usually before a game-ci build`)
  static async runRemoteClientJob() {
    const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
    RemoteClientLogger.log(`Build Params:
      ${JSON.stringify(buildParameter, undefined, 4)}
    `);
    CloudRunner.buildParameters = buildParameter;
    await RemoteClient.bootstrapRepository();
  }
}
@@ -3,11 +3,10 @@ import YAML from 'yaml';
import CloudRunnerSecret from './cloud-runner-secret';
import CloudRunner from '../cloud-runner';

export class CloudRunnerBuildCommandProcessor {
  public static ProcessCommands(commands: string, buildParameters: BuildParameters): string {
    const hooks = CloudRunnerBuildCommandProcessor.getHooks(buildParameters.customJobHooks).filter((x) =>
      x.step.includes(`all`),
    );
export class CloudRunnerCustomHooks {
  // TODO also accept hooks as yaml files in the repo
  public static ApplyHooksToCommands(commands: string, buildParameters: BuildParameters): string {
    const hooks = CloudRunnerCustomHooks.getHooks(buildParameters.customJobHooks).filter((x) => x.step.includes(`all`));

    return `echo "---"
      echo "start cloud runner init"
@@ -1,9 +1,13 @@
import path from 'path';
import { CloudRunner } from '../..';
import CloudRunner from './../cloud-runner';

export class CloudRunnerFolders {
  public static readonly repositoryFolder = 'repo';

  public static ToLinuxFolder(folder: string) {
    return folder.replace(/\\/g, `/`);
  }

  // Only the following paths that do not start a path.join with another "Full" suffixed property need to start with an absolute /

  public static get uniqueCloudRunnerJobFolderAbsolute(): string {
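`ToLinuxFolder` simply rewrites Windows path separators, which is what lets the same folder constants be reused inside the shell commands assembled later in this diff; for example:

    // A Windows-style path is normalised to forward slashes (value illustrative):
    CloudRunnerFolders.ToLinuxFolder('C:\\cloud-runner-cache\\cache\\key'); // => 'C:/cloud-runner-cache/cache/key'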
@@ -0,0 +1,10 @@
import Input from '../../input';
import CloudRunnerOptions from '../cloud-runner-options';

class CloudRunnerOptionsReader {
  static GetProperties() {
    return [...Object.getOwnPropertyNames(Input), ...Object.getOwnPropertyNames(CloudRunnerOptions)];
  }
}

export default CloudRunnerOptionsReader;
@@ -1,5 +1,6 @@
import Input from '../../input';
import { GenericInputReader } from '../../input-readers/generic-input-reader';
import CloudRunnerOptions from '../cloud-runner-options';

const formatFunction = (value, arguments_) => {
  for (const element of arguments_) {

@@ -12,6 +13,8 @@ const formatFunction = (value, arguments_) => {
class CloudRunnerQueryOverride {
  static queryOverrides: any;

  // TODO accept premade secret sources or custom secret source definition yamls

  public static query(key, alternativeKey) {
    if (CloudRunnerQueryOverride.queryOverrides && CloudRunnerQueryOverride.queryOverrides[key] !== undefined) {
      return CloudRunnerQueryOverride.queryOverrides[key];

@@ -28,11 +31,11 @@ class CloudRunnerQueryOverride {
  }

  private static shouldUseOverride(query) {
    if (Input.readInputOverrideCommand() !== '') {
      if (Input.readInputFromOverrideList() !== '') {
    if (CloudRunnerOptions.readInputOverrideCommand() !== '') {
      if (CloudRunnerOptions.readInputFromOverrideList() !== '') {
        const doesInclude =
          Input.readInputFromOverrideList().split(',').includes(query) ||
          Input.readInputFromOverrideList().split(',').includes(Input.ToEnvVarFormat(query));
          CloudRunnerOptions.readInputFromOverrideList().split(',').includes(query) ||
          CloudRunnerOptions.readInputFromOverrideList().split(',').includes(Input.ToEnvVarFormat(query));

        return doesInclude ? true : false;
      } else {

@@ -46,11 +49,13 @@ class CloudRunnerQueryOverride {
      throw new Error(`Should not be trying to run override query on ${query}`);
    }

    return await GenericInputReader.Run(formatFunction(Input.readInputOverrideCommand(), [{ key: 0, value: query }]));
    return await GenericInputReader.Run(
      formatFunction(CloudRunnerOptions.readInputOverrideCommand(), [{ key: 0, value: query }]),
    );
  }

  public static async PopulateQueryOverrideInput() {
    const queries = Input.readInputFromOverrideList().split(',');
    const queries = CloudRunnerOptions.readInputFromOverrideList().split(',');
    CloudRunnerQueryOverride.queryOverrides = new Array();
    for (const element of queries) {
      if (CloudRunnerQueryOverride.shouldUseOverride(element)) {
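For orientation, `formatFunction` appears to substitute positional placeholders from the key/value list it receives, so the override command template can reference the query being resolved; a hedged usage sketch (placeholder syntax and values illustrative):

    // With a template of `echo "{0}"`, the call below would be expected to yield `echo "UNITY_LICENSE"`.
    const resolved = formatFunction('echo "{0}"', [{ key: 0, value: 'UNITY_LICENSE' }]);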
@@ -1,22 +0,0 @@
import Input from '../../input';
import { CloudRunnerSystem } from './cloud-runner-system';

class DependencyOverrideService {
  public static async CheckHealth() {
    if (Input.checkDependencyHealthOverride) {
      try {
        await CloudRunnerSystem.Run(Input.checkDependencyHealthOverride);
      } catch {
        return false;
      }
    }

    return true;
  }
  public static async TryStartDependencies() {
    if (Input.startDependenciesOverride) {
      await CloudRunnerSystem.Run(Input.startDependenciesOverride);
    }
  }
}
export default DependencyOverrideService;
@@ -1,42 +1,45 @@
import { CloudRunner, Input } from '../..';
import CloudRunner from '../cloud-runner';
import { Input } from '../..';
import ImageEnvironmentFactory from '../../image-environment-factory';
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
import { CloudRunnerBuildCommandProcessor } from './cloud-runner-build-command-process';
import { CloudRunnerCustomHooks } from './cloud-runner-custom-hooks';
import CloudRunnerSecret from './cloud-runner-secret';
import CloudRunnerQueryOverride from './cloud-runner-query-override';
import CloudRunnerOptionsReader from './cloud-runner-options-reader';
import BuildParameters from '../../build-parameters';

export class TaskParameterSerializer {
  public static readBuildEnvironmentVariables(): CloudRunnerEnvironmentVariable[] {
  public static readBuildEnvironmentVariables(buildParameters: BuildParameters): CloudRunnerEnvironmentVariable[] {
    return [
      {
        name: 'ContainerMemory',
        value: CloudRunner.buildParameters.cloudRunnerMemory,
        value: buildParameters.cloudRunnerMemory,
      },
      {
        name: 'ContainerCpu',
        value: CloudRunner.buildParameters.cloudRunnerCpu,
        value: buildParameters.cloudRunnerCpu,
      },
      {
        name: 'BUILD_TARGET',
        value: CloudRunner.buildParameters.targetPlatform,
        value: buildParameters.targetPlatform,
      },
      ...TaskParameterSerializer.serializeBuildParamsAndInput,
      ...TaskParameterSerializer.serializeBuildParamsAndInput(buildParameters),
    ];
  }
  private static get serializeBuildParamsAndInput() {
  private static serializeBuildParamsAndInput(buildParameters: BuildParameters) {
    let array = new Array();
    array = TaskParameterSerializer.readBuildParameters(array);
    array = TaskParameterSerializer.readBuildParameters(array, buildParameters);
    array = TaskParameterSerializer.readInput(array);
    const configurableHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks);
    const configurableHooks = CloudRunnerCustomHooks.getHooks(buildParameters.customJobHooks);
    const secrets = configurableHooks.map((x) => x.secrets).filter((x) => x !== undefined && x.length > 0);
    if (secrets.length > 0) {
      // eslint-disable-next-line unicorn/no-array-reduce
      array.push(secrets.reduce((x, y) => [...x, ...y]));
    }

    array = array.filter(
      (x) => x.value !== undefined && x.name !== '0' && x.value !== '' && x.name !== 'prototype' && x.name !== 'length',
    );
    const blocked = new Set(['0', 'length', 'prototype', '', 'projectPath', 'unityVersion']);

    array = array.filter((x) => !blocked.has(x.name));
    array = array.map((x) => {
      x.name = Input.ToEnvVarFormat(x.name);
      x.value = `${x.value}`;

@@ -47,21 +50,21 @@ export class TaskParameterSerializer {
    return array;
  }

  private static readBuildParameters(array: any[]) {
    const keys = Object.keys(CloudRunner.buildParameters);
  private static readBuildParameters(array: any[], buildParameters: BuildParameters) {
    const keys = Object.keys(buildParameters);
    for (const element of keys) {
      array.push({
        name: element,
        value: CloudRunner.buildParameters[element],
        value: buildParameters[element],
      });
    }
    array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunner.buildParameters) });
    array.push({ name: 'buildParameters', value: JSON.stringify(buildParameters) });

    return array;
  }

  private static readInput(array: any[]) {
    const input = Object.getOwnPropertyNames(Input);
    const input = CloudRunnerOptionsReader.GetProperties();
    for (const element of input) {
      if (typeof Input[element] !== 'function' && array.filter((x) => x.name === element).length === 0) {
        array.push({
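To make the serialization concrete: every build parameter key becomes one environment variable whose name is passed through `Input.ToEnvVarFormat` and whose value is stringified. A hedged illustration (the exact formatted name depends on `ToEnvVarFormat`, only partially shown later in this diff):

    const example: CloudRunnerEnvironmentVariable = {
      name: Input.ToEnvVarFormat('cloudRunnerMemory'), // presumably 'CLOUD_RUNNER_MEMORY'
      value: `${buildParameters.cloudRunnerMemory}`,
    };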
@@ -0,0 +1,77 @@
import { BuildParameters, ImageTag } from '../..';
import CloudRunner from '../cloud-runner';
import Input from '../../input';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import UnityVersioning from '../../unity-versioning';
import { Cli } from '../../cli/cli';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerOptions from '../cloud-runner-options';
import GitHub from '../../github';

async function CreateParameters(overrides) {
  if (overrides) {
    Cli.options = overrides;
  }
  const originalValue = GitHub.githubInputEnabled;
  GitHub.githubInputEnabled = false;
  const results = await BuildParameters.create();
  GitHub.githubInputEnabled = originalValue;
  delete Cli.options;

  return results;
}

describe('Cloud Runner', () => {
  it('Responds', () => {});
});
describe('Cloud Runner', () => {
  const testSecretName = 'testSecretName';
  const testSecretValue = 'testSecretValue';
  it('Responds', () => {});
  if (CloudRunnerOptions.cloudRunnerTests) {
    it('All build parameters sent to cloud runner as env vars', async () => {
      // Setup parameters
      const buildParameter = await CreateParameters({
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
        targetPlatform: 'StandaloneLinux64',
        customJob: `
        - name: 'step 1'
          image: 'alpine'
          commands: 'printenv'
          secrets:
            - name: '${testSecretName}'
              value: '${testSecretValue}'
        `,
      });
      const baseImage = new ImageTag(buildParameter);

      // Run the job
      const file = await CloudRunner.run(buildParameter, baseImage.toString());

      // Assert results
      expect(file).toContain(JSON.stringify(buildParameter));
      expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
      const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables(buildParameter);
      const newLinePurgedFile = file
        .replace(/\s+/g, '')
        .replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
      for (const element of environmentVariables) {
        if (element.value !== undefined && typeof element.value !== 'function') {
          if (typeof element.value === `string`) {
            element.value = element.value.replace(/\s+/g, '');
          }
          CloudRunnerLogger.log(`checking input/build param ${element.name} ${element.value}`);
        }
      }
      for (const element of environmentVariables) {
        if (element.value !== undefined && typeof element.value !== 'function') {
          expect(newLinePurgedFile).toContain(`${element.name}`);
          expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
        }
      }
    }, 100000);
  }
});
@@ -0,0 +1,61 @@
import CloudRunner from '../cloud-runner';
import { BuildParameters, ImageTag } from '../..';
import UnityVersioning from '../../unity-versioning';
import { Cli } from '../../cli/cli';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';
import CloudRunnerOptions from '../cloud-runner-options';
import GitHub from '../../github';

async function CreateParameters(overrides) {
  if (overrides) {
    Cli.options = overrides;
  }
  const originalValue = GitHub.githubInputEnabled;
  GitHub.githubInputEnabled = false;
  const results = await BuildParameters.create();
  GitHub.githubInputEnabled = originalValue;
  delete Cli.options;

  return results;
}

describe('Cloud Runner Caching', () => {
  it('Responds', () => {});
});
describe('Cloud Runner Caching', () => {
  if (CloudRunnerOptions.cloudRunnerTests) {
    it('Run one build it should not use cache, run subsequent build which should use cache', async () => {
      const overrides = {
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
        targetPlatform: 'StandaloneLinux64',
        cacheKey: `test-case-${uuidv4()}`,
      };
      const buildParameter = await CreateParameters(overrides);
      expect(buildParameter.projectPath).toEqual(overrides.projectPath);

      const baseImage = new ImageTag(buildParameter);
      const results = await CloudRunner.run(buildParameter, baseImage.toString());
      const libraryString = 'Rebuilding Library because the asset database could not be found!';
      const buildSucceededString = 'Build succeeded';

      expect(results).toContain(libraryString);
      expect(results).toContain(buildSucceededString);
      expect(results).not.toContain('There is 0 files/dir in the source folder Library');
      expect(results).not.toContain('There is 0 files/dir in the source folder LFS');

      CloudRunnerLogger.log(`run 1 succeeded`);
      const buildParameter2 = await CreateParameters(overrides);
      const baseImage2 = new ImageTag(buildParameter2);
      const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
      CloudRunnerLogger.log(`run 2 succeeded`);

      expect(results2).toContain(buildSucceededString);
      expect(results2).not.toContain('There is 0 files/dir in the cache pulled contents for Library');
      expect(results2).not.toContain('There is 0 files/dir in the cache pulled contents for LFS');
      expect(results2).not.toContain(libraryString);
    }, 10000000);
  }
});
@@ -0,0 +1,75 @@
import { BuildParameters, ImageTag } from '../..';
import CloudRunner from '../cloud-runner';
import Input from '../../input';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import UnityVersioning from '../../unity-versioning';
import { Cli } from '../../cli/cli';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerOptions from '../cloud-runner-options';
import GitHub from '../../github';

async function CreateParameters(overrides) {
  if (overrides) {
    Cli.options = overrides;
  }
  const originalValue = GitHub.githubInputEnabled;
  GitHub.githubInputEnabled = false;
  const results = await BuildParameters.create();
  GitHub.githubInputEnabled = originalValue;
  delete Cli.options;

  return results;
}
describe('Cloud Runner', () => {
  const testSecretName = 'testSecretName';
  const testSecretValue = 'testSecretValue';
  it('Responds', () => {});

  if (CloudRunnerOptions.cloudRunnerTests) {
    it('All build parameters sent to cloud runner as env vars', async () => {
      // Setup parameters
      const buildParameter = await CreateParameters({
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.read('test-project'),
        targetPlatform: 'StandaloneLinux64',
        customJob: `
        - name: 'step 1'
          image: 'alpine'
          commands: 'printenv'
          secrets:
            - name: '${testSecretName}'
              value: '${testSecretValue}'
        `,
      });
      const baseImage = new ImageTag(buildParameter);

      // Run the job
      const file = await CloudRunner.run(buildParameter, baseImage.toString());

      // Assert results
      expect(file).toContain(JSON.stringify(buildParameter));
      expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
      const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables(buildParameter);
      const newLinePurgedFile = file
        .replace(/\s+/g, '')
        .replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
      for (const element of environmentVariables) {
        if (element.value !== undefined && typeof element.value !== 'function') {
          if (typeof element.value === `string`) {
            element.value = element.value.replace(/\s+/g, '');
          }
          CloudRunnerLogger.log(`checking input/build param ${element.name} ${element.value}`);
        }
      }
      for (const element of environmentVariables) {
        if (element.value !== undefined && typeof element.value !== 'function') {
          expect(newLinePurgedFile).toContain(`${element.name}`);
          CloudRunnerLogger.log(`Contains ${element.name}`);
          expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
        }
      }
    }, 100000);
  }
});
@@ -4,9 +4,10 @@ import { CloudRunnerStepState } from '../cloud-runner-step-state';
import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
import * as core from '@actions/core';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
import { CloudRunnerCustomHooks } from '../services/cloud-runner-custom-hooks';
import path from 'path';
import CloudRunner from '../cloud-runner';
import CloudRunnerOptions from '../cloud-runner-options';

export class BuildAutomationWorkflow implements WorkflowInterface {
  async run(cloudRunnerStepState: CloudRunnerStepState) {

@@ -18,6 +19,7 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
  }

  private static async standardBuildAutomation(baseImage: any) {
    // TODO accept post and pre build steps as yaml files in the repo
    try {
      CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);

@@ -62,20 +64,22 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
  }

  private static get BuildWorkflow() {
    const setupHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks).filter(
      (x) => x.step.includes(`setup`),
    const setupHooks = CloudRunnerCustomHooks.getHooks(CloudRunner.buildParameters.customJobHooks).filter((x) =>
      x.step.includes(`setup`),
    );
    const buildHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks).filter(
      (x) => x.step.includes(`build`),
    const buildHooks = CloudRunnerCustomHooks.getHooks(CloudRunner.buildParameters.customJobHooks).filter((x) =>
      x.step.includes(`build`),
    );
    const builderPath = CloudRunnerFolders.ToLinuxFolder(
      path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`),
    );
    const builderPath = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`).replace(/\\/g, `/`);

    return `apt-get update > /dev/null
      apt-get install -y tar tree npm git-lfs jq git > /dev/null
      npm install -g n > /dev/null
      n stable > /dev/null
      ${setupHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
      export GITHUB_WORKSPACE="${CloudRunnerFolders.repoPathAbsolute.replace(/\\/g, `/`)}"
      export GITHUB_WORKSPACE="${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}"
      ${BuildAutomationWorkflow.setupCommands(builderPath)}
      ${setupHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
      ${buildHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}

@@ -86,38 +90,44 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
  private static setupCommands(builderPath) {
    return `export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
      echo "game ci cloud runner clone"
      mkdir -p ${CloudRunnerFolders.builderPathAbsolute.replace(/\\/g, `/`)}
      git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${
      CloudRunnerFolders.unityBuilderRepoUrl
    } "${CloudRunnerFolders.builderPathAbsolute.replace(/\\/g, `/`)}"
      mkdir -p ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.builderPathAbsolute)}
      git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${CloudRunnerFolders.ToLinuxFolder(
        CloudRunnerFolders.unityBuilderRepoUrl,
      )} "${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.builderPathAbsolute)}"
      chmod +x ${builderPath}
      echo "game ci cloud runner bootstrap"
      node ${builderPath} -m remote-cli`;
  }

  // ToDo: Replace with a very simple "node ${builderPath} -m build-cli" to run the scripts below without enlarging the request size
  private static BuildCommands(builderPath, guid) {
    const linuxCacheFolder = CloudRunnerFolders.cacheFolderFull.replace(/\\/g, `/`);
    const linuxCacheFolder = CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.cacheFolderFull);
    const distFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist');
    const ubuntuPlatformsFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', 'platforms', 'ubuntu');

    return `echo "game ci cloud runner init"
      mkdir -p ${`${CloudRunnerFolders.projectBuildFolderAbsolute}/build`.replace(/\\/g, `/`)}
      cd ${CloudRunnerFolders.projectPathAbsolute}
      cp -r "${path.join(distFolder, 'default-build-script').replace(/\\/g, `/`)}" "/UnityBuilderAction"
      cp -r "${path.join(ubuntuPlatformsFolder, 'entrypoint.sh').replace(/\\/g, `/`)}" "/entrypoint.sh"
      cp -r "${path.join(ubuntuPlatformsFolder, 'steps').replace(/\\/g, `/`)}" "/steps"
      mkdir -p ${`${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute)}/build`}
      cd ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectPathAbsolute)}
      cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(distFolder, 'default-build-script'))}" "/UnityBuilderAction"
      cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'entrypoint.sh'))}" "/entrypoint.sh"
      cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'steps'))}" "/steps"
      chmod -R +x "/entrypoint.sh"
      chmod -R +x "/steps"
      echo "game ci cloud runner start"
      /entrypoint.sh
      echo "game ci cloud runner push library to cache"
      chmod +x ${builderPath}
      node ${builderPath} -m cache-push --cachePushFrom ${
      CloudRunnerFolders.libraryFolderAbsolute
    } --artifactName lib-${guid} --cachePushTo ${linuxCacheFolder}/Library
      node ${builderPath} -m cache-push --cachePushFrom ${CloudRunnerFolders.ToLinuxFolder(
        CloudRunnerFolders.libraryFolderAbsolute,
      )} --artifactName lib-${guid} --cachePushTo ${CloudRunnerFolders.ToLinuxFolder(`${linuxCacheFolder}/Library`)}
      echo "game ci cloud runner push build to cache"
      node ${builderPath} -m cache-push --cachePushFrom ${
      CloudRunnerFolders.projectBuildFolderAbsolute
    } --artifactName build-${guid} --cachePushTo ${`${linuxCacheFolder}/build`.replace(/\\/g, `/`)}`;
      node ${builderPath} -m cache-push --cachePushFrom ${CloudRunnerFolders.ToLinuxFolder(
        CloudRunnerFolders.projectBuildFolderAbsolute,
      )} --artifactName build-${guid} --cachePushTo ${`${CloudRunnerFolders.ToLinuxFolder(`${linuxCacheFolder}/build`)}`}
      ${BuildAutomationWorkflow.GetCleanupCommand(CloudRunnerFolders.projectPathAbsolute)}`;
  }

  private static GetCleanupCommand(cleanupPath: string) {
    return CloudRunnerOptions.retainWorkspaces ? `` : `rm -r ${CloudRunnerFolders.ToLinuxFolder(cleanupPath)}`;
  }
}
@@ -33,7 +33,7 @@ export class CustomWorkflow {
          step['image'],
          step['commands'],
          `/${CloudRunnerFolders.buildVolumeFolder}`,
          `/${CloudRunnerFolders.buildVolumeFolder}/`,
          `/${CloudRunnerFolders.projectPathAbsolute}/`,
          CloudRunner.cloudRunnerEnvironmentVariables,
          [...CloudRunner.defaultSecrets, ...stepSecrets],
        );
@@ -0,0 +1,5 @@
class GitHub {
  public static githubInputEnabled: boolean = true;
}

export default GitHub;
@@ -46,6 +46,7 @@ class ImageEnvironmentFactory {
      { name: 'ANDROID_KEYSTORE_PASS', value: parameters.androidKeystorePass },
      { name: 'ANDROID_KEYALIAS_NAME', value: parameters.androidKeyaliasName },
      { name: 'ANDROID_KEYALIAS_PASS', value: parameters.androidKeyaliasPass },
      { name: 'ANDROID_TARGET_SDK_VERSION', value: parameters.androidTargetSdkVersion },
      { name: 'ANDROID_SDK_MANAGER_PARAMETERS', value: parameters.androidSdkManagerParameters },
      { name: 'CUSTOM_PARAMETERS', value: parameters.customParameters },
      { name: 'CHOWN_FILES_TO', value: parameters.chownFilesTo },
@@ -83,7 +83,7 @@ class ImageTag {
      case Platform.types.StandaloneWindows:
      case Platform.types.StandaloneWindows64:
        // Can only build windows-il2cpp on a windows based system
        if (process.platform === 'win32') {
        if (process.env.il2cppEnabled && process.platform === 'win32') {
          // Unity versions before 2019.3 do not support il2cpp
          if (major >= 2020 || (major === 2019 && minor >= 3)) {
            return windowsIl2cpp;

@@ -96,7 +96,7 @@ class ImageTag {
        return windows;
      case Platform.types.StandaloneLinux64: {
        // Unity versions before 2019.3 do not support il2cpp
        if (major >= 2020 || (major === 2019 && minor >= 3)) {
        if ((process.env.il2cppEnabled && major >= 2020) || (major === 2019 && minor >= 3)) {
          return linuxIl2cpp;
        }

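One behavioural difference worth flagging between the two branches above: the new Linux condition `(process.env.il2cppEnabled && major >= 2020) || (major === 2019 && minor >= 3)` still selects the il2cpp image for any 2019.3+ project even when `il2cppEnabled` is unset. If the Windows behaviour (il2cpp only when explicitly enabled and the version supports it) is the intent, a sketch of that grouping would be:

    // Sketch only: gate on the flag first, then on version support.
    if (process.env.il2cppEnabled && (major >= 2020 || (major === 2019 && minor >= 3))) {
      return linuxIl2cpp;
    }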
@@ -1,9 +1,9 @@
import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
import Input from '../input';
import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

export class GenericInputReader {
  public static async Run(command) {
    if (Input.cloudRunnerCluster === 'local') {
    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
      return '';
    }

@@ -2,15 +2,19 @@ import { assert } from 'console';
import fs from 'fs';
import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';
import Input from '../input';

export class GitRepoReader {
  public static async GetRemote() {
    if (Input.cloudRunnerCluster === 'local') {
    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
      return '';
    }
    assert(fs.existsSync(`.git`));
    const value = (await CloudRunnerSystem.Run(`git remote -v`, false, true)).replace(/ /g, ``);
    const value = (await CloudRunnerSystem.Run(`cd ${Input.projectPath} && git remote -v`, false, true)).replace(
      / /g,
      ``,
    );
    CloudRunnerLogger.log(`value ${value}`);
    assert(value.includes('github.com'));

@@ -18,12 +22,12 @@ export class GitRepoReader {
  }

  public static async GetBranch() {
    if (Input.cloudRunnerCluster === 'local') {
    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
      return '';
    }
    assert(fs.existsSync(`.git`));

    return (await CloudRunnerSystem.Run(`git branch --show-current`, false, true))
    return (await CloudRunnerSystem.Run(`cd ${Input.projectPath} && git branch --show-current`, false, true))
      .split('\n')[0]
      .replace(/ /g, ``)
      .replace('/head', '');
@@ -1,10 +1,10 @@
import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
import * as core from '@actions/core';
import Input from '../input';
import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

export class GithubCliReader {
  static async GetGitHubAuthToken() {
    if (Input.cloudRunnerCluster === 'local') {
    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
      return '';
    }
    try {
@@ -1,10 +1,10 @@
import path from 'path';
import fs from 'fs';
import YAML from 'yaml';
import Input from '../input';
import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

export function ReadLicense() {
  if (Input.cloudRunnerCluster === 'local') {
  if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
    return '';
  }
  const pipelineFile = path.join(__dirname, `.github`, `workflows`, `cloud-runner-k8s-pipeline.yml`);
@@ -3,6 +3,7 @@ import path from 'path';
import { Cli } from './cli/cli';
import CloudRunnerQueryOverride from './cloud-runner/services/cloud-runner-query-override';
import Platform from './platform';
import GitHub from './github';

const core = require('@actions/core');

@@ -14,10 +15,8 @@ const core = require('@actions/core');
 * Todo: rename to UserInput and remove anything that is not direct input from the user / ci workflow
 */
class Input {
  public static githubInputEnabled: boolean = true;

  public static getInput(query) {
    if (Input.githubInputEnabled) {
    if (GitHub.githubInputEnabled) {
      const coreInput = core.getInput(query);
      if (coreInput && coreInput !== '') {
        return coreInput;

@@ -61,17 +60,6 @@ class Input {
      return '';
    }
  }
  static get cloudRunnerBuilderPlatform() {
    const input = Input.getInput('cloudRunnerBuilderPlatform');
    if (input) {
      return input;
    }
    if (Input.cloudRunnerCluster !== 'local') {
      return 'linux';
    }

    return;
  }

  static get gitSha() {
    if (Input.getInput(`GITHUB_SHA`)) {

@@ -175,34 +163,6 @@ class Input {
    return core.getInput('gitPrivateToken') || false;
  }

  static get customJob() {
    return Input.getInput('customJob') || '';
  }

  static customJobHooks() {
    return Input.getInput('customJobHooks') || '';
  }

  static cachePushOverrideCommand() {
    return Input.getInput('cachePushOverrideCommand') || '';
  }

  static cachePullOverrideCommand() {
    return Input.getInput('cachePullOverrideCommand') || '';
  }

  static readInputFromOverrideList() {
    return Input.getInput('readInputFromOverrideList') || '';
  }

  static readInputOverrideCommand() {
    return Input.getInput('readInputOverrideCommand') || '';
  }

  static get cloudRunnerBranch() {
    return Input.getInput('cloudRunnerBranch') || 'cloud-runner-develop';
  }

  static get chownFilesTo() {
    return Input.getInput('chownFilesTo') || '';
  }

@@ -213,66 +173,6 @@ class Input {
    return input === 'true';
  }

  static get postBuildSteps() {
    return Input.getInput('postBuildSteps') || '';
  }

  static get preBuildSteps() {
    return Input.getInput('preBuildSteps') || '';
  }

  static get awsBaseStackName() {
    return Input.getInput('awsBaseStackName') || 'game-ci';
  }

  static get cloudRunnerCluster() {
    if (Cli.isCliMode) {
      return Input.getInput('cloudRunnerCluster') || 'aws';
    }

    return Input.getInput('cloudRunnerCluster') || 'local';
  }

  static get cloudRunnerCpu() {
    return Input.getInput('cloudRunnerCpu');
  }

  static get cloudRunnerMemory() {
    return Input.getInput('cloudRunnerMemory');
  }

  static get kubeConfig() {
    return Input.getInput('kubeConfig') || '';
  }

  static get kubeVolume() {
    return Input.getInput('kubeVolume') || '';
  }

  static get kubeVolumeSize() {
    return Input.getInput('kubeVolumeSize') || '5Gi';
  }

  static get kubeStorageClass(): string {
    return Input.getInput('kubeStorageClass') || '';
  }

  static get checkDependencyHealthOverride(): string {
    return Input.getInput('checkDependencyHealthOverride') || '';
  }

  static get startDependenciesOverride(): string {
    return Input.getInput('startDependenciesOverride') || '';
  }

  static get cacheKey(): string {
    return Input.getInput('cacheKey') || Input.branch;
  }

  static get cloudRunnerTests(): boolean {
    return Input.getInput(`cloudRunnerTests`) || false;
  }

  public static ToEnvVarFormat(input: string) {
    if (input.toUpperCase() === input) {
      return input;