Further improvement of CLI implementation and error handling
parent: 13088351bd
commit: 08810c4d51
@ -39,7 +39,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
const core = __importStar(__webpack_require__(42186));
|
||||
const model_1 = __webpack_require__(41359);
|
||||
const commander_ts_1 = __webpack_require__(40451);
|
||||
const remote_client_cli_ts_1 = __webpack_require__(58268);
|
||||
const remote_client_1 = __webpack_require__(48135);
|
||||
function runMain() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
|
|
@ -71,33 +71,37 @@ function runMain() {
|
|||
}
|
||||
function runCli() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Created base image`);
|
||||
options.projectPath = './test-project';
|
||||
options.projectPath = 'test-project';
|
||||
options.versioning = 'None';
|
||||
model_1.Input.cliOptions = options;
|
||||
const buildParameter = yield model_1.BuildParameters.create();
|
||||
yield model_1.CloudRunner.run(buildParameter, ' ');
|
||||
const baseImage = new model_1.ImageTag(buildParameter);
|
||||
yield model_1.CloudRunner.run(buildParameter, baseImage.toString());
|
||||
});
|
||||
}
|
||||
function runRemoteCli(options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
yield remote_client_cli_ts_1.RemoteClientCli.RunRemoteClient(options);
|
||||
yield remote_client_1.RemoteClientCli.RunRemoteClient(options);
|
||||
});
|
||||
}
|
||||
const program = new commander_ts_1.Command();
|
||||
program.version('0.0.1');
|
||||
program.option('-m, --mode <mode>', 'cli or default');
|
||||
program.option('-region, --region <region>', 'aws region');
|
||||
program.option('-b, --branch <branch>', 'unity builder branch to clone');
|
||||
program.option('-sN, --awsBaseStackName <awsBaseStackName>', 'aws stack name');
|
||||
program.parse(process.argv);
|
||||
const options = program.opts();
|
||||
model_1.Input.githubEnabled = false;
|
||||
process.env.AWS_REGION = options.region;
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Created base image ${options.mode}`);
|
||||
console.log(`Entrypoint: ${options.mode}`);
|
||||
switch (options.mode) {
|
||||
case 'cli':
|
||||
model_1.Input.githubEnabled = false;
|
||||
runCli();
|
||||
break;
|
||||
case 'remote-cli':
|
||||
model_1.Input.githubEnabled = false;
|
||||
runRemoteCli(options);
|
||||
break;
|
||||
default:
|
||||
|
|
@ -275,6 +279,7 @@ class BuildParameters {
|
|||
preBuildSteps: input_1.default.preBuildSteps,
|
||||
customBuildSteps: input_1.default.customBuildSteps,
|
||||
runNumber: input_1.default.runNumber,
|
||||
branch: input_1.default.branch,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
|
@ -397,7 +402,7 @@ class AWSBaseStack {
|
|||
var _a, _b, _c, _d, _e;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const baseStackName = this.baseStackName;
|
||||
const baseStack = fs.readFileSync(`${__dirname}/cloud-formations/base-setup.yml`, 'utf8');
|
||||
const baseStack = fs.readFileSync(`${__dirname}\\cloud-formations\\base-setup.yml`, 'utf8');
|
||||
// Cloud Formation Input
|
||||
const describeStackInput = {
|
||||
StackName: baseStackName,
|
||||
|
|
@ -426,7 +431,7 @@ class AWSBaseStack {
|
|||
Parameters: parameters,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
};
|
||||
const stacks = (_a = (yield CF.listStacks({ StackStatusFilter: ['UPDATE_COMPLETE', 'CREATE_COMPLETE'] }).promise()).StackSummaries) === null || _a === void 0 ? void 0 : _a.map((x) => x.StackName);
|
||||
const stacks = (_a = (yield CF.listStacks({ StackStatusFilter: ['UPDATE_COMPLETE', 'CREATE_COMPLETE', 'ROLLBACK_COMPLETE'] }).promise()).StackSummaries) === null || _a === void 0 ? void 0 : _a.map((x) => x.StackName);
|
||||
const stackExists = (stacks === null || stacks === void 0 ? void 0 : stacks.includes(baseStackName)) || false;
|
||||
const describeStack = () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield CF.describeStacks(describeStackInput).promise();
|
||||
|
|
@ -518,6 +523,7 @@ const SDK = __importStar(__webpack_require__(71786));
|
|||
const aws_build_runner_1 = __importDefault(__webpack_require__(29102));
|
||||
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
|
||||
const aws_job_stack_1 = __webpack_require__(85819);
|
||||
const aws_base_stack_1 = __webpack_require__(28730);
|
||||
class AWSBuildEnvironment {
|
||||
constructor(buildParameters) {
|
||||
this.baseStackName = buildParameters.awsBaseStackName;
|
||||
|
|
@ -548,8 +554,10 @@ class AWSBuildEnvironment {
|
|||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const ECS = new SDK.ECS();
|
||||
const CF = new SDK.CloudFormation();
|
||||
cloud_runner_logger_1.default.log(`AWS Region: ${CF.config.region}`);
|
||||
const entrypoint = ['/bin/sh'];
|
||||
const t0 = Date.now();
|
||||
yield new aws_base_stack_1.AWSBaseStack(this.baseStackName).setupBaseStack(CF);
|
||||
const taskDef = yield new aws_job_stack_1.AWSJobStack(this.baseStackName).setupCloudFormations(CF, buildId, image, entrypoint, commands, mountdir, workingdir, secrets);
|
||||
let t2;
|
||||
try {
|
||||
|
|
@ -905,7 +913,6 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
exports.AWSJobStack = void 0;
|
||||
const cloud_runner_constants_1 = __importDefault(__webpack_require__(91311));
|
||||
const nanoid_1 = __webpack_require__(39140);
|
||||
const aws_base_stack_1 = __webpack_require__(28730);
|
||||
const aws_templates_1 = __webpack_require__(6436);
|
||||
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
|
||||
const fs = __importStar(__webpack_require__(35747));
|
||||
|
|
@ -917,10 +924,9 @@ class AWSJobStack {
|
|||
setupCloudFormations(CF, buildGuid, image, entrypoint, commands, mountdir, workingdir, secrets) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const logGuid = nanoid_1.customAlphabet(cloud_runner_constants_1.default.alphabet, 9)();
|
||||
commands[1] += `
|
||||
echo "${logGuid}"
|
||||
commands[1] += `
|
||||
echo "${logGuid}"
|
||||
`;
|
||||
yield new aws_base_stack_1.AWSBaseStack(this.baseStackName).setupBaseStack(CF);
|
||||
const taskDefStackName = `${this.baseStackName}-${buildGuid}`;
|
||||
let taskDefCloudFormation = aws_templates_1.AWSTemplates.readTaskCloudFormationTemplate();
|
||||
const cleanupTaskDefStackName = `${taskDefStackName}-cleanup`;
|
||||
|
|
@ -1136,7 +1142,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const aws_build_platform_1 = __importDefault(__webpack_require__(81683));
|
||||
const cloud_runner_namespace_1 = __importDefault(__webpack_require__(63287));
|
||||
const cloud_runner_state_1 = __webpack_require__(70912);
|
||||
const kubernetes_build_platform_1 = __importDefault(__webpack_require__(10471));
|
||||
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
|
||||
|
|
@ -1146,24 +1151,20 @@ const cloud_runner_error_1 = __webpack_require__(2600);
|
|||
class CloudRunner {
|
||||
static setup(buildParameters) {
|
||||
cloud_runner_logger_1.default.setup();
|
||||
cloud_runner_state_1.CloudRunnerState.buildParams = buildParameters;
|
||||
cloud_runner_state_1.CloudRunnerState.buildGuid = cloud_runner_namespace_1.default.generateBuildName(cloud_runner_state_1.CloudRunnerState.readRunNumber(), buildParameters.platform);
|
||||
cloud_runner_state_1.CloudRunnerState.setupBranchName();
|
||||
cloud_runner_state_1.CloudRunnerState.setupFolderVariables();
|
||||
cloud_runner_state_1.CloudRunnerState.setupDefaultSecrets();
|
||||
cloud_runner_state_1.CloudRunnerState.setup(buildParameters);
|
||||
CloudRunner.setupBuildPlatform();
|
||||
}
|
||||
static setupBuildPlatform() {
|
||||
switch (cloud_runner_state_1.CloudRunnerState.buildParams.cloudRunnerCluster) {
|
||||
case 'aws':
|
||||
cloud_runner_logger_1.default.log('Building with AWS');
|
||||
cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform = new aws_build_platform_1.default(cloud_runner_state_1.CloudRunnerState.buildParams);
|
||||
break;
|
||||
default:
|
||||
case 'k8s':
|
||||
cloud_runner_logger_1.default.log('Building with Kubernetes');
|
||||
cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform = new kubernetes_build_platform_1.default(cloud_runner_state_1.CloudRunnerState.buildParams);
|
||||
break;
|
||||
default:
|
||||
case 'aws':
|
||||
cloud_runner_logger_1.default.log('Building with AWS');
|
||||
cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform = new aws_build_platform_1.default(cloud_runner_state_1.CloudRunnerState.buildParams);
|
||||
break;
|
||||
}
|
||||
}
|
||||
static run(buildParameters, baseImage) {
|
||||
|
|
@ -1938,7 +1939,7 @@ exports.default = KubernetesUtilities;
|
|||
|
||||
/***/ }),
|
||||
|
||||
/***/ 58268:
|
||||
/***/ 48135:
|
||||
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
|
@ -1954,7 +1955,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.RemoteClientCli = void 0;
|
||||
const download_repository_1 = __webpack_require__(76596);
|
||||
const download_repository_1 = __webpack_require__(42996);
|
||||
class RemoteClientCli {
|
||||
static RunRemoteClient(options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
|
|
@ -2019,6 +2020,9 @@ class CloudRunnerLogger {
|
|||
static log(message) {
|
||||
core.info(message);
|
||||
}
|
||||
static logLine(message) {
|
||||
core.info(`${message}\n`);
|
||||
}
|
||||
static error(message) {
|
||||
core.error(message);
|
||||
}
|
||||
|
|
@ -2072,7 +2076,16 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.CloudRunnerState = void 0;
|
||||
const image_environment_factory_1 = __importDefault(__webpack_require__(25145));
|
||||
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
|
||||
const cloud_runner_namespace_1 = __importDefault(__webpack_require__(63287));
|
||||
class CloudRunnerState {
|
||||
static setup(buildParameters) {
|
||||
CloudRunnerState.buildParams = buildParameters;
|
||||
CloudRunnerState.buildGuid = cloud_runner_namespace_1.default.generateBuildName(CloudRunnerState.readRunNumber(), buildParameters.platform);
|
||||
CloudRunnerState.setupBranchName();
|
||||
CloudRunnerState.setupFolderVariables();
|
||||
CloudRunnerState.setupDefaultSecrets();
|
||||
}
|
||||
static readBuildEnvironmentVariables() {
|
||||
return [
|
||||
{
|
||||
|
|
@ -2127,6 +2140,10 @@ class CloudRunnerState {
|
|||
name: 'ANDROID_KEYALIAS_NAME',
|
||||
value: CloudRunnerState.buildParams.androidKeyaliasName,
|
||||
},
|
||||
{
|
||||
name: 'SERIALIZED_STATE',
|
||||
value: JSON.stringify(CloudRunnerState),
|
||||
},
|
||||
];
|
||||
}
|
||||
static getHandleCachingCommand() {
|
||||
|
|
@ -2136,7 +2153,9 @@ class CloudRunnerState {
|
|||
return `${CloudRunnerState.builderPathFull}/dist/cloud-runner/cloneNoLFS.sh "${CloudRunnerState.repoPathFull}" "${CloudRunnerState.targetBuildRepoUrl}"`;
|
||||
}
|
||||
static getCloneBuilder() {
|
||||
return `git clone -q ${CloudRunnerState.CloudRunnerBranch} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}`;
|
||||
const cloneCommand = `git clone -b ${CloudRunnerState.branchName} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}`;
|
||||
cloud_runner_logger_1.default.log(cloneCommand);
|
||||
return cloneCommand;
|
||||
}
|
||||
static readRunNumber() {
|
||||
const runNumber = CloudRunnerState.buildParams.runNumber;
|
||||
|
|
@ -2155,20 +2174,11 @@ class CloudRunnerState {
|
|||
CloudRunnerState.cacheFolderFull = `/${CloudRunnerState.buildVolumeFolder}/${CloudRunnerState.cacheFolder}/${CloudRunnerState.branchName}`;
|
||||
CloudRunnerState.lfsDirectory = `${CloudRunnerState.repoPathFull}/.git/lfs`;
|
||||
CloudRunnerState.purgeRemoteCaching = process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
|
||||
CloudRunnerState.CloudRunnerBranch = process.env.CloudRunnerBranch
|
||||
? `--branch "${process.env.CloudRunnerBranch}"`
|
||||
: '';
|
||||
CloudRunnerState.unityBuilderRepoUrl = `https://${CloudRunnerState.buildParams.githubToken}@github.com/game-ci/unity-builder.git`;
|
||||
CloudRunnerState.targetBuildRepoUrl = `https://${CloudRunnerState.buildParams.githubToken}@github.com/${process.env.GITHUB_REPOSITORY}.git`;
|
||||
}
|
||||
static setupBranchName() {
|
||||
var _a;
|
||||
const defaultBranchName = ((_a = process.env.GITHUB_REF) === null || _a === void 0 ? void 0 : _a.split('/').filter((x) => {
|
||||
x = x[0].toUpperCase() + x.slice(1);
|
||||
return x;
|
||||
}).join('')) || '';
|
||||
CloudRunnerState.branchName =
|
||||
process.env.REMOTE_BUILDER_CACHE !== undefined ? process.env.REMOTE_BUILDER_CACHE : defaultBranchName;
|
||||
CloudRunnerState.branchName = CloudRunnerState.buildParams.branch;
|
||||
}
|
||||
static setupDefaultSecrets() {
|
||||
CloudRunnerState.defaultSecrets = [
|
||||
|
|
@ -2284,19 +2294,19 @@ class BuildStep {
|
|||
}
|
||||
static BuildStep(image, environmentVariables, secrets) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
cloud_runner_logger_1.default.log('Starting part 2/4 (build unity project)');
|
||||
cloud_runner_logger_1.default.logLine('Starting part 2/4 (build unity project)');
|
||||
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
|
||||
`
|
||||
printenv
|
||||
export GITHUB_WORKSPACE="${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
|
||||
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/default-build-script/" "/UnityBuilderAction"
|
||||
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/entrypoint.sh" "/entrypoint.sh"
|
||||
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/steps/" "/steps"
|
||||
chmod -R +x "/entrypoint.sh"
|
||||
chmod -R +x "/steps"
|
||||
/entrypoint.sh
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "${cloud_runner_state_1.CloudRunnerState.buildPathFull}"
|
||||
${process.env.DEBUG ? '' : '#'}ls -lh "/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}"
|
||||
`
|
||||
printenv
|
||||
export GITHUB_WORKSPACE="${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
|
||||
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/default-build-script/" "/UnityBuilderAction"
|
||||
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/entrypoint.sh" "/entrypoint.sh"
|
||||
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/steps/" "/steps"
|
||||
chmod -R +x "/entrypoint.sh"
|
||||
chmod -R +x "/steps"
|
||||
/entrypoint.sh
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "${cloud_runner_state_1.CloudRunnerState.buildPathFull}"
|
||||
${process.env.DEBUG ? '' : '#'}ls -lh "/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}"
|
||||
`,
|
||||
], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.projectPathFull}`, environmentVariables, secrets);
|
||||
});
|
||||
|
|
@ -2337,26 +2347,26 @@ class CompressionStep {
|
|||
static CompressionStep(environmentVariables, secrets) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
cloud_runner_logger_1.default.log('Starting step 3/4 build compression');
|
||||
cloud_runner_logger_1.default.logLine('Starting step 3/4 build compression');
|
||||
// Cleanup
|
||||
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, 'alpine', [
|
||||
`
|
||||
printenv
|
||||
apk update -q
|
||||
apk add zip tree -q
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "$repoPathFull"
|
||||
${process.env.DEBUG ? '' : '#'}ls -lh "$repoPathFull"
|
||||
cd "$libraryFolderFull/.."
|
||||
zip -r "lib-$BUILDID.zip" "./Library"
|
||||
mv "lib-$BUILDID.zip" "/$cacheFolderFull/lib"
|
||||
cd "$repoPathFull"
|
||||
ls -lh "$repoPathFull"
|
||||
zip -r "build-$BUILDID.zip" "./${cloud_runner_state_1.CloudRunnerState.buildParams.buildPath}"
|
||||
mv "build-$BUILDID.zip" "/$cacheFolderFull/build-$BUILDID.zip"
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "/$cacheFolderFull"
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "/$cacheFolderFull/.."
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "$repoPathFull"
|
||||
${process.env.DEBUG ? '' : '#'}ls -lh "$repoPathFull"
|
||||
`
|
||||
printenv
|
||||
apk update -q
|
||||
apk add zip tree -q
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "$repoPathFull"
|
||||
${process.env.DEBUG ? '' : '#'}ls -lh "$repoPathFull"
|
||||
cd "$libraryFolderFull/.."
|
||||
zip -r "lib-$BUILDID.zip" "./Library"
|
||||
mv "lib-$BUILDID.zip" "/$cacheFolderFull/lib"
|
||||
cd "$repoPathFull"
|
||||
ls -lh "$repoPathFull"
|
||||
zip -r "build-$BUILDID.zip" "./${cloud_runner_state_1.CloudRunnerState.buildParams.buildPath}"
|
||||
mv "build-$BUILDID.zip" "/$cacheFolderFull/build-$BUILDID.zip"
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "/$cacheFolderFull"
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "/$cacheFolderFull/.."
|
||||
${process.env.DEBUG ? '' : '#'}tree -L 4 "$repoPathFull"
|
||||
${process.env.DEBUG ? '' : '#'}ls -lh "$repoPathFull"
|
||||
`,
|
||||
], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, [
|
||||
...environmentVariables,
|
||||
|
|
@ -2415,27 +2425,33 @@ class DownloadRepositoryStep {
|
|||
static downloadRepositoryStep(image, environmentVariables, secrets) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
cloud_runner_logger_1.default.log('Starting step 1/4 clone and restore cache');
|
||||
cloud_runner_logger_1.default.logLine('Starting step 1/4 clone and restore cache');
|
||||
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
|
||||
` printenv
|
||||
apk update -q
|
||||
apk add unzip zip git-lfs jq tree -q
|
||||
mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}
|
||||
apk add unzip zip git-lfs jq tree nodejs -q
|
||||
|
||||
export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
# mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}
|
||||
mkdir -p ${cloud_runner_state_1.CloudRunnerState.builderPathFull}
|
||||
mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
|
||||
# mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
|
||||
echo "${cloud_runner_state_1.CloudRunnerState.getCloneBuilder()}"
|
||||
${cloud_runner_state_1.CloudRunnerState.getCloneBuilder()}
|
||||
${cloud_runner_state_1.CloudRunnerState.unityBuilderRepoUrl}/dist/index.js -- -m cli
|
||||
echo ' '
|
||||
echo 'Initializing source repository for cloning with caching of LFS files'
|
||||
${cloud_runner_state_1.CloudRunnerState.getCloneNoLFSCommand()}
|
||||
echo 'Source repository initialized'
|
||||
echo ' '
|
||||
echo 'Starting checks of cache for the Unity project Library and git LFS files'
|
||||
${cloud_runner_state_1.CloudRunnerState.getHandleCachingCommand()}
|
||||
chmod +x ${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/index.js
|
||||
node ${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/index.js -m remote-cli
|
||||
# echo ' '
|
||||
# echo 'Initializing source repository for cloning with caching of LFS files'
|
||||
# ${cloud_runner_state_1.CloudRunnerState.getCloneNoLFSCommand()}
|
||||
# echo 'Source repository initialized'
|
||||
# ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
|
||||
# echo ' '
|
||||
# echo 'Starting checks of cache for the Unity project Library and git LFS files'
|
||||
# ${cloud_runner_state_1.CloudRunnerState.getHandleCachingCommand()}
|
||||
`,
|
||||
], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}/`, environmentVariables, secrets);
|
||||
}
|
||||
catch (error) {
|
||||
cloud_runner_logger_1.default.logLine(`ENV VARS ${JSON.stringify(environmentVariables)} SECRETS ${JSON.stringify(secrets)}`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
|
@ -2446,7 +2462,7 @@ exports.DownloadRepositoryStep = DownloadRepositoryStep;
|
|||
|
||||
/***/ }),
|
||||
|
||||
/***/ 76596:
|
||||
/***/ 42996:
|
||||
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
|
@ -2467,7 +2483,11 @@ class DownloadRepository {
|
|||
static run() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
yield new Promise((promise) => {
|
||||
exec('printenv', (error, stdout, stderr) => {
|
||||
exec(`
|
||||
echo "test"
|
||||
apk update -q
|
||||
apk add unzip zip git-lfs jq tree -q
|
||||
`, (error, stdout, stderr) => {
|
||||
if (error) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`error: ${error.message}`);
|
||||
|
|
@ -2999,8 +3019,8 @@ class ImageTag {
|
|||
case platform_1.default.types.Test:
|
||||
return generic;
|
||||
default:
|
||||
throw new Error(`
|
||||
Platform must be one of the ones described in the documentation.
|
||||
throw new Error(`
|
||||
Platform must be one of the ones described in the documentation.
|
||||
"${platform}" is currently not supported.`);
|
||||
}
|
||||
}
|
||||
|
|
@ -3081,12 +3101,32 @@ class Input {
|
|||
static getInput(query) {
|
||||
return Input.githubEnabled
|
||||
? core.getInput(query)
|
||||
: Input.cliOptions !== undefined
|
||||
: Input.cliOptions[query] !== undefined
|
||||
? Input.cliOptions[query]
|
||||
: process.env[query] !== undefined
|
||||
? process.env[query]
|
||||
: false;
|
||||
}
|
||||
static get branch() {
|
||||
if (Input.getInput(`REMOTE_BUILDER_CACHE`)) {
|
||||
return Input.getInput(`REMOTE_BUILDER_CACHE`);
|
||||
}
|
||||
else if (Input.getInput(`GITHUB_REF`)) {
|
||||
return Input.getInput(`GITHUB_REF`)
|
||||
.split('/')
|
||||
.filter((x) => {
|
||||
x = x[0].toUpperCase() + x.slice(1);
|
||||
return x;
|
||||
})
|
||||
.join('');
|
||||
}
|
||||
else if (Input.getInput('branch')) {
|
||||
return Input.getInput('branch');
|
||||
}
|
||||
else {
|
||||
return 'main';
|
||||
}
|
||||
}
|
||||
static get runNumber() {
|
||||
return Input.getInput('GITHUB_RUN_NUMBER') || '0';
|
||||
}
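Aside from the diff itself, the Input changes above are easiest to read as a precedence rule: a GitHub Actions input when githubEnabled is set, otherwise the parsed CLI option, otherwise an environment variable, otherwise false; Input.branch layers its own order on top (REMOTE_BUILDER_CACHE, then a value derived from GITHUB_REF, then the --branch option, then 'main'). A minimal TypeScript sketch of that lookup order, with hypothetical parameter names standing in for Input.githubEnabled, Input.cliOptions and process.env (illustrative only, not the shipped code):

// Illustrative sketch of the resolution order encoded by Input.getInput above.
function resolveInput(
  query: string,
  githubEnabled: boolean,
  getActionInput: (name: string) => string,
  cliOptions: Record<string, string> | undefined,
  env: Record<string, string | undefined>,
): string | boolean {
  // 1. Running as a GitHub Action: defer to @actions/core input handling.
  if (githubEnabled) return getActionInput(query);
  // 2. Otherwise prefer the commander options captured in Input.cliOptions.
  if (cliOptions !== undefined && cliOptions[query] !== undefined) return cliOptions[query];
  // 3. Fall back to a matching environment variable.
  const fromEnv = env[query];
  if (fromEnv !== undefined) return fromEnv;
  // 4. Nothing found: the compiled code returns false rather than throwing.
  return false;
}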
|
||||
|
|
@ -3110,7 +3150,7 @@ class Input {
|
|||
return Input.getInput('buildsPath') || 'build';
|
||||
}
|
||||
static get buildMethod() {
|
||||
return Input.getInput('buildMethod'); // processed in docker file
|
||||
return Input.getInput('buildMethod') || ''; // processed in docker file
|
||||
}
|
||||
static get versioningStrategy() {
|
||||
return Input.getInput('versioning') || 'Semantic';
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long

src/index.ts (19 changed lines)
|
|
@ -1,7 +1,7 @@
|
|||
import * as core from '@actions/core';
|
||||
import { Action, BuildParameters, Cache, Docker, ImageTag, Output, CloudRunner, Input } from './model';
|
||||
import { Command } from 'commander-ts';
|
||||
import { RemoteClientCli } from './model/cloud-runner/remote-client-cli.ts';
|
||||
import { RemoteClientCli } from './model/cloud-runner/remote-client';
|
||||
async function runMain() {
|
||||
try {
|
||||
Action.checkCompatibility();
|
||||
|
|
@ -34,13 +34,12 @@ async function runMain() {
|
|||
}
|
||||
}
|
||||
async function runCli() {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Created base image`);
|
||||
options.projectPath = './test-project';
|
||||
options.projectPath = 'test-project';
|
||||
options.versioning = 'None';
|
||||
Input.cliOptions = options;
|
||||
const buildParameter = await BuildParameters.create();
|
||||
await CloudRunner.run(buildParameter, ' ');
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
await CloudRunner.run(buildParameter, baseImage.toString());
|
||||
}
|
||||
async function runRemoteCli(options) {
|
||||
await RemoteClientCli.RunRemoteClient(options);
|
||||
|
|
@ -49,19 +48,25 @@ async function runRemoteCli(options) {
|
|||
const program = new Command();
|
||||
program.version('0.0.1');
|
||||
program.option('-m, --mode <mode>', 'cli or default');
|
||||
program.option('-region, --region <region>', 'aws region');
|
||||
program.option('-b, --branch <branch>', 'unity builder branch to clone');
|
||||
program.option('-sN, --awsBaseStackName <awsBaseStackName>', 'aws stack name');
|
||||
program.parse(process.argv);
|
||||
|
||||
const options = program.opts();
|
||||
|
||||
Input.githubEnabled = false;
|
||||
process.env.AWS_REGION = options.region;
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Created base image ${options.mode}`);
|
||||
console.log(`Entrypoint: ${options.mode}`);
|
||||
|
||||
switch (options.mode) {
|
||||
case 'cli':
|
||||
Input.githubEnabled = false;
|
||||
runCli();
|
||||
break;
|
||||
case 'remote-cli':
|
||||
Input.githubEnabled = false;
|
||||
runRemoteCli(options);
|
||||
break;
|
||||
default:
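For orientation, a hedged sketch of the mode dispatch in the new entrypoint above; the default case is truncated by this hunk, so treating it as the regular runMain() Actions path is an assumption, and the sample flags at the end are hypothetical values for the options registered on the commander program:

// Sketch only: mirrors the switch in src/index.ts; not the shipped code.
type EntrypointMode = 'cli' | 'remote-cli' | undefined;

function describeDispatch(mode: EntrypointMode): string {
  switch (mode) {
    case 'cli':
      // Local test path: disables GitHub inputs, pins projectPath/versioning,
      // then runs CloudRunner twice (a blank base image, then the resolved ImageTag).
      return 'runCli()';
    case 'remote-cli':
      // Path used inside the build container: hands the parsed options to RemoteClientCli.
      return 'runRemoteCli(options)';
    default:
      // Assumed fallback: the normal GitHub Actions entrypoint.
      return 'runMain()';
  }
}

// Hypothetical invocation of the bundle:
//   node dist/index.js -m remote-cli --region eu-west-2 -b main -sN game-ci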
|
||||
|
|
|
|||
|
|
@ -37,6 +37,7 @@ class BuildParameters {
|
|||
public preBuildSteps;
|
||||
public customBuildSteps;
|
||||
public runNumber;
|
||||
public branch;
|
||||
|
||||
static async create(): Promise<BuildParameters> {
|
||||
const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
|
||||
|
|
@ -79,6 +80,7 @@ class BuildParameters {
|
|||
preBuildSteps: Input.preBuildSteps,
|
||||
customBuildSteps: Input.customBuildSteps,
|
||||
runNumber: Input.runNumber,
|
||||
branch: Input.branch,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ export class AWSBaseStack {
|
|||
|
||||
async setupBaseStack(CF: SDK.CloudFormation) {
|
||||
const baseStackName = this.baseStackName;
|
||||
const baseStack = fs.readFileSync(`${__dirname}/cloud-formations/base-setup.yml`, 'utf8');
|
||||
const baseStack = fs.readFileSync(`${__dirname}\\cloud-formations\\base-setup.yml`, 'utf8');
|
||||
|
||||
// Cloud Formation Input
|
||||
const describeStackInput: SDK.CloudFormation.DescribeStacksInput = {
|
||||
|
|
@ -44,7 +44,7 @@ export class AWSBaseStack {
|
|||
};
|
||||
|
||||
const stacks = (
|
||||
await CF.listStacks({ StackStatusFilter: ['UPDATE_COMPLETE', 'CREATE_COMPLETE'] }).promise()
|
||||
await CF.listStacks({ StackStatusFilter: ['UPDATE_COMPLETE', 'CREATE_COMPLETE', 'ROLLBACK_COMPLETE'] }).promise()
|
||||
).StackSummaries?.map((x) => x.StackName);
|
||||
const stackExists: Boolean = stacks?.includes(baseStackName) || false;
|
||||
const describeStack = async () => {
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-
|
|||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { AWSJobStack } from './aws-job-stack';
|
||||
import { AWSBaseStack } from './aws-base-stack';
|
||||
|
||||
class AWSBuildEnvironment implements CloudRunnerProviderInterface {
|
||||
private baseStackName: string;
|
||||
|
|
@ -46,8 +47,12 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
|
|||
): Promise<void> {
|
||||
const ECS = new SDK.ECS();
|
||||
const CF = new SDK.CloudFormation();
|
||||
|
||||
CloudRunnerLogger.log(`AWS Region: ${CF.config.region}`);
|
||||
const entrypoint = ['/bin/sh'];
|
||||
const t0 = Date.now();
|
||||
|
||||
await new AWSBaseStack(this.baseStackName).setupBaseStack(CF);
|
||||
const taskDef = await new AWSJobStack(this.baseStackName).setupCloudFormations(
|
||||
CF,
|
||||
buildId,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import CloudRunnerTaskDef from '../services/cloud-runner-task-def';
|
|||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import CloudRunnerConstants from '../services/cloud-runner-constants';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import { AWSBaseStack } from './aws-base-stack';
|
||||
import { AWSTemplates } from './aws-templates';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import * as fs from 'fs';
|
||||
|
|
@ -29,7 +28,6 @@ export class AWSJobStack {
|
|||
commands[1] += `
|
||||
echo "${logGuid}"
|
||||
`;
|
||||
await new AWSBaseStack(this.baseStackName).setupBaseStack(CF);
|
||||
const taskDefStackName = `${this.baseStackName}-${buildGuid}`;
|
||||
let taskDefCloudFormation = AWSTemplates.readTaskCloudFormationTemplate();
|
||||
const cleanupTaskDefStackName = `${taskDefStackName}-cleanup`;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,407 @@
|
|||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Description: AWS Fargate cluster that can span public and private subnets. Supports
|
||||
public facing load balancers, private internal load balancers, and
|
||||
both internal and external service discovery namespaces.
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: 'Your deployment environment: DEV, QA , PROD'
|
||||
Storage:
|
||||
Type: String
|
||||
Default: game-ci-storage
|
||||
Description: 'Your s3 storage bucket'
|
||||
Version:
|
||||
Type: String
|
||||
Description: 'hash of template'
|
||||
|
||||
# ContainerPort:
|
||||
# Type: Number
|
||||
# Default: 80
|
||||
# Description: What port number the application inside the docker container is binding to
|
||||
|
||||
Mappings:
|
||||
# Hard values for the subnet masks. These masks define
|
||||
# the range of internal IP addresses that can be assigned.
|
||||
# The VPC can have all IPs from 10.0.0.0 to 10.0.255.255
# There are two public subnets which cover the ranges:
#
# 10.0.0.0 - 10.0.0.255
# 10.0.1.0 - 10.0.1.255
|
||||
|
||||
SubnetConfig:
|
||||
VPC:
|
||||
CIDR: '10.0.0.0/16'
|
||||
PublicOne:
|
||||
CIDR: '10.0.0.0/24'
|
||||
PublicTwo:
|
||||
CIDR: '10.0.1.0/24'
|
||||
|
||||
Resources:
|
||||
# VPC in which containers will be networked.
|
||||
# It has two public subnets, and two private subnets.
|
||||
# We distribute the subnets across the first two available subnets
|
||||
# for the region, for high availability.
|
||||
VPC:
|
||||
Type: AWS::EC2::VPC
|
||||
Properties:
|
||||
EnableDnsSupport: true
|
||||
EnableDnsHostnames: true
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||
|
||||
EFSServerSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Properties:
|
||||
GroupName: 'efs-server-endpoints'
|
||||
GroupDescription: Which client IP addresses are allowed to access the EFS server
|
||||
VpcId: !Ref 'VPC'
|
||||
SecurityGroupIngress:
|
||||
- IpProtocol: tcp
|
||||
FromPort: 2049
|
||||
ToPort: 2049
|
||||
SourceSecurityGroupId: !Ref ContainerSecurityGroup
|
||||
#CidrIp: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||
# A security group for the containers we will run in Fargate.
|
||||
# Rules are added to this security group based on what ingress you
|
||||
# add for the cluster.
|
||||
ContainerSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Properties:
|
||||
GroupName: 'task security group'
|
||||
GroupDescription: Access to the Fargate containers
|
||||
VpcId: !Ref 'VPC'
|
||||
# SecurityGroupIngress:
|
||||
# - IpProtocol: tcp
|
||||
# FromPort: !Ref ContainerPort
|
||||
# ToPort: !Ref ContainerPort
|
||||
# CidrIp: 0.0.0.0/0
|
||||
SecurityGroupEgress:
|
||||
- IpProtocol: -1
|
||||
FromPort: 2049
|
||||
ToPort: 2049
|
||||
CidrIp: '0.0.0.0/0'
|
||||
|
||||
# Two public subnets, where containers can have public IP addresses
|
||||
PublicSubnetOne:
|
||||
Type: AWS::EC2::Subnet
|
||||
Properties:
|
||||
AvailabilityZone: !Select
|
||||
- 0
|
||||
- Fn::GetAZs: !Ref 'AWS::Region'
|
||||
VpcId: !Ref 'VPC'
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'PublicOne', 'CIDR']
|
||||
# MapPublicIpOnLaunch: true
|
||||
|
||||
PublicSubnetTwo:
|
||||
Type: AWS::EC2::Subnet
|
||||
Properties:
|
||||
AvailabilityZone: !Select
|
||||
- 1
|
||||
- Fn::GetAZs: !Ref 'AWS::Region'
|
||||
VpcId: !Ref 'VPC'
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'PublicTwo', 'CIDR']
|
||||
# MapPublicIpOnLaunch: true
|
||||
|
||||
# Setup networking resources for the public subnets. Containers
|
||||
# in the public subnets have public IP addresses and the routing table
|
||||
# sends network traffic via the internet gateway.
|
||||
InternetGateway:
|
||||
Type: AWS::EC2::InternetGateway
|
||||
GatewayAttachement:
|
||||
Type: AWS::EC2::VPCGatewayAttachment
|
||||
Properties:
|
||||
VpcId: !Ref 'VPC'
|
||||
InternetGatewayId: !Ref 'InternetGateway'
|
||||
|
||||
# Attaching an Internet Gateway to the route table makes it public.
|
||||
PublicRouteTable:
|
||||
Type: AWS::EC2::RouteTable
|
||||
Properties:
|
||||
VpcId: !Ref 'VPC'
|
||||
PublicRoute:
|
||||
Type: AWS::EC2::Route
|
||||
DependsOn: GatewayAttachement
|
||||
Properties:
|
||||
RouteTableId: !Ref 'PublicRouteTable'
|
||||
DestinationCidrBlock: '0.0.0.0/0'
|
||||
GatewayId: !Ref 'InternetGateway'
|
||||
|
||||
# Attaching a public route table makes a subnet public.
|
||||
PublicSubnetOneRouteTableAssociation:
|
||||
Type: AWS::EC2::SubnetRouteTableAssociation
|
||||
Properties:
|
||||
SubnetId: !Ref PublicSubnetOne
|
||||
RouteTableId: !Ref PublicRouteTable
|
||||
PublicSubnetTwoRouteTableAssociation:
|
||||
Type: AWS::EC2::SubnetRouteTableAssociation
|
||||
Properties:
|
||||
SubnetId: !Ref PublicSubnetTwo
|
||||
RouteTableId: !Ref PublicRouteTable
|
||||
|
||||
# ECS Resources
|
||||
ECSCluster:
|
||||
Type: AWS::ECS::Cluster
|
||||
|
||||
# A role used to allow AWS Autoscaling to inspect stats and adjust scalable targets
|
||||
# on your AWS account
|
||||
AutoscalingRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [application-autoscaling.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: service-autoscaling
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'application-autoscaling:*'
|
||||
- 'cloudwatch:DescribeAlarms'
|
||||
- 'cloudwatch:PutMetricAlarm'
|
||||
- 'ecs:DescribeServices'
|
||||
- 'ecs:UpdateService'
|
||||
Resource: '*'
|
||||
|
||||
# This is an IAM role which authorizes ECS to manage resources on your
|
||||
# account on your behalf, such as updating your load balancer with the
|
||||
# details of where your containers are, so that traffic can reach your
|
||||
# containers.
|
||||
ECSRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [ecs.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: ecs-service
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
# Rules which allow ECS to attach network interfaces to instances
|
||||
# on your behalf in order for awsvpc networking mode to work right
|
||||
- 'ec2:AttachNetworkInterface'
|
||||
- 'ec2:CreateNetworkInterface'
|
||||
- 'ec2:CreateNetworkInterfacePermission'
|
||||
- 'ec2:DeleteNetworkInterface'
|
||||
- 'ec2:DeleteNetworkInterfacePermission'
|
||||
- 'ec2:Describe*'
|
||||
- 'ec2:DetachNetworkInterface'
|
||||
|
||||
# Rules which allow ECS to update load balancers on your behalf
|
||||
# with the information about how to send traffic to your containers
|
||||
- 'elasticloadbalancing:DeregisterInstancesFromLoadBalancer'
|
||||
- 'elasticloadbalancing:DeregisterTargets'
|
||||
- 'elasticloadbalancing:Describe*'
|
||||
- 'elasticloadbalancing:RegisterInstancesWithLoadBalancer'
|
||||
- 'elasticloadbalancing:RegisterTargets'
|
||||
Resource: '*'
|
||||
|
||||
# This is a role which is used by the ECS tasks themselves.
|
||||
ECSTaskExecutionRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [ecs-tasks.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: AmazonECSTaskExecutionRolePolicy
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
# Allow upload to S3
|
||||
- 's3:GetObject'
|
||||
- 's3:GetObjectVersion'
|
||||
- 's3:PutObject'
|
||||
|
||||
# Allow the use of secret manager
|
||||
- 'secretsmanager:GetSecretValue'
|
||||
- 'kms:Decrypt'
|
||||
|
||||
# Allow the ECS Tasks to download images from ECR
|
||||
- 'ecr:GetAuthorizationToken'
|
||||
- 'ecr:BatchCheckLayerAvailability'
|
||||
- 'ecr:GetDownloadUrlForLayer'
|
||||
- 'ecr:BatchGetImage'
|
||||
|
||||
# Allow the ECS tasks to upload logs to CloudWatch
|
||||
- 'logs:CreateLogStream'
|
||||
- 'logs:PutLogEvents'
|
||||
Resource: '*'
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Type: 'AWS::IAM::Role'
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: 'Allow'
|
||||
Principal:
|
||||
Service: ['lambda.amazonaws.com']
|
||||
Action: 'sts:AssumeRole'
|
||||
Path: '/'
|
||||
Policies:
|
||||
- PolicyName: DeleteCFNLambdaExecutionRole
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: 'Allow'
|
||||
Action:
|
||||
- 'logs:CreateLogGroup'
|
||||
- 'logs:CreateLogStream'
|
||||
- 'logs:PutLogEvents'
|
||||
Resource: 'arn:aws:logs:*:*:*'
|
||||
- Effect: 'Allow'
|
||||
Action:
|
||||
- 'cloudformation:DeleteStack'
|
||||
- 'kinesis:DeleteStream'
|
||||
- 'secretsmanager:DeleteSecret'
|
||||
- 'kinesis:DescribeStreamSummary'
|
||||
- 'logs:DeleteLogGroup'
|
||||
- 'logs:DeleteSubscriptionFilter'
|
||||
- 'ecs:DeregisterTaskDefinition'
|
||||
- 'lambda:DeleteFunction'
|
||||
- 'lambda:InvokeFunction'
|
||||
- 'events:RemoveTargets'
|
||||
- 'events:DeleteRule'
|
||||
- 'lambda:RemovePermission'
|
||||
Resource: '*'
|
||||
|
||||
### cloud watch to kinesis role
|
||||
|
||||
CloudWatchIAMRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [logs.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: service-autoscaling
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'kinesis:PutRecord'
|
||||
Resource: '*'
|
||||
#####################EFS#####################
|
||||
|
||||
EfsFileStorage:
|
||||
Type: 'AWS::EFS::FileSystem'
|
||||
Properties:
|
||||
BackupPolicy:
|
||||
Status: ENABLED
|
||||
PerformanceMode: maxIO
|
||||
Encrypted: false
|
||||
|
||||
FileSystemPolicy:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: 'Allow'
|
||||
Action:
|
||||
- 'elasticfilesystem:ClientMount'
|
||||
- 'elasticfilesystem:ClientWrite'
|
||||
- 'elasticfilesystem:ClientRootAccess'
|
||||
Principal:
|
||||
AWS: '*'
|
||||
|
||||
MountTargetResource1:
|
||||
Type: AWS::EFS::MountTarget
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsFileStorage
|
||||
SubnetId: !Ref PublicSubnetOne
|
||||
SecurityGroups:
|
||||
- !Ref EFSServerSecurityGroup
|
||||
|
||||
MountTargetResource2:
|
||||
Type: AWS::EFS::MountTarget
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsFileStorage
|
||||
SubnetId: !Ref PublicSubnetTwo
|
||||
SecurityGroups:
|
||||
- !Ref EFSServerSecurityGroup
|
||||
|
||||
S3Bucket:
|
||||
Type: 'AWS::S3::Bucket'
|
||||
DeletionPolicy: Retain
|
||||
Properties:
|
||||
BucketName: !Ref Storage
|
||||
|
||||
Outputs:
|
||||
EfsFileStorageId:
|
||||
Description: 'The ID of the EFS file system.'
|
||||
Value: !Ref EfsFileStorage
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:EfsFileStorageId
|
||||
ClusterName:
|
||||
Description: The name of the ECS cluster
|
||||
Value: !Ref 'ECSCluster'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ClusterName
|
||||
AutoscalingRole:
|
||||
Description: The ARN of the role used for autoscaling
|
||||
Value: !GetAtt 'AutoscalingRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:AutoscalingRole
|
||||
ECSRole:
|
||||
Description: The ARN of the ECS role
|
||||
Value: !GetAtt 'ECSRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSRole
|
||||
ECSTaskExecutionRole:
|
||||
Description: The ARN of the ECS task execution role
|
||||
Value: !GetAtt 'ECSTaskExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSTaskExecutionRole
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Description: Lambda execution role for cleaning up CloudFormation stacks
|
||||
Value: !GetAtt 'DeleteCFNLambdaExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:DeleteCFNLambdaExecutionRole
|
||||
|
||||
CloudWatchIAMRole:
|
||||
Description: The ARN of the CloudWatch role for subscription filter
|
||||
Value: !GetAtt 'CloudWatchIAMRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:CloudWatchIAMRole
|
||||
VpcId:
|
||||
Description: The ID of the VPC that this stack is deployed in
|
||||
Value: !Ref 'VPC'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:VpcId
|
||||
PublicSubnetOne:
|
||||
Description: Public subnet one
|
||||
Value: !Ref 'PublicSubnetOne'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetOne
|
||||
PublicSubnetTwo:
|
||||
Description: Public subnet two
|
||||
Value: !Ref 'PublicSubnetTwo'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetTwo
|
||||
|
||||
ContainerSecurityGroup:
|
||||
Description: A security group used to allow Fargate containers to receive traffic
|
||||
Value: !Ref 'ContainerSecurityGroup'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ContainerSecurityGroup
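These exports are what tie the new templates together: the task-definition template later in this commit pulls them in with Fn::ImportValue (for example ${EnvironmentName}:EfsFileStorageId, ${EnvironmentName}:ECSTaskExecutionRole and ${EnvironmentName}:CloudWatchIAMRole), so the base stack has to exist before any per-build job stack is created; this is why the AWS build environment calls setupBaseStack ahead of setupCloudFormations.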
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Description: Schedule automatic deletion of CloudFormation stacks
|
||||
Metadata:
|
||||
AWS::CloudFormation::Interface:
|
||||
ParameterGroups:
|
||||
- Label:
|
||||
default: Input configuration
|
||||
Parameters:
|
||||
- StackName
|
||||
- TTL
|
||||
ParameterLabels:
|
||||
StackName:
|
||||
default: Stack name
|
||||
TTL:
|
||||
default: Time-to-live
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: 'Your deployment environment: DEV, QA , PROD'
|
||||
BUILDID:
|
||||
Type: String
|
||||
Default: ''
|
||||
StackName:
|
||||
Type: String
|
||||
Description: Stack name that will be deleted.
|
||||
DeleteStackName:
|
||||
Type: String
|
||||
Description: Name of the cleanup stack itself, which is also deleted.
|
||||
TTL:
|
||||
Type: Number
|
||||
Description: Time-to-live in minutes for the stack.
|
||||
Resources:
|
||||
DeleteCFNLambda:
|
||||
Type: 'AWS::Lambda::Function'
|
||||
Properties:
|
||||
FunctionName: !Join ['', ['DeleteCFNLambda', !Ref BUILDID]]
|
||||
Code:
|
||||
ZipFile: |
|
||||
import boto3
|
||||
import os
|
||||
import json
|
||||
|
||||
stack_name = os.environ['stackName']
|
||||
delete_stack_name = os.environ['deleteStackName']
|
||||
|
||||
def delete_cfn(stack_name):
|
||||
try:
|
||||
cfn = boto3.resource('cloudformation')
|
||||
stack = cfn.Stack(stack_name)
|
||||
stack.delete()
|
||||
return "SUCCESS"
|
||||
except:
|
||||
return "ERROR"
|
||||
|
||||
def handler(event, context):
|
||||
print("Received event:")
|
||||
print(json.dumps(event))
|
||||
result = delete_cfn(stack_name)
|
||||
delete_cfn(delete_stack_name)
|
||||
return result
|
||||
Environment:
|
||||
Variables:
|
||||
stackName: !Ref 'StackName'
|
||||
deleteStackName: !Ref 'DeleteStackName'
|
||||
Handler: 'index.handler'
|
||||
Runtime: 'python3.6'
|
||||
Timeout: '5'
|
||||
Role:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
|
||||
DeleteStackEventRule:
|
||||
DependsOn:
|
||||
- DeleteCFNLambda
|
||||
- GenerateCronExpression
|
||||
Type: 'AWS::Events::Rule'
|
||||
Properties:
|
||||
Name: !Join ['', ['DeleteStackEventRule', !Ref BUILDID]]
|
||||
Description: Delete stack event
|
||||
ScheduleExpression: !GetAtt GenerateCronExpression.cron_exp
|
||||
State: 'ENABLED'
|
||||
Targets:
|
||||
- Arn: !GetAtt DeleteCFNLambda.Arn
|
||||
Id: 'DeleteCFNLambda'
|
||||
PermissionForDeleteCFNLambda:
|
||||
Type: 'AWS::Lambda::Permission'
|
||||
DependsOn:
|
||||
- DeleteStackEventRule
|
||||
Properties:
|
||||
FunctionName: !Join ['', ['DeleteCFNLambda', !Ref BUILDID]]
|
||||
Action: 'lambda:InvokeFunction'
|
||||
Principal: 'events.amazonaws.com'
|
||||
SourceArn: !GetAtt DeleteStackEventRule.Arn
|
||||
GenerateCronExpLambda:
|
||||
Type: 'AWS::Lambda::Function'
|
||||
Properties:
|
||||
FunctionName: !Join ['', ['GenerateCronExpressionLambda', !Ref BUILDID]]
|
||||
Code:
|
||||
ZipFile: |
|
||||
from datetime import datetime, timedelta
|
||||
import os
|
||||
import logging
|
||||
import json
|
||||
import cfnresponse
|
||||
|
||||
def deletion_time(ttl):
|
||||
delete_at_time = datetime.now() + timedelta(minutes=int(ttl))
|
||||
hh = delete_at_time.hour
|
||||
mm = delete_at_time.minute
|
||||
yyyy = delete_at_time.year
|
||||
month = delete_at_time.month
|
||||
dd = delete_at_time.day
|
||||
# minutes hours day month day-of-week year
|
||||
cron_exp = "cron({} {} {} {} ? {})".format(mm, hh, dd, month, yyyy)
|
||||
return cron_exp
|
||||
|
||||
def handler(event, context):
|
||||
print('Received event: %s' % json.dumps(event))
|
||||
status = cfnresponse.SUCCESS
|
||||
try:
|
||||
if event['RequestType'] == 'Delete':
|
||||
cfnresponse.send(event, context, status, {})
|
||||
else:
|
||||
ttl = event['ResourceProperties']['ttl']
|
||||
responseData = {}
|
||||
responseData['cron_exp'] = deletion_time(ttl)
|
||||
cfnresponse.send(event, context, cfnresponse.SUCCESS, responseData)
|
||||
except Exception as e:
|
||||
logging.error('Exception: %s' % e, exc_info=True)
|
||||
status = cfnresponse.FAILED
|
||||
cfnresponse.send(event, context, status, {}, None)
|
||||
Handler: 'index.handler'
|
||||
Runtime: 'python3.6'
|
||||
Timeout: '5'
|
||||
Role:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
|
||||
GenerateCronExpression:
|
||||
Type: 'Custom::GenerateCronExpression'
|
||||
Version: '1.0'
|
||||
Properties:
|
||||
Name: !Join ['', ['GenerateCronExpression', !Ref BUILDID]]
|
||||
ServiceToken: !GetAtt GenerateCronExpLambda.Arn
|
||||
ttl: !Ref 'TTL'
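The TTL handling above turns a relative time-to-live into a one-shot schedule: GenerateCronExpression invokes the Lambda, which adds TTL minutes to the current time and returns a cron expression for that single instant, and DeleteStackEventRule then fires DeleteCFNLambda once at that moment to delete both referenced stacks. As a worked example (values are illustrative), with ttl: 60 at 2021-10-01 13:25 UTC, deletion_time would return cron(25 14 1 10 ? 2021).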
|
||||
|
|
@ -0,0 +1,221 @@
|
|||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: >-
|
||||
AWS Fargate cluster that can span public and private subnets. Supports public
|
||||
facing load balancers, private internal load balancers, and both internal and
|
||||
external service discovery namespaces.
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: 'Your deployment environment: DEV, QA , PROD'
|
||||
ServiceName:
|
||||
Type: String
|
||||
Default: example
|
||||
Description: A name for the service
|
||||
ImageUrl:
|
||||
Type: String
|
||||
Default: nginx
|
||||
Description: >-
|
||||
The url of a docker image that contains the application process that will
|
||||
handle the traffic for this service
|
||||
ContainerPort:
|
||||
Type: Number
|
||||
Default: 80
|
||||
Description: What port number the application inside the docker container is binding to
|
||||
ContainerCpu:
|
||||
Type: Number
|
||||
Default: 1024
|
||||
Description: How much CPU to give the container. 1024 is 1 CPU
|
||||
ContainerMemory:
|
||||
Type: Number
|
||||
Default: 2048
|
||||
Description: How much memory in megabytes to give the container
|
||||
BUILDID:
|
||||
Type: String
|
||||
Default: ''
|
||||
Command:
|
||||
Type: String
|
||||
Default: 'ls'
|
||||
EntryPoint:
|
||||
Type: String
|
||||
Default: '/bin/sh'
|
||||
WorkingDirectory:
|
||||
Type: String
|
||||
Default: '/efsdata/'
|
||||
Role:
|
||||
Type: String
|
||||
Default: ''
|
||||
Description: >-
|
||||
(Optional) An IAM role to give the service's containers if the code within
|
||||
needs to access other AWS resources like S3 buckets, DynamoDB tables, etc
|
||||
EFSMountDirectory:
|
||||
Type: String
|
||||
Default: '/efsdata'
|
||||
# template secrets p1 - input
|
||||
Mappings:
|
||||
SubnetConfig:
|
||||
VPC:
|
||||
CIDR: 10.0.0.0/16
|
||||
PublicOne:
|
||||
CIDR: 10.0.0.0/24
|
||||
PublicTwo:
|
||||
CIDR: 10.0.1.0/24
|
||||
Conditions:
|
||||
HasCustomRole: !Not
|
||||
- !Equals
|
||||
- Ref: Role
|
||||
- ''
|
||||
Resources:
|
||||
LogGroup:
|
||||
Type: 'AWS::Logs::LogGroup'
|
||||
Properties:
|
||||
LogGroupName: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
SubscriptionFilter:
|
||||
Type: 'AWS::Logs::SubscriptionFilter'
|
||||
Properties:
|
||||
FilterPattern: ''
|
||||
RoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:CloudWatchIAMRole'
|
||||
LogGroupName: !Ref ServiceName
|
||||
DestinationArn:
|
||||
'Fn::GetAtt':
|
||||
- KinesisStream
|
||||
- Arn
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
- KinesisStream
|
||||
KinesisStream:
|
||||
Type: 'AWS::Kinesis::Stream'
|
||||
Properties:
|
||||
Name: !Ref ServiceName
|
||||
ShardCount: 1
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
|
||||
# template secrets p2 - secret
|
||||
|
||||
TaskDefinition:
|
||||
Type: 'AWS::ECS::TaskDefinition'
|
||||
Properties:
|
||||
Family: !Ref ServiceName
|
||||
Cpu: !Ref ContainerCpu
|
||||
Memory: !Ref ContainerMemory
|
||||
NetworkMode: awsvpc
|
||||
Volumes:
|
||||
- Name: efs-data
|
||||
EFSVolumeConfiguration:
|
||||
FilesystemId:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:EfsFileStorageId'
|
||||
TransitEncryption: ENABLED
|
||||
RequiresCompatibilities:
|
||||
- FARGATE
|
||||
ExecutionRoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:ECSTaskExecutionRole'
|
||||
TaskRoleArn:
|
||||
'Fn::If':
|
||||
- HasCustomRole
|
||||
- !Ref Role
|
||||
- !Ref 'AWS::NoValue'
|
||||
ContainerDefinitions:
|
||||
- Name: !Ref ServiceName
|
||||
Cpu: !Ref ContainerCpu
|
||||
Memory: !Ref ContainerMemory
|
||||
Image: !Ref ImageUrl
|
||||
EntryPoint:
|
||||
Fn::Split:
|
||||
- ','
|
||||
- !Ref EntryPoint
|
||||
Command:
|
||||
Fn::Split:
|
||||
- ','
|
||||
- !Ref Command
|
||||
WorkingDirectory: !Ref WorkingDirectory
|
||||
Environment:
|
||||
- Name: ALLOW_EMPTY_PASSWORD
|
||||
Value: 'yes'
|
||||
# template - env vars
|
||||
MountPoints:
|
||||
- SourceVolume: efs-data
|
||||
ContainerPath: !Ref EFSMountDirectory
|
||||
ReadOnly: false
|
||||
Secrets:
|
||||
# template secrets p3 - container def
|
||||
LogConfiguration:
|
||||
LogDriver: awslogs
|
||||
Options:
|
||||
awslogs-group: !Ref ServiceName
|
||||
awslogs-region: !Ref 'AWS::Region'
|
||||
awslogs-stream-prefix: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
dabb0116-abe0-48a6-a8af-cf9111c879a5:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
c6f18447-b879-4696-8873-f981b2cedd2b:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 210
|
||||
z: 1
|
||||
embeds: []
|
||||
7f809e91-9e5d-4678-98c1-c5085956c480:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 60
|
||||
'y': 300
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
- c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
aece53ae-b82d-4267-bc16-ed964b05db27:
|
||||
size:
|
||||
width: 150
|
||||
height: 150
|
||||
position:
|
||||
x: 60
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
4d2da56c-3643-46b8-aaee-e46e19f95fcc:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 11
|
||||
14eb957b-f094-4653-93c4-77b2f851953c:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
z: 12
|
||||
85c57444-e5bb-4230-bc85-e545cd4558f6:
|
||||
source:
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 13
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import AWSBuildPlatform from './aws/aws-build-platform';
|
||||
import { BuildParameters } from '..';
|
||||
import CloudRunnerNamespace from './services/cloud-runner-namespace';
|
||||
import { CloudRunnerState } from './state/cloud-runner-state';
|
||||
import Kubernetes from './k8s/kubernetes-build-platform';
|
||||
import CloudRunnerLogger from './services/cloud-runner-logger';
|
||||
|
|
@ -11,28 +10,21 @@ import { CloudRunnerError } from './error/cloud-runner-error';
|
|||
class CloudRunner {
|
||||
private static setup(buildParameters: BuildParameters) {
|
||||
CloudRunnerLogger.setup();
|
||||
CloudRunnerState.buildParams = buildParameters;
|
||||
CloudRunnerState.buildGuid = CloudRunnerNamespace.generateBuildName(
|
||||
CloudRunnerState.readRunNumber(),
|
||||
buildParameters.platform,
|
||||
);
|
||||
CloudRunnerState.setupBranchName();
|
||||
CloudRunnerState.setupFolderVariables();
|
||||
CloudRunnerState.setupDefaultSecrets();
|
||||
CloudRunnerState.setup(buildParameters);
|
||||
CloudRunner.setupBuildPlatform();
|
||||
}
|
||||
|
||||
private static setupBuildPlatform() {
|
||||
switch (CloudRunnerState.buildParams.cloudRunnerCluster) {
|
||||
case 'aws':
|
||||
CloudRunnerLogger.log('Building with AWS');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new AWSBuildPlatform(CloudRunnerState.buildParams);
|
||||
break;
|
||||
default:
|
||||
case 'k8s':
|
||||
CloudRunnerLogger.log('Building with Kubernetes');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new Kubernetes(CloudRunnerState.buildParams);
|
||||
break;
|
||||
default:
|
||||
case 'aws':
|
||||
CloudRunnerLogger.log('Building with AWS');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new AWSBuildPlatform(CloudRunnerState.buildParams);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { DownloadRepository } from '../steps/remote-steps.ts/download-repository';
|
||||
import { DownloadRepository } from '../steps/remote-steps/download-repository';
|
||||
|
||||
export class RemoteClientCli {
|
||||
static async RunRemoteClient(options) {
|
||||
|
|
@ -13,6 +13,10 @@ class CloudRunnerLogger {
|
|||
core.info(message);
|
||||
}
|
||||
|
||||
public static logLine(message: string) {
|
||||
core.info(`${message}\n`);
|
||||
}
|
||||
|
||||
public static error(message: string) {
|
||||
core.error(message);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,10 +1,22 @@
|
|||
import { BuildParameters } from '../..';
|
||||
import ImageEnvironmentFactory from '../../image-environment-factory';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerNamespace from '../services/cloud-runner-namespace';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
|
||||
export class CloudRunnerState {
|
||||
static setup(buildParameters: BuildParameters) {
|
||||
CloudRunnerState.buildParams = buildParameters;
|
||||
CloudRunnerState.buildGuid = CloudRunnerNamespace.generateBuildName(
|
||||
CloudRunnerState.readRunNumber(),
|
||||
buildParameters.platform,
|
||||
);
|
||||
CloudRunnerState.setupBranchName();
|
||||
CloudRunnerState.setupFolderVariables();
|
||||
CloudRunnerState.setupDefaultSecrets();
|
||||
}
|
||||
public static CloudRunnerProviderPlatform: CloudRunnerProviderInterface;
|
||||
public static buildParams: BuildParameters;
|
||||
public static defaultSecrets: CloudRunnerSecret[];
|
||||
|
|
@ -19,7 +31,6 @@ export class CloudRunnerState {
  public static cacheFolderFull: string;
  public static lfsDirectory: string;
  public static purgeRemoteCaching: boolean;
  public static CloudRunnerBranch: string;
  public static unityBuilderRepoUrl: string;
  public static targetBuildRepoUrl: string;
  public static readonly defaultGitShaEnvironmentVariable = [

@ -31,6 +42,7 @@ export class CloudRunnerState {
  public static readonly repositoryFolder = 'repo';
  public static readonly buildVolumeFolder = 'data';
  public static readonly cacheFolder = 'cache';
  public static cloudRunnerBranch: string;

  public static readBuildEnvironmentVariables(): CloudRunnerEnvironmentVariable[] {
    return [

@ -86,6 +98,10 @@ export class CloudRunnerState {
        name: 'ANDROID_KEYALIAS_NAME',
        value: CloudRunnerState.buildParams.androidKeyaliasName,
      },
      {
        name: 'SERIALIZED_STATE',
        value: JSON.stringify(CloudRunnerState),
      },
    ];
  }

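One caveat with the added `SERIALIZED_STATE` variable: `CloudRunnerState` is a class, and `JSON.stringify` returns `undefined` for functions (classes included), so the static fields are not captured. If a snapshot of the state is what should reach the container, a hedged sketch of serialising an explicit plain object is shown below; `serializeState` and the selected field names are illustrative, not the project's code.

// Illustrative sketch: serialise selected static fields explicitly instead of
// stringifying the class itself. Field names are taken from this diff.
function serializeState(state: { buildGuid: string; branchName: string; buildParams: unknown }): string {
  return JSON.stringify({
    buildGuid: state.buildGuid,
    branchName: state.branchName,
    buildParams: state.buildParams,
  });
}

// Usage sketch: value: serializeState(CloudRunnerState)
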
@ -98,7 +114,9 @@ export class CloudRunnerState {
  }

  public static getCloneBuilder() {
    return `git clone -q ${CloudRunnerState.CloudRunnerBranch} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}`;
    const cloneCommand = `git clone -b ${CloudRunnerState.branchName} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}`;
    CloudRunnerLogger.log(cloneCommand);
    return cloneCommand;
  }

  public static readRunNumber() {

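Note that `unityBuilderRepoUrl` (assigned in `setupFolderVariables` below) embeds the GitHub token, so logging the full clone command also writes the token to the build log. A minimal sketch of redacting it first follows; `maskToken` is a hypothetical helper, not part of the project.

// Illustrative sketch: redact the embedded token before logging a clone command.
function maskToken(command: string, token: string): string {
  return token ? command.split(token).join('***') : command;
}

// Usage sketch inside getCloneBuilder():
//   CloudRunnerLogger.log(maskToken(cloneCommand, CloudRunnerState.buildParams.githubToken));
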
@ -119,23 +137,12 @@ export class CloudRunnerState {
    CloudRunnerState.cacheFolderFull = `/${CloudRunnerState.buildVolumeFolder}/${CloudRunnerState.cacheFolder}/${CloudRunnerState.branchName}`;
    CloudRunnerState.lfsDirectory = `${CloudRunnerState.repoPathFull}/.git/lfs`;
    CloudRunnerState.purgeRemoteCaching = process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
    CloudRunnerState.CloudRunnerBranch = process.env.CloudRunnerBranch
      ? `--branch "${process.env.CloudRunnerBranch}"`
      : '';
    CloudRunnerState.unityBuilderRepoUrl = `https://${CloudRunnerState.buildParams.githubToken}@github.com/game-ci/unity-builder.git`;
    CloudRunnerState.targetBuildRepoUrl = `https://${CloudRunnerState.buildParams.githubToken}@github.com/${process.env.GITHUB_REPOSITORY}.git`;
  }

  public static setupBranchName() {
    const defaultBranchName =
      process.env.GITHUB_REF?.split('/')
        .filter((x) => {
          x = x[0].toUpperCase() + x.slice(1);
          return x;
        })
        .join('') || '';
    CloudRunnerState.branchName =
      process.env.REMOTE_BUILDER_CACHE !== undefined ? process.env.REMOTE_BUILDER_CACHE : defaultBranchName;
    CloudRunnerState.branchName = CloudRunnerState.buildParams.branch;
  }

  public static setupDefaultSecrets() {

@ -19,7 +19,7 @@ export class BuildStep implements StepInterface {
    environmentVariables: CloudRunnerEnvironmentVariable[],
    secrets: CloudRunnerSecret[],
  ) {
    CloudRunnerLogger.log('Starting part 2/4 (build unity project)');
    CloudRunnerLogger.logLine('Starting part 2/4 (build unity project)');
    await CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(
      CloudRunnerState.buildGuid,
      image,

@ -15,7 +15,7 @@ export class CompressionStep implements StepInterface {
    secrets: CloudRunnerSecret[],
  ) {
    try {
      CloudRunnerLogger.log('Starting step 3/4 build compression');
      CloudRunnerLogger.logLine('Starting step 3/4 build compression');
      // Cleanup
      await CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(
        CloudRunnerState.buildGuid,

@ -24,26 +24,31 @@ export class DownloadRepositoryStep implements StepInterface {
    secrets: CloudRunnerSecret[],
  ) {
    try {
      CloudRunnerLogger.log('Starting step 1/4 clone and restore cache');
      CloudRunnerLogger.logLine('Starting step 1/4 clone and restore cache');
      await CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(
        CloudRunnerState.buildGuid,
        image,
        [
          ` printenv
          apk update -q
          apk add unzip zip git-lfs jq tree -q
          mkdir -p ${CloudRunnerState.buildPathFull}
          apk add unzip zip git-lfs jq tree nodejs -q

          export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
          # mkdir -p ${CloudRunnerState.buildPathFull}
          mkdir -p ${CloudRunnerState.builderPathFull}
          mkdir -p ${CloudRunnerState.repoPathFull}
          # mkdir -p ${CloudRunnerState.repoPathFull}
          echo "${CloudRunnerState.getCloneBuilder()}"
          ${CloudRunnerState.getCloneBuilder()}
          ${CloudRunnerState.unityBuilderRepoUrl}/dist/index.js -- -m cli
          echo ' '
          echo 'Initializing source repository for cloning with caching of LFS files'
          ${CloudRunnerState.getCloneNoLFSCommand()}
          echo 'Source repository initialized'
          echo ' '
          echo 'Starting checks of cache for the Unity project Library and git LFS files'
          ${CloudRunnerState.getHandleCachingCommand()}
          chmod +x ${CloudRunnerState.builderPathFull}/dist/index.js
          node ${CloudRunnerState.builderPathFull}/dist/index.js -m remote-cli
          # echo ' '
          # echo 'Initializing source repository for cloning with caching of LFS files'
          # ${CloudRunnerState.getCloneNoLFSCommand()}
          # echo 'Source repository initialized'
          # ls ${CloudRunnerState.projectPathFull}
          # echo ' '
          # echo 'Starting checks of cache for the Unity project Library and git LFS files'
          # ${CloudRunnerState.getHandleCachingCommand()}
          `,
        ],
        `/${CloudRunnerState.buildVolumeFolder}`,

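The rewritten step now installs node in the container, clones unity-builder into `builderPathFull`, and re-enters it with `-m remote-cli`, while several earlier clone and cache commands are left commented out. As one possible cleanup (a sketch under assumed placeholder paths, not the project's code), the script could be assembled from a command list so steps are toggled in code rather than by shell comments:

// Hypothetical helper: join enabled commands into the container bootstrap script.
function buildBootstrapScript(commands: string[]): string {
  return commands
    .map((line) => line.trim())
    .filter((line) => line.length > 0)
    .join('\n');
}

const bootstrapScript = buildBootstrapScript([
  'printenv',
  'apk update -q',
  'apk add unzip zip git-lfs jq tree nodejs -q',
  'export GIT_DISCOVERY_ACROSS_FILESYSTEM=1',
  // Placeholder paths; the real values come from CloudRunnerState.
  'mkdir -p /data/builder',
  'node /data/builder/dist/index.js -m remote-cli',
]);
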
@ -52,6 +57,7 @@ export class DownloadRepositoryStep implements StepInterface {
        secrets,
      );
    } catch (error) {
      CloudRunnerLogger.logLine(`ENV VARS ${JSON.stringify(environmentVariables)} SECRETS ${JSON.stringify(secrets)}`);
      throw error;
    }
  }

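The new catch block helps with diagnostics, but `JSON.stringify(secrets)` writes secret values into the log. A hedged sketch of redacting them first follows; the `value` field name is an assumption about the secret shape, and `redactSecrets` is a hypothetical helper.

// Illustrative sketch: blank out secret values before diagnostic logging.
function redactSecrets(secrets: { name?: string; value?: string }[]): { name?: string; value?: string }[] {
  return secrets.map((secret) => ({ ...secret, value: '***' }));
}

// Usage sketch inside the catch block:
//   CloudRunnerLogger.logLine(`SECRETS ${JSON.stringify(redactSecrets(secrets))}`);
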
@ -1,25 +0,0 @@
const { exec } = require('child_process');

export class DownloadRepository {
  public static async run() {
    await new Promise<void>((promise) => {
      exec('printenv', (error, stdout, stderr) => {
        if (error) {
          // eslint-disable-next-line no-console
          console.log(`error: ${error.message}`);
          promise();
          return;
        }
        if (stderr) {
          // eslint-disable-next-line no-console
          console.log(`stderr: ${stderr}`);
          promise();
          return;
        }
        // eslint-disable-next-line no-console
        console.log(`stdout: ${stdout}`);
        promise();
      });
    });
  }
}

@ -0,0 +1,32 @@
const { exec } = require('child_process');

export class DownloadRepository {
  public static async run() {
    await new Promise<void>((promise) => {
      exec(
        `
        echo "test"
        apk update -q
        apk add unzip zip git-lfs jq tree -q
        `,
        (error, stdout, stderr) => {
          if (error) {
            // eslint-disable-next-line no-console
            console.log(`error: ${error.message}`);
            promise();
            return;
          }
          if (stderr) {
            // eslint-disable-next-line no-console
            console.log(`stderr: ${stderr}`);
            promise();
            return;
          }
          // eslint-disable-next-line no-console
          console.log(`stdout: ${stdout}`);
          promise();
        },
      );
    });
  }
}

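The promise above resolves in every branch, including when `exec` reports an error, so the remote CLI cannot tell whether the shell commands succeeded. A hedged alternative sketch using Node's `util.promisify(exec)`, which rejects on a non-zero exit code, is shown below; it is not the project's implementation.

// Illustrative alternative: a failing command rejects and can be surfaced by the caller.
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

export async function runShell(script: string): Promise<string> {
  const { stdout, stderr } = await execAsync(script);
  if (stderr) {
    // eslint-disable-next-line no-console
    console.log(`stderr: ${stderr}`);
  }
  return stdout;
}
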
@ -10,16 +10,34 @@ const core = require('@actions/core');

class Input {
  public static githubEnabled = true;
  public static cliOptions;
  static awsRegion: any;

  private static getInput(query) {
    return Input.githubEnabled
      ? core.getInput(query)
      : Input.cliOptions !== undefined
      : Input.cliOptions[query] !== undefined
      ? Input.cliOptions[query]
      : process.env[query] !== undefined
      ? process.env[query]
      : false;
  }
  static get branch() {
    if (Input.getInput(`REMOTE_BUILDER_CACHE`)) {
      return Input.getInput(`REMOTE_BUILDER_CACHE`);
    } else if (Input.getInput(`GITHUB_REF`)) {
      return Input.getInput(`GITHUB_REF`)
        .split('/')
        .filter((x) => {
          x = x[0].toUpperCase() + x.slice(1);
          return x;
        })
        .join('');
    } else if (Input.getInput('branch')) {
      return Input.getInput('branch');
    } else {
      return 'main';
    }
  }
  static get runNumber() {
    return Input.getInput('GITHUB_RUN_NUMBER') || '0';
  }

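With GitHub inputs disabled, `getInput` now resolves a value from the CLI options first, then from the environment, and finally falls back to `false`. The nested ternary is hard to scan; the sketch below restates the same order as an if-chain (combining the old `cliOptions !== undefined` guard with the new per-key check). It is a sketch, not the project's implementation.

// Illustrative restatement of the resolution order in Input.getInput.
import * as core from '@actions/core';

function resolveInput(
  query: string,
  githubEnabled: boolean,
  cliOptions: Record<string, string> | undefined,
): string | false {
  if (githubEnabled) {
    return core.getInput(query);
  }
  if (cliOptions !== undefined && cliOptions[query] !== undefined) {
    return cliOptions[query];
  }
  if (process.env[query] !== undefined) {
    return process.env[query] as string;
  }
  return false;
}
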
@ -50,7 +68,7 @@ class Input {
  }

  static get buildMethod() {
    return Input.getInput('buildMethod'); // processed in docker file
    return Input.getInput('buildMethod') || ''; // processed in docker file
  }

  static get versioningStrategy() {