pull/310/head
Frostebite 2021-12-30 20:25:28 +00:00
parent ac4113a7a3
commit 9366b55b0f
15 changed files with 279 additions and 77 deletions

dist/index.js vendored

@@ -335,10 +335,10 @@ class Cache {
if (action_1.default.isRunningLocally) {
return;
}
core.warning(`
Library folder does not exist.
Consider setting up caching to speed up your workflow,
if this is not your first build.
`);
}
}
@@ -1187,7 +1187,6 @@ const core = __importStar(__webpack_require__(42186));
const zlib = __importStar(__webpack_require__(78761));
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const __1 = __webpack_require__(41359);
-const fs_1 = __importDefault(__webpack_require__(35747));
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_statics_1 = __webpack_require__(90828);
const cloud_runner_build_command_process_1 = __webpack_require__(71899);
@@ -1236,7 +1235,7 @@ class AWSTaskRunner {
core.error(error);
}
cloud_runner_logger_1.default.log(`Cloud runner job is running`);
-yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
+const output = yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (_q = (yield AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _q === void 0 ? void 0 : _q[0].exitCode;
cloud_runner_logger_1.default.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {
@@ -1245,6 +1244,7 @@ class AWSTaskRunner {
}
else {
cloud_runner_logger_1.default.log(`Cloud runner job has finished successfully`);
+return output;
}
});
}
@@ -1274,15 +1274,17 @@ class AWSTaskRunner {
cloud_runner_logger_1.default.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let shouldReadLogs = true;
let timestamp = 0;
+let output = '';
while (shouldReadLogs) {
yield new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = yield AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
-({ iterator, shouldReadLogs } = yield AWSTaskRunner.handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef));
+({ iterator, shouldReadLogs, output } = yield AWSTaskRunner.handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef, output));
}
+return output;
});
}
-static handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef) {
+static handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef, output) {
return __awaiter(this, void 0, void 0, function* () {
const records = yield kinesis
.getRecords({
@@ -1290,8 +1292,8 @@ class AWSTaskRunner {
})
.promise();
iterator = records.NextShardIterator || '';
-shouldReadLogs = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs);
-return { iterator, shouldReadLogs };
+({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
+return { iterator, shouldReadLogs, output };
});
}
static checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs) {
@@ -1308,7 +1310,7 @@ class AWSTaskRunner {
}
return { timestamp, shouldReadLogs };
}
-static logRecords(records, iterator, taskDef, shouldReadLogs) {
+static logRecords(records, iterator, taskDef, shouldReadLogs, output) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(zlib.gunzipSync(Buffer.from(records.Records[index].Data, 'base64')).toString('utf8'));
@@ -1323,15 +1325,15 @@ class AWSTaskRunner {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
-if (cloud_runner_state_1.CloudRunnerState.buildParams.logToFile) {
-fs_1.default.appendFileSync(`${cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid}-outputfile.txt`, `${message}\n`);
+if (__1.Input.cloudRunnerTests) {
+output += message;
}
cloud_runner_logger_1.default.log(message);
}
}
}
}
-return shouldReadLogs;
+return { shouldReadLogs, output };
}
static getLogStream(kinesis, kinesisStreamName) {
return __awaiter(this, void 0, void 0, function* () {
@@ -1503,10 +1505,11 @@ class AWSBuildEnvironment {
yield new aws_base_stack_1.AWSBaseStack(this.baseStackName).setupBaseStack(CF);
const taskDef = yield new aws_job_stack_1.AWSJobStack(this.baseStackName).setupCloudFormations(CF, buildGuid, image, entrypoint, commands, mountdir, workingdir, secrets);
let postRunTaskTimeMs;
+let output = '';
try {
const postSetupStacksTimeMs = Date.now();
cloud_runner_logger_1.default.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
-yield aws_task_runner_1.default.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
+output = yield aws_task_runner_1.default.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
postRunTaskTimeMs = Date.now();
cloud_runner_logger_1.default.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
}
@@ -1516,6 +1519,7 @@ class AWSBuildEnvironment {
if (postRunTaskTimeMs !== undefined)
cloud_runner_logger_1.default.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
}
+return output;
});
}
cleanupResources(CF, taskDef) {
@@ -1608,8 +1612,9 @@ class CloudRunner {
CloudRunner.setup(buildParameters);
try {
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.setupSharedResources(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
-yield new workflow_composition_root_1.WorkflowCompositionRoot().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
+const output = yield new workflow_composition_root_1.WorkflowCompositionRoot().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
+return output;
}
catch (error) {
yield cloud_runner_error_1.CloudRunnerError.handleException(error);
@@ -1780,8 +1785,9 @@ class Kubernetes {
cloud_runner_logger_1.default.log('Watching pod until running');
yield kubernetes_task_runner_1.default.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
cloud_runner_logger_1.default.log('Pod running, streaming logs');
-yield kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace, cloud_runner_logger_1.default.log);
+const output = yield kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace, cloud_runner_logger_1.default.log);
yield this.cleanupTaskResources();
+return output;
}
catch (error) {
cloud_runner_logger_1.default.log('Running job failed');
@@ -2713,13 +2719,13 @@ const cloud_runner_state_1 = __webpack_require__(70912);
class BuildStep {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
-yield BuildStep.BuildStep(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
+return yield BuildStep.BuildStep(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
});
}
static BuildStep(image, environmentVariables, secrets) {
return __awaiter(this, void 0, void 0, function* () {
cloud_runner_logger_1.default.logLine('Starting part 2/2 (build unity project)');
-yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
+return yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
export GITHUB_WORKSPACE="${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/default-build-script/" "/UnityBuilderAction"
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/entrypoint.sh" "/entrypoint.sh"
@@ -2774,7 +2780,7 @@ class SetupStep {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
-yield SetupStep.downloadRepository(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
+return yield SetupStep.downloadRepository(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
}
catch (error) {
throw error;
@@ -2785,7 +2791,7 @@ class SetupStep {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.logLine('Starting step 1/2 download game files from repository, try to use cache');
-yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
+return yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
apk update -q
apk add unzip zip git-lfs jq tree nodejs -q
export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
@@ -2837,7 +2843,7 @@ class BuildAutomationWorkflow {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
-yield BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
+return yield BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
}
catch (error) {
throw error;
@@ -2848,19 +2854,21 @@ class BuildAutomationWorkflow {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.log(`Cloud Runner is running standard build automation`);
-yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
+let output = '';
+output += yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
cloud_runner_logger_1.default.logWithTime('Download repository step time');
if (cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps !== '') {
-yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps);
+output += yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps);
}
cloud_runner_logger_1.default.logWithTime('Pre build step(s) time');
-yield new build_step_1.BuildStep().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
+output += yield new build_step_1.BuildStep().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
cloud_runner_logger_1.default.logWithTime('Build time');
if (cloud_runner_state_1.CloudRunnerState.buildParams.postBuildSteps !== '') {
-yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.postBuildSteps);
+output += yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.postBuildSteps);
}
cloud_runner_logger_1.default.logWithTime('Post build step(s) time');
cloud_runner_logger_1.default.log(`Cloud Runner finished running standard build automation`);
+return output;
}
catch (error) {
throw error;
@@ -2904,6 +2912,7 @@ class CustomWorkflow {
cloud_runner_logger_1.default.log(`Cloud Runner is running in custom job mode`);
try {
buildSteps = yaml_1.default.parse(buildSteps);
+let output = '';
for (const step of buildSteps) {
const stepSecrets = step.secrets.map((x) => {
const secret = {
@@ -2913,8 +2922,9 @@ class CustomWorkflow {
};
return secret;
});
-yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, step['image'], step['commands'], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), [...cloud_runner_state_1.CloudRunnerState.defaultSecrets, ...stepSecrets]);
+output += yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, step['image'], step['commands'], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), [...cloud_runner_state_1.CloudRunnerState.defaultSecrets, ...stepSecrets]);
}
+return output;
}
catch (error) {
cloud_runner_logger_1.default.log(`failed to parse a custom job "${buildSteps}"`);
@@ -2957,7 +2967,7 @@ class EphemeralGitHubRunnerWorkflow {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
-yield EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
+return yield EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
}
catch (error) {
throw error;
@@ -2969,7 +2979,7 @@ class EphemeralGitHubRunnerWorkflow {
try {
cloud_runner_logger_1.default.log(`Cloud Runner is running in ephemeral GitHub runner mode`);
const installAndStartRunner = ' cd ../.. && ls && mkdir actions-runner && cd actions-runner && curl -O -L https://github.com/actions/runner/releases/download/v2.283.1/actions-runner-linux-x64-2.283.1.tar.gz && tar xzf ./actions-runner-linux-x64-2.283.1.tar.gz';
-yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, installAndStartRunner, `/runner`, `/runner`, environmentVariables, secrets);
+return yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, installAndStartRunner, `/runner`, `/runner`, environmentVariables, secrets);
}
catch (error) {
throw error;
@@ -3013,7 +3023,7 @@ class WorkflowCompositionRoot {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
-yield WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
+return yield WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
}
catch (error) {
throw error;
@@ -3025,16 +3035,16 @@ class WorkflowCompositionRoot {
try {
cloud_runner_logger_1.default.log(`Workflow specified: ${cloud_runner_state_1.CloudRunnerState.buildParams.customJob}`);
if (cloud_runner_state_1.CloudRunnerState.buildParams.customJob === '') {
-yield new build_automation_workflow_1.BuildAutomationWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
+return yield new build_automation_workflow_1.BuildAutomationWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else if (cloud_runner_state_1.CloudRunnerState.buildParams.customJob === 'ephemeral') {
-yield new ephemeral_github_runner_workflow_1.EphemeralGitHubRunnerWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
+return yield new ephemeral_github_runner_workflow_1.EphemeralGitHubRunnerWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else if (cloud_runner_state_1.CloudRunnerState.buildParams.customJob === 'download') {
-yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
+return yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else {
-yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.customJob);
+return yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.customJob);
}
}
catch (error) {
@@ -3304,8 +3314,8 @@ class ImageTag {
case platform_1.default.types.Test:
return generic;
default:
throw new Error(`
Platform must be one of the ones described in the documentation.
"${platform}" is currently not supported.`);
}
}

dist/index.js.map vendored

File diff suppressed because one or more lines are too long


@@ -0,0 +1,175 @@
import fs from 'fs';
import path from 'path';
import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
export class SetupRemoteRepository {
static LFS_ASSETS_HASH;
public static async run() {
try {
fs.mkdirSync(CloudRunnerState.buildPathFull);
fs.mkdirSync(CloudRunnerState.repoPathFull);
await SetupRemoteRepository.cloneRepoWithoutLFSFiles();
await SetupRemoteRepository.createLFSHashFiles();
await SetupRemoteRepository.printLFSHashState();
const lfsCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lfs`);
const libraryCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lib`);
await CloudRunnerAgentSystem.Run(`tree ${libraryCacheFolder}`);
await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.builderPathFull}`);
await SetupRemoteRepository.libraryCaching(lfsCacheFolder, libraryCacheFolder);
await SetupRemoteRepository.lfsCaching(lfsCacheFolder);
await SetupRemoteRepository.printCacheState(lfsCacheFolder, libraryCacheFolder);
await SetupRemoteRepository.pullLatestLFS();
await SetupRemoteRepository.cacheLatestLFSFiles(lfsCacheFolder);
SetupRemoteRepository.handleCachePurging();
} catch (error) {
throw error;
}
}
private static async printLFSHashState() {
await CloudRunnerAgentSystem.Run(
`echo ' '
echo 'Contents of .lfs-assets-guid file:'
cat .lfs-assets-guid
echo ' '
echo 'Contents of .lfs-assets-guid-sum file:'
cat .lfs-assets-guid-sum
echo ' '
echo 'Source repository initialized'
ls ${CloudRunnerState.projectPathFull}
echo ' '`,
);
}
private static async printCacheState(lfsCacheFolder: string, libraryCacheFolder: string) {
await CloudRunnerAgentSystem.Run(
`echo ' '
echo "LFS cache for $branch"
du -sch "${lfsCacheFolder}/"
echo '**'
echo "Library cache for $branch"
du -sch "${libraryCacheFolder}/"
echo '**'
echo "Branch: $branch"
du -sch "${CloudRunnerState.cacheFolderFull}/"
echo '**'
echo 'Full cache'
du -sch "${CloudRunnerState.cacheFolderFull}/"
echo ' '`,
);
}
private static handleCachePurging() {
if (process.env.purgeRemoteCaching !== undefined) {
CloudRunnerLogger.logCli(`purging ${CloudRunnerState.purgeRemoteCaching}`);
fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
}
}
private static async cacheLatestLFSFiles(lfsCacheFolder: string) {
process.chdir(`${CloudRunnerState.lfsDirectory}/..`);
await CloudRunnerAgentSystem.Run(`zip -r "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "lfs"`);
CloudRunnerLogger.logCli(fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
await CloudRunnerAgentSystem.Run(
`cp "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "${path.join(
lfsCacheFolder,
`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
)}"`,
);
CloudRunnerLogger.logCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
}
private static async pullLatestLFS() {
process.chdir(CloudRunnerState.repoPathFull);
await CloudRunnerAgentSystem.Run(`git lfs pull`);
CloudRunnerLogger.logCli(`pulled latest LFS files`);
}
private static async lfsCaching(lfsCacheFolder: string) {
CloudRunnerLogger.logCli(` `);
CloudRunnerLogger.logCli(`LFS Caching`);
if (!fs.existsSync(lfsCacheFolder)) {
fs.mkdirSync(lfsCacheFolder);
}
process.chdir(lfsCacheFolder);
let latestLFSCacheFile;
if (fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)) {
CloudRunnerLogger.logCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
latestLFSCacheFile = `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`;
} else {
latestLFSCacheFile = await CloudRunnerAgentSystem.Run(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
}
if (fs.existsSync(latestLFSCacheFile)) {
CloudRunnerLogger.logCli(`LFS cache exists`);
fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
CloudRunnerLogger.logCli(
`LFS cache exists from build ${latestLFSCacheFile} from ${CloudRunnerState.buildParams.branch}`,
);
await CloudRunnerAgentSystem.Run(
`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path.join(CloudRunnerState.repoPathFull, `.git`)}"`,
);
CloudRunnerLogger.logCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
}
}
private static async libraryCaching(lfsCacheFolder: string, libraryCacheFolder: string) {
CloudRunnerLogger.logCli(`Starting checks of cache for the Unity project Library and git LFS files`);
if (!fs.existsSync(libraryCacheFolder)) {
fs.mkdirSync(libraryCacheFolder);
}
CloudRunnerLogger.logCli(`Library Caching`);
//if the unity git project has included the library delete it and echo a warning
if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
fs.rmdirSync(CloudRunnerState.libraryFolderFull, { recursive: true });
CloudRunnerLogger.logCli(
`!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`,
);
}
//Restore library cache
const latestLibraryCacheFile = await CloudRunnerAgentSystem.Run(
`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`,
);
await CloudRunnerAgentSystem.Run(`ls -lh "${libraryCacheFolder}"`);
CloudRunnerLogger.logCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
if (fs.existsSync(latestLibraryCacheFile)) {
CloudRunnerLogger.logCli(`Library cache exists`);
const latestCacheFilePath = path.join(libraryCacheFolder, latestLibraryCacheFile);
await CloudRunnerAgentSystem.Run(`unzip -q "${latestCacheFilePath}" -d "$projectPathFull"`);
}
}
private static async createLFSHashFiles() {
await CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
await CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
SetupRemoteRepository.LFS_ASSETS_HASH = fs.readFileSync(
`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`,
'utf8',
);
CloudRunnerLogger.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
}
private static async cloneRepoWithoutLFSFiles() {
try {
CloudRunnerLogger.logCli(`Initializing source repository for cloning with caching of LFS files`);
process.chdir(CloudRunnerState.repoPathFull);
await CloudRunnerAgentSystem.Run(`git config --global advice.detachedHead false`);
CloudRunnerLogger.logCli(`Cloning the repository being built:`);
await CloudRunnerAgentSystem.Run(`git lfs install --skip-smudge`);
CloudRunnerLogger.logCli(CloudRunnerState.targetBuildRepoUrl);
await CloudRunnerAgentSystem.Run(
`git clone ${CloudRunnerState.targetBuildRepoUrl} ${CloudRunnerState.repoPathFull}`,
);
await CloudRunnerAgentSystem.Run(`ls -lh`);
await CloudRunnerAgentSystem.Run(`tree`);
CloudRunnerLogger.logCli(`${CloudRunnerState.buildParams.branch}`);
await CloudRunnerAgentSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
CloudRunnerLogger.logCli(`Checked out ${process.env.GITHUB_SHA}`);
} catch (error) {
throw error;
}
}
}
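
The new class above relies on CloudRunnerAgentSystem.Run both to execute shell commands and to capture their stdout (for example when it resolves the newest cache zip via `ls -t ... | head -1`). That helper is not part of this diff; the following is only a hedged sketch of the contract those calls appear to assume, built on Node's child_process, not the project's actual implementation:

import { exec } from 'child_process';

// Hypothetical sketch of the CloudRunnerAgentSystem.Run contract assumed by SetupRemoteRepository:
// execute a shell command and resolve with its trimmed stdout so callers can treat the result as a value.
export class CloudRunnerAgentSystemSketch {
  public static Run(command: string): Promise<string> {
    return new Promise((resolve, reject) => {
      exec(command, (error, stdout, stderr) => {
        if (stderr) {
          // command diagnostics are logged but do not fail the call
          process.stdout.write(stderr.toString());
        }
        if (error) {
          reject(error);
          return;
        }
        resolve(stdout.toString().trim());
      });
    });
  }
}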


@@ -5,7 +5,6 @@ import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
import * as zlib from 'zlib';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { Input } from '../..';
-import fs from 'fs';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
@@ -72,7 +71,8 @@ class AWSTaskRunner {
core.error(error);
}
CloudRunnerLogger.log(`Cloud runner job is running`);
-await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
+const output = await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].exitCode;
CloudRunnerLogger.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {
@@ -86,6 +86,7 @@ class AWSTaskRunner {
throw new Error(`job failed with exit code ${exitCode}`);
} else {
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
+return output;
}
}
@@ -121,17 +122,20 @@ class AWSTaskRunner {
CloudRunnerLogger.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let shouldReadLogs = true;
let timestamp: number = 0;
+let output = '';
while (shouldReadLogs) {
await new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
-({ iterator, shouldReadLogs } = await AWSTaskRunner.handleLogStreamIteration(
+({ iterator, shouldReadLogs, output } = await AWSTaskRunner.handleLogStreamIteration(
kinesis,
iterator,
shouldReadLogs,
taskDef,
+output,
));
}
+return output;
}
private static async handleLogStreamIteration(
@@ -139,6 +143,7 @@ class AWSTaskRunner {
iterator: string,
shouldReadLogs: boolean,
taskDef: CloudRunnerAWSTaskDef,
+output: string,
) {
const records = await kinesis
.getRecords({
@@ -146,8 +151,8 @@ class AWSTaskRunner {
})
.promise();
iterator = records.NextShardIterator || '';
-shouldReadLogs = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs);
-return { iterator, shouldReadLogs };
+({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
+return { iterator, shouldReadLogs, output };
}
private static checkStreamingShouldContinue(taskData: AWS.ECS.Task, timestamp: number, shouldReadLogs: boolean) {
@@ -165,7 +170,13 @@ class AWSTaskRunner {
return { timestamp, shouldReadLogs };
}
-private static logRecords(records, iterator: string, taskDef: CloudRunnerAWSTaskDef, shouldReadLogs: boolean) {
+private static logRecords(
+records,
+iterator: string,
+taskDef: CloudRunnerAWSTaskDef,
+shouldReadLogs: boolean,
+output: string,
+) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(
@@ -181,15 +192,15 @@ class AWSTaskRunner {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
-if (CloudRunnerState.buildParams.logToFile) {
-fs.appendFileSync(`${CloudRunnerState.buildParams.buildGuid}-outputfile.txt`, `${message}\n`);
+if (Input.cloudRunnerTests) {
+output += message;
}
CloudRunnerLogger.log(message);
}
}
}
}
-return shouldReadLogs;
+return { shouldReadLogs, output };
}
private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
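
The shape of this change is easier to see in isolation: an output accumulator is threaded through streamLogsUntilTaskStops, handleLogStreamIteration, and logRecords, and returned alongside the loop-control flag instead of being appended to a file. A simplified sketch of that pattern (record decoding and the AWS calls are stubbed out; names here are illustrative, not the project's code):

// Simplified illustration of the { shouldReadLogs, output } threading used above.
// `messages` stands in for the gunzipped Kinesis records; `collectOutput` plays the
// role of Input.cloudRunnerTests, switching in-memory collection on or off.
function logRecordsSketch(messages: string[], shouldReadLogs: boolean, collectOutput: boolean, output: string) {
  for (const raw of messages) {
    const message = `[CloudRunner] ${raw}`;
    if (collectOutput) {
      output += message; // accumulate in memory instead of appending to a log file on disk
    }
    console.log(message);
  }
  return { shouldReadLogs, output };
}

let shouldReadLogs = true;
let output = '';
// The caller side mirrors the destructuring assignments in the diff:
({ shouldReadLogs, output } = logRecordsSketch(['line one', 'line two'], shouldReadLogs, true, output));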


@@ -45,7 +45,7 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
workingdir: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
-): Promise<void> {
+): Promise<string> {
process.env.AWS_REGION = Input.region;
const ECS = new SDK.ECS();
const CF = new SDK.CloudFormation();
@@ -66,10 +66,11 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
);
let postRunTaskTimeMs;
+let output = '';
try {
const postSetupStacksTimeMs = Date.now();
CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
-await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
+output = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
postRunTaskTimeMs = Date.now();
CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
} finally {
@@ -78,6 +79,7 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
if (postRunTaskTimeMs !== undefined)
CloudRunnerLogger.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
}
+return output;
}
async cleanupResources(CF: SDK.CloudFormation, taskDef: CloudRunnerAWSTaskDef) {


@@ -1,8 +1,6 @@
import { BuildParameters, ImageTag } from '..';
import CloudRunner from './cloud-runner';
import Input from '../input';
-import fs from 'fs';
-import { CloudRunnerState } from './state/cloud-runner-state';
import { CloudRunnerStatics } from './cloud-runner-statics';
import { TaskParameterSerializer } from './services/task-parameter-serializer';
@@ -30,9 +28,7 @@ describe('Cloud Runner', () => {
const buildParameter = await BuildParameters.create();
buildParameter.logToFile = true;
const baseImage = new ImageTag(buildParameter);
-await CloudRunner.run(buildParameter, baseImage.toString());
-const testOutput = `${CloudRunnerState.buildParams.buildGuid}-outputfile.txt`;
-const file = fs.readFileSync(testOutput, 'utf-8').toString();
+const file = await CloudRunner.run(buildParameter, baseImage.toString());
expect(file).toContain(JSON.stringify(buildParameter));
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
@@ -41,8 +37,10 @@ describe('Cloud Runner', () => {
.replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
for (const element of environmentVariables) {
if (element.value !== undefined && typeof element.value !== 'function') {
-const newLinePurgedValue = element.value.toString().replace(/\s+/g, '');
-expect(newLinePurgedFile).toContain(`${element.name}=${newLinePurgedValue}`);
+if (typeof element.value === `string`) {
+element.value = element.value.toString().replace(/\s+/g, '');
+}
+expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
}
}
}


@@ -38,7 +38,7 @@ class CloudRunner {
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
-await new WorkflowCompositionRoot().run(
+const output = await new WorkflowCompositionRoot().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
@@ -51,6 +51,7 @@
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
+return output;
} catch (error) {
await CloudRunnerError.handleException(error);
throw error;


@@ -73,7 +73,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
workingdir: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
-): Promise<void> {
+): Promise<string> {
try {
// setup
this.buildGuid = buildGuid;
@@ -105,7 +105,7 @@
CloudRunnerLogger.log('Watching pod until running');
await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
CloudRunnerLogger.log('Pod running, streaming logs');
-await KubernetesTaskRunner.runTask(
+const output = await KubernetesTaskRunner.runTask(
this.kubeConfig,
this.kubeClient,
this.jobName,
@@ -115,6 +115,7 @@
CloudRunnerLogger.log,
);
await this.cleanupTaskResources();
+return output;
} catch (error) {
CloudRunnerLogger.log('Running job failed');
core.error(JSON.stringify(error, undefined, 4));


@@ -38,5 +38,5 @@ export interface CloudRunnerProviderInterface {
environment: CloudRunnerEnvironmentVariable[],
// eslint-disable-next-line no-unused-vars
secrets: CloudRunnerSecret[],
-): Promise<void>;
+): Promise<string>;
}
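
With this interface change, every provider's runTask is now expected to resolve with the log output it captured rather than void, which is what lets the workflows concatenate step results and CloudRunner.run hand the combined text back to the test. A hedged sketch of a minimal conforming provider (the parameter list is abbreviated here and is not the full interface from the diff):

// Minimal illustrative provider for the new Promise<string> contract.
// The real interface also takes commands, mount/working directories, environment variables and secrets.
interface MinimalRunTaskProvider {
  runTask(buildGuid: string, image: string, commands: string): Promise<string>;
}

class EchoProviderSketch implements MinimalRunTaskProvider {
  async runTask(buildGuid: string, image: string, commands: string): Promise<string> {
    let output = '';
    output += `running ${buildGuid} on ${image}\n`;
    output += `${commands}\n`;
    return output; // callers append this to their own accumulated output
  }
}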


@@ -7,7 +7,7 @@ import { StepInterface } from './step-interface';
export class BuildStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
-await BuildStep.BuildStep(
+return await BuildStep.BuildStep(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,
@@ -20,7 +20,7 @@ export class BuildStep implements StepInterface {
secrets: CloudRunnerSecret[],
) {
CloudRunnerLogger.logLine('Starting part 2/2 (build unity project)');
-await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
+return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
`


@@ -8,7 +8,7 @@ import { StepInterface } from './step-interface';
export class SetupStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
-await SetupStep.downloadRepository(
+return await SetupStep.downloadRepository(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,
@@ -25,7 +25,7 @@ export class SetupStep implements StepInterface {
) {
try {
CloudRunnerLogger.logLine('Starting step 1/2 download game files from repository, try to use cache');
-await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
+return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
`


@@ -10,7 +10,7 @@ import { WorkflowInterface } from './workflow-interface';
export class BuildAutomationWorkflow implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
-await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
+return await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
} catch (error) {
throw error;
}
@@ -19,8 +19,8 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
private static async standardBuildAutomation(baseImage: any) {
try {
CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);
-await new SetupStep().run(
+let output = '';
+output += await new SetupStep().run(
new CloudRunnerStepState(
'alpine/git',
TaskParameterSerializer.readBuildEnvironmentVariables(),
@@ -29,11 +29,11 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
);
CloudRunnerLogger.logWithTime('Download repository step time');
if (CloudRunnerState.buildParams.preBuildSteps !== '') {
-await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
+output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
}
CloudRunnerLogger.logWithTime('Pre build step(s) time');
-await new BuildStep().run(
+output += await new BuildStep().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
@@ -43,11 +43,13 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
CloudRunnerLogger.logWithTime('Build time');
if (CloudRunnerState.buildParams.postBuildSteps !== '') {
-await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
+output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
}
CloudRunnerLogger.logWithTime('Post build step(s) time');
CloudRunnerLogger.log(`Cloud Runner finished running standard build automation`);
+return output;
} catch (error) {
throw error;
}


@@ -11,6 +11,7 @@ export class CustomWorkflow {
CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
try {
buildSteps = YAML.parse(buildSteps);
+let output = '';
for (const step of buildSteps) {
const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
const secret: CloudRunnerSecret = {
@@ -20,7 +21,7 @@ export class CustomWorkflow {
};
return secret;
});
-await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
+output += await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
step['image'],
step['commands'],
@@ -30,6 +31,7 @@ export class CustomWorkflow {
[...CloudRunnerState.defaultSecrets, ...stepSecrets],
);
}
+return output;
} catch (error) {
CloudRunnerLogger.log(`failed to parse a custom job "${buildSteps}"`);
throw error;


@@ -8,7 +8,7 @@ import { WorkflowInterface } from './workflow-interface';
export class EphemeralGitHubRunnerWorkflow implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
-await EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(
+return await EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,
@@ -27,7 +27,7 @@ export class EphemeralGitHubRunnerWorkflow implements WorkflowInterface {
CloudRunnerLogger.log(`Cloud Runner is running in ephemeral GitHub runner mode`);
const installAndStartRunner =
' cd ../.. && ls && mkdir actions-runner && cd actions-runner && curl -O -L https://github.com/actions/runner/releases/download/v2.283.1/actions-runner-linux-x64-2.283.1.tar.gz && tar xzf ./actions-runner-linux-x64-2.283.1.tar.gz';
-await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
+return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
installAndStartRunner,


@@ -11,7 +11,7 @@ import { TaskParameterSerializer } from '../services/task-parameter-serializer';
export class WorkflowCompositionRoot implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
-await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
+return await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
} catch (error) {
throw error;
}
@@ -21,7 +21,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
try {
CloudRunnerLogger.log(`Workflow specified: ${CloudRunnerState.buildParams.customJob}`);
if (CloudRunnerState.buildParams.customJob === '') {
-await new BuildAutomationWorkflow().run(
+return await new BuildAutomationWorkflow().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
@@ -29,7 +29,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else if (CloudRunnerState.buildParams.customJob === 'ephemeral') {
-await new EphemeralGitHubRunnerWorkflow().run(
+return await new EphemeralGitHubRunnerWorkflow().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),
@@ -37,7 +37,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else if (CloudRunnerState.buildParams.customJob === 'download') {
-await new SetupStep().run(
+return await new SetupStep().run(
new CloudRunnerStepState(
'alpine/git',
TaskParameterSerializer.readBuildEnvironmentVariables(),
@@ -45,7 +45,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else {
-await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
+return await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
}
} catch (error) {
throw error;