fix
parent ac4113a7a3
commit 9366b55b0f
@@ -335,10 +335,10 @@ class Cache {
if (action_1.default.isRunningLocally) {
return;
}
core.warning(`
Library folder does not exist.
Consider setting up caching to speed up your workflow,
if this is not your first build.
core.warning(`
Library folder does not exist.
Consider setting up caching to speed up your workflow,
if this is not your first build.
`);
}
}

@@ -1187,7 +1186,6 @@ const core = __importStar(__webpack_require__(42186));
const zlib = __importStar(__webpack_require__(78761));
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const __1 = __webpack_require__(41359);
const fs_1 = __importDefault(__webpack_require__(35747));
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_statics_1 = __webpack_require__(90828);
const cloud_runner_build_command_process_1 = __webpack_require__(71899);

@@ -1236,7 +1235,7 @@ class AWSTaskRunner {
core.error(error);
}
cloud_runner_logger_1.default.log(`Cloud runner job is running`);
yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const output = yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (_q = (yield AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _q === void 0 ? void 0 : _q[0].exitCode;
cloud_runner_logger_1.default.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {

@@ -1245,6 +1244,7 @@ class AWSTaskRunner {
}
else {
cloud_runner_logger_1.default.log(`Cloud runner job has finished successfully`);
return output;
}
});
}

@@ -1274,15 +1274,17 @@ class AWSTaskRunner {
cloud_runner_logger_1.default.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let shouldReadLogs = true;
let timestamp = 0;
let output = '';
while (shouldReadLogs) {
yield new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = yield AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
({ iterator, shouldReadLogs } = yield AWSTaskRunner.handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef));
({ iterator, shouldReadLogs, output } = yield AWSTaskRunner.handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef, output));
}
return output;
});
}
static handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef) {
static handleLogStreamIteration(kinesis, iterator, shouldReadLogs, taskDef, output) {
return __awaiter(this, void 0, void 0, function* () {
const records = yield kinesis
.getRecords({

@@ -1290,8 +1292,8 @@ class AWSTaskRunner {
})
.promise();
iterator = records.NextShardIterator || '';
shouldReadLogs = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs);
return { iterator, shouldReadLogs };
({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
return { iterator, shouldReadLogs, output };
});
}
static checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs) {

@@ -1308,7 +1310,7 @@ class AWSTaskRunner {
}
return { timestamp, shouldReadLogs };
}
static logRecords(records, iterator, taskDef, shouldReadLogs) {
static logRecords(records, iterator, taskDef, shouldReadLogs, output) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(zlib.gunzipSync(Buffer.from(records.Records[index].Data, 'base64')).toString('utf8'));

@@ -1323,15 +1325,15 @@ class AWSTaskRunner {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
if (cloud_runner_state_1.CloudRunnerState.buildParams.logToFile) {
fs_1.default.appendFileSync(`${cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid}-outputfile.txt`, `${message}\n`);
if (__1.Input.cloudRunnerTests) {
output += message;
}
cloud_runner_logger_1.default.log(message);
}
}
}
}
return shouldReadLogs;
return { shouldReadLogs, output };
}
static getLogStream(kinesis, kinesisStreamName) {
return __awaiter(this, void 0, void 0, function* () {

@@ -1503,10 +1505,11 @@ class AWSBuildEnvironment {
yield new aws_base_stack_1.AWSBaseStack(this.baseStackName).setupBaseStack(CF);
const taskDef = yield new aws_job_stack_1.AWSJobStack(this.baseStackName).setupCloudFormations(CF, buildGuid, image, entrypoint, commands, mountdir, workingdir, secrets);
let postRunTaskTimeMs;
let output = '';
try {
const postSetupStacksTimeMs = Date.now();
cloud_runner_logger_1.default.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
yield aws_task_runner_1.default.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
output = yield aws_task_runner_1.default.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
postRunTaskTimeMs = Date.now();
cloud_runner_logger_1.default.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
}

@@ -1516,6 +1519,7 @@ class AWSBuildEnvironment {
if (postRunTaskTimeMs !== undefined)
cloud_runner_logger_1.default.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
}
return output;
});
}
cleanupResources(CF, taskDef) {

@@ -1608,8 +1612,9 @@ class CloudRunner {
CloudRunner.setup(buildParameters);
try {
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.setupSharedResources(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
yield new workflow_composition_root_1.WorkflowCompositionRoot().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
const output = yield new workflow_composition_root_1.WorkflowCompositionRoot().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
return output;
}
catch (error) {
yield cloud_runner_error_1.CloudRunnerError.handleException(error);

@@ -1780,8 +1785,9 @@ class Kubernetes {
cloud_runner_logger_1.default.log('Watching pod until running');
yield kubernetes_task_runner_1.default.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
cloud_runner_logger_1.default.log('Pod running, streaming logs');
yield kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace, cloud_runner_logger_1.default.log);
const output = yield kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace, cloud_runner_logger_1.default.log);
yield this.cleanupTaskResources();
return output;
}
catch (error) {
cloud_runner_logger_1.default.log('Running job failed');

@@ -2713,13 +2719,13 @@ const cloud_runner_state_1 = __webpack_require__(70912);
class BuildStep {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
yield BuildStep.BuildStep(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
return yield BuildStep.BuildStep(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
});
}
static BuildStep(image, environmentVariables, secrets) {
return __awaiter(this, void 0, void 0, function* () {
cloud_runner_logger_1.default.logLine('Starting part 2/2 (build unity project)');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
return yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
export GITHUB_WORKSPACE="${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/default-build-script/" "/UnityBuilderAction"
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/entrypoint.sh" "/entrypoint.sh"

@@ -2774,7 +2780,7 @@ class SetupStep {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield SetupStep.downloadRepository(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
return yield SetupStep.downloadRepository(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
}
catch (error) {
throw error;

@@ -2785,7 +2791,7 @@ class SetupStep {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.logLine('Starting step 1/2 download game files from repository, try to use cache');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
return yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, `
apk update -q
apk add unzip zip git-lfs jq tree nodejs -q
export GIT_DISCOVERY_ACROSS_FILESYSTEM=1

@@ -2837,7 +2843,7 @@ class BuildAutomationWorkflow {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
return yield BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
}
catch (error) {
throw error;

@@ -2848,19 +2854,21 @@ class BuildAutomationWorkflow {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.log(`Cloud Runner is running standard build automation`);
yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
let output = '';
output += yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
cloud_runner_logger_1.default.logWithTime('Download repository step time');
if (cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps !== '') {
yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps);
output += yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps);
}
cloud_runner_logger_1.default.logWithTime('Pre build step(s) time');
yield new build_step_1.BuildStep().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
output += yield new build_step_1.BuildStep().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
cloud_runner_logger_1.default.logWithTime('Build time');
if (cloud_runner_state_1.CloudRunnerState.buildParams.postBuildSteps !== '') {
yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.postBuildSteps);
output += yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.postBuildSteps);
}
cloud_runner_logger_1.default.logWithTime('Post build step(s) time');
cloud_runner_logger_1.default.log(`Cloud Runner finished running standard build automation`);
return output;
}
catch (error) {
throw error;

@@ -2904,6 +2912,7 @@ class CustomWorkflow {
cloud_runner_logger_1.default.log(`Cloud Runner is running in custom job mode`);
try {
buildSteps = yaml_1.default.parse(buildSteps);
let output = '';
for (const step of buildSteps) {
const stepSecrets = step.secrets.map((x) => {
const secret = {

@@ -2913,8 +2922,9 @@ class CustomWorkflow {
};
return secret;
});
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, step['image'], step['commands'], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), [...cloud_runner_state_1.CloudRunnerState.defaultSecrets, ...stepSecrets]);
output += yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, step['image'], step['commands'], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), [...cloud_runner_state_1.CloudRunnerState.defaultSecrets, ...stepSecrets]);
}
return output;
}
catch (error) {
cloud_runner_logger_1.default.log(`failed to parse a custom job "${buildSteps}"`);

@@ -2957,7 +2967,7 @@ class EphemeralGitHubRunnerWorkflow {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
return yield EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
}
catch (error) {
throw error;

@@ -2969,7 +2979,7 @@ class EphemeralGitHubRunnerWorkflow {
try {
cloud_runner_logger_1.default.log(`Cloud Runner is running in ephemeral GitHub runner mode`);
const installAndStartRunner = ' cd ../.. && ls && mkdir actions-runner && cd actions-runner && curl -O -L https://github.com/actions/runner/releases/download/v2.283.1/actions-runner-linux-x64-2.283.1.tar.gz && tar xzf ./actions-runner-linux-x64-2.283.1.tar.gz';
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, installAndStartRunner, `/runner`, `/runner`, environmentVariables, secrets);
return yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildParams.buildGuid, image, installAndStartRunner, `/runner`, `/runner`, environmentVariables, secrets);
}
catch (error) {
throw error;

@@ -3013,7 +3023,7 @@ class WorkflowCompositionRoot {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
return yield WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
}
catch (error) {
throw error;

@@ -3025,16 +3035,16 @@ class WorkflowCompositionRoot {
try {
cloud_runner_logger_1.default.log(`Workflow specified: ${cloud_runner_state_1.CloudRunnerState.buildParams.customJob}`);
if (cloud_runner_state_1.CloudRunnerState.buildParams.customJob === '') {
yield new build_automation_workflow_1.BuildAutomationWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
return yield new build_automation_workflow_1.BuildAutomationWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else if (cloud_runner_state_1.CloudRunnerState.buildParams.customJob === 'ephemeral') {
yield new ephemeral_github_runner_workflow_1.EphemeralGitHubRunnerWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
return yield new ephemeral_github_runner_workflow_1.EphemeralGitHubRunnerWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else if (cloud_runner_state_1.CloudRunnerState.buildParams.customJob === 'download') {
yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
return yield new setup_step_1.SetupStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else {
yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.customJob);
return yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.customJob);
}
}
catch (error) {

@@ -3304,8 +3314,8 @@ class ImageTag {
case platform_1.default.types.Test:
return generic;
default:
throw new Error(`
Platform must be one of the ones described in the documentation.
throw new Error(`
Platform must be one of the ones described in the documentation.
"${platform}" is currently not supported.`);
}
}

File diff suppressed because one or more lines are too long
@@ -0,0 +1,175 @@
import fs from 'fs';
import path from 'path';
import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';

export class SetupRemoteRepository {
static LFS_ASSETS_HASH;
public static async run() {
try {
fs.mkdirSync(CloudRunnerState.buildPathFull);
fs.mkdirSync(CloudRunnerState.repoPathFull);
await SetupRemoteRepository.cloneRepoWithoutLFSFiles();

await SetupRemoteRepository.createLFSHashFiles();
await SetupRemoteRepository.printLFSHashState();
const lfsCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lfs`);
const libraryCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lib`);
await CloudRunnerAgentSystem.Run(`tree ${libraryCacheFolder}`);
await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.builderPathFull}`);
await SetupRemoteRepository.libraryCaching(lfsCacheFolder, libraryCacheFolder);
await SetupRemoteRepository.lfsCaching(lfsCacheFolder);

await SetupRemoteRepository.printCacheState(lfsCacheFolder, libraryCacheFolder);
await SetupRemoteRepository.pullLatestLFS();
await SetupRemoteRepository.cacheLatestLFSFiles(lfsCacheFolder);
SetupRemoteRepository.handleCachePurging();
} catch (error) {
throw error;
}
}

private static async printLFSHashState() {
await CloudRunnerAgentSystem.Run(
`echo ' '
echo 'Contents of .lfs-assets-guid file:'
cat .lfs-assets-guid
echo ' '
echo 'Contents of .lfs-assets-guid-sum file:'
cat .lfs-assets-guid-sum
echo ' '
echo 'Source repository initialized'
ls ${CloudRunnerState.projectPathFull}
echo ' '`,
);
}

private static async printCacheState(lfsCacheFolder: string, libraryCacheFolder: string) {
await CloudRunnerAgentSystem.Run(
`echo ' '
echo "LFS cache for $branch"
du -sch "${lfsCacheFolder}/"
echo '**'
echo "Library cache for $branch"
du -sch "${libraryCacheFolder}/"
echo '**'
echo "Branch: $branch"
du -sch "${CloudRunnerState.cacheFolderFull}/"
echo '**'
echo 'Full cache'
du -sch "${CloudRunnerState.cacheFolderFull}/"
echo ' '`,
);
}

private static handleCachePurging() {
if (process.env.purgeRemoteCaching !== undefined) {
CloudRunnerLogger.logCli(`purging ${CloudRunnerState.purgeRemoteCaching}`);
fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
}
}

private static async cacheLatestLFSFiles(lfsCacheFolder: string) {
process.chdir(`${CloudRunnerState.lfsDirectory}/..`);
await CloudRunnerAgentSystem.Run(`zip -r "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "lfs"`);
CloudRunnerLogger.logCli(fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
await CloudRunnerAgentSystem.Run(
`cp "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "${path.join(
lfsCacheFolder,
`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
)}"`,
);
CloudRunnerLogger.logCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
}

private static async pullLatestLFS() {
process.chdir(CloudRunnerState.repoPathFull);
await CloudRunnerAgentSystem.Run(`git lfs pull`);
CloudRunnerLogger.logCli(`pulled latest LFS files`);
}

private static async lfsCaching(lfsCacheFolder: string) {
CloudRunnerLogger.logCli(` `);
CloudRunnerLogger.logCli(`LFS Caching`);
if (!fs.existsSync(lfsCacheFolder)) {
fs.mkdirSync(lfsCacheFolder);
}
process.chdir(lfsCacheFolder);
let latestLFSCacheFile;
if (fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)) {
CloudRunnerLogger.logCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
latestLFSCacheFile = `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`;
} else {
latestLFSCacheFile = await CloudRunnerAgentSystem.Run(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
}
if (fs.existsSync(latestLFSCacheFile)) {
CloudRunnerLogger.logCli(`LFS cache exists`);
fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
CloudRunnerLogger.logCli(
`LFS cache exists from build ${latestLFSCacheFile} from ${CloudRunnerState.buildParams.branch}`,
);
await CloudRunnerAgentSystem.Run(
`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path.join(CloudRunnerState.repoPathFull, `.git`)}"`,
);
CloudRunnerLogger.logCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
}
}

private static async libraryCaching(lfsCacheFolder: string, libraryCacheFolder: string) {
CloudRunnerLogger.logCli(`Starting checks of cache for the Unity project Library and git LFS files`);
if (!fs.existsSync(libraryCacheFolder)) {
fs.mkdirSync(libraryCacheFolder);
}
CloudRunnerLogger.logCli(`Library Caching`);
//if the unity git project has included the library delete it and echo a warning
if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
fs.rmdirSync(CloudRunnerState.libraryFolderFull, { recursive: true });
CloudRunnerLogger.logCli(
`!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`,
);
}
//Restore library cache
const latestLibraryCacheFile = await CloudRunnerAgentSystem.Run(
`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`,
);
await CloudRunnerAgentSystem.Run(`ls -lh "${libraryCacheFolder}"`);
CloudRunnerLogger.logCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
if (fs.existsSync(latestLibraryCacheFile)) {
CloudRunnerLogger.logCli(`Library cache exists`);
const latestCacheFilePath = path.join(libraryCacheFolder, latestLibraryCacheFile);
await CloudRunnerAgentSystem.Run(`unzip -q "${latestCacheFilePath}" -d "$projectPathFull"`);
}
}

private static async createLFSHashFiles() {
await CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
await CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
SetupRemoteRepository.LFS_ASSETS_HASH = fs.readFileSync(
`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`,
'utf8',
);
CloudRunnerLogger.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
}

private static async cloneRepoWithoutLFSFiles() {
try {
CloudRunnerLogger.logCli(`Initializing source repository for cloning with caching of LFS files`);
process.chdir(CloudRunnerState.repoPathFull);
await CloudRunnerAgentSystem.Run(`git config --global advice.detachedHead false`);
CloudRunnerLogger.logCli(`Cloning the repository being built:`);
await CloudRunnerAgentSystem.Run(`git lfs install --skip-smudge`);
CloudRunnerLogger.logCli(CloudRunnerState.targetBuildRepoUrl);
await CloudRunnerAgentSystem.Run(
`git clone ${CloudRunnerState.targetBuildRepoUrl} ${CloudRunnerState.repoPathFull}`,
);
await CloudRunnerAgentSystem.Run(`ls -lh`);
await CloudRunnerAgentSystem.Run(`tree`);
CloudRunnerLogger.logCli(`${CloudRunnerState.buildParams.branch}`);
await CloudRunnerAgentSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
CloudRunnerLogger.logCli(`Checked out ${process.env.GITHUB_SHA}`);
} catch (error) {
throw error;
}
}
}
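Editor's note, not part of this commit: the class above derives a cache key from the list of git-LFS object ids (createLFSHashFiles), looks for a matching `<key>.zip` in the cache folder before falling back to `git lfs pull`, and re-zips the `.git/lfs` folder afterwards. A rough, self-contained TypeScript sketch of that flow, assuming a POSIX shell with git-lfs, md5sum and unzip on the PATH; the function names below are illustrative and do not exist in the codebase:

import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';

// Derive a key that changes whenever the set of LFS-tracked objects changes
// (same command pipeline as createLFSHashFiles above).
function lfsCacheKey(repoPath: string): string {
  execSync(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`, { cwd: repoPath });
  execSync(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`, { cwd: repoPath });
  // The class stores the raw sorted id list in LFS_ASSETS_HASH; the md5 digest is used
  // here as a compact equivalent key.
  return fs.readFileSync(path.join(repoPath, '.lfs-assets-guid-sum'), 'utf8').split(' ')[0];
}

// Restore a previously zipped .git/lfs folder when the key matches, so `git lfs pull`
// only has to download objects that are not already cached.
function restoreLfsCache(repoPath: string, lfsCacheFolder: string): boolean {
  const cachedZip = path.join(lfsCacheFolder, `${lfsCacheKey(repoPath)}.zip`);
  if (!fs.existsSync(cachedZip)) return false;
  execSync(`unzip -q "${cachedZip}" -d "${path.join(repoPath, '.git')}"`, { cwd: repoPath });
  return true;
}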
@@ -5,7 +5,6 @@ import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
import * as zlib from 'zlib';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { Input } from '../..';
import fs from 'fs';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';

@@ -72,7 +71,8 @@ class AWSTaskRunner {
core.error(error);
}
CloudRunnerLogger.log(`Cloud runner job is running`);
await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);

const output = await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].exitCode;
CloudRunnerLogger.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {

@@ -86,6 +86,7 @@ class AWSTaskRunner {
throw new Error(`job failed with exit code ${exitCode}`);
} else {
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
return output;
}
}

@@ -121,17 +122,20 @@ class AWSTaskRunner {
CloudRunnerLogger.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let shouldReadLogs = true;
let timestamp: number = 0;
let output = '';
while (shouldReadLogs) {
await new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
({ iterator, shouldReadLogs } = await AWSTaskRunner.handleLogStreamIteration(
({ iterator, shouldReadLogs, output } = await AWSTaskRunner.handleLogStreamIteration(
kinesis,
iterator,
shouldReadLogs,
taskDef,
output,
));
}
return output;
}

private static async handleLogStreamIteration(

@@ -139,6 +143,7 @@ class AWSTaskRunner {
iterator: string,
shouldReadLogs: boolean,
taskDef: CloudRunnerAWSTaskDef,
output: string,
) {
const records = await kinesis
.getRecords({

@@ -146,8 +151,8 @@ class AWSTaskRunner {
})
.promise();
iterator = records.NextShardIterator || '';
shouldReadLogs = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs);
return { iterator, shouldReadLogs };
({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
return { iterator, shouldReadLogs, output };
}

private static checkStreamingShouldContinue(taskData: AWS.ECS.Task, timestamp: number, shouldReadLogs: boolean) {

@@ -165,7 +170,13 @@ class AWSTaskRunner {
return { timestamp, shouldReadLogs };
}

private static logRecords(records, iterator: string, taskDef: CloudRunnerAWSTaskDef, shouldReadLogs: boolean) {
private static logRecords(
records,
iterator: string,
taskDef: CloudRunnerAWSTaskDef,
shouldReadLogs: boolean,
output: string,
) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(

@@ -181,15 +192,15 @@ class AWSTaskRunner {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
if (CloudRunnerState.buildParams.logToFile) {
fs.appendFileSync(`${CloudRunnerState.buildParams.buildGuid}-outputfile.txt`, `${message}\n`);
if (Input.cloudRunnerTests) {
output += message;
}
CloudRunnerLogger.log(message);
}
}
}
}
return shouldReadLogs;
return { shouldReadLogs, output };
}

private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
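Editor's sketch, not part of the diff: the change above threads an accumulated output string through the Kinesis polling loop so that streamLogsUntilTaskStops can hand the full captured log text back to runTask. A minimal generic version of that pattern, with the record source stubbed out rather than using the real AWS.Kinesis client:

type StreamState = { iterator: string; shouldReadLogs: boolean; output: string };

// `readBatch` stands in for kinesis.getRecords(...).promise(): it returns the next shard
// iterator, any decoded log lines, and whether the task has stopped.
async function streamUntilTaskStops(
  readBatch: (iterator: string) => Promise<{ nextIterator: string; lines: string[]; taskStopped: boolean }>,
): Promise<string> {
  let state: StreamState = { iterator: '', shouldReadLogs: true, output: '' };
  while (state.shouldReadLogs) {
    await new Promise((resolve) => setTimeout(resolve, 1500)); // same 1.5s poll interval as above
    const batch = await readBatch(state.iterator);
    // Mirrors handleLogStreamIteration: advance the iterator, append messages, decide whether to continue.
    state = {
      iterator: batch.nextIterator,
      shouldReadLogs: state.shouldReadLogs && !batch.taskStopped,
      output: state.output + batch.lines.map((line) => `${line}\n`).join(''),
    };
  }
  return state.output; // runTask can now resolve with the captured logs instead of void
}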
@@ -45,7 +45,7 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
workingdir: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
): Promise<void> {
): Promise<string> {
process.env.AWS_REGION = Input.region;
const ECS = new SDK.ECS();
const CF = new SDK.CloudFormation();

@@ -66,10 +66,11 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
);

let postRunTaskTimeMs;
let output = '';
try {
const postSetupStacksTimeMs = Date.now();
CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
output = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
postRunTaskTimeMs = Date.now();
CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
} finally {

@@ -78,6 +79,7 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
if (postRunTaskTimeMs !== undefined)
CloudRunnerLogger.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
}
return output;
}

async cleanupResources(CF: SDK.CloudFormation, taskDef: CloudRunnerAWSTaskDef) {
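Side note on the shape above (illustrative, not from the commit): output is initialised to an empty string before the try block, so the finally-based stack cleanup still runs and the method has a string to return once it completes, even when the task produced no logs. The generic form of that try/finally:

async function runWithCleanup(run: () => Promise<string>, cleanup: () => Promise<void>): Promise<string> {
  let output = '';
  try {
    output = await run(); // captured task logs
  } finally {
    await cleanup(); // stack teardown happens whether or not run() threw
  }
  return output;
}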
@@ -1,8 +1,6 @@
import { BuildParameters, ImageTag } from '..';
import CloudRunner from './cloud-runner';
import Input from '../input';
import fs from 'fs';
import { CloudRunnerState } from './state/cloud-runner-state';
import { CloudRunnerStatics } from './cloud-runner-statics';
import { TaskParameterSerializer } from './services/task-parameter-serializer';

@@ -30,9 +28,7 @@ describe('Cloud Runner', () => {
const buildParameter = await BuildParameters.create();
buildParameter.logToFile = true;
const baseImage = new ImageTag(buildParameter);
await CloudRunner.run(buildParameter, baseImage.toString());
const testOutput = `${CloudRunnerState.buildParams.buildGuid}-outputfile.txt`;
const file = fs.readFileSync(testOutput, 'utf-8').toString();
const file = await CloudRunner.run(buildParameter, baseImage.toString());
expect(file).toContain(JSON.stringify(buildParameter));
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();

@@ -41,8 +37,10 @@ describe('Cloud Runner', () => {
.replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
for (const element of environmentVariables) {
if (element.value !== undefined && typeof element.value !== 'function') {
const newLinePurgedValue = element.value.toString().replace(/\s+/g, '');
expect(newLinePurgedFile).toContain(`${element.name}=${newLinePurgedValue}`);
if (typeof element.value === `string`) {
element.value = element.value.toString().replace(/\s+/g, '');
}
expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
}
}
}
@@ -38,7 +38,7 @@ class CloudRunner {
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
await new WorkflowCompositionRoot().run(
const output = await new WorkflowCompositionRoot().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),

@@ -51,6 +51,7 @@ class CloudRunner {
CloudRunnerState.branchName,
CloudRunnerState.defaultSecrets,
);
return output;
} catch (error) {
await CloudRunnerError.handleException(error);
throw error;
@@ -73,7 +73,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
workingdir: string,
environment: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
): Promise<void> {
): Promise<string> {
try {
// setup
this.buildGuid = buildGuid;

@@ -105,7 +105,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
CloudRunnerLogger.log('Watching pod until running');
await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
CloudRunnerLogger.log('Pod running, streaming logs');
await KubernetesTaskRunner.runTask(
const output = await KubernetesTaskRunner.runTask(
this.kubeConfig,
this.kubeClient,
this.jobName,

@@ -115,6 +115,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
CloudRunnerLogger.log,
);
await this.cleanupTaskResources();
return output;
} catch (error) {
CloudRunnerLogger.log('Running job failed');
core.error(JSON.stringify(error, undefined, 4));
@@ -38,5 +38,5 @@ export interface CloudRunnerProviderInterface {
environment: CloudRunnerEnvironmentVariable[],
// eslint-disable-next-line no-unused-vars
secrets: CloudRunnerSecret[],
): Promise<void>;
): Promise<string>;
}
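Illustrative sketch, not part of the commit: with the interface now returning Promise<string>, every provider has to resolve runTask with the captured log output rather than void. A toy conforming stand-in (parameter list trimmed for brevity; the real signature also takes mount/working directories, environment variables and secrets):

// A minimal provider satisfying the new contract; the real implementations in this commit
// (AWS, Kubernetes) resolve with the logs streamed from the remote task.
class EchoProvider {
  async runTask(buildGuid: string, image: string, commands: string): Promise<string> {
    return `[echo] ${buildGuid} ${image}\n${commands}\n`;
  }
}

// Callers can now inspect what the task printed, e.g.:
// const output = await new EchoProvider().runTask('guid', 'alpine/git', 'echo hello');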
@@ -7,7 +7,7 @@ import { StepInterface } from './step-interface';

export class BuildStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
await BuildStep.BuildStep(
return await BuildStep.BuildStep(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,

@@ -20,7 +20,7 @@ export class BuildStep implements StepInterface {
secrets: CloudRunnerSecret[],
) {
CloudRunnerLogger.logLine('Starting part 2/2 (build unity project)');
await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
`
@@ -8,7 +8,7 @@ import { StepInterface } from './step-interface';
export class SetupStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
await SetupStep.downloadRepository(
return await SetupStep.downloadRepository(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,

@@ -25,7 +25,7 @@ export class SetupStep implements StepInterface {
) {
try {
CloudRunnerLogger.logLine('Starting step 1/2 download game files from repository, try to use cache');
await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
`
@@ -10,7 +10,7 @@ import { WorkflowInterface } from './workflow-interface';
export class BuildAutomationWorkflow implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
return await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
} catch (error) {
throw error;
}

@@ -19,8 +19,8 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
private static async standardBuildAutomation(baseImage: any) {
try {
CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);

await new SetupStep().run(
let output = '';
output += await new SetupStep().run(
new CloudRunnerStepState(
'alpine/git',
TaskParameterSerializer.readBuildEnvironmentVariables(),

@@ -29,11 +29,11 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
);
CloudRunnerLogger.logWithTime('Download repository step time');
if (CloudRunnerState.buildParams.preBuildSteps !== '') {
await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
}
CloudRunnerLogger.logWithTime('Pre build step(s) time');

await new BuildStep().run(
output += await new BuildStep().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),

@@ -43,11 +43,13 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
CloudRunnerLogger.logWithTime('Build time');

if (CloudRunnerState.buildParams.postBuildSteps !== '') {
await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
}
CloudRunnerLogger.logWithTime('Post build step(s) time');

CloudRunnerLogger.log(`Cloud Runner finished running standard build automation`);

return output;
} catch (error) {
throw error;
}
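Editor's sketch, not from the diff: the workflow above concatenates the string returned by each step, which is how a single combined log ends up back at CloudRunner.run for the test to assert on. The accumulation pattern in isolation:

// Run a sequence of steps and concatenate whatever each one logs.
async function collectStepOutputs(steps: Array<() => Promise<string>>): Promise<string> {
  let output = '';
  for (const step of steps) {
    output += await step();
  }
  return output;
}

// Usage shape mirroring standardBuildAutomation: setup, optional pre-build, build, optional post-build.
// const output = await collectStepOutputs([runSetup, runPreBuild, runBuild, runPostBuild]);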
@@ -11,6 +11,7 @@ export class CustomWorkflow {
CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
try {
buildSteps = YAML.parse(buildSteps);
let output = '';
for (const step of buildSteps) {
const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
const secret: CloudRunnerSecret = {

@@ -20,7 +21,7 @@ export class CustomWorkflow {
};
return secret;
});
await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
output += await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
step['image'],
step['commands'],

@@ -30,6 +31,7 @@ export class CustomWorkflow {
[...CloudRunnerState.defaultSecrets, ...stepSecrets],
);
}
return output;
} catch (error) {
CloudRunnerLogger.log(`failed to parse a custom job "${buildSteps}"`);
throw error;
@@ -8,7 +8,7 @@ import { WorkflowInterface } from './workflow-interface';
export class EphemeralGitHubRunnerWorkflow implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
await EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(
return await EphemeralGitHubRunnerWorkflow.runJobAsEphemeralGitHubRunner(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,

@@ -27,7 +27,7 @@ export class EphemeralGitHubRunnerWorkflow implements WorkflowInterface {
CloudRunnerLogger.log(`Cloud Runner is running in ephemeral GitHub runner mode`);
const installAndStartRunner =
' cd ../.. && ls && mkdir actions-runner && cd actions-runner && curl -O -L https://github.com/actions/runner/releases/download/v2.283.1/actions-runner-linux-x64-2.283.1.tar.gz && tar xzf ./actions-runner-linux-x64-2.283.1.tar.gz';
await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
CloudRunnerState.buildParams.buildGuid,
image,
installAndStartRunner,
@@ -11,7 +11,7 @@ import { TaskParameterSerializer } from '../services/task-parameter-serializer';
export class WorkflowCompositionRoot implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
return await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
} catch (error) {
throw error;
}

@@ -21,7 +21,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
try {
CloudRunnerLogger.log(`Workflow specified: ${CloudRunnerState.buildParams.customJob}`);
if (CloudRunnerState.buildParams.customJob === '') {
await new BuildAutomationWorkflow().run(
return await new BuildAutomationWorkflow().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),

@@ -29,7 +29,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else if (CloudRunnerState.buildParams.customJob === 'ephemeral') {
await new EphemeralGitHubRunnerWorkflow().run(
return await new EphemeralGitHubRunnerWorkflow().run(
new CloudRunnerStepState(
baseImage,
TaskParameterSerializer.readBuildEnvironmentVariables(),

@@ -37,7 +37,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else if (CloudRunnerState.buildParams.customJob === 'download') {
await new SetupStep().run(
return await new SetupStep().run(
new CloudRunnerStepState(
'alpine/git',
TaskParameterSerializer.readBuildEnvironmentVariables(),

@@ -45,7 +45,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else {
await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
return await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
}
} catch (error) {
throw error;