Converting caching from shell to TypeScript

pull/310/head
Frostebite 2021-12-24 01:58:22 +00:00
parent 584aa65154
commit 9079429c8d
7 changed files with 192 additions and 216 deletions
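
The conversion follows one pattern throughout the diff: shell that only created directories, deleted folders, or echoed status is lifted out of the RunCli heredocs and expressed directly with Node's fs module and CloudRunnerLogger, while work that genuinely needs a shell (git, zip, tree, ls) stays inside RunCli calls. A minimal sketch of that pattern, using plain fs, execSync, and console.log as stand-ins for the project's CloudRunnerState paths, RunCli wrapper, and logger (the names and paths below are placeholders, not the project's API):

import fs from 'fs';
import { execSync } from 'child_process';

// Placeholder paths; in the commit these come from CloudRunnerState.
const repoPathFull = '/data/repo';
const libraryFolderFull = `${repoPathFull}/Library`;

// Before: `mkdir -p ${repoPathFull}` inside a RunCli heredoc.
// After: create the directory from TypeScript ({ recursive: true } mirrors mkdir -p).
fs.mkdirSync(repoPathFull, { recursive: true });

// Before: an `if [ -d ... ]; then rm -r ...; fi` block in the heredoc.
// After: the same check-and-delete expressed with fs calls.
if (fs.existsSync(libraryFolderFull)) {
  fs.rmdirSync(libraryFolderFull, { recursive: true });
  console.log(`Warning: the Unity Library folder was committed to the repository`);
}

// Work that really needs a shell still runs as a command; execSync stands in here
// for the RunCli.RunCli wrapper, whose resolved value the new code also uses to
// capture command output (see latestLibraryCacheFile further down in the diff).
const newestZip = execSync(`ls -t "${repoPathFull}" | grep .zip$ | head -1`).toString().trim();
console.log(`newest cache archive: ${newestZip}`);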

dist/index.js (vendored, 122 changed lines)

@@ -2005,22 +2005,22 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DownloadRepository = void 0;
const cloud_runner_state_1 = __webpack_require__(70912);
const run_cli_1 = __webpack_require__(33639);
const fs_1 = __importDefault(__webpack_require__(35747));
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
class DownloadRepository {
static run() {
return __awaiter(this, void 0, void 0, function* () {
yield run_cli_1.RunCli.RunCli(`
tree -f -L 2tree -f -L 2
echo "test"
mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}
mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
echo ' '
echo 'Initializing source repository for cloning with caching of LFS files'
githubSha=$GITHUB_SHA
`);
yield run_cli_1.RunCli.RunCli(`tree -f -L 2tree -f -L 2`);
fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.buildPathFull);
fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.repoPathFull);
cloud_runner_logger_1.default.log(`Initializing source repository for cloning with caching of LFS files`);
yield run_cli_1.RunCli.RunCli(`
cd ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
# stop annoying git detached head info
@@ -2030,16 +2030,14 @@ class DownloadRepository {
git lfs install --skip-smudge
echo "${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl}"
git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
git checkout $githubSha
echo "Checked out $githubSha"
git checkout ${process.env.GITHUB_SHA}
echo "Checked out ${process.env.GITHUB_SHA}"
`);
yield run_cli_1.RunCli.RunCli(`
git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
md5sum .lfs-assets-guid > .lfs-assets-guid-sum
`);
yield run_cli_1.RunCli.RunCli(`
export LFS_ASSETS_HASH="$(cat ${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.lfs-assets-guid)"
`);
const LFS_ASSETS_HASH = fs_1.default.readFileSync(`${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.lfs-assets-guid`, 'utf8');
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo 'Contents of .lfs-assets-guid file:'
@@ -2054,40 +2052,35 @@ class DownloadRepository {
`);
const lfsCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lfs`;
const libraryCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lib`;
yield run_cli_1.RunCli.RunCli(`
tree ${cloud_runner_state_1.CloudRunnerState.builderPathFull}
echo 'Starting checks of cache for the Unity project Library and git LFS files'
mkdir -p "${lfsCacheFolder}"
mkdir -p "${libraryCacheFolder}"
echo 'Library Caching'
`);
yield run_cli_1.RunCli.RunCli(`
# if the unity git project has included the library delete it and echo a warning
if [ -d "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}" ]; then
rm -r "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
echo "!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)"
fi
`);
yield run_cli_1.RunCli.RunCli(`
# Restore library cache
ls -lh "${libraryCacheFolder}"
latestLibraryCacheFile=$(ls -t "${libraryCacheFolder}" | grep .zip$ | head -1)
echo "Checking if Library cache ${libraryCacheFolder}/$latestLibraryCacheFile exists"
cd ${libraryCacheFolder}
if [ -f "$latestLibraryCacheFile" ]; then
echo "Library cache exists"
unzip -q "${libraryCacheFolder}/$latestLibraryCacheFile" -d "$projectPathFull"
tree "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
fi
yield run_cli_1.RunCli.RunCli(`tree ${cloud_runner_state_1.CloudRunnerState.builderPathFull}`);
cloud_runner_logger_1.default.log(`Starting checks of cache for the Unity project Library and git LFS files`);
fs_1.default.mkdirSync(lfsCacheFolder);
fs_1.default.mkdirSync(libraryCacheFolder);
cloud_runner_logger_1.default.log(`Library Caching`);
//if the unity git project has included the library delete it and echo a warning
if (fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull, { recursive: true });
cloud_runner_logger_1.default.log(`!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`);
}
//Restore library cache
const latestLibraryCacheFile = yield run_cli_1.RunCli.RunCli(`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`);
yield run_cli_1.RunCli.RunCli(`ls -lh "${libraryCacheFolder}"`);
cloud_runner_logger_1.default.log(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
if (fs_1.default.existsSync(latestLibraryCacheFile)) {
cloud_runner_logger_1.default.log(`Library cache exists`);
yield run_cli_1.RunCli.RunCli(`
unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
tree "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
`);
}
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo 'Large File Caching'
echo "Checking large file cache exists (${lfsCacheFolder}/$LFS_ASSETS_HASH.zip)"
echo "Checking large file cache exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)"
cd ${lfsCacheFolder}
if [ -f "$LFS_ASSETS_HASH.zip" ]; then
echo "Match found: using large file hash match $LFS_ASSETS_HASH.zip"
latestLFSCacheFile="$LFS_ASSETS_HASH"
if [ -f "${LFS_ASSETS_HASH}.zip" ]; then
echo "Match found: using large file hash match ${LFS_ASSETS_HASH}.zip"
latestLFSCacheFile="${LFS_ASSETS_HASH}"
else
latestLFSCacheFile=$(ls -t "${lfsCacheFolder}" | grep .zip$ | head -1)
echo "Match not found: using latest large file cache $latestLFSCacheFile"
@@ -2122,19 +2115,14 @@ class DownloadRepository {
`);
yield run_cli_1.RunCli.RunCli(`
cd "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/.."
zip -q -r "$LFS_ASSETS_HASH.zip" "./lfs"
cp "$LFS_ASSETS_HASH.zip" "${lfsCacheFolder}"
echo "copied $LFS_ASSETS_HASH to ${lfsCacheFolder}"
zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
`);
yield run_cli_1.RunCli.RunCli(`
# purge cache
if [ -z "${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}" ]; then
echo ' '
echo "purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}"
rm -r "${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}"
echo ' '
fi
`);
if (process.env.purgeRemoteCaching !== undefined) {
cloud_runner_logger_1.default.log(`purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}`);
fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.cacheFolder, { recursive: true });
}
});
}
}
@@ -2570,7 +2558,7 @@ class BuildStep {
}
static BuildStep(image, environmentVariables, secrets) {
return __awaiter(this, void 0, void 0, function* () {
cloud_runner_logger_1.default.logLine('Starting part 2/4 (build unity project)');
cloud_runner_logger_1.default.logLine('Starting part 2/2 (build unity project)');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
`
export GITHUB_WORKSPACE="${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
@@ -2599,7 +2587,7 @@ exports.BuildStep = BuildStep;
/***/ }),
/***/ 12929:
/***/ 10359:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
@@ -2617,24 +2605,24 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DownloadRepositoryStep = void 0;
exports.DownloadStep = void 0;
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const cloud_runner_state_1 = __webpack_require__(70912);
class DownloadRepositoryStep {
class DownloadStep {
run(cloudRunnerStepState) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield DownloadRepositoryStep.downloadRepositoryStep(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
yield DownloadStep.downloadRepository(cloudRunnerStepState.image, cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
}
catch (error) {
throw error;
}
});
}
static downloadRepositoryStep(image, environmentVariables, secrets) {
static downloadRepository(image, environmentVariables, secrets) {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.logLine('Starting step 1/4 clone and restore cache');
cloud_runner_logger_1.default.logLine('Starting step 1/2 download game files from repository, try to use cache');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
`
apk update -q
@@ -2655,7 +2643,7 @@ class DownloadRepositoryStep {
});
}
}
exports.DownloadRepositoryStep = DownloadRepositoryStep;
exports.DownloadStep = DownloadStep;
/***/ }),
@@ -2683,7 +2671,7 @@ const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_step_state_1 = __webpack_require__(64854);
const build_step_1 = __webpack_require__(91491);
const download_repository_step_1 = __webpack_require__(12929);
const download_step_1 = __webpack_require__(10359);
const custom_workflow_1 = __webpack_require__(3786);
class BuildAutomationWorkflow {
run(cloudRunnerStepState) {
@@ -2700,7 +2688,7 @@ class BuildAutomationWorkflow {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.log(`Cloud Runner is running standard build automation`);
yield new download_repository_step_1.DownloadRepositoryStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
yield new download_step_1.DownloadStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
cloud_runner_logger_1.default.logWithTime('Download repository step time');
if (cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps !== '') {
yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.preBuildSteps);
@@ -2869,7 +2857,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.WorkflowCompositionRoot = void 0;
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_step_state_1 = __webpack_require__(64854);
const download_repository_step_1 = __webpack_require__(12929);
const download_step_1 = __webpack_require__(10359);
const custom_workflow_1 = __webpack_require__(3786);
const ephemeral_github_runner_workflow_1 = __webpack_require__(37092);
const core = __importStar(__webpack_require__(42186));
@@ -2897,7 +2885,7 @@ class WorkflowCompositionRoot {
yield new ephemeral_github_runner_workflow_1.EphemeralGitHubRunnerWorkflow().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else if (cloud_runner_state_1.CloudRunnerState.buildParams.customBuildSteps === 'download') {
yield new download_repository_step_1.DownloadRepositoryStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
yield new download_step_1.DownloadStep().run(new cloud_runner_step_state_1.CloudRunnerStepState('alpine/git', cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
}
else {
yield custom_workflow_1.CustomWorkflow.runCustomJob(cloud_runner_state_1.CloudRunnerState.buildParams.customBuildSteps);
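
In the hunks above, the exported shell variable LFS_ASSETS_HASH disappears: the list written by `git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid` is now read back with fs.readFileSync and spliced into the later shell snippets via template literals. A small sketch of that read-and-interpolate step, under the same assumptions as the sketch above (placeholder paths, execSync standing in for RunCli); note that readFileSync returns the file verbatim, so a trailing newline from the shell redirection survives unless it is trimmed:

import fs from 'fs';
import { execSync } from 'child_process';

// Placeholders for CloudRunnerState.repoPathFull and the lfs cache folder.
const repoPathFull = '/data/repo';
const lfsCacheFolder = '/data/cache/lfs';

// Read the guid list produced by the earlier shell step instead of exporting
// LFS_ASSETS_HASH inside the shell; trim the trailing newline before using it
// as part of a file name.
const lfsAssetsHash = fs.readFileSync(`${repoPathFull}/.lfs-assets-guid`, 'utf8').trim();

// Interpolate the TypeScript value into the next shell step, as the new code does.
execSync(`
  cd "${lfsCacheFolder}"
  if [ -f "${lfsAssetsHash}.zip" ]; then
    echo "Match found: using large file hash match ${lfsAssetsHash}.zip"
  fi
`);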

dist/index.js.map (vendored, 2 changed lines)
File diff suppressed because one or more lines are too long

download-repository.ts

@@ -1,17 +1,15 @@
import { CloudRunnerState } from '../../state/cloud-runner-state';
import { RunCli } from '../run-cli';
import fs from 'fs';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
export class DownloadRepository {
public static async run() {
await RunCli.RunCli(`
tree -f -L 2tree -f -L 2
echo "test"
mkdir -p ${CloudRunnerState.buildPathFull}
mkdir -p ${CloudRunnerState.repoPathFull}
echo ' '
echo 'Initializing source repository for cloning with caching of LFS files'
githubSha=$GITHUB_SHA
`);
await RunCli.RunCli(`tree -f -L 2tree -f -L 2`);
fs.mkdirSync(CloudRunnerState.buildPathFull);
fs.mkdirSync(CloudRunnerState.repoPathFull);
CloudRunnerLogger.log(`Initializing source repository for cloning with caching of LFS files`);
await RunCli.RunCli(`
cd ${CloudRunnerState.repoPathFull}
# stop annoying git detached head info
@@ -21,16 +19,14 @@ export class DownloadRepository {
git lfs install --skip-smudge
echo "${CloudRunnerState.targetBuildRepoUrl}"
git clone ${CloudRunnerState.targetBuildRepoUrl} ${CloudRunnerState.repoPathFull}
git checkout $githubSha
echo "Checked out $githubSha"
git checkout ${process.env.GITHUB_SHA}
echo "Checked out ${process.env.GITHUB_SHA}"
`);
await RunCli.RunCli(`
git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
md5sum .lfs-assets-guid > .lfs-assets-guid-sum
`);
await RunCli.RunCli(`
export LFS_ASSETS_HASH="$(cat ${CloudRunnerState.repoPathFull}/.lfs-assets-guid)"
`);
const LFS_ASSETS_HASH = fs.readFileSync(`${CloudRunnerState.repoPathFull}/.lfs-assets-guid`, 'utf8');
await RunCli.RunCli(`
echo ' '
echo 'Contents of .lfs-assets-guid file:'
@@ -45,40 +41,37 @@ export class DownloadRepository {
`);
const lfsCacheFolder = `${CloudRunnerState.cacheFolderFull}/lfs`;
const libraryCacheFolder = `${CloudRunnerState.cacheFolderFull}/lib`;
await RunCli.RunCli(`
tree ${CloudRunnerState.builderPathFull}
echo 'Starting checks of cache for the Unity project Library and git LFS files'
mkdir -p "${lfsCacheFolder}"
mkdir -p "${libraryCacheFolder}"
echo 'Library Caching'
`);
await RunCli.RunCli(`
# if the unity git project has included the library delete it and echo a warning
if [ -d "${CloudRunnerState.libraryFolderFull}" ]; then
rm -r "${CloudRunnerState.libraryFolderFull}"
echo "!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)"
fi
`);
await RunCli.RunCli(`
# Restore library cache
ls -lh "${libraryCacheFolder}"
latestLibraryCacheFile=$(ls -t "${libraryCacheFolder}" | grep .zip$ | head -1)
echo "Checking if Library cache ${libraryCacheFolder}/$latestLibraryCacheFile exists"
cd ${libraryCacheFolder}
if [ -f "$latestLibraryCacheFile" ]; then
echo "Library cache exists"
unzip -q "${libraryCacheFolder}/$latestLibraryCacheFile" -d "$projectPathFull"
tree "${CloudRunnerState.libraryFolderFull}"
fi
await RunCli.RunCli(`tree ${CloudRunnerState.builderPathFull}`);
CloudRunnerLogger.log(`Starting checks of cache for the Unity project Library and git LFS files`);
fs.mkdirSync(lfsCacheFolder);
fs.mkdirSync(libraryCacheFolder);
CloudRunnerLogger.log(`Library Caching`);
//if the unity git project has included the library delete it and echo a warning
if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
fs.rmdirSync(CloudRunnerState.libraryFolderFull, { recursive: true });
CloudRunnerLogger.log(
`!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`,
);
}
//Restore library cache
const latestLibraryCacheFile = await RunCli.RunCli(`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`);
await RunCli.RunCli(`ls -lh "${libraryCacheFolder}"`);
CloudRunnerLogger.log(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
if (fs.existsSync(latestLibraryCacheFile)) {
CloudRunnerLogger.log(`Library cache exists`);
await RunCli.RunCli(`
unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
tree "${CloudRunnerState.libraryFolderFull}"
`);
}
await RunCli.RunCli(`
echo ' '
echo 'Large File Caching'
echo "Checking large file cache exists (${lfsCacheFolder}/$LFS_ASSETS_HASH.zip)"
echo "Checking large file cache exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)"
cd ${lfsCacheFolder}
if [ -f "$LFS_ASSETS_HASH.zip" ]; then
echo "Match found: using large file hash match $LFS_ASSETS_HASH.zip"
latestLFSCacheFile="$LFS_ASSETS_HASH"
if [ -f "${LFS_ASSETS_HASH}.zip" ]; then
echo "Match found: using large file hash match ${LFS_ASSETS_HASH}.zip"
latestLFSCacheFile="${LFS_ASSETS_HASH}"
else
latestLFSCacheFile=$(ls -t "${lfsCacheFolder}" | grep .zip$ | head -1)
echo "Match not found: using latest large file cache $latestLFSCacheFile"
@@ -113,18 +106,13 @@ export class DownloadRepository {
`);
await RunCli.RunCli(`
cd "${CloudRunnerState.lfsDirectory}/.."
zip -q -r "$LFS_ASSETS_HASH.zip" "./lfs"
cp "$LFS_ASSETS_HASH.zip" "${lfsCacheFolder}"
echo "copied $LFS_ASSETS_HASH to ${lfsCacheFolder}"
zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
`);
await RunCli.RunCli(`
# purge cache
if [ -z "${CloudRunnerState.purgeRemoteCaching}" ]; then
echo ' '
echo "purging ${CloudRunnerState.purgeRemoteCaching}"
rm -r "${CloudRunnerState.purgeRemoteCaching}"
echo ' '
fi
`);
if (process.env.purgeRemoteCaching !== undefined) {
CloudRunnerLogger.log(`purging ${CloudRunnerState.purgeRemoteCaching}`);
fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
}
}
}
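
Most of the shell that remains in the new version exists to capture command output, such as `ls -t "${libraryCacheFolder}" | grep .zip$ | head -1` feeding latestLibraryCacheFile above. Were the conversion taken one step further, the same lookup could be done without a shell at all. This is a sketch of that alternative, not what the commit does: it picks the newest .zip in the cache folder with fs.readdirSync and fs.statSync (the folder name is a placeholder):

import fs from 'fs';
import path from 'path';

// Placeholder for the commit's `${CloudRunnerState.cacheFolderFull}/lib` folder.
const libraryCacheFolder = '/data/cache/lib';

// Pure-fs equivalent of `ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`:
// the most recently modified .zip archive, or undefined when the cache is empty.
function newestLibraryCache(folder: string): string | undefined {
  return fs
    .readdirSync(folder)
    .filter((name) => name.endsWith('.zip'))
    .map((name) => ({ name, mtime: fs.statSync(path.join(folder, name)).mtimeMs }))
    .sort((a, b) => b.mtime - a.mtime)
    .map((entry) => entry.name)[0];
}

const latestLibraryCacheFile = newestLibraryCache(libraryCacheFolder);
if (latestLibraryCacheFile !== undefined) {
  // Build the full path before checking or unzipping the archive.
  console.log(`Library cache exists: ${path.join(libraryCacheFolder, latestLibraryCacheFile)}`);
}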

steps/build-step.ts

@@ -19,7 +19,7 @@ export class BuildStep implements StepInterface {
environmentVariables: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
) {
CloudRunnerLogger.logLine('Starting part 2/4 (build unity project)');
CloudRunnerLogger.logLine('Starting part 2/2 (build unity project)');
await CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(
CloudRunnerState.buildGuid,
image,

steps/download-step.ts (renamed from steps/download-repository-step.ts)

@@ -5,10 +5,10 @@ import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { StepInterface } from './step-interface';
export class DownloadRepositoryStep implements StepInterface {
export class DownloadStep implements StepInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
await DownloadRepositoryStep.downloadRepositoryStep(
await DownloadStep.downloadRepository(
cloudRunnerStepState.image,
cloudRunnerStepState.environment,
cloudRunnerStepState.secrets,
@@ -18,13 +18,13 @@ export class DownloadRepositoryStep implements StepInterface {
}
}
private static async downloadRepositoryStep(
private static async downloadRepository(
image: string,
environmentVariables: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
) {
try {
CloudRunnerLogger.logLine('Starting step 1/4 clone and restore cache');
CloudRunnerLogger.logLine('Starting step 1/2 download game files from repository, try to use cache');
await CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(
CloudRunnerState.buildGuid,
image,

workflows/build-automation-workflow.ts

@@ -2,7 +2,7 @@ import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { BuildStep } from '../steps/build-step';
import { DownloadRepositoryStep } from '../steps/download-repository-step';
import { DownloadStep } from '../steps/download-step';
import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
@@ -19,7 +19,7 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
try {
CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);
await new DownloadRepositoryStep().run(
await new DownloadStep().run(
new CloudRunnerStepState(
'alpine/git',
CloudRunnerState.readBuildEnvironmentVariables(),

workflows/workflow-composition-root.ts

@@ -1,6 +1,6 @@
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { DownloadRepositoryStep } from '../steps/download-repository-step';
import { DownloadStep } from '../steps/download-step';
import { CustomWorkflow } from './custom-workflow';
import { EphemeralGitHubRunnerWorkflow } from './ephemeral-github-runner-workflow';
import { WorkflowInterface } from './workflow-interface';
@@ -43,7 +43,7 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
),
);
} else if (CloudRunnerState.buildParams.customBuildSteps === 'download') {
await new DownloadRepositoryStep().run(
await new DownloadStep().run(
new CloudRunnerStepState(
'alpine/git',
CloudRunnerState.readBuildEnvironmentVariables(),
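
The hunk above is cut off by the page. For reference, the bundled dist/index.js diff earlier in this commit shows the complete call that this truncated TypeScript corresponds to; unbundled, and using the imports already visible at the top of this file, it reads:

import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { DownloadStep } from '../steps/download-step';

// Inside the async run method of WorkflowCompositionRoot, when
// customBuildSteps === 'download':
await new DownloadStep().run(
  new CloudRunnerStepState(
    'alpine/git',
    CloudRunnerState.readBuildEnvironmentVariables(),
    CloudRunnerState.defaultSecrets,
  ),
);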