test

parent 6614752414
commit 75fa4dad95
@@ -495,19 +495,19 @@ class RemoteClientSystem {
         let output = '';
         const child = child_process_1.exec(command, (error, stdout, stderr) => {
             if (error) {
-                cloud_runner_logger_1.default.logRemoteCli(`[ERROR] ${error.message}`);
+                cloud_runner_logger_1.default.logCli(`[ERROR] ${error.message}`);
                 throw new Error(error.toString());
             }
             if (stderr) {
-                cloud_runner_logger_1.default.logRemoteCli(`[DIAGNOSTIC] ${stderr.toString()}`);
+                cloud_runner_logger_1.default.logCli(`[DIAGNOSTIC] ${stderr.toString()}`);
                 return;
             }
             const outputChunk = `${stdout.toString()}`;
-            cloud_runner_logger_1.default.logRemoteCli(outputChunk);
+            cloud_runner_logger_1.default.logCli(outputChunk);
             output += outputChunk;
         });
         child.on('close', function (code) {
-            cloud_runner_logger_1.default.logRemoteCli(`[exit ${code}]`);
+            cloud_runner_logger_1.default.logCli(`[exit ${code}]`);
             if (code !== 0) {
                 throw new Error(output);
             }
@@ -603,7 +603,7 @@ class SetupRemoteRepository {
     }
     static handleCachePurging() {
         if (process.env.purgeRemoteCaching !== undefined) {
-            cloud_runner_logger_1.default.logRemoteCli(`purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}`);
+            cloud_runner_logger_1.default.logCli(`purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}`);
            fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.cacheFolder, { recursive: true });
        }
    }
@@ -611,62 +611,62 @@ class SetupRemoteRepository {
         return __awaiter(this, void 0, void 0, function* () {
             process.chdir(`${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/..`);
             yield remote_client_system_1.RemoteClientSystem.Run(`zip -r "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "lfs"`);
-            cloud_runner_logger_1.default.logRemoteCli(fs_1.default.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
+            cloud_runner_logger_1.default.logCli(fs_1.default.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
             yield remote_client_system_1.RemoteClientSystem.Run(`cp "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "${path_1.default.join(lfsCacheFolder, `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)}"`);
-            cloud_runner_logger_1.default.logRemoteCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
+            cloud_runner_logger_1.default.logCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
         });
     }
     static pullLatestLFS() {
         return __awaiter(this, void 0, void 0, function* () {
             process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
             yield remote_client_system_1.RemoteClientSystem.Run(`git lfs pull`);
-            cloud_runner_logger_1.default.logRemoteCli(`pulled latest LFS files`);
+            cloud_runner_logger_1.default.logCli(`pulled latest LFS files`);
         });
     }
     static lfsCaching(lfsCacheFolder) {
         return __awaiter(this, void 0, void 0, function* () {
-            cloud_runner_logger_1.default.logRemoteCli(` `);
-            cloud_runner_logger_1.default.logRemoteCli(`LFS Caching`);
+            cloud_runner_logger_1.default.logCli(` `);
+            cloud_runner_logger_1.default.logCli(`LFS Caching`);
             process.chdir(lfsCacheFolder);
             let latestLFSCacheFile;
             if (fs_1.default.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)) {
-                cloud_runner_logger_1.default.logRemoteCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
+                cloud_runner_logger_1.default.logCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
                 latestLFSCacheFile = `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`;
             }
             else {
                 latestLFSCacheFile = yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
             }
             if (fs_1.default.existsSync(latestLFSCacheFile)) {
-                cloud_runner_logger_1.default.logRemoteCli(`LFS cache exists`);
+                cloud_runner_logger_1.default.logCli(`LFS cache exists`);
                 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.lfsDirectory, { recursive: true });
-                cloud_runner_logger_1.default.logRemoteCli(`LFS cache exists from build ${latestLFSCacheFile} from ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
+                cloud_runner_logger_1.default.logCli(`LFS cache exists from build ${latestLFSCacheFile} from ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
                 yield remote_client_system_1.RemoteClientSystem.Run(`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.git`)}"`);
                 yield remote_client_system_1.RemoteClientSystem.Run(`ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"`);
-                cloud_runner_logger_1.default.logRemoteCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
+                cloud_runner_logger_1.default.logCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
             }
         });
     }
     static libraryCaching(lfsCacheFolder, libraryCacheFolder) {
         return __awaiter(this, void 0, void 0, function* () {
-            cloud_runner_logger_1.default.logRemoteCli(`Starting checks of cache for the Unity project Library and git LFS files`);
+            cloud_runner_logger_1.default.logCli(`Starting checks of cache for the Unity project Library and git LFS files`);
             if (!fs_1.default.existsSync(lfsCacheFolder)) {
                 fs_1.default.mkdirSync(lfsCacheFolder);
             }
             if (!fs_1.default.existsSync(libraryCacheFolder)) {
                 fs_1.default.mkdirSync(libraryCacheFolder);
             }
-            cloud_runner_logger_1.default.logRemoteCli(`Library Caching`);
+            cloud_runner_logger_1.default.logCli(`Library Caching`);
             //if the unity git project has included the library delete it and echo a warning
             if (fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
                 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull, { recursive: true });
-                cloud_runner_logger_1.default.logRemoteCli(`!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`);
+                cloud_runner_logger_1.default.logCli(`!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`);
             }
             //Restore library cache
             const latestLibraryCacheFile = yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`);
             yield remote_client_system_1.RemoteClientSystem.Run(`ls -lh "${libraryCacheFolder}"`);
-            cloud_runner_logger_1.default.logRemoteCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
+            cloud_runner_logger_1.default.logCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
             if (fs_1.default.existsSync(latestLibraryCacheFile)) {
-                cloud_runner_logger_1.default.logRemoteCli(`Library cache exists`);
+                cloud_runner_logger_1.default.logCli(`Library cache exists`);
                 const latestCacheFilePath = path_1.default.join(libraryCacheFolder, latestLibraryCacheFile);
                 yield remote_client_system_1.RemoteClientSystem.Run(`unzip -q "${latestCacheFilePath}" -d "$projectPathFull"`);
             }
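A note on the generated code in the hunk above: the `__awaiter`/`function* () { yield ... }` shape is what the TypeScript compiler emits for `async/await` when the build targets a runtime without native async functions. The hand-written source for these same methods appears later in this diff; the correspondence is roughly:

    // TypeScript source (as written later in this diff):
    //   private static async pullLatestLFS() {
    //     await RemoteClientSystem.Run(`git lfs pull`);
    //   }
    // Compiled output (as shown in the hunk above):
    //   static pullLatestLFS() {
    //     return __awaiter(this, void 0, void 0, function* () {
    //       yield remote_client_system_1.RemoteClientSystem.Run(`git lfs pull`);
    //     });
    //   }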
@@ -677,24 +677,24 @@ class SetupRemoteRepository {
             yield remote_client_system_1.RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
             yield remote_client_system_1.RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
             SetupRemoteRepository.LFS_ASSETS_HASH = fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
-            cloud_runner_logger_1.default.logRemoteCli(SetupRemoteRepository.LFS_ASSETS_HASH);
+            cloud_runner_logger_1.default.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
         });
     }
     static cloneRepoWithoutLFSFiles() {
         return __awaiter(this, void 0, void 0, function* () {
             try {
-                cloud_runner_logger_1.default.logRemoteCli(`Initializing source repository for cloning with caching of LFS files`);
+                cloud_runner_logger_1.default.logCli(`Initializing source repository for cloning with caching of LFS files`);
                 process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
                 yield remote_client_system_1.RemoteClientSystem.Run(`git config --global advice.detachedHead false`);
-                cloud_runner_logger_1.default.logRemoteCli(`Cloning the repository being built:`);
+                cloud_runner_logger_1.default.logCli(`Cloning the repository being built:`);
                 yield remote_client_system_1.RemoteClientSystem.Run(`git lfs install --skip-smudge`);
-                cloud_runner_logger_1.default.logRemoteCli(cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl);
+                cloud_runner_logger_1.default.logCli(cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl);
                 yield remote_client_system_1.RemoteClientSystem.Run(`git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
                 yield remote_client_system_1.RemoteClientSystem.Run(`ls -lh`);
                 yield remote_client_system_1.RemoteClientSystem.Run(`tree`);
-                cloud_runner_logger_1.default.logRemoteCli(`${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
+                cloud_runner_logger_1.default.logCli(`${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
                 yield remote_client_system_1.RemoteClientSystem.Run(`git checkout ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
-                cloud_runner_logger_1.default.logRemoteCli(`Checked out ${process.env.GITHUB_SHA}`);
+                cloud_runner_logger_1.default.logCli(`Checked out ${process.env.GITHUB_SHA}`);
             }
             catch (error) {
                 throw error;
@@ -1468,7 +1468,6 @@ class AWSBuildEnvironment {
         });
     }
     cleanupResources(CF, taskDef) {
         var _a;
         return __awaiter(this, void 0, void 0, function* () {
             cloud_runner_logger_1.default.log('Cleanup starting');
             yield CF.deleteStack({
@@ -1483,9 +1482,7 @@ class AWSBuildEnvironment {
             yield CF.waitFor('stackDeleteComplete', {
                 StackName: taskDef.taskDefStackNameTTL,
             }).promise();
             const stacks = (_a = (yield CF.listStacks().promise()).StackSummaries) === null || _a === void 0 ? void 0 : _a.filter((x) => x.StackStatus !== 'DELETE_COMPLETE');
             cloud_runner_logger_1.default.log(`Deleted Stacks: ${taskDef.taskDefStackName}, ${taskDef.taskDefStackNameTTL}`);
             cloud_runner_logger_1.default.log(`Stacks: ${JSON.stringify(stacks, undefined, 4)}`);
             cloud_runner_logger_1.default.log('Cleanup complete');
         });
     }
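The verbose `(_a = ...) === null || _a === void 0 ? void 0 : _a.filter(...)` expression in the hunk above is the compiler's down-leveled form of optional chaining; the TypeScript source later in this diff writes the same line as:

    // const stacks = (await CF.listStacks().promise()).StackSummaries?.filter(
    //   (x) => x.StackStatus !== 'DELETE_COMPLETE',
    // );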
@@ -2382,7 +2379,7 @@ class CloudRunnerLogger {
     static log(message) {
         core.info(message);
     }
-    static logRemoteCli(message) {
+    static logCli(message) {
         CloudRunnerLogger.log(`[CLI] ${message}`);
     }
     static logLine(message) {
File diff suppressed because one or more lines are too long
@@ -7,19 +7,19 @@ export class RemoteClientSystem {
     let output = '';
     const child = exec(command, (error, stdout, stderr) => {
       if (error) {
-        CloudRunnerLogger.logRemoteCli(`[ERROR] ${error.message}`);
+        CloudRunnerLogger.logCli(`[ERROR] ${error.message}`);
         throw new Error(error.toString());
       }
       if (stderr) {
-        CloudRunnerLogger.logRemoteCli(`[DIAGNOSTIC] ${stderr.toString()}`);
+        CloudRunnerLogger.logCli(`[DIAGNOSTIC] ${stderr.toString()}`);
         return;
       }
       const outputChunk = `${stdout.toString()}`;
-      CloudRunnerLogger.logRemoteCli(outputChunk);
+      CloudRunnerLogger.logCli(outputChunk);
       output += outputChunk;
     });
     child.on('close', function (code) {
-      CloudRunnerLogger.logRemoteCli(`[exit ${code}]`);
+      CloudRunnerLogger.logCli(`[exit ${code}]`);
       if (code !== 0) {
         throw new Error(output);
       }
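A minimal sketch, not part of this commit, of how an exec-based Run helper is usually made awaitable. The name runOnce is hypothetical; child_process.exec is the real Node.js API used in the hunk above. Worth noting: a throw inside the exec callback (as in the code above) escapes as an uncaught exception rather than reaching a try/catch around the caller's await, whereas a promise rejection does propagate.

    import { exec } from 'child_process';

    function runOnce(command: string): Promise<string> {
      return new Promise((resolve, reject) => {
        exec(command, (error, stdout, stderr) => {
          if (error) {
            // Rejecting propagates to an awaiting caller; a throw inside
            // this callback cannot be caught there.
            return reject(error);
          }
          if (stderr) {
            console.warn(`[DIAGNOSTIC] ${stderr.toString()}`);
          }
          resolve(stdout.toString());
        });
      });
    }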
@@ -64,7 +64,7 @@ export class SetupRemoteRepository {

   private static handleCachePurging() {
     if (process.env.purgeRemoteCaching !== undefined) {
-      CloudRunnerLogger.logRemoteCli(`purging ${CloudRunnerState.purgeRemoteCaching}`);
+      CloudRunnerLogger.logCli(`purging ${CloudRunnerState.purgeRemoteCaching}`);
       fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
     }
   }
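An aside on the purge above, not part of the commit: on Node.js 14.14 and later, fs.rmSync is the non-deprecated equivalent of fs.rmdirSync(dir, { recursive: true }). The folder path in this sketch is illustrative only.

    import fs from 'fs';

    // Removes a directory tree; `force` suppresses the error if it is absent.
    fs.rmSync('/data/cache', { recursive: true, force: true });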
@@ -72,71 +72,69 @@ export class SetupRemoteRepository {
   private static async cacheLatestLFSFiles(lfsCacheFolder: string) {
     process.chdir(`${CloudRunnerState.lfsDirectory}/..`);
     await RemoteClientSystem.Run(`zip -r "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "lfs"`);
-    CloudRunnerLogger.logRemoteCli(fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
+    CloudRunnerLogger.logCli(fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
     await RemoteClientSystem.Run(
       `cp "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "${path.join(
         lfsCacheFolder,
         `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
       )}"`,
     );
-    CloudRunnerLogger.logRemoteCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
+    CloudRunnerLogger.logCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
   }

   private static async pullLatestLFS() {
     process.chdir(CloudRunnerState.repoPathFull);
     await RemoteClientSystem.Run(`git lfs pull`);
-    CloudRunnerLogger.logRemoteCli(`pulled latest LFS files`);
+    CloudRunnerLogger.logCli(`pulled latest LFS files`);
   }

   private static async lfsCaching(lfsCacheFolder: string) {
-    CloudRunnerLogger.logRemoteCli(` `);
-    CloudRunnerLogger.logRemoteCli(`LFS Caching`);
+    CloudRunnerLogger.logCli(` `);
+    CloudRunnerLogger.logCli(`LFS Caching`);
     process.chdir(lfsCacheFolder);
     let latestLFSCacheFile;
     if (fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)) {
-      CloudRunnerLogger.logRemoteCli(
-        `Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
-      );
+      CloudRunnerLogger.logCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
       latestLFSCacheFile = `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`;
     } else {
       latestLFSCacheFile = await RemoteClientSystem.Run(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
     }
     if (fs.existsSync(latestLFSCacheFile)) {
-      CloudRunnerLogger.logRemoteCli(`LFS cache exists`);
+      CloudRunnerLogger.logCli(`LFS cache exists`);
       fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
-      CloudRunnerLogger.logRemoteCli(
+      CloudRunnerLogger.logCli(
         `LFS cache exists from build ${latestLFSCacheFile} from ${CloudRunnerState.buildParams.branch}`,
       );
       await RemoteClientSystem.Run(
         `unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path.join(CloudRunnerState.repoPathFull, `.git`)}"`,
       );
       await RemoteClientSystem.Run(`ls -lh "${CloudRunnerState.lfsDirectory}"`);
-      CloudRunnerLogger.logRemoteCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
+      CloudRunnerLogger.logCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
     }
   }

   private static async libraryCaching(lfsCacheFolder: string, libraryCacheFolder: string) {
-    CloudRunnerLogger.logRemoteCli(`Starting checks of cache for the Unity project Library and git LFS files`);
+    CloudRunnerLogger.logCli(`Starting checks of cache for the Unity project Library and git LFS files`);
     if (!fs.existsSync(lfsCacheFolder)) {
       fs.mkdirSync(lfsCacheFolder);
     }
     if (!fs.existsSync(libraryCacheFolder)) {
       fs.mkdirSync(libraryCacheFolder);
     }
-    CloudRunnerLogger.logRemoteCli(`Library Caching`);
+    CloudRunnerLogger.logCli(`Library Caching`);
     //if the unity git project has included the library delete it and echo a warning
     if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
       fs.rmdirSync(CloudRunnerState.libraryFolderFull, { recursive: true });
-      CloudRunnerLogger.logRemoteCli(
+      CloudRunnerLogger.logCli(
        `!Warning!: The Unity library was included in the git repository (this isn't usually a good practice)`,
      );
    }
    //Restore library cache
    const latestLibraryCacheFile = await RemoteClientSystem.Run(`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`);
    await RemoteClientSystem.Run(`ls -lh "${libraryCacheFolder}"`);
-    CloudRunnerLogger.logRemoteCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
+    CloudRunnerLogger.logCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
    if (fs.existsSync(latestLibraryCacheFile)) {
-      CloudRunnerLogger.logRemoteCli(`Library cache exists`);
+      CloudRunnerLogger.logCli(`Library cache exists`);
      const latestCacheFilePath = path.join(libraryCacheFolder, latestLibraryCacheFile);
      await RemoteClientSystem.Run(`unzip -q "${latestCacheFilePath}" -d "$projectPathFull"`);
    }
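The cache lookup in the hunk above shells out with `ls -t "<folder>" | grep .zip$ | head -1` to find the newest cached zip. A minimal in-process sketch of the same lookup, for illustration only (the action itself shells out as shown; the function name newestZip is hypothetical):

    import fs from 'fs';
    import path from 'path';

    // Newest .zip file in a cache folder by modification time,
    // or undefined if the folder holds none.
    function newestZip(folder: string): string | undefined {
      return fs
        .readdirSync(folder)
        .filter((f) => f.endsWith('.zip'))
        .map((f) => ({ f, mtime: fs.statSync(path.join(folder, f)).mtimeMs }))
        .sort((a, b) => b.mtime - a.mtime)[0]?.f;
    }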
@@ -149,23 +147,23 @@ export class SetupRemoteRepository {
       `${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`,
       'utf8',
     );
-    CloudRunnerLogger.logRemoteCli(SetupRemoteRepository.LFS_ASSETS_HASH);
+    CloudRunnerLogger.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
   }

   private static async cloneRepoWithoutLFSFiles() {
     try {
-      CloudRunnerLogger.logRemoteCli(`Initializing source repository for cloning with caching of LFS files`);
+      CloudRunnerLogger.logCli(`Initializing source repository for cloning with caching of LFS files`);
       process.chdir(CloudRunnerState.repoPathFull);
       await RemoteClientSystem.Run(`git config --global advice.detachedHead false`);
-      CloudRunnerLogger.logRemoteCli(`Cloning the repository being built:`);
+      CloudRunnerLogger.logCli(`Cloning the repository being built:`);
       await RemoteClientSystem.Run(`git lfs install --skip-smudge`);
-      CloudRunnerLogger.logRemoteCli(CloudRunnerState.targetBuildRepoUrl);
+      CloudRunnerLogger.logCli(CloudRunnerState.targetBuildRepoUrl);
       await RemoteClientSystem.Run(`git clone ${CloudRunnerState.targetBuildRepoUrl} ${CloudRunnerState.repoPathFull}`);
       await RemoteClientSystem.Run(`ls -lh`);
       await RemoteClientSystem.Run(`tree`);
-      CloudRunnerLogger.logRemoteCli(`${CloudRunnerState.buildParams.branch}`);
+      CloudRunnerLogger.logCli(`${CloudRunnerState.buildParams.branch}`);
       await RemoteClientSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
-      CloudRunnerLogger.logRemoteCli(`Checked out ${process.env.GITHUB_SHA}`);
+      CloudRunnerLogger.logCli(`Checked out ${process.env.GITHUB_SHA}`);
     } catch (error) {
       throw error;
     }
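The `.lfs-assets-guid` file produced earlier in this diff (via `git lfs ls-files -l | cut -d ' ' -f1 | sort`) holds the sorted LFS object ids; its raw content becomes LFS_ASSETS_HASH, while the md5 is written only to `.lfs-assets-guid-sum`. A sketch of the underlying cache-key idea, not the action's own code: two builds whose LFS content is identical derive the same key and can reuse one cached zip.

    import { execSync } from 'child_process';
    import { createHash } from 'crypto';

    // Sorted LFS object ids identify the LFS working set; hashing them
    // gives a short, filename-safe cache key.
    const oids = execSync(`git lfs ls-files -l | cut -d ' ' -f1 | sort`).toString();
    const lfsKey = createHash('md5').update(oids).digest('hex');
    console.log(`would cache as: ${lfsKey}.zip`);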
@@ -94,12 +94,7 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
     await CF.waitFor('stackDeleteComplete', {
       StackName: taskDef.taskDefStackNameTTL,
     }).promise();
-
     const stacks = (await CF.listStacks().promise()).StackSummaries?.filter((x) => x.StackStatus !== 'DELETE_COMPLETE');
-
     CloudRunnerLogger.log(`Deleted Stacks: ${taskDef.taskDefStackName}, ${taskDef.taskDefStackNameTTL}`);
     CloudRunnerLogger.log(`Stacks: ${JSON.stringify(stacks, undefined, 4)}`);
-
     CloudRunnerLogger.log('Cleanup complete');
   }
 }
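A minimal standalone sketch of the cleanup flow above, assuming aws-sdk v2 (the `.promise()` call style the diff uses) and illustrative stack names; it is not the action's own code:

    import * as AWS from 'aws-sdk';

    async function cleanup(stackName: string, ttlStackName: string) {
      const CF = new AWS.CloudFormation();
      // Delete both stacks, then block until the TTL stack is fully gone.
      await CF.deleteStack({ StackName: stackName }).promise();
      await CF.deleteStack({ StackName: ttlStackName }).promise();
      await CF.waitFor('stackDeleteComplete', { StackName: ttlStackName }).promise();
      // List whatever survives, filtering out already-deleted stacks.
      const stacks = (await CF.listStacks().promise()).StackSummaries?.filter(
        (x) => x.StackStatus !== 'DELETE_COMPLETE',
      );
      console.log(`remaining stacks: ${JSON.stringify(stacks, undefined, 4)}`);
    }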
@@ -13,7 +13,7 @@ class CloudRunnerLogger {
     core.info(message);
   }

-  public static logRemoteCli(message: string) {
+  public static logCli(message: string) {
     CloudRunnerLogger.log(`[CLI] ${message}`);
   }

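The rename above is the substance of this commit: every former logRemoteCli call site now goes through logCli, in both the TypeScript sources and the compiled dist output, and all messages keep the uniform prefix. For example:

    CloudRunnerLogger.logCli('cloning repository'); // prints "[CLI] cloning repository"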