Converting caching from shell to typescript
parent f67f5bc615
commit c37771c360
@@ -2089,10 +2089,8 @@ class DownloadRepository {
 cloud_runner_logger_1.default.log(`LFS cache exists`);
 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.lfsDirectory, { recursive: true });
 cloud_runner_logger_1.default.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
-yield run_cli_1.RunCli.RunCli(`
-unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.git"
-ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"
-`);
+yield run_cli_1.RunCli.RunCli(`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.git"`);
+yield run_cli_1.RunCli.RunCli(`ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"`);
 cloud_runner_logger_1.default.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
 }
 yield run_cli_1.RunCli.RunCli(`
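Both the removed and the added lines in this hunk run their shell commands through RunCli.RunCli; the change only splits one multi-line heredoc into single-command calls. The helper itself is not part of this diff. A minimal sketch of such a helper, assuming it simply wraps Node's child_process.exec in a promise, might look like this (hypothetical; the real class in the repository may differ):

// Hypothetical RunCli-style helper; not taken from this diff.
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

export class RunCli {
  // Runs one shell command and resolves with its stdout.
  public static async RunCli(command: string): Promise<string> {
    const { stdout, stderr } = await execAsync(command);
    if (stderr) {
      // Surface stderr without failing; exec already rejects on a non-zero exit code.
      process.stderr.write(stderr);
    }
    return stdout;
  }
}

Splitting the heredoc into one call per command keeps each command's exit status visible to the wrapper instead of being masked by the last command in a single shell block.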
@@ -2110,17 +2108,13 @@ class DownloadRepository {
 du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
 echo ' '
 `);
-yield run_cli_1.RunCli.RunCli(`
-cd "${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
-git lfs pull
-echo 'pulled latest LFS files'
-`);
-yield run_cli_1.RunCli.RunCli(`
-cd "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/.."
-zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
-cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
-echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
-`);
+process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
+yield run_cli_1.RunCli.RunCli(`git lfs pull`);
+cloud_runner_logger_1.default.log(`pulled latest LFS files`);
+process.chdir(`${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/..`);
+yield run_cli_1.RunCli.RunCli(`zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"`);
+fs_1.default.copyFileSync(`${LFS_ASSETS_HASH}.zip`, lfsCacheFolder);
+cloud_runner_logger_1.default.log(`copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
 if (process.env.purgeRemoteCaching !== undefined) {
 cloud_runner_logger_1.default.log(`purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}`);
 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.cacheFolder, { recursive: true });
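The purge branch at the end of this hunk clears the cache with fs.rmdirSync(dir, { recursive: true }). On Node 14.14 and later the recursive rmdir form is deprecated in favour of fs.rmSync. A small alternative sketch of the same env-gated purge, with cacheFolder standing in for CloudRunnerState.cacheFolder, would be:

import fs from 'fs';

// Sketch only: same purgeRemoteCaching gate as above, using fs.rmSync
// (available from Node 14.14) instead of the deprecated recursive rmdirSync.
function purgeCache(cacheFolder: string): void {
  if (process.env.purgeRemoteCaching !== undefined) {
    // force: true makes the call a no-op when the folder is already gone.
    fs.rmSync(cacheFolder, { recursive: true, force: true });
  }
}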
File diff suppressed because one or more lines are too long
@@ -79,10 +79,10 @@ export class DownloadRepository {
 CloudRunnerLogger.log(`LFS cache exists`);
 fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
 CloudRunnerLogger.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
-await RunCli.RunCli(`
-unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${CloudRunnerState.repoPathFull}/.git"
-ls -lh "${CloudRunnerState.lfsDirectory}"
-`);
+await RunCli.RunCli(
+  `unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${CloudRunnerState.repoPathFull}/.git"`,
+);
+await RunCli.RunCli(`ls -lh "${CloudRunnerState.lfsDirectory}"`);
 CloudRunnerLogger.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
 }
 await RunCli.RunCli(`
@@ -100,17 +100,13 @@ export class DownloadRepository {
 du -sch "${CloudRunnerState.cacheFolderFull}/"
 echo ' '
 `);
-await RunCli.RunCli(`
-cd "${CloudRunnerState.repoPathFull}"
-git lfs pull
-echo 'pulled latest LFS files'
-`);
-await RunCli.RunCli(`
-cd "${CloudRunnerState.lfsDirectory}/.."
-zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
-cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
-echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
-`);
+process.chdir(CloudRunnerState.repoPathFull);
+await RunCli.RunCli(`git lfs pull`);
+CloudRunnerLogger.log(`pulled latest LFS files`);
+process.chdir(`${CloudRunnerState.lfsDirectory}/..`);
+await RunCli.RunCli(`zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"`);
+fs.copyFileSync(`${LFS_ASSETS_HASH}.zip`, lfsCacheFolder);
+CloudRunnerLogger.log(`copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
 if (process.env.purgeRemoteCaching !== undefined) {
 CloudRunnerLogger.log(`purging ${CloudRunnerState.purgeRemoteCaching}`);
 fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
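Read end to end, the added TypeScript lines replace the shell heredocs with explicit steps: change into the repository, pull LFS objects, zip the lfs folder from its parent directory, and copy the archive into the cache folder. A rough standalone sketch of that flow, assuming a RunCli helper like the one sketched earlier and joining the destination path explicitly (Node's fs.copyFileSync expects a destination file path, not a directory), could look like:

import fs from 'fs';
import path from 'path';
import { RunCli } from './run-cli'; // hypothetical import path for the helper sketched above

// repoPath, lfsDirectory, lfsCacheFolder and lfsAssetsHash stand in for the
// values the diff takes from CloudRunnerState and its LFS hashing step.
async function populateLfsCache(
  repoPath: string,
  lfsDirectory: string,
  lfsCacheFolder: string,
  lfsAssetsHash: string,
): Promise<void> {
  process.chdir(repoPath);
  await RunCli.RunCli(`git lfs pull`);

  // Zip .git/lfs from its parent folder so the archive contains "./lfs".
  process.chdir(path.join(lfsDirectory, '..'));
  await RunCli.RunCli(`zip -q -r "${lfsAssetsHash}.zip" "./lfs"`);

  // Copy the archive into the shared cache folder under an explicit file name.
  fs.copyFileSync(`${lfsAssetsHash}.zip`, path.join(lfsCacheFolder, `${lfsAssetsHash}.zip`));
}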