Converting caching from shell to TypeScript

pull/310/head
Frostebite 2021-12-24 02:08:42 +00:00
parent 9079429c8d
commit f67f5bc615
3 changed files with 93 additions and 90 deletions

142
dist/index.js vendored
View File

@@ -2021,34 +2021,34 @@ class DownloadRepository {
fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.buildPathFull);
fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.repoPathFull);
cloud_runner_logger_1.default.log(`Initializing source repository for cloning with caching of LFS files`);
yield run_cli_1.RunCli.RunCli(`
cd ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
# stop annoying git detatched head info
git config --global advice.detachedHead false
echo ' '
echo "Cloning the repository being built:"
git lfs install --skip-smudge
echo "${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl}"
git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
git checkout ${process.env.GITHUB_SHA}
echo "Checked out ${process.env.GITHUB_SHA}"
yield run_cli_1.RunCli.RunCli(`
cd ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
# stop annoying git detatched head info
git config --global advice.detachedHead false
echo ' '
echo "Cloning the repository being built:"
git lfs install --skip-smudge
echo "${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl}"
git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
git checkout ${process.env.GITHUB_SHA}
echo "Checked out ${process.env.GITHUB_SHA}"
`);
yield run_cli_1.RunCli.RunCli(`
git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
md5sum .lfs-assets-guid > .lfs-assets-guid-sum
yield run_cli_1.RunCli.RunCli(`
git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
md5sum .lfs-assets-guid > .lfs-assets-guid-sum
`);
const LFS_ASSETS_HASH = fs_1.default.readFileSync(`${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.lfs-assets-guid`, 'utf8');
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo 'Contents of .lfs-assets-guid file:'
cat .lfs-assets-guid
echo ' '
echo 'Contents of .lfs-assets-guid-sum file:'
cat .lfs-assets-guid-sum
echo ' '
echo 'Source repository initialized'
ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
echo ' '
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo 'Contents of .lfs-assets-guid file:'
cat .lfs-assets-guid
echo ' '
echo 'Contents of .lfs-assets-guid-sum file:'
cat .lfs-assets-guid-sum
echo ' '
echo 'Source repository initialized'
ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
echo ' '
`);
const lfsCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lfs`;
const libraryCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lib`;
@@ -2068,56 +2068,58 @@ class DownloadRepository {
cloud_runner_logger_1.default.log(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
if (fs_1.default.existsSync(latestLibraryCacheFile)) {
cloud_runner_logger_1.default.log(`Library cache exists`);
yield run_cli_1.RunCli.RunCli(`
unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
tree "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
yield run_cli_1.RunCli.RunCli(`
unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
tree "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
`);
}
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo 'Large File Caching'
echo "Checking large file cache exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)"
cd ${lfsCacheFolder}
if [ -f "${LFS_ASSETS_HASH}.zip" ]; then
echo "Match found: using large file hash match ${LFS_ASSETS_HASH}.zip"
latestLFSCacheFile="${LFS_ASSETS_HASH}"
else
latestLFSCacheFile=$(ls -t "${lfsCacheFolder}" | grep .zip$ | head -1)
echo "Match not found: using latest large file cache $latestLFSCacheFile"
fi
if [ ! -f "$latestLFSCacheFile" ]; then
echo "LFS cache exists from build $latestLFSCacheFile from $branch"
rm -r "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"
unzip -q "${lfsCacheFolder}/$latestLFSCacheFile" -d "$repoPathFull/.git"
echo "git LFS folder, (should not contain $latestLFSCacheFile)"
ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/"
fi
cloud_runner_logger_1.default.log(` `);
cloud_runner_logger_1.default.log(`LFS Caching`);
cloud_runner_logger_1.default.log(`Checking largest LFS file exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)`);
process.chdir(lfsCacheFolder);
let latestLFSCacheFile;
if (fs_1.default.existsSync(`${LFS_ASSETS_HASH}.zip`)) {
cloud_runner_logger_1.default.log(`Match found: using large file hash match ${LFS_ASSETS_HASH}.zip`);
latestLFSCacheFile = `${LFS_ASSETS_HASH}.zip`;
}
else {
latestLFSCacheFile = yield run_cli_1.RunCli.RunCli(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
}
if (fs_1.default.existsSync(latestLFSCacheFile)) {
cloud_runner_logger_1.default.log(`LFS cache exists`);
fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.lfsDirectory, { recursive: true });
cloud_runner_logger_1.default.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
yield run_cli_1.RunCli.RunCli(`
unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.git"
ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"
`);
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo "LFS cache for $branch"
du -sch "${lfsCacheFolder}/"
echo '**'
echo "Library cache for $branch"
du -sch "${libraryCacheFolder}/"
echo '**'
echo "Branch: $branch"
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
echo '**'
echo 'Full cache'
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
echo ' '
cloud_runner_logger_1.default.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
}
yield run_cli_1.RunCli.RunCli(`
echo ' '
echo "LFS cache for $branch"
du -sch "${lfsCacheFolder}/"
echo '**'
echo "Library cache for $branch"
du -sch "${libraryCacheFolder}/"
echo '**'
echo "Branch: $branch"
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
echo '**'
echo 'Full cache'
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
echo ' '
`);
yield run_cli_1.RunCli.RunCli(`
cd "${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
git lfs pull
echo 'pulled latest LFS files'
yield run_cli_1.RunCli.RunCli(`
cd "${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
git lfs pull
echo 'pulled latest LFS files'
`);
yield run_cli_1.RunCli.RunCli(`
cd "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/.."
zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
yield run_cli_1.RunCli.RunCli(`
cd "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/.."
zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
`);
if (process.env.purgeRemoteCaching !== undefined) {
cloud_runner_logger_1.default.log(`purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}`);

2
dist/index.js.map vendored

File diff suppressed because one or more lines are too long

View File

@@ -64,26 +64,27 @@ export class DownloadRepository {
tree "${CloudRunnerState.libraryFolderFull}"
`);
}
await RunCli.RunCli(`
echo ' '
echo 'Large File Caching'
echo "Checking large file cache exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)"
cd ${lfsCacheFolder}
if [ -f "${LFS_ASSETS_HASH}.zip" ]; then
echo "Match found: using large file hash match ${LFS_ASSETS_HASH}.zip"
latestLFSCacheFile="${LFS_ASSETS_HASH}"
else
latestLFSCacheFile=$(ls -t "${lfsCacheFolder}" | grep .zip$ | head -1)
echo "Match not found: using latest large file cache $latestLFSCacheFile"
fi
if [ ! -f "$latestLFSCacheFile" ]; then
echo "LFS cache exists from build $latestLFSCacheFile from $branch"
rm -r "${CloudRunnerState.lfsDirectory}"
unzip -q "${lfsCacheFolder}/$latestLFSCacheFile" -d "$repoPathFull/.git"
echo "git LFS folder, (should not contain $latestLFSCacheFile)"
ls -lh "${CloudRunnerState.lfsDirectory}/"
fi
CloudRunnerLogger.log(` `);
CloudRunnerLogger.log(`LFS Caching`);
CloudRunnerLogger.log(`Checking largest LFS file exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)`);
process.chdir(lfsCacheFolder);
let latestLFSCacheFile;
if (fs.existsSync(`${LFS_ASSETS_HASH}.zip`)) {
CloudRunnerLogger.log(`Match found: using large file hash match ${LFS_ASSETS_HASH}.zip`);
latestLFSCacheFile = `${LFS_ASSETS_HASH}.zip`;
} else {
latestLFSCacheFile = await RunCli.RunCli(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
}
if (fs.existsSync(latestLFSCacheFile)) {
CloudRunnerLogger.log(`LFS cache exists`);
fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
CloudRunnerLogger.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
await RunCli.RunCli(`
unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${CloudRunnerState.repoPathFull}/.git"
ls -lh "${CloudRunnerState.lfsDirectory}"
`);
CloudRunnerLogger.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
}
await RunCli.RunCli(`
echo ' '
echo "LFS cache for $branch"