Converting caching from shell to typescript

pull/310/head
Frostebite 2021-12-24 02:08:42 +00:00
parent 9079429c8d
commit f67f5bc615
3 changed files with 93 additions and 90 deletions
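
This commit moves the LFS cache-restore logic out of a bash script passed to RunCli.RunCli and into TypeScript: fs.existsSync replaces `if [ -f ... ]`, process.chdir replaces `cd`, fs.rmdirSync replaces `rm -r`, and CloudRunnerLogger.log replaces `echo`, while the `ls -t | grep .zip$ | head -1` fallback and the `unzip` extraction stay as shell commands. A minimal sketch of that pattern follows; runShell and restoreLfsCache are stand-ins for illustration, not the project's actual helpers, and the paths are parameters here whereas the real code reads them from CloudRunnerState.

// Sketch only: the shell-to-TypeScript conversion pattern applied in this commit.
import fs from 'fs';
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

// Stand-in for RunCli.RunCli: run a shell snippet and return its trimmed stdout.
async function runShell(command: string): Promise<string> {
  const { stdout } = await execAsync(command);
  return stdout.trim();
}

async function restoreLfsCache(
  lfsCacheFolder: string,
  lfsAssetsHash: string,
  repoPath: string,
  lfsDirectory: string,
): Promise<void> {
  process.chdir(lfsCacheFolder);
  let latestLfsCacheFile: string;
  if (fs.existsSync(`${lfsAssetsHash}.zip`)) {
    // Was: if [ -f "${LFS_ASSETS_HASH}.zip" ]; then ...
    latestLfsCacheFile = `${lfsAssetsHash}.zip`;
  } else {
    // The newest-archive fallback stays in the shell, as in the diff below.
    latestLfsCacheFile = await runShell(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
  }
  if (fs.existsSync(latestLfsCacheFile)) {
    // Was: rm -r "<lfsDirectory>" followed by unzip into the repository's .git folder.
    fs.rmdirSync(lfsDirectory, { recursive: true });
    await runShell(`unzip -q "${lfsCacheFolder}/${latestLfsCacheFile}" -d "${repoPath}/.git"`);
  }
}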

dist/index.js (vendored): 142 changed lines

@@ -2021,34 +2021,34 @@ class DownloadRepository {
    fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.buildPathFull);
    fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.repoPathFull);
    cloud_runner_logger_1.default.log(`Initializing source repository for cloning with caching of LFS files`);
    yield run_cli_1.RunCli.RunCli(`
      cd ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
      # stop annoying git detatched head info
      git config --global advice.detachedHead false
      echo ' '
      echo "Cloning the repository being built:"
      git lfs install --skip-smudge
      echo "${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl}"
      git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}
      git checkout ${process.env.GITHUB_SHA}
      echo "Checked out ${process.env.GITHUB_SHA}"
    `);
    yield run_cli_1.RunCli.RunCli(`
      git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
      md5sum .lfs-assets-guid > .lfs-assets-guid-sum
    `);
    const LFS_ASSETS_HASH = fs_1.default.readFileSync(`${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.lfs-assets-guid`, 'utf8');
    yield run_cli_1.RunCli.RunCli(`
      echo ' '
      echo 'Contents of .lfs-assets-guid file:'
      cat .lfs-assets-guid
      echo ' '
      echo 'Contents of .lfs-assets-guid-sum file:'
      cat .lfs-assets-guid-sum
      echo ' '
      echo 'Source repository initialized'
      ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
      echo ' '
    `);
    const lfsCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lfs`;
    const libraryCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lib`;
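
The context above derives the cache key from the repository's Git LFS pointers: `git lfs ls-files -l | cut -d ' ' -f1 | sort` writes the sorted object ids to .lfs-assets-guid, md5sum records their digest in .lfs-assets-guid-sum, and the file is read back into LFS_ASSETS_HASH. For illustration only (the commit keeps this step in the shell), the same md5 digest could be computed directly in Node:

import { execSync } from 'child_process';
import { createHash } from 'crypto';

// Compute the digest that `md5sum .lfs-assets-guid` records, without temp files.
// Illustration only; not part of this commit.
function lfsAssetsDigest(repoPath: string): string {
  const output = execSync('git lfs ls-files -l', { cwd: repoPath, encoding: 'utf8' });
  const sortedOids = output
    .split('\n')
    .filter((line) => line.trim().length > 0)
    .map((line) => line.split(' ')[0])
    .sort();
  // `sort > .lfs-assets-guid` produces one oid per line with a trailing newline.
  return createHash('md5').update(`${sortedOids.join('\n')}\n`).digest('hex');
}
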
@@ -2068,56 +2068,58 @@ class DownloadRepository {
    cloud_runner_logger_1.default.log(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
    if (fs_1.default.existsSync(latestLibraryCacheFile)) {
        cloud_runner_logger_1.default.log(`Library cache exists`);
        yield run_cli_1.RunCli.RunCli(`
      unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
      tree "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
    `);
    }
-   yield run_cli_1.RunCli.RunCli(`
-     echo ' '
-     echo 'Large File Caching'
-     echo "Checking large file cache exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)"
-     cd ${lfsCacheFolder}
-     if [ -f "${LFS_ASSETS_HASH}.zip" ]; then
-       echo "Match found: using large file hash match ${LFS_ASSETS_HASH}.zip"
-       latestLFSCacheFile="${LFS_ASSETS_HASH}"
-     else
-       latestLFSCacheFile=$(ls -t "${lfsCacheFolder}" | grep .zip$ | head -1)
-       echo "Match not found: using latest large file cache $latestLFSCacheFile"
-     fi
-     if [ ! -f "$latestLFSCacheFile" ]; then
-       echo "LFS cache exists from build $latestLFSCacheFile from $branch"
-       rm -r "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"
-       unzip -q "${lfsCacheFolder}/$latestLFSCacheFile" -d "$repoPathFull/.git"
-       echo "git LFS folder, (should not contain $latestLFSCacheFile)"
-       ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/"
-     fi
+   cloud_runner_logger_1.default.log(` `);
+   cloud_runner_logger_1.default.log(`LFS Caching`);
+   cloud_runner_logger_1.default.log(`Checking largest LFS file exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)`);
+   process.chdir(lfsCacheFolder);
+   let latestLFSCacheFile;
+   if (fs_1.default.existsSync(`${LFS_ASSETS_HASH}.zip`)) {
+       cloud_runner_logger_1.default.log(`Match found: using large file hash match ${LFS_ASSETS_HASH}.zip`);
+       latestLFSCacheFile = `${LFS_ASSETS_HASH}.zip`;
+   }
+   else {
+       latestLFSCacheFile = yield run_cli_1.RunCli.RunCli(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
+   }
+   if (fs_1.default.existsSync(latestLFSCacheFile)) {
+       cloud_runner_logger_1.default.log(`LFS cache exists`);
+       fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.lfsDirectory, { recursive: true });
+       cloud_runner_logger_1.default.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
+       yield run_cli_1.RunCli.RunCli(`
+     unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.git"
+     ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"
    `);
-   yield run_cli_1.RunCli.RunCli(`
-     echo ' '
-     echo "LFS cache for $branch"
-     du -sch "${lfsCacheFolder}/"
-     echo '**'
-     echo "Library cache for $branch"
-     du -sch "${libraryCacheFolder}/"
-     echo '**'
-     echo "Branch: $branch"
-     du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
-     echo '**'
-     echo 'Full cache'
-     du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
-     echo ' '
+   cloud_runner_logger_1.default.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
+   }
+   yield run_cli_1.RunCli.RunCli(`
+     echo ' '
+     echo "LFS cache for $branch"
+     du -sch "${lfsCacheFolder}/"
+     echo '**'
+     echo "Library cache for $branch"
+     du -sch "${libraryCacheFolder}/"
+     echo '**'
+     echo "Branch: $branch"
+     du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
+     echo '**'
+     echo 'Full cache'
+     du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
+     echo ' '
    `);
    yield run_cli_1.RunCli.RunCli(`
      cd "${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
      git lfs pull
      echo 'pulled latest LFS files'
    `);
    yield run_cli_1.RunCli.RunCli(`
      cd "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/.."
      zip -q -r "${LFS_ASSETS_HASH}.zip" "./lfs"
      cp "${LFS_ASSETS_HASH}.zip" "${lfsCacheFolder}"
      echo "copied ${LFS_ASSETS_HASH} to ${lfsCacheFolder}"
    `);
    if (process.env.purgeRemoteCaching !== undefined) {
        cloud_runner_logger_1.default.log(`purging ${cloud_runner_state_1.CloudRunnerState.purgeRemoteCaching}`);
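
With this hunk applied, only two steps of the restore path still shell out: the `ls -t ... | head -1` lookup for the newest cache archive and the `unzip` extraction. If the lookup were also moved into TypeScript, a plain-fs version might look like the following; findLatestZip is hypothetical and not part of this commit.

import fs from 'fs';
import path from 'path';

// Hypothetical replacement for `ls -t "<folder>" | grep .zip$ | head -1`:
// the most recently modified .zip file in a folder, or undefined if none exists.
function findLatestZip(folder: string): string | undefined {
  const zips = fs
    .readdirSync(folder)
    .filter((file) => file.endsWith('.zip'))
    .map((file) => ({ file, modified: fs.statSync(path.join(folder, file)).mtimeMs }))
    .sort((a, b) => b.modified - a.modified);
  return zips.length > 0 ? zips[0].file : undefined;
}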

dist/index.js.map (vendored): 2 changed lines
File diff suppressed because one or more lines are too long

Third changed file

@@ -64,26 +64,27 @@ export class DownloadRepository {
      tree "${CloudRunnerState.libraryFolderFull}"
    `);
    }
-   await RunCli.RunCli(`
-     echo ' '
-     echo 'Large File Caching'
-     echo "Checking large file cache exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)"
-     cd ${lfsCacheFolder}
-     if [ -f "${LFS_ASSETS_HASH}.zip" ]; then
-       echo "Match found: using large file hash match ${LFS_ASSETS_HASH}.zip"
-       latestLFSCacheFile="${LFS_ASSETS_HASH}"
-     else
-       latestLFSCacheFile=$(ls -t "${lfsCacheFolder}" | grep .zip$ | head -1)
-       echo "Match not found: using latest large file cache $latestLFSCacheFile"
-     fi
-     if [ ! -f "$latestLFSCacheFile" ]; then
-       echo "LFS cache exists from build $latestLFSCacheFile from $branch"
-       rm -r "${CloudRunnerState.lfsDirectory}"
-       unzip -q "${lfsCacheFolder}/$latestLFSCacheFile" -d "$repoPathFull/.git"
-       echo "git LFS folder, (should not contain $latestLFSCacheFile)"
-       ls -lh "${CloudRunnerState.lfsDirectory}/"
-     fi
+   CloudRunnerLogger.log(` `);
+   CloudRunnerLogger.log(`LFS Caching`);
+   CloudRunnerLogger.log(`Checking largest LFS file exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)`);
+   process.chdir(lfsCacheFolder);
+   let latestLFSCacheFile;
+   if (fs.existsSync(`${LFS_ASSETS_HASH}.zip`)) {
+     CloudRunnerLogger.log(`Match found: using large file hash match ${LFS_ASSETS_HASH}.zip`);
+     latestLFSCacheFile = `${LFS_ASSETS_HASH}.zip`;
+   } else {
+     latestLFSCacheFile = await RunCli.RunCli(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`);
+   }
+   if (fs.existsSync(latestLFSCacheFile)) {
+     CloudRunnerLogger.log(`LFS cache exists`);
+     fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
+     CloudRunnerLogger.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
+     await RunCli.RunCli(`
+       unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${CloudRunnerState.repoPathFull}/.git"
+       ls -lh "${CloudRunnerState.lfsDirectory}"
    `);
+   CloudRunnerLogger.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
+   }
    await RunCli.RunCli(`
      echo ' '
      echo "LFS cache for $branch"