Converting caching from shell to typescript
parent 301e241147
commit f359494f52
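In summary, the change below does two things in both the compiled bundle and the TypeScript source: cache paths are now built with path.join instead of string concatenation with '/', and the cache folders are only created when they do not already exist. A minimal sketch of that pattern, assuming a hypothetical cache root rather than the CloudRunnerState.cacheFolderFull value used in the diff:

import fs from 'fs';
import path from 'path';

// Hypothetical cache root; the real code reads CloudRunnerState.cacheFolderFull.
const cacheFolderFull = '/data/cache';

// Build platform-safe paths instead of concatenating with '/'.
const lfsCacheFolder = path.join(cacheFolderFull, 'lfs');
const libraryCacheFolder = path.join(cacheFolderFull, 'lib');

// Create each cache folder only when it is missing, so a re-run does not throw EEXIST.
for (const folder of [lfsCacheFolder, libraryCacheFolder]) {
  if (!fs.existsSync(folder)) {
    fs.mkdirSync(folder);
  }
}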
@@ -2014,6 +2014,7 @@ const cloud_runner_state_1 = __webpack_require__(70912);
 const run_cli_1 = __webpack_require__(33639);
 const fs_1 = __importDefault(__webpack_require__(35747));
 const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
+const path_1 = __importDefault(__webpack_require__(85622));
 class DownloadRepository {
 static run() {
 return __awaiter(this, void 0, void 0, function* () {
@@ -2036,7 +2037,7 @@ class DownloadRepository {
 git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
 md5sum .lfs-assets-guid > .lfs-assets-guid-sum
 `);
-const LFS_ASSETS_HASH = fs_1.default.readFileSync(`${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.lfs-assets-guid`, 'utf8');
+const LFS_ASSETS_HASH = fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
 yield run_cli_1.RunCli.RunCli(`
 echo ' '
 echo 'Contents of .lfs-assets-guid file:'
@@ -2049,12 +2050,16 @@ class DownloadRepository {
 ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
 echo ' '
 `);
-const lfsCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lfs`;
+const lfsCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lfs`);
-const libraryCacheFolder = `${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/lib`;
+const libraryCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lib`);
 yield run_cli_1.RunCli.RunCli(`tree ${cloud_runner_state_1.CloudRunnerState.builderPathFull}`);
 cloud_runner_logger_1.default.log(`Starting checks of cache for the Unity project Library and git LFS files`);
-fs_1.default.mkdirSync(lfsCacheFolder);
-fs_1.default.mkdirSync(libraryCacheFolder);
+if (!fs_1.default.existsSync(lfsCacheFolder)) {
+    fs_1.default.mkdirSync(lfsCacheFolder);
+}
+if (!fs_1.default.existsSync(libraryCacheFolder)) {
+    fs_1.default.mkdirSync(libraryCacheFolder);
+}
 cloud_runner_logger_1.default.log(`Library Caching`);
 //if the unity git project has included the library delete it and echo a warning
 if (fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
@@ -2068,13 +2073,12 @@ class DownloadRepository {
 if (fs_1.default.existsSync(latestLibraryCacheFile)) {
 cloud_runner_logger_1.default.log(`Library cache exists`);
 yield run_cli_1.RunCli.RunCli(`
-unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
+unzip -q "${path_1.default.join(libraryCacheFolder, latestLibraryCacheFile)}" -d "$projectPathFull"
 tree "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"
 `);
 }
 cloud_runner_logger_1.default.log(` `);
 cloud_runner_logger_1.default.log(`LFS Caching`);
-cloud_runner_logger_1.default.log(`Checking largest LFS file exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)`);
 process.chdir(lfsCacheFolder);
 let latestLFSCacheFile;
 if (fs_1.default.existsSync(`${LFS_ASSETS_HASH}.zip`)) {
@@ -2088,7 +2092,7 @@ class DownloadRepository {
 cloud_runner_logger_1.default.log(`LFS cache exists`);
 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.lfsDirectory, { recursive: true });
 cloud_runner_logger_1.default.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
-yield run_cli_1.RunCli.RunCli(`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${cloud_runner_state_1.CloudRunnerState.repoPathFull}/.git"`);
+yield run_cli_1.RunCli.RunCli(`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.git`)}"`);
 yield run_cli_1.RunCli.RunCli(`ls -lh "${cloud_runner_state_1.CloudRunnerState.lfsDirectory}"`);
 cloud_runner_logger_1.default.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
 }
File diff suppressed because one or more lines are too long
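For context on the lookup both versions of the DownloadRepository class perform (visible in the hunks above and below): the shell step writes the sorted list of git LFS object ids to .lfs-assets-guid, and the value read from that file is used directly as the name of the cached LFS zip. A minimal sketch of that cache check, with placeholder paths standing in for the CloudRunnerState values:

import fs from 'fs';
import path from 'path';

// Placeholder paths; the real code uses CloudRunnerState.repoPathFull and the lfs cache folder.
const repoPathFull = '/data/repo';
const lfsCacheFolder = '/data/cache/lfs';

// The contents of .lfs-assets-guid act as the cache key: if a zip named after it
// already exists in the cache folder, the cached LFS objects can be unpacked instead of re-fetched.
const lfsAssetsGuid = fs.readFileSync(path.join(repoPathFull, '.lfs-assets-guid'), 'utf8');
const cachedLfsZip = path.join(lfsCacheFolder, `${lfsAssetsGuid}.zip`);
const lfsCacheHit = fs.existsSync(cachedLfsZip);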
@@ -4,6 +4,7 @@ import { RunCli } from '../run-cli';
 import fs from 'fs';
 import CloudRunnerLogger from '../../services/cloud-runner-logger';
 import { CloudRunner } from '../../..';
+import path from 'path';
 
 export class DownloadRepository {
 public static async run() {
@@ -26,7 +27,7 @@ export class DownloadRepository {
 git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid
 md5sum .lfs-assets-guid > .lfs-assets-guid-sum
 `);
-const LFS_ASSETS_HASH = fs.readFileSync(`${CloudRunnerState.repoPathFull}/.lfs-assets-guid`, 'utf8');
+const LFS_ASSETS_HASH = fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
 await RunCli.RunCli(`
 echo ' '
 echo 'Contents of .lfs-assets-guid file:'
@@ -39,12 +40,16 @@ export class DownloadRepository {
 ls ${CloudRunnerState.projectPathFull}
 echo ' '
 `);
-const lfsCacheFolder = `${CloudRunnerState.cacheFolderFull}/lfs`;
+const lfsCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lfs`);
-const libraryCacheFolder = `${CloudRunnerState.cacheFolderFull}/lib`;
+const libraryCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lib`);
 await RunCli.RunCli(`tree ${CloudRunnerState.builderPathFull}`);
 CloudRunnerLogger.log(`Starting checks of cache for the Unity project Library and git LFS files`);
-fs.mkdirSync(lfsCacheFolder);
-fs.mkdirSync(libraryCacheFolder);
+if (!fs.existsSync(lfsCacheFolder)) {
+  fs.mkdirSync(lfsCacheFolder);
+}
+if (!fs.existsSync(libraryCacheFolder)) {
+  fs.mkdirSync(libraryCacheFolder);
+}
 CloudRunnerLogger.log(`Library Caching`);
 //if the unity git project has included the library delete it and echo a warning
 if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
@@ -60,13 +65,12 @@ export class DownloadRepository {
 if (fs.existsSync(latestLibraryCacheFile)) {
 CloudRunnerLogger.log(`Library cache exists`);
 await RunCli.RunCli(`
-unzip -q "${libraryCacheFolder}/${latestLibraryCacheFile}" -d "$projectPathFull"
+unzip -q "${path.join(libraryCacheFolder, latestLibraryCacheFile)}" -d "$projectPathFull"
 tree "${CloudRunnerState.libraryFolderFull}"
 `);
 }
 CloudRunnerLogger.log(` `);
 CloudRunnerLogger.log(`LFS Caching`);
-CloudRunnerLogger.log(`Checking largest LFS file exists (${lfsCacheFolder}/${LFS_ASSETS_HASH}.zip)`);
 process.chdir(lfsCacheFolder);
 let latestLFSCacheFile;
 if (fs.existsSync(`${LFS_ASSETS_HASH}.zip`)) {
@@ -80,11 +84,12 @@ export class DownloadRepository {
 fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
 CloudRunnerLogger.log(`LFS cache exists from build $latestLFSCacheFile from $branch`);
 await RunCli.RunCli(
-`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${CloudRunnerState.repoPathFull}/.git"`,
+`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path.join(CloudRunnerState.repoPathFull, `.git`)}"`,
 );
 await RunCli.RunCli(`ls -lh "${CloudRunnerState.lfsDirectory}"`);
 CloudRunnerLogger.log(`git LFS folder, (should not contain $latestLFSCacheFile)`);
 }
 
 await RunCli.RunCli(`
 echo ' '
 echo "LFS cache for $branch"
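A note on the existsSync guard introduced in both files: Node's fs.mkdirSync also accepts a { recursive: true } option, which creates missing parent directories and does not throw when the target already exists, so the two guarded calls could be collapsed. A sketch of that alternative, under the assumption that silently reusing an existing cache folder is acceptable; the ensureCacheFolder helper name is hypothetical and not part of this commit:

import fs from 'fs';

// Create the folder (and any missing parents) if needed; no error if it already exists.
function ensureCacheFolder(folder: string): void {
  fs.mkdirSync(folder, { recursive: true });
}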