Fix lib caching
parent bb3065f8b5
commit 6037bf9007
@@ -459,41 +459,51 @@ const remote_client_system_1 = __webpack_require__(91269);
 class Caching {
 static PushToCache(cacheFolder, destinationFolder, artifactName) {
 return __awaiter(this, void 0, void 0, function* () {
-process.chdir(`${destinationFolder}/..`);
-yield remote_client_system_1.RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
-console_1.assert(fs_1.default.existsSync(`${artifactName}.zip`));
-yield remote_client_system_1.RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
-cloud_runner_logger_1.default.logCli(`copied ${artifactName} to ${cacheFolder}`);
+try {
+process.chdir(`${destinationFolder}/..`);
+yield remote_client_system_1.RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
+console_1.assert(fs_1.default.existsSync(`${artifactName}.zip`));
+yield remote_client_system_1.RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
+cloud_runner_logger_1.default.logCli(`copied ${artifactName} to ${cacheFolder}`);
+}
+catch (error) {
+throw error;
+}
 });
 }
 static PullFromCache(cacheFolder, destinationFolder, specificHashMatch = ``) {
 return __awaiter(this, void 0, void 0, function* () {
-if (!fs_1.default.existsSync(cacheFolder)) {
-fs_1.default.mkdirSync(cacheFolder);
-}
-if (!fs_1.default.existsSync(destinationFolder)) {
-fs_1.default.mkdirSync(destinationFolder);
-}
-const latest = yield (yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
-process.chdir(cacheFolder);
-let cacheSelection;
-if (specificHashMatch !== ``) {
-cacheSelection = fs_1.default.existsSync(specificHashMatch) ? specificHashMatch : latest;
-}
-else {
-cacheSelection = latest;
-}
-if (fs_1.default.existsSync(cacheSelection)) {
-cloud_runner_logger_1.default.logCli(`Library cache exists`);
-yield remote_client_system_1.RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
-console_1.assert(fs_1.default.existsSync(destinationFolder));
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${destinationFolder}`);
-}
-else {
-cloud_runner_logger_1.default.logCli(`Library cache doesn't exist`);
-if (cacheSelection !== ``) {
-throw new Error(`Failed to get library cache, but cache hit was found: ${cacheSelection}`);
+try {
+if (!fs_1.default.existsSync(cacheFolder)) {
+yield remote_client_system_1.RemoteClientSystem.Run(`mkdir -p ${cacheFolder}`);
+}
+if (!fs_1.default.existsSync(destinationFolder)) {
+yield remote_client_system_1.RemoteClientSystem.Run(`mkdir -p ${destinationFolder}`);
+}
+const latest = yield (yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
+process.chdir(cacheFolder);
+let cacheSelection;
+if (specificHashMatch !== ``) {
+cacheSelection = fs_1.default.existsSync(specificHashMatch) ? specificHashMatch : latest;
+}
+else {
+cacheSelection = latest;
+}
+if (fs_1.default.existsSync(cacheSelection)) {
+cloud_runner_logger_1.default.logCli(`Library cache exists`);
+yield remote_client_system_1.RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
+console_1.assert(fs_1.default.existsSync(destinationFolder));
+yield remote_client_system_1.RemoteClientSystem.Run(`tree ${destinationFolder}`);
+}
+else {
+cloud_runner_logger_1.default.logCli(`Library cache doesn't exist`);
+if (cacheSelection !== ``) {
+throw new Error(`Failed to get library cache, but cache hit was found: ${cacheSelection}`);
 }
 }
+}
+catch (error) {
+throw error;
+}
 });
 }
@@ -602,9 +612,14 @@ class LFSHashing {
 }
 static createLFSHashFiles() {
 return __awaiter(this, void 0, void 0, function* () {
-yield remote_client_system_1.RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-yield remote_client_system_1.RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
-return fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
+try {
+yield remote_client_system_1.RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+yield remote_client_system_1.RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+return fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
+}
+catch (error) {
+throw error;
+}
 });
 }
 }
@@ -690,6 +705,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.SetupRemoteRepository = void 0;
+const console_1 = __webpack_require__(57082);
 const fs_1 = __importDefault(__webpack_require__(35747));
 const path_1 = __importDefault(__webpack_require__(85622));
 const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
@@ -704,13 +720,17 @@ class SetupRemoteRepository {
 fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.buildPathFull);
 fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.repoPathFull);
 yield SetupRemoteRepository.cloneRepoWithoutLFSFiles();
-yield SetupRemoteRepository.createLFSHashFiles();
+SetupRemoteRepository.LFS_ASSETS_HASH = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
+cloud_runner_logger_1.default.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
 yield lfs_hashing_1.LFSHashing.printLFSHashState();
 const lfsCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lfs`);
 const libraryCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lib`);
 yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
-yield SetupRemoteRepository.libraryCaching(libraryCacheFolder);
-yield SetupRemoteRepository.lfsCaching(lfsCacheFolder);
+cloud_runner_logger_1.default.logCli(`Library Caching`);
+console_1.assert(fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull), `!Warning!: The Unity library was included in the git repository`);
+yield caching_1.Caching.PullFromCache(libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
+cloud_runner_logger_1.default.logCli(`LFS Caching`);
+yield caching_1.Caching.PullFromCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
 yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
 yield caching_1.Caching.printCacheState(lfsCacheFolder, libraryCacheFolder);
 yield SetupRemoteRepository.pullLatestLFS();
@@ -725,36 +745,6 @@ class SetupRemoteRepository {
 }
 });
 }
-static pullLatestLFS() {
-return __awaiter(this, void 0, void 0, function* () {
-process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
-yield remote_client_system_1.RemoteClientSystem.Run(`git lfs pull`);
-cloud_runner_logger_1.default.logCli(`pulled latest LFS files`);
-});
-}
-static lfsCaching(lfsCacheFolder) {
-return __awaiter(this, void 0, void 0, function* () {
-cloud_runner_logger_1.default.logCli(`LFS Caching`);
-yield caching_1.Caching.PullFromCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
-});
-}
-static libraryCaching(libraryCacheFolder) {
-return __awaiter(this, void 0, void 0, function* () {
-cloud_runner_logger_1.default.logCli(`Library Caching`);
-//if the unity git project has included the library delete it and echo a warning
-if (fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
-fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull, { recursive: true });
-cloud_runner_logger_1.default.logCli(`!Warning!: The Unity library was included in the git repository`);
-}
-yield caching_1.Caching.PullFromCache(libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
-});
-}
-static createLFSHashFiles() {
-return __awaiter(this, void 0, void 0, function* () {
-SetupRemoteRepository.LFS_ASSETS_HASH = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
-cloud_runner_logger_1.default.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
-});
-}
 static cloneRepoWithoutLFSFiles() {
 return __awaiter(this, void 0, void 0, function* () {
 try {
@@ -776,6 +766,13 @@ class SetupRemoteRepository {
 }
 });
 }
+static pullLatestLFS() {
+return __awaiter(this, void 0, void 0, function* () {
+process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
+yield remote_client_system_1.RemoteClientSystem.Run(`git lfs pull`);
+cloud_runner_logger_1.default.logCli(`pulled latest LFS files`);
+});
+}
 }
 exports.SetupRemoteRepository = SetupRemoteRepository;

File diff suppressed because one or more lines are too long
@@ -7,44 +7,52 @@ import { RemoteClientSystem } from './remote-client-system';

 export class Caching {
 public static async PushToCache(cacheFolder: string, destinationFolder: string, artifactName: string) {
-process.chdir(`${destinationFolder}/..`);
-await RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
-assert(fs.existsSync(`${artifactName}.zip`));
-await RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
-CloudRunnerLogger.logCli(`copied ${artifactName} to ${cacheFolder}`);
+try {
+process.chdir(`${destinationFolder}/..`);
+await RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
+assert(fs.existsSync(`${artifactName}.zip`));
+await RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
+CloudRunnerLogger.logCli(`copied ${artifactName} to ${cacheFolder}`);
+} catch (error) {
+throw error;
+}
 }
 public static async PullFromCache(cacheFolder: string, destinationFolder: string, specificHashMatch: string = ``) {
-if (!fs.existsSync(cacheFolder)) {
-fs.mkdirSync(cacheFolder);
-}
-
-if (!fs.existsSync(destinationFolder)) {
-fs.mkdirSync(destinationFolder);
-}
-
-const latest = await (await RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(
-`\n`,
-``,
-);
-
-process.chdir(cacheFolder);
-let cacheSelection;
-
-if (specificHashMatch !== ``) {
-cacheSelection = fs.existsSync(specificHashMatch) ? specificHashMatch : latest;
-} else {
-cacheSelection = latest;
-}
-if (fs.existsSync(cacheSelection)) {
-CloudRunnerLogger.logCli(`Library cache exists`);
-await RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
-assert(fs.existsSync(destinationFolder));
-await RemoteClientSystem.Run(`tree ${destinationFolder}`);
-} else {
-CloudRunnerLogger.logCli(`Library cache doesn't exist`);
-if (cacheSelection !== ``) {
-throw new Error(`Failed to get library cache, but cache hit was found: ${cacheSelection}`);
+try {
+if (!fs.existsSync(cacheFolder)) {
+await RemoteClientSystem.Run(`mkdir -p ${cacheFolder}`);
+}
+
+if (!fs.existsSync(destinationFolder)) {
+await RemoteClientSystem.Run(`mkdir -p ${destinationFolder}`);
+}
+
+const latest = await (await RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(
+`\n`,
+``,
+);
+
+process.chdir(cacheFolder);
+let cacheSelection;
+
+if (specificHashMatch !== ``) {
+cacheSelection = fs.existsSync(specificHashMatch) ? specificHashMatch : latest;
+} else {
+cacheSelection = latest;
+}
+if (fs.existsSync(cacheSelection)) {
+CloudRunnerLogger.logCli(`Library cache exists`);
+await RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
+assert(fs.existsSync(destinationFolder));
+await RemoteClientSystem.Run(`tree ${destinationFolder}`);
+} else {
+CloudRunnerLogger.logCli(`Library cache doesn't exist`);
+if (cacheSelection !== ``) {
+throw new Error(`Failed to get library cache, but cache hit was found: ${cacheSelection}`);
 }
 }
+} catch (error) {
+throw error;
+}
 }

@@ -20,8 +20,12 @@ export class LFSHashing {
 }

 public static async createLFSHashFiles() {
-await RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-await RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
-return fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
+try {
+await RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+await RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+return fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
+} catch (error) {
+throw error;
+}
 }
 }
@@ -1,3 +1,4 @@
+import { assert } from 'console';
 import fs from 'fs';
 import path from 'path';
 import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
@@ -14,13 +15,24 @@ export class SetupRemoteRepository {
 fs.mkdirSync(CloudRunnerState.repoPathFull);
 await SetupRemoteRepository.cloneRepoWithoutLFSFiles();

-await SetupRemoteRepository.createLFSHashFiles();
+SetupRemoteRepository.LFS_ASSETS_HASH = await LFSHashing.createLFSHashFiles();
+CloudRunnerLogger.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
 await LFSHashing.printLFSHashState();
 const lfsCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lfs`);
 const libraryCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lib`);
 await RemoteClientSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
-await SetupRemoteRepository.libraryCaching(libraryCacheFolder);
-await SetupRemoteRepository.lfsCaching(lfsCacheFolder);
+CloudRunnerLogger.logCli(`Library Caching`);
+assert(
+fs.existsSync(CloudRunnerState.libraryFolderFull),
+`!Warning!: The Unity library was included in the git repository`,
+);
+await Caching.PullFromCache(libraryCacheFolder, CloudRunnerState.libraryFolderFull);
+CloudRunnerLogger.logCli(`LFS Caching`);
+await Caching.PullFromCache(
+lfsCacheFolder,
+CloudRunnerState.lfsDirectory,
+`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
+);
 await RemoteClientSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
 await Caching.printCacheState(lfsCacheFolder, libraryCacheFolder);
 await SetupRemoteRepository.pullLatestLFS();
@@ -34,36 +46,6 @@ export class SetupRemoteRepository {
 }
 }

-private static async pullLatestLFS() {
-process.chdir(CloudRunnerState.repoPathFull);
-await RemoteClientSystem.Run(`git lfs pull`);
-CloudRunnerLogger.logCli(`pulled latest LFS files`);
-}
-
-private static async lfsCaching(lfsCacheFolder: string) {
-CloudRunnerLogger.logCli(`LFS Caching`);
-await Caching.PullFromCache(
-lfsCacheFolder,
-CloudRunnerState.lfsDirectory,
-`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
-);
-}
-
-private static async libraryCaching(libraryCacheFolder: string) {
-CloudRunnerLogger.logCli(`Library Caching`);
-//if the unity git project has included the library delete it and echo a warning
-if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
-fs.rmdirSync(CloudRunnerState.libraryFolderFull, { recursive: true });
-CloudRunnerLogger.logCli(`!Warning!: The Unity library was included in the git repository`);
-}
-await Caching.PullFromCache(libraryCacheFolder, CloudRunnerState.libraryFolderFull);
-}
-
-private static async createLFSHashFiles() {
-SetupRemoteRepository.LFS_ASSETS_HASH = await LFSHashing.createLFSHashFiles();
-CloudRunnerLogger.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
-}
-
 private static async cloneRepoWithoutLFSFiles() {
 try {
 CloudRunnerLogger.logCli(`Initializing source repository for cloning with caching of LFS files`);
@@ -82,4 +64,10 @@
 throw error;
 }
 }
+
+private static async pullLatestLFS() {
+process.chdir(CloudRunnerState.repoPathFull);
+await RemoteClientSystem.Run(`git lfs pull`);
+CloudRunnerLogger.logCli(`pulled latest LFS files`);
+}
 }