fix
parent 9193b8fd15
commit 5d4ffed37e
@@ -469,22 +469,36 @@ const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_agent_system_1 = __webpack_require__(87685);
const remote_client_logger_1 = __webpack_require__(68972);
class Caching {
static PushToCache(cacheFolder, destinationFolder, artifactName) {
static PushToCache(cacheFolder, sourceFolder, cacheKey) {
return __awaiter(this, void 0, void 0, function* () {
try {
process.chdir(`${destinationFolder}/..`);
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
console_1.assert(fs_1.default.existsSync(`${artifactName}.zip`));
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
remote_client_logger_1.RemoteClientLogger.log(`copied ${artifactName} to ${cacheFolder}`);
if (__1.Input.cloudRunnerTests) {
yield Caching.printFullCacheHierarchySize();
}
process.chdir(`${sourceFolder}/..`);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${sourceFolder}`);
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
}
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`zip -r "${cacheKey}.zip" "${path_1.default.dirname(sourceFolder)}"`);
console_1.assert(fs_1.default.existsSync(`${cacheKey}.zip`));
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`cp "${cacheKey}.zip" "${path_1.default.join(cacheFolder, `${cacheKey}.zip`)}"`);
remote_client_logger_1.RemoteClientLogger.log(`copied ${cacheKey} to ${cacheFolder}`);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
}
if (__1.Input.cloudRunnerTests) {
yield Caching.printFullCacheHierarchySize();
}
}
catch (error) {
throw error;
}
});
}
static PullFromCache(cacheFolder, destinationFolder, specificHashMatch = ``) {
static PullFromCache(cacheFolder, destinationFolder, cacheKey = ``) {
return __awaiter(this, void 0, void 0, function* () {
remote_client_logger_1.RemoteClientLogger.log(`Caching for ${path_1.default.dirname(destinationFolder)}`);
try {
if (!fs_1.default.existsSync(cacheFolder)) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);

@@ -495,8 +509,11 @@ class Caching {
const latest = yield (yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(/\n/g, ``);
process.chdir(cacheFolder);
let cacheSelection;
if (specificHashMatch !== ``) {
cacheSelection = fs_1.default.existsSync(specificHashMatch) ? specificHashMatch : latest;
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
}
if (cacheKey !== ``) {
cacheSelection = fs_1.default.existsSync(cacheKey) ? cacheKey : latest;
}
else {
cacheSelection = latest;

@@ -508,9 +525,12 @@ class Caching {
remote_client_logger_1.RemoteClientLogger.log(`cache item exists`);
console_1.assert(fs_1.default.existsSync(destinationFolder));
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}/.."`);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
}
}
else {
remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${specificHashMatch} doesn't exist ${destinationFolder}`);
remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
if (cacheSelection !== ``) {
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
}

@@ -527,20 +547,20 @@ class Caching {
fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.cacheFolder, { recursive: true });
}
}
static printCacheState(lfsCacheFolder, libraryCacheFolder) {
static printFullCacheHierarchySize() {
return __awaiter(this, void 0, void 0, function* () {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`echo ' '
echo "LFS cache for $branch"
du -sch "${lfsCacheFolder}/"
du -sch "${cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull}/"
echo '**'
echo "Library cache for $branch"
du -sch "${libraryCacheFolder}/"
du -sch "${cloud_runner_state_1.CloudRunnerState.libraryCacheFolderFull}/"
echo '**'
echo "Branch: $branch"
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
echo '**'
echo 'Full cache'
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/.."
echo ' '`);
});
}

@@ -665,27 +685,26 @@ const path_1 = __importDefault(__webpack_require__(85622));
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_agent_system_1 = __webpack_require__(87685);
const fs_1 = __importDefault(__webpack_require__(35747));
const console_1 = __webpack_require__(57082);
const __1 = __webpack_require__(41359);
const remote_client_logger_1 = __webpack_require__(68972);
class LFSHashing {
static printLFSHashState() {
return __awaiter(this, void 0, void 0, function* () {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`echo ' '
echo 'Contents of .lfs-assets-guid file:'
cat .lfs-assets-guid
echo ' '
echo 'Contents of .lfs-assets-guid-sum file:'
cat .lfs-assets-guid-sum
echo ' '
echo 'Source repository initialized'
ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
echo ' '`);
});
}
static createLFSHashFiles() {
return __awaiter(this, void 0, void 0, function* () {
try {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
return fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid-sum`));
console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid`));
const lfsHashes = {
lfsGuid: fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8'),
lfsGuidSum: fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8'),
};
if (__1.Input.cloudRunnerTests) {
remote_client_logger_1.RemoteClientLogger.log(lfsHashes.lfsGuid);
remote_client_logger_1.RemoteClientLogger.log(lfsHashes.lfsGuidSum);
}
return lfsHashes;
}
catch (error) {
throw error;

@@ -752,7 +771,6 @@ const cloud_runner_state_1 = __webpack_require__(70912);
const caching_1 = __webpack_require__(35010);
const lfs_hashing_1 = __webpack_require__(47011);
const cloud_runner_agent_system_1 = __webpack_require__(87685);
const path_1 = __importDefault(__webpack_require__(85622));
const __1 = __webpack_require__(41359);
const remote_client_logger_1 = __webpack_require__(68972);
class SetupCloudRunnerRepository {

@@ -762,33 +780,14 @@ class SetupCloudRunnerRepository {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}`);
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
yield SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
SetupCloudRunnerRepository.LFS_ASSETS_HASH = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
if (__1.Input.cloudRunnerTests) {
remote_client_logger_1.RemoteClientLogger.log(SetupCloudRunnerRepository.LFS_ASSETS_HASH);
}
yield lfs_hashing_1.LFSHashing.printLFSHashState();
remote_client_logger_1.RemoteClientLogger.log(`Library Caching`);
const lfsHashes = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
if (!fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
remote_client_logger_1.RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
}
remote_client_logger_1.RemoteClientLogger.log(`LFS Caching`);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.lfsDirectory, '..')}`);
}
yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${SetupCloudRunnerRepository.LFS_ASSETS_HASH}.zip`);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.lfsDirectory, '..')}`);
}
yield caching_1.Caching.printCacheState(cloud_runner_state_1.CloudRunnerState.lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryCacheFolder);
yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${lfsHashes.lfsGuid}.zip`);
yield SetupCloudRunnerRepository.pullLatestLFS();
yield caching_1.Caching.PushToCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, SetupCloudRunnerRepository.LFS_ASSETS_HASH);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.libraryCacheFolder, '..')}`);
}
yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
if (__1.Input.cloudRunnerTests) {
yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.libraryCacheFolder, '..')}`);
}
yield caching_1.Caching.PushToCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull, cloud_runner_state_1.CloudRunnerState.lfsDirectory, lfsHashes.lfsGuid);
yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.libraryCacheFolderFull, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
caching_1.Caching.handleCachePurging();
}
catch (error) {

@@ -2701,10 +2700,10 @@ class CloudRunnerState {
static get purgeRemoteCaching() {
return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
}
static get lfsCacheFolder() {
static get lfsCacheFolderFull() {
return path_1.default.join(CloudRunnerState.cacheFolderFull, `lfs`);
}
static get libraryCacheFolder() {
static get libraryCacheFolderFull() {
return path_1.default.join(CloudRunnerState.cacheFolderFull, `lib`);
}
static get unityBuilderRepoUrl() {

File diff suppressed because one or more lines are too long
@@ -7,18 +7,34 @@ import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
import { RemoteClientLogger } from './remote-client-logger';

export class Caching {
public static async PushToCache(cacheFolder: string, destinationFolder: string, artifactName: string) {
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheKey: string) {
try {
process.chdir(`${destinationFolder}/..`);
await CloudRunnerAgentSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
assert(fs.existsSync(`${artifactName}.zip`));
await CloudRunnerAgentSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
RemoteClientLogger.log(`copied ${artifactName} to ${cacheFolder}`);
if (Input.cloudRunnerTests) {
await Caching.printFullCacheHierarchySize();
}
process.chdir(`${sourceFolder}/..`);

if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${sourceFolder}`);
await CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
}
await CloudRunnerAgentSystem.Run(`zip -r "${cacheKey}.zip" "${path.dirname(sourceFolder)}"`);
assert(fs.existsSync(`${cacheKey}.zip`));
await CloudRunnerAgentSystem.Run(`cp "${cacheKey}.zip" "${path.join(cacheFolder, `${cacheKey}.zip`)}"`);
RemoteClientLogger.log(`copied ${cacheKey} to ${cacheFolder}`);

if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
}
if (Input.cloudRunnerTests) {
await Caching.printFullCacheHierarchySize();
}
} catch (error) {
throw error;
}
}
public static async PullFromCache(cacheFolder: string, destinationFolder: string, specificHashMatch: string = ``) {
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheKey: string = ``) {
RemoteClientLogger.log(`Caching for ${path.dirname(destinationFolder)}`);
try {
if (!fs.existsSync(cacheFolder)) {
await CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);

@@ -36,8 +52,12 @@ export class Caching {
process.chdir(cacheFolder);
let cacheSelection;

if (specificHashMatch !== ``) {
cacheSelection = fs.existsSync(specificHashMatch) ? specificHashMatch : latest;
if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
}

if (cacheKey !== ``) {
cacheSelection = fs.existsSync(cacheKey) ? cacheKey : latest;
} else {
cacheSelection = latest;
}

@@ -48,8 +68,11 @@ export class Caching {
RemoteClientLogger.log(`cache item exists`);
assert(fs.existsSync(destinationFolder));
await CloudRunnerAgentSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}/.."`);
if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
}
} else {
RemoteClientLogger.logWarning(`cache item ${specificHashMatch} doesn't exist ${destinationFolder}`);
RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
if (cacheSelection !== ``) {
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
}

@@ -66,20 +89,20 @@ export class Caching {
}
}

public static async printCacheState(lfsCacheFolder: string, libraryCacheFolder: string) {
public static async printFullCacheHierarchySize() {
await CloudRunnerAgentSystem.Run(
`echo ' '
echo "LFS cache for $branch"
du -sch "${lfsCacheFolder}/"
du -sch "${CloudRunnerState.lfsCacheFolderFull}/"
echo '**'
echo "Library cache for $branch"
du -sch "${libraryCacheFolder}/"
du -sch "${CloudRunnerState.libraryCacheFolderFull}/"
echo '**'
echo "Branch: $branch"
du -sch "${CloudRunnerState.cacheFolderFull}/"
echo '**'
echo 'Full cache'
du -sch "${CloudRunnerState.cacheFolderFull}/"
du -sch "${CloudRunnerState.cacheFolderFull}/.."
echo ' '`,
);
}

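For orientation: the caching.ts hunks above replace the destinationFolder/artifactName and specificHashMatch parameters with a single cacheKey, so a cache entry is addressed by an explicit key and PullFromCache falls back to the newest zip when that key is absent. A minimal usage sketch under those assumptions (TypeScript; the two folder constants are hypothetical examples, the real call sites appear in the setup-cloud-runner-repository hunks further down):

// Sketch only: illustrates the cacheKey-based API shown in the hunks above.
// The folder paths are made-up examples; in the action they come from CloudRunnerState.
import { Caching } from './caching';

export async function roundTripLfsCache(lfsGuid: string) {
  const cacheFolder = '/data/cache/my-branch/lfs'; // hypothetical
  const lfsFolder = '/data/repo/.git/lfs'; // hypothetical

  // Creates `${lfsGuid}.zip` from the source folder and copies it into cacheFolder.
  await Caching.PushToCache(cacheFolder, lfsFolder, lfsGuid);

  // Looks for `${lfsGuid}.zip` in cacheFolder and unzips it next to the
  // destination folder, falling back to the newest zip when the key is missing.
  await Caching.PullFromCache(cacheFolder, lfsFolder, `${lfsGuid}.zip`);
}
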
@@ -2,28 +2,26 @@ import path from 'path';
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
import fs from 'fs';
import { assert } from 'console';
import { Input } from '../..';
import { RemoteClientLogger } from './remote-client-logger';

export class LFSHashing {
public static async printLFSHashState() {
await CloudRunnerAgentSystem.Run(
`echo ' '
echo 'Contents of .lfs-assets-guid file:'
cat .lfs-assets-guid
echo ' '
echo 'Contents of .lfs-assets-guid-sum file:'
cat .lfs-assets-guid-sum
echo ' '
echo 'Source repository initialized'
ls ${CloudRunnerState.projectPathFull}
echo ' '`,
);
}

public static async createLFSHashFiles() {
try {
await CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
await CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
return fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
assert(fs.existsSync(`.lfs-assets-guid-sum`));
assert(fs.existsSync(`.lfs-assets-guid`));
const lfsHashes = {
lfsGuid: fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8'),
lfsGuidSum: fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8'),
};
if (Input.cloudRunnerTests) {
RemoteClientLogger.log(lfsHashes.lfsGuid);
RemoteClientLogger.log(lfsHashes.lfsGuidSum);
}
return lfsHashes;
} catch (error) {
throw error;
}

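The lfs-hashing.ts hunk above reworks createLFSHashFiles to return an object holding both hash files instead of the raw .lfs-assets-guid string; the guid value is what the repository-setup code below turns into the LFS cache key. A small sketch of consuming that return value (shape taken from the hunk; the helper function name is hypothetical):

// Sketch only: consumes the object returned by the reworked createLFSHashFiles.
import { LFSHashing } from './lfs-hashing';

export async function lfsCacheKey(): Promise<string> {
  const lfsHashes = await LFSHashing.createLFSHashFiles();
  // lfsHashes.lfsGuid    -> contents of .lfs-assets-guid (sorted list of LFS object ids)
  // lfsHashes.lfsGuidSum -> contents of .lfs-assets-guid-sum (md5sum output)
  return `${lfsHashes.lfsGuid}.zip`; // key later passed to Caching.PullFromCache
}
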
@@ -3,7 +3,6 @@ import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
import { Caching } from './caching';
import { LFSHashing } from './lfs-hashing';
import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
import path from 'path';
import { Input } from '../..';
import { RemoteClientLogger } from './remote-client-logger';

@@ -14,46 +13,18 @@ export class SetupCloudRunnerRepository {
await CloudRunnerAgentSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
await CloudRunnerAgentSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
await SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();

SetupCloudRunnerRepository.LFS_ASSETS_HASH = await LFSHashing.createLFSHashFiles();

if (Input.cloudRunnerTests) {
RemoteClientLogger.log(SetupCloudRunnerRepository.LFS_ASSETS_HASH);
}
await LFSHashing.printLFSHashState();
RemoteClientLogger.log(`Library Caching`);
const lfsHashes = await LFSHashing.createLFSHashFiles();
if (!fs.existsSync(CloudRunnerState.libraryFolderFull)) {
RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
}
RemoteClientLogger.log(`LFS Caching`);

if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.lfsDirectory, '..')}`);
}
await Caching.PullFromCache(
CloudRunnerState.lfsCacheFolder,
CloudRunnerState.lfsCacheFolderFull,
CloudRunnerState.lfsDirectory,
`${SetupCloudRunnerRepository.LFS_ASSETS_HASH}.zip`,
`${lfsHashes.lfsGuid}.zip`,
);
if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.lfsDirectory, '..')}`);
}
await Caching.printCacheState(CloudRunnerState.lfsCacheFolder, CloudRunnerState.libraryCacheFolder);
await SetupCloudRunnerRepository.pullLatestLFS();
await Caching.PushToCache(
CloudRunnerState.lfsCacheFolder,
CloudRunnerState.lfsDirectory,
SetupCloudRunnerRepository.LFS_ASSETS_HASH,
);

if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.libraryCacheFolder, '..')}`);
}
await Caching.PullFromCache(CloudRunnerState.libraryCacheFolder, CloudRunnerState.libraryFolderFull);

if (Input.cloudRunnerTests) {
await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.libraryCacheFolder, '..')}`);
}
await Caching.PushToCache(CloudRunnerState.lfsCacheFolderFull, CloudRunnerState.lfsDirectory, lfsHashes.lfsGuid);
await Caching.PullFromCache(CloudRunnerState.libraryCacheFolderFull, CloudRunnerState.libraryFolderFull);

Caching.handleCachePurging();
} catch (error) {

@@ -55,11 +55,11 @@ export class CloudRunnerState {
return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
}

public static get lfsCacheFolder() {
public static get lfsCacheFolderFull() {
return path.join(CloudRunnerState.cacheFolderFull, `lfs`);
}

public static get libraryCacheFolder() {
public static get libraryCacheFolderFull() {
return path.join(CloudRunnerState.cacheFolderFull, `lib`);
}
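
The renamed CloudRunnerState getters above keep the same layout, resolving to subfolders of cacheFolderFull. A tiny illustration of the resulting paths (the cacheFolderFull value is a hypothetical example):

// Sketch only: how the renamed getters compose, per the hunk above.
import path from 'path';

const cacheFolderFull = '/data/cache/feature-branch'; // hypothetical CloudRunnerState.cacheFolderFull
const lfsCacheFolderFull = path.join(cacheFolderFull, 'lfs'); // => /data/cache/feature-branch/lfs
const libraryCacheFolderFull = path.join(cacheFolderFull, 'lib'); // => /data/cache/feature-branch/lib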