commit 5d4ffed37e (parent 9193b8fd15): fix
@@ -469,22 +469,36 @@ const cloud_runner_state_1 = __webpack_require__(70912);
 const cloud_runner_agent_system_1 = __webpack_require__(87685);
 const remote_client_logger_1 = __webpack_require__(68972);
 class Caching {
-static PushToCache(cacheFolder, destinationFolder, artifactName) {
+static PushToCache(cacheFolder, sourceFolder, cacheKey) {
 return __awaiter(this, void 0, void 0, function* () {
 try {
-process.chdir(`${destinationFolder}/..`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
-console_1.assert(fs_1.default.existsSync(`${artifactName}.zip`));
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
-remote_client_logger_1.RemoteClientLogger.log(`copied ${artifactName} to ${cacheFolder}`);
+if (__1.Input.cloudRunnerTests) {
+yield Caching.printFullCacheHierarchySize();
+}
+process.chdir(`${sourceFolder}/..`);
+if (__1.Input.cloudRunnerTests) {
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${sourceFolder}`);
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
+}
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`zip -r "${cacheKey}.zip" "${path_1.default.dirname(sourceFolder)}"`);
+console_1.assert(fs_1.default.existsSync(`${cacheKey}.zip`));
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`cp "${cacheKey}.zip" "${path_1.default.join(cacheFolder, `${cacheKey}.zip`)}"`);
+remote_client_logger_1.RemoteClientLogger.log(`copied ${cacheKey} to ${cacheFolder}`);
+if (__1.Input.cloudRunnerTests) {
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
+}
+if (__1.Input.cloudRunnerTests) {
+yield Caching.printFullCacheHierarchySize();
+}
 }
 catch (error) {
 throw error;
 }
 });
 }
-static PullFromCache(cacheFolder, destinationFolder, specificHashMatch = ``) {
+static PullFromCache(cacheFolder, destinationFolder, cacheKey = ``) {
 return __awaiter(this, void 0, void 0, function* () {
+remote_client_logger_1.RemoteClientLogger.log(`Caching for ${path_1.default.dirname(destinationFolder)}`);
 try {
 if (!fs_1.default.existsSync(cacheFolder)) {
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);
@@ -495,8 +509,11 @@ class Caching {
 const latest = yield (yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(/\n/g, ``);
 process.chdir(cacheFolder);
 let cacheSelection;
-if (specificHashMatch !== ``) {
-cacheSelection = fs_1.default.existsSync(specificHashMatch) ? specificHashMatch : latest;
+if (__1.Input.cloudRunnerTests) {
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
+}
+if (cacheKey !== ``) {
+cacheSelection = fs_1.default.existsSync(cacheKey) ? cacheKey : latest;
 }
 else {
 cacheSelection = latest;
@@ -508,9 +525,12 @@ class Caching {
 remote_client_logger_1.RemoteClientLogger.log(`cache item exists`);
 console_1.assert(fs_1.default.existsSync(destinationFolder));
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}/.."`);
+if (__1.Input.cloudRunnerTests) {
+yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
+}
 }
 else {
-remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${specificHashMatch} doesn't exist ${destinationFolder}`);
+remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
 if (cacheSelection !== ``) {
 throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
 }
@@ -527,20 +547,20 @@ class Caching {
 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.cacheFolder, { recursive: true });
 }
 }
-static printCacheState(lfsCacheFolder, libraryCacheFolder) {
+static printFullCacheHierarchySize() {
 return __awaiter(this, void 0, void 0, function* () {
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`echo ' '
 echo "LFS cache for $branch"
-du -sch "${lfsCacheFolder}/"
+du -sch "${cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull}/"
 echo '**'
 echo "Library cache for $branch"
-du -sch "${libraryCacheFolder}/"
+du -sch "${cloud_runner_state_1.CloudRunnerState.libraryCacheFolderFull}/"
 echo '**'
 echo "Branch: $branch"
 du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
 echo '**'
 echo 'Full cache'
-du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/"
+du -sch "${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}/.."
 echo ' '`);
 });
 }
@@ -665,27 +685,26 @@ const path_1 = __importDefault(__webpack_require__(85622));
 const cloud_runner_state_1 = __webpack_require__(70912);
 const cloud_runner_agent_system_1 = __webpack_require__(87685);
 const fs_1 = __importDefault(__webpack_require__(35747));
+const console_1 = __webpack_require__(57082);
+const __1 = __webpack_require__(41359);
+const remote_client_logger_1 = __webpack_require__(68972);
 class LFSHashing {
-static printLFSHashState() {
-return __awaiter(this, void 0, void 0, function* () {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`echo ' '
-echo 'Contents of .lfs-assets-guid file:'
-cat .lfs-assets-guid
-echo ' '
-echo 'Contents of .lfs-assets-guid-sum file:'
-cat .lfs-assets-guid-sum
-echo ' '
-echo 'Source repository initialized'
-ls ${cloud_runner_state_1.CloudRunnerState.projectPathFull}
-echo ' '`);
-});
-}
 static createLFSHashFiles() {
 return __awaiter(this, void 0, void 0, function* () {
 try {
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
-return fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
+console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid-sum`));
+console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid`));
+const lfsHashes = {
+lfsGuid: fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8'),
+lfsGuidSum: fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8'),
+};
+if (__1.Input.cloudRunnerTests) {
+remote_client_logger_1.RemoteClientLogger.log(lfsHashes.lfsGuid);
+remote_client_logger_1.RemoteClientLogger.log(lfsHashes.lfsGuidSum);
+}
+return lfsHashes;
 }
 catch (error) {
 throw error;
@@ -752,7 +771,6 @@ const cloud_runner_state_1 = __webpack_require__(70912);
 const caching_1 = __webpack_require__(35010);
 const lfs_hashing_1 = __webpack_require__(47011);
 const cloud_runner_agent_system_1 = __webpack_require__(87685);
-const path_1 = __importDefault(__webpack_require__(85622));
 const __1 = __webpack_require__(41359);
 const remote_client_logger_1 = __webpack_require__(68972);
 class SetupCloudRunnerRepository {
@@ -762,33 +780,14 @@ class SetupCloudRunnerRepository {
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}`);
 yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
 yield SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
-SetupCloudRunnerRepository.LFS_ASSETS_HASH = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
-if (__1.Input.cloudRunnerTests) {
-remote_client_logger_1.RemoteClientLogger.log(SetupCloudRunnerRepository.LFS_ASSETS_HASH);
-}
-yield lfs_hashing_1.LFSHashing.printLFSHashState();
-remote_client_logger_1.RemoteClientLogger.log(`Library Caching`);
+const lfsHashes = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
 if (!fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
 remote_client_logger_1.RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
 }
-remote_client_logger_1.RemoteClientLogger.log(`LFS Caching`);
-if (__1.Input.cloudRunnerTests) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.lfsDirectory, '..')}`);
-}
-yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${SetupCloudRunnerRepository.LFS_ASSETS_HASH}.zip`);
-if (__1.Input.cloudRunnerTests) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.lfsDirectory, '..')}`);
-}
-yield caching_1.Caching.printCacheState(cloud_runner_state_1.CloudRunnerState.lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryCacheFolder);
+yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${lfsHashes.lfsGuid}.zip`);
 yield SetupCloudRunnerRepository.pullLatestLFS();
-yield caching_1.Caching.PushToCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, SetupCloudRunnerRepository.LFS_ASSETS_HASH);
-if (__1.Input.cloudRunnerTests) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.libraryCacheFolder, '..')}`);
-}
-yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
-if (__1.Input.cloudRunnerTests) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${path_1.default.join(cloud_runner_state_1.CloudRunnerState.libraryCacheFolder, '..')}`);
-}
+yield caching_1.Caching.PushToCache(cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull, cloud_runner_state_1.CloudRunnerState.lfsDirectory, lfsHashes.lfsGuid);
+yield caching_1.Caching.PullFromCache(cloud_runner_state_1.CloudRunnerState.libraryCacheFolderFull, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
 caching_1.Caching.handleCachePurging();
 }
 catch (error) {
@@ -2701,10 +2700,10 @@ class CloudRunnerState {
 static get purgeRemoteCaching() {
 return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
 }
-static get lfsCacheFolder() {
+static get lfsCacheFolderFull() {
 return path_1.default.join(CloudRunnerState.cacheFolderFull, `lfs`);
 }
-static get libraryCacheFolder() {
+static get libraryCacheFolderFull() {
 return path_1.default.join(CloudRunnerState.cacheFolderFull, `lib`);
 }
 static get unityBuilderRepoUrl() {
File diff suppressed because one or more lines are too long
@@ -7,18 +7,34 @@ import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
 import { RemoteClientLogger } from './remote-client-logger';
 
 export class Caching {
-public static async PushToCache(cacheFolder: string, destinationFolder: string, artifactName: string) {
+public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheKey: string) {
 try {
-process.chdir(`${destinationFolder}/..`);
-await CloudRunnerAgentSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
-assert(fs.existsSync(`${artifactName}.zip`));
-await CloudRunnerAgentSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
-RemoteClientLogger.log(`copied ${artifactName} to ${cacheFolder}`);
+if (Input.cloudRunnerTests) {
+await Caching.printFullCacheHierarchySize();
+}
+process.chdir(`${sourceFolder}/..`);
+
+if (Input.cloudRunnerTests) {
+await CloudRunnerAgentSystem.Run(`tree ${sourceFolder}`);
+await CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
+}
+await CloudRunnerAgentSystem.Run(`zip -r "${cacheKey}.zip" "${path.dirname(sourceFolder)}"`);
+assert(fs.existsSync(`${cacheKey}.zip`));
+await CloudRunnerAgentSystem.Run(`cp "${cacheKey}.zip" "${path.join(cacheFolder, `${cacheKey}.zip`)}"`);
+RemoteClientLogger.log(`copied ${cacheKey} to ${cacheFolder}`);
+
+if (Input.cloudRunnerTests) {
+await CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
+}
+if (Input.cloudRunnerTests) {
+await Caching.printFullCacheHierarchySize();
+}
 } catch (error) {
 throw error;
 }
 }
-public static async PullFromCache(cacheFolder: string, destinationFolder: string, specificHashMatch: string = ``) {
+public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheKey: string = ``) {
+RemoteClientLogger.log(`Caching for ${path.dirname(destinationFolder)}`);
 try {
 if (!fs.existsSync(cacheFolder)) {
 await CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);
@@ -36,8 +52,12 @@ export class Caching {
 process.chdir(cacheFolder);
 let cacheSelection;
 
-if (specificHashMatch !== ``) {
-cacheSelection = fs.existsSync(specificHashMatch) ? specificHashMatch : latest;
+if (Input.cloudRunnerTests) {
+await CloudRunnerAgentSystem.Run(`tree ${cacheFolder}`);
+}
+
+if (cacheKey !== ``) {
+cacheSelection = fs.existsSync(cacheKey) ? cacheKey : latest;
 } else {
 cacheSelection = latest;
 }
@@ -48,8 +68,11 @@ export class Caching {
 RemoteClientLogger.log(`cache item exists`);
 assert(fs.existsSync(destinationFolder));
 await CloudRunnerAgentSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}/.."`);
+if (Input.cloudRunnerTests) {
+await CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
+}
 } else {
-RemoteClientLogger.logWarning(`cache item ${specificHashMatch} doesn't exist ${destinationFolder}`);
+RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
 if (cacheSelection !== ``) {
 throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
 }
@@ -66,20 +89,20 @@ export class Caching {
 }
 }
 
-public static async printCacheState(lfsCacheFolder: string, libraryCacheFolder: string) {
+public static async printFullCacheHierarchySize() {
 await CloudRunnerAgentSystem.Run(
 `echo ' '
 echo "LFS cache for $branch"
-du -sch "${lfsCacheFolder}/"
+du -sch "${CloudRunnerState.lfsCacheFolderFull}/"
 echo '**'
 echo "Library cache for $branch"
-du -sch "${libraryCacheFolder}/"
+du -sch "${CloudRunnerState.libraryCacheFolderFull}/"
 echo '**'
 echo "Branch: $branch"
 du -sch "${CloudRunnerState.cacheFolderFull}/"
 echo '**'
 echo 'Full cache'
-du -sch "${CloudRunnerState.cacheFolderFull}/"
+du -sch "${CloudRunnerState.cacheFolderFull}/.."
 echo ' '`,
 );
 }
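For context, a minimal sketch of how the renamed caching API above might be driven by a caller. The folder paths and the key value are placeholders for illustration, not values from this commit; the import path follows the relative imports shown in the diff.

import { Caching } from './caching';

// Push a folder into the cache under a key, then restore it from the same cache folder.
export async function cacheRoundTrip() {
  const cacheFolder = `/data/cache/lfs`; // placeholder
  const sourceFolder = `/data/repo/lfs`; // placeholder
  const cacheKey = `example-lfs-guid`; // placeholder

  // Per the diff above: zips the parent of `sourceFolder` as `${cacheKey}.zip` and copies it into `cacheFolder`.
  await Caching.PushToCache(cacheFolder, sourceFolder, cacheKey);

  // Per the diff above: restores `${cacheKey}.zip` when it exists, otherwise the newest zip found in `cacheFolder`.
  await Caching.PullFromCache(cacheFolder, sourceFolder, `${cacheKey}.zip`);
}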
@@ -2,28 +2,26 @@ import path from 'path';
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
 import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
 import fs from 'fs';
+import { assert } from 'console';
+import { Input } from '../..';
+import { RemoteClientLogger } from './remote-client-logger';
 
 export class LFSHashing {
-public static async printLFSHashState() {
-await CloudRunnerAgentSystem.Run(
-`echo ' '
-echo 'Contents of .lfs-assets-guid file:'
-cat .lfs-assets-guid
-echo ' '
-echo 'Contents of .lfs-assets-guid-sum file:'
-cat .lfs-assets-guid-sum
-echo ' '
-echo 'Source repository initialized'
-ls ${CloudRunnerState.projectPathFull}
-echo ' '`,
-);
-}
-
 public static async createLFSHashFiles() {
 try {
 await CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
 await CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
-return fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
+assert(fs.existsSync(`.lfs-assets-guid-sum`));
+assert(fs.existsSync(`.lfs-assets-guid`));
+const lfsHashes = {
+lfsGuid: fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8'),
+lfsGuidSum: fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8'),
+};
+if (Input.cloudRunnerTests) {
+RemoteClientLogger.log(lfsHashes.lfsGuid);
+RemoteClientLogger.log(lfsHashes.lfsGuidSum);
+}
+return lfsHashes;
 } catch (error) {
 throw error;
 }
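Read straight off the diff above, createLFSHashFiles now resolves to an object instead of a single string. A sketch of that shape and of how the repository setup below turns it into a cache key; the interface and helper names are illustrative only and not part of the commit.

// Illustrative type for the value returned by LFSHashing.createLFSHashFiles();
// both fields are file contents read from under CloudRunnerState.repoPathFull.
export interface LfsHashes {
  lfsGuid: string; // contents of .lfs-assets-guid: sorted `git lfs ls-files` object ids
  lfsGuidSum: string; // contents of .lfs-assets-guid-sum: md5sum output for that file
}

// Hypothetical helper mirroring how the repository setup below builds the LFS cache key.
export function lfsCacheKey(hashes: LfsHashes): string {
  return `${hashes.lfsGuid}.zip`;
}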
@@ -3,7 +3,6 @@ import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
 import { Caching } from './caching';
 import { LFSHashing } from './lfs-hashing';
 import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
-import path from 'path';
 import { Input } from '../..';
 import { RemoteClientLogger } from './remote-client-logger';
 
@@ -14,46 +13,18 @@ export class SetupCloudRunnerRepository {
 await CloudRunnerAgentSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
 await CloudRunnerAgentSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
 await SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
-SetupCloudRunnerRepository.LFS_ASSETS_HASH = await LFSHashing.createLFSHashFiles();
-
-if (Input.cloudRunnerTests) {
-RemoteClientLogger.log(SetupCloudRunnerRepository.LFS_ASSETS_HASH);
-}
-await LFSHashing.printLFSHashState();
-RemoteClientLogger.log(`Library Caching`);
+const lfsHashes = await LFSHashing.createLFSHashFiles();
 if (!fs.existsSync(CloudRunnerState.libraryFolderFull)) {
 RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
 }
-RemoteClientLogger.log(`LFS Caching`);
-
-if (Input.cloudRunnerTests) {
-await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.lfsDirectory, '..')}`);
-}
 await Caching.PullFromCache(
-CloudRunnerState.lfsCacheFolder,
+CloudRunnerState.lfsCacheFolderFull,
 CloudRunnerState.lfsDirectory,
-`${SetupCloudRunnerRepository.LFS_ASSETS_HASH}.zip`,
+`${lfsHashes.lfsGuid}.zip`,
 );
-if (Input.cloudRunnerTests) {
-await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.lfsDirectory, '..')}`);
-}
-await Caching.printCacheState(CloudRunnerState.lfsCacheFolder, CloudRunnerState.libraryCacheFolder);
 await SetupCloudRunnerRepository.pullLatestLFS();
-await Caching.PushToCache(
-CloudRunnerState.lfsCacheFolder,
-CloudRunnerState.lfsDirectory,
-SetupCloudRunnerRepository.LFS_ASSETS_HASH,
-);
-
-if (Input.cloudRunnerTests) {
-await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.libraryCacheFolder, '..')}`);
-}
-await Caching.PullFromCache(CloudRunnerState.libraryCacheFolder, CloudRunnerState.libraryFolderFull);
-
-if (Input.cloudRunnerTests) {
-await CloudRunnerAgentSystem.Run(`tree ${path.join(CloudRunnerState.libraryCacheFolder, '..')}`);
-}
+await Caching.PushToCache(CloudRunnerState.lfsCacheFolderFull, CloudRunnerState.lfsDirectory, lfsHashes.lfsGuid);
+await Caching.PullFromCache(CloudRunnerState.libraryCacheFolderFull, CloudRunnerState.libraryFolderFull);
 
 Caching.handleCachePurging();
 } catch (error) {
@@ -55,11 +55,11 @@ export class CloudRunnerState {
 return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
 }
 
-public static get lfsCacheFolder() {
+public static get lfsCacheFolderFull() {
 return path.join(CloudRunnerState.cacheFolderFull, `lfs`);
 }
 
-public static get libraryCacheFolder() {
+public static get libraryCacheFolderFull() {
 return path.join(CloudRunnerState.cacheFolderFull, `lib`);
 }
 
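With the getters renamed above, both cache locations still resolve under the full cache folder; a small sketch of the resulting layout, where the base path is a stand-in value rather than anything shown in this diff.

import path from 'path';

// Stand-in for CloudRunnerState.cacheFolderFull; the real value is not part of this diff.
const cacheFolderFull = `/data/cache/my-branch`;

// Mirrors the two renamed getters.
const lfsCacheFolderFull = path.join(cacheFolderFull, `lfs`);
const libraryCacheFolderFull = path.join(cacheFolderFull, `lib`);

console.log(lfsCacheFolderFull); // /data/cache/my-branch/lfs
console.log(libraryCacheFolderFull); // /data/cache/my-branch/lib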