Fix lib caching
parent e82f003015
commit 1416d19c78
@@ -429,6 +429,77 @@ class CLI {
 exports.CLI = CLI;
 
 
+/***/ }),
+
+/***/ 35010:
+/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Caching = void 0;
+const console_1 = __webpack_require__(57082);
+const fs_1 = __importDefault(__webpack_require__(35747));
+const path_1 = __importDefault(__webpack_require__(85622));
+const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
+const remote_client_system_1 = __webpack_require__(91269);
+class Caching {
+    static PushToCache(cacheFolder, destinationFolder, artifactName) {
+        return __awaiter(this, void 0, void 0, function* () {
+            process.chdir(`${destinationFolder}/..`);
+            yield remote_client_system_1.RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
+            console_1.assert(fs_1.default.existsSync(`${artifactName}.zip`));
+            yield remote_client_system_1.RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
+            cloud_runner_logger_1.default.logCli(`copied ${artifactName} to ${cacheFolder}`);
+        });
+    }
+    static PullFromCache(cacheFolder, destinationFolder, specificHashMatch = ``) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!fs_1.default.existsSync(cacheFolder)) {
+                fs_1.default.mkdirSync(cacheFolder);
+            }
+            if (!fs_1.default.existsSync(destinationFolder)) {
+                fs_1.default.mkdirSync(destinationFolder);
+            }
+            const latest = yield (yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
+            process.chdir(cacheFolder);
+            let cacheSelection;
+            if (specificHashMatch !== ``) {
+                cacheSelection = fs_1.default.existsSync(specificHashMatch) ? specificHashMatch : latest;
+            }
+            else {
+                cacheSelection = latest;
+            }
+            if (fs_1.default.existsSync(cacheSelection)) {
+                cloud_runner_logger_1.default.logCli(`Library cache exists`);
+                yield remote_client_system_1.RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
+                console_1.assert(fs_1.default.existsSync(destinationFolder));
+                yield remote_client_system_1.RemoteClientSystem.Run(`tree ${destinationFolder}`);
+            }
+            else {
+                cloud_runner_logger_1.default.logCli(`Library cache doesn't exist`);
+                if (cacheSelection !== ``) {
+                    throw new Error(`Failed to get library cache, but cache hit was found: ${cacheSelection}`);
+                }
+            }
+        });
+    }
+}
+exports.Caching = Caching;
+
+
 /***/ }),
 
 /***/ 95575:
@@ -544,11 +615,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.SetupRemoteRepository = void 0;
-const console_1 = __webpack_require__(57082);
 const fs_1 = __importDefault(__webpack_require__(35747));
 const path_1 = __importDefault(__webpack_require__(85622));
 const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
 const cloud_runner_state_1 = __webpack_require__(70912);
+const caching_1 = __webpack_require__(35010);
 const remote_client_system_1 = __webpack_require__(91269);
 class SetupRemoteRepository {
     static run() {
@@ -614,11 +685,7 @@ class SetupRemoteRepository {
     }
     static cacheLatestLFSFiles(lfsCacheFolder) {
         return __awaiter(this, void 0, void 0, function* () {
-            process.chdir(`${cloud_runner_state_1.CloudRunnerState.lfsDirectory}/..`);
-            yield remote_client_system_1.RemoteClientSystem.Run(`zip -r "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "lfs"`);
-            console_1.assert(fs_1.default.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`));
-            yield remote_client_system_1.RemoteClientSystem.Run(`cp "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "${path_1.default.join(lfsCacheFolder, `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)}"`);
-            cloud_runner_logger_1.default.logCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
+            yield caching_1.Caching.PushToCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, SetupRemoteRepository.LFS_ASSETS_HASH);
         });
     }
     static pullLatestLFS() {
@@ -631,25 +698,7 @@ class SetupRemoteRepository {
     static lfsCaching(lfsCacheFolder) {
         return __awaiter(this, void 0, void 0, function* () {
             cloud_runner_logger_1.default.logCli(`LFS Caching`);
-            if (!fs_1.default.existsSync(lfsCacheFolder)) {
-                fs_1.default.mkdirSync(lfsCacheFolder);
-            }
-            process.chdir(lfsCacheFolder);
-            let latestLFSCacheFile;
-            if (fs_1.default.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)) {
-                cloud_runner_logger_1.default.logCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
-                latestLFSCacheFile = `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`;
-            }
-            else {
-                latestLFSCacheFile = yield (yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
-            }
-            if (fs_1.default.existsSync(latestLFSCacheFile)) {
-                cloud_runner_logger_1.default.logCli(`LFS cache exists`);
-                fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.lfsDirectory, { recursive: true });
-                cloud_runner_logger_1.default.logCli(`LFS cache exists from build ${latestLFSCacheFile} from ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
-                yield remote_client_system_1.RemoteClientSystem.Run(`unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.git`)}"`);
-                cloud_runner_logger_1.default.logCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
-            }
+            yield caching_1.Caching.PullFromCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
         });
     }
     static libraryCaching(libraryCacheFolder) {
@@ -660,30 +709,7 @@ class SetupRemoteRepository {
                 fs_1.default.rmdirSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull, { recursive: true });
                 cloud_runner_logger_1.default.logCli(`!Warning!: The Unity library was included in the git repository`);
             }
-            if (!fs_1.default.existsSync(libraryCacheFolder)) {
-                fs_1.default.mkdirSync(libraryCacheFolder);
-            }
-            //Restore library cache
-            const latestLibraryCacheFile = yield (yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
-            cloud_runner_logger_1.default.logCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
-            process.chdir(libraryCacheFolder);
-            if (fs_1.default.existsSync(latestLibraryCacheFile)) {
-                cloud_runner_logger_1.default.logCli(`Library cache exists`);
-                yield remote_client_system_1.RemoteClientSystem.Run(`unzip "${latestLibraryCacheFile}" -d "${cloud_runner_state_1.CloudRunnerState.libraryFolderFull}"`);
-            }
-            else {
-                cloud_runner_logger_1.default.logCli(`Library cache doesn't exist`);
-                if (latestLibraryCacheFile !== ``) {
-                    throw new Error(`Failed to get library cache, but cache hit was found (${latestLibraryCacheFile})`);
-                }
-            }
-        });
-    }
-    static checkFileExists(filepath) {
-        return new Promise((resolve) => {
-            fs_1.default.access(filepath, fs_1.default.constants.F_OK, (error) => {
-                resolve(!error);
+            yield caching_1.Caching.PullFromCache(libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
         });
     }
     static createLFSHashFiles() {
@@ -2451,6 +2477,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.CloudRunnerState = void 0;
+const path_1 = __importDefault(__webpack_require__(85622));
 const cloud_runner_namespace_1 = __importDefault(__webpack_require__(63287));
 const task_parameter_serializer_1 = __webpack_require__(43247);
 class CloudRunnerState {
@@ -2465,28 +2492,28 @@ class CloudRunnerState {
         return CloudRunnerState.buildParams.branch;
     }
     static get buildPathFull() {
-        return `/${CloudRunnerState.buildVolumeFolder}/${CloudRunnerState.buildGuid}`;
+        return path_1.default.join(`/`, CloudRunnerState.buildVolumeFolder, CloudRunnerState.buildGuid);
     }
     static get builderPathFull() {
-        return `${CloudRunnerState.buildPathFull}/builder`;
+        return path_1.default.join(CloudRunnerState.buildPathFull, `builder`);
     }
     static get steamPathFull() {
-        return `${CloudRunnerState.buildPathFull}/steam`;
+        return path_1.default.join(CloudRunnerState.buildPathFull, `steam`);
     }
     static get repoPathFull() {
-        return `${CloudRunnerState.buildPathFull}/${CloudRunnerState.repositoryFolder}`;
+        return path_1.default.join(CloudRunnerState.buildPathFull, CloudRunnerState.repositoryFolder);
     }
     static get projectPathFull() {
-        return `${CloudRunnerState.repoPathFull}/${CloudRunnerState.buildParams.projectPath}`;
+        return path_1.default.join(CloudRunnerState.repoPathFull, CloudRunnerState.buildParams.projectPath);
     }
     static get libraryFolderFull() {
-        return `${CloudRunnerState.projectPathFull}/Library`;
+        return path_1.default.join(CloudRunnerState.projectPathFull, `Library`);
     }
     static get cacheFolderFull() {
-        return `/${CloudRunnerState.buildVolumeFolder}/${CloudRunnerState.cacheFolder}/${CloudRunnerState.branchName}`;
+        return path_1.default.join(CloudRunnerState.buildVolumeFolder, CloudRunnerState.cacheFolder, CloudRunnerState.branchName);
     }
     static get lfsDirectory() {
-        return `${CloudRunnerState.repoPathFull}/.git/lfs`;
+        return path_1.default.join(CloudRunnerState.repoPathFull, `.git`, `lfs`);
     }
     static get purgeRemoteCaching() {
         return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
@@ -2503,12 +2530,6 @@ class CloudRunnerState {
     static readBuildEnvironmentVariables() {
         return task_parameter_serializer_1.TaskParameterSerializer.readBuildEnvironmentVariables();
     }
-    static get getHandleCachingCommand() {
-        return `${CloudRunnerState.builderPathFull}/dist/cloud-runner/handleCaching.sh "${CloudRunnerState.cacheFolderFull}" "${CloudRunnerState.libraryFolderFull}" "${CloudRunnerState.lfsDirectory}" "${CloudRunnerState.purgeRemoteCaching}"`;
-    }
-    static get cloneBuilderCommand() {
-        return `git clone -b ${CloudRunnerState.branchName} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}`;
-    }
     static get runNumber() {
         const runNumber = CloudRunnerState.buildParams.runNumber;
         if (!runNumber || runNumber === '') {
@@ -2803,8 +2824,7 @@ class SetupStep {
       apk add unzip zip git-lfs jq tree nodejs -q
       export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
       mkdir -p ${cloud_runner_state_1.CloudRunnerState.builderPathFull}
-      echo "${cloud_runner_state_1.CloudRunnerState.cloneBuilderCommand}"
-      ${cloud_runner_state_1.CloudRunnerState.cloneBuilderCommand}
+      git clone -b ${cloud_runner_state_1.CloudRunnerState.branchName} ${cloud_runner_state_1.CloudRunnerState.unityBuilderRepoUrl} ${cloud_runner_state_1.CloudRunnerState.builderPathFull}
       chmod +x ${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/index.js
       node ${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/index.js -m remote-cli
     `,
File diff suppressed because one or more lines are too long
@@ -0,0 +1,49 @@
+import { assert } from 'console';
+import fs from 'fs';
+import path from 'path';
+import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
+import { RemoteClientSystem } from './remote-client-system';
+
+export class Caching {
+  public static async PushToCache(cacheFolder: string, destinationFolder: string, artifactName: string) {
+    process.chdir(`${destinationFolder}/..`);
+    await RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
+    assert(fs.existsSync(`${artifactName}.zip`));
+    await RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
+    CloudRunnerLogger.logCli(`copied ${artifactName} to ${cacheFolder}`);
+  }
+  public static async PullFromCache(cacheFolder: string, destinationFolder: string, specificHashMatch: string = ``) {
+    if (!fs.existsSync(cacheFolder)) {
+      fs.mkdirSync(cacheFolder);
+    }
+
+    if (!fs.existsSync(destinationFolder)) {
+      fs.mkdirSync(destinationFolder);
+    }
+
+    const latest = await (await RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(
+      `\n`,
+      ``,
+    );
+
+    process.chdir(cacheFolder);
+    let cacheSelection;
+
+    if (specificHashMatch !== ``) {
+      cacheSelection = fs.existsSync(specificHashMatch) ? specificHashMatch : latest;
+    } else {
+      cacheSelection = latest;
+    }
+    if (fs.existsSync(cacheSelection)) {
+      CloudRunnerLogger.logCli(`Library cache exists`);
+      await RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
+      assert(fs.existsSync(destinationFolder));
+      await RemoteClientSystem.Run(`tree ${destinationFolder}`);
+    } else {
+      CloudRunnerLogger.logCli(`Library cache doesn't exist`);
+      if (cacheSelection !== ``) {
+        throw new Error(`Failed to get library cache, but cache hit was found: ${cacheSelection}`);
+      }
+    }
+  }
+}
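
The two entry points above are consumed by the remote client during repository setup. A minimal sketch of how the class is called, assuming the same folder arguments used by the call sites later in this commit (the helper name warmCaches is illustrative only and not part of the change):

import { Caching } from './caching';
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';

// Hypothetical helper: push the git LFS folder into the branch cache under its
// content hash, then restore the Unity Library folder from the newest cache entry.
export async function warmCaches(lfsCacheFolder: string, libraryCacheFolder: string, lfsAssetsHash: string) {
  await Caching.PushToCache(lfsCacheFolder, CloudRunnerState.lfsDirectory, lfsAssetsHash);
  await Caching.PullFromCache(libraryCacheFolder, CloudRunnerState.libraryFolderFull);
}
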
@@ -1,8 +1,8 @@
-import { assert } from 'console';
 import fs from 'fs';
 import path from 'path';
 import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
+import { Caching } from './caching';
 import { RemoteClientSystem } from './remote-client-system';
 
 export class SetupRemoteRepository {
@@ -72,17 +72,7 @@ export class SetupRemoteRepository {
   }
 
   private static async cacheLatestLFSFiles(lfsCacheFolder: string) {
-    process.chdir(`${CloudRunnerState.lfsDirectory}/..`);
-    await RemoteClientSystem.Run(`zip -r "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "lfs"`);
-    assert();
-    CloudRunnerLogger.logCli(fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`).toString());
-    await RemoteClientSystem.Run(
-      `cp "${SetupRemoteRepository.LFS_ASSETS_HASH}.zip" "${path.join(
-        lfsCacheFolder,
-        `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
-      )}"`,
-    );
-    CloudRunnerLogger.logCli(`copied ${SetupRemoteRepository.LFS_ASSETS_HASH} to ${lfsCacheFolder}`);
+    await Caching.PushToCache(lfsCacheFolder, CloudRunnerState.lfsDirectory, SetupRemoteRepository.LFS_ASSETS_HASH);
   }
 
   private static async pullLatestLFS() {
@@ -93,30 +83,11 @@ export class SetupRemoteRepository {
 
   private static async lfsCaching(lfsCacheFolder: string) {
     CloudRunnerLogger.logCli(`LFS Caching`);
-    if (!fs.existsSync(lfsCacheFolder)) {
-      fs.mkdirSync(lfsCacheFolder);
-    }
-    process.chdir(lfsCacheFolder);
-    let latestLFSCacheFile;
-    if (fs.existsSync(`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`)) {
-      CloudRunnerLogger.logCli(`Match found: using large file hash match ${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
-      latestLFSCacheFile = `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`;
-    } else {
-      latestLFSCacheFile = await (
-        await RemoteClientSystem.Run(`ls -t "${lfsCacheFolder}" | grep .zip$ | head -1`)
-      ).replace(`\n`, ``);
-    }
-    if (fs.existsSync(latestLFSCacheFile)) {
-      CloudRunnerLogger.logCli(`LFS cache exists`);
-      fs.rmdirSync(CloudRunnerState.lfsDirectory, { recursive: true });
-      CloudRunnerLogger.logCli(
-        `LFS cache exists from build ${latestLFSCacheFile} from ${CloudRunnerState.buildParams.branch}`,
-      );
-      await RemoteClientSystem.Run(
-        `unzip -q "${lfsCacheFolder}/${latestLFSCacheFile}" -d "${path.join(CloudRunnerState.repoPathFull, `.git`)}"`,
-      );
-      CloudRunnerLogger.logCli(`git LFS folder, (should not contain $latestLFSCacheFile)`);
-    }
+    await Caching.PullFromCache(
+      lfsCacheFolder,
+      CloudRunnerState.lfsDirectory,
+      `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
+    );
   }
 
   private static async libraryCaching(libraryCacheFolder: string) {
@@ -126,31 +97,7 @@ export class SetupRemoteRepository {
       fs.rmdirSync(CloudRunnerState.libraryFolderFull, { recursive: true });
       CloudRunnerLogger.logCli(`!Warning!: The Unity library was included in the git repository`);
     }
-    if (!fs.existsSync(libraryCacheFolder)) {
-      fs.mkdirSync(libraryCacheFolder);
-    }
-    //Restore library cache
-    const latestLibraryCacheFile = await (
-      await RemoteClientSystem.Run(`ls -t "${libraryCacheFolder}" | grep .zip$ | head -1`)
-    ).replace(`\n`, ``);
-    CloudRunnerLogger.logCli(`Checking if Library cache ${libraryCacheFolder}/${latestLibraryCacheFile} exists`);
-    process.chdir(libraryCacheFolder);
-    if (fs.existsSync(latestLibraryCacheFile)) {
-      CloudRunnerLogger.logCli(`Library cache exists`);
-      await RemoteClientSystem.Run(`unzip "${latestLibraryCacheFile}" -d "${CloudRunnerState.libraryFolderFull}"`);
-    } else {
-      CloudRunnerLogger.logCli(`Library cache doesn't exist`);
-      if (latestLibraryCacheFile !== ``) {
-        throw new Error(`Failed to get library cache, but cache hit was found (${latestLibraryCacheFile})`);
-      }
-    }
-  }
-  static checkFileExists(filepath) {
-    return new Promise((resolve) => {
-      fs.access(filepath, fs.constants.F_OK, (error) => {
-        resolve(!error);
-      });
-    });
+    await Caching.PullFromCache(libraryCacheFolder, CloudRunnerState.libraryFolderFull);
   }
 
   private static async createLFSHashFiles() {
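
Because lfsCaching passes `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip` as specificHashMatch, PullFromCache prefers an archive named after the current LFS hash and only falls back to the newest .zip in the cache folder. A stand-alone sketch of that selection rule under those assumptions (function name and sample values are illustrative, not part of the commit):

import fs from 'fs';

// Prefer an exact hash-named archive when it exists in the current working
// directory; otherwise fall back to the newest entry reported by `ls -t`.
function selectCacheEntry(specificHashMatch: string, latest: string): string {
  if (specificHashMatch !== ``) {
    return fs.existsSync(specificHashMatch) ? specificHashMatch : latest;
  }
  return latest;
}

// selectCacheEntry(`abc123-lfs-assets.zip`, `build-42.zip`) returns the hash-named
// archive if it is present on disk, otherwise `build-42.zip`.
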
@@ -1,3 +1,4 @@
+import path from 'path';
 import { BuildParameters } from '../..';
 import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
 import CloudRunnerNamespace from '../services/cloud-runner-namespace';
@@ -24,28 +25,28 @@ export class CloudRunnerState {
     return CloudRunnerState.buildParams.branch;
   }
   public static get buildPathFull(): string {
-    return `/${CloudRunnerState.buildVolumeFolder}/${CloudRunnerState.buildGuid}`;
+    return path.join(`/`, CloudRunnerState.buildVolumeFolder, CloudRunnerState.buildGuid);
   }
   public static get builderPathFull(): string {
-    return `${CloudRunnerState.buildPathFull}/builder`;
+    return path.join(CloudRunnerState.buildPathFull, `builder`);
   }
   public static get steamPathFull(): string {
-    return `${CloudRunnerState.buildPathFull}/steam`;
+    return path.join(CloudRunnerState.buildPathFull, `steam`);
   }
   public static get repoPathFull(): string {
-    return `${CloudRunnerState.buildPathFull}/${CloudRunnerState.repositoryFolder}`;
+    return path.join(CloudRunnerState.buildPathFull, CloudRunnerState.repositoryFolder);
   }
   public static get projectPathFull(): string {
-    return `${CloudRunnerState.repoPathFull}/${CloudRunnerState.buildParams.projectPath}`;
+    return path.join(CloudRunnerState.repoPathFull, CloudRunnerState.buildParams.projectPath);
   }
   public static get libraryFolderFull(): string {
-    return `${CloudRunnerState.projectPathFull}/Library`;
+    return path.join(CloudRunnerState.projectPathFull, `Library`);
   }
   public static get cacheFolderFull(): string {
-    return `/${CloudRunnerState.buildVolumeFolder}/${CloudRunnerState.cacheFolder}/${CloudRunnerState.branchName}`;
+    return path.join(CloudRunnerState.buildVolumeFolder, CloudRunnerState.cacheFolder, CloudRunnerState.branchName);
  }
   public static get lfsDirectory(): string {
-    return `${CloudRunnerState.repoPathFull}/.git/lfs`;
+    return path.join(CloudRunnerState.repoPathFull, `.git`, `lfs`);
   }
   public static get purgeRemoteCaching(): boolean {
     return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
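
Node's path.join only yields an absolute path when one of the joined segments carries the leading slash, which is why buildPathFull passes `/` as its first segment; the new cacheFolderFull does not, so it now resolves to a relative path. A small illustration of that behavior (the folder names below are placeholders, not the action's real configuration):

import path from 'path';

const buildVolumeFolder = 'data';
const buildGuid = 'b-123';

// Absolute, because `/` is the first joined segment:
path.join(`/`, buildVolumeFolder, buildGuid); // "/data/b-123"

// Relative, because no segment starts with `/`:
path.join(buildVolumeFolder, 'cache', 'main'); // "data/cache/main"
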
@@ -75,14 +76,6 @@ export class CloudRunnerState {
     return TaskParameterSerializer.readBuildEnvironmentVariables();
   }
 
-  public static get getHandleCachingCommand() {
-    return `${CloudRunnerState.builderPathFull}/dist/cloud-runner/handleCaching.sh "${CloudRunnerState.cacheFolderFull}" "${CloudRunnerState.libraryFolderFull}" "${CloudRunnerState.lfsDirectory}" "${CloudRunnerState.purgeRemoteCaching}"`;
-  }
-
-  public static get cloneBuilderCommand() {
-    return `git clone -b ${CloudRunnerState.branchName} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}`;
-  }
-
   public static get runNumber() {
     const runNumber = CloudRunnerState.buildParams.runNumber;
     if (!runNumber || runNumber === '') {
@@ -34,8 +34,7 @@ export class SetupStep implements StepInterface {
       apk add unzip zip git-lfs jq tree nodejs -q
       export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
      mkdir -p ${CloudRunnerState.builderPathFull}
-      echo "${CloudRunnerState.cloneBuilderCommand}"
-      ${CloudRunnerState.cloneBuilderCommand}
+      git clone -b ${CloudRunnerState.branchName} ${CloudRunnerState.unityBuilderRepoUrl} ${CloudRunnerState.builderPathFull}
       chmod +x ${CloudRunnerState.builderPathFull}/dist/index.js
       node ${CloudRunnerState.builderPathFull}/dist/index.js -m remote-cli
     `,
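
With the cloneBuilderCommand getter removed, the bootstrap script clones the builder directly. A rough sketch of what the interpolated line expands to at runtime, using placeholder values only (the real branch, repository URL and builder path come from CloudRunnerState at execution time):

const branchName = 'main';
const unityBuilderRepoUrl = 'https://example.com/unity-builder.git';
const builderPathFull = '/data/b-123/builder';

// Mirrors the inlined template-literal line in the setup step above.
const cloneLine = `git clone -b ${branchName} ${unityBuilderRepoUrl} ${builderPathFull}`;
// => "git clone -b main https://example.com/unity-builder.git /data/b-123/builder"
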