Logging improvement
parent 6037bf9007
commit 1d7a50a0b8
@@ -461,9 +461,9 @@ class Caching {
 return __awaiter(this, void 0, void 0, function* () {
 try {
 process.chdir(`${destinationFolder}/..`);
-yield remote_client_system_1.RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`zip -r "${artifactName}.zip" "${path_1.default.dirname(destinationFolder)}"`);
 console_1.assert(fs_1.default.existsSync(`${artifactName}.zip`));
-yield remote_client_system_1.RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`cp "${artifactName}.zip" "${path_1.default.join(cacheFolder, `${artifactName}.zip`)}"`);
 cloud_runner_logger_1.default.logCli(`copied ${artifactName} to ${cacheFolder}`);
 }
 catch (error) {
@@ -475,12 +475,12 @@ class Caching {
 return __awaiter(this, void 0, void 0, function* () {
 try {
 if (!fs_1.default.existsSync(cacheFolder)) {
-yield remote_client_system_1.RemoteClientSystem.Run(`mkdir -p ${cacheFolder}`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);
 }
 if (!fs_1.default.existsSync(destinationFolder)) {
-yield remote_client_system_1.RemoteClientSystem.Run(`mkdir -p ${destinationFolder}`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${destinationFolder}`);
 }
-const latest = yield (yield remote_client_system_1.RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
+const latest = yield (yield remote_client_system_1.CloudRunnerAgentSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(`\n`, ``);
 process.chdir(cacheFolder);
 let cacheSelection;
 if (specificHashMatch !== ``) {
@@ -491,9 +491,9 @@ class Caching {
 }
 if (fs_1.default.existsSync(cacheSelection)) {
 cloud_runner_logger_1.default.logCli(`Library cache exists`);
-yield remote_client_system_1.RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
 console_1.assert(fs_1.default.existsSync(destinationFolder));
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${destinationFolder}`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
 }
 else {
 cloud_runner_logger_1.default.logCli(`Library cache doesn't exist`);
@@ -515,7 +515,7 @@ class Caching {
 }
 static printCacheState(lfsCacheFolder, libraryCacheFolder) {
 return __awaiter(this, void 0, void 0, function* () {
-yield remote_client_system_1.RemoteClientSystem.Run(`echo ' '
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`echo ' '
 echo "LFS cache for $branch"
 du -sch "${lfsCacheFolder}/"
 echo '**'
@@ -534,6 +534,99 @@ class Caching {
 exports.Caching = Caching;


 /***/ }),

+/***/ 21811:
+/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+return new (P || (P = Promise))(function (resolve, reject) {
+function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+step((generator = generator.apply(thisArg, _arguments || [])).next());
+});
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CloudRunnerRepositorySetup = void 0;
+const console_1 = __webpack_require__(57082);
+const fs_1 = __importDefault(__webpack_require__(35747));
+const path_1 = __importDefault(__webpack_require__(85622));
+const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
+const cloud_runner_state_1 = __webpack_require__(70912);
+const caching_1 = __webpack_require__(35010);
+const lfs_hashing_1 = __webpack_require__(47011);
+const remote_client_system_1 = __webpack_require__(91269);
+class CloudRunnerRepositorySetup {
+static run() {
+return __awaiter(this, void 0, void 0, function* () {
+try {
+fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.buildPathFull);
+fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.repoPathFull);
+yield CloudRunnerRepositorySetup.cloneRepoWithoutLFSFiles();
+CloudRunnerRepositorySetup.LFS_ASSETS_HASH = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
+cloud_runner_logger_1.default.logCli(CloudRunnerRepositorySetup.LFS_ASSETS_HASH);
+yield lfs_hashing_1.LFSHashing.printLFSHashState();
+const lfsCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lfs`);
+const libraryCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lib`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
+cloud_runner_logger_1.default.logCli(`Library Caching`);
+console_1.assert(fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull), `!Warning!: The Unity library was included in the git repository`);
+yield caching_1.Caching.PullFromCache(libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
+cloud_runner_logger_1.default.logCli(`LFS Caching`);
+yield caching_1.Caching.PullFromCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${CloudRunnerRepositorySetup.LFS_ASSETS_HASH}.zip`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
+yield caching_1.Caching.printCacheState(lfsCacheFolder, libraryCacheFolder);
+yield CloudRunnerRepositorySetup.pullLatestLFS();
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}`);
+yield caching_1.Caching.PushToCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, CloudRunnerRepositorySetup.LFS_ASSETS_HASH);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}`);
+caching_1.Caching.handleCachePurging();
+}
+catch (error) {
+throw error;
+}
+});
+}
+static cloneRepoWithoutLFSFiles() {
+return __awaiter(this, void 0, void 0, function* () {
+try {
+cloud_runner_logger_1.default.logCli(`Initializing source repository for cloning with caching of LFS files`);
+process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`git config --global advice.detachedHead false`);
+cloud_runner_logger_1.default.logCli(`Cloning the repository being built:`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`git lfs install --skip-smudge`);
+cloud_runner_logger_1.default.logCli(cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`ls -lh`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`tree`);
+cloud_runner_logger_1.default.logCli(`${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`git checkout ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
+cloud_runner_logger_1.default.logCli(`Checked out ${process.env.GITHUB_SHA}`);
+}
+catch (error) {
+throw error;
+}
+});
+}
+static pullLatestLFS() {
+return __awaiter(this, void 0, void 0, function* () {
+process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`git lfs pull`);
+cloud_runner_logger_1.default.logCli(`pulled latest LFS files`);
+});
+}
+}
+exports.CloudRunnerRepositorySetup = CloudRunnerRepositorySetup;
+
+
+/***/ }),
+
 /***/ 95575:
@@ -553,7 +646,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.RemoteClient = void 0;
 const cloud_runner_state_1 = __webpack_require__(70912);
-const setup_remote_repository_1 = __webpack_require__(62100);
+const cloud_runner_repository_setup_1 = __webpack_require__(21811);
 class RemoteClient {
 static Run(options) {
 return __awaiter(this, void 0, void 0, function* () {
@@ -561,7 +654,7 @@ class RemoteClient {
 cloud_runner_state_1.CloudRunnerState.setup(buildParameter);
 switch (options.remoteClientState) {
 default:
-yield setup_remote_repository_1.SetupRemoteRepository.run();
+yield cloud_runner_repository_setup_1.CloudRunnerRepositorySetup.run();
 break;
 }
 });
@@ -598,7 +691,7 @@ const fs_1 = __importDefault(__webpack_require__(35747));
 class LFSHashing {
 static printLFSHashState() {
 return __awaiter(this, void 0, void 0, function* () {
-yield remote_client_system_1.RemoteClientSystem.Run(`echo ' '
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`echo ' '
 echo 'Contents of .lfs-assets-guid file:'
 cat .lfs-assets-guid
 echo ' '
@@ -613,8 +706,8 @@ class LFSHashing {
 static createLFSHashFiles() {
 return __awaiter(this, void 0, void 0, function* () {
 try {
-yield remote_client_system_1.RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-yield remote_client_system_1.RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+yield remote_client_system_1.CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
 return fs_1.default.readFileSync(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
 }
 catch (error) {
@@ -646,10 +739,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.RemoteClientSystem = void 0;
+exports.CloudRunnerAgentSystem = void 0;
 const child_process_1 = __webpack_require__(63129);
 const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
-class RemoteClientSystem {
+class CloudRunnerAgentSystem {
 static Run(command) {
 return __awaiter(this, void 0, void 0, function* () {
 return yield new Promise((promise) => {
@@ -681,100 +774,7 @@ class RemoteClientSystem {
 });
 }
 }
-exports.RemoteClientSystem = RemoteClientSystem;
-
-
-/***/ }),
-
-/***/ 62100:
-/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-return new (P || (P = Promise))(function (resolve, reject) {
-function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-step((generator = generator.apply(thisArg, _arguments || [])).next());
-});
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SetupRemoteRepository = void 0;
-const console_1 = __webpack_require__(57082);
-const fs_1 = __importDefault(__webpack_require__(35747));
-const path_1 = __importDefault(__webpack_require__(85622));
-const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
-const cloud_runner_state_1 = __webpack_require__(70912);
-const caching_1 = __webpack_require__(35010);
-const lfs_hashing_1 = __webpack_require__(47011);
-const remote_client_system_1 = __webpack_require__(91269);
-class SetupRemoteRepository {
-static run() {
-return __awaiter(this, void 0, void 0, function* () {
-try {
-fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.buildPathFull);
-fs_1.default.mkdirSync(cloud_runner_state_1.CloudRunnerState.repoPathFull);
-yield SetupRemoteRepository.cloneRepoWithoutLFSFiles();
-SetupRemoteRepository.LFS_ASSETS_HASH = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
-cloud_runner_logger_1.default.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
-yield lfs_hashing_1.LFSHashing.printLFSHashState();
-const lfsCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lfs`);
-const libraryCacheFolder = path_1.default.join(cloud_runner_state_1.CloudRunnerState.cacheFolderFull, `lib`);
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
-cloud_runner_logger_1.default.logCli(`Library Caching`);
-console_1.assert(fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull), `!Warning!: The Unity library was included in the git repository`);
-yield caching_1.Caching.PullFromCache(libraryCacheFolder, cloud_runner_state_1.CloudRunnerState.libraryFolderFull);
-cloud_runner_logger_1.default.logCli(`LFS Caching`);
-yield caching_1.Caching.PullFromCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, `${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`);
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
-yield caching_1.Caching.printCacheState(lfsCacheFolder, libraryCacheFolder);
-yield SetupRemoteRepository.pullLatestLFS();
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}`);
-yield caching_1.Caching.PushToCache(lfsCacheFolder, cloud_runner_state_1.CloudRunnerState.lfsDirectory, SetupRemoteRepository.LFS_ASSETS_HASH);
-yield remote_client_system_1.RemoteClientSystem.Run(`tree ${cloud_runner_state_1.CloudRunnerState.cacheFolderFull}`);
-caching_1.Caching.handleCachePurging();
-}
-catch (error) {
-throw error;
-}
-});
-}
-static cloneRepoWithoutLFSFiles() {
-return __awaiter(this, void 0, void 0, function* () {
-try {
-cloud_runner_logger_1.default.logCli(`Initializing source repository for cloning with caching of LFS files`);
-process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
-yield remote_client_system_1.RemoteClientSystem.Run(`git config --global advice.detachedHead false`);
-cloud_runner_logger_1.default.logCli(`Cloning the repository being built:`);
-yield remote_client_system_1.RemoteClientSystem.Run(`git lfs install --skip-smudge`);
-cloud_runner_logger_1.default.logCli(cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl);
-yield remote_client_system_1.RemoteClientSystem.Run(`git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
-yield remote_client_system_1.RemoteClientSystem.Run(`ls -lh`);
-yield remote_client_system_1.RemoteClientSystem.Run(`tree`);
-cloud_runner_logger_1.default.logCli(`${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
-yield remote_client_system_1.RemoteClientSystem.Run(`git checkout ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
-cloud_runner_logger_1.default.logCli(`Checked out ${process.env.GITHUB_SHA}`);
-}
-catch (error) {
-throw error;
-}
-});
-}
-static pullLatestLFS() {
-return __awaiter(this, void 0, void 0, function* () {
-process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
-yield remote_client_system_1.RemoteClientSystem.Run(`git lfs pull`);
-cloud_runner_logger_1.default.logCli(`pulled latest LFS files`);
-});
-}
-}
-exports.SetupRemoteRepository = SetupRemoteRepository;
+exports.CloudRunnerAgentSystem = CloudRunnerAgentSystem;


 /***/ }),
@@ -1574,7 +1574,7 @@ exports.CloudRunnerStatics = void 0;
 class CloudRunnerStatics {
 }
 exports.CloudRunnerStatics = CloudRunnerStatics;
-CloudRunnerStatics.logPrefix = `Cloud-Runner-Agent`;
+CloudRunnerStatics.logPrefix = `Cloud-Runner-System`;


 /***/ }),
@@ -2452,7 +2452,7 @@ class CloudRunnerLogger {
 core.info(message);
 }
 static logCli(message) {
-CloudRunnerLogger.log(`[CLI] ${message}`);
+CloudRunnerLogger.log(`[Client] ${message}`);
 }
 static logLine(message) {
 core.info(`${message}\n`);
File diff suppressed because one or more lines are too long
@@ -3,15 +3,15 @@ import fs from 'fs';
 import path from 'path';
 import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
-import { RemoteClientSystem } from './remote-client-system';
+import { CloudRunnerAgentSystem } from './remote-client-system';

 export class Caching {
 public static async PushToCache(cacheFolder: string, destinationFolder: string, artifactName: string) {
 try {
 process.chdir(`${destinationFolder}/..`);
-await RemoteClientSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
+await CloudRunnerAgentSystem.Run(`zip -r "${artifactName}.zip" "${path.dirname(destinationFolder)}"`);
 assert(fs.existsSync(`${artifactName}.zip`));
-await RemoteClientSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
+await CloudRunnerAgentSystem.Run(`cp "${artifactName}.zip" "${path.join(cacheFolder, `${artifactName}.zip`)}"`);
 CloudRunnerLogger.logCli(`copied ${artifactName} to ${cacheFolder}`);
 } catch (error) {
 throw error;
@@ -20,14 +20,14 @@ export class Caching {
 public static async PullFromCache(cacheFolder: string, destinationFolder: string, specificHashMatch: string = ``) {
 try {
 if (!fs.existsSync(cacheFolder)) {
-await RemoteClientSystem.Run(`mkdir -p ${cacheFolder}`);
+await CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);
 }

 if (!fs.existsSync(destinationFolder)) {
-await RemoteClientSystem.Run(`mkdir -p ${destinationFolder}`);
+await CloudRunnerAgentSystem.Run(`mkdir -p ${destinationFolder}`);
 }

-const latest = await (await RemoteClientSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(
+const latest = await (await CloudRunnerAgentSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`)).replace(
 `\n`,
 ``,
 );
@@ -42,9 +42,9 @@ export class Caching {
 }
 if (fs.existsSync(cacheSelection)) {
 CloudRunnerLogger.logCli(`Library cache exists`);
-await RemoteClientSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
+await CloudRunnerAgentSystem.Run(`unzip "${cacheSelection}" -d "${destinationFolder}"`);
 assert(fs.existsSync(destinationFolder));
-await RemoteClientSystem.Run(`tree ${destinationFolder}`);
+await CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
 } else {
 CloudRunnerLogger.logCli(`Library cache doesn't exist`);
 if (cacheSelection !== ``) {
@@ -64,7 +64,7 @@ export class Caching {
 }

 public static async printCacheState(lfsCacheFolder: string, libraryCacheFolder: string) {
-await RemoteClientSystem.Run(
+await CloudRunnerAgentSystem.Run(
 `echo ' '
 echo "LFS cache for $branch"
 du -sch "${lfsCacheFolder}/"
@@ -5,22 +5,22 @@ import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
 import { Caching } from './caching';
 import { LFSHashing } from './lfs-hashing';
-import { RemoteClientSystem } from './remote-client-system';
+import { CloudRunnerAgentSystem } from './remote-client-system';

-export class SetupRemoteRepository {
+export class CloudRunnerRepositorySetup {
 static LFS_ASSETS_HASH;
 public static async run() {
 try {
 fs.mkdirSync(CloudRunnerState.buildPathFull);
 fs.mkdirSync(CloudRunnerState.repoPathFull);
-await SetupRemoteRepository.cloneRepoWithoutLFSFiles();
+await CloudRunnerRepositorySetup.cloneRepoWithoutLFSFiles();

-SetupRemoteRepository.LFS_ASSETS_HASH = await LFSHashing.createLFSHashFiles();
-CloudRunnerLogger.logCli(SetupRemoteRepository.LFS_ASSETS_HASH);
+CloudRunnerRepositorySetup.LFS_ASSETS_HASH = await LFSHashing.createLFSHashFiles();
+CloudRunnerLogger.logCli(CloudRunnerRepositorySetup.LFS_ASSETS_HASH);
 await LFSHashing.printLFSHashState();
 const lfsCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lfs`);
 const libraryCacheFolder = path.join(CloudRunnerState.cacheFolderFull, `lib`);
-await RemoteClientSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
+await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
 CloudRunnerLogger.logCli(`Library Caching`);
 assert(
 fs.existsSync(CloudRunnerState.libraryFolderFull),
@@ -31,15 +31,19 @@ export class SetupRemoteRepository {
 await Caching.PullFromCache(
 lfsCacheFolder,
 CloudRunnerState.lfsDirectory,
-`${SetupRemoteRepository.LFS_ASSETS_HASH}.zip`,
+`${CloudRunnerRepositorySetup.LFS_ASSETS_HASH}.zip`,
 );
-await RemoteClientSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
+await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
 await Caching.printCacheState(lfsCacheFolder, libraryCacheFolder);
-await SetupRemoteRepository.pullLatestLFS();
-await RemoteClientSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
-await RemoteClientSystem.Run(`tree ${CloudRunnerState.cacheFolderFull}`);
-await Caching.PushToCache(lfsCacheFolder, CloudRunnerState.lfsDirectory, SetupRemoteRepository.LFS_ASSETS_HASH);
-await RemoteClientSystem.Run(`tree ${CloudRunnerState.cacheFolderFull}`);
+await CloudRunnerRepositorySetup.pullLatestLFS();
+await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.repoPathFull}`);
+await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.cacheFolderFull}`);
+await Caching.PushToCache(
+lfsCacheFolder,
+CloudRunnerState.lfsDirectory,
+CloudRunnerRepositorySetup.LFS_ASSETS_HASH,
+);
+await CloudRunnerAgentSystem.Run(`tree ${CloudRunnerState.cacheFolderFull}`);
 Caching.handleCachePurging();
 } catch (error) {
 throw error;
@@ -50,15 +54,17 @@ export class SetupRemoteRepository {
 try {
 CloudRunnerLogger.logCli(`Initializing source repository for cloning with caching of LFS files`);
 process.chdir(CloudRunnerState.repoPathFull);
-await RemoteClientSystem.Run(`git config --global advice.detachedHead false`);
+await CloudRunnerAgentSystem.Run(`git config --global advice.detachedHead false`);
 CloudRunnerLogger.logCli(`Cloning the repository being built:`);
-await RemoteClientSystem.Run(`git lfs install --skip-smudge`);
+await CloudRunnerAgentSystem.Run(`git lfs install --skip-smudge`);
 CloudRunnerLogger.logCli(CloudRunnerState.targetBuildRepoUrl);
-await RemoteClientSystem.Run(`git clone ${CloudRunnerState.targetBuildRepoUrl} ${CloudRunnerState.repoPathFull}`);
-await RemoteClientSystem.Run(`ls -lh`);
-await RemoteClientSystem.Run(`tree`);
+await CloudRunnerAgentSystem.Run(
+`git clone ${CloudRunnerState.targetBuildRepoUrl} ${CloudRunnerState.repoPathFull}`,
+);
+await CloudRunnerAgentSystem.Run(`ls -lh`);
+await CloudRunnerAgentSystem.Run(`tree`);
 CloudRunnerLogger.logCli(`${CloudRunnerState.buildParams.branch}`);
-await RemoteClientSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
+await CloudRunnerAgentSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
 CloudRunnerLogger.logCli(`Checked out ${process.env.GITHUB_SHA}`);
 } catch (error) {
 throw error;
@@ -67,7 +73,7 @@ export class SetupRemoteRepository {

 private static async pullLatestLFS() {
 process.chdir(CloudRunnerState.repoPathFull);
-await RemoteClientSystem.Run(`git lfs pull`);
+await CloudRunnerAgentSystem.Run(`git lfs pull`);
 CloudRunnerLogger.logCli(`pulled latest LFS files`);
 }
 }
@@ -1,5 +1,5 @@
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
-import { SetupRemoteRepository } from './setup-remote-repository';
+import { CloudRunnerRepositorySetup } from './cloud-runner-repository-setup';

 export class RemoteClient {
 static async Run(options) {
@@ -7,7 +7,7 @@ export class RemoteClient {
 CloudRunnerState.setup(buildParameter);
 switch (options.remoteClientState) {
 default:
-await SetupRemoteRepository.run();
+await CloudRunnerRepositorySetup.run();
 break;
 }
 }
@@ -1,11 +1,11 @@
 import path from 'path';
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
-import { RemoteClientSystem } from './remote-client-system';
+import { CloudRunnerAgentSystem } from './remote-client-system';
 import fs from 'fs';

 export class LFSHashing {
 public static async printLFSHashState() {
-await RemoteClientSystem.Run(
+await CloudRunnerAgentSystem.Run(
 `echo ' '
 echo 'Contents of .lfs-assets-guid file:'
 cat .lfs-assets-guid
@@ -21,8 +21,8 @@ export class LFSHashing {

 public static async createLFSHashFiles() {
 try {
-await RemoteClientSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-await RemoteClientSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+await CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+await CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
 return fs.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8');
 } catch (error) {
 throw error;
@@ -1,7 +1,7 @@
 import { exec } from 'child_process';
 import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';

-export class RemoteClientSystem {
+export class CloudRunnerAgentSystem {
 public static async Run(command: string) {
 return await new Promise<string>((promise) => {
 let output = '';
@@ -1,3 +1,3 @@
 export class CloudRunnerStatics {
-public static readonly logPrefix = `Cloud-Runner-Agent`;
+public static readonly logPrefix = `Cloud-Runner-System`;
 }
@@ -14,7 +14,7 @@ class CloudRunnerLogger {
 }

 public static logCli(message: string) {
-CloudRunnerLogger.log(`[CLI] ${message}`);
+CloudRunnerLogger.log(`[Client] ${message}`);
 }

 public static logLine(message: string) {