fixing library warning in setup step
parent dccc5f5627
commit 1db134b416
@@ -438,7 +438,7 @@ const core = __importStar(__webpack_require__(42186));
 const action_yaml_1 = __webpack_require__(11091);
 const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
 const cli_decorator_1 = __webpack_require__(8731);
-const remote_client_logger_1 = __webpack_require__(68972);
+const remote_client_logger_1 = __webpack_require__(28082);
 const cloud_runner_state_1 = __webpack_require__(70912);
 const setup_cloud_runner_repository_1 = __webpack_require__(39656);
 class CLI {
@@ -507,7 +507,7 @@ exports.CLI = CLI;
 
 /***/ }),
 
-/***/ 35010:
+/***/ 38759:
 /***/ (function(__unused_webpack_module, exports, __webpack_require__) {
 
 "use strict";
@@ -532,9 +532,9 @@ const path_1 = __importDefault(__webpack_require__(85622));
 const __1 = __webpack_require__(41359);
 const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
 const cloud_runner_state_1 = __webpack_require__(70912);
-const cloud_runner_agent_system_1 = __webpack_require__(87685);
-const lfs_hashing_1 = __webpack_require__(47011);
-const remote_client_logger_1 = __webpack_require__(68972);
+const cloud_runner_system_1 = __webpack_require__(66879);
+const lfs_hashing_1 = __webpack_require__(31938);
+const remote_client_logger_1 = __webpack_require__(28082);
 class Caching {
 static PushToCache(cacheFolder, sourceFolder, cacheKey) {
 return __awaiter(this, void 0, void 0, function* () {
@@ -546,12 +546,12 @@ class Caching {
 if (__1.Input.cloudRunnerTests) {
 cloud_runner_logger_1.default.log(`Hashed cache folder ${yield lfs_hashing_1.LFSHashing.hashAllFiles(sourceFolder)}`);
 }
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`zip ${__1.Input.cloudRunnerTests ? '' : '-q'} -r ${cacheKey} ${path_1.default.basename(sourceFolder)}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`zip ${__1.Input.cloudRunnerTests ? '' : '-q'} -r ${cacheKey} ${path_1.default.basename(sourceFolder)}`);
 console_1.assert(fs_1.default.existsSync(`${cacheKey}.zip`));
 console_1.assert(fs_1.default.existsSync(`${cacheFolder}`));
 console_1.assert(fs_1.default.existsSync(`${sourceFolder}`));
 console_1.assert(fs_1.default.existsSync(`${path_1.default.basename(sourceFolder)}`));
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
 remote_client_logger_1.RemoteClientLogger.log(`moved ${cacheKey}.zip to ${cacheFolder}`);
 console_1.assert(fs_1.default.existsSync(`${path_1.default.join(cacheFolder, cacheKey)}.zip`));
 if (__1.Input.cloudRunnerTests) {
@@ -568,12 +568,12 @@ class Caching {
 remote_client_logger_1.RemoteClientLogger.log(`Caching for ${path_1.default.basename(destinationFolder)}`);
 try {
 if (!fs_1.default.existsSync(cacheFolder)) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
 }
 if (!fs_1.default.existsSync(destinationFolder)) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${destinationFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${destinationFolder}`);
 }
-const latestInBranch = yield (yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
+const latestInBranch = yield (yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
 .replace(/\n/g, ``)
 .replace('.zip', '');
 process.chdir(cacheFolder);
@@ -581,12 +581,12 @@ class Caching {
 yield cloud_runner_logger_1.default.log(`cache key ${cacheKey} selection ${cacheSelection}`);
 if (fs_1.default.existsSync(`${cacheSelection}.zip`)) {
 if (__1.Input.cloudRunnerTests) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`tree ${destinationFolder}`);
 }
 remote_client_logger_1.RemoteClientLogger.log(`cache item exists`);
 console_1.assert(fs_1.default.existsSync(destinationFolder));
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`unzip -q ${cacheSelection} -d ${path_1.default.basename(destinationFolder)}`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mv ${path_1.default.basename(destinationFolder)}/* ${destinationFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`unzip -q ${cacheSelection} -d ${path_1.default.basename(destinationFolder)}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mv ${path_1.default.basename(destinationFolder)}/* ${destinationFolder}`);
 console_1.assert(fs_1.default.existsSync(`${path_1.default.join(destinationFolder, `${cacheSelection}.zip`)}`));
 }
 else {
@@ -609,7 +609,7 @@ class Caching {
 }
 static printFullCacheHierarchySize() {
 return __awaiter(this, void 0, void 0, function* () {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`echo ' '
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`echo ' '
 echo "LFS cache for $branch"
 du -sch "${cloud_runner_state_1.CloudRunnerState.lfsCacheFolderFull}/"
 echo '**'
@@ -630,7 +630,7 @@ exports.Caching = Caching;
 
 /***/ }),
 
-/***/ 87685:
+/***/ 66879:
 /***/ (function(__unused_webpack_module, exports, __webpack_require__) {
 
 "use strict";
@@ -645,10 +645,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.CloudRunnerAgentSystem = void 0;
+exports.CloudRunnerSystem = void 0;
 const child_process_1 = __webpack_require__(63129);
-const remote_client_logger_1 = __webpack_require__(68972);
-class CloudRunnerAgentSystem {
+const remote_client_logger_1 = __webpack_require__(28082);
+class CloudRunnerSystem {
 static Run(command) {
 return __awaiter(this, void 0, void 0, function* () {
 for (const element of command.split(`\n`)) {
@@ -683,12 +683,12 @@ class CloudRunnerAgentSystem {
 });
 }
 }
-exports.CloudRunnerAgentSystem = CloudRunnerAgentSystem;
+exports.CloudRunnerSystem = CloudRunnerSystem;
 
 
 /***/ }),
 
-/***/ 47011:
+/***/ 31938:
 /***/ (function(__unused_webpack_module, exports, __webpack_require__) {
 
 "use strict";
@@ -709,17 +709,17 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.LFSHashing = void 0;
 const path_1 = __importDefault(__webpack_require__(85622));
 const cloud_runner_state_1 = __webpack_require__(70912);
-const cloud_runner_agent_system_1 = __webpack_require__(87685);
+const cloud_runner_system_1 = __webpack_require__(66879);
 const fs_1 = __importDefault(__webpack_require__(35747));
 const console_1 = __webpack_require__(57082);
 const __1 = __webpack_require__(41359);
-const remote_client_logger_1 = __webpack_require__(68972);
+const remote_client_logger_1 = __webpack_require__(28082);
 class LFSHashing {
 static createLFSHashFiles() {
 return __awaiter(this, void 0, void 0, function* () {
 try {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
 console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid-sum`));
 console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid`));
 const lfsHashes = {
@@ -744,7 +744,7 @@ class LFSHashing {
 static hashAllFiles(folder) {
 return __awaiter(this, void 0, void 0, function* () {
 process.chdir(`${folder}`);
-return yield (yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`find -type f -exec md5sum "{}" + | sort | md5sum`))
+return yield (yield cloud_runner_system_1.CloudRunnerSystem.Run(`find -type f -exec md5sum "{}" + | sort | md5sum`))
 .replace(/\n/g, '')
 .split(` `)[0];
 });
@@ -755,7 +755,7 @@ exports.LFSHashing = LFSHashing;
 
 /***/ }),
 
-/***/ 68972:
+/***/ 28082:
 /***/ (function(__unused_webpack_module, exports, __webpack_require__) {
 
 "use strict";
@@ -806,17 +806,18 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.SetupCloudRunnerRepository = void 0;
 const fs_1 = __importDefault(__webpack_require__(35747));
 const cloud_runner_state_1 = __webpack_require__(70912);
-const caching_1 = __webpack_require__(35010);
-const lfs_hashing_1 = __webpack_require__(47011);
-const cloud_runner_agent_system_1 = __webpack_require__(87685);
+const caching_1 = __webpack_require__(38759);
+const lfs_hashing_1 = __webpack_require__(31938);
+const cloud_runner_system_1 = __webpack_require__(66879);
 const __1 = __webpack_require__(41359);
-const remote_client_logger_1 = __webpack_require__(68972);
+const remote_client_logger_1 = __webpack_require__(28082);
+const path_1 = __importDefault(__webpack_require__(85622));
 class SetupCloudRunnerRepository {
 static run() {
 return __awaiter(this, void 0, void 0, function* () {
 try {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.buildPathFull}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
 yield SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
 const lfsHashes = yield lfs_hashing_1.LFSHashing.createLFSHashFiles();
 if (fs_1.default.existsSync(cloud_runner_state_1.CloudRunnerState.libraryFolderFull)) {
@@ -837,17 +837,18 @@ class SetupCloudRunnerRepository {
 return __awaiter(this, void 0, void 0, function* () {
 try {
 remote_client_logger_1.RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
-process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git config --global advice.detachedHead false`);
+process.chdir(`${path_1.default.join(cloud_runner_state_1.CloudRunnerState.repoPathFull, '..')}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
 remote_client_logger_1.RemoteClientLogger.log(`Cloning the repository being built:`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git lfs install --skip-smudge`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`git lfs install --skip-smudge`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`git clone ${cloud_runner_state_1.CloudRunnerState.targetBuildRepoUrl} ${path_1.default.basename(cloud_runner_state_1.CloudRunnerState.repoPathFull)}`);
+process.chdir(`${cloud_runner_state_1.CloudRunnerState.repoPathFull}`);
 if (__1.Input.cloudRunnerTests) {
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`ls -lh`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`tree`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -lh`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`tree`);
 }
 remote_client_logger_1.RemoteClientLogger.log(`${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git checkout ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`git checkout ${cloud_runner_state_1.CloudRunnerState.buildParams.branch}`);
 remote_client_logger_1.RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
 }
 catch (error) {
@@ -858,7 +860,7 @@ class SetupCloudRunnerRepository {
 static pullLatestLFS() {
 return __awaiter(this, void 0, void 0, function* () {
 process.chdir(cloud_runner_state_1.CloudRunnerState.repoPathFull);
-yield cloud_runner_agent_system_1.CloudRunnerAgentSystem.Run(`git lfs pull`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`git lfs pull`);
 remote_client_logger_1.RemoteClientLogger.log(`pulled latest LFS files`);
 });
 }
@@ -2725,6 +2727,7 @@ class TaskParameterSerializer {
 array = array.map((x) => {
 x.name = __1.Input.ToEnvVarFormat(x.name);
 x.value = `${x.value}`;
+core.getOutput(x);
 core.setOutput(x.name, x.value);
 return x;
 });
File diff suppressed because one or more lines are too long
@@ -4,7 +4,7 @@ import * as core from '@actions/core';
 import { ActionYamlReader } from '../input-readers/action-yaml';
 import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
 import { CliFunction, GetAllCliModes, GetCliFunctions } from './cli-decorator';
-import { RemoteClientLogger } from './remote-client/remote-client-logger';
+import { RemoteClientLogger } from './remote-client/remote-client-services/remote-client-logger';
 import { CloudRunnerState } from '../cloud-runner/state/cloud-runner-state';
 import { SetupCloudRunnerRepository } from './remote-client/setup-cloud-runner-repository';
 export class CLI {
@@ -1,10 +1,10 @@
 import { assert } from 'console';
 import fs from 'fs';
 import path from 'path';
-import { Input } from '../..';
-import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
-import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
-import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
+import { Input } from '../../..';
+import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
+import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
+import { CloudRunnerSystem } from './cloud-runner-system';
 import { LFSHashing } from './lfs-hashing';
 import { RemoteClientLogger } from './remote-client-logger';
 
@@ -20,14 +20,14 @@ export class Caching {
 CloudRunnerLogger.log(`Hashed cache folder ${await LFSHashing.hashAllFiles(sourceFolder)}`);
 }
 
-await CloudRunnerAgentSystem.Run(
+await CloudRunnerSystem.Run(
 `zip ${Input.cloudRunnerTests ? '' : '-q'} -r ${cacheKey} ${path.basename(sourceFolder)}`,
 );
 assert(fs.existsSync(`${cacheKey}.zip`));
 assert(fs.existsSync(`${cacheFolder}`));
 assert(fs.existsSync(`${sourceFolder}`));
 assert(fs.existsSync(`${path.basename(sourceFolder)}`));
-await CloudRunnerAgentSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
+await CloudRunnerSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
 RemoteClientLogger.log(`moved ${cacheKey}.zip to ${cacheFolder}`);
 assert(fs.existsSync(`${path.join(cacheFolder, cacheKey)}.zip`));
 
@@ -42,14 +42,14 @@ export class Caching {
 RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
 try {
 if (!fs.existsSync(cacheFolder)) {
-await CloudRunnerAgentSystem.Run(`mkdir -p ${cacheFolder}`);
+await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
 }
 
 if (!fs.existsSync(destinationFolder)) {
-await CloudRunnerAgentSystem.Run(`mkdir -p ${destinationFolder}`);
+await CloudRunnerSystem.Run(`mkdir -p ${destinationFolder}`);
 }
 
-const latestInBranch = await (await CloudRunnerAgentSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
+const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
 .replace(/\n/g, ``)
 .replace('.zip', '');
 
@@ -60,12 +60,12 @@ export class Caching {
 
 if (fs.existsSync(`${cacheSelection}.zip`)) {
 if (Input.cloudRunnerTests) {
-await CloudRunnerAgentSystem.Run(`tree ${destinationFolder}`);
+await CloudRunnerSystem.Run(`tree ${destinationFolder}`);
 }
 RemoteClientLogger.log(`cache item exists`);
 assert(fs.existsSync(destinationFolder));
-await CloudRunnerAgentSystem.Run(`unzip -q ${cacheSelection} -d ${path.basename(destinationFolder)}`);
-await CloudRunnerAgentSystem.Run(`mv ${path.basename(destinationFolder)}/* ${destinationFolder}`);
+await CloudRunnerSystem.Run(`unzip -q ${cacheSelection} -d ${path.basename(destinationFolder)}`);
+await CloudRunnerSystem.Run(`mv ${path.basename(destinationFolder)}/* ${destinationFolder}`);
 assert(fs.existsSync(`${path.join(destinationFolder, `${cacheSelection}.zip`)}`));
 } else {
 RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
@@ -86,7 +86,7 @@ export class Caching {
 }
 
 public static async printFullCacheHierarchySize() {
-await CloudRunnerAgentSystem.Run(
+await CloudRunnerSystem.Run(
 `echo ' '
 echo "LFS cache for $branch"
 du -sch "${CloudRunnerState.lfsCacheFolderFull}/"
@@ -1,7 +1,7 @@
 import { exec } from 'child_process';
 import { RemoteClientLogger } from './remote-client-logger';
 
-export class CloudRunnerAgentSystem {
+export class CloudRunnerSystem {
 public static async Run(command: string) {
 for (const element of command.split(`\n`)) {
 RemoteClientLogger.log(element);
@@ -1,16 +1,16 @@
 import path from 'path';
-import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
-import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
+import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
+import { CloudRunnerSystem } from './cloud-runner-system';
 import fs from 'fs';
 import { assert } from 'console';
-import { Input } from '../..';
+import { Input } from '../../..';
 import { RemoteClientLogger } from './remote-client-logger';
 
 export class LFSHashing {
 public static async createLFSHashFiles() {
 try {
-await CloudRunnerAgentSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-await CloudRunnerAgentSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+await CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+await CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
 assert(fs.existsSync(`.lfs-assets-guid-sum`));
 assert(fs.existsSync(`.lfs-assets-guid`));
 const lfsHashes = {
@@ -32,7 +32,7 @@ export class LFSHashing {
 }
 public static async hashAllFiles(folder: string) {
 process.chdir(`${folder}`);
-return await (await CloudRunnerAgentSystem.Run(`find -type f -exec md5sum "{}" + | sort | md5sum`))
+return await (await CloudRunnerSystem.Run(`find -type f -exec md5sum "{}" + | sort | md5sum`))
 .replace(/\n/g, '')
 .split(` `)[0];
 }
@@ -1,4 +1,4 @@
-import CloudRunnerLogger from '../../cloud-runner/services/cloud-runner-logger';
+import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
 
 export class RemoteClientLogger {
 public static log(message: string) {
@@ -1,16 +1,17 @@
 import fs from 'fs';
 import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
-import { Caching } from './caching';
-import { LFSHashing } from './lfs-hashing';
-import { CloudRunnerAgentSystem } from './cloud-runner-agent-system';
+import { Caching } from './remote-client-services/caching';
+import { LFSHashing } from './remote-client-services/lfs-hashing';
+import { CloudRunnerSystem } from './remote-client-services/cloud-runner-system';
 import { Input } from '../..';
-import { RemoteClientLogger } from './remote-client-logger';
+import { RemoteClientLogger } from './remote-client-services/remote-client-logger';
+import path from 'path';
 
 export class SetupCloudRunnerRepository {
 public static async run() {
 try {
-await CloudRunnerAgentSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
-await CloudRunnerAgentSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
+await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
+await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
 await SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
 const lfsHashes = await LFSHashing.createLFSHashFiles();
 if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
@@ -38,17 +39,20 @@ export class SetupCloudRunnerRepository {
 private static async cloneRepoWithoutLFSFiles() {
 try {
 RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
-process.chdir(CloudRunnerState.repoPathFull);
-await CloudRunnerAgentSystem.Run(`git config --global advice.detachedHead false`);
+process.chdir(`${path.join(CloudRunnerState.repoPathFull, '..')}`);
+await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
 RemoteClientLogger.log(`Cloning the repository being built:`);
-await CloudRunnerAgentSystem.Run(`git lfs install --skip-smudge`);
-await CloudRunnerAgentSystem.Run(`git clone ${CloudRunnerState.targetBuildRepoUrl}`);
+await CloudRunnerSystem.Run(`git lfs install --skip-smudge`);
+await CloudRunnerSystem.Run(
+`git clone ${CloudRunnerState.targetBuildRepoUrl} ${path.basename(CloudRunnerState.repoPathFull)}`,
+);
+process.chdir(`${CloudRunnerState.repoPathFull}`);
 if (Input.cloudRunnerTests) {
-await CloudRunnerAgentSystem.Run(`ls -lh`);
-await CloudRunnerAgentSystem.Run(`tree`);
+await CloudRunnerSystem.Run(`ls -lh`);
+await CloudRunnerSystem.Run(`tree`);
 }
 RemoteClientLogger.log(`${CloudRunnerState.buildParams.branch}`);
-await CloudRunnerAgentSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
+await CloudRunnerSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
 RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
 } catch (error) {
 throw error;
@@ -57,7 +61,7 @@ export class SetupCloudRunnerRepository {
 
 private static async pullLatestLFS() {
 process.chdir(CloudRunnerState.repoPathFull);
-await CloudRunnerAgentSystem.Run(`git lfs pull`);
+await CloudRunnerSystem.Run(`git lfs pull`);
 RemoteClientLogger.log(`pulled latest LFS files`);
 }
 }
@@ -41,6 +41,7 @@ export class TaskParameterSerializer {
 array = array.map((x) => {
 x.name = Input.ToEnvVarFormat(x.name);
 x.value = `${x.value}`;
+core.getOutput(x);
 core.setOutput(x.name, x.value);
 return x;
 });