improving storage api

parent 46610398aa
commit fed0044084

@@ -658,6 +658,9 @@ class CloudRunnerOptions {
    static cachePullOverrideCommand() {
        return CloudRunnerOptions.getInput('cachePullOverrideCommand') || '';
    }
    static storageProvider() {
        return CloudRunnerOptions.getInput('storageProvider') || 'shared-volume'; // TODO also need to accept premade types AWS S3 + GCP Storage and custom yaml definitions
    }
    static readInputFromOverrideList() {
        return CloudRunnerOptions.getInput('readInputFromOverrideList') || '';
    }

@@ -3739,19 +3742,9 @@ class Caching {
    if (cloud_runner_1.default.buildParameters.cloudRunnerIntegrationTests) {
        cloud_runner_logger_1.default.log(`Hashed cache folder ${yield lfs_hashing_1.LfsHashing.hashAllFiles(sourceFolder)} ${sourceFolder} ${path_1.default.basename(sourceFolder)}`);
    }
    // eslint-disable-next-line func-style
    const formatFunction = function (format) {
        const arguments_ = Array.prototype.slice.call([path_1.default.resolve(sourceFolder, '..'), cacheFolder, cacheArtifactName], 1);
        return format.replace(/{(\d+)}/g, function (match, number) {
            return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
        });
    };
    yield cloud_runner_system_1.CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar.lz4 ${path_1.default.basename(sourceFolder)}`);
    console_1.assert(yield fileExists(`${cacheArtifactName}.tar.lz4`), 'cache archive exists');
    console_1.assert(yield fileExists(path_1.default.basename(sourceFolder)), 'source folder exists');
    if (cloud_runner_1.default.buildParameters.cachePushOverrideCommand) {
        yield cloud_runner_system_1.CloudRunnerSystem.Run(formatFunction(cloud_runner_1.default.buildParameters.cachePushOverrideCommand));
    }
    yield cloud_runner_system_1.CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar.lz4 ${cacheFolder}`);
    remote_client_logger_1.RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
    console_1.assert(yield fileExists(`${path_1.default.join(cacheFolder, cacheArtifactName)}.tar.lz4`), 'cache archive exists inside cache folder');

@@ -3783,16 +3776,6 @@ class Caching {
        ? cacheArtifactName
        : latestInBranch;
    yield cloud_runner_logger_1.default.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);
    // eslint-disable-next-line func-style
    const formatFunction = function (format) {
        const arguments_ = Array.prototype.slice.call([path_1.default.resolve(destinationFolder, '..'), cacheFolder, cacheArtifactName], 1);
        return format.replace(/{(\d+)}/g, function (match, number) {
            return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
        });
    };
    if (cloud_runner_1.default.buildParameters.cachePullOverrideCommand) {
        yield cloud_runner_system_1.CloudRunnerSystem.Run(formatFunction(cloud_runner_1.default.buildParameters.cachePullOverrideCommand));
    }
    if (yield fileExists(`${cacheSelection}.tar.lz4`)) {
        const resultsFolder = `results${cloud_runner_1.default.buildParameters.buildGuid}`;
        yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);

@@ -4024,6 +4007,7 @@ exports.Hook = exports.CloudRunnerCustomHooks = void 0;
const yaml_1 = __importDefault(__nccwpck_require__(44603));
const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
class CloudRunnerCustomHooks {
    // TODO also accept hooks as yaml files in the repo
    static ApplyHooksToCommands(commands, buildParameters) {
        const hooks = CloudRunnerCustomHooks.getHooks(buildParameters.customJobHooks).filter((x) => x.step.includes(`all`));
        return `echo "---"

@@ -4258,6 +4242,7 @@ const formatFunction = (value, arguments_) => {
    return value;
};
class CloudRunnerQueryOverride {
    // TODO accept premade secret sources or custom secret source definition yamls
    static query(key, alternativeKey) {
        if (CloudRunnerQueryOverride.queryOverrides && CloudRunnerQueryOverride.queryOverrides[key] !== undefined) {
            return CloudRunnerQueryOverride.queryOverrides[key];

@@ -4747,6 +4732,7 @@ class BuildAutomationWorkflow {
    }
    static standardBuildAutomation(baseImage) {
        return __awaiter(this, void 0, void 0, function* () {
            // TODO accept post and pre build steps as yaml files in the repo
            try {
                cloud_runner_logger_1.default.log(`Cloud Runner is running standard build automation`);
                if (!cloud_runner_1.default.buildParameters.isCliMode)

File diff suppressed because one or more lines are too long

@@ -18,6 +18,7 @@ module.exports = {
  transform: {
    '^.+\\.ts$': 'ts-jest',
  },
  autoRun: false,

  // Indicates whether each individual test should be reported during the run
  verbose: true,

@@ -85,6 +85,10 @@ class CloudRunnerOptions {
    return CloudRunnerOptions.getInput('cachePullOverrideCommand') || '';
  }

  static storageProvider() {
    return CloudRunnerOptions.getInput('storageProvider') || 'shared-volume'; // TODO also need to accept premade types AWS S3 + GCP Storage and custom yaml definitions
  }

  static readInputFromOverrideList() {
    return CloudRunnerOptions.getInput('readInputFromOverrideList') || '';
  }

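Note (not part of the commit): storageProvider() currently only defaults to 'shared-volume'; the TODO says premade AWS S3 / GCP Storage types and custom yaml definitions are still to come. Purely as a sketch of where this option could be consumed, with provider names and bucket paths assumed rather than taken from the codebase:

// Hypothetical, illustrative only: branching a cache push command on CloudRunnerOptions.storageProvider().
function cachePushCommandFor(provider: string, archive: string, cacheFolder: string): string {
  switch (provider) {
    case 'shared-volume':
      return `mv ${archive} ${cacheFolder}`; // mirrors what Caching does today with the shared volume
    case 'aws-s3': // assumed future value, per the TODO
      return `aws s3 cp ${archive} s3://example-cache-bucket/${archive}`;
    case 'gcp-storage': // assumed future value, per the TODO
      return `gsutil cp ${archive} gs://example-cache-bucket/${archive}`;
    default:
      throw new Error(`unsupported storage provider: ${provider}`);
  }
}
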
@@ -59,23 +59,9 @@ export class Caching {
          )}`,
        );
      }
      // eslint-disable-next-line func-style
      const formatFunction = function (format: string) {
        const arguments_ = Array.prototype.slice.call(
          [path.resolve(sourceFolder, '..'), cacheFolder, cacheArtifactName],
          1,
        );

        return format.replace(/{(\d+)}/g, function (match, number) {
          return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
        });
      };
      await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar.lz4 ${path.basename(sourceFolder)}`);
      assert(await fileExists(`${cacheArtifactName}.tar.lz4`), 'cache archive exists');
      assert(await fileExists(path.basename(sourceFolder)), 'source folder exists');
      if (CloudRunner.buildParameters.cachePushOverrideCommand) {
        await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePushOverrideCommand));
      }
      await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar.lz4 ${cacheFolder}`);
      RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
      assert(

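Note (not part of the diff): the formatFunction removed above expands {n} placeholders in the user-supplied cachePushOverrideCommand / cachePullOverrideCommand. Because Array.prototype.slice.call(..., 1) drops the first element, {0} maps to cacheFolder and {1} to cacheArtifactName. A minimal standalone sketch of that substitution, with a hypothetical override command and made-up values:

// Illustrative only: same regex substitution as the removed formatFunction.
const arguments_ = Array.prototype.slice.call(['/repo', '/data/cache', 'build-abc123'], 1); // ['/data/cache', 'build-abc123']
const overrideCommand = 'aws s3 cp {1}.tar.lz4 s3://example-bucket/cache/{1}.tar.lz4'; // hypothetical cachePushOverrideCommand
const formatted = overrideCommand.replace(/{(\d+)}/g, (match, number) =>
  typeof arguments_[number] !== 'undefined' ? arguments_[number] : match,
);
// formatted === 'aws s3 cp build-abc123.tar.lz4 s3://example-bucket/cache/build-abc123.tar.lz4'
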
@@ -113,22 +99,6 @@ export class Caching {
        : latestInBranch;
      await CloudRunnerLogger.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);

      // eslint-disable-next-line func-style
      const formatFunction = function (format: string) {
        const arguments_ = Array.prototype.slice.call(
          [path.resolve(destinationFolder, '..'), cacheFolder, cacheArtifactName],
          1,
        );

        return format.replace(/{(\d+)}/g, function (match, number) {
          return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
        });
      };

      if (CloudRunner.buildParameters.cachePullOverrideCommand) {
        await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePullOverrideCommand));
      }

      if (await fileExists(`${cacheSelection}.tar.lz4`)) {
        const resultsFolder = `results${CloudRunner.buildParameters.buildGuid}`;
        await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);

@@ -4,6 +4,7 @@ import CloudRunnerSecret from './cloud-runner-secret';
import CloudRunner from '../cloud-runner';

export class CloudRunnerCustomHooks {
  // TODO also accept hooks as yaml files in the repo
  public static ApplyHooksToCommands(commands: string, buildParameters: BuildParameters): string {
    const hooks = CloudRunnerCustomHooks.getHooks(buildParameters.customJobHooks).filter((x) => x.step.includes(`all`));

@@ -0,0 +1,2 @@
name: gcp-secret-manager
read-secret: gcloud secrets versions access 1 --secret=\"{0}\"

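Note (not part of the diff): the new gcp-secret-manager definition appears to reuse the same {n} placeholder convention, so {0} would presumably be replaced with the name of the secret to read before the command is run. A minimal sketch with a hypothetical secret name:

// Illustrative only: expanding the read-secret template for a hypothetical secret named UNITY_SERIAL.
const secretArguments = ['UNITY_SERIAL']; // {0} maps to index 0
const template = 'gcloud secrets versions access 1 --secret="{0}"';
const readSecretCommand = template.replace(/{(\d+)}/g, (match, number) =>
  typeof secretArguments[number] !== 'undefined' ? secretArguments[number] : match,
);
// readSecretCommand === 'gcloud secrets versions access 1 --secret="UNITY_SERIAL"'
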
@@ -13,6 +13,8 @@ const formatFunction = (value, arguments_) => {
class CloudRunnerQueryOverride {
  static queryOverrides: any;

  // TODO accept premade secret sources or custom secret source definition yamls

  public static query(key, alternativeKey) {
    if (CloudRunnerQueryOverride.queryOverrides && CloudRunnerQueryOverride.queryOverrides[key] !== undefined) {
      return CloudRunnerQueryOverride.queryOverrides[key];

@@ -19,6 +19,7 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
  }

  private static async standardBuildAutomation(baseImage: any) {
    // TODO accept post and pre build steps as yaml files in the repo
    try {
      CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);