pipeline improvements
parent 8706e5cb98
commit 079dd160e2
@@ -2475,7 +2475,7 @@ Parameters:
     Type: Number
     Description: How much CPU to give the container. 1024 is 1 CPU
   ContainerMemory:
-    Default: 2048
+    Default: 4096
     Type: Number
     Description: How much memory in megabytes to give the container
   BUILDGUID:
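The only change in this hunk doubles the default container memory for the cloud runner task from 2048 MB to 4096 MB. A CloudFormation Default only applies when the caller supplies no explicit value, so an explicit parameter still wins. A minimal sketch of that interaction, assuming the aws-sdk v2 CloudFormation client; the stack name and template file path are illustrative, not from this commit:

import fs from 'fs';
import CloudFormation from 'aws-sdk/clients/cloudformation';

// Illustrative only: read the task-definition template this hunk edits.
const templateBody = fs.readFileSync('task-definition.yml', 'utf8');

async function createTaskStack(): Promise<void> {
  const cloudFormation = new CloudFormation();
  await cloudFormation
    .createStack({
      StackName: 'example-cloud-runner-task', // hypothetical name
      TemplateBody: templateBody,
      // An explicit ParameterValue overrides the template Default (4096 after this change);
      // omit the entry to fall back to the new default.
      Parameters: [{ ParameterKey: 'ContainerMemory', ParameterValue: '8192' }],
    })
    .promise();
}

// createTaskStack().catch((error) => console.error(error));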
@@ -4339,6 +4339,7 @@ class Caching {
    }
    static PushToCache(cacheFolder, sourceFolder, cacheArtifactName) {
        return __awaiter(this, void 0, void 0, function* () {
            cloud_runner_logger_1.default.log(`Pushing to cache ${sourceFolder}`);
            cacheArtifactName = cacheArtifactName.replace(' ', '');
            const startPath = process.cwd();
            let compressionSuffix = '';
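For orientation, the bundled Caching.PushToCache above compiles from TypeScript source roughly like the sketch below. Only the log line, the space-stripping of cacheArtifactName, startPath, and compressionSuffix are visible in this hunk; the tar and mv steps are assumptions for illustration.

import path from 'path';
import { CloudRunnerSystem } from './cloud-runner-system';
import CloudRunnerLogger from './cloud-runner-logger';

export class Caching {
  // Sketch of the push path; the exact archive commands are assumed, not taken from this diff.
  public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheArtifactName: string) {
    CloudRunnerLogger.log(`Pushing to cache ${sourceFolder}`);
    cacheArtifactName = cacheArtifactName.replace(' ', '');
    const startPath = process.cwd();
    const compressionSuffix = ''; // e.g. an archive suffix when compression is enabled (assumption)
    try {
      process.chdir(path.dirname(sourceFolder));
      // Assumed archiving step: bundle the folder, then move the artifact into the cache folder.
      await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar${compressionSuffix} "${path.basename(sourceFolder)}"`);
      await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar${compressionSuffix} "${cacheFolder}"`);
    } finally {
      process.chdir(startPath);
    }
  }
}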
@@ -4382,8 +4383,8 @@ class Caching {
    }
    static PullFromCache(cacheFolder, destinationFolder, cacheArtifactName = ``) {
        return __awaiter(this, void 0, void 0, function* () {
            cloud_runner_logger_1.default.log(`Pulling from cache ${destinationFolder}`);
            if (cloud_runner_1.default.buildParameters.cloudRunnerDebugSkipCache) {
                yield new Promise((resolve) => resolve);
                return;
            }
            cacheArtifactName = cacheArtifactName.replace(' ', '');
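A small aside on the skip-cache branch above: as transcribed, new Promise((resolve) => resolve) only references the resolver without calling it, so the yielded promise never settles and the surrounding __awaiter would wait on it indefinitely. A standalone TypeScript demonstration of the difference (not project code):

// The first await settles immediately; the second never does.
async function demo(): Promise<void> {
  await new Promise<void>((resolve) => resolve()); // resolve is called -> the promise settles
  console.log('reached');

  await new Promise<void>((resolve) => resolve); // resolve is only referenced -> hangs here forever
  console.log('never reached');
}

demo();

If an intentional no-op await is wanted before the early return, calling resolve() (or dropping the await entirely) avoids the hang.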
@@ -4400,6 +4401,8 @@ class Caching {
            if (!(yield fileExists(destinationFolder))) {
                yield fs_1.default.promises.mkdir(destinationFolder);
            }
            cloud_runner_logger_1.default.log(yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -t "${cacheFolder}"`));
            cloud_runner_logger_1.default.log(yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar${compressionSuffix}$`));
            const latestInBranch = yield (yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar${compressionSuffix}$ | head -1`))
                .replace(/\n/g, ``)
                .replace(`.tar${compressionSuffix}`, '');
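The lookup above leans on shell ordering: ls -t sorts the cache folder by modification time, grep keeps only the tar archives, and head -1 takes the newest one; the trailing newline and the .tar suffix are then stripped to recover the artifact name. A TypeScript sketch of the same idea, written without the extra yield around the already-awaited string (function name and default suffix are illustrative):

import { CloudRunnerSystem } from './cloud-runner-system'; // import path as used elsewhere in this module

// Resolve the newest cache archive in a cache folder.
async function latestCacheArtifact(cacheFolder: string, compressionSuffix = ''): Promise<string> {
  const newest = await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar${compressionSuffix}$ | head -1`);

  // e.g. `Library-abc123.tar\n` -> `Library-abc123`
  return newest.replace(/\n/g, ``).replace(`.tar${compressionSuffix}`, '');
}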
@@ -4522,6 +4525,7 @@ class RemoteClient {
                yield caching_1.Caching.handleCachePurging();
            }
            catch (error) {
                remote_client_logger_1.RemoteClientLogger.logWarning(`!Warning!: Failed setting up repo`);
                throw error;
            }
        });
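The catch above logs a warning through the remote client logger and rethrows, so the failure is visible in the cloud runner log while the step still fails. In source form the pattern is roughly the following sketch; the method name, setup steps, and import paths are illustrative, and only the class and function names shown in the hunk are taken from it.

import { Caching } from './caching'; // illustrative import paths
import { RemoteClientLogger } from './remote-client-logger';

// Warn-and-rethrow: surface the problem in the log, then let the caller fail the build.
async function setUpRepository(): Promise<void> {
  try {
    // ...clone, checkout, and cache pulls happen here in the real method...
    await Caching.handleCachePurging();
  } catch (error) {
    RemoteClientLogger.logWarning(`!Warning!: Failed setting up repo`);
    throw error;
  }
}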
@@ -5581,31 +5585,25 @@ const path_1 = __importDefault(__nccwpck_require__(71017));
 const cloud_runner_folders_1 = __nccwpck_require__(13527);
 const cloud_runner_system_1 = __nccwpck_require__(99393);
 const fs_1 = __importDefault(__nccwpck_require__(57147));
-const console_1 = __nccwpck_require__(96206);
 const cli_1 = __nccwpck_require__(55651);
 const cli_functions_repository_1 = __nccwpck_require__(85301);
+const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
 class LfsHashing {
     static createLFSHashFiles() {
         return __awaiter(this, void 0, void 0, function* () {
-            try {
-                yield cloud_runner_system_1.CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
-                yield cloud_runner_system_1.CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
-                console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid-sum`));
-                console_1.assert(fs_1.default.existsSync(`.lfs-assets-guid`));
-                const lfsHashes = {
-                    lfsGuid: fs_1.default
-                        .readFileSync(`${path_1.default.join(cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid`)}`, 'utf8')
-                        .replace(/\n/g, ``),
-                    lfsGuidSum: fs_1.default
-                        .readFileSync(`${path_1.default.join(cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid-sum`)}`, 'utf8')
-                        .replace(' .lfs-assets-guid', '')
-                        .replace(/\n/g, ``),
-                };
-                return lfsHashes;
-            }
-            catch (error) {
-                throw error;
-            }
+            yield cloud_runner_system_1.CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
+            yield cloud_runner_system_1.CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
+            const lfsHashes = {
+                lfsGuid: fs_1.default
+                    .readFileSync(`${path_1.default.join(cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid`)}`, 'utf8')
+                    .replace(/\n/g, ``),
+                lfsGuidSum: fs_1.default
+                    .readFileSync(`${path_1.default.join(cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid-sum`)}`, 'utf8')
+                    .replace(' .lfs-assets-guid', '')
+                    .replace(/\n/g, ``),
+            };
+            cloud_runner_logger_1.default.log(`lfs hash completion: ${lfsHashes.lfsGuid} ${lfsHashes.lfsGuidSum}`);
+            return lfsHashes;
         });
     }
     static hashAllFiles(folder) {
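Reconstructed from the bundle above, the new createLFSHashFiles drops the try/catch that only rethrew, removes the assert checks, and logs the computed hashes before returning them. In TypeScript source form it reads roughly as follows (method visibility and return typing are assumed):

import fs from 'fs';
import path from 'path';
import { CloudRunnerFolders } from './cloud-runner-folders';
import { CloudRunnerSystem } from './cloud-runner-system';
import CloudRunnerLogger from './cloud-runner-logger';

export class LfsHashing {
  public static async createLFSHashFiles() {
    // Hash the list of LFS-tracked files so the cache key changes whenever LFS content changes.
    await CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
    await CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
    const lfsHashes = {
      lfsGuid: fs
        .readFileSync(`${path.join(CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid`)}`, 'utf8')
        .replace(/\n/g, ``),
      lfsGuidSum: fs
        .readFileSync(`${path.join(CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid-sum`)}`, 'utf8')
        .replace(' .lfs-assets-guid', '')
        .replace(/\n/g, ``),
    };
    CloudRunnerLogger.log(`lfs hash completion: ${lfsHashes.lfsGuid} ${lfsHashes.lfsGuidSum}`);
    return lfsHashes;
  }
}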
File diff suppressed because one or more lines are too long
@@ -2,7 +2,6 @@ import path from 'path';
 import { CloudRunnerFolders } from './cloud-runner-folders';
 import { CloudRunnerSystem } from './cloud-runner-system';
 import fs from 'fs';
-import { assert } from 'console';
 import { Cli } from '../../cli/cli';
 import { CliFunction } from '../../cli/cli-functions-repository';
 import CloudRunnerLogger from './cloud-runner-logger';