handle cloud runner git sync via sha not only branch
parent 007f2d4ee3
commit 384f27d18c
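The change the title describes: when a retained workspace already contains a clone, the runner now hard-resets it to the exact commit SHA of the build instead of relying only on the branch head. A minimal sketch of that flow, assuming the project's CloudRunnerSystem.Run shell helper and the gitSha build parameter that appear in the hunks below; the wrapper function itself is illustrative, not part of the commit.

// Stand-in declaration for the project's shell helper (illustrative, not the real import).
declare const CloudRunnerSystem: { Run(command: string): Promise<string> };

// Sketch: sync a retained workspace to the triggering commit rather than the branch tip.
async function syncRetainedWorkspace(repoPathAbsolute: string, gitSha: string): Promise<void> {
  process.chdir(repoPathAbsolute);
  // Assumes the commit is already reachable in the retained clone; the diff below issues
  // exactly this reset in the "repo exists - skipping clone" branch.
  await CloudRunnerSystem.Run(`git reset --hard ${gitSha}`);
}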
@@ -297,6 +297,8 @@ class BuildParameters {
 kubeStorageClass: cloud_runner_options_1.default.kubeStorageClass,
 cacheKey: cloud_runner_options_1.default.cacheKey,
 retainWorkspace: cloud_runner_options_1.default.retainWorkspaces,
+useSharedLargePackages: cloud_runner_options_1.default.useSharedLargePackages,
+useLZ4Compression: cloud_runner_options_1.default.useLZ4Compression,
 };
 });
 }
@@ -925,6 +927,12 @@ class CloudRunnerOptions {
 static get retainWorkspacesMax() {
 return Number(CloudRunnerOptions.getInput(`retainWorkspacesMax`)) || 5;
 }
+static get useSharedLargePackages() {
+return CloudRunnerOptions.getInput(`useSharedLargePackages`) || false;
+}
+static get useLZ4Compression() {
+return CloudRunnerOptions.getInput(`useLZ4Compression`) || true;
+}
 static ToEnvVarFormat(input) {
 if (input.toUpperCase() === input) {
 return input;
@@ -4002,6 +4010,7 @@ class Caching {
 return __awaiter(this, void 0, void 0, function* () {
 cacheArtifactName = cacheArtifactName.replace(' ', '');
 const startPath = process.cwd();
+const compressionSuffix = cloud_runner_1.default.buildParameters.useLZ4Compression ? '.lz4' : '';
 try {
 if (!(yield fileExists(cacheFolder))) {
 yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
@@ -4021,13 +4030,13 @@ class Caching {
 process.chdir(`${startPath}`);
 return;
 }
-yield cloud_runner_system_1.CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar.lz4 ${path_1.default.basename(sourceFolder)}`);
-yield cloud_runner_system_1.CloudRunnerSystem.Run(`du ${cacheArtifactName}.tar.lz4`);
-console_1.assert(yield fileExists(`${cacheArtifactName}.tar.lz4`), 'cache archive exists');
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar${compressionSuffix} ${path_1.default.basename(sourceFolder)}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`du ${cacheArtifactName}.tar${compressionSuffix}`);
+console_1.assert(yield fileExists(`${cacheArtifactName}.tar${compressionSuffix}`), 'cache archive exists');
 console_1.assert(yield fileExists(path_1.default.basename(sourceFolder)), 'source folder exists');
-yield cloud_runner_system_1.CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar.lz4 ${cacheFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar${compressionSuffix} ${cacheFolder}`);
 remote_client_logger_1.RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
-console_1.assert(yield fileExists(`${path_1.default.join(cacheFolder, cacheArtifactName)}.tar.lz4`), 'cache archive exists inside cache folder');
+console_1.assert(yield fileExists(`${path_1.default.join(cacheFolder, cacheArtifactName)}.tar${compressionSuffix}`), 'cache archive exists inside cache folder');
 }
 catch (error) {
 process.chdir(`${startPath}`);
@@ -4039,6 +4048,7 @@ class Caching {
 static PullFromCache(cacheFolder, destinationFolder, cacheArtifactName = ``) {
 return __awaiter(this, void 0, void 0, function* () {
 cacheArtifactName = cacheArtifactName.replace(' ', '');
+const compressionSuffix = cloud_runner_1.default.buildParameters.useLZ4Compression ? '.lz4' : '';
 const startPath = process.cwd();
 remote_client_logger_1.RemoteClientLogger.log(`Caching for ${path_1.default.basename(destinationFolder)}`);
 try {
@@ -4048,20 +4058,20 @@ class Caching {
 if (!(yield fileExists(destinationFolder))) {
 yield fs_1.default.promises.mkdir(destinationFolder);
 }
-const latestInBranch = yield (yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar.lz4$ | head -1`))
+const latestInBranch = yield (yield cloud_runner_system_1.CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar${compressionSuffix}$ | head -1`))
 .replace(/\n/g, ``)
-.replace('.tar.lz4', '');
+.replace(`.tar${compressionSuffix}`, '');
 process.chdir(cacheFolder);
-const cacheSelection = cacheArtifactName !== `` && (yield fileExists(`${cacheArtifactName}.tar.lz4`))
+const cacheSelection = cacheArtifactName !== `` && (yield fileExists(`${cacheArtifactName}.tar${compressionSuffix}`))
 ? cacheArtifactName
 : latestInBranch;
 yield cloud_runner_logger_1.default.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);
-if (yield fileExists(`${cacheSelection}.tar.lz4`)) {
+if (yield fileExists(`${cacheSelection}.tar${compressionSuffix}`)) {
 const resultsFolder = `results${cloud_runner_1.default.buildParameters.buildGuid}`;
 yield cloud_runner_system_1.CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
-remote_client_logger_1.RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar.lz4`);
+remote_client_logger_1.RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar${compressionSuffix}`);
 const fullResultsFolder = path_1.default.join(cacheFolder, resultsFolder);
-yield cloud_runner_system_1.CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar.lz4 -C ${fullResultsFolder}`);
+yield cloud_runner_system_1.CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar${compressionSuffix} -C ${fullResultsFolder}`);
 remote_client_logger_1.RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
 console_1.assert(yield fileExists(fullResultsFolder), `cache extraction results folder exists`);
 const destinationParentFolder = path_1.default.resolve(destinationFolder, '..');
@@ -4075,7 +4085,7 @@ class Caching {
 else {
 remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${cacheArtifactName} doesn't exist ${destinationFolder}`);
 if (cacheSelection !== ``) {
-remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar.lz4 doesn't exist ${destinationFolder}`);
+remote_client_logger_1.RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar${compressionSuffix} doesn't exist ${destinationFolder}`);
 throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
 }
 }
@@ -4182,13 +4192,13 @@ class RemoteClient {
 }
 static cloneRepoWithoutLFSFiles() {
 return __awaiter(this, void 0, void 0, function* () {
+process.chdir(`${cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute}`);
 if (fs_1.default.existsSync(cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute)) {
 remote_client_logger_1.RemoteClientLogger.log(`${cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute} repo exists - skipping clone - retained workspace mode ${cloud_runner_1.default.buildParameters.retainWorkspace}`);
 yield cloud_runner_system_1.CloudRunnerSystem.Run(`git reset --hard ${cloud_runner_1.default.buildParameters.gitSha}`);
 return;
 }
 try {
-process.chdir(`${cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute}`);
 remote_client_logger_1.RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
 yield cloud_runner_system_1.CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
 remote_client_logger_1.RemoteClientLogger.log(`Cloning the repository being built:`);
@@ -4209,8 +4219,12 @@ class RemoteClient {
 });
 }
 static replaceLargePackageReferencesWithSharedReferences() {
+const manifest = fs_1.default.readFileSync(path_1.default.join(cloud_runner_folders_1.CloudRunnerFolders.projectPathAbsolute, `Packages/manifest.json`), 'utf8');
 if (cloud_runner_1.default.buildParameters.cloudRunnerIntegrationTests) {
-cloud_runner_logger_1.default.log(fs_1.default.readFileSync(path_1.default.join(cloud_runner_folders_1.CloudRunnerFolders.projectPathAbsolute, `Packages/manifest.json`), 'utf8'));
+cloud_runner_logger_1.default.log(manifest);
+}
+if (cloud_runner_1.default.buildParameters.useSharedLargePackages) {
+manifest.replace(/LargePackages/g, '../../LargePackages');
 }
 }
 static pullLatestLFS() {
File diff suppressed because one or more lines are too long
@@ -64,6 +64,8 @@ class BuildParameters {
 public cloudRunnerBuilderPlatform!: string | undefined;
 public isCliMode!: boolean;
 public retainWorkspace!: boolean;
+public useSharedLargePackages!: boolean;
+public useLZ4Compression!: boolean;

 static async create(): Promise<BuildParameters> {
 const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
@@ -139,6 +141,8 @@ class BuildParameters {
 kubeStorageClass: CloudRunnerOptions.kubeStorageClass,
 cacheKey: CloudRunnerOptions.cacheKey,
 retainWorkspace: CloudRunnerOptions.retainWorkspaces,
+useSharedLargePackages: CloudRunnerOptions.useSharedLargePackages,
+useLZ4Compression: CloudRunnerOptions.useLZ4Compression,
 };
 }
@@ -171,6 +171,14 @@ class CloudRunnerOptions {
 return Number(CloudRunnerOptions.getInput(`retainWorkspacesMax`)) || 5;
 }

+public static get useSharedLargePackages(): boolean {
+return CloudRunnerOptions.getInput(`useSharedLargePackages`) || false;
+}
+
+public static get useLZ4Compression(): boolean {
+return CloudRunnerOptions.getInput(`useLZ4Compression`) || true;
+}
+
 public static ToEnvVarFormat(input: string) {
 if (input.toUpperCase() === input) {
 return input;
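Both new getters follow the same getInput-with-fallback pattern as retainWorkspacesMax above, so when neither input is provided, useSharedLargePackages resolves to false and useLZ4Compression resolves to true (assuming getInput yields an empty value for unset inputs, as the existing getters already rely on). A small illustrative sketch of how those defaults feed the suffix used by the caching hunks below; the declare line is a stand-in, not the real import:

// Illustrative stand-in; the real class is CloudRunnerOptions from this commit.
declare const CloudRunnerOptions: { useSharedLargePackages: boolean; useLZ4Compression: boolean };

// '.lz4' by default, '' only when useLZ4Compression is explicitly disabled.
const compressionSuffix = CloudRunnerOptions.useLZ4Compression ? '.lz4' : '';
// false by default; enables the manifest rewrite in the RemoteClient hunk further down.
const sharedLargePackages = CloudRunnerOptions.useSharedLargePackages;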
@@ -46,6 +46,7 @@ export class Caching {
 public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheArtifactName: string) {
 cacheArtifactName = cacheArtifactName.replace(' ', '');
 const startPath = process.cwd();
+const compressionSuffix = CloudRunner.buildParameters.useLZ4Compression ? '.lz4' : '';
 try {
 if (!(await fileExists(cacheFolder))) {
 await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
@@ -78,14 +79,16 @@ export class Caching {
 return;
 }

-await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar.lz4 ${path.basename(sourceFolder)}`);
-await CloudRunnerSystem.Run(`du ${cacheArtifactName}.tar.lz4`);
-assert(await fileExists(`${cacheArtifactName}.tar.lz4`), 'cache archive exists');
+await CloudRunnerSystem.Run(
+`tar -cf ${cacheArtifactName}.tar${compressionSuffix} ${path.basename(sourceFolder)}`,
+);
+await CloudRunnerSystem.Run(`du ${cacheArtifactName}.tar${compressionSuffix}`);
+assert(await fileExists(`${cacheArtifactName}.tar${compressionSuffix}`), 'cache archive exists');
 assert(await fileExists(path.basename(sourceFolder)), 'source folder exists');
-await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar.lz4 ${cacheFolder}`);
+await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar${compressionSuffix} ${cacheFolder}`);
 RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
 assert(
-await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar.lz4`),
+await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar${compressionSuffix}`),
 'cache archive exists inside cache folder',
 );
 } catch (error) {
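The push path now derives every archive name from the shared suffix, so the tar, du, mv and assert calls above all agree on one file name. A self-contained sketch of that derivation; the example values are illustrative, not taken from the diff:

// How the archive file name is composed in PushToCache after this change.
function archiveName(cacheArtifactName: string, useLZ4Compression: boolean): string {
  const compressionSuffix = useLZ4Compression ? '.lz4' : '';
  return `${cacheArtifactName}.tar${compressionSuffix}`;
}

// archiveName('lib-build-1', true)  -> 'lib-build-1.tar.lz4'
// archiveName('lib-build-1', false) -> 'lib-build-1.tar'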
@@ -96,6 +99,7 @@ export class Caching {
 }
 public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheArtifactName: string = ``) {
 cacheArtifactName = cacheArtifactName.replace(' ', '');
+const compressionSuffix = CloudRunner.buildParameters.useLZ4Compression ? '.lz4' : '';
 const startPath = process.cwd();
 RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
 try {
@@ -107,24 +111,26 @@ export class Caching {
 await fs.promises.mkdir(destinationFolder);
 }

-const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar.lz4$ | head -1`))
+const latestInBranch = await (
+await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar${compressionSuffix}$ | head -1`)
+)
 .replace(/\n/g, ``)
-.replace('.tar.lz4', '');
+.replace(`.tar${compressionSuffix}`, '');

 process.chdir(cacheFolder);

 const cacheSelection =
-cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar.lz4`))
+cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar${compressionSuffix}`))
 ? cacheArtifactName
 : latestInBranch;
 await CloudRunnerLogger.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);

-if (await fileExists(`${cacheSelection}.tar.lz4`)) {
+if (await fileExists(`${cacheSelection}.tar${compressionSuffix}`)) {
 const resultsFolder = `results${CloudRunner.buildParameters.buildGuid}`;
 await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
-RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar.lz4`);
+RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar${compressionSuffix}`);
 const fullResultsFolder = path.join(cacheFolder, resultsFolder);
-await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar.lz4 -C ${fullResultsFolder}`);
+await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar${compressionSuffix} -C ${fullResultsFolder}`);
 RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
 assert(await fileExists(fullResultsFolder), `cache extraction results folder exists`);
 const destinationParentFolder = path.resolve(destinationFolder, '..');
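The pull side applies the same suffix when listing and selecting archives: it prefers the requested artifact if its archive exists, otherwise it falls back to the newest matching archive in the cache folder. A simplified, synchronous sketch of that selection rule; fileExistsSync is a hypothetical stand-in for the async fileExists helper used above:

// Hypothetical synchronous stand-in for the async fileExists helper in the diff.
declare function fileExistsSync(filePath: string): boolean;

// Mirrors the ternary in the hunk above: exact artifact first, newest archive as fallback.
function selectCacheEntry(cacheArtifactName: string, latestInBranch: string, compressionSuffix: string): string {
  return cacheArtifactName !== `` && fileExistsSync(`${cacheArtifactName}.tar${compressionSuffix}`)
    ? cacheArtifactName
    : latestInBranch;
}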
@@ -144,7 +150,9 @@ export class Caching {
 } else {
 RemoteClientLogger.logWarning(`cache item ${cacheArtifactName} doesn't exist ${destinationFolder}`);
 if (cacheSelection !== ``) {
-RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar.lz4 doesn't exist ${destinationFolder}`);
+RemoteClientLogger.logWarning(
+`cache item ${cacheArtifactName}.tar${compressionSuffix} doesn't exist ${destinationFolder}`,
+);
 throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
 }
 }
@@ -61,6 +61,7 @@ export class RemoteClient {
 }

 private static async cloneRepoWithoutLFSFiles() {
+process.chdir(`${CloudRunnerFolders.repoPathAbsolute}`);
 if (fs.existsSync(CloudRunnerFolders.repoPathAbsolute)) {
 RemoteClientLogger.log(
 `${CloudRunnerFolders.repoPathAbsolute} repo exists - skipping clone - retained workspace mode ${CloudRunner.buildParameters.retainWorkspace}`,
@@ -70,7 +71,6 @@ export class RemoteClient {
 return;
 }
 try {
-process.chdir(`${CloudRunnerFolders.repoPathAbsolute}`);
 RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
 await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
 RemoteClientLogger.log(`Cloning the repository being built:`);
@@ -95,10 +95,15 @@ export class RemoteClient {
 }

 static replaceLargePackageReferencesWithSharedReferences() {
-if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
-CloudRunnerLogger.log(
-fs.readFileSync(path.join(CloudRunnerFolders.projectPathAbsolute, `Packages/manifest.json`), 'utf8'),
+const manifest = fs.readFileSync(
+path.join(CloudRunnerFolders.projectPathAbsolute, `Packages/manifest.json`),
+'utf8',
 );
+if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
+CloudRunnerLogger.log(manifest);
+}
+if (CloudRunner.buildParameters.useSharedLargePackages) {
+manifest.replace(/LargePackages/g, '../../LargePackages');
 }
 }
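For context, a standalone sketch of the kind of rewrite the useSharedLargePackages branch points at: mapping LargePackages references in a Unity Packages/manifest.json onto a shared folder two levels up. The helper and the fs.writeFileSync write-back are assumptions for illustration only and are not part of this diff.

import fs from 'fs';
import path from 'path';

// Hypothetical helper, not taken from the commit.
function rewriteLargePackageReferences(projectPathAbsolute: string): void {
  const manifestPath = path.join(projectPathAbsolute, 'Packages/manifest.json');
  const manifest = fs.readFileSync(manifestPath, 'utf8');
  // String.replace returns a new string; the sketch captures it and writes it back.
  const rewritten = manifest.replace(/LargePackages/g, '../../LargePackages');
  fs.writeFileSync(manifestPath, rewritten); // assumption: persisting the rewritten manifest
}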