mirror of https://github.com/actions/cache.git
Add S3 cache download validation and retry logic

- Add empty file validation (0 bytes) and minimum size checks (512 bytes) for tar archives
- Implement download completeness validation (bytes downloaded = expected bytes)
- Add retry logic with exponential backoff for validation failures (3 attempts, with 1s and 2s delays between them)
- Create DownloadValidationError class for specific validation failures
- Add comprehensive test coverage for validation scenarios
- Maintain graceful degradation: validation failures log warnings but don't fail workflows

Branch: pull/1662/head
parent 7994cabd39
commit a28af779d2
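The backoff figures above follow from the `Math.pow(2, attempt - 1) * 1000` expression in the diff below. A small illustrative sketch (not part of the commit) of the delays a repeatedly failing download sees:

// Illustrative sketch of the retry schedule introduced by this commit:
// a delay is only scheduled while another attempt remains, so with
// maxRetries = 3 the waits are 1000 ms after attempt 1 and 2000 ms after
// attempt 2; the third attempt is the last, so no delay follows it.
const maxRetries = 3;
for (let attempt = 1; attempt < maxRetries; attempt++) {
    const delayMs = Math.pow(2, attempt - 1) * 1000;
    console.log(`after failed attempt ${attempt}: retry in ${delayMs}ms`);
}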
@ -0,0 +1,232 @@
import * as core from "@actions/core";
import * as fs from "fs";
import nock from "nock";
import * as path from "path";

import { DownloadValidationError, restoreCache } from "../src/custom/cache";
import { downloadCacheHttpClientConcurrent } from "../src/custom/downloadUtils";

// Mock the core module
jest.mock("@actions/core");

// Mock fs for file size checks
jest.mock("fs", () => ({
    ...jest.requireActual("fs"),
    promises: {
        ...jest.requireActual("fs").promises,
        open: jest.fn()
    }
}));

describe("Download Validation", () => {
    const testArchivePath = "/tmp/test-cache.tar.gz";
    const testUrl = "https://example.com/cache.tar.gz";

    beforeEach(() => {
        jest.clearAllMocks();
        nock.cleanAll();
    });

    afterEach(() => {
        nock.cleanAll();
    });

    describe("downloadCacheHttpClientConcurrent", () => {
        it("should validate downloaded size matches expected content-length", async () => {
            const expectedSize = 1024;
            const mockFileDescriptor = {
                write: jest.fn().mockResolvedValue(undefined),
                close: jest.fn().mockResolvedValue(undefined)
            };

            (fs.promises.open as jest.Mock).mockResolvedValue(
                mockFileDescriptor
            );

            // Mock the initial range request to get content length
            nock("https://example.com")
                .get("/cache.tar.gz")
                .reply(206, "partial content", {
                    "content-range": `bytes 0-1/${expectedSize}`
                });

            // Mock the actual content download with wrong size
            nock("https://example.com")
                .get("/cache.tar.gz")
                .reply(206, Buffer.alloc(512), {
                    // Return only 512 bytes instead of 1024
                    "content-range": "bytes 0-511/1024"
                });

            await expect(
                downloadCacheHttpClientConcurrent(testUrl, testArchivePath, {
                    timeoutInMs: 30000,
                    partSize: 1024
                })
            ).rejects.toThrow(
                "Download validation failed: Expected 1024 bytes but downloaded 512 bytes"
            );
        });

        it("should succeed when downloaded size matches expected", async () => {
            const expectedSize = 1024;
            const testContent = Buffer.alloc(expectedSize);
            const mockFileDescriptor = {
                write: jest.fn().mockResolvedValue(undefined),
                close: jest.fn().mockResolvedValue(undefined)
            };

            (fs.promises.open as jest.Mock).mockResolvedValue(
                mockFileDescriptor
            );

            // Mock the initial range request
            nock("https://example.com")
                .get("/cache.tar.gz")
                .reply(206, "partial content", {
                    "content-range": `bytes 0-1/${expectedSize}`
                });

            // Mock the actual content download with correct size
            nock("https://example.com")
                .get("/cache.tar.gz")
                .reply(206, testContent, {
                    "content-range": `bytes 0-${expectedSize - 1}/${expectedSize}`
                });

            await expect(
                downloadCacheHttpClientConcurrent(testUrl, testArchivePath, {
                    timeoutInMs: 30000,
                    partSize: expectedSize
                })
            ).resolves.not.toThrow();
        });
    });

    describe("restoreCache validation", () => {
        beforeEach(() => {
            // Mock environment variables for S3 backend
            process.env.RUNS_ON_S3_BUCKET_CACHE = "test-bucket";
            process.env.RUNS_ON_AWS_REGION = "us-east-1";
        });

        afterEach(() => {
            delete process.env.RUNS_ON_S3_BUCKET_CACHE;
            delete process.env.RUNS_ON_AWS_REGION;
        });

        it("should throw DownloadValidationError for empty files", async () => {
            // Mock the cache lookup to return a valid cache entry
            const mockCacheHttpClient = require("../src/custom/backend");
            jest.spyOn(mockCacheHttpClient, "getCacheEntry").mockResolvedValue({
                cacheKey: "test-key",
                archiveLocation: "https://s3.example.com/cache.tar.gz"
            });

            // Mock the download to succeed
            jest.spyOn(mockCacheHttpClient, "downloadCache").mockResolvedValue(
                undefined
            );

            // Mock utils to return 0 file size (empty file)
            const mockUtils = require("@actions/cache/lib/internal/cacheUtils");
            jest.spyOn(mockUtils, "getArchiveFileSizeInBytes").mockReturnValue(0);
            jest.spyOn(mockUtils, "createTempDirectory").mockResolvedValue("/tmp");
            jest.spyOn(mockUtils, "getCacheFileName").mockReturnValue("cache.tar.gz");

            const coreSpy = jest.spyOn(core, "warning");

            const result = await restoreCache(["/test/path"], "test-key");

            expect(result).toBeUndefined(); // Should return undefined on validation failure
            expect(coreSpy).toHaveBeenCalledWith(
                expect.stringContaining(
                    "Cache download validation failed: Downloaded cache archive is empty"
                )
            );
        });

        it("should throw DownloadValidationError for files too small", async () => {
            // Mock the cache lookup to return a valid cache entry
            const mockCacheHttpClient = require("../src/custom/backend");
            jest.spyOn(mockCacheHttpClient, "getCacheEntry").mockResolvedValue({
                cacheKey: "test-key",
                archiveLocation: "https://s3.example.com/cache.tar.gz"
            });

            // Mock the download to succeed
            jest.spyOn(mockCacheHttpClient, "downloadCache").mockResolvedValue(
                undefined
            );

            // Mock utils to return small file size (less than 512 bytes)
            const mockUtils = require("@actions/cache/lib/internal/cacheUtils");
            jest.spyOn(mockUtils, "getArchiveFileSizeInBytes").mockReturnValue(100);
            jest.spyOn(mockUtils, "createTempDirectory").mockResolvedValue("/tmp");
            jest.spyOn(mockUtils, "getCacheFileName").mockReturnValue("cache.tar.gz");

            const coreSpy = jest.spyOn(core, "warning");

            const result = await restoreCache(["/test/path"], "test-key");

            expect(result).toBeUndefined(); // Should return undefined on validation failure
            expect(coreSpy).toHaveBeenCalledWith(
                expect.stringContaining(
                    "Cache download validation failed: Downloaded cache archive is too small (100 bytes)"
                )
            );
        });

        it("should succeed with valid file size", async () => {
            // Mock the cache lookup to return a valid cache entry
            const mockCacheHttpClient = require("../src/custom/backend");
            jest.spyOn(mockCacheHttpClient, "getCacheEntry").mockResolvedValue({
                cacheKey: "test-key",
                archiveLocation: "https://s3.example.com/cache.tar.gz"
            });

            // Mock the download to succeed
            jest.spyOn(mockCacheHttpClient, "downloadCache").mockResolvedValue(
                undefined
            );

            // Mock utils to return valid file size (>= 512 bytes)
            const mockUtils = require("@actions/cache/lib/internal/cacheUtils");
            jest.spyOn(mockUtils, "getArchiveFileSizeInBytes").mockReturnValue(1024);
            jest.spyOn(mockUtils, "createTempDirectory").mockResolvedValue("/tmp");
            jest.spyOn(mockUtils, "getCacheFileName").mockReturnValue("cache.tar.gz");
            jest.spyOn(mockUtils, "getCompressionMethod").mockResolvedValue("gzip");

            // Mock tar operations
            const mockTar = require("@actions/cache/lib/internal/tar");
            jest.spyOn(mockTar, "extractTar").mockResolvedValue(undefined);
            jest.spyOn(mockTar, "listTar").mockResolvedValue(undefined);

            const result = await restoreCache(["/test/path"], "test-key");

            expect(result).toBe("test-key"); // Should return the cache key on success
        });
    });
});
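The probe interceptor in each download test above (the one replying `bytes 0-1/${expectedSize}`) reflects how the concurrent downloader appears to learn the archive's total size: from the trailing number of the `content-range` header, which is also why a malformed Content-Range is treated as retryable later in this commit. A hedged sketch of that parse — `totalFromContentRange` is a hypothetical helper, not the actual downloadUtils code:

// Hypothetical helper mirroring the content-range parse the mocks exercise.
function totalFromContentRange(header: string): number {
    const parts = header.split("/"); // "bytes 0-1/1024" -> ["bytes 0-1", "1024"]
    const total = Number(parts[1]);
    if (parts.length !== 2 || parts[1] === "" || Number.isNaN(total)) {
        throw new Error("Content-Range header is malformed");
    }
    return total;
}

console.log(totalFromContentRange("bytes 0-1/1024")); // 1024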
@ -93862,14 +93862,42 @@ function downloadCache(archiveLocation, archivePath, options) {
         }
         const archiveUrl = new URL(archiveLocation);
         const objectKey = archiveUrl.pathname.slice(1);
-        const command = new client_s3_1.GetObjectCommand({
-            Bucket: bucketName,
-            Key: objectKey
-        });
-        const url = yield getSignedUrl(s3Client, command, {
-            expiresIn: 3600
-        });
-        yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize }));
+        // Retry logic for download validation failures
+        const maxRetries = 3;
+        let lastError;
+        for (let attempt = 1; attempt <= maxRetries; attempt++) {
+            try {
+                const command = new client_s3_1.GetObjectCommand({
+                    Bucket: bucketName,
+                    Key: objectKey
+                });
+                const url = yield getSignedUrl(s3Client, command, {
+                    expiresIn: 3600
+                });
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize }));
+                // If we get here, download succeeded
+                return;
+            }
+            catch (error) {
+                const errorMessage = error.message;
+                lastError = error;
+                // Only retry on validation failures, not on other errors
+                if (errorMessage.includes("Download validation failed") ||
+                    errorMessage.includes("Range request not supported") ||
+                    errorMessage.includes("Content-Range header")) {
+                    if (attempt < maxRetries) {
+                        const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff
+                        core.warning(`Download attempt ${attempt} failed: ${errorMessage}. Retrying in ${delayMs}ms...`);
+                        yield new Promise(resolve => setTimeout(resolve, delayMs));
+                        continue;
+                    }
+                }
+                // For non-retryable errors or max retries reached, throw the error
+                throw error;
+            }
+        }
+        // This should never be reached, but just in case
+        throw lastError || new Error("Download failed after all retry attempts");
     });
 }
 exports.downloadCache = downloadCache;

@ -93954,7 +93982,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
+exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.DownloadValidationError = exports.ReserveCacheError = exports.ValidationError = void 0;
 const core = __importStar(__nccwpck_require__(2186));
 const path = __importStar(__nccwpck_require__(1017));
 const utils = __importStar(__nccwpck_require__(1518));

@ -93976,6 +94004,14 @@ class ReserveCacheError extends Error {
     }
 }
 exports.ReserveCacheError = ReserveCacheError;
+class DownloadValidationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = "DownloadValidationError";
+        Object.setPrototypeOf(this, DownloadValidationError.prototype);
+    }
+}
+exports.DownloadValidationError = DownloadValidationError;
 function checkPaths(paths) {
     if (!paths || paths.length === 0) {
         throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);

@ -94047,6 +94083,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) {
         }
         const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
+        // Validate downloaded archive
+        if (archiveFileSize === 0) {
+            throw new DownloadValidationError("Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache.");
+        }
+        // Minimum size check - a valid tar archive needs at least 512 bytes for header
+        const MIN_ARCHIVE_SIZE = 512;
+        if (archiveFileSize < MIN_ARCHIVE_SIZE) {
+            throw new DownloadValidationError(`Downloaded cache archive is too small (${archiveFileSize} bytes). Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`);
+        }
         yield (0, tar_1.extractTar)(archivePath, compressionMethod);
         core.info("Cache restored successfully");
         return cacheEntry.cacheKey;

@ -94056,6 +94101,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) {
         if (typedError.name === ValidationError.name) {
             throw error;
         }
+        else if (typedError.name === DownloadValidationError.name) {
+            // Log download validation errors as warnings but don't fail the workflow
+            core.warning(`Cache download validation failed: ${typedError.message}`);
+        }
         else {
             // Supress all non-validation cache related errors because caching should be optional
             core.warning(`Failed to restore: ${error.message}`);

@ -94302,6 +94351,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
         socketTimeout: options.timeoutInMs,
         keepAlive: true
     });
+    let progress;
     try {
         const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter(this, void 0, void 0, function* () {
             return yield httpClient.request("GET", archiveLocation, null, {

@ -94335,7 +94385,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
     downloads.reverse();
     let actives = 0;
     let bytesDownloaded = 0;
-    const progress = new DownloadProgress(length);
+    progress = new DownloadProgress(length);
     progress.startDisplayTimer();
     const progressFn = progress.onProgress();
     const activeDownloads = [];

@ -94358,8 +94408,14 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
         while (actives > 0) {
             yield waitAndWrite();
         }
+        // Validate that we downloaded the expected amount of data
+        if (bytesDownloaded !== length) {
+            throw new Error(`Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes`);
+        }
+        progress.stopDisplayTimer();
     }
     finally {
+        progress === null || progress === void 0 ? void 0 : progress.stopDisplayTimer();
         httpClient.dispose();
         yield archiveDescriptor.close();
     }

@ -94461,9 +94517,9 @@ exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
 const cache = __importStar(__nccwpck_require__(7799));
 const core = __importStar(__nccwpck_require__(2186));
 const constants_1 = __nccwpck_require__(9042);
+const custom = __importStar(__nccwpck_require__(1082));
 const stateProvider_1 = __nccwpck_require__(1527);
 const utils = __importStar(__nccwpck_require__(6850));
-const custom = __importStar(__nccwpck_require__(1082));
 const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;
 function restoreImpl(stateProvider, earlyExit) {
     return __awaiter(this, void 0, void 0, function* () {
@ -94461,9 +94517,9 @@ exports.saveRun = exports.saveOnlyRun = exports.saveImpl = void 0;
 const cache = __importStar(__nccwpck_require__(7799));
 const core = __importStar(__nccwpck_require__(2186));
 const constants_1 = __nccwpck_require__(9042);
+const custom = __importStar(__nccwpck_require__(1082));
 const stateProvider_1 = __nccwpck_require__(1527);
 const utils = __importStar(__nccwpck_require__(6850));
-const custom = __importStar(__nccwpck_require__(1082));
 const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;
 // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
 // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
@ -32,7 +32,7 @@
     "eslint-plugin-simple-import-sort": "^7.0.0",
     "jest": "^28.1.3",
     "jest-circus": "^27.5.1",
-    "nock": "^13.2.9",
+    "nock": "^13.5.6",
     "prettier": "^2.8.8",
     "ts-jest": "^28.0.8",
     "typescript": "^4.9.3"
@ -4720,6 +4720,7 @@
       "integrity": "sha512-jI/ewavBQ7X5178262JQR0ewicPAcJhXS/iFaNJl0VHLfyosZ/kwSrsa6VNQNSO8i9d8SqdRgOtZSOKJ/+iNMw==",
       "deprecated": "This is a stub types definition. nock provides its own type definitions, so you do not need this installed.",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "nock": "*"
       }
@ -10004,12 +10005,6 @@
         "node": ">=4"
       }
     },
-    "node_modules/lodash": {
-      "version": "4.17.21",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
-      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
-      "dev": true
-    },
     "node_modules/lodash.memoize": {
       "version": "4.1.2",
       "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@ -10150,14 +10145,14 @@
       "dev": true
     },
     "node_modules/nock": {
-      "version": "13.2.9",
-      "resolved": "https://registry.npmjs.org/nock/-/nock-13.2.9.tgz",
-      "integrity": "sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA==",
+      "version": "13.5.6",
+      "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz",
+      "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "debug": "^4.1.0",
         "json-stringify-safe": "^5.0.1",
-        "lodash": "^4.17.21",
         "propagate": "^2.0.0"
       },
       "engines": {
@ -19654,12 +19649,6 @@
         "path-exists": "^3.0.0"
       }
     },
-    "lodash": {
-      "version": "4.17.21",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
-      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
-      "dev": true
-    },
     "lodash.memoize": {
       "version": "4.1.2",
       "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@ -19773,14 +19762,13 @@
       "dev": true
     },
     "nock": {
-      "version": "13.2.9",
-      "resolved": "https://registry.npmjs.org/nock/-/nock-13.2.9.tgz",
-      "integrity": "sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA==",
+      "version": "13.5.6",
+      "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz",
+      "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==",
       "dev": true,
       "requires": {
         "debug": "^4.1.0",
         "json-stringify-safe": "^5.0.1",
-        "lodash": "^4.17.21",
         "propagate": "^2.0.0"
       }
     },
@ -46,7 +46,7 @@
     "eslint-plugin-simple-import-sort": "^7.0.0",
     "jest": "^28.1.3",
     "jest-circus": "^27.5.1",
-    "nock": "^13.2.9",
+    "nock": "^13.5.6",
     "prettier": "^2.8.8",
     "ts-jest": "^28.0.8",
     "typescript": "^4.9.3"
@@ -154,19 +154,57 @@ export async function downloadCache(

    const archiveUrl = new URL(archiveLocation);
    const objectKey = archiveUrl.pathname.slice(1);
-    const command = new GetObjectCommand({
-        Bucket: bucketName,
-        Key: objectKey
-    });
-    const url = await getSignedUrl(s3Client, command, {
-        expiresIn: 3600
-    });
-    await downloadCacheHttpClientConcurrent(url, archivePath, {
-        ...options,
-        downloadConcurrency: downloadQueueSize,
-        concurrentBlobDownloads: true,
-        partSize: downloadPartSize
-    });
+
+    // Retry logic for download validation failures
+    const maxRetries = 3;
+    let lastError: Error | undefined;
+
+    for (let attempt = 1; attempt <= maxRetries; attempt++) {
+        try {
+            const command = new GetObjectCommand({
+                Bucket: bucketName,
+                Key: objectKey
+            });
+            const url = await getSignedUrl(s3Client, command, {
+                expiresIn: 3600
+            });
+
+            await downloadCacheHttpClientConcurrent(url, archivePath, {
+                ...options,
+                downloadConcurrency: downloadQueueSize,
+                concurrentBlobDownloads: true,
+                partSize: downloadPartSize
+            });
+
+            // If we get here, the download succeeded
+            return;
+        } catch (error) {
+            const errorMessage = (error as Error).message;
+            lastError = error as Error;
+
+            // Only retry on validation failures, not on other errors
+            if (
+                errorMessage.includes("Download validation failed") ||
+                errorMessage.includes("Range request not supported") ||
+                errorMessage.includes("Content-Range header")
+            ) {
+                if (attempt < maxRetries) {
+                    const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff
+                    core.warning(
+                        `Download attempt ${attempt} failed: ${errorMessage}. Retrying in ${delayMs}ms...`
+                    );
+                    await new Promise(resolve => setTimeout(resolve, delayMs));
+                    continue;
+                }
+            }
+
+            // For non-retryable errors or max retries reached, throw the error
+            throw error;
+        }
+    }
+
+    // This should never be reached, but just in case
+    throw lastError || new Error("Download failed after all retry attempts");
 }

 export async function saveCache(
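For reference, here is the retry schedule above in isolation: a standalone sketch, not part of the change. withRetries and computeDelayMs are hypothetical names, and isRetryable stands in for the message checks in the catch block; with maxRetries = 3 the delays work out to 1s after the first failure and 2s after the second, with no wait after the final attempt.

// Standalone sketch of the retry/backoff behavior implemented in downloadCache.
// The helper names are illustrative only.
function computeDelayMs(attempt: number): number {
    return Math.pow(2, attempt - 1) * 1000; // attempt 1 -> 1000ms, attempt 2 -> 2000ms
}

async function withRetries<T>(
    fn: () => Promise<T>,
    isRetryable: (error: Error) => boolean,
    maxRetries = 3
): Promise<T> {
    let lastError: Error | undefined;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return await fn();
        } catch (error) {
            lastError = error as Error;
            // Rethrow immediately for non-retryable errors or on the last attempt
            if (!isRetryable(lastError) || attempt === maxRetries) {
                throw error;
            }
            await new Promise(resolve =>
                setTimeout(resolve, computeDelayMs(attempt))
            );
        }
    }
    throw lastError ?? new Error("unreachable");
}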
@@ -27,6 +27,14 @@ export class ReserveCacheError extends Error {
    }
 }

+export class DownloadValidationError extends Error {
+    constructor(message: string) {
+        super(message);
+        this.name = "DownloadValidationError";
+        Object.setPrototypeOf(this, DownloadValidationError.prototype);
+    }
+}
+
 function checkPaths(paths: string[]): void {
    if (!paths || paths.length === 0) {
        throw new ValidationError(
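A quick standalone illustration of why the constructor sets both name and the prototype. When TypeScript targets ES5, subclasses of built-ins such as Error lose their prototype link, so instanceof would report false without the explicit Object.setPrototypeOf call; the name field is what restoreCache matches on. The import path here is an assumption:

import { DownloadValidationError } from "./custom/cache";

const err = new DownloadValidationError("archive too small");
console.log(err instanceof DownloadValidationError); // true (prototype chain restored)
console.log(err instanceof Error); // true
console.log(err.name); // "DownloadValidationError"

// restoreCache compares names rather than using instanceof, which also holds
// up when bundling duplicates class identities:
if (err.name === DownloadValidationError.name) {
    // handled as a non-fatal validation failure
}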
@@ -135,6 +143,21 @@ export async function restoreCache(
            )} MB (${archiveFileSize} B)`
        );

+        // Validate downloaded archive
+        if (archiveFileSize === 0) {
+            throw new DownloadValidationError(
+                "Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache."
+            );
+        }
+
+        // Minimum size check - a valid tar archive needs at least 512 bytes for header
+        const MIN_ARCHIVE_SIZE = 512;
+        if (archiveFileSize < MIN_ARCHIVE_SIZE) {
+            throw new DownloadValidationError(
+                `Downloaded cache archive is too small (${archiveFileSize} bytes). Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`
+            );
+        }
+
        await extractTar(archivePath, compressionMethod);
        core.info("Cache restored successfully");

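The 512-byte floor comes from the tar format itself: archives are sequences of 512-byte blocks, and the first block holds the header of the first entry, so nothing smaller can be a valid archive. The same checks as a reusable helper, sketched with a hypothetical name and an assumed import path:

import { DownloadValidationError } from "./custom/cache";

// Tar archives are built from 512-byte blocks; the first block is an entry
// header, so a smaller file cannot be a valid archive.
const TAR_BLOCK_SIZE = 512;

function validateArchiveSize(sizeInBytes: number): void {
    if (sizeInBytes === 0) {
        throw new DownloadValidationError(
            "Downloaded cache archive is empty (0 bytes)."
        );
    }
    if (sizeInBytes < TAR_BLOCK_SIZE) {
        throw new DownloadValidationError(
            `Archive is ${sizeInBytes} bytes; a valid tar archive needs at least ${TAR_BLOCK_SIZE}.`
        );
    }
}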
@@ -143,6 +166,11 @@ export async function restoreCache(
        const typedError = error as Error;
        if (typedError.name === ValidationError.name) {
            throw error;
+        } else if (typedError.name === DownloadValidationError.name) {
+            // Log download validation errors as warnings but don't fail the workflow
+            core.warning(
+                `Cache download validation failed: ${typedError.message}`
+            );
        } else {
            // Supress all non-validation cache related errors because caching should be optional
            core.warning(`Failed to restore: ${(error as Error).message}`);
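The observable contract of the new branch: a validation failure is logged as a warning and reported as a cache miss instead of failing the workflow. A test-style sketch under the same jest mocks as the suite above; the mock arrangement is elided, and the undefined return assumes the catch falls through to the usual cache-miss path, as in upstream restoreCache:

it("degrades gracefully on download validation failures", async () => {
    // ...arrange mocks so the downloaded archive ends up 0 bytes...
    const result = await restoreCache(["/tmp/some-path"], "some-key");

    expect(result).toBeUndefined(); // reported as a cache miss
    expect(core.warning).toHaveBeenCalledWith(
        expect.stringContaining("Cache download validation failed")
    );
});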
@@ -160,6 +160,7 @@ export async function downloadCacheHttpClientConcurrent(
        socketTimeout: options.timeoutInMs,
        keepAlive: true
    });
+    let progress: DownloadProgress | undefined;
    try {
        const res = await retryHttpClientResponse(
            "downloadCacheMetadata",
@@ -210,7 +211,7 @@ export async function downloadCacheHttpClientConcurrent(
    downloads.reverse();
    let actives = 0;
    let bytesDownloaded = 0;
-    const progress = new DownloadProgress(length);
+    progress = new DownloadProgress(length);
    progress.startDisplayTimer();
    const progressFn = progress.onProgress();

@@ -246,7 +247,17 @@ export async function downloadCacheHttpClientConcurrent(
        while (actives > 0) {
            await waitAndWrite();
        }
+
+        // Validate that we downloaded the expected amount of data
+        if (bytesDownloaded !== length) {
+            throw new Error(
+                `Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes`
+            );
+        }
+
+        progress.stopDisplayTimer();
    } finally {
+        progress?.stopDisplayTimer();
        httpClient.dispose();
        await archiveDescriptor.close();
    }
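The three hunks above are a single refactoring: progress is hoisted out of the try block so that finally can stop its display timer on success and failure alike, and the optional chaining covers failures that occur before the progress bar exists. The pattern in miniature, with illustrative names:

let timer: NodeJS.Timeout | undefined;
try {
    timer = setInterval(() => console.log("downloading..."), 1000);
    // work that may throw before or after the timer is created
} finally {
    // Clean up only if the resource was actually created
    if (timer !== undefined) {
        clearInterval(timer);
    }
}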
@@ -2,6 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, Inputs, Outputs, State } from "./constants";
+import * as custom from "./custom/cache";
 import {
    IStateProvider,
    NullStateProvider,
@@ -9,7 +10,6 @@ import {
 } from "./stateProvider";
 import * as utils from "./utils/actionUtils";

-import * as custom from "./custom/cache";

 const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;

 export async function restoreImpl(
@@ -2,6 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, Inputs, State } from "./constants";
+import * as custom from "./custom/cache";
 import {
    IStateProvider,
    NullStateProvider,
@@ -9,7 +10,6 @@ import {
 } from "./stateProvider";
 import * as utils from "./utils/actionUtils";

-import * as custom from "./custom/cache";

 const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined;

 // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in