From a28af779d256503463b6f81174166bdb5747c85c Mon Sep 17 00:00:00 2001 From: Brenden Matthews Date: Fri, 20 Jun 2025 16:35:40 -0400 Subject: [PATCH] Add S3 cache download validation and retry logic - Add empty file validation (0 bytes) and minimum size checks (512 bytes) for tar archives - Implement download completeness validation (bytes downloaded = expected) - Add retry logic with exponential backoff for validation failures (3 attempts: 1s/2s/4s delays) - Create DownloadValidationError class for specific validation failures - Add comprehensive test coverage for validation scenarios - Maintain graceful degradation - validation failures log warnings but don't fail workflows --- __tests__/downloadValidation.test.ts | 232 +++++++++++++++++++++++++++ dist/restore-only/index.js | 78 +++++++-- dist/restore/index.js | 78 +++++++-- dist/save-only/index.js | 78 +++++++-- dist/save/index.js | 78 +++++++-- package-lock.json | 30 ++-- package.json | 2 +- src/custom/backend.ts | 64 ++++++-- src/custom/cache.ts | 28 ++++ src/custom/downloadUtils.ts | 13 +- src/restoreImpl.ts | 2 +- src/saveImpl.ts | 2 +- 12 files changed, 603 insertions(+), 82 deletions(-) create mode 100644 __tests__/downloadValidation.test.ts diff --git a/__tests__/downloadValidation.test.ts b/__tests__/downloadValidation.test.ts new file mode 100644 index 0000000..130f85c --- /dev/null +++ b/__tests__/downloadValidation.test.ts @@ -0,0 +1,232 @@ +import * as core from "@actions/core"; +import * as fs from "fs"; +import nock from "nock"; +import * as path from "path"; + +import { DownloadValidationError, restoreCache } from "../src/custom/cache"; +import { downloadCacheHttpClientConcurrent } from "../src/custom/downloadUtils"; + +// Mock the core module +jest.mock("@actions/core"); + +// Mock fs for file size checks +jest.mock("fs", () => ({ + ...jest.requireActual("fs"), + promises: { + ...jest.requireActual("fs").promises, + open: jest.fn() + } +})); + +describe("Download Validation", () => { + const testArchivePath = "/tmp/test-cache.tar.gz"; + const testUrl = "https://example.com/cache.tar.gz"; + + beforeEach(() => { + jest.clearAllMocks(); + nock.cleanAll(); + }); + + afterEach(() => { + nock.cleanAll(); + }); + + describe("downloadCacheHttpClientConcurrent", () => { + it("should validate downloaded size matches expected content-length", async () => { + const expectedSize = 1024; + const mockFileDescriptor = { + write: jest.fn().mockResolvedValue(undefined), + close: jest.fn().mockResolvedValue(undefined) + }; + + (fs.promises.open as jest.Mock).mockResolvedValue( + mockFileDescriptor + ); + + // Mock the initial range request to get content length + nock("https://example.com") + .get("/cache.tar.gz") + .reply(206, "partial content", { + "content-range": `bytes 0-1/${expectedSize}` + }); + + // Mock the actual content download with wrong size + nock("https://example.com") + .get("/cache.tar.gz") + .reply(206, Buffer.alloc(512), { + // Return only 512 bytes instead of 1024 + "content-range": "bytes 0-511/1024" + }); + + await expect( + downloadCacheHttpClientConcurrent(testUrl, testArchivePath, { + timeoutInMs: 30000, + partSize: 1024 + }) + ).rejects.toThrow( + "Download validation failed: Expected 1024 bytes but downloaded 512 bytes" + ); + }); + + it("should succeed when downloaded size matches expected", async () => { + const expectedSize = 1024; + const testContent = Buffer.alloc(expectedSize); + const mockFileDescriptor = { + write: jest.fn().mockResolvedValue(undefined), + close: jest.fn().mockResolvedValue(undefined) + 
}; + + (fs.promises.open as jest.Mock).mockResolvedValue( + mockFileDescriptor + ); + + // Mock the initial range request + nock("https://example.com") + .get("/cache.tar.gz") + .reply(206, "partial content", { + "content-range": `bytes 0-1/${expectedSize}` + }); + + // Mock the actual content download with correct size + nock("https://example.com") + .get("/cache.tar.gz") + .reply(206, testContent, { + "content-range": `bytes 0-${ + expectedSize - 1 + }/${expectedSize}` + }); + + await expect( + downloadCacheHttpClientConcurrent(testUrl, testArchivePath, { + timeoutInMs: 30000, + partSize: expectedSize + }) + ).resolves.not.toThrow(); + }); + }); + + describe("restoreCache validation", () => { + beforeEach(() => { + // Mock environment variables for S3 backend + process.env.RUNS_ON_S3_BUCKET_CACHE = "test-bucket"; + process.env.RUNS_ON_AWS_REGION = "us-east-1"; + }); + + afterEach(() => { + delete process.env.RUNS_ON_S3_BUCKET_CACHE; + delete process.env.RUNS_ON_AWS_REGION; + }); + + it("should throw DownloadValidationError for empty files", async () => { + // Mock the cache lookup to return a valid cache entry + const mockCacheHttpClient = require("../src/custom/backend"); + jest.spyOn(mockCacheHttpClient, "getCacheEntry").mockResolvedValue({ + cacheKey: "test-key", + archiveLocation: "https://s3.example.com/cache.tar.gz" + }); + + // Mock the download to succeed + jest.spyOn(mockCacheHttpClient, "downloadCache").mockResolvedValue( + undefined + ); + + // Mock utils to return 0 file size (empty file) + const mockUtils = require("@actions/cache/lib/internal/cacheUtils"); + jest.spyOn(mockUtils, "getArchiveFileSizeInBytes").mockReturnValue( + 0 + ); + jest.spyOn(mockUtils, "createTempDirectory").mockResolvedValue( + "/tmp" + ); + jest.spyOn(mockUtils, "getCacheFileName").mockReturnValue( + "cache.tar.gz" + ); + + const coreSpy = jest.spyOn(core, "warning"); + + const result = await restoreCache(["/test/path"], "test-key"); + + expect(result).toBeUndefined(); // Should return undefined on validation failure + expect(coreSpy).toHaveBeenCalledWith( + expect.stringContaining( + "Cache download validation failed: Downloaded cache archive is empty" + ) + ); + }); + + it("should throw DownloadValidationError for files too small", async () => { + // Mock the cache lookup to return a valid cache entry + const mockCacheHttpClient = require("../src/custom/backend"); + jest.spyOn(mockCacheHttpClient, "getCacheEntry").mockResolvedValue({ + cacheKey: "test-key", + archiveLocation: "https://s3.example.com/cache.tar.gz" + }); + + // Mock the download to succeed + jest.spyOn(mockCacheHttpClient, "downloadCache").mockResolvedValue( + undefined + ); + + // Mock utils to return small file size (less than 512 bytes) + const mockUtils = require("@actions/cache/lib/internal/cacheUtils"); + jest.spyOn(mockUtils, "getArchiveFileSizeInBytes").mockReturnValue( + 100 + ); + jest.spyOn(mockUtils, "createTempDirectory").mockResolvedValue( + "/tmp" + ); + jest.spyOn(mockUtils, "getCacheFileName").mockReturnValue( + "cache.tar.gz" + ); + + const coreSpy = jest.spyOn(core, "warning"); + + const result = await restoreCache(["/test/path"], "test-key"); + + expect(result).toBeUndefined(); // Should return undefined on validation failure + expect(coreSpy).toHaveBeenCalledWith( + expect.stringContaining( + "Cache download validation failed: Downloaded cache archive is too small (100 bytes)" + ) + ); + }); + + it("should succeed with valid file size", async () => { + // Mock the cache lookup to return a valid cache entry + 
const mockCacheHttpClient = require("../src/custom/backend"); + jest.spyOn(mockCacheHttpClient, "getCacheEntry").mockResolvedValue({ + cacheKey: "test-key", + archiveLocation: "https://s3.example.com/cache.tar.gz" + }); + + // Mock the download to succeed + jest.spyOn(mockCacheHttpClient, "downloadCache").mockResolvedValue( + undefined + ); + + // Mock utils to return valid file size (>= 512 bytes) + const mockUtils = require("@actions/cache/lib/internal/cacheUtils"); + jest.spyOn(mockUtils, "getArchiveFileSizeInBytes").mockReturnValue( + 1024 + ); + jest.spyOn(mockUtils, "createTempDirectory").mockResolvedValue( + "/tmp" + ); + jest.spyOn(mockUtils, "getCacheFileName").mockReturnValue( + "cache.tar.gz" + ); + jest.spyOn(mockUtils, "getCompressionMethod").mockResolvedValue( + "gzip" + ); + + // Mock tar operations + const mockTar = require("@actions/cache/lib/internal/tar"); + jest.spyOn(mockTar, "extractTar").mockResolvedValue(undefined); + jest.spyOn(mockTar, "listTar").mockResolvedValue(undefined); + + const result = await restoreCache(["/test/path"], "test-key"); + + expect(result).toBe("test-key"); // Should return the cache key on success + }); + }); +}); diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index 9aaa4cb..e129de1 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -93862,14 +93862,42 @@ function downloadCache(archiveLocation, archivePath, options) { } const archiveUrl = new URL(archiveLocation); const objectKey = archiveUrl.pathname.slice(1); - const command = new client_s3_1.GetObjectCommand({ - Bucket: bucketName, - Key: objectKey - }); - const url = yield getSignedUrl(s3Client, command, { - expiresIn: 3600 - }); - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // Retry logic for download validation failures + const maxRetries = 3; + let lastError; + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const command = new client_s3_1.GetObjectCommand({ + Bucket: bucketName, + Key: objectKey + }); + const url = yield getSignedUrl(s3Client, command, { + expiresIn: 3600 + }); + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // If we get here, download succeeded + return; + } + catch (error) { + const errorMessage = error.message; + lastError = error; + // Only retry on validation failures, not on other errors + if (errorMessage.includes("Download validation failed") || + errorMessage.includes("Range request not supported") || + errorMessage.includes("Content-Range header")) { + if (attempt < maxRetries) { + const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff + core.warning(`Download attempt ${attempt} failed: ${errorMessage}. 
Retrying in ${delayMs}ms...`); + yield new Promise(resolve => setTimeout(resolve, delayMs)); + continue; + } + } + // For non-retryable errors or max retries reached, throw the error + throw error; + } + } + // This should never be reached, but just in case + throw lastError || new Error("Download failed after all retry attempts"); }); } exports.downloadCache = downloadCache; @@ -93954,7 +93982,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.DownloadValidationError = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(2186)); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); @@ -93976,6 +94004,14 @@ class ReserveCacheError extends Error { } } exports.ReserveCacheError = ReserveCacheError; +class DownloadValidationError extends Error { + constructor(message) { + super(message); + this.name = "DownloadValidationError"; + Object.setPrototypeOf(this, DownloadValidationError.prototype); + } +} +exports.DownloadValidationError = DownloadValidationError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); @@ -94047,6 +94083,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + // Validate downloaded archive + if (archiveFileSize === 0) { + throw new DownloadValidationError("Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache."); + } + // Minimum size check - a valid tar archive needs at least 512 bytes for header + const MIN_ARCHIVE_SIZE = 512; + if (archiveFileSize < MIN_ARCHIVE_SIZE) { + throw new DownloadValidationError(`Downloaded cache archive is too small (${archiveFileSize} bytes). 
Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`); + } yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info("Cache restored successfully"); return cacheEntry.cacheKey; @@ -94056,6 +94101,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch if (typedError.name === ValidationError.name) { throw error; } + else if (typedError.name === DownloadValidationError.name) { + // Log download validation errors as warnings but don't fail the workflow + core.warning(`Cache download validation failed: ${typedError.message}`); + } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); @@ -94302,6 +94351,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options socketTimeout: options.timeoutInMs, keepAlive: true }); + let progress; try { const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request("GET", archiveLocation, null, { @@ -94335,7 +94385,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options downloads.reverse(); let actives = 0; let bytesDownloaded = 0; - const progress = new DownloadProgress(length); + progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); const activeDownloads = []; @@ -94358,8 +94408,14 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options while (actives > 0) { yield waitAndWrite(); } + // Validate that we downloaded the expected amount of data + if (bytesDownloaded !== length) { + throw new Error(`Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes`); + } + progress.stopDisplayTimer(); } finally { + progress === null || progress === void 0 ? 
void 0 : progress.stopDisplayTimer(); httpClient.dispose(); yield archiveDescriptor.close(); } @@ -94461,9 +94517,9 @@ exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0; const cache = __importStar(__nccwpck_require__(7799)); const core = __importStar(__nccwpck_require__(2186)); const constants_1 = __nccwpck_require__(9042); +const custom = __importStar(__nccwpck_require__(1082)); const stateProvider_1 = __nccwpck_require__(1527); const utils = __importStar(__nccwpck_require__(6850)); -const custom = __importStar(__nccwpck_require__(1082)); const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined; function restoreImpl(stateProvider, earlyExit) { return __awaiter(this, void 0, void 0, function* () { diff --git a/dist/restore/index.js b/dist/restore/index.js index 10f3788..312e22a 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -93862,14 +93862,42 @@ function downloadCache(archiveLocation, archivePath, options) { } const archiveUrl = new URL(archiveLocation); const objectKey = archiveUrl.pathname.slice(1); - const command = new client_s3_1.GetObjectCommand({ - Bucket: bucketName, - Key: objectKey - }); - const url = yield getSignedUrl(s3Client, command, { - expiresIn: 3600 - }); - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // Retry logic for download validation failures + const maxRetries = 3; + let lastError; + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const command = new client_s3_1.GetObjectCommand({ + Bucket: bucketName, + Key: objectKey + }); + const url = yield getSignedUrl(s3Client, command, { + expiresIn: 3600 + }); + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // If we get here, download succeeded + return; + } + catch (error) { + const errorMessage = error.message; + lastError = error; + // Only retry on validation failures, not on other errors + if (errorMessage.includes("Download validation failed") || + errorMessage.includes("Range request not supported") || + errorMessage.includes("Content-Range header")) { + if (attempt < maxRetries) { + const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff + core.warning(`Download attempt ${attempt} failed: ${errorMessage}. 
Retrying in ${delayMs}ms...`); + yield new Promise(resolve => setTimeout(resolve, delayMs)); + continue; + } + } + // For non-retryable errors or max retries reached, throw the error + throw error; + } + } + // This should never be reached, but just in case + throw lastError || new Error("Download failed after all retry attempts"); }); } exports.downloadCache = downloadCache; @@ -93954,7 +93982,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.DownloadValidationError = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(2186)); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); @@ -93976,6 +94004,14 @@ class ReserveCacheError extends Error { } } exports.ReserveCacheError = ReserveCacheError; +class DownloadValidationError extends Error { + constructor(message) { + super(message); + this.name = "DownloadValidationError"; + Object.setPrototypeOf(this, DownloadValidationError.prototype); + } +} +exports.DownloadValidationError = DownloadValidationError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); @@ -94047,6 +94083,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + // Validate downloaded archive + if (archiveFileSize === 0) { + throw new DownloadValidationError("Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache."); + } + // Minimum size check - a valid tar archive needs at least 512 bytes for header + const MIN_ARCHIVE_SIZE = 512; + if (archiveFileSize < MIN_ARCHIVE_SIZE) { + throw new DownloadValidationError(`Downloaded cache archive is too small (${archiveFileSize} bytes). 
Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`); + } yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info("Cache restored successfully"); return cacheEntry.cacheKey; @@ -94056,6 +94101,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch if (typedError.name === ValidationError.name) { throw error; } + else if (typedError.name === DownloadValidationError.name) { + // Log download validation errors as warnings but don't fail the workflow + core.warning(`Cache download validation failed: ${typedError.message}`); + } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); @@ -94302,6 +94351,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options socketTimeout: options.timeoutInMs, keepAlive: true }); + let progress; try { const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request("GET", archiveLocation, null, { @@ -94335,7 +94385,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options downloads.reverse(); let actives = 0; let bytesDownloaded = 0; - const progress = new DownloadProgress(length); + progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); const activeDownloads = []; @@ -94358,8 +94408,14 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options while (actives > 0) { yield waitAndWrite(); } + // Validate that we downloaded the expected amount of data + if (bytesDownloaded !== length) { + throw new Error(`Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes`); + } + progress.stopDisplayTimer(); } finally { + progress === null || progress === void 0 ? 
void 0 : progress.stopDisplayTimer(); httpClient.dispose(); yield archiveDescriptor.close(); } @@ -94461,9 +94517,9 @@ exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0; const cache = __importStar(__nccwpck_require__(7799)); const core = __importStar(__nccwpck_require__(2186)); const constants_1 = __nccwpck_require__(9042); +const custom = __importStar(__nccwpck_require__(1082)); const stateProvider_1 = __nccwpck_require__(1527); const utils = __importStar(__nccwpck_require__(6850)); -const custom = __importStar(__nccwpck_require__(1082)); const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined; function restoreImpl(stateProvider, earlyExit) { return __awaiter(this, void 0, void 0, function* () { diff --git a/dist/save-only/index.js b/dist/save-only/index.js index c83ae6c..98ca169 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -93862,14 +93862,42 @@ function downloadCache(archiveLocation, archivePath, options) { } const archiveUrl = new URL(archiveLocation); const objectKey = archiveUrl.pathname.slice(1); - const command = new client_s3_1.GetObjectCommand({ - Bucket: bucketName, - Key: objectKey - }); - const url = yield getSignedUrl(s3Client, command, { - expiresIn: 3600 - }); - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // Retry logic for download validation failures + const maxRetries = 3; + let lastError; + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const command = new client_s3_1.GetObjectCommand({ + Bucket: bucketName, + Key: objectKey + }); + const url = yield getSignedUrl(s3Client, command, { + expiresIn: 3600 + }); + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // If we get here, download succeeded + return; + } + catch (error) { + const errorMessage = error.message; + lastError = error; + // Only retry on validation failures, not on other errors + if (errorMessage.includes("Download validation failed") || + errorMessage.includes("Range request not supported") || + errorMessage.includes("Content-Range header")) { + if (attempt < maxRetries) { + const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff + core.warning(`Download attempt ${attempt} failed: ${errorMessage}. 
Retrying in ${delayMs}ms...`); + yield new Promise(resolve => setTimeout(resolve, delayMs)); + continue; + } + } + // For non-retryable errors or max retries reached, throw the error + throw error; + } + } + // This should never be reached, but just in case + throw lastError || new Error("Download failed after all retry attempts"); }); } exports.downloadCache = downloadCache; @@ -93954,7 +93982,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.DownloadValidationError = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(2186)); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); @@ -93976,6 +94004,14 @@ class ReserveCacheError extends Error { } } exports.ReserveCacheError = ReserveCacheError; +class DownloadValidationError extends Error { + constructor(message) { + super(message); + this.name = "DownloadValidationError"; + Object.setPrototypeOf(this, DownloadValidationError.prototype); + } +} +exports.DownloadValidationError = DownloadValidationError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); @@ -94047,6 +94083,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + // Validate downloaded archive + if (archiveFileSize === 0) { + throw new DownloadValidationError("Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache."); + } + // Minimum size check - a valid tar archive needs at least 512 bytes for header + const MIN_ARCHIVE_SIZE = 512; + if (archiveFileSize < MIN_ARCHIVE_SIZE) { + throw new DownloadValidationError(`Downloaded cache archive is too small (${archiveFileSize} bytes). 
Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`); + } yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info("Cache restored successfully"); return cacheEntry.cacheKey; @@ -94056,6 +94101,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch if (typedError.name === ValidationError.name) { throw error; } + else if (typedError.name === DownloadValidationError.name) { + // Log download validation errors as warnings but don't fail the workflow + core.warning(`Cache download validation failed: ${typedError.message}`); + } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); @@ -94302,6 +94351,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options socketTimeout: options.timeoutInMs, keepAlive: true }); + let progress; try { const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request("GET", archiveLocation, null, { @@ -94335,7 +94385,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options downloads.reverse(); let actives = 0; let bytesDownloaded = 0; - const progress = new DownloadProgress(length); + progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); const activeDownloads = []; @@ -94358,8 +94408,14 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options while (actives > 0) { yield waitAndWrite(); } + // Validate that we downloaded the expected amount of data + if (bytesDownloaded !== length) { + throw new Error(`Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes`); + } + progress.stopDisplayTimer(); } finally { + progress === null || progress === void 0 ? void 0 : progress.stopDisplayTimer(); httpClient.dispose(); yield archiveDescriptor.close(); } @@ -94461,9 +94517,9 @@ exports.saveRun = exports.saveOnlyRun = exports.saveImpl = void 0; const cache = __importStar(__nccwpck_require__(7799)); const core = __importStar(__nccwpck_require__(2186)); const constants_1 = __nccwpck_require__(9042); +const custom = __importStar(__nccwpck_require__(1082)); const stateProvider_1 = __nccwpck_require__(1527); const utils = __importStar(__nccwpck_require__(6850)); -const custom = __importStar(__nccwpck_require__(1082)); const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined; // Catch and log any unhandled exceptions. 
These exceptions can leak out of the uploadChunk method in // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to diff --git a/dist/save/index.js b/dist/save/index.js index b8e3006..bc49648 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -93862,14 +93862,42 @@ function downloadCache(archiveLocation, archivePath, options) { } const archiveUrl = new URL(archiveLocation); const objectKey = archiveUrl.pathname.slice(1); - const command = new client_s3_1.GetObjectCommand({ - Bucket: bucketName, - Key: objectKey - }); - const url = yield getSignedUrl(s3Client, command, { - expiresIn: 3600 - }); - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // Retry logic for download validation failures + const maxRetries = 3; + let lastError; + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const command = new client_s3_1.GetObjectCommand({ + Bucket: bucketName, + Key: objectKey + }); + const url = yield getSignedUrl(s3Client, command, { + expiresIn: 3600 + }); + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(url, archivePath, Object.assign(Object.assign({}, options), { downloadConcurrency: downloadQueueSize, concurrentBlobDownloads: true, partSize: downloadPartSize })); + // If we get here, download succeeded + return; + } + catch (error) { + const errorMessage = error.message; + lastError = error; + // Only retry on validation failures, not on other errors + if (errorMessage.includes("Download validation failed") || + errorMessage.includes("Range request not supported") || + errorMessage.includes("Content-Range header")) { + if (attempt < maxRetries) { + const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff + core.warning(`Download attempt ${attempt} failed: ${errorMessage}. 
Retrying in ${delayMs}ms...`); + yield new Promise(resolve => setTimeout(resolve, delayMs)); + continue; + } + } + // For non-retryable errors or max retries reached, throw the error + throw error; + } + } + // This should never be reached, but just in case + throw lastError || new Error("Download failed after all retry attempts"); }); } exports.downloadCache = downloadCache; @@ -93954,7 +93982,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.DownloadValidationError = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(2186)); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); @@ -93976,6 +94004,14 @@ class ReserveCacheError extends Error { } } exports.ReserveCacheError = ReserveCacheError; +class DownloadValidationError extends Error { + constructor(message) { + super(message); + this.name = "DownloadValidationError"; + Object.setPrototypeOf(this, DownloadValidationError.prototype); + } +} +exports.DownloadValidationError = DownloadValidationError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); @@ -94047,6 +94083,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + // Validate downloaded archive + if (archiveFileSize === 0) { + throw new DownloadValidationError("Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache."); + } + // Minimum size check - a valid tar archive needs at least 512 bytes for header + const MIN_ARCHIVE_SIZE = 512; + if (archiveFileSize < MIN_ARCHIVE_SIZE) { + throw new DownloadValidationError(`Downloaded cache archive is too small (${archiveFileSize} bytes). 
Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`); + } yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info("Cache restored successfully"); return cacheEntry.cacheKey; @@ -94056,6 +94101,10 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch if (typedError.name === ValidationError.name) { throw error; } + else if (typedError.name === DownloadValidationError.name) { + // Log download validation errors as warnings but don't fail the workflow + core.warning(`Cache download validation failed: ${typedError.message}`); + } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); @@ -94302,6 +94351,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options socketTimeout: options.timeoutInMs, keepAlive: true }); + let progress; try { const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request("GET", archiveLocation, null, { @@ -94335,7 +94385,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options downloads.reverse(); let actives = 0; let bytesDownloaded = 0; - const progress = new DownloadProgress(length); + progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); const activeDownloads = []; @@ -94358,8 +94408,14 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options while (actives > 0) { yield waitAndWrite(); } + // Validate that we downloaded the expected amount of data + if (bytesDownloaded !== length) { + throw new Error(`Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes`); + } + progress.stopDisplayTimer(); } finally { + progress === null || progress === void 0 ? void 0 : progress.stopDisplayTimer(); httpClient.dispose(); yield archiveDescriptor.close(); } @@ -94461,9 +94517,9 @@ exports.saveRun = exports.saveOnlyRun = exports.saveImpl = void 0; const cache = __importStar(__nccwpck_require__(7799)); const core = __importStar(__nccwpck_require__(2186)); const constants_1 = __nccwpck_require__(9042); +const custom = __importStar(__nccwpck_require__(1082)); const stateProvider_1 = __nccwpck_require__(1527); const utils = __importStar(__nccwpck_require__(6850)); -const custom = __importStar(__nccwpck_require__(1082)); const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined; // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to diff --git a/package-lock.json b/package-lock.json index 6d2eb08..263e77d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -32,7 +32,7 @@ "eslint-plugin-simple-import-sort": "^7.0.0", "jest": "^28.1.3", "jest-circus": "^27.5.1", - "nock": "^13.2.9", + "nock": "^13.5.6", "prettier": "^2.8.8", "ts-jest": "^28.0.8", "typescript": "^4.9.3" @@ -4720,6 +4720,7 @@ "integrity": "sha512-jI/ewavBQ7X5178262JQR0ewicPAcJhXS/iFaNJl0VHLfyosZ/kwSrsa6VNQNSO8i9d8SqdRgOtZSOKJ/+iNMw==", "deprecated": "This is a stub types definition. 
nock provides its own type definitions, so you do not need this installed.", "dev": true, + "license": "MIT", "dependencies": { "nock": "*" } @@ -10004,12 +10005,6 @@ "node": ">=4" } }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -10150,14 +10145,14 @@ "dev": true }, "node_modules/nock": { - "version": "13.2.9", - "resolved": "https://registry.npmjs.org/nock/-/nock-13.2.9.tgz", - "integrity": "sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA==", + "version": "13.5.6", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", + "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^4.1.0", "json-stringify-safe": "^5.0.1", - "lodash": "^4.17.21", "propagate": "^2.0.0" }, "engines": { @@ -19654,12 +19649,6 @@ "path-exists": "^3.0.0" } }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -19773,14 +19762,13 @@ "dev": true }, "nock": { - "version": "13.2.9", - "resolved": "https://registry.npmjs.org/nock/-/nock-13.2.9.tgz", - "integrity": "sha512-1+XfJNYF1cjGB+TKMWi29eZ0b82QOvQs2YoLNzbpWGqFMtRQHTa57osqdGj4FrFPgkO4D4AZinzUJR9VvW3QUA==", + "version": "13.5.6", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", + "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", "dev": true, "requires": { "debug": "^4.1.0", "json-stringify-safe": "^5.0.1", - "lodash": "^4.17.21", "propagate": "^2.0.0" } }, diff --git a/package.json b/package.json index c0bff98..6f88744 100644 --- a/package.json +++ b/package.json @@ -46,7 +46,7 @@ "eslint-plugin-simple-import-sort": "^7.0.0", "jest": "^28.1.3", "jest-circus": "^27.5.1", - "nock": "^13.2.9", + "nock": "^13.5.6", "prettier": "^2.8.8", "ts-jest": "^28.0.8", "typescript": "^4.9.3" diff --git a/src/custom/backend.ts b/src/custom/backend.ts index a9146be..1b0a17b 100644 --- a/src/custom/backend.ts +++ b/src/custom/backend.ts @@ -154,19 +154,57 @@ export async function downloadCache( const archiveUrl = new URL(archiveLocation); const objectKey = archiveUrl.pathname.slice(1); - const command = new GetObjectCommand({ - Bucket: bucketName, - Key: objectKey - }); - const url = await getSignedUrl(s3Client, command, { - expiresIn: 3600 - }); - await downloadCacheHttpClientConcurrent(url, archivePath, { - ...options, - downloadConcurrency: downloadQueueSize, - concurrentBlobDownloads: true, - partSize: downloadPartSize - }); + + // Retry logic for download validation failures + const maxRetries = 3; + let lastError: Error | undefined; + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const command = new GetObjectCommand({ + Bucket: bucketName, + Key: objectKey + }); + const url = await getSignedUrl(s3Client, command, { + expiresIn: 3600 + }); + + 
await downloadCacheHttpClientConcurrent(url, archivePath, { + ...options, + downloadConcurrency: downloadQueueSize, + concurrentBlobDownloads: true, + partSize: downloadPartSize + }); + + // If we get here, download succeeded + return; + } catch (error) { + const errorMessage = (error as Error).message; + lastError = error as Error; + + // Only retry on validation failures, not on other errors + if ( + errorMessage.includes("Download validation failed") || + errorMessage.includes("Range request not supported") || + errorMessage.includes("Content-Range header") + ) { + if (attempt < maxRetries) { + const delayMs = Math.pow(2, attempt - 1) * 1000; // exponential backoff + core.warning( + `Download attempt ${attempt} failed: ${errorMessage}. Retrying in ${delayMs}ms...` + ); + await new Promise(resolve => setTimeout(resolve, delayMs)); + continue; + } + } + + // For non-retryable errors or max retries reached, throw the error + throw error; + } + } + + // This should never be reached, but just in case + throw lastError || new Error("Download failed after all retry attempts"); } export async function saveCache( diff --git a/src/custom/cache.ts b/src/custom/cache.ts index aec47c8..6de7cd9 100644 --- a/src/custom/cache.ts +++ b/src/custom/cache.ts @@ -27,6 +27,14 @@ export class ReserveCacheError extends Error { } } +export class DownloadValidationError extends Error { + constructor(message: string) { + super(message); + this.name = "DownloadValidationError"; + Object.setPrototypeOf(this, DownloadValidationError.prototype); + } +} + function checkPaths(paths: string[]): void { if (!paths || paths.length === 0) { throw new ValidationError( @@ -135,6 +143,21 @@ export async function restoreCache( )} MB (${archiveFileSize} B)` ); + // Validate downloaded archive + if (archiveFileSize === 0) { + throw new DownloadValidationError( + "Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache." + ); + } + + // Minimum size check - a valid tar archive needs at least 512 bytes for header + const MIN_ARCHIVE_SIZE = 512; + if (archiveFileSize < MIN_ARCHIVE_SIZE) { + throw new DownloadValidationError( + `Downloaded cache archive is too small (${archiveFileSize} bytes). 
Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.` + ); + } + await extractTar(archivePath, compressionMethod); core.info("Cache restored successfully"); @@ -143,6 +166,11 @@ export async function restoreCache( const typedError = error as Error; if (typedError.name === ValidationError.name) { throw error; + } else if (typedError.name === DownloadValidationError.name) { + // Log download validation errors as warnings but don't fail the workflow + core.warning( + `Cache download validation failed: ${typedError.message}` + ); } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${(error as Error).message}`); diff --git a/src/custom/downloadUtils.ts b/src/custom/downloadUtils.ts index 5e311a0..320b6cb 100644 --- a/src/custom/downloadUtils.ts +++ b/src/custom/downloadUtils.ts @@ -160,6 +160,7 @@ export async function downloadCacheHttpClientConcurrent( socketTimeout: options.timeoutInMs, keepAlive: true }); + let progress: DownloadProgress | undefined; try { const res = await retryHttpClientResponse( "downloadCacheMetadata", @@ -210,7 +211,7 @@ export async function downloadCacheHttpClientConcurrent( downloads.reverse(); let actives = 0; let bytesDownloaded = 0; - const progress = new DownloadProgress(length); + progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); @@ -246,7 +247,17 @@ export async function downloadCacheHttpClientConcurrent( while (actives > 0) { await waitAndWrite(); } + + // Validate that we downloaded the expected amount of data + if (bytesDownloaded !== length) { + throw new Error( + `Download validation failed: Expected ${length} bytes but downloaded ${bytesDownloaded} bytes` + ); + } + + progress.stopDisplayTimer(); } finally { + progress?.stopDisplayTimer(); httpClient.dispose(); await archiveDescriptor.close(); } diff --git a/src/restoreImpl.ts b/src/restoreImpl.ts index 98f319a..5e8867c 100644 --- a/src/restoreImpl.ts +++ b/src/restoreImpl.ts @@ -2,6 +2,7 @@ import * as cache from "@actions/cache"; import * as core from "@actions/core"; import { Events, Inputs, Outputs, State } from "./constants"; +import * as custom from "./custom/cache"; import { IStateProvider, NullStateProvider, @@ -9,7 +10,6 @@ import { } from "./stateProvider"; import * as utils from "./utils/actionUtils"; -import * as custom from "./custom/cache"; const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined; export async function restoreImpl( diff --git a/src/saveImpl.ts b/src/saveImpl.ts index a5516a3..d1fca4c 100644 --- a/src/saveImpl.ts +++ b/src/saveImpl.ts @@ -2,6 +2,7 @@ import * as cache from "@actions/cache"; import * as core from "@actions/core"; import { Events, Inputs, State } from "./constants"; +import * as custom from "./custom/cache"; import { IStateProvider, NullStateProvider, @@ -9,7 +10,6 @@ import { } from "./stateProvider"; import * as utils from "./utils/actionUtils"; -import * as custom from "./custom/cache"; const canSaveToS3 = process.env["RUNS_ON_S3_BUCKET_CACHE"] !== undefined; // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
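
The notes below expand on the techniques in this patch with short TypeScript sketches. They are commentary for reviewers, not additional hunks; any helper names, paths, or values not present in the diff above are illustrative assumptions.

The retry loop in src/custom/backend.ts (and, compiled, in all four dist bundles) is a standard exponential-backoff pattern: delays of 2^(attempt - 1) seconds give the 1s/2s/4s schedule from the commit message, and only validation-style errors are retried. A generic form of the same shape, with a hypothetical helper name (the patch deliberately inlines the loop rather than extracting a helper):

// Hypothetical generic form of the inlined retry loop (helper name assumed).
async function retryWithBackoff<T>(
    fn: () => Promise<T>,
    isRetryable: (error: Error) => boolean,
    maxRetries = 3,
    baseDelayMs = 1000
): Promise<T> {
    let lastError: Error | undefined;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return await fn();
        } catch (error) {
            lastError = error as Error;
            // Non-retryable errors and the final attempt propagate immediately.
            if (!isRetryable(lastError) || attempt === maxRetries) {
                throw error;
            }
            const delayMs = baseDelayMs * Math.pow(2, attempt - 1); // 1s, 2s, 4s
            await new Promise(resolve => setTimeout(resolve, delayMs));
        }
    }
    throw lastError ?? new Error("Download failed after all retry attempts");
}

The patch's retryability test matches on message substrings ("Download validation failed", "Range request not supported", "Content-Range header"), which keeps transient truncation retryable while credential or permission errors still fail fast.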
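
The completeness check compares bytesDownloaded against the total the server advertises. That total comes from the Content-Range header of the initial ranged request: a 206 response of the form "bytes 0-1/<total>". A minimal sketch of that parsing step, assuming the function name; the real logic lives inline in downloadCacheHttpClientConcurrent:

// Parse the total object size out of a 206 Content-Range header.
// "bytes 0-1/1024" -> 1024; a "*" total or a missing header is an error,
// which the backend's retry matcher treats as retryable.
function totalSizeFromContentRange(header: string | undefined): number {
    const match = header?.match(/^bytes \d+-\d+\/(\d+)$/);
    if (!match) {
        throw new Error("Content-Range header is missing or malformed");
    }
    return parseInt(match[1], 10);
}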
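
The 512-byte floor in restoreCache is not arbitrary: tar writes in 512-byte blocks, so even a header-only archive occupies at least one block, and anything smaller cannot be a valid tar stream. The two size checks, restated as a standalone function (import path assumed, mirroring the test file's convention):

import { DownloadValidationError } from "../src/custom/cache";

// Mirror of the two size checks added to restoreCache.
function validateArchiveSize(archiveFileSize: number): void {
    if (archiveFileSize === 0) {
        throw new DownloadValidationError(
            "Downloaded cache archive is empty (0 bytes). This may indicate a failed download or corrupted cache."
        );
    }
    const MIN_ARCHIVE_SIZE = 512; // one tar header block
    if (archiveFileSize < MIN_ARCHIVE_SIZE) {
        throw new DownloadValidationError(
            `Downloaded cache archive is too small (${archiveFileSize} bytes). Expected at least ${MIN_ARCHIVE_SIZE} bytes for a valid archive.`
        );
    }
}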
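
Because restoreCache downgrades DownloadValidationError to a core.warning, callers observe a validation failure exactly as they observe a cache miss: an undefined return value. A caller-side sketch (the paths and key are illustrative):

import { restoreCache } from "../src/custom/cache";

async function restoreNpmCache(): Promise<void> {
    const hitKey = await restoreCache(["~/.npm"], "npm-cache-v1");
    if (hitKey === undefined) {
        // Either a genuine miss or a validation failure that was downgraded
        // to a warning; the workflow continues and rebuilds from scratch.
        console.log("Cache not restored; proceeding without it");
    }
}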
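
One subtle change rides along with the completeness check: progress is hoisted from a const inside the try block to a let declared before it, and the finally block now calls progress?.stopDisplayTimer(). Without that, the new throw would leave the progress display timer running after a failed download. A minimal self-contained model of the pattern (Timer and run are invented for illustration):

// Invented names; models the resource-lifetime fix in downloadCacheHttpClientConcurrent.
class Timer {
    private handle?: NodeJS.Timeout;
    start(): void {
        this.handle = setInterval(() => console.log("downloading..."), 1000);
    }
    stop(): void {
        if (this.handle) clearInterval(this.handle);
    }
}

async function run(work: () => Promise<void>): Promise<void> {
    let timer: Timer | undefined; // declared outside try so finally can reach it
    try {
        timer = new Timer();
        timer.start();
        await work(); // may throw "Download validation failed: ..."
    } finally {
        timer?.stop(); // executes on both success and failure
    }
}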
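
Finally, the new test file drives downloadCacheHttpClientConcurrent through nock by mocking the two-phase protocol: first the metadata probe, then the ranged content fetch. The same shape can be reused for other failure modes; the URL and sizes below are illustrative:

import nock from "nock";

// Probe reply advertises 1024 bytes; content reply delivers only 512,
// so the bytesDownloaded !== length check rejects with
// "Download validation failed: Expected 1024 bytes but downloaded 512 bytes".
nock("https://example.com")
    .get("/cache.tar.gz")
    .reply(206, "xx", { "content-range": "bytes 0-1/1024" })
    .get("/cache.tar.gz")
    .reply(206, Buffer.alloc(512), { "content-range": "bytes 0-511/1024" });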