diff --git a/README.md b/README.md
index 1eba600c..fbf5e252 100644
--- a/README.md
+++ b/README.md
@@ -36,6 +36,25 @@
 When these variables are set, Unity Builder will direct its CloudFormation, ECS, and related AWS calls
 to the emulator instead of the real AWS services. See `.github/workflows/cloud-runner-integrity-localstack.yml`
 for an example configuration.
+## Rclone storage provider (experimental)
+
+Unity Builder can use rclone as an alternative storage provider for cache and build artifacts in Cloud Runner.
+
+- Inputs:
+  - `storageProvider`: set to `rclone` to enable rclone-backed storage (the default is `s3`).
+  - `rcloneRemote`: the rclone remote to use, e.g. `s3:my-bucket`, `gcs:my-bucket`, or `local:./path`.
+
+When `storageProvider=rclone`:
+
+- Retained workspace locking uses rclone operations (touch/delete/ls) instead of S3.
+- Built-in container hooks are available:
+  - `rclone-pull-cache`, `rclone-upload-cache`
+  - `rclone-pull-build`, `rclone-upload-build`
+
+You must ensure the `rclone` CLI is available in the container runtime; the built-in hooks use the `rclone/rclone` image.
+
+Example (local testing): set `RCLONE_REMOTE=local:./temp/rclone-remote` and include the rclone hooks via `containerHookFiles`.
+
 ## Community
 
 Feel free to join us on
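For context on how the `rcloneRemote` input is consumed: the provider code in this change shells out to the `rclone` CLI and uses the remote string as a path prefix. A minimal sketch of that pattern, assuming `rclone` is on PATH; the `remote` value and lock names below are illustrative, not part of the change:

```ts
import { promisify } from 'node:util';
import { exec as execCb } from 'node:child_process';

const exec = promisify(execCb);

// Run an rclone subcommand and return its stdout.
async function rclone(command: string): Promise<string> {
  const { stdout } = await exec(`rclone ${command}`);
  return stdout;
}

// Remotes such as `local:./temp/rclone-remote` or `s3:my-bucket` act as
// path prefixes for mkdir/touch/lsf/delete operations.
async function demo(remote: string): Promise<void> {
  await rclone(`mkdir ${remote}/locks`); // create the prefix if missing
  await rclone(`touch ${remote}/locks/example_lock`); // zero-byte marker
  console.log(await rclone(`lsf ${remote}/locks`)); // list entries, one per line
}

demo('local:./temp/rclone-remote').catch(console.error);
```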
diff --git a/src/model/cloud-runner/providers/aws/services/task-service.ts b/src/model/cloud-runner/providers/aws/services/task-service.ts
index f4df3323..ef9fee66 100644
--- a/src/model/cloud-runner/providers/aws/services/task-service.ts
+++ b/src/model/cloud-runner/providers/aws/services/task-service.ts
@@ -12,6 +12,7 @@ import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
 import AwsTaskRunner from '../aws-task-runner';
 import CloudRunner from '../../../cloud-runner';
 import { AwsClientFactory } from '../aws-client-factory';
+import SharedWorkspaceLocking from '../../../services/core/shared-workspace-locking';
 
 export class TaskService {
   static async watch() {
@@ -182,6 +183,10 @@ export class TaskService {
   }
   public static async getLocks() {
     process.env.AWS_REGION = Input.region;
+    if (CloudRunner.buildParameters.storageProvider === 'rclone') {
+      const objects = await (SharedWorkspaceLocking as any).listObjects('');
+      return objects.map((x: string) => ({ Key: x }));
+    }
     const s3 = AwsClientFactory.getS3();
     const listRequest = {
       Bucket: CloudRunner.buildParameters.awsStackName,
diff --git a/src/model/cloud-runner/remote-client/index.ts b/src/model/cloud-runner/remote-client/index.ts
index 99d090fd..8536ca53 100644
--- a/src/model/cloud-runner/remote-client/index.ts
+++ b/src/model/cloud-runner/remote-client/index.ts
@@ -304,6 +304,7 @@ export class RemoteClient {
     // Best effort: try plain pull first (works for public repos or pre-configured auth)
     try {
       await CloudRunnerSystem.Run(`git lfs pull`, true);
+      await CloudRunnerSystem.Run(`git lfs checkout || true`, true);
       RemoteClientLogger.log(`Pulled LFS files without explicit token configuration`);
 
       return;
@@ -324,6 +325,7 @@
         `git config --global url."https://${gitPrivateToken}@github.com/".insteadOf "https://github.com/"`,
       );
       await CloudRunnerSystem.Run(`git lfs pull`, true);
+      await CloudRunnerSystem.Run(`git lfs checkout || true`, true);
       RemoteClientLogger.log(`Successfully pulled LFS files with GIT_PRIVATE_TOKEN`);
 
       return;
@@ -344,6 +346,7 @@
         `git config --global url."https://${githubToken}@github.com/".insteadOf "https://github.com/"`,
      );
       await CloudRunnerSystem.Run(`git lfs pull`, true);
+      await CloudRunnerSystem.Run(`git lfs checkout || true`, true);
       RemoteClientLogger.log(`Successfully pulled LFS files with GITHUB_TOKEN`);
 
       return;
@@ -374,6 +377,7 @@
       await CloudRunnerSystem.Run(`git fetch origin +refs/pull/*:refs/remotes/origin/pull/* || true`);
     }
     await CloudRunnerSystem.Run(`git lfs pull`);
+    await CloudRunnerSystem.Run(`git lfs checkout || true`);
     const sha = CloudRunner.buildParameters.gitSha;
     const branch = CloudRunner.buildParameters.branch;
     try {
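Each `git lfs pull` above is now followed by `git lfs checkout || true`, which rewrites any remaining pointer files with the downloaded content without failing the step. A hedged sketch of that shared pattern, using promisified `exec` rather than the repo's `CloudRunnerSystem.Run` helper; the `repoDir` parameter is illustrative:

```ts
import { promisify } from 'node:util';
import { exec as execCb } from 'node:child_process';

const exec = promisify(execCb);

// Download LFS objects, then force-materialize pointer files. The
// `|| true` mirrors the diff: checkout problems must not fail the build.
async function materializeLfs(repoDir: string): Promise<void> {
  await exec('git lfs pull', { cwd: repoDir });
  await exec('git lfs checkout || true', { cwd: repoDir });
}
```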
diff --git a/src/model/cloud-runner/services/core/shared-workspace-locking.ts b/src/model/cloud-runner/services/core/shared-workspace-locking.ts
index 34b67b09..fbc4c54d 100644
--- a/src/model/cloud-runner/services/core/shared-workspace-locking.ts
+++ b/src/model/cloud-runner/services/core/shared-workspace-locking.ts
@@ -11,6 +11,9 @@ import {
   S3,
 } from '@aws-sdk/client-s3';
 import { AwsClientFactory } from '../../providers/aws/aws-client-factory';
+import { promisify } from 'node:util';
+import { exec as execCb } from 'node:child_process';
+const exec = promisify(execCb);
 export class SharedWorkspaceLocking {
   private static _s3: S3;
   private static get s3(): S3 {
@@ -20,11 +23,22 @@ export class SharedWorkspaceLocking {
     }
     return SharedWorkspaceLocking._s3;
   }
+  private static get useRclone() {
+    return CloudRunner.buildParameters.storageProvider === 'rclone';
+  }
+  private static async rclone(command: string): Promise<string> {
+    const { stdout } = await exec(`rclone ${command}`);
+    return stdout.toString();
+  }
   private static get bucket() {
-    return CloudRunner.buildParameters.awsStackName;
+    return SharedWorkspaceLocking.useRclone
+      ? CloudRunner.buildParameters.rcloneRemote
+      : CloudRunner.buildParameters.awsStackName;
   }
   public static get workspaceBucketRoot() {
-    return `s3://${SharedWorkspaceLocking.bucket}/`;
+    return SharedWorkspaceLocking.useRclone
+      ? `${SharedWorkspaceLocking.bucket}/`
+      : `s3://${SharedWorkspaceLocking.bucket}/`;
   }
   public static get workspaceRoot() {
     return `${SharedWorkspaceLocking.workspaceBucketRoot}locks/`;
@@ -34,6 +48,14 @@
   }
   private static async ensureBucketExists(): Promise<void> {
     const bucket = SharedWorkspaceLocking.bucket;
+    if (SharedWorkspaceLocking.useRclone) {
+      try {
+        await SharedWorkspaceLocking.rclone(`lsf ${bucket}`);
+      } catch {
+        await SharedWorkspaceLocking.rclone(`mkdir ${bucket}`);
+      }
+      return;
+    }
     try {
       await SharedWorkspaceLocking.s3.send(new HeadBucketCommand({ Bucket: bucket }));
     } catch {
@@ -50,6 +72,16 @@
     if (prefix !== '' && !prefix.endsWith('/')) {
       prefix += '/';
     }
+    if (SharedWorkspaceLocking.useRclone) {
+      const path = `${bucket}/${prefix}`;
+      try {
+        const output = await SharedWorkspaceLocking.rclone(`lsjson ${path}`);
+        const json = JSON.parse(output) as { Name: string; IsDir: boolean }[];
+        return json.map((e) => (e.IsDir ? `${e.Name}/` : e.Name));
+      } catch {
+        return [];
+      }
+    }
     const result = await SharedWorkspaceLocking.s3.send(
       new ListObjectsV2Command({ Bucket: bucket, Prefix: prefix, Delimiter: '/' }),
     );
@@ -264,9 +296,13 @@
     const timestamp = Date.now();
     const key = `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${timestamp}_${workspace}_workspace`;
     await SharedWorkspaceLocking.ensureBucketExists();
-    await SharedWorkspaceLocking.s3.send(
-      new PutObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key, Body: '' }),
-    );
+    if (SharedWorkspaceLocking.useRclone) {
+      await SharedWorkspaceLocking.rclone(`touch ${SharedWorkspaceLocking.bucket}/${key}`);
+    } else {
+      await SharedWorkspaceLocking.s3.send(
+        new PutObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key, Body: '' }),
+      );
+    }
 
     const workspaces = await SharedWorkspaceLocking.GetAllWorkspaces(buildParametersContext);
 
@@ -292,18 +328,26 @@
       buildParametersContext.cacheKey
     }/${Date.now()}_${runId}_${ending}_lock`;
     await SharedWorkspaceLocking.ensureBucketExists();
-    await SharedWorkspaceLocking.s3.send(
-      new PutObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key, Body: '' }),
-    );
+    if (SharedWorkspaceLocking.useRclone) {
+      await SharedWorkspaceLocking.rclone(`touch ${SharedWorkspaceLocking.bucket}/${key}`);
+    } else {
+      await SharedWorkspaceLocking.s3.send(
+        new PutObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key, Body: '' }),
+      );
+    }
 
     const hasLock = await SharedWorkspaceLocking.HasWorkspaceLock(workspace, runId, buildParametersContext);
 
     if (hasLock) {
       CloudRunner.lockedWorkspace = workspace;
     } else {
-      await SharedWorkspaceLocking.s3.send(
-        new DeleteObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key }),
-      );
+      if (SharedWorkspaceLocking.useRclone) {
+        await SharedWorkspaceLocking.rclone(`delete ${SharedWorkspaceLocking.bucket}/${key}`);
+      } else {
+        await SharedWorkspaceLocking.s3.send(
+          new DeleteObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key }),
+        );
+      }
     }
 
     return hasLock;
@@ -321,12 +365,18 @@
     CloudRunnerLogger.log(`Deleting lock ${workspace}/${file}`);
     CloudRunnerLogger.log(`rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${file}`);
     if (file) {
-      await SharedWorkspaceLocking.s3.send(
-        new DeleteObjectCommand({
-          Bucket: SharedWorkspaceLocking.bucket,
-          Key: `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${file}`,
-        }),
-      );
+      if (SharedWorkspaceLocking.useRclone) {
+        await SharedWorkspaceLocking.rclone(
+          `delete ${SharedWorkspaceLocking.bucket}/${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${file}`,
+        );
+      } else {
+        await SharedWorkspaceLocking.s3.send(
+          new DeleteObjectCommand({
+            Bucket: SharedWorkspaceLocking.bucket,
+            Key: `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${file}`,
+          }),
+        );
+      }
     }
 
     return !(await SharedWorkspaceLocking.HasWorkspaceLock(workspace, runId, buildParametersContext));
@@ -336,14 +386,18 @@
     const prefix = `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/`;
     const files = await SharedWorkspaceLocking.listObjects(prefix);
     for (const file of files.filter((x) => x.includes(`_${workspace}_`))) {
-      await SharedWorkspaceLocking.s3.send(
-        new DeleteObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: `${prefix}${file}` }),
-      );
+      if (SharedWorkspaceLocking.useRclone) {
+        await SharedWorkspaceLocking.rclone(`delete ${SharedWorkspaceLocking.bucket}/${prefix}${file}`);
+      } else {
+        await SharedWorkspaceLocking.s3.send(
+          new DeleteObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: `${prefix}${file}` }),
+        );
+      }
     }
   }
 
   public static async ReadLines(command: string): Promise<string[]> {
-    const path = command.replace('aws s3 ls', '').trim();
+    const path = command.replace('aws s3 ls', '').replace('rclone lsf', '').trim();
     const withoutScheme = path.replace('s3://', '');
     const [bucket, ...rest] = withoutScheme.split('/');
     const prefix = rest.join('/');
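`listObjects` now reads directory listings from `rclone lsjson`, which prints a JSON array of entries. A self-contained sketch of the mapping used above; the sample output is inlined and illustrative (real `lsjson` entries carry more fields than `Name` and `IsDir`):

```ts
type RcloneEntry = { Name: string; IsDir: boolean };

// Mirror S3's Delimiter-based listing: directories get a trailing slash
// so callers can distinguish prefixes from objects.
function toKeys(lsjsonOutput: string): string[] {
  const entries = JSON.parse(lsjsonOutput) as RcloneEntry[];
  return entries.map((e) => (e.IsDir ? `${e.Name}/` : e.Name));
}

const sample = '[{"Name":"locks","IsDir":true},{"Name":"1700000000000_a_workspace","IsDir":false}]';
console.log(toKeys(sample)); // [ 'locks/', '1700000000000_a_workspace' ]
```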
diff --git a/src/model/cloud-runner/services/hooks/container-hook-service.ts b/src/model/cloud-runner/services/hooks/container-hook-service.ts
index bc0bb00b..418ef485 100644
--- a/src/model/cloud-runner/services/hooks/container-hook-service.ts
+++ b/src/model/cloud-runner/services/hooks/container-hook-service.ts
@@ -182,6 +182,80 @@
     else
       echo "AWS CLI not available, skipping aws-s3-pull-cache"
     fi
+- name: rclone-upload-build
+  image: rclone/rclone
+  hook: after
+  commands: |
+    if command -v rclone > /dev/null 2>&1; then
+      rclone copy /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
+        CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
+      } ${CloudRunner.buildParameters.rcloneRemote}/cloud-runner-cache/$CACHE_KEY/build/ || true
+      rm /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
+        CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
+      } || true
+    else
+      echo "rclone not available, skipping rclone-upload-build"
+    fi
+  secrets:
+    - name: RCLONE_REMOTE
+      value: ${CloudRunner.buildParameters.rcloneRemote || ``}
+- name: rclone-pull-build
+  image: rclone/rclone
+  commands: |
+    mkdir -p /data/cache/$CACHE_KEY/build/
+    if command -v rclone > /dev/null 2>&1; then
+      rclone copy ${
+        CloudRunner.buildParameters.rcloneRemote
+      }/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
+        CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
+      } /data/cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
+        CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
+      } || true
+    else
+      echo "rclone not available, skipping rclone-pull-build"
+    fi
+  secrets:
+    - name: BUILD_GUID_TARGET
+    - name: RCLONE_REMOTE
+      value: ${CloudRunner.buildParameters.rcloneRemote || ``}
+- name: rclone-upload-cache
+  image: rclone/rclone
+  hook: after
+  commands: |
+    if command -v rclone > /dev/null 2>&1; then
+      rclone copy /data/cache/$CACHE_KEY/lfs ${
+        CloudRunner.buildParameters.rcloneRemote
+      }/cloud-runner-cache/$CACHE_KEY/lfs || true
+      rm -r /data/cache/$CACHE_KEY/lfs || true
+      rclone copy /data/cache/$CACHE_KEY/Library ${
+        CloudRunner.buildParameters.rcloneRemote
+      }/cloud-runner-cache/$CACHE_KEY/Library || true
+      rm -r /data/cache/$CACHE_KEY/Library || true
+    else
+      echo "rclone not available, skipping rclone-upload-cache"
+    fi
+  secrets:
+    - name: RCLONE_REMOTE
+      value: ${CloudRunner.buildParameters.rcloneRemote || ``}
+- name: rclone-pull-cache
+  image: rclone/rclone
+  hook: before
+  commands: |
+    mkdir -p /data/cache/$CACHE_KEY/Library/
+    mkdir -p /data/cache/$CACHE_KEY/lfs/
+    if command -v rclone > /dev/null 2>&1; then
+      rclone copy ${
+        CloudRunner.buildParameters.rcloneRemote
+      }/cloud-runner-cache/$CACHE_KEY/Library /data/cache/$CACHE_KEY/Library/ || true
+      rclone copy ${
+        CloudRunner.buildParameters.rcloneRemote
+      }/cloud-runner-cache/$CACHE_KEY/lfs /data/cache/$CACHE_KEY/lfs/ || true
+    else
+      echo "rclone not available, skipping rclone-pull-cache"
+    fi
+  secrets:
+    - name: RCLONE_REMOTE
+      value: ${CloudRunner.buildParameters.rcloneRemote || ``}
 - name: debug-cache
   image: ubuntu
   hook: after
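The built-ins above are YAML hook definitions embedded in a TypeScript template literal inside container-hook-service.ts. Assuming user-supplied hooks follow the same shape (name, image, optional `hook: before`/`after`, `commands`, `secrets`), a hypothetical custom hook might look like this; the hook name and probe commands are illustrative only:

```ts
// Hypothetical custom hook, mirroring the structure of the built-ins.
// $RCLONE_REMOTE is injected via the hook's secrets list at runtime.
const customHook = `
- name: my-rclone-probe
  image: rclone/rclone
  hook: before
  commands: |
    rclone version || true
    rclone lsf $RCLONE_REMOTE || true
  secrets:
    - name: RCLONE_REMOTE
`;

export default customHook;
```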
diff --git a/src/model/cloud-runner/tests/cloud-runner-rclone-steps.test.ts b/src/model/cloud-runner/tests/cloud-runner-rclone-steps.test.ts
new file mode 100644
index 00000000..f78adcee
--- /dev/null
+++ b/src/model/cloud-runner/tests/cloud-runner-rclone-steps.test.ts
@@ -0,0 +1,78 @@
+import CloudRunner from '../cloud-runner';
+import { BuildParameters, ImageTag } from '../..';
+import UnityVersioning from '../../unity-versioning';
+import { Cli } from '../../cli/cli';
+import CloudRunnerLogger from '../services/core/cloud-runner-logger';
+import { v4 as uuidv4 } from 'uuid';
+import setups from './cloud-runner-suite.test';
+import { CloudRunnerSystem } from '../services/core/cloud-runner-system';
+import { OptionValues } from 'commander';
+
+async function CreateParameters(overrides: OptionValues | undefined) {
+  if (overrides) {
+    Cli.options = overrides;
+  }
+
+  return await BuildParameters.create();
+}
+
+describe('Cloud Runner pre-built rclone steps', () => {
+  it('Responds', () => {});
+  it('Simple test to check if file is loaded', () => {
+    expect(true).toBe(true);
+  });
+  setups();
+
+  (() => {
+    // Determine environment capability to run rclone operations
+    const isCI = process.env.GITHUB_ACTIONS === 'true';
+    let rcloneAvailable = false;
+    if (!isCI) {
+      try {
+        const { execSync } = require('child_process');
+        execSync('rclone version', { stdio: 'ignore' });
+        rcloneAvailable = true;
+      } catch {
+        rcloneAvailable = false;
+      }
+    }
+
+    const hasRcloneRemote = Boolean(process.env.RCLONE_REMOTE || process.env.rcloneRemote);
+    const shouldRunRclone = (isCI && hasRcloneRemote) || rcloneAvailable;
+
+    if (shouldRunRclone) {
+      it('Run build and prebuilt rclone cache pull, cache push and upload build', async () => {
+        const remote = process.env.RCLONE_REMOTE || process.env.rcloneRemote || 'local:./temp/rclone-remote';
+        const overrides = {
+          versioning: 'None',
+          projectPath: 'test-project',
+          unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
+          targetPlatform: 'StandaloneLinux64',
+          cacheKey: `test-case-${uuidv4()}`,
+          containerHookFiles: `rclone-pull-cache,rclone-upload-cache,rclone-upload-build`,
+          storageProvider: 'rclone',
+          rcloneRemote: remote,
+          cloudRunnerDebug: true,
+        } as unknown as OptionValues;
+
+        const buildParams = await CreateParameters(overrides);
+        const baseImage = new ImageTag(buildParams);
+        const results = await CloudRunner.run(buildParams, baseImage.toString());
+        CloudRunnerLogger.log(`rclone run succeeded`);
+        expect(results.BuildSucceeded).toBe(true);
+
+        // List remote root to validate the remote is accessible (best-effort)
+        try {
+          const lines = await CloudRunnerSystem.RunAndReadLines(`rclone lsf ${remote}`);
+          CloudRunnerLogger.log(lines.join(','));
+        } catch {}
+      }, 1_000_000_000);
+    } else {
+      it.skip('Run build and prebuilt rclone steps - rclone not configured', () => {
+        CloudRunnerLogger.log('rclone not configured (no CLI/remote); skipping rclone test');
+      });
+    }
+  })();
+});
+
diff --git a/src/model/cloud-runner/workflows/build-automation-workflow.ts b/src/model/cloud-runner/workflows/build-automation-workflow.ts
index 7447f0e3..837d8a3b 100644
--- a/src/model/cloud-runner/workflows/build-automation-workflow.ts
+++ b/src/model/cloud-runner/workflows/build-automation-workflow.ts
@@ -147,6 +147,18 @@ echo "CACHE_KEY=$CACHE_KEY"`;
       cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'steps'))}" "/steps"
       chmod -R +x "/entrypoint.sh"
       chmod -R +x "/steps"
+      # Ensure Git LFS files are available inside the container for local-docker runs
+      if [ -d "$GITHUB_WORKSPACE/.git" ]; then
+        echo "Ensuring Git LFS content is pulled"
+        (cd "$GITHUB_WORKSPACE" \
+          && git lfs install || true \
+          && git config --global filter.lfs.smudge "git-lfs smudge -- %f" \
+          && git config --global filter.lfs.process "git-lfs filter-process" \
+          && git lfs pull || true \
+          && git lfs checkout || true)
+      else
+        echo "Skipping Git LFS pull: no .git directory in workspace"
+      fi
       # Normalize potential CRLF line endings and create safe stubs for missing tooling
       if command -v sed > /dev/null 2>&1; then
         sed -i 's/\r$//' "/entrypoint.sh" || true
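The workflow addition performs this LFS bootstrap in shell inside the container. For readers following along on the TypeScript side, an equivalent guard sketched in Node; the function name and paths are illustrative, not part of the change:

```ts
import { existsSync } from 'node:fs';
import { execSync } from 'node:child_process';
import path from 'node:path';

// Only attempt LFS operations when the workspace is a real git checkout,
// and treat each step as best-effort, mirroring the `|| true` shell guards.
function ensureLfsContent(workspace: string): void {
  if (!existsSync(path.join(workspace, '.git'))) {
    console.log('Skipping Git LFS pull: no .git directory in workspace');
    return;
  }
  for (const cmd of ['git lfs install', 'git lfs pull', 'git lfs checkout']) {
    try {
      execSync(cmd, { cwd: workspace, stdio: 'inherit' });
    } catch {
      // best-effort: continue with the remaining steps
    }
  }
}
```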