Cloud runner develop - better parameterization of S3 usage, improved async workflow and GC, GitHub checks early integration (#479)

* custom steps may leave value undefined, will be pulled from env vars

* add 3 new premade steps, steam-deploy-client, steam-deploy-project, aws-s3-pull-build (usage sketch below, after the changed-files summary)

* fix

* continue building async-workflow support

* test checks

* move github checks within build workflow

* async workflow test

* async workflow test for aws only

* cleanup logging

* disable lz4 compression by default

* AWS BASE STACK for tests

* Update github check with aws log

* kinesis and subscription filter for logs creation skipped when watchToEnd false

* cleanup local pipeline, log aws formation

* async pipeline

* workflow

* parameterize s3
Frostebite 2023-01-20 17:40:57 +00:00 committed by GitHub
parent a45155c578
commit e334dc785a
39 changed files with 3083 additions and 399 deletions
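
For orientation, a minimal usage sketch of the features this commit introduces, pieced together from the inputs and options visible in the diffs below. The input names customStepFiles and githubOwner and the outputs come from the changes themselves; whether githubChecks and asyncCloudRunner are surfaced as `with:` inputs or environment variables is not fully shown in this excerpt, so treat those keys as assumptions.

```yaml
# Hedged sketch only - not part of this commit.
permissions:
  checks: write                  # required for the GitHub checks integration (see workflow diffs below)
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: ./
        id: unity-build
        with:
          targetPlatform: StandaloneWindows64
          cloudRunnerCluster: aws
          githubOwner: game-ci   # new action input added in this commit; example value
          # premade steps; this commit adds aws-s3-pull-build, steam-deploy-client and steam-deploy-project
          customStepFiles: aws-s3-pull-cache,aws-s3-upload-build,aws-s3-upload-cache
        env:
          githubChecks: true     # assumed key; option getter added in cloud-runner-options.ts
          asyncCloudRunner: true # assumed key; when true, watchToEnd is forced off
      # the new BUILD_ARTIFACT output replaces the hard-coded build-<guid>.tar.lz4 name
      - run: echo ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
```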

View File

@ -8,7 +8,7 @@ on:
required: false
default: ''
permissions:
permissions:
checks: write
env:
@ -44,7 +44,6 @@ jobs:
with:
lfs: false
- run: yarn
- run: yarn run cli --help
- run: yarn run cli -m checks-update
timeout-minutes: 180
env:

View File

@ -1,95 +0,0 @@
name: Cloud Runner Local
on:
push: { branches: ['!cloud-runner-develop', '!cloud-runner-preview', '!main'] }
# push: { branches: [main] }
# pull_request:
# paths-ignore:
# - '.github/**'
jobs:
integrationTests:
name: Integration Tests
if: github.event.event_type != 'pull_request_target'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
cloudRunnerCluster:
- local-docker
targetPlatform:
- StandaloneWindows64 # Build a Windows 64-bit standalone.
# steps
steps:
- name: Checkout (default)
uses: actions/checkout@v2
with:
lfs: true
- run: yarn
- run: yarn run cli --help
- run: yarn run test-i --detectOpenHandles --forceExit --runInBand
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
PROJECT_PATH: ${{ matrix.projectPath }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
cloudRunnerTests: true
versioning: None
CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
buildTests:
name: Build Tests
if: github.event.event_type != 'pull_request_target'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
cloudRunnerCluster:
- local-docker
targetPlatform:
- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
- StandaloneWindows64 # Build a Windows 64-bit standalone.
- StandaloneLinux64 # Build a Linux 64-bit standalone.
- WebGL # WebGL.
- iOS # Build an iOS player.
- Android # Build an Android .apk.
# - StandaloneWindows # Build a Windows standalone.
# - WSAPlayer # Build an Windows Store Apps player.
# - PS4 # Build a PS4 Standalone.
# - XboxOne # Build a Xbox One Standalone.
# - tvOS # Build to Apple's tvOS platform.
# - Switch # Build a Nintendo Switch player
# steps
steps:
- name: Checkout (default)
uses: actions/checkout@v2
with:
lfs: true
- uses: ./
id: unity-build
timeout-minutes: 25
env:
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
CLOUD_RUNNER_DEBUG: true
CLOUD_RUNNER_DEBUG_TREE: true
DEBUG: true
PROJECT_PATH: test-project
UNITY_VERSION: 2019.3.15f1
USE_IL2CPP: false
with:
cloudRunnerTests: true
versioning: None
projectPath: ${{ matrix.projectPath }}
gitPrivateToken: ${{ secrets.GITHUB_TOKEN }}
targetPlatform: ${{ matrix.targetPlatform }}
cloudRunnerCluster: ${{ matrix.cloudRunnerCluster }}
- run: |
mv ./cloud-runner-cache/${{ steps.unity-build.outputs.CACHE_KEY }}/build/build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4 build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4
ls
###########################
# Upload #
###########################
- uses: actions/upload-artifact@v2
with:
name: Local Build (${{ matrix.targetPlatform }})
path: build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4
retention-days: 14

View File

@ -2,6 +2,12 @@ name: Cloud Runner CI Pipeline
on:
push: { branches: [cloud-runner-develop, cloud-runner-preview, main] }
workflow_dispatch:
permissions:
checks: write
contents: read
actions: write
env:
GKE_ZONE: 'us-central1'
@ -15,7 +21,7 @@ env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: eu-west-2
AWS_BASE_STACK_NAME: game-ci-github-pipelines
AWS_BASE_STACK_NAME: game-ci-team-pipelines
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
CLOUD_RUNNER_DEBUG: true
CLOUD_RUNNER_DEBUG_TREE: true
@ -24,6 +30,7 @@ env:
PROJECT_PATH: test-project
UNITY_VERSION: 2019.3.15f1
USE_IL2CPP: false
USE_GKE_GCLOUD_AUTH_PLUGIN: true
jobs:
integrationTests:
@ -41,14 +48,17 @@ jobs:
- name: Checkout (default)
uses: actions/checkout@v2
with:
lfs: true
- uses: google-github-actions/setup-gcloud@v0
lfs: false
- uses: google-github-actions/auth@v1
with:
version: '288.0.0'
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
credentials_json: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
- name: 'Set up Cloud SDK'
uses: 'google-github-actions/setup-gcloud@v1'
- name: Get GKE cluster credentials
run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
run: |
export USE_GKE_GCLOUD_AUTH_PLUGIN=True
gcloud components install gke-gcloud-auth-plugin
gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
@ -56,9 +66,8 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-2
- run: yarn
- run: yarn run cli --help
- run: yarn run test "cloud-runner-run-twice-retaining" --detectOpenHandles --forceExit --runInBand
if: matrix.CloudRunnerCluster == 'aws' || matrix.CloudRunnerCluster == 'k8s'
- run: yarn run test "cloud-runner-async-workflow" --detectOpenHandles --forceExit --runInBand
if: matrix.CloudRunnerCluster != 'local-docker'
timeout-minutes: 180
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
@ -68,6 +77,7 @@ jobs:
cloudRunnerTests: true
versioning: None
CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: yarn run test-i --detectOpenHandles --forceExit --runInBand
if: matrix.CloudRunnerCluster == 'local-docker'
timeout-minutes: 180
@ -79,10 +89,8 @@ jobs:
cloudRunnerTests: true
versioning: None
CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
buildTargetTests:
name: Build Tests - Targets
if: github.event.event_type != 'pull_request_target'
localBuildTests:
name: Local Build Target Tests
runs-on: ubuntu-latest
strategy:
fail-fast: false
@ -93,7 +101,7 @@ jobs:
#- k8s
targetPlatform:
- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
# - StandaloneWindows64 # Build a Windows 64-bit standalone.
- StandaloneWindows64 # Build a Windows 64-bit standalone.
- StandaloneLinux64 # Build a Linux 64-bit standalone.
- WebGL # WebGL.
- iOS # Build an iOS player.
@ -102,21 +110,7 @@ jobs:
- name: Checkout (default)
uses: actions/checkout@v2
with:
lfs: true
- uses: google-github-actions/setup-gcloud@v0
with:
version: '288.0.0'
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
- name: Get GKE cluster credentials
run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-2
lfs: false
- run: yarn
- uses: ./
id: unity-build
@ -130,82 +124,10 @@ jobs:
gitPrivateToken: ${{ secrets.GITHUB_TOKEN }}
targetPlatform: ${{ matrix.targetPlatform }}
cloudRunnerCluster: ${{ matrix.cloudRunnerCluster }}
customStepFiles: aws-s3-upload-build,aws-s3-pull-cache,aws-s3-upload-cache
- run: |
aws s3 cp s3://game-ci-test-storage/cloud-runner-cache/${{ steps.unity-build.outputs.CACHE_KEY }}/build/build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4 build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4
ls
- run: yarn run cli -m list-resources
env:
cloudRunnerTests: true
CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
cp ./cloud-runner-cache/cache/${{ steps.unity-build.outputs.CACHE_KEY }}/build/${{ steps.unity-build.outputs.BUILD_ARTIFACT }} ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.cloudRunnerCluster }} Build (${{ matrix.targetPlatform }})
path: build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4
retention-days: 14
buildTests:
name: Build Tests - Providers
if: github.event.event_type != 'pull_request_target'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
cloudRunnerCluster:
- aws
- local-docker
- k8s
targetPlatform:
#- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
- StandaloneWindows64 # Build a Windows 64-bit standalone.
#- StandaloneLinux64 # Build a Linux 64-bit standalone.
#- WebGL # WebGL.
#- iOS # Build an iOS player.
#- Android # Build an Android .apk.
# steps
steps:
- name: Checkout (default)
uses: actions/checkout@v2
with:
lfs: true
- uses: google-github-actions/setup-gcloud@v0
with:
version: '288.0.0'
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
- name: Get GKE cluster credentials
run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-2
- run: yarn
- uses: ./
id: unity-build
timeout-minutes: 90
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
with:
cloudRunnerTests: true
versioning: None
projectPath: test-project
gitPrivateToken: ${{ secrets.GITHUB_TOKEN }}
targetPlatform: ${{ matrix.targetPlatform }}
cloudRunnerCluster: ${{ matrix.cloudRunnerCluster }}
customStepFiles: aws-s3-upload-build,aws-s3-pull-cache,aws-s3-upload-cache
- run: |
aws s3 cp s3://game-ci-test-storage/cloud-runner-cache/${{ steps.unity-build.outputs.CACHE_KEY }}/build/build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4 build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4
ls
- run: yarn run cli -m list-resources
if: always()
env:
cloudRunnerTests: true
CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.cloudRunnerCluster }} Build (${{ matrix.targetPlatform }})
path: build-${{ steps.unity-build.outputs.BUILD_GUID }}.tar.lz4
path: ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
retention-days: 14

View File

@ -83,6 +83,10 @@ inputs:
required: false
default: ''
description: '[CloudRunner] Github private token to pull from github'
githubOwner:
required: false
default: ''
description: '[CloudRunner] GitHub owner name or organization/team name'
chownFilesTo:
required: false
default: ''

2102
dist/index.js generated vendored

File diff suppressed because it is too large

2
dist/index.js.map generated vendored

File diff suppressed because one or more lines are too long

410
dist/licenses.txt generated vendored
View File

@ -264,6 +264,156 @@ Apache-2.0
limitations under the License.
@octokit/auth-token
MIT
The MIT License
Copyright (c) 2019 Octokit contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@octokit/core
MIT
The MIT License
Copyright (c) 2019 Octokit contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@octokit/endpoint
MIT
The MIT License
Copyright (c) 2018 Octokit contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@octokit/graphql
MIT
The MIT License
Copyright (c) 2018 Octokit contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@octokit/request
MIT
The MIT License
Copyright (c) 2018 Octokit contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@octokit/request-error
MIT
The MIT License
Copyright (c) 2019 Octokit contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@panva/asn1.js
MIT
The MIT License (MIT)
@ -876,6 +1026,211 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
before-after-hook
Apache-2.0
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Gregor Martynus and other contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
brace-expansion
MIT
MIT License
@ -1359,6 +1714,25 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
deprecation
ISC
The ISC License
Copyright (c) Gregor Martynus and contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
domexception
MIT
MIT License
@ -2126,6 +2500,31 @@ PERFORMANCE OF THIS SOFTWARE.
is-plain-object
MIT
The MIT License (MIT)
Copyright (c) 2014-2017, Jon Schlinkert.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
is-potential-custom-element-name
MIT
Copyright Mathias Bynens <https://mathiasbynens.be/>
@ -4151,6 +4550,17 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
universal-user-agent
ISC
# [ISC License](https://spdx.org/licenses/ISC)
Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m)
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
universalify
MIT
(The MIT License)

View File

@ -70,6 +70,7 @@ class BuildParameters {
public useLz4Compression!: boolean;
public garbageCollectionMaxAge!: number;
public constantGarbageCollection!: boolean;
public githubChecks!: boolean;
static async create(): Promise<BuildParameters> {
const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
@ -153,6 +154,7 @@ class BuildParameters {
maxRetainedWorkspaces: CloudRunnerOptions.maxRetainedWorkspaces,
constantGarbageCollection: CloudRunnerOptions.constantGarbageCollection,
garbageCollectionMaxAge: CloudRunnerOptions.garbageCollectionMaxAge,
githubChecks: CloudRunnerOptions.githubChecks,
};
}

View File

@ -109,6 +109,25 @@ export class Cli {
return await CloudRunner.run(buildParameter, baseImage.toString());
}
@CliFunction(`async-workflow`, `runs a cloud runner build`)
public static async asyncronousWorkflow(): Promise<string> {
const buildParameter = await BuildParameters.create();
const baseImage = new ImageTag(buildParameter);
return await CloudRunner.run(buildParameter, baseImage.toString());
}
@CliFunction(`checks-update`, `runs a cloud runner build`)
public static async checksUpdate() {
const input = JSON.parse(process.env.CHECKS_UPDATE || ``);
core.info(`Checks Update ${process.env.CHECKS_UPDATE}`);
if (input.mode === `create`) {
throw new Error(`Not supported: only use update`);
} else if (input.mode === `update`) {
await GitHub.updateGitHubCheckRequest(input.data);
}
}
@CliFunction(`garbage-collect`, `runs garbage collection`)
public static async GarbageCollect(): Promise<string> {
const buildParameter = await BuildParameters.create();
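
For context, the new `checks-update` CLI mode is driven entirely by the CHECKS_UPDATE environment variable: a JSON payload with a `mode` field (`update` is the only supported value, `create` throws) and a `data` object handed to GitHub.updateGitHubCheckRequest. A hedged sketch of a caller follows; the exact shape of `data` is not shown in this diff, so it is left empty here.

```yaml
# Sketch, assuming the checks workflow passes the payload via env (contents of "data" not shown in this diff):
- run: yarn run cli -m checks-update
  timeout-minutes: 180
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    CHECKS_UPDATE: '{ "mode": "update", "data": { } }'
```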

View File

@ -56,6 +56,21 @@ class CloudRunnerOptions {
return CloudRunnerOptions.getInput('region') || 'eu-west-2';
}
// ### ### ###
// GitHub parameters
// ### ### ###
static get githubChecks(): boolean {
return CloudRunnerOptions.getInput('githubChecks') || false;
}
static get githubOwner() {
return CloudRunnerOptions.getInput('githubOwner') || CloudRunnerOptions.githubRepo.split(`/`)[0] || false;
}
static get githubRepoName() {
return CloudRunnerOptions.getInput('githubRepoName') || CloudRunnerOptions.githubRepo.split(`/`)[1] || false;
}
// ### ### ###
// Git syncronization parameters
// ### ### ###
@ -220,19 +235,31 @@ class CloudRunnerOptions {
}
static get watchCloudRunnerToEnd(): boolean {
return (CloudRunnerOptions.getInput(`watchToEnd`) || true) !== 'false';
if (CloudRunnerOptions.asyncCloudRunner) {
return false;
}
return CloudRunnerOptions.getInput(`watchToEnd`) || true;
}
static get asyncCloudRunner(): boolean {
return (CloudRunnerOptions.getInput('asyncCloudRunner') || `false`) === `true` || false;
}
public static get useSharedLargePackages(): boolean {
return (CloudRunnerOptions.getInput(`useSharedLargePackages`) || 'false') !== 'false';
return (CloudRunnerOptions.getInput(`useSharedLargePackages`) || 'false') === 'true';
}
public static get useSharedBuilder(): boolean {
return (CloudRunnerOptions.getInput(`useSharedBuilder`) || true) !== 'false';
return (CloudRunnerOptions.getInput(`useSharedBuilder`) || 'true') === 'true';
}
public static get useLz4Compression(): boolean {
return (CloudRunnerOptions.getInput(`useLz4Compression`) || true) !== false;
return (CloudRunnerOptions.getInput(`useLz4Compression`) || 'false') === 'true';
}
public static get useCleanupCron(): boolean {
return (CloudRunnerOptions.getInput(`useCleanupCron`) || 'true') === 'true';
}
// ### ### ###
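
Read together, the toggles in this hunk resolve to the defaults summarized below (a derived summary, not code from the repo); note that useLz4Compression now defaults off, matching the "disable lz4 compression by default" commits above.

```yaml
# Effective defaults after this hunk (keys are the getInput names read by the getters above):
githubChecks: false           # GitHub checks integration off unless enabled
# githubOwner / githubRepoName default to the two segments of githubRepo
asyncCloudRunner: false
watchToEnd: true              # forced to false whenever asyncCloudRunner is true
useSharedLargePackages: false
useSharedBuilder: true
useLz4Compression: false      # previously defaulted to true
useCleanupCron: true
```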

View File

@ -23,6 +23,7 @@ class CloudRunner {
private static cloudRunnerEnvironmentVariables: CloudRunnerEnvironmentVariable[];
static lockedWorkspace: string | undefined;
public static readonly retainedWorkspacePrefix: string = `retained-workspace`;
public static githubCheckId;
public static setup(buildParameters: BuildParameters) {
CloudRunnerLogger.setup();
CloudRunnerLogger.log(`Setting up cloud runner`);
@ -41,6 +42,12 @@ class CloudRunner {
// CloudRunnerLogger.log(`Cloud Runner output ${Input.ToEnvVarFormat(element)} = ${buildParameters[element]}`);
core.setOutput(Input.ToEnvVarFormat(element), buildParameters[element]);
}
core.setOutput(
Input.ToEnvVarFormat(`buildArtifact`),
`build-${CloudRunner.buildParameters.buildGuid}.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
}`,
);
}
}
@ -68,6 +75,8 @@ class CloudRunner {
static async run(buildParameters: BuildParameters, baseImage: string) {
CloudRunner.setup(buildParameters);
try {
CloudRunner.githubCheckId = await GitHub.createGitHubCheck(CloudRunner.buildParameters.buildGuid);
if (buildParameters.retainWorkspace) {
CloudRunner.lockedWorkspace = `${CloudRunner.retainedWorkspacePrefix}-${CloudRunner.buildParameters.buildGuid}`;
@ -97,6 +106,7 @@ class CloudRunner {
CloudRunner.defaultSecrets,
);
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
await GitHub.updateGitHubCheck(CloudRunner.buildParameters.buildGuid, CloudRunner.buildParameters.buildGuid);
const output = await new WorkflowCompositionRoot().run(
new CloudRunnerStepState(baseImage, CloudRunner.cloudRunnerEnvironmentVariables, CloudRunner.defaultSecrets),
);
@ -109,6 +119,7 @@ class CloudRunner {
);
CloudRunnerLogger.log(`Cleanup complete`);
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
await GitHub.updateGitHubCheck(CloudRunner.buildParameters.buildGuid, `success`, `success`, `completed`);
if (CloudRunner.buildParameters.retainWorkspace) {
await SharedWorkspaceLocking.ReleaseWorkspace(
@ -125,6 +136,7 @@ class CloudRunner {
return output;
} catch (error) {
await GitHub.updateGitHubCheck(CloudRunner.buildParameters.buildGuid, error, `failure`, `completed`);
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
await CloudRunnerError.handleException(error, CloudRunner.buildParameters, CloudRunner.defaultSecrets);
throw error;

View File

@ -5,6 +5,9 @@ import { AWSCloudFormationTemplates } from './aws-cloud-formation-templates';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import { AWSError } from './aws-error';
import CloudRunner from '../../cloud-runner';
import { CleanupCronFormation } from './cloud-formations/cleanup-cron-formation';
import CloudRunnerOptions from '../../cloud-runner-options';
import { TaskDefinitionFormation } from './cloud-formations/task-definition-formation';
export class AWSJobStack {
private baseStackName: string;
@ -38,6 +41,13 @@ export class AWSJobStack {
`ContainerMemory:
Default: ${Number.parseInt(memory)}`,
);
if (CloudRunnerOptions.watchCloudRunnerToEnd) {
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
taskDefCloudFormation,
'# template resources logstream',
TaskDefinitionFormation.streamLogs,
);
}
for (const secret of secrets) {
secret.ParameterKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${secret.ParameterKey.replace(
/[^\dA-Za-z]/g,
@ -57,7 +67,7 @@ export class AWSJobStack {
);
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
taskDefCloudFormation,
'p2 - secret',
'# template resources secrets',
AWSCloudFormationTemplates.getSecretTemplate(`${secret.ParameterKey}`),
);
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
@ -132,14 +142,53 @@ export class AWSJobStack {
};
try {
CloudRunnerLogger.log(`Creating job aws formation ${taskDefStackName}`);
await CF.createStack(createStackInput).promise();
CloudRunnerLogger.log('Creating cloud runner job');
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
} catch (error) {
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
throw error;
}
const createCleanupStackInput: SDK.CloudFormation.CreateStackInput = {
StackName: `${taskDefStackName}-cleanup`,
TemplateBody: CleanupCronFormation.formation,
Capabilities: ['CAPABILITY_IAM'],
Parameters: [
{
ParameterKey: 'StackName',
ParameterValue: taskDefStackName,
},
{
ParameterKey: 'DeleteStackName',
ParameterValue: `${taskDefStackName}-cleanup`,
},
{
ParameterKey: 'TTL',
ParameterValue: `1080`,
},
{
ParameterKey: 'BUILDGUID',
ParameterValue: CloudRunner.buildParameters.buildGuid,
},
{
ParameterKey: 'EnvironmentName',
ParameterValue: this.baseStackName,
},
],
};
if (CloudRunnerOptions.useCleanupCron) {
try {
CloudRunnerLogger.log(`Creating job cleanup formation`);
CF.createStack(createCleanupStackInput).promise();
// await CF.waitFor('stackCreateComplete', { StackName: createCleanupStackInput.StackName }).promise();
} catch (error) {
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
throw error;
}
}
const taskDefResources = (
await CF.describeStackResources({
StackName: taskDefStackName,

View File

@ -9,10 +9,12 @@ import CloudRunner from '../../cloud-runner';
import { CloudRunnerCustomHooks } from '../../services/cloud-runner-custom-hooks';
import { FollowLogStreamService } from '../../services/follow-log-stream-service';
import CloudRunnerOptions from '../../cloud-runner-options';
import GitHub from '../../../github';
class AWSTaskRunner {
public static ECS: AWS.ECS;
public static Kinesis: AWS.Kinesis;
private static readonly encodedUnderscore = `$252F`;
static async runTask(
taskDef: CloudRunnerAWSTaskDef,
environment: CloudRunnerEnvironmentVariable[],
@ -56,7 +58,9 @@ class AWSTaskRunner {
CloudRunnerLogger.log('Cloud runner job is starting');
await AWSTaskRunner.waitUntilTaskRunning(taskArn, cluster);
CloudRunnerLogger.log(
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(cluster, taskArn))?.lastStatus}`,
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(cluster, taskArn))?.lastStatus} Watch:${
CloudRunnerOptions.watchCloudRunnerToEnd
} Async:${CloudRunnerOptions.asyncCloudRunner}`,
);
if (!CloudRunnerOptions.watchCloudRunnerToEnd) {
const shouldCleanup: boolean = false;
@ -125,8 +129,9 @@ class AWSTaskRunner {
const stream = await AWSTaskRunner.getLogStream(kinesisStreamName);
let iterator = await AWSTaskRunner.getLogIterator(stream);
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${Input.region}#logsV2:log-groups/log-group/${CloudRunner.buildParameters.awsBaseStackName}-${CloudRunner.buildParameters.buildGuid}`;
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${Input.region}#logsV2:log-groups/log-group/${CloudRunner.buildParameters.awsBaseStackName}${AWSTaskRunner.encodedUnderscore}${CloudRunner.buildParameters.awsBaseStackName}-${CloudRunner.buildParameters.buildGuid}`;
CloudRunnerLogger.log(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`);
await GitHub.updateGitHubCheck(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`, ``);
let shouldReadLogs = true;
let shouldCleanup = true;
let timestamp: number = 0;

View File

@ -47,6 +47,11 @@ Resources:
EnableDnsHostnames: true
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
MainBucket:
Type: "AWS::S3::Bucket"
Properties:
BucketName: !Ref EnvironmentName
EFSServerSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:

View File

@ -1,4 +1,5 @@
AWSTemplateFormatVersion: '2010-09-09'
export class CleanupCronFormation {
public static readonly formation: string = `AWSTemplateFormatVersion: '2010-09-09'
Description: Schedule automatic deletion of CloudFormation stacks
Metadata:
AWS::CloudFormation::Interface:
@ -64,10 +65,10 @@ Resources:
stackName: !Ref 'StackName'
deleteStackName: !Ref 'DeleteStackName'
Handler: "index.handler"
Runtime: "python3.6"
Runtime: "python3.9"
Timeout: "5"
Role:
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
'Fn::ImportValue': !Sub '\${EnvironmentName}:DeleteCFNLambdaExecutionRole'
DeleteStackEventRule:
DependsOn:
- DeleteCFNLambda
@ -130,10 +131,10 @@ Resources:
status = cfnresponse.FAILED
cfnresponse.send(event, context, status, {}, None)
Handler: "index.handler"
Runtime: "python3.6"
Runtime: "python3.9"
Timeout: "5"
Role:
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
'Fn::ImportValue': !Sub '\${EnvironmentName}:DeleteCFNLambdaExecutionRole'
GenerateCronExpression:
Type: "Custom::GenerateCronExpression"
Version: "1.0"
@ -141,3 +142,5 @@ Resources:
Name: !Join [ "", [ 'GenerateCronExpression', !Ref BUILDGUID ] ]
ServiceToken: !GetAtt GenerateCronExpLambda.Arn
ttl: !Ref 'TTL'
`;
}

View File

@ -76,32 +76,10 @@ Resources:
Metadata:
'AWS::CloudFormation::Designer':
id: aece53ae-b82d-4267-bc16-ed964b05db27
SubscriptionFilter:
Type: 'AWS::Logs::SubscriptionFilter'
Properties:
FilterPattern: ''
RoleArn:
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:CloudWatchIAMRole'
LogGroupName: !Ref LogGroupName
DestinationArn:
'Fn::GetAtt':
- KinesisStream
- Arn
Metadata:
'AWS::CloudFormation::Designer':
id: 7f809e91-9e5d-4678-98c1-c5085956c480
DependsOn:
- LogGroup
- KinesisStream
KinesisStream:
Type: 'AWS::Kinesis::Stream'
Properties:
Name: !Ref ServiceName
ShardCount: 1
Metadata:
'AWS::CloudFormation::Designer':
id: c6f18447-b879-4696-8873-f981b2cedd2b
# template secrets p2 - secret
# template resources secrets
# template resources logstream
TaskDefinition:
Type: 'AWS::ECS::TaskDefinition'
Properties:
@ -156,5 +134,32 @@ Resources:
awslogs-stream-prefix: !Ref ServiceName
DependsOn:
- LogGroup
`;
public static streamLogs = `
SubscriptionFilter:
Type: 'AWS::Logs::SubscriptionFilter'
Properties:
FilterPattern: ''
RoleArn:
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:CloudWatchIAMRole'
LogGroupName: !Ref LogGroupName
DestinationArn:
'Fn::GetAtt':
- KinesisStream
- Arn
Metadata:
'AWS::CloudFormation::Designer':
id: 7f809e91-9e5d-4678-98c1-c5085956c480
DependsOn:
- LogGroup
- KinesisStream
KinesisStream:
Type: 'AWS::Kinesis::Stream'
Properties:
Name: !Ref ServiceName
ShardCount: 1
Metadata:
'AWS::CloudFormation::Designer':
id: c6f18447-b879-4696-8873-f981b2cedd2b
`;
}

View File

@ -13,6 +13,7 @@ import { GarbageCollectionService } from './services/garbage-collection-service'
import { ProviderResource } from '../provider-resource';
import { ProviderWorkflow } from '../provider-workflow';
import { TaskService } from './services/task-service';
import CloudRunnerOptions from '../../cloud-runner-options';
class AWSBuildEnvironment implements ProviderInterface {
private baseStackName: string;
@ -133,6 +134,11 @@ class AWSBuildEnvironment implements ProviderInterface {
await CF.deleteStack({
StackName: taskDef.taskDefStackName,
}).promise();
if (CloudRunnerOptions.useCleanupCron) {
await CF.deleteStack({
StackName: `${taskDef.taskDefStackName}-cleanup`,
}).promise();
}
await CF.waitFor('stackDeleteComplete', {
StackName: taskDef.taskDefStackName,

View File

@ -4,6 +4,7 @@ import CloudRunnerLogger from '../../../services/cloud-runner-logger';
import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
import AwsTaskRunner from '../aws-task-runner';
import { ListObjectsRequest } from 'aws-sdk/clients/s3';
import CloudRunner from '../../../cloud-runner';
export class TaskService {
static async watch() {
@ -158,7 +159,7 @@ export class TaskService {
process.env.AWS_REGION = Input.region;
const s3 = new AWS.S3();
const listRequest: ListObjectsRequest = {
Bucket: `game-ci-test-storage`,
Bucket: CloudRunner.buildParameters.awsBaseStackName,
};
const results = await s3.listObjects(listRequest).promise();
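
This is the "parameterize s3" change in practice: the hard-coded game-ci-test-storage bucket is replaced by a bucket named after the base stack (the base-stack formation above now declares a MainBucket with BucketName: !Ref EnvironmentName). A sketch of the resulting layout, using the AWS_BASE_STACK_NAME value from this PR's CI workflow as the example value:

```yaml
# Sketch: object layout used by the premade aws-s3-* steps after this change
# (bucket name follows AWS_BASE_STACK_NAME / awsBaseStackName; example value from the CI workflow diff above).
env:
  AWS_BASE_STACK_NAME: game-ci-team-pipelines
# cache/build artifacts are read and written at (see the custom-steps diff below):
#   s3://game-ci-team-pipelines/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID.tar
#   (with a .lz4 suffix when useLz4Compression is enabled)
```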

View File

@ -47,10 +47,18 @@ class LocalDockerCloudRunner implements ProviderInterface {
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {
const { workspace } = Action;
if (fs.existsSync(`${workspace}/cloud-runner-cache/cache/build/build-${buildParameters.buildGuid}.tar.lz4`)) {
if (
fs.existsSync(
`${workspace}/cloud-runner-cache/cache/build/build-${buildParameters.buildGuid}.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
}`,
)
) {
await CloudRunnerSystem.Run(`ls ${workspace}/cloud-runner-cache/cache/build/`);
await CloudRunnerSystem.Run(
`rm -r ${workspace}/cloud-runner-cache/cache/build/build-${buildParameters.buildGuid}.tar.lz4`,
`rm -r ${workspace}/cloud-runner-cache/cache/build/build-${buildParameters.buildGuid}.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
}`,
);
}
}

View File

@ -154,6 +154,7 @@ class Kubernetes implements ProviderInterface {
} else {
CloudRunnerLogger.log('Pod still running, recovering stream...');
}
await this.cleanupTaskResources();
} catch (error: any) {
let errorParsed;
try {
@ -161,10 +162,16 @@ class Kubernetes implements ProviderInterface {
} catch {
errorParsed = error;
}
const reason = errorParsed.reason || errorParsed.response?.body?.reason || ``;
const errorMessage = errorParsed.message || ``;
const continueStreaming = reason === `NotFound` || errorMessage.includes(`dial timeout, backstop`);
const reason = errorParsed.reason || errorParsed.response?.body?.reason || ``;
const errorMessage = errorParsed.message || reason;
const continueStreaming =
errorMessage.includes(`dial timeout, backstop`) ||
errorMessage.includes(`HttpError: HTTP request failed`) ||
errorMessage.includes(`an error occurred when try to find container`) ||
errorMessage.includes(`not found`) ||
errorMessage.includes(`Not Found`);
if (continueStreaming) {
CloudRunnerLogger.log('Log Stream Container Not Found');
await new Promise((resolve) => resolve(5000));
@ -175,7 +182,6 @@ class Kubernetes implements ProviderInterface {
}
}
}
await this.cleanupTaskResources();
return output;
} catch (error) {

View File

@ -158,6 +158,8 @@ class KubernetesJobSpecFactory {
},
};
job.spec.template.spec.containers[0].resources.requests[`ephemeral-storage`] = '5Gi';
return job;
}
}

View File

@ -4,8 +4,11 @@ class KubernetesPods {
public static async IsPodRunning(podName: string, namespace: string, kubeClient: CoreV1Api) {
const pods = (await kubeClient.listNamespacedPod(namespace)).body.items.filter((x) => podName === x.metadata?.name);
const running = pods.length > 0 && (pods[0].status?.phase === `Running` || pods[0].status?.phase === `Pending`);
const phase = pods[0].status?.phase || 'undefined status';
const phase = pods[0]?.status?.phase || 'undefined status';
CloudRunnerLogger.log(`Getting pod status: ${phase}`);
if (phase === `Failed`) {
throw new Error(`K8s pod failed`);
}
return running;
}

View File

@ -46,7 +46,11 @@ export class Caching {
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheArtifactName: string) {
cacheArtifactName = cacheArtifactName.replace(' ', '');
const startPath = process.cwd();
const compressionSuffix = CloudRunner.buildParameters.useLz4Compression ? '.lz4' : '';
let compressionSuffix = '';
if (CloudRunner.buildParameters.useLz4Compression === true) {
compressionSuffix = `.lz4`;
}
CloudRunnerLogger.log(`Compression: ${CloudRunner.buildParameters.useLz4Compression} ${compressionSuffix}`);
try {
if (!(await fileExists(cacheFolder))) {
await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
@ -99,9 +103,12 @@ export class Caching {
}
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheArtifactName: string = ``) {
cacheArtifactName = cacheArtifactName.replace(' ', '');
const compressionSuffix = CloudRunner.buildParameters.useLz4Compression ? '.lz4' : '';
let compressionSuffix = '';
if (CloudRunner.buildParameters.useLz4Compression === true) {
compressionSuffix = `.lz4`;
}
const startPath = process.cwd();
RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
RemoteClientLogger.log(`Caching for (lz4 ${compressionSuffix}) ${path.basename(destinationFolder)}`);
try {
if (!(await fileExists(cacheFolder))) {
await fs.promises.mkdir(cacheFolder);
@ -153,6 +160,7 @@ export class Caching {
RemoteClientLogger.logWarning(
`cache item ${cacheArtifactName}.tar${compressionSuffix} doesn't exist ${destinationFolder}`,
);
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
}
}

View File

@ -5,7 +5,8 @@ import { RemoteClientLogger } from '../remote-client/remote-client-logger';
import path from 'path';
import CloudRunnerOptions from '../cloud-runner-options';
import * as fs from 'fs';
import CloudRunnerLogger from './cloud-runner-logger';
// import CloudRunnerLogger from './cloud-runner-logger';
export class CloudRunnerCustomHooks {
// TODO also accept hooks as yaml files in the repo
@ -83,7 +84,7 @@ export class CloudRunnerCustomHooks {
// if (CloudRunner.buildParameters?.cloudRunnerIntegrationTests) {
CloudRunnerLogger.log(`Parsing build hooks: ${steps}`);
// CloudRunnerLogger.log(`Parsing build hooks: ${steps}`);
// }
const isArray = steps.replace(/\s/g, ``)[0] === `-`;

View File

@ -1,5 +1,4 @@
import YAML from 'yaml';
import CloudRunnerSecret from './cloud-runner-secret';
import CloudRunner from '../cloud-runner';
import * as core from '@actions/core';
import { CustomWorkflow } from '../workflows/custom-workflow';
@ -9,6 +8,7 @@ import * as fs from 'fs';
import Input from '../../input';
import CloudRunnerOptions from '../cloud-runner-options';
import CloudRunnerLogger from './cloud-runner-logger';
import { CustomStep } from './custom-step';
export class CloudRunnerCustomSteps {
static GetCustomStepsFromFiles(hookLifecycle: string): CustomStep[] {
@ -43,8 +43,14 @@ export class CloudRunnerCustomSteps {
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
aws configure set region $AWS_DEFAULT_REGION --profile default
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar.lz4 s3://game-ci-test-storage/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID.tar.lz4
rm /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar.lz4
aws s3 cp /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
} s3://${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
}
rm /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
}
secrets:
- name: awsAccessKeyId
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
@ -52,6 +58,62 @@ export class CloudRunnerCustomSteps {
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
- name: awsDefaultRegion
value: ${process.env.AWS_REGION || ``}
- name: aws-s3-pull-build
image: amazon/aws-cli
commands: |
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
aws configure set region $AWS_DEFAULT_REGION --profile default
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/ || true
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/build || true
aws s3 cp s3://${
CloudRunner.buildParameters.awsBaseStackName
}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
} /data/cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
}
secrets:
- name: awsAccessKeyId
- name: awsSecretAccessKey
- name: awsDefaultRegion
- name: BUILD_GUID_TARGET
- name: steam-deploy-client
image: steamcmd/steamcmd
commands: |
apt-get update
apt-get install -y curl tar coreutils git tree > /dev/null
curl -s https://gist.githubusercontent.com/frostebite/1d56f5505b36b403b64193b7a6e54cdc/raw/fa6639ed4ef750c4268ea319d63aa80f52712ffb/deploy-client-steam.sh | bash
secrets:
- name: STEAM_USERNAME
- name: STEAM_PASSWORD
- name: STEAM_APPID
- name: STEAM_SSFN_FILE_NAME
- name: STEAM_SSFN_FILE_CONTENTS
- name: STEAM_CONFIG_VDF_1
- name: STEAM_CONFIG_VDF_2
- name: STEAM_CONFIG_VDF_3
- name: STEAM_CONFIG_VDF_4
- name: BUILD_GUID_TARGET
- name: RELEASE_BRANCH
- name: steam-deploy-project
image: steamcmd/steamcmd
commands: |
apt-get update
apt-get install -y curl tar coreutils git tree > /dev/null
curl -s https://gist.githubusercontent.com/frostebite/969da6a41002a0e901174124b643709f/raw/02403e53fb292026cba81ddcf4ff35fc1eba111d/steam-deploy-project.sh | bash
secrets:
- name: STEAM_USERNAME
- name: STEAM_PASSWORD
- name: STEAM_APPID
- name: STEAM_SSFN_FILE_NAME
- name: STEAM_SSFN_FILE_CONTENTS
- name: STEAM_CONFIG_VDF_1
- name: STEAM_CONFIG_VDF_2
- name: STEAM_CONFIG_VDF_3
- name: STEAM_CONFIG_VDF_4
- name: BUILD_GUID_2
- name: RELEASE_BRANCH
- name: aws-s3-upload-cache
image: amazon/aws-cli
hook: after
@ -59,9 +121,13 @@ export class CloudRunnerCustomSteps {
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
aws configure set region $AWS_DEFAULT_REGION --profile default
aws s3 cp --recursive /data/cache/$CACHE_KEY/lfs s3://game-ci-test-storage/cloud-runner-cache/$CACHE_KEY/lfs
aws s3 cp --recursive /data/cache/$CACHE_KEY/lfs s3://${
CloudRunner.buildParameters.awsBaseStackName
}/cloud-runner-cache/$CACHE_KEY/lfs
rm -r /data/cache/$CACHE_KEY/lfs
aws s3 cp --recursive /data/cache/$CACHE_KEY/Library s3://game-ci-test-storage/cloud-runner-cache/$CACHE_KEY/Library
aws s3 cp --recursive /data/cache/$CACHE_KEY/Library s3://${
CloudRunner.buildParameters.awsBaseStackName
}/cloud-runner-cache/$CACHE_KEY/Library
rm -r /data/cache/$CACHE_KEY/Library
secrets:
- name: awsAccessKeyId
@ -77,13 +143,13 @@ export class CloudRunnerCustomSteps {
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
aws configure set region $AWS_DEFAULT_REGION --profile default
aws s3 ls game-ci-test-storage/cloud-runner-cache/ || true
aws s3 ls game-ci-test-storage/cloud-runner-cache/$CACHE_KEY/ || true
BUCKET1="game-ci-test-storage/cloud-runner-cache/$CACHE_KEY/Library/"
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/ || true
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/ || true
BUCKET1="${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/Library/"
aws s3 ls $BUCKET1 || true
OBJECT1="$(aws s3 ls $BUCKET1 | sort | tail -n 1 | awk '{print $4}' || '')"
aws s3 cp s3://$BUCKET1$OBJECT1 /data/cache/$CACHE_KEY/Library/ || true
BUCKET2="game-ci-test-storage/cloud-runner-cache/$CACHE_KEY/lfs/"
BUCKET2="${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/lfs/"
aws s3 ls $BUCKET2 || true
OBJECT2="$(aws s3 ls $BUCKET2 | sort | tail -n 1 | awk '{print $4}' || '')"
aws s3 cp s3://$BUCKET2$OBJECT2 /data/cache/$CACHE_KEY/lfs/ || true
@ -135,21 +201,21 @@ export class CloudRunnerCustomSteps {
if (steps === '') {
return [];
}
// if (CloudRunner.buildParameters?.cloudRunnerIntegrationTests) {
// CloudRunnerLogger.log(`Parsing build steps: ${steps}`);
// }
const isArray = steps.replace(/\s/g, ``)[0] === `-`;
if (CloudRunner.buildParameters?.cloudRunnerDebug) {
CloudRunnerLogger.log(`Parsing: ${steps}`);
}
const object: CustomStep[] = isArray ? YAML.parse(steps) : [YAML.parse(steps)];
for (const step of object) {
CloudRunnerCustomSteps.ConvertYamlSecrets(step);
if (step.secrets === undefined) {
step.secrets = [];
} else {
for (const secret of step.secrets) {
if (secret.ParameterValue === undefined && process.env[secret.EnvironmentVariable] !== undefined) {
if (CloudRunner.buildParameters?.cloudRunnerDebug) {
CloudRunnerLogger.log(`Injecting custom step ${step.name} from env var ${secret.ParameterKey}`);
}
secret.ParameterValue = process.env[secret.ParameterKey] || ``;
}
}
}
if (step.image === undefined) {
step.image = `ubuntu`;
@ -201,10 +267,3 @@ export class CloudRunnerCustomSteps {
return output;
}
}
export class CustomStep {
public commands;
public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
public name;
public image: string = `ubuntu`;
public hook!: string;
}
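
A headline change in this file is that a custom step secret may omit its value and have it resolved from the environment at parse time. A simplified, self-contained sketch of that fallback (the types are not the exact CloudRunnerSecret class):

```typescript
// Hedged sketch of the env-var fallback for custom step secrets: a secret declared by
// name only picks its value up from the matching environment variable when parsed.
interface StepSecret {
  ParameterKey: string;
  EnvironmentVariable: string;
  ParameterValue?: string;
}

function resolveFromEnvironment(secret: StepSecret): StepSecret {
  if (secret.ParameterValue === undefined && process.env[secret.EnvironmentVariable] !== undefined) {
    secret.ParameterValue = process.env[secret.EnvironmentVariable] || ``;
  }
  return secret;
}

process.env.STEAM_USERNAME = 'example-user'; // hypothetical value for illustration
const resolved = resolveFromEnvironment({
  ParameterKey: 'STEAM_USERNAME',
  EnvironmentVariable: 'STEAM_USERNAME',
});
console.log(resolved.ParameterValue); // example-user
```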

View File

@ -0,0 +1,9 @@
import CloudRunnerSecret from './cloud-runner-secret';
export class CustomStep {
public commands;
public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
public name;
public image: string = `ubuntu`;
public hook!: string;
}

View File

@ -2,6 +2,7 @@ import CloudRunnerLogger from './cloud-runner-logger';
import * as core from '@actions/core';
import CloudRunner from '../cloud-runner';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import GitHub from '../../github';
export class FollowLogStreamService {
public static handleIteration(message, shouldReadLogs, shouldCleanup, output) {
@ -9,11 +10,14 @@ export class FollowLogStreamService {
CloudRunnerLogger.log('End of log transmission received');
shouldReadLogs = false;
} else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
GitHub.updateGitHubCheck(`Library was not found, importing new Library`, ``);
core.warning('LIBRARY NOT FOUND!');
core.setOutput('library-found', 'false');
} else if (message.includes('Build succeeded')) {
GitHub.updateGitHubCheck(`Build succeeded`, `Build succeeded`);
core.setOutput('build-result', 'success');
} else if (message.includes('Build fail')) {
GitHub.updateGitHubCheck(`Build failed`, `Build failed`);
core.setOutput('build-result', 'failed');
core.setFailed('unity build failed');
core.error('BUILD FAILED!');

View File

@ -5,8 +5,12 @@ import CloudRunnerOptions from '../cloud-runner-options';
import BuildParameters from '../../build-parameters';
import CloudRunner from '../cloud-runner';
export class SharedWorkspaceLocking {
private static readonly workspaceBucketRoot = `s3://game-ci-test-storage/`;
private static readonly workspaceRoot = `${SharedWorkspaceLocking.workspaceBucketRoot}locks/`;
private static get workspaceBucketRoot() {
return `s3://${CloudRunner.buildParameters.awsBaseStackName}/`;
}
private static get workspaceRoot() {
return `${SharedWorkspaceLocking.workspaceBucketRoot}locks/`;
}
public static async GetAllWorkspaces(buildParametersContext: BuildParameters): Promise<string[]> {
if (!(await SharedWorkspaceLocking.DoesWorkspaceTopLevelExist(buildParametersContext))) {
return [];
@ -19,14 +23,11 @@ export class SharedWorkspaceLocking {
).map((x) => x.replace(`/`, ``));
}
public static async DoesWorkspaceTopLevelExist(buildParametersContext: BuildParameters) {
return (
(await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceBucketRoot}`))
.map((x) => x.replace(`/`, ``))
.includes(`locks`) &&
(await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}`))
.map((x) => x.replace(`/`, ``))
.includes(buildParametersContext.cacheKey)
);
await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceBucketRoot}`);
return (await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}`))
.map((x) => x.replace(`/`, ``))
.includes(buildParametersContext.cacheKey);
}
public static async GetAllLocks(workspace: string, buildParametersContext: BuildParameters): Promise<string[]> {
if (!(await SharedWorkspaceLocking.DoesWorkspaceExist(workspace, buildParametersContext))) {
@ -49,20 +50,27 @@ export class SharedWorkspaceLocking {
if (!CloudRunnerOptions.retainWorkspaces) {
return;
}
if (await SharedWorkspaceLocking.DoesWorkspaceTopLevelExist(buildParametersContext)) {
const workspaces = await SharedWorkspaceLocking.GetFreeWorkspaces(buildParametersContext);
CloudRunnerLogger.log(`run agent ${runId} is trying to access a workspace, free: ${JSON.stringify(workspaces)}`);
for (const element of workspaces) {
await new Promise((promise) => setTimeout(promise, 1000));
const lockResult = await SharedWorkspaceLocking.LockWorkspace(element, runId, buildParametersContext);
CloudRunnerLogger.log(`run agent: ${runId} try lock workspace: ${element} result: ${lockResult}`);
if (lockResult) {
CloudRunner.lockedWorkspace = element;
try {
if (await SharedWorkspaceLocking.DoesWorkspaceTopLevelExist(buildParametersContext)) {
const workspaces = await SharedWorkspaceLocking.GetFreeWorkspaces(buildParametersContext);
CloudRunnerLogger.log(
`run agent ${runId} is trying to access a workspace, free: ${JSON.stringify(workspaces)}`,
);
for (const element of workspaces) {
await new Promise((promise) => setTimeout(promise, 1000));
const lockResult = await SharedWorkspaceLocking.LockWorkspace(element, runId, buildParametersContext);
CloudRunnerLogger.log(`run agent: ${runId} try lock workspace: ${element} result: ${lockResult}`);
return true;
if (lockResult) {
CloudRunner.lockedWorkspace = element;
return true;
}
}
}
} catch {
return;
}
const createResult = await SharedWorkspaceLocking.CreateWorkspace(workspace, buildParametersContext, runId);
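
The workspace lock paths are now built from the awsBaseStackName build parameter via getters rather than a hard-coded bucket, so they resolve lazily after build parameters are set. A self-contained sketch of the path derivation (the stack name is hypothetical):

```typescript
// Sketch of the parameterized bucket root used for shared workspace locks.
class WorkspaceBucketPaths {
  constructor(private readonly awsBaseStackName: string) {}
  get bucketRoot(): string {
    return `s3://${this.awsBaseStackName}/`;
  }
  get locksRoot(): string {
    return `${this.bucketRoot}locks/`;
  }
}

const paths = new WorkspaceBucketPaths('my-base-stack');
console.log(paths.locksRoot); // s3://my-base-stack/locks/
```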

View File

@ -145,6 +145,7 @@ export class TaskParameterSerializer {
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_EMAIL');
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_PASSWORD');
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_LICENSE');
array = TaskParameterSerializer.tryAddInput(array, 'GIT_PRIVATE_TOKEN');
return array;
}

View File

@ -0,0 +1,33 @@
import { BuildParameters, ImageTag } from '../..';
import CloudRunner from '../cloud-runner';
import UnityVersioning from '../../unity-versioning';
import { Cli } from '../../cli/cli';
import CloudRunnerOptions from '../cloud-runner-options';
import setups from './cloud-runner-suite.test';
async function CreateParameters(overrides) {
if (overrides) Cli.options = overrides;
return BuildParameters.create();
}
describe('Cloud Runner Async Workflows', () => {
setups();
it('Responds', () => {});
if (CloudRunnerOptions.cloudRunnerDebug && CloudRunnerOptions.cloudRunnerCluster !== `local-docker`) {
it('Async Workflows', async () => {
// Setup parameters
const buildParameter = await CreateParameters({
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.read('test-project'),
asyncCloudRunner: `true`,
githubChecks: `true`,
});
const baseImage = new ImageTag(buildParameter);
// Run the job
await CloudRunner.run(buildParameter, baseImage.toString());
}, 1_000_000_000);
}
});

View File

@ -24,6 +24,14 @@ describe('Cloud Runner Custom Hooks And Steps', () => {
commands: echo "test"`;
const yamlString2 = `- hook: before
commands: echo "test"`;
const overrides = {
versioning: 'None',
projectPath: 'test-project',
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
targetPlatform: 'StandaloneLinux64',
cacheKey: `test-case-${uuidv4()}`,
};
CloudRunner.setup(await CreateParameters(overrides));
const stringObject = CloudRunnerCustomSteps.ParseSteps(yamlString);
const stringObject2 = CloudRunnerCustomSteps.ParseSteps(yamlString2);

View File

@ -6,6 +6,7 @@ import CloudRunnerLogger from '../services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';
import CloudRunnerOptions from '../cloud-runner-options';
import setups from './cloud-runner-suite.test';
import * as fs from 'fs';
async function CreateParameters(overrides) {
if (overrides) {
@ -45,6 +46,11 @@ describe('Cloud Runner Caching', () => {
expect(results).not.toContain(cachePushFail);
CloudRunnerLogger.log(`run 1 succeeded`);
if (CloudRunnerOptions.cloudRunnerCluster === `local-docker`) {
const cacheFolderExists = fs.existsSync(`cloud-runner-cache/cache/${overrides.cacheKey}`);
expect(cacheFolderExists).toBeTruthy();
}
const buildParameter2 = await CreateParameters(overrides);
buildParameter2.cacheKey = buildParameter.cacheKey;

View File

@ -6,7 +6,6 @@ import CloudRunnerLogger from '../services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';
import CloudRunnerOptions from '../cloud-runner-options';
import setups from './cloud-runner-suite.test';
import { CloudRunnerSystem } from '../services/cloud-runner-system';
import * as fs from 'fs';
import path from 'path';
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
@ -46,6 +45,11 @@ describe('Cloud Runner Retain Workspace', () => {
expect(results).toContain(buildSucceededString);
expect(results).not.toContain(cachePushFail);
if (CloudRunnerOptions.cloudRunnerCluster === `local-docker`) {
const cacheFolderExists = fs.existsSync(`cloud-runner-cache/cache/${overrides.cacheKey}`);
expect(cacheFolderExists).toBeTruthy();
}
CloudRunnerLogger.log(`run 1 succeeded`);
const buildParameter2 = await CreateParameters(overrides);
@ -84,9 +88,6 @@ describe('Cloud Runner Retain Workspace', () => {
CloudRunnerLogger.log(
`Cleaning up ./cloud-runner-cache/${path.basename(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`,
);
await CloudRunnerSystem.Run(
`rm -r ./cloud-runner-cache/${path.basename(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`,
);
}
});
}

View File

@ -38,7 +38,7 @@ describe('Cloud Runner pre-built S3 steps', () => {
expect(build2ContainsBuildSucceeded).toBeTruthy();
const results = await CloudRunnerSystem.RunAndReadLines(
`aws s3 ls s3://game-ci-test-storage/cloud-runner-cache/${buildParameter2.cacheKey}/`,
`aws s3 ls s3://${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/${buildParameter2.cacheKey}/`,
);
CloudRunnerLogger.log(results.join(`,`));
}, 1_000_000_000);

View File

@ -6,6 +6,7 @@ import { v4 as uuidv4 } from 'uuid';
import CloudRunnerOptions from '../cloud-runner-options';
import UnityVersioning from '../../unity-versioning';
import BuildParameters from '../../build-parameters';
import CloudRunner from '../cloud-runner';
async function CreateParameters(overrides) {
if (overrides) {
@ -32,6 +33,7 @@ describe('Cloud Runner Locking', () => {
const newWorkspaceName = `test-workspace-${uuidv4()}`;
const runId = uuidv4();
CloudRunner.buildParameters = buildParameters;
await SharedWorkspaceLocking.CreateWorkspace(newWorkspaceName, buildParameters);
const isExpectedUnlockedBeforeLocking =
(await SharedWorkspaceLocking.IsWorkspaceLocked(newWorkspaceName, buildParameters)) === false;

View File

@ -0,0 +1,60 @@
import CloudRunnerSecret from '../services/cloud-runner-secret';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
import CloudRunner from '../cloud-runner';
export class AsyncWorkflow {
public static async runAsyncWorkflow(
environmentVariables: CloudRunnerEnvironmentVariable[],
secrets: CloudRunnerSecret[],
): Promise<string> {
try {
CloudRunnerLogger.log(`Cloud Runner is running async mode`);
let output = '';
output += await CloudRunner.Provider.runTaskInWorkflow(
CloudRunner.buildParameters.buildGuid,
`ubuntu`,
`apt-get update > /dev/null
apt-get install -y curl tar tree npm git git-lfs jq git > /dev/null
mkdir /builder
printenv
git config --global advice.detachedHead false
git config --global filter.lfs.smudge "git-lfs smudge --skip -- %f"
git config --global filter.lfs.process "git-lfs filter-process --skip"
git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${CloudRunnerFolders.unityBuilderRepoUrl} /builder
git clone -q -b ${CloudRunner.buildParameters.branch} ${CloudRunnerFolders.targetBuildRepoUrl} /repo
cd /repo
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
./aws/install
aws --version
node /builder/dist/index.js -m async-workflow`,
`/${CloudRunnerFolders.buildVolumeFolder}`,
`/${CloudRunnerFolders.buildVolumeFolder}/`,
environmentVariables,
[
...secrets,
...[
{
ParameterKey: `AWS_ACCESS_KEY_ID`,
EnvironmentVariable: `AWS_ACCESS_KEY_ID`,
ParameterValue: process.env.AWS_ACCESS_KEY_ID || ``,
},
{
ParameterKey: `AWS_SECRET_ACCESS_KEY`,
EnvironmentVariable: `AWS_SECRET_ACCESS_KEY`,
ParameterValue: process.env.AWS_SECRET_ACCESS_KEY || ``,
},
],
],
);
return output;
} catch (error) {
throw error;
}
}
}

View File

@ -2,7 +2,8 @@ import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import { CloudRunnerCustomSteps, CustomStep } from '../services/cloud-runner-custom-steps';
import { CloudRunnerCustomSteps } from '../services/cloud-runner-custom-steps';
import { CustomStep } from '../services/custom-step';
import CloudRunner from '../cloud-runner';
export class CustomWorkflow {
@ -26,9 +27,10 @@ export class CustomWorkflow {
try {
CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
let output = '';
if (CloudRunner.buildParameters?.cloudRunnerDebug) {
CloudRunnerLogger.log(`Custom Job Description \n${JSON.stringify(buildSteps, undefined, 4)}`);
}
// if (CloudRunner.buildParameters?.cloudRunnerDebug) {
// CloudRunnerLogger.log(`Custom Job Description \n${JSON.stringify(buildSteps, undefined, 4)}`);
// }
for (const step of buildSteps) {
output += await CloudRunner.Provider.runTaskInWorkflow(
CloudRunner.buildParameters.buildGuid,

View File

@ -3,10 +3,16 @@ import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
import { BuildAutomationWorkflow } from './build-automation-workflow';
import CloudRunner from '../cloud-runner';
import CloudRunnerOptions from '../cloud-runner-options';
import { AsyncWorkflow } from './async-workflow';
export class WorkflowCompositionRoot implements WorkflowInterface {
async run(cloudRunnerStepState: CloudRunnerStepState) {
try {
if (CloudRunnerOptions.asyncCloudRunner) {
return await AsyncWorkflow.runAsyncWorkflow(cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
}
if (CloudRunner.buildParameters.customJob !== '') {
return await CustomWorkflow.runCustomJobFromString(
CloudRunner.buildParameters.customJob,
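
A simplified sketch of the new dispatch order in WorkflowCompositionRoot (the real method receives cloudRunnerStepState and awaits the selected workflow): async mode takes precedence, then a non-empty custom job, otherwise the imported BuildAutomationWorkflow.

```typescript
// Sketch of the workflow selection order; the strings stand in for the workflow classes.
function selectWorkflow(asyncCloudRunner: boolean, customJob: string): string {
  if (asyncCloudRunner) return 'AsyncWorkflow';
  if (customJob !== '') return 'CustomWorkflow';
  return 'BuildAutomationWorkflow';
}

console.log(selectWorkflow(true, '')); // AsyncWorkflow
console.log(selectWorkflow(false, '- name: step')); // CustomWorkflow
console.log(selectWorkflow(false, '')); // BuildAutomationWorkflow
```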

View File

@ -1,5 +1,168 @@
import CloudRunnerLogger from './cloud-runner/services/cloud-runner-logger';
import CloudRunner from './cloud-runner/cloud-runner';
import CloudRunnerOptions from './cloud-runner/cloud-runner-options';
import * as core from '@actions/core';
import { Octokit } from '@octokit/core';
class GitHub {
private static readonly asyncChecksApiWorkflowName = `Async Checks API`;
public static githubInputEnabled: boolean = true;
private static longDescriptionContent: string = ``;
private static startedDate: string;
private static endedDate: string;
private static get octokitDefaultToken() {
return new Octokit({
auth: process.env.GITHUB_TOKEN,
});
}
private static get octokitPAT() {
return new Octokit({
auth: CloudRunner.buildParameters.gitPrivateToken,
});
}
private static get sha() {
return CloudRunner.buildParameters.gitSha;
}
private static get checkName() {
return `Cloud Runner (${CloudRunner.buildParameters.buildGuid})`;
}
private static get nameReadable() {
return GitHub.checkName;
}
private static get checkRunId() {
return CloudRunner.githubCheckId;
}
private static get owner() {
return CloudRunnerOptions.githubOwner;
}
private static get repo() {
return CloudRunnerOptions.githubRepoName;
}
public static async createGitHubCheck(summary) {
if (!CloudRunnerOptions.githubChecks) {
return ``;
}
GitHub.startedDate = new Date().toISOString();
CloudRunnerLogger.log(`POST /repos/${GitHub.owner}/${GitHub.repo}/check-runs`);
const data = {
owner: GitHub.owner,
repo: GitHub.repo,
name: GitHub.checkName,
// eslint-disable-next-line camelcase
head_sha: GitHub.sha,
status: 'queued',
// eslint-disable-next-line camelcase
external_id: CloudRunner.buildParameters.buildGuid,
// eslint-disable-next-line camelcase
started_at: GitHub.startedDate,
output: {
title: GitHub.nameReadable,
summary,
text: '',
images: [
{
alt: 'Game-CI',
// eslint-disable-next-line camelcase
image_url: 'https://game.ci/assets/images/game-ci-brand-logo-wordmark.svg',
},
],
},
};
const result = await GitHub.createGitHubCheckRequest(data);
return result.data.id;
}
public static async updateGitHubCheck(longDescription, summary, result = `neutral`, status = `in_progress`) {
if (!CloudRunnerOptions.githubChecks) {
return;
}
GitHub.longDescriptionContent += `\n${longDescription}`;
const data: any = {
owner: GitHub.owner,
repo: GitHub.repo,
// eslint-disable-next-line camelcase
check_run_id: GitHub.checkRunId,
name: GitHub.checkName,
// eslint-disable-next-line camelcase
head_sha: GitHub.sha,
// eslint-disable-next-line camelcase
started_at: GitHub.startedDate,
status,
output: {
title: GitHub.nameReadable,
summary,
text: GitHub.longDescriptionContent,
annotations: [],
},
};
if (status === `completed`) {
if (GitHub.endedDate !== undefined) {
GitHub.endedDate = new Date().toISOString();
}
// eslint-disable-next-line camelcase
data.completed_at = GitHub.endedDate || GitHub.startedDate;
data.conclusion = result;
}
if (await CloudRunnerOptions.asyncCloudRunner) {
await GitHub.runUpdateAsyncChecksWorkflow(data, `update`);
return;
}
await GitHub.updateGitHubCheckRequest(data);
}
public static async updateGitHubCheckRequest(data) {
return await GitHub.octokitDefaultToken.request(`PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}`, data);
}
public static async createGitHubCheckRequest(data) {
return await GitHub.octokitDefaultToken.request(`POST /repos/{owner}/{repo}/check-runs`, data);
}
public static async runUpdateAsyncChecksWorkflow(data, mode) {
if (mode === `create`) {
throw new Error(`Not supported: only use update`);
}
const workflowsResult = await GitHub.octokitDefaultToken.request(
`GET /repos/${GitHub.owner}/${GitHub.repo}/actions/workflows`,
{
owner: GitHub.owner,
repo: GitHub.repo,
},
);
const workflows = workflowsResult.data.workflows;
let selectedId = ``;
for (let index = 0; index < workflowsResult.data.total_count; index++) {
if (workflows[index].name === GitHub.asyncChecksApiWorkflowName) {
selectedId = workflows[index].id;
}
}
if (selectedId === ``) {
core.info(JSON.stringify(workflows));
throw new Error(`no workflow with name "${GitHub.asyncChecksApiWorkflowName}"`);
}
await GitHub.octokitPAT.request(`POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches`, {
owner: GitHub.owner,
repo: GitHub.repo,
// eslint-disable-next-line camelcase
workflow_id: selectedId,
ref: CloudRunnerOptions.branch,
inputs: {
checksObject: JSON.stringify({ data, mode }),
},
});
}
}
export default GitHub;
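
For reference, a minimal usage sketch of the checks helper above. The import paths assume this class lives in src/ next to cloud-runner/, and storing the returned check-run id on CloudRunner.githubCheckId is an assumption about how the id is carried between calls (the class reads it from there).

```typescript
// Usage sketch only; not a definitive integration.
import GitHub from './github';
import CloudRunner from './cloud-runner/cloud-runner';

async function exampleCheckLifecycle() {
  // Create the check up front and remember its id for later updates (assumed storage).
  CloudRunner.githubCheckId = await GitHub.createGitHubCheck(`Build queued`);
  await GitHub.updateGitHubCheck(`Setting up the build`, `in progress`);
  // ...the build runs; FollowLogStreamService mirrors key Unity log lines into the check...
  await GitHub.updateGitHubCheck(`Build succeeded`, `Build succeeded`, `success`, `completed`);
}
```

In async mode updateGitHubCheck does not PATCH the check run directly; it dispatches the repository workflow named "Async Checks API" with the serialized check data as a workflow_dispatch input, and that workflow is expected to apply the update.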