@@ -414,6 +414,7 @@ class CLI {
core.info(`\r\n`);
core.info(`INPUT:`);
for (const element of properties) {
// TODO pull description from action.yml
program.option(`--${element} <${element}>`, 'default description');
if (__1.Input[element] !== undefined && __1.Input[element] !== '') {
core.info(`${element} ${__1.Input[element]}`);
@@ -826,219 +827,6 @@ class AWSBaseStack {
exports.AWSBaseStack = AWSBaseStack;
/***/ }),
/***/ 29102:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const AWS = __importStar(__webpack_require__(71786));
const core = __importStar(__webpack_require__(42186));
const zlib = __importStar(__webpack_require__(78761));
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const __1 = __webpack_require__(41359);
const fs_1 = __importDefault(__webpack_require__(35747));
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_statics_1 = __webpack_require__(90828);
class AWSBuildRunner {
static runTask(taskDef, ECS, CF, environment, buildGuid, commands) {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;
return __awaiter(this, void 0, void 0, function* () {
const cluster = ((_b = (_a = taskDef.baseResources) === null || _a === void 0 ? void 0 : _a.find((x) => x.LogicalResourceId === 'ECSCluster')) === null || _b === void 0 ? void 0 : _b.PhysicalResourceId) || '';
const taskDefinition = ((_d = (_c = taskDef.taskDefResources) === null || _c === void 0 ? void 0 : _c.find((x) => x.LogicalResourceId === 'TaskDefinition')) === null || _d === void 0 ? void 0 : _d.PhysicalResourceId) || '';
const SubnetOne = ((_f = (_e = taskDef.baseResources) === null || _e === void 0 ? void 0 : _e.find((x) => x.LogicalResourceId === 'PublicSubnetOne')) === null || _f === void 0 ? void 0 : _f.PhysicalResourceId) || '';
const SubnetTwo = ((_h = (_g = taskDef.baseResources) === null || _g === void 0 ? void 0 : _g.find((x) => x.LogicalResourceId === 'PublicSubnetTwo')) === null || _h === void 0 ? void 0 : _h.PhysicalResourceId) || '';
const ContainerSecurityGroup = ((_k = (_j = taskDef.baseResources) === null || _j === void 0 ? void 0 : _j.find((x) => x.LogicalResourceId === 'ContainerSecurityGroup')) === null || _k === void 0 ? void 0 : _k.PhysicalResourceId) || '';
const streamName = ((_m = (_l = taskDef.taskDefResources) === null || _l === void 0 ? void 0 : _l.find((x) => x.LogicalResourceId === 'KinesisStream')) === null || _m === void 0 ? void 0 : _m.PhysicalResourceId) || '';
const task = yield ECS.runTask({
cluster,
taskDefinition,
platformVersion: '1.4.0',
overrides: {
containerOverrides: [
{
name: taskDef.taskDefStackName,
environment: [...environment, { name: 'BUILDID', value: buildGuid }],
command: ['-c', ...commands],
},
],
},
launchType: 'FARGATE',
networkConfiguration: {
awsvpcConfiguration: {
subnets: [SubnetOne, SubnetTwo],
assignPublicIp: 'ENABLED',
securityGroups: [ContainerSecurityGroup],
},
},
}).promise();
cloud_runner_logger_1.default.log('Cloud runner job is starting');
const taskArn = ((_o = task.tasks) === null || _o === void 0 ? void 0 : _o[0].taskArn) || '';
try {
yield ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
}
catch (error_) {
const error = error_;
yield new Promise((resolve) => setTimeout(resolve, 3000));
cloud_runner_logger_1.default.log(`Cloud runner job has ended ${(_p = (yield AWSBuildRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _p === void 0 ? void 0 : _p[0].lastStatus}`);
core.setFailed(error);
core.error(error);
}
cloud_runner_logger_1.default.log(`Cloud runner job is running`);
yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (_q = (yield AWSBuildRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _q === void 0 ? void 0 : _q[0].exitCode;
cloud_runner_logger_1.default.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {
core.error(`job failed with exit code ${exitCode} ${JSON.stringify(yield ECS.describeTasks({ tasks: [taskArn], cluster }).promise(), undefined, 4)}`);
throw new Error(`job failed with exit code ${exitCode}`);
}
else {
cloud_runner_logger_1.default.log(`Cloud runner job has finished successfully`);
}
});
}
static describeTasks(ECS, clusterName, taskArn) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
const tasks = yield ECS.describeTasks({
cluster: clusterName,
tasks: [taskArn],
}).promise();
if ((_a = tasks.tasks) === null || _a === void 0 ? void 0 : _a[0]) {
return (_b = tasks.tasks) === null || _b === void 0 ? void 0 : _b[0];
}
else {
throw new Error('No task found');
}
});
}
static streamLogsUntilTaskStops(ECS, CF, taskDef, clusterName, taskArn, kinesisStreamName) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const kinesis = new AWS.Kinesis();
const stream = yield AWSBuildRunner.getLogStream(kinesis, kinesisStreamName);
let iterator = yield AWSBuildRunner.getLogIterator(kinesis, stream);
cloud_runner_logger_1.default.log(`Cloud runner job status is ${(_a = (yield AWSBuildRunner.describeTasks(ECS, clusterName, taskArn))) === null || _a === void 0 ? void 0 : _a.lastStatus}`);
const logBaseUrl = `https://${__1.Input.region}.console.aws.amazon.com/cloudwatch/home?region=${AWS.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
cloud_runner_logger_1.default.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let readingLogs = true;
let timestamp = 0;
while (readingLogs) {
yield new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = yield AWSBuildRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, readingLogs } = AWSBuildRunner.checkStreamingShouldContinue(taskData, timestamp, readingLogs));
({ iterator, readingLogs } = yield AWSBuildRunner.handleLogStreamIteration(kinesis, iterator, readingLogs, taskDef));
}
});
}
static handleLogStreamIteration(kinesis, iterator, readingLogs, taskDef) {
return __awaiter(this, void 0, void 0, function* () {
const records = yield kinesis
.getRecords({
ShardIterator: iterator,
})
.promise();
iterator = records.NextShardIterator || '';
readingLogs = AWSBuildRunner.logRecords(records, iterator, taskDef, readingLogs);
return { iterator, readingLogs };
});
}
static checkStreamingShouldContinue(taskData, timestamp, readingLogs) {
if ((taskData === null || taskData === void 0 ? void 0 : taskData.lastStatus) !== 'RUNNING') {
if (timestamp === 0) {
cloud_runner_logger_1.default.log('## Cloud runner job stopped, streaming end of logs');
timestamp = Date.now();
}
if (timestamp !== 0 && Date.now() - timestamp > 30000) {
cloud_runner_logger_1.default.log('## Cloud runner status is not RUNNING for 30 seconds, last query for logs');
readingLogs = false;
}
cloud_runner_logger_1.default.log(`## Status of job: ${taskData.lastStatus}`);
}
return { timestamp, readingLogs };
}
static logRecords(records, iterator, taskDef, readingLogs) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(zlib.gunzipSync(Buffer.from(records.Records[index].Data, 'base64')).toString('utf8'));
if (json.messageType === 'DATA_MESSAGE') {
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
let message = json.logEvents[logEventsIndex].message;
if (json.logEvents[logEventsIndex].message.includes(taskDef.logid)) {
cloud_runner_logger_1.default.log('End of cloud runner job logs');
readingLogs = false;
}
else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
if (cloud_runner_state_1.CloudRunnerState.buildParams.logToFile) {
fs_1.default.appendFileSync(`${cloud_runner_state_1.CloudRunnerState.buildGuid}-outputfile.txt`, `${message}\r\n`);
}
cloud_runner_logger_1.default.log(message);
}
}
}
}
return readingLogs;
}
static getLogStream(kinesis, kinesisStreamName) {
return __awaiter(this, void 0, void 0, function* () {
return yield kinesis
.describeStream({
StreamName: kinesisStreamName,
})
.promise();
});
}
static getLogIterator(kinesis, stream) {
return __awaiter(this, void 0, void 0, function* () {
return ((yield kinesis
.getShardIterator({
ShardIteratorType: 'TRIM_HORIZON',
StreamName: stream.StreamDescription.StreamName,
ShardId: stream.StreamDescription.Shards[0].ShardId,
})
.promise()).ShardIterator || '');
});
}
}
exports.default = AWSBuildRunner;
/***/ }),
/***/ 83683:
@@ -1290,6 +1078,219 @@ class AWSJobStack {
exports.AWSJobStack = AWSJobStack;
/***/ }),
/***/ 74668:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const AWS = __importStar(__webpack_require__(71786));
const core = __importStar(__webpack_require__(42186));
const zlib = __importStar(__webpack_require__(78761));
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const __1 = __webpack_require__(41359);
const fs_1 = __importDefault(__webpack_require__(35747));
const cloud_runner_state_1 = __webpack_require__(70912);
const cloud_runner_statics_1 = __webpack_require__(90828);
class AWSTaskRunner {
static runTask(taskDef, ECS, CF, environment, buildGuid, commands) {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;
return __awaiter(this, void 0, void 0, function* () {
const cluster = ((_b = (_a = taskDef.baseResources) === null || _a === void 0 ? void 0 : _a.find((x) => x.LogicalResourceId === 'ECSCluster')) === null || _b === void 0 ? void 0 : _b.PhysicalResourceId) || '';
const taskDefinition = ((_d = (_c = taskDef.taskDefResources) === null || _c === void 0 ? void 0 : _c.find((x) => x.LogicalResourceId === 'TaskDefinition')) === null || _d === void 0 ? void 0 : _d.PhysicalResourceId) || '';
const SubnetOne = ((_f = (_e = taskDef.baseResources) === null || _e === void 0 ? void 0 : _e.find((x) => x.LogicalResourceId === 'PublicSubnetOne')) === null || _f === void 0 ? void 0 : _f.PhysicalResourceId) || '';
const SubnetTwo = ((_h = (_g = taskDef.baseResources) === null || _g === void 0 ? void 0 : _g.find((x) => x.LogicalResourceId === 'PublicSubnetTwo')) === null || _h === void 0 ? void 0 : _h.PhysicalResourceId) || '';
const ContainerSecurityGroup = ((_k = (_j = taskDef.baseResources) === null || _j === void 0 ? void 0 : _j.find((x) => x.LogicalResourceId === 'ContainerSecurityGroup')) === null || _k === void 0 ? void 0 : _k.PhysicalResourceId) || '';
const streamName = ((_m = (_l = taskDef.taskDefResources) === null || _l === void 0 ? void 0 : _l.find((x) => x.LogicalResourceId === 'KinesisStream')) === null || _m === void 0 ? void 0 : _m.PhysicalResourceId) || '';
const task = yield ECS.runTask({
cluster,
taskDefinition,
platformVersion: '1.4.0',
overrides: {
containerOverrides: [
{
name: taskDef.taskDefStackName,
environment: [...environment, { name: 'BUILDID', value: buildGuid }],
command: ['-c', ...commands],
},
],
},
launchType: 'FARGATE',
networkConfiguration: {
awsvpcConfiguration: {
subnets: [SubnetOne, SubnetTwo],
assignPublicIp: 'ENABLED',
securityGroups: [ContainerSecurityGroup],
},
},
}).promise();
cloud_runner_logger_1.default.log('Cloud runner job is starting');
const taskArn = ((_o = task.tasks) === null || _o === void 0 ? void 0 : _o[0].taskArn) || '';
try {
yield ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
}
catch (error_) {
const error = error_;
yield new Promise((resolve) => setTimeout(resolve, 3000));
cloud_runner_logger_1.default.log(`Cloud runner job has ended ${(_p = (yield AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _p === void 0 ? void 0 : _p[0].lastStatus}`);
core.setFailed(error);
core.error(error);
}
cloud_runner_logger_1.default.log(`Cloud runner job is running`);
yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
const exitCode = (_q = (yield AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _q === void 0 ? void 0 : _q[0].exitCode;
cloud_runner_logger_1.default.log(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0 && exitCode !== undefined) {
core.error(`job failed with exit code ${exitCode} ${JSON.stringify(yield ECS.describeTasks({ tasks: [taskArn], cluster }).promise(), undefined, 4)}`);
throw new Error(`job failed with exit code ${exitCode}`);
}
else {
cloud_runner_logger_1.default.log(`Cloud runner job has finished successfully`);
}
});
}
static describeTasks(ECS, clusterName, taskArn) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
const tasks = yield ECS.describeTasks({
cluster: clusterName,
tasks: [taskArn],
}).promise();
if ((_a = tasks.tasks) === null || _a === void 0 ? void 0 : _a[0]) {
return (_b = tasks.tasks) === null || _b === void 0 ? void 0 : _b[0];
}
else {
throw new Error('No task found');
}
});
}
static streamLogsUntilTaskStops(ECS, CF, taskDef, clusterName, taskArn, kinesisStreamName) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const kinesis = new AWS.Kinesis();
const stream = yield AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
let iterator = yield AWSTaskRunner.getLogIterator(kinesis, stream);
cloud_runner_logger_1.default.log(`Cloud runner job status is ${(_a = (yield AWSTaskRunner.describeTasks(ECS, clusterName, taskArn))) === null || _a === void 0 ? void 0 : _a.lastStatus}`);
const logBaseUrl = `https://${__1.Input.region}.console.aws.amazon.com/cloudwatch/home?region=${AWS.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
cloud_runner_logger_1.default.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let readingLogs = true;
let timestamp = 0;
while (readingLogs) {
yield new Promise((resolve) => setTimeout(resolve, 1500));
const taskData = yield AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
({ timestamp, readingLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, readingLogs));
({ iterator, readingLogs } = yield AWSTaskRunner.handleLogStreamIteration(kinesis, iterator, readingLogs, taskDef));
}
});
}
static handleLogStreamIteration(kinesis, iterator, readingLogs, taskDef) {
return __awaiter(this, void 0, void 0, function* () {
const records = yield kinesis
.getRecords({
ShardIterator: iterator,
})
.promise();
iterator = records.NextShardIterator || '';
readingLogs = AWSTaskRunner.logRecords(records, iterator, taskDef, readingLogs);
return { iterator, readingLogs };
});
}
static checkStreamingShouldContinue(taskData, timestamp, readingLogs) {
if ((taskData === null || taskData === void 0 ? void 0 : taskData.lastStatus) !== 'RUNNING') {
if (timestamp === 0) {
cloud_runner_logger_1.default.log('## Cloud runner job stopped, streaming end of logs');
timestamp = Date.now();
}
if (timestamp !== 0 && Date.now() - timestamp > 30000) {
cloud_runner_logger_1.default.log('## Cloud runner status is not RUNNING for 30 seconds, last query for logs');
readingLogs = false;
}
cloud_runner_logger_1.default.log(`## Status of job: ${taskData.lastStatus}`);
}
return { timestamp, readingLogs };
}
static logRecords(records, iterator, taskDef, readingLogs) {
if (records.Records.length > 0 && iterator) {
for (let index = 0; index < records.Records.length; index++) {
const json = JSON.parse(zlib.gunzipSync(Buffer.from(records.Records[index].Data, 'base64')).toString('utf8'));
if (json.messageType === 'DATA_MESSAGE') {
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
let message = json.logEvents[logEventsIndex].message;
if (json.logEvents[logEventsIndex].message.includes(taskDef.logid)) {
cloud_runner_logger_1.default.log('End of cloud runner job logs');
readingLogs = false;
}
else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
core.warning('LIBRARY NOT FOUND!');
}
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
if (cloud_runner_state_1.CloudRunnerState.buildParams.logToFile) {
fs_1.default.appendFileSync(`${cloud_runner_state_1.CloudRunnerState.buildGuid}-outputfile.txt`, `${message}\r\n`);
}
cloud_runner_logger_1.default.log(message);
}
}
}
}
return readingLogs;
}
static getLogStream(kinesis, kinesisStreamName) {
return __awaiter(this, void 0, void 0, function* () {
return yield kinesis
.describeStream({
StreamName: kinesisStreamName,
})
.promise();
});
}
static getLogIterator(kinesis, stream) {
return __awaiter(this, void 0, void 0, function* () {
return ((yield kinesis
.getShardIterator({
ShardIteratorType: 'TRIM_HORIZON',
StreamName: stream.StreamDescription.StreamName,
ShardId: stream.StreamDescription.Shards[0].ShardId,
})
.promise()).ShardIterator || '');
});
}
}
exports.default = AWSTaskRunner;
/***/ }),
/***/ 6436:
@@ -1394,7 +1395,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const SDK = __importStar(__webpack_require__(71786));
const aws_build_runner_1 = __importDefault(__webpack_require__(29102));
const aws_task_runner_1 = __importDefault(__webpack_require__(74668));
const cloud_runner_logger_1 = __importDefault(__webpack_require__(22855));
const aws_job_stack_1 = __webpack_require__(85819);
const aws_base_stack_1 = __webpack_require__(28730);
@@ -1403,7 +1404,7 @@ class AWSBuildEnvironment {
constructor(buildParameters) {
this.baseStackName = buildParameters.awsBaseStackName;
}
cleanupSharedBuildResources(
cleanupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid,
// eslint-disable-next-line no-unused-vars
@@ -1414,7 +1415,7 @@ class AWSBuildEnvironment {
defaultSecretsArray) {
return __awaiter(this, void 0, void 0, function* () { });
}
setupSharedBuildResources(
setupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid,
// eslint-disable-next-line no-unused-vars
@@ -1425,7 +1426,7 @@ class AWSBuildEnvironment {
defaultSecretsArray) {
return __awaiter(this, void 0, void 0, function* () { });
}
runBuildTask(buildId, image, commands, mountdir, workingdir, environment, secrets) {
runTask(buildId, image, commands, mountdir, workingdir, environment, secrets) {
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = __1.Input.region;
const ECS = new SDK.ECS();
@@ -1439,7 +1440,7 @@ class AWSBuildEnvironment {
try {
const t1 = Date.now();
cloud_runner_logger_1.default.log(`Setup job time: ${Math.floor((t1 - t0) / 1000)}s`);
yield aws_build_runner_1.default.runTask(taskDef, ECS, CF, environment, buildId, commands);
yield aws_task_runner_1.default.runTask(taskDef, ECS, CF, environment, buildId, commands);
t2 = Date.now();
cloud_runner_logger_1.default.log(`Run job time: ${Math.floor((t2 - t1) / 1000)}s`);
}
@@ -1542,9 +1543,9 @@ class CloudRunner {
return __awaiter(this, void 0, void 0, function* () {
CloudRunner.setup(buildParameters);
try {
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.setupSharedBuildResources(cloud_runner_state_1.CloudRunnerState.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.setupSharedResources(cloud_runner_state_1.CloudRunnerState.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
yield new workflow_composition_root_1.WorkflowCompositionRoot().run(new cloud_runner_step_state_1.CloudRunnerStepState(baseImage, cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), cloud_runner_state_1.CloudRunnerState.defaultSecrets));
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedBuildResources(cloud_runner_state_1.CloudRunnerState.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(cloud_runner_state_1.CloudRunnerState.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
}
catch (error) {
yield cloud_runner_error_1.CloudRunnerError.handleException(error);
@@ -1604,7 +1605,7 @@ class CloudRunnerError {
return __awaiter(this, void 0, void 0, function* () {
cloud_runner_logger_1.default.error(JSON.stringify(error, undefined, 4));
core.setFailed('Cloud Runner failed');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedBuildResources(cloud_runner_state_1.CloudRunnerState.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(cloud_runner_state_1.CloudRunnerState.buildGuid, cloud_runner_state_1.CloudRunnerState.buildParams, cloud_runner_state_1.CloudRunnerState.branchName, cloud_runner_state_1.CloudRunnerState.defaultSecrets);
});
}
}
@@ -1679,7 +1680,7 @@ class Kubernetes {
this.namespace = 'default';
this.buildParameters = buildParameters;
}
setupSharedBuildResources(buildGuid, buildParameters,
setupSharedResources(buildGuid, buildParameters,
// eslint-disable-next-line no-unused-vars
branchName,
// eslint-disable-next-line no-unused-vars
@@ -1697,7 +1698,7 @@ class Kubernetes {
}
});
}
runBuildTask(buildGuid, image, commands, mountdir, workingdir, environment, secrets) {
runTask(buildGuid, image, commands, mountdir, workingdir, environment, secrets) {
return __awaiter(this, void 0, void 0, function* () {
try {
// setup
@@ -1755,7 +1756,7 @@ class Kubernetes {
}
});
}
cleanupSharedBuildResources(
cleanupSharedResources(
// eslint-disable-next-line no-unused-vars
buildGuid,
// eslint-disable-next-line no-unused-vars
@@ -2702,7 +2703,7 @@ class BuildStep {
static BuildStep(image, environmentVariables, secrets) {
return __awaiter(this, void 0, void 0, function* () {
cloud_runner_logger_1.default.logLine('Starting part 2/2 (build unity project)');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
`
export GITHUB_WORKSPACE="${cloud_runner_state_1.CloudRunnerState.repoPathFull}"
cp -r "${cloud_runner_state_1.CloudRunnerState.builderPathFull}/dist/default-build-script/" "/UnityBuilderAction"
@@ -2766,7 +2767,7 @@ class SetupStep {
return __awaiter(this, void 0, void 0, function* () {
try {
cloud_runner_logger_1.default.logLine('Starting step 1/2 download game files from repository, try to use cache');
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [
`
apk update -q
apk add unzip zip git-lfs jq tree nodejs -q
@@ -2894,7 +2895,7 @@ class CustomWorkflow {
};
return secret;
});
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, step['image'], step['commands'], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), [...cloud_runner_state_1.CloudRunnerState.defaultSecrets, ...stepSecrets]);
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildGuid, step['image'], step['commands'], `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, `/${cloud_runner_state_1.CloudRunnerState.buildVolumeFolder}`, cloud_runner_state_1.CloudRunnerState.readBuildEnvironmentVariables(), [...cloud_runner_state_1.CloudRunnerState.defaultSecrets, ...stepSecrets]);
}
}
catch (error) {
@@ -2950,7 +2951,7 @@ class EphemeralGitHubRunnerWorkflow {
try {
cloud_runner_logger_1.default.log(`Cloud Runner is running in ephemeral GitHub runner mode`);
const installAndStartRunner = ' cd .. & cd .. && ls && mkdir actions-runner && cd actions-runner && curl -O -L https://github.com/actions/runner/releases/download/v2.283.1/actions-runner-linux-x64-2.283.1.tar.gz && tar xzf ./actions-runner-linux-x64-2.283.1.tar.gz';
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runBuildTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [installAndStartRunner], `/runner`, `/runner`, environmentVariables, secrets);
yield cloud_runner_state_1.CloudRunnerState.CloudRunnerProviderPlatform.runTask(cloud_runner_state_1.CloudRunnerState.buildGuid, image, [installAndStartRunner], `/runner`, `/runner`, environmentVariables, secrets);
}
catch (error) {
throw error;