cleanup
parent 2df97e0b08
commit b00a0baa34
@@ -699,7 +699,7 @@ const core = __importStar(__webpack_require__(42186));
const zlib = __importStar(__webpack_require__(78761));
class AWSBuildRunner {
static runTask(taskDef, ECS, CF, environment, buildGuid, commands) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s;
+var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;
return __awaiter(this, void 0, void 0, function* () {
const cluster = ((_b = (_a = taskDef.baseResources) === null || _a === void 0 ? void 0 : _a.find((x) => x.LogicalResourceId === 'ECSCluster')) === null || _b === void 0 ? void 0 : _b.PhysicalResourceId) || '';
const taskDefinition = ((_d = (_c = taskDef.taskDefResources) === null || _c === void 0 ? void 0 : _c.find((x) => x.LogicalResourceId === 'TaskDefinition')) === null || _d === void 0 ? void 0 : _d.PhysicalResourceId) || '';
@@ -737,26 +737,16 @@ class AWSBuildRunner {
catch (error_) {
const error = error_;
yield new Promise((resolve) => setTimeout(resolve, 3000));
-const describeTasks = yield ECS.describeTasks({
-tasks: [taskArn],
-cluster,
-}).promise();
-core.info(`Cloud runner job has ended ${(_q = (_p = describeTasks.tasks) === null || _p === void 0 ? void 0 : _p[0].containers) === null || _q === void 0 ? void 0 : _q[0].lastStatus}`);
+core.info(`Cloud runner job has ended ${(_p = (yield AWSBuildRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _p === void 0 ? void 0 : _p[0].lastStatus}`);
core.setFailed(error);
core.error(error);
}
core.info(`Cloud runner job is running`);
yield this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
yield ECS.waitFor('tasksStopped', { cluster, tasks: [taskArn] }).promise();
-const exitCode = (_s = (_r = (yield ECS.describeTasks({
-tasks: [taskArn],
-cluster,
-}).promise()).tasks) === null || _r === void 0 ? void 0 : _r[0].containers) === null || _s === void 0 ? void 0 : _s[0].exitCode;
+const exitCode = (_q = (yield AWSBuildRunner.describeTasks(ECS, cluster, taskArn)).containers) === null || _q === void 0 ? void 0 : _q[0].exitCode;
core.info(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0) {
-core.error(`job failed with exit code ${exitCode} ${JSON.stringify(yield ECS.describeTasks({
-tasks: [taskArn],
-cluster,
-}).promise(), undefined, 4)}`);
+core.error(`job failed with exit code ${exitCode} ${JSON.stringify(yield ECS.describeTasks({ tasks: [taskArn], cluster }).promise(), undefined, 4)}`);
throw new Error(`job failed with exit code ${exitCode}`);
}
else {
@@ -764,19 +754,26 @@ class AWSBuildRunner {
}
});
}
+static describeTasks(ECS, clusterName, taskArn) {
+var _a, _b;
+return __awaiter(this, void 0, void 0, function* () {
+const tasks = yield ECS.describeTasks({
+cluster: clusterName,
+tasks: [taskArn],
+}).promise();
+if ((_a = tasks.tasks) === null || _a === void 0 ? void 0 : _a[0]) {
+return (_b = tasks.tasks) === null || _b === void 0 ? void 0 : _b[0];
+}
+else {
+throw new Error('No task found');
+}
+});
+}
static streamLogsUntilTaskStops(ECS, CF, taskDef, clusterName, taskArn, kinesisStreamName) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
// watching logs
const kinesis = new AWS.Kinesis();
-const getTaskData = () => __awaiter(this, void 0, void 0, function* () {
-var _b;
-const tasks = yield ECS.describeTasks({
-cluster: clusterName,
-tasks: [taskArn],
-}).promise();
-return (_b = tasks.tasks) === null || _b === void 0 ? void 0 : _b[0];
-});
const stream = yield kinesis
.describeStream({
StreamName: kinesisStreamName,
@@ -789,24 +786,16 @@ class AWSBuildRunner {
ShardId: stream.StreamDescription.Shards[0].ShardId,
})
.promise()).ShardIterator || '';
yield CF.waitFor('stackCreateComplete', { StackName: taskDef.taskDefStackNameTTL }).promise();
-core.info(`Cloud runner job status is ${(_a = (yield getTaskData())) === null || _a === void 0 ? void 0 : _a.lastStatus}`);
+core.info(`Cloud runner job status is ${(_a = (yield AWSBuildRunner.describeTasks(ECS, clusterName, taskArn))) === null || _a === void 0 ? void 0 : _a.lastStatus}`);
const logBaseUrl = `https://${AWS.config.region}.console.aws.amazon.com/cloudwatch/home?region=${AWS.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
core.info(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
let readingLogs = true;
let timestamp = 0;
while (readingLogs) {
yield new Promise((resolve) => setTimeout(resolve, 1500));
-const taskData = yield getTaskData();
+const taskData = yield AWSBuildRunner.describeTasks(ECS, clusterName, taskArn);
if ((taskData === null || taskData === void 0 ? void 0 : taskData.lastStatus) !== 'RUNNING') {
if (timestamp === 0) {
core.info('Cloud runner job stopped, streaming end of logs');
timestamp = Date.now();
}
if (timestamp !== 0 && Date.now() - timestamp < 30000) {
core.info('Cloud runner status is not RUNNING for 30 seconds, last query for logs');
readingLogs = false;
}
core.info('Task not runner, job ended');
readingLogs = false;
}
const records = yield kinesis
.getRecords({
File diff suppressed because one or more lines are too long
@@ -56,31 +56,23 @@ class AWSBuildRunner {
} catch (error_) {
const error = error_ as Error;
await new Promise((resolve) => setTimeout(resolve, 3000));
-const describeTasks = await ECS.describeTasks({
-tasks: [taskArn],
-cluster,
-}).promise();
-core.info(`Cloud runner job has ended ${describeTasks.tasks?.[0].containers?.[0].lastStatus}`);
+core.info(
+`Cloud runner job has ended ${
+(await AWSBuildRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].lastStatus
+}`,
+);
core.setFailed(error);
core.error(error);
}
core.info(`Cloud runner job is running`);
await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
await ECS.waitFor('tasksStopped', { cluster, tasks: [taskArn] }).promise();
-const exitCode = (
-await ECS.describeTasks({
-tasks: [taskArn],
-cluster,
-}).promise()
-).tasks?.[0].containers?.[0].exitCode;
+const exitCode = (await AWSBuildRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].exitCode;
core.info(`Cloud runner job exit code ${exitCode}`);
if (exitCode !== 0) {
core.error(
`job failed with exit code ${exitCode} ${JSON.stringify(
-await ECS.describeTasks({
-tasks: [taskArn],
-cluster,
-}).promise(),
+await ECS.describeTasks({ tasks: [taskArn], cluster }).promise(),
undefined,
4,
)}`,
@@ -91,6 +83,18 @@ class AWSBuildRunner {
}
}
+
+static async describeTasks(ECS: AWS.ECS, clusterName: string, taskArn: string) {
+const tasks = await ECS.describeTasks({
+cluster: clusterName,
+tasks: [taskArn],
+}).promise();
+if (tasks.tasks?.[0]) {
+return tasks.tasks?.[0];
+} else {
+throw new Error('No task found');
+}
+}

static async streamLogsUntilTaskStops(
ECS: AWS.ECS,
CF: AWS.CloudFormation,
@@ -102,14 +106,6 @@ class AWSBuildRunner {
// watching logs
const kinesis = new AWS.Kinesis();

-const getTaskData = async () => {
-const tasks = await ECS.describeTasks({
-cluster: clusterName,
-tasks: [taskArn],
-}).promise();
-return tasks.tasks?.[0];
-};

const stream = await kinesis
.describeStream({
StreamName: kinesisStreamName,
@@ -127,27 +123,19 @@ class AWSBuildRunner {
.promise()
).ShardIterator || '';

await CF.waitFor('stackCreateComplete', { StackName: taskDef.taskDefStackNameTTL }).promise();

-core.info(`Cloud runner job status is ${(await getTaskData())?.lastStatus}`);
+core.info(
+`Cloud runner job status is ${(await AWSBuildRunner.describeTasks(ECS, clusterName, taskArn))?.lastStatus}`,
+);

const logBaseUrl = `https://${AWS.config.region}.console.aws.amazon.com/cloudwatch/home?region=${AWS.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
core.info(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);

let readingLogs = true;
let timestamp: number = 0;
while (readingLogs) {
await new Promise((resolve) => setTimeout(resolve, 1500));
-const taskData = await getTaskData();
+const taskData = await AWSBuildRunner.describeTasks(ECS, clusterName, taskArn);
if (taskData?.lastStatus !== 'RUNNING') {
if (timestamp === 0) {
core.info('Cloud runner job stopped, streaming end of logs');
timestamp = Date.now();
}
if (timestamp !== 0 && Date.now() - timestamp < 30000) {
core.info('Cloud runner status is not RUNNING for 30 seconds, last query for logs');
readingLogs = false;
}
core.info('Task not runner, job ended');
readingLogs = false;
}
const records = await kinesis
.getRecords({