fix k8s isodate
parent ae0804b5c7
commit b22874e411
@@ -3046,8 +3046,6 @@ const kubernetes_job_spec_factory_1 = __importDefault(__nccwpck_require__(3610));
 const kubernetes_service_account_1 = __importDefault(__nccwpck_require__(47319));
 const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
 const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
-const kubernetes_pods_1 = __importDefault(__nccwpck_require__(90740));
-const follow_log_stream_service_1 = __nccwpck_require__(64121);
 class Kubernetes {
     // eslint-disable-next-line no-unused-vars
     constructor(buildParameters) {
@@ -3136,74 +3134,19 @@
         this.containerName = `main`;
         await kubernetes_secret_1.default.createSecret(secrets, this.secretName, this.namespace, this.kubeClient);
         let output = '';
-        // eslint-disable-next-line no-constant-condition
-        while (true) {
         try {
-            let existsAlready = false;
-            try {
-                (await this.kubeClient.readNamespacedPodStatus(this.podName, this.namespace)).body.status;
-                existsAlready = true;
-            }
-            catch {
-                // empty
-            }
-            if (!existsAlready) {
             cloud_runner_logger_1.default.log('Job does not exist');
             await this.createJob(commands, image, mountdir, workingdir, environment, secrets);
             cloud_runner_logger_1.default.log('Watching pod until running');
             await kubernetes_task_runner_1.default.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
-            }
             cloud_runner_logger_1.default.log('Pod running, streaming logs');
-            const running = await kubernetes_pods_1.default.IsPodRunning(this.podName, this.namespace, this.kubeClient);
-            output += await kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, kubernetes_job_spec_factory_1.default.MainContainerName, this.namespace, running);
+            output += await kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, kubernetes_job_spec_factory_1.default.MainContainerName, this.namespace);
-            let podStatus = await kubernetes_pods_1.default.GetPodStatus(this.podName, this.namespace, this.kubeClient);
-            if (!running) {
-                if (!follow_log_stream_service_1.FollowLogStreamService.DidReceiveEndOfTransmission && podStatus === `Succeeded`) {
-                    output += await kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace, true);
-                    cloud_runner_logger_1.default.log(JSON.stringify((await this.kubeClient.listNamespacedEvent(this.namespace)).body.items
-                        .map((x) => {
-                        return {
-                            message: x.message || ``,
-                            name: x.metadata.name || ``,
-                            reason: x.reason || ``,
-                        };
-                    })
-                        .filter((x) => x.name.includes(this.podName)), undefined, 4));
-                    break;
-                }
-                if (follow_log_stream_service_1.FollowLogStreamService.DidReceiveEndOfTransmission) {
-                    break;
-                }
-            }
-            podStatus = await kubernetes_pods_1.default.GetPodStatus(this.podName, this.namespace, this.kubeClient);
-            cloud_runner_logger_1.default.log(`Pod status ${podStatus}, retrying log stream...`);
         }
         catch (error) {
-            let errorParsed;
-            try {
-                errorParsed = JSON.parse(error);
-            }
-            catch {
-                errorParsed = error;
-            }
-            const errorMessage = errorParsed.name || errorParsed.reason || errorParsed.response?.body?.reason || errorParsed.message;
-            const continueStreaming = errorMessage.includes(`dial timeout, backstop`) ||
-                errorMessage.includes(`HttpError`) ||
-                errorMessage.includes(`HttpError: HTTP request failed`) ||
-                errorMessage.includes(`an error occurred when try to find container`) ||
-                errorMessage.includes(`not found`) ||
-                errorMessage.includes(`Not Found`);
-            if (continueStreaming) {
-                cloud_runner_logger_1.default.log('Log Stream Container Not Found');
-                await new Promise((resolve) => resolve(5000));
-                continue;
-            }
-            else {
             cloud_runner_logger_1.default.log(`error running k8s workflow ${error}`);
+            await this.cleanupTaskResources();
             throw error;
         }
-            }
-        }
         await this.cleanupTaskResources();
         return output;
     }
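Note: the retry path removed in the hunk above paused with `await new Promise((resolve) => resolve(5000));`, which resolves immediately (5000 is just the resolved value, so nothing ever waited). A minimal sketch of a pause that actually waits, illustrative only and not part of this commit:

    // defer resolve via setTimeout so the promise settles after `ms` milliseconds
    const sleep = (ms: number): Promise<void> => new Promise((resolve) => setTimeout(resolve, ms));

    // e.g. before retrying the log stream:
    await sleep(5000);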
@@ -3459,38 +3402,6 @@ KubernetesJobSpecFactory.MainContainerName = 'main';
 exports["default"] = KubernetesJobSpecFactory;
 
 
-/***/ }),
-
-/***/ 90740:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
-class KubernetesPods {
-    static async IsPodRunning(podName, namespace, kubeClient) {
-        const pods = (await kubeClient.listNamespacedPod(namespace)).body.items.filter((x) => podName === x.metadata?.name);
-        const running = pods.length > 0 && (pods[0].status?.phase === `Running` || pods[0].status?.phase === `Pending`);
-        const phase = pods[0]?.status?.phase || 'undefined status';
-        cloud_runner_logger_1.default.log(`Getting pod status: ${phase}`);
-        if (phase === `Failed`) {
-            throw new Error(`K8s pod failed`);
-        }
-        return running;
-    }
-    static async GetPodStatus(podName, namespace, kubeClient) {
-        const pods = (await kubeClient.listNamespacedPod(namespace)).body.items.find((x) => podName === x.metadata?.name);
-        const phase = pods?.status?.phase || 'undefined status';
-        return phase;
-    }
-}
-exports["default"] = KubernetesPods;
-
-
 /***/ }),
 
 /***/ 95875:
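Note: the `KubernetesPods` helper deleted above boils down to one list call plus a phase check. A condensed standalone sketch against @kubernetes/client-node, assuming the 0.x client API where responses carry a `.body`; the client construction here is illustrative, not taken from the repo:

    import { CoreV1Api, KubeConfig } from '@kubernetes/client-node';

    async function getPodPhase(podName: string, namespace: string): Promise<string> {
      const kubeConfig = new KubeConfig();
      kubeConfig.loadFromDefault(); // assumes a kubeconfig is available in the environment
      const kubeClient = kubeConfig.makeApiClient(CoreV1Api);
      const pod = (await kubeClient.listNamespacedPod(namespace)).body.items.find((x) => x.metadata?.name === podName);

      return pod?.status?.phase ?? 'undefined status';
    }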
@@ -3752,11 +3663,11 @@ const follow_log_stream_service_1 = __nccwpck_require__(64121);
 const cloud_runner_system_1 = __nccwpck_require__(99393);
 const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
 class KubernetesTaskRunner {
-    static async runTask(kubeConfig, kubeClient, jobName, podName, containerName, namespace, alreadyFinished = false) {
+    static async runTask(kubeConfig, kubeClient, jobName, podName, containerName, namespace) {
         const lastReceivedMessage = this.lastReceivedMessage !== ``
             ? `\nLast Log Message "${this.lastReceivedMessage}" ${this.lastReceivedTimestamp}`
             : ``;
-        cloud_runner_logger_1.default.log(`Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace} finished ${alreadyFinished} ${cloud_runner_1.default.buildParameters.kubeVolumeSize}/${cloud_runner_1.default.buildParameters.containerCpu}/${cloud_runner_1.default.buildParameters.containerMemory}\n${lastReceivedMessage}`);
+        cloud_runner_logger_1.default.log(`Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace} ${cloud_runner_1.default.buildParameters.kubeVolumeSize}/${cloud_runner_1.default.buildParameters.containerCpu}/${cloud_runner_1.default.buildParameters.containerMemory}\n${lastReceivedMessage}`);
         let output = '';
         let didStreamAnyLogs = false;
         let shouldReadLogs = true;
@@ -3775,7 +3686,9 @@
         const tzd = `${symbolOffset + hourOffset + Math.floor(timeOffsetInHours)}:${minOffset}${minZone}`;
         const dateTZDformat = currentDateTime + tzd;
         try {
-            const sinceTime = KubernetesTaskRunner.lastReceivedTimestamp ? `--since-time="${dateTZDformat}" ` : ` `;
+            // eslint-disable-next-line no-constant-condition
+            while (true) {
+                const sinceTime = `${KubernetesTaskRunner.lastReceivedTimestamp}` !== `` ? `--since-time="${dateTZDformat}" ` : ` `;
                 let lastMessageSeenIncludedInChunk = false;
                 let lastMessageSeen = false;
                 // using this instead of Kube
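Note: the commit title points at the `--since-time` argument assembled from `currentDateTime + tzd` above. kubectl expects an RFC 3339 timestamp with a timezone designator (TZD), e.g. 2022-10-14T09:30:00+02:00. A self-contained sketch of building one; the repo derives its offset fields differently, so treat this as illustration:

    function toIsoWithTzd(date: Date): string {
      // getTimezoneOffset() reports minutes *behind* UTC, so flip the sign
      const offsetMinutes = -date.getTimezoneOffset();
      const sign = offsetMinutes >= 0 ? '+' : '-';
      const abs = Math.abs(offsetMinutes);
      const hh = String(Math.floor(abs / 60)).padStart(2, '0');
      const mm = String(abs % 60).padStart(2, '0');
      // shift to local wall time, then swap toISOString's trailing ".sssZ" for the TZD
      const local = new Date(date.getTime() + offsetMinutes * 60 * 1000);

      return local.toISOString().replace(/\.\d{3}Z$/, '') + `${sign}${hh}:${mm}`;
    }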
@@ -3819,7 +3732,11 @@
                 }, undefined, 4));
                 throw new Error(`No logs streamed from k8s`);
             }
+                if (follow_log_stream_service_1.FollowLogStreamService.DidReceiveEndOfTransmission) {
                     cloud_runner_logger_1.default.log('end of log stream');
+                    break;
+                }
+            }
         }
         catch (error) {
             cloud_runner_logger_1.default.log(`k8s stream watching failed ${JSON.stringify(error, undefined, 4)}`);
File diff suppressed because one or more lines are too long
@@ -14,8 +14,6 @@ import { CoreV1Api } from '@kubernetes/client-node';
 import CloudRunner from '../../cloud-runner';
 import { ProviderResource } from '../provider-resource';
 import { ProviderWorkflow } from '../provider-workflow';
-import KubernetesPods from './kubernetes-pods';
-import { FollowLogStreamService } from '../../services/follow-log-stream-service';
 
 class Kubernetes implements ProviderInterface {
   public static Instance: Kubernetes;
@@ -133,24 +131,13 @@
     this.containerName = `main`;
     await KubernetesSecret.createSecret(secrets, this.secretName, this.namespace, this.kubeClient);
     let output = '';
-    // eslint-disable-next-line no-constant-condition
-    while (true) {
     try {
-      let existsAlready = false;
-      try {
-        (await this.kubeClient.readNamespacedPodStatus(this.podName, this.namespace)).body.status;
-        existsAlready = true;
-      } catch {
-        // empty
-      }
-      if (!existsAlready) {
       CloudRunnerLogger.log('Job does not exist');
       await this.createJob(commands, image, mountdir, workingdir, environment, secrets);
       CloudRunnerLogger.log('Watching pod until running');
       await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
-      }
       CloudRunnerLogger.log('Pod running, streaming logs');
-      const running = await KubernetesPods.IsPodRunning(this.podName, this.namespace, this.kubeClient);
+
       output += await KubernetesTaskRunner.runTask(
         this.kubeConfig,
         this.kubeClient,
@@ -158,73 +145,13 @@
         this.podName,
         KubernetesJobSpecFactory.MainContainerName,
         this.namespace,
-        running,
       );
-      let podStatus = await KubernetesPods.GetPodStatus(this.podName, this.namespace, this.kubeClient);
-
-      if (!running) {
-        if (!FollowLogStreamService.DidReceiveEndOfTransmission && podStatus === `Succeeded`) {
-          output += await KubernetesTaskRunner.runTask(
-            this.kubeConfig,
-            this.kubeClient,
-            this.jobName,
-            this.podName,
-            'main',
-            this.namespace,
-            true,
-          );
-          CloudRunnerLogger.log(
-            JSON.stringify(
-              (await this.kubeClient.listNamespacedEvent(this.namespace)).body.items
-                .map((x) => {
-                  return {
-                    message: x.message || ``,
-                    name: x.metadata.name || ``,
-                    reason: x.reason || ``,
-                  };
-                })
-                .filter((x) => x.name.includes(this.podName)),
-              undefined,
-              4,
-            ),
-          );
-          break;
-        }
-
-        if (FollowLogStreamService.DidReceiveEndOfTransmission) {
-          break;
-        }
-      }
-      podStatus = await KubernetesPods.GetPodStatus(this.podName, this.namespace, this.kubeClient);
-      CloudRunnerLogger.log(`Pod status ${podStatus}, retrying log stream...`);
     } catch (error: any) {
-      let errorParsed;
-      try {
-        errorParsed = JSON.parse(error);
-      } catch {
-        errorParsed = error;
-      }
-
-      const errorMessage =
-        errorParsed.name || errorParsed.reason || errorParsed.response?.body?.reason || errorParsed.message;
-
-      const continueStreaming =
-        errorMessage.includes(`dial timeout, backstop`) ||
-        errorMessage.includes(`HttpError`) ||
-        errorMessage.includes(`HttpError: HTTP request failed`) ||
-        errorMessage.includes(`an error occurred when try to find container`) ||
-        errorMessage.includes(`not found`) ||
-        errorMessage.includes(`Not Found`);
-      if (continueStreaming) {
-        CloudRunnerLogger.log('Log Stream Container Not Found');
-        await new Promise((resolve) => resolve(5000));
-        continue;
-      } else {
       CloudRunnerLogger.log(`error running k8s workflow ${error}`);
+      await this.cleanupTaskResources();
       throw error;
     }
-      }
-    }
     await this.cleanupTaskResources();

     return output;
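Note: the catch block removed above decided whether to keep streaming by matching error-message substrings. The pattern, condensed into a sketch with invented names:

    const RETRYABLE_MESSAGES = [
      `dial timeout, backstop`,
      `HttpError`,
      `an error occurred when try to find container`,
      `not found`,
      `Not Found`,
    ];
    const isRetryable = (message: string): boolean => RETRYABLE_MESSAGES.some((x) => message.includes(x));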
@@ -16,14 +16,13 @@ class KubernetesTaskRunner {
     podName: string,
     containerName: string,
     namespace: string,
-    alreadyFinished: boolean = false,
   ) {
     const lastReceivedMessage =
       this.lastReceivedMessage !== ``
         ? `\nLast Log Message "${this.lastReceivedMessage}" ${this.lastReceivedTimestamp}`
         : ``;
     CloudRunnerLogger.log(
-      `Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace} finished ${alreadyFinished} ${CloudRunner.buildParameters.kubeVolumeSize}/${CloudRunner.buildParameters.containerCpu}/${CloudRunner.buildParameters.containerMemory}\n${lastReceivedMessage}`,
+      `Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace} ${CloudRunner.buildParameters.kubeVolumeSize}/${CloudRunner.buildParameters.containerCpu}/${CloudRunner.buildParameters.containerMemory}\n${lastReceivedMessage}`,
     );
     let output = '';
     let didStreamAnyLogs: boolean = false;
@@ -45,7 +44,10 @@ class KubernetesTaskRunner {
     const dateTZDformat = currentDateTime + tzd;

     try {
-      const sinceTime = KubernetesTaskRunner.lastReceivedTimestamp ? `--since-time="${dateTZDformat}" ` : ` `;
+      // eslint-disable-next-line no-constant-condition
+      while (true) {
+        const sinceTime =
+          `${KubernetesTaskRunner.lastReceivedTimestamp}` !== `` ? `--since-time="${dateTZDformat}" ` : ` `;
         let lastMessageSeenIncludedInChunk = false;
         let lastMessageSeen = false;
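Note: the `sinceTime` guard change above is behavioral, not cosmetic. Under template-literal coercion only the empty string counts as unset, while the old truthiness check also treated undefined, null, and 0 as unset. An illustrative comparison:

    const values = [undefined, null, 0, '', '2022-10-14T09:30:00+02:00'];
    for (const v of values) {
      const oldCheck = v ? 'set' : 'unset'; // truthiness
      const newCheck = `${v}` !== `` ? 'set' : 'unset'; // coerced comparison
      console.log(v, '-> old:', oldCheck, '| new:', newCheck);
    }
    // undefined -> old: unset | new: set, because `${undefined}` is the string "undefined"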
@@ -108,7 +110,11 @@
         );
         throw new Error(`No logs streamed from k8s`);
       }
+        if (FollowLogStreamService.DidReceiveEndOfTransmission) {
          CloudRunnerLogger.log('end of log stream');
+          break;
+        }
+      }
     } catch (error: any) {
       CloudRunnerLogger.log(`k8s stream watching failed ${JSON.stringify(error, undefined, 4)}`);
     }