fixes
parent 8773eade07
commit fa4ab7ea60
@@ -3142,10 +3142,19 @@ class Kubernetes {
cloud_runner_logger_1.default.log('Watching pod until running');
await kubernetes_task_runner_1.default.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
cloud_runner_logger_1.default.log('Pod running, streaming logs');
output += await kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, kubernetes_job_spec_factory_1.default.MainContainerName, this.namespace);
output += await kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, this.containerName, this.namespace);
}
catch (error) {
cloud_runner_logger_1.default.log(`error running k8s workflow ${error}`);
cloud_runner_logger_1.default.log(JSON.stringify((await this.kubeClient.listNamespacedEvent(this.namespace)).body.items
.map((x) => {
return {
message: x.message || ``,
name: x.metadata.name || ``,
reason: x.reason || ``,
};
})
.filter((x) => x.name.includes(this.podName)), undefined, 4));
await this.cleanupTaskResources();
throw error;
}
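The hunk above reorders the run flow: the provider now waits for the pod to reach Running before it starts streaming, and passes the instance's containerName rather than the former MainContainerName constant. A minimal sketch of that ordering, with the logger and task runner passed in as parameters so the snippet stays self-contained (the helper name streamJobOutput is an assumption, not repository code):

type Logger = { log(message: string): void };
type TaskRunner = {
  watchUntilPodRunning(kubeClient: unknown, podName: string, namespace: string): Promise<void>;
  runTask(
    kubeConfig: unknown,
    kubeClient: unknown,
    jobName: string,
    podName: string,
    containerName: string,
    namespace: string,
  ): Promise<string>;
};

async function streamJobOutput(
  logger: Logger,
  runner: TaskRunner,
  kubeConfig: unknown,
  kubeClient: unknown,
  jobName: string,
  podName: string,
  containerName: string,
  namespace: string,
): Promise<string> {
  logger.log('Watching pod until running');
  await runner.watchUntilPodRunning(kubeClient, podName, namespace);
  logger.log('Pod running, streaming logs');
  // The container name is now the per-job value, not the former 'main' constant.
  return runner.runTask(kubeConfig, kubeClient, jobName, podName, containerName, namespace);
}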
@@ -3175,7 +3184,7 @@ class Kubernetes {
async createNamespacedJob(commands, image, mountdir, workingdir, environment, secrets) {
for (let index = 0; index < 3; index++) {
try {
const jobSpec = kubernetes_job_spec_factory_1.default.getJobSpec(commands, image, mountdir, workingdir, environment, secrets, this.buildGuid, this.buildParameters, this.secretName, this.pvcName, this.jobName, k8s);
const jobSpec = kubernetes_job_spec_factory_1.default.getJobSpec(commands, image, mountdir, workingdir, environment, secrets, this.buildGuid, this.buildParameters, this.secretName, this.pvcName, this.jobName, k8s, this.containerName);
await new Promise((promise) => setTimeout(promise, 15000));
const result = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
cloud_runner_logger_1.default.log(`Build job created`);
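createNamespacedJob submits the job inside a three-attempt loop with a 15-second pause before each try, and now forwards this.containerName into the job spec. The retry shape, isolated as a hedged sketch (the helper name and the error handling are assumptions; the diff does not show how a failed attempt is reported):

// Sketch of the retry-with-delay pattern used above; not repository code.
async function createJobWithRetries(
  createJob: () => Promise<unknown>,
  attempts = 3,
  delayMs = 15000,
): Promise<unknown> {
  let lastError: unknown;
  for (let index = 0; index < attempts; index++) {
    try {
      // Matches the diff: pause before submitting, then create the namespaced job.
      await new Promise((resolve) => setTimeout(resolve, delayMs));
      return await createJob();
    } catch (error) {
      lastError = error;
    }
  }
  throw lastError;
}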
@@ -3191,7 +3200,7 @@ class Kubernetes {
}
setPodNameAndContainerName(pod) {
this.podName = pod.metadata?.name || '';
this.containerName = pod.status?.containerStatuses?.[0].name || '';
this.containerName = pod.status?.containerStatuses?.[0].name || this.containerName;
}
async cleanupTaskResources() {
cloud_runner_logger_1.default.log('cleaning up');
@@ -3262,7 +3271,7 @@ const client_node_1 = __nccwpck_require__(89679);
const cloud_runner_custom_hooks_1 = __nccwpck_require__(58873);
const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
class KubernetesJobSpecFactory {
static getJobSpec(command, image, mountdir, workingDirectory, environment, secrets, buildGuid, buildParameters, secretName, pvcName, jobName, k8s) {
static getJobSpec(command, image, mountdir, workingDirectory, environment, secrets, buildGuid, buildParameters, secretName, pvcName, jobName, k8s, containerName) {
environment.push(...[
{
name: 'GITHUB_SHA',
@@ -3343,7 +3352,7 @@ class KubernetesJobSpecFactory {
containers: [
{
ttlSecondsAfterFinished: 9999,
name: KubernetesJobSpecFactory.MainContainerName,
name: containerName,
image,
command: ['/bin/sh'],
args: ['-c', `${cloud_runner_custom_hooks_1.CloudRunnerCustomHooks.ApplyHooksToCommands(command, cloud_runner_1.default.buildParameters)}`],
@@ -3401,7 +3410,6 @@ class KubernetesJobSpecFactory {
return job;
}
}
KubernetesJobSpecFactory.MainContainerName = 'main';
exports["default"] = KubernetesJobSpecFactory;
@@ -3696,14 +3704,12 @@ class KubernetesTaskRunner {
sinceTime = ` --since-time="${dateTimeIsoString}"`;
}
let extraFlags = ``;
extraFlags += (await kubernetes_pods_1.default.IsPodRunning(podName, namespace, kubeClient))
? ` -c ${containerName}`
: ` -p`;
extraFlags += (await kubernetes_pods_1.default.IsPodRunning(podName, namespace, kubeClient)) ? `` : ` -p`;
let lastMessageSeenIncludedInChunk = false;
let lastMessageSeen = false;
let logs;
try {
logs = await cloud_runner_system_1.CloudRunnerSystem.Run(`kubectl logs ${podName}${extraFlags} -f --timestamps${sinceTime}`, false, true);
logs = await cloud_runner_system_1.CloudRunnerSystem.Run(`kubectl logs ${podName}${extraFlags} -f -c ${containerName} --timestamps${sinceTime}`, false, true);
}
catch (error) {
const errorString = `${error}`;
File diff suppressed because one or more lines are too long
@@ -143,11 +143,26 @@ class Kubernetes implements ProviderInterface {
this.kubeClient,
this.jobName,
this.podName,
KubernetesJobSpecFactory.MainContainerName,
this.containerName,
this.namespace,
);
} catch (error: any) {
CloudRunnerLogger.log(`error running k8s workflow ${error}`);
CloudRunnerLogger.log(
JSON.stringify(
(await this.kubeClient.listNamespacedEvent(this.namespace)).body.items
.map((x) => {
return {
message: x.message || ``,
name: x.metadata.name || ``,
reason: x.reason || ``,
};
})
.filter((x) => x.name.includes(this.podName)),
undefined,
4,
),
);
await this.cleanupTaskResources();
throw error;
}
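The new catch block collects the namespace's events, keeps only message, name, and reason, filters to events mentioning the failing pod, and pretty-prints them before cleanup and rethrow. A standalone sketch of that summary using the @kubernetes/client-node CoreV1Api call visible in the diff (the helper name podEventSummary is an assumption):

import * as k8s from '@kubernetes/client-node';

// Sketch: summarise a pod's recent namespace events as pretty-printed JSON,
// mirroring the map/filter in the catch block above.
async function podEventSummary(kubeClient: k8s.CoreV1Api, namespace: string, podName: string): Promise<string> {
  const events = (await kubeClient.listNamespacedEvent(namespace)).body.items
    .map((x) => ({
      message: x.message || ``,
      name: x.metadata?.name || ``,
      reason: x.reason || ``,
    }))
    .filter((x) => x.name.includes(podName));

  return JSON.stringify(events, undefined, 4);
}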
@@ -212,6 +227,7 @@ class Kubernetes implements ProviderInterface {
this.pvcName,
this.jobName,
k8s,
this.containerName,
);
await new Promise((promise) => setTimeout(promise, 15000));
const result = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
@@ -229,7 +245,7 @@ class Kubernetes implements ProviderInterface {

setPodNameAndContainerName(pod: k8s.V1Pod) {
this.podName = pod.metadata?.name || '';
this.containerName = pod.status?.containerStatuses?.[0].name || '';
this.containerName = pod.status?.containerStatuses?.[0].name || this.containerName;
}

async cleanupTaskResources() {
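setPodNameAndContainerName previously overwrote containerName with an empty string whenever the pod reported no container statuses; it now falls back to the name assigned when the job spec was created. A small sketch of that resolution (helper name assumed, types from @kubernetes/client-node):

import * as k8s from '@kubernetes/client-node';

// Sketch: prefer the name reported by the pod's first container status;
// otherwise keep the name chosen when the job spec was built.
function resolveContainerName(pod: k8s.V1Pod, currentName: string): string {
  return pod.status?.containerStatuses?.[0]?.name || currentName;
}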
@@ -6,7 +6,6 @@ import CloudRunnerSecret from '../../services/cloud-runner-secret';
import CloudRunner from '../../cloud-runner';

class KubernetesJobSpecFactory {
public static readonly MainContainerName = 'main';
static getJobSpec(
command: string,
image: string,
@@ -20,6 +19,7 @@ class KubernetesJobSpecFactory {
pvcName: string,
jobName: string,
k8s: any,
containerName: string,
) {
environment.push(
...[
@@ -103,7 +103,7 @@ class KubernetesJobSpecFactory {
containers: [
{
ttlSecondsAfterFinished: 9999,
name: KubernetesJobSpecFactory.MainContainerName,
name: containerName,
image,
command: ['/bin/sh'],
args: ['-c', `${CloudRunnerCustomHooks.ApplyHooksToCommands(command, CloudRunner.buildParameters)}`],
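getJobSpec now receives the container name as a parameter and applies it to the job's single container in place of the removed MainContainerName constant. A reduced sketch of the container entry this hunk touches (only a subset of fields; the helper name is an assumption, not the factory itself):

// Sketch: the container entry now takes the caller-supplied name
// instead of the removed 'main' constant.
function containerSpec(containerName: string, image: string, script: string) {
  return {
    name: containerName,
    image,
    command: ['/bin/sh'],
    args: ['-c', script],
  };
}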
@@ -36,9 +36,7 @@ class KubernetesTaskRunner {
sinceTime = ` --since-time="${dateTimeIsoString}"`;
}
let extraFlags = ``;
extraFlags += (await KubernetesPods.IsPodRunning(podName, namespace, kubeClient))
? ` -c ${containerName}`
: ` -p`;
extraFlags += (await KubernetesPods.IsPodRunning(podName, namespace, kubeClient)) ? `` : ` -p`;
let lastMessageSeenIncludedInChunk = false;
let lastMessageSeen = false;
@@ -46,7 +44,7 @@ class KubernetesTaskRunner {

try {
logs = await CloudRunnerSystem.Run(
`kubectl logs ${podName}${extraFlags} -f --timestamps${sinceTime}`,
`kubectl logs ${podName}${extraFlags} -f -c ${containerName} --timestamps${sinceTime}`,
false,
true,
);
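With this change the task runner always passes -c <container> to kubectl logs and only adds -p (the previous container instance's logs) when the pod is no longer running, rather than choosing between the two. A sketch of how the command string is assembled, assuming the caller supplies the pod-running check (helper name assumed; -c, -p, -f, --timestamps, and --since-time are standard kubectl flags):

// Sketch: assemble the kubectl logs invocation the way the diff does.
// -p fetches the previous container instance's logs when the pod has already stopped.
function buildLogsCommand(
  podName: string,
  containerName: string,
  podIsRunning: boolean,
  sinceTimeIso?: string,
): string {
  const sinceTime = sinceTimeIso ? ` --since-time="${sinceTimeIso}"` : ``;
  const extraFlags = podIsRunning ? `` : ` -p`;
  return `kubectl logs ${podName}${extraFlags} -f -c ${containerName} --timestamps${sinceTime}`;
}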