hashed logs
parent 2019e730cb
commit 87c7a1f553

@@ -117,7 +117,6 @@ jobs:
- 'cloud-runner-async-workflow'
- 'cloud-runner-end2end-locking'
- 'cloud-runner-end2end-caching'
- 'cloud-runner-kubernetes'
- 'cloud-runner-end2end-retaining'
- 'cloud-runner-environment'
- 'cloud-runner-github-checks'

@@ -4585,6 +4585,8 @@ const node_fs_1 = __importDefault(__nccwpck_require__(87561));
 const node_path_1 = __importDefault(__nccwpck_require__(49411));
 const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
 const cloud_runner_options_1 = __importDefault(__nccwpck_require__(66965));
+const cloud_runner_system_1 = __nccwpck_require__(4197);
+const cloud_runner_folders_1 = __nccwpck_require__(77795);
 class RemoteClientLogger {
 static get LogFilePath() {
 return node_path_1.default.join(`/home`, `job-log.txt`);

@@ -4613,7 +4615,12 @@ class RemoteClientLogger {
 return;
 }
 cloud_runner_logger_1.default.log(`Collected Logs`);
-const hashedLogs = node_fs_1.default.readFileSync(RemoteClientLogger.LogFilePath).toString();
+let hashedLogs = node_fs_1.default.readFileSync(RemoteClientLogger.LogFilePath).toString();
+// create hashed version of logs using md5sum
+const startPath = process.cwd();
+process.chdir(node_path_1.default.resolve(cloud_runner_folders_1.CloudRunnerFolders.repoPathAbsolute, '..'));
+hashedLogs = await cloud_runner_system_1.CloudRunnerSystem.Run(`md5sum ${RemoteClientLogger.LogFilePath}`);
+process.chdir(startPath);
 cloud_runner_logger_1.default.log(hashedLogs);
 const logs = node_fs_1.default.readFileSync(RemoteClientLogger.LogFilePath).toString();
 cloud_runner_logger_1.default.log(logs);

File diff suppressed because one or more lines are too long

@@ -3,6 +3,8 @@ import fs from 'node:fs';
 import path from 'node:path';
 import CloudRunner from '../cloud-runner';
 import CloudRunnerOptions from '../options/cloud-runner-options';
+import { CloudRunnerSystem } from '../services/core/cloud-runner-system';
+import { CloudRunnerFolders } from '../options/cloud-runner-folders';

 export class RemoteClientLogger {
 private static get LogFilePath() {

@@ -38,7 +40,14 @@ export class RemoteClientLogger {
 return;
 }
 CloudRunnerLogger.log(`Collected Logs`);
-const hashedLogs = fs.readFileSync(RemoteClientLogger.LogFilePath).toString();
+let hashedLogs = fs.readFileSync(RemoteClientLogger.LogFilePath).toString();
+
+// create hashed version of logs using md5sum
+const startPath = process.cwd();
+process.chdir(path.resolve(CloudRunnerFolders.repoPathAbsolute, '..'));
+hashedLogs = await CloudRunnerSystem.Run(`md5sum ${RemoteClientLogger.LogFilePath}`);
+process.chdir(startPath);
+
+CloudRunnerLogger.log(hashedLogs);
 const logs = fs.readFileSync(RemoteClientLogger.LogFilePath).toString();
 CloudRunnerLogger.log(logs);
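
For context: the added block shells out to `md5sum` via `CloudRunnerSystem.Run` and logs the resulting digest line for the collected log file (`/home/job-log.txt`, per `LogFilePath` in the compiled output above). The sketch below is illustrative only, not the commit's implementation: it produces the same md5 hex digest in-process with Node's built-in `node:crypto`, without the directory changes, and the `hashLogFile` helper name is invented for the example. Note that `md5sum` also prints the file name after the digest, whereas this returns the hex digest alone.

// Illustrative sketch (not part of the commit): hash the collected log file
// in-process with node:crypto instead of shelling out to `md5sum`.
import crypto from 'node:crypto';
import fs from 'node:fs';

function hashLogFile(logFilePath: string): string {
  const contents = fs.readFileSync(logFilePath); // raw bytes of the collected logs
  return crypto.createHash('md5').update(contents).digest('hex'); // same hex digest md5sum prints
}

// Usage with the path from the diff:
// console.log(hashLogFile('/home/job-log.txt'));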