change runtimeCheckLog

This commit is contained in:
joseph le 2023-10-04 11:15:31 +07:00
parent c6333d40ea
commit 6d04a73c35
1 changed file with 195 additions and 178 deletions

View File

@ -87,13 +87,14 @@ export async function runtimeCheckLogs(folderPath) {
// Watch for changes in the files listed
async function watchFilesInList() {
//only check new file ---> fileList - fileList_old = new file
let listFileWatch = fileList.slice(0, 40)?.filter((i) => fileList_old.includes(i) === false).map((file) => folderPath + "/" + file)
const watcher = chokidar.watch(
fileList.slice(0,40)
?.filter((i) => fileList_old.includes(i) === false)
.map((file) => folderPath + "/" + file),{
persistent: true,
interval: 300000
});
listFileWatch,
{
persistent: true,
interval: 300000,
}
);
watcher.setMaxListeners(200);
@ -101,97 +102,65 @@ export async function runtimeCheckLogs(folderPath) {
// fs.watchFile(filePath,{ interval: 15000 },
// async (eventType) => {
//check special item, extra RAM, error in log
const fileName = path.split("/")[path.split("/").length - 1]
const filePath = path
let lines = [];
const today = DateTime.now().toFormat('yyyy-MM-dd');
let allFile = await LogDetectFile.query()
.whereRaw(`DATE(created_at) = ?`, [today])
// let allReport = await LogReport.all();
let allValue = await KeyValue.all();
const allReport = await LogReport.query()
.whereRaw(`DATE(created_at) = ?`, [today])
const fileName = path.split("/")[path.split("/").length - 1];
const filePath = path;
let lines = [];
const today = DateTime.now().toFormat("yyyy-MM-dd");
let allFile = await LogDetectFile.query().whereRaw(
`DATE(created_at) = ?`,
[today]
);
// let allReport = await LogReport.all();
let allValue = await KeyValue.all();
const allReport = await LogReport.query().whereRaw(
`DATE(created_at) = ?`,
[today]
);
//get information file
let fileDetect = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
let logsDetect = allReport?.filter(
(i) => i.$original.id_file === fileDetect?.id_ldf
);
//get the last line detected
let lastLine = Math.max(...logsDetect.map((obj) => obj.line));
//get information file
let fileDetect = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
//get content file in local
let contentFile = await fs
.readFileSync(filePath)
.toString()
?.split("\n");
let logsDetect = allReport?.filter(
(i) => i.$original.id_file === fileDetect?.id_ldf
);
//get the last line detected
let lastLine = Math.max(...logsDetect.map((obj) => obj.line));
//get index SN and send to ERP
//get content file in local
let contentFile = await fs
.readFileSync(filePath)
.toString()
?.split("\n");
checkIndexSN(contentFile, lastLine, fileName);
//get index SN and send to ERP
//get list item to check
let listKeyValues = allValue.filter(
(i) =>
i.$original.key === "MODEL_SPECIAL" ||
i.$original.key === "CATCH_FAULTY"
);
checkIndexSN(contentFile, lastLine, fileName);
//get list exclude error
let listExcludeErr = allValue
.filter((i) => i.$original.key === "EXCLUDE_ERR")
.map((obj) => obj.$original.value);
//get list item to check
let listKeyValues = allValue.filter(
(i) =>
i.$original.key === "MODEL_SPECIAL" ||
i.$original.key === "CATCH_FAULTY"
);
//get list item special
let listExtraItem = allValue
.filter((i) => i.$original.key === "MODEL_SPECIAL")
.map((obj) => obj.$original.value);
//get list exclude error
let listExcludeErr = allValue
.filter((i) => i.$original.key === "EXCLUDE_ERR")
.map((obj) => obj.$original.value);
//Process file content
if(contentFile.length>50000){
for (let i = 0; i < contentFile.length; i += 1000) {
const chunk = contentFile.slice(i, i + 1000);
chunk.map((line, index) => {
//check line the line with errors and exclude errors
listKeyValues
.map((obj) => obj.$original.value)
.map(async (value) => {
if (
line.search(value) !== -1 &&
listExcludeErr.filter((err) => line.includes(err))
.length === 0
) {
let log = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
let checkLog = allReport?.filter(
(report) =>
report.$original.id_file === log?.id_ldf &&
report.$original.line === index + 1 &&
report.$original.detected_content === value
);
if (checkLog?.length === 0) {
await LogReport.create({
detected_content: value,
line: index + 1,
id_file: log?.id_ldf,
});
lines.push(index + 1);
}
}
// if(checkSpecialVersion())
});
});
}
}else{
contentFile.map((line, index) => {
//get list item special
let listExtraItem = allValue
.filter((i) => i.$original.key === "MODEL_SPECIAL")
.map((obj) => obj.$original.value);
//Process file content
if (contentFile.length > 50000) {
for (let i = 0; i < contentFile.length; i += 1000) {
const chunk = contentFile.slice(i, i + 1000);
chunk.map((line, index) => {
//check line the line with errors and exclude errors
listKeyValues
.map((obj) => obj.$original.value)
@ -204,14 +173,14 @@ export async function runtimeCheckLogs(folderPath) {
let log = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
let checkLog = allReport?.filter(
(report) =>
report.$original.id_file === log?.id_ldf &&
report.$original.line === index + 1 &&
report.$original.detected_content === value
);
if (checkLog?.length === 0) {
await LogReport.create({
detected_content: value,
@ -221,109 +190,157 @@ export async function runtimeCheckLogs(folderPath) {
lines.push(index + 1);
}
}
// if(checkSpecialVersion())
});
});
}
} else {
contentFile.map((line, index) => {
//check line the line with errors and exclude errors
listKeyValues
.map((obj) => obj.$original.value)
.map(async (value) => {
if (
line.search(value) !== -1 &&
listExcludeErr.filter((err) => line.includes(err)).length ===
0
) {
let log = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
//true: import log to log_report table, send report to Zulip
setTimeout(async () => {
if (lines.length === 0) {
console.log(fileName + "has changed("+contentFile.length+") ---Good");
} else {
console.log(
fileName + "has changed("+contentFile.length+") ---SOS---"+lines.length
);
let allReport_new = await LogReport.query()
.whereRaw(`DATE(created_at) = ?`, [today])
let checkLog = allReport?.filter(
(report) =>
report.$original.id_file === log?.id_ldf &&
report.$original.line === index + 1 &&
report.$original.detected_content === value
);
let fileDetect = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
if (checkLog?.length === 0) {
await LogReport.create({
detected_content: value,
line: index + 1,
id_file: log?.id_ldf,
});
lines.push(index + 1);
}
}
let logsDetect = allReport_new?.filter(
(i) => i.$original.id_file === fileDetect?.id_ldf
);
// console.log(logsDetect)
// await Database.rawQuery(
// "select * from log_reports where id_file = " +
// fileDetect?.id_ldf
// );
//Get all report newest
let listReport = await getListLineByItem(
logsDetect
.map((obj) => obj.$original)
.filter((l) => l.line > lastLine)
);
let content =
"| |Last updated at | Item/error | Line | Report \n|---|:---:|:---|:---|:-----------:\n";
let spoiler = "";
let issueFound = "";
listReport.map((log, index) => {
let item = listExtraItem.includes(log.detected_content)
? ":medal: **" + log.detected_content + "**"
: ":small_orange_diamond: " + log.detected_content;
log.line?.map((line) => {
issueFound =
issueFound +
"\n`" +
line +
"` " +
contentFile[line - 1]?.replace(
log.detected_content,
"[" +
log.detected_content +
"](https://logs.danielvu.com/logs/" +
fileName +
"#" +
line +
")"
);
});
content =
content +
"|" +
(index + 1) +
"|**" +
moment(Date.now()).format("HH:mm - DD/MM") +
"**|" +
item +
"|" +
log.line +
"|[View](https://logs.danielvu.com/logs/" +
fileName +
"#" +
log.line +
")\n";
// if(checkSpecialVersion())
});
});
}
sendMessToZulip(
"stream",
Env.get("ZULIP_STREAM_ALERT"),
Env.get("ZULIP_TOPIC_ALERT"),
"------------\n\n:warning: :warning: **" +
fileName +
"**\n\n" +
content +
"\n\n" +
spoiler +
"\n\n***Issue found:***\n" +
issueFound
);
}
}, 3000)
//true: import log to log_report table, send report to Zulip
setTimeout(async () => {
if (lines.length === 0) {
console.log(
fileName + "has changed(" + contentFile.length + ") ---Good"
);
} else {
console.log(
fileName +
"has changed(" +
contentFile.length +
") ---SOS---" +
lines.length
);
let allReport_new = await LogReport.query().whereRaw(
`DATE(created_at) = ?`,
[today]
);
let fileDetect = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
let logsDetect = allReport_new?.filter(
(i) => i.$original.id_file === fileDetect?.id_ldf
);
// console.log(logsDetect)
// await Database.rawQuery(
// "select * from log_reports where id_file = " +
// fileDetect?.id_ldf
// );
//Get all report newest
let listReport = await getListLineByItem(
logsDetect
.map((obj) => obj.$original)
.filter((l) => l.line > lastLine)
);
let content =
"| |Last updated at | Item/error | Line | Report \n|---|:---:|:---|:---|:-----------:\n";
let spoiler = "";
let issueFound = "";
listReport.map((log, index) => {
let item = listExtraItem.includes(log.detected_content)
? ":medal: **" + log.detected_content + "**"
: ":small_orange_diamond: " + log.detected_content;
log.line?.map((line) => {
issueFound =
issueFound +
"\n`" +
line +
"` " +
contentFile[line - 1]?.replace(
log.detected_content,
"[" +
log.detected_content +
"](https://logs.danielvu.com/logs/" +
fileName +
"#" +
line +
")"
);
});
content =
content +
"|" +
(index + 1) +
"|**" +
moment(Date.now()).format("HH:mm - DD/MM") +
"**|" +
item +
"|" +
log.line +
"|[View](https://logs.danielvu.com/logs/" +
fileName +
"#" +
log.line +
")\n";
});
sendMessToZulip(
"stream",
Env.get("ZULIP_STREAM_ALERT"),
Env.get("ZULIP_TOPIC_ALERT"),
"------------\n\n:warning: :warning: **" +
fileName +
"**\n\n" +
content +
"\n\n" +
spoiler +
"\n\n***Issue found:***\n" +
issueFound
);
}
}, 3000);
// console.log(path + " change")
});
watcher.on("error", (error) => {
console.error(`Watcher error: ${error}`);
});
// await fileList.slice(0,40)
// ?.filter((i) => fileList_old.includes(i) === false)
// ?.forEach((fileName) => {
// //path file
// const filePath = `${folderPath}/${fileName}`;
// });
}
} catch (error) {