update trycatch runtime
This commit is contained in:
parent 411f135a70
commit 4696f155c4
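The change wraps the body of the file-watcher routine in a try/catch so that an error while setting up or running the chokidar watcher is logged instead of crashing the runtime log check. A minimal sketch of that guard pattern, with a hypothetical logsDir argument and plain console logging standing in for the real database and Zulip reporting:

const chokidar = require("chokidar");

// Sketch only: the real watchFilesInList builds its watch list from fileList,
// queries LogDetectFile/LogReport, and posts alerts to Zulip inside the try.
async function watchLogsDir(logsDir) {
  try {
    const watcher = chokidar.watch(logsDir, {
      persistent: true,
      usePolling: true,
      interval: 300000, // poll every 5 minutes, matching the real watcher options
    });

    watcher.on("change", (path) => {
      console.log(path + " changed");
    });

    watcher.on("error", (error) => {
      console.error(`Watcher error: ${error}`);
    });
  } catch (error) {
    // Any synchronous failure while wiring up the watcher is logged here
    // instead of taking down the whole runtime check.
    console.log(error);
  }
}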
@@ -73,8 +73,8 @@ export async function runtimeCheckLogs(folderPath) {
    if (
      filePath?.split(".")[filePath.split(".").length - 1] === "log" &&
      filePath.split("/")[filePath.split("/").length - 1]?.split("-")[0] ===
        //localhost
        // filePath.split("\\")[filePath.split("\\").length - 1]?.split("-")[0] ===
        moment(Date.now()).format("YYYYMMDD").toString()
    ) {
      //add information file to database
@@ -99,86 +99,151 @@ export async function runtimeCheckLogs(folderPath) {

  // Watch for changes in the files listed
  async function watchFilesInList() {
    try {
      //only check new file ---> fileList - fileList_old = new file
      let listFileWatch = fileList
        ?.filter((i) => fileList_old.includes(i) === false)
        .map((file) => folderPath + "/" + file);
      const watcher = chokidar.watch(listFileWatch, {
        persistent: true,
        usePolling: true,
        interval: 300000,
      });

      watcher.setMaxListeners(200);

      watcher.on("change", async (path) => {
        // fs.watchFile(filePath,{ interval: 15000 },
        // async (eventType) => {
        //check special item, extra RAM, error in log
        const fileName = path.split("/")[path.split("/").length - 1];
        // const fileName = path.split("\\")[path.split("\\").length - 1];
        const filePath = path;
        let lines = [];
        const today = DateTime.now().toFormat("yyyy-MM-dd");
        let allFile = await LogDetectFile.query().whereRaw(
          `DATE(created_at) = ?`,
          [today]
        );
        // let allReport = await LogReport.all();
        let allValue = await KeyValue.all();
        const allReport = await LogReport.query().whereRaw(
          `DATE(created_at) = ?`,
          [today]
        );

        //get information file
        let fileDetect = allFile?.filter(
          (i) => i.$original.file_name === fileName
        )[0];

        let logsDetect = allReport?.filter(
          (i) => i.$original.id_file === fileDetect?.id_ldf
        );
        //get the last line detected
        let lastLine = Math.max(...logsDetect.map((obj) => obj.line));

        //get content file in local
        let contentFile = await fs
          .readFileSync(filePath)
          .toString()
          ?.split("\n");

        //get index SN and send to ERP
        checkIndexSN(contentFile, lastLine, fileName);

        //get list item to check
        let listKeyValues = allValue.filter(
          (i) =>
            i.$original.key === "MODEL_SPECIAL" ||
            i.$original.key === "CATCH_FAULTY"
        );

        //get list exclude error
        let listExcludeErr = allValue
          .filter((i) => i.$original.key === "EXCLUDE_ERR")
          .map((obj) => obj.$original.value);

        //get list item special
        let listExtraItem = allValue
          .filter((i) => i.$original.key === "MODEL_SPECIAL")
          .map((obj) => obj.$original.value);

        //Process file content
        if (contentFile.length > 50000) {
          for (let i = 0; i < contentFile.length; i += 1000) {
            const chunk = contentFile.slice(i, i + 1000);

            chunk.map(async (line, index) => {
              //check line the line with errors and exclude errors
              listKeyValues
                .map((obj) => obj.$original.value)
                .map(async (value) => {
                  if (
                    line.includes(value) &&
                    listExcludeErr.filter((err) => line.includes(err))
                      .length === 0
                  ) {
                    let log = allFile?.filter(
                      (i) => i.$original.file_name === fileName
                    )[0];

                    let checkLog = allReport?.filter(
                      (report) =>
                        report.$original.id_file === log?.id_ldf &&
                        report.$original.line === index + 1 &&
                        report.$original.detected_content === value
                    );

                    if (checkLog?.length === 0) {
                      await LogReport.create({
                        detected_content: value,
                        line: index + 1,
                        id_file: log?.id_ldf,
                      });
                      lines.push(index + 1);
                    }
                  }

                  // if(checkSpecialVersion())
                });

              if (
                checkSpecialVersion(line) !== "" &&
                listExcludeErr.filter((err) => line.includes(err)).length ===
                  0
              ) {
                let checkVersion = checkSpecialVersion(line);
                let log = allFile?.filter(
                  (i) => i.$original.file_name === fileName
                )[0];

                let checkLog = allReport?.filter(
                  (report) =>
                    report.$original.id_file === log?.id_ldf &&
                    report.$original.line === index + 1 &&
                    report.$original.detected_content === checkVersion
                );

                if (checkLog?.length === 0) {
                  await LogReport.create({
                    detected_content: checkVersion,
                    line: index + 1,
                    id_file: log?.id_ldf,
                  });
                  lines.push(index + 1);
                }
              }
            });
          }
        } else {
          contentFile.map(async (line, index) => {
            //check line the line with errors and exclude errors
            listKeyValues
              .map((obj) => obj.$original.value)
              .map(async (value) => {
                // console.log({line:Array(line), value:Array(value)})
                if (
                  line.includes(value) &&
                  listExcludeErr.filter((err) => line.includes(err))
@@ -235,189 +300,128 @@ export async function runtimeCheckLogs(folderPath) {
                    .length === 0
                ) {
                  let log = allFile?.filter(
                    (i) => i.$original.file_name === fileName
                  )[0];

                  let checkLog = allReport?.filter(
                    (report) =>
                      report.$original.id_file === log?.id_ldf &&
                      report.$original.line === index + 1 &&
                      report.$original.detected_content === value
                  );

                  if (checkLog?.length === 0) {
                    await LogReport.create({
                      detected_content: value,
                      line: index + 1,
                      id_file: log?.id_ldf,
                    });
                    lines.push(index + 1);
                  }
                }

                // if(checkSpecialVersion())
              });

            if (
              checkSpecialVersion(line) !== "" &&
              listExcludeErr.filter((err) => line.includes(err)).length === 0
            ) {
              let checkVersion = checkSpecialVersion(line);
              let log = allFile?.filter(
                (i) => i.$original.file_name === fileName
              )[0];

              let checkLog = allReport?.filter(
                (report) =>
                  report.$original.id_file === log?.id_ldf &&
                  report.$original.line === index + 1 &&
                  report.$original.detected_content === checkVersion
              );

              if (checkLog?.length === 0) {
                await LogReport.create({
                  detected_content: checkVersion,
                  line: index + 1,
                  id_file: log?.id_ldf,
                });
                lines.push(index + 1);
              }
            }
          });
        }

        //true: import log to log_report table, send report to Zulip
        setTimeout(async () => {
          if (lines.length === 0) {
            console.log(
              fileName + "has changed(" + contentFile.length + ") ---Good"
            );
          } else {
            console.log(
              fileName +
                "has changed(" +
                contentFile.length +
                ") ---SOS---" +
                lines.length
            );
            let allReport_new = await LogReport.query().whereRaw(
              `DATE(created_at) = ?`,
              [today]
            );

            let fileDetect = allFile?.filter(
              (i) => i.$original.file_name === fileName
            )[0];

            let logsDetect = allReport_new?.filter(
              (i) => i.$original.id_file === fileDetect?.id_ldf
            );
            // console.log(logsDetect)
            // await Database.rawQuery(
            //   "select * from log_reports where id_file = " +
            //     fileDetect?.id_ldf
            // );
            //Get all report newest
            let listReport = await getListLineByItem(
              logsDetect
                .map((obj) => obj.$original)
                .filter((l) => l.line > lastLine)
            );

            let content =
              "| |Last updated at | Item/error | Line | Report \n|---|:---:|:---|:---|:-----------:\n";
            let spoiler = "";
            let issueFound = "";
            let important = [
              "Vxx",
              "V00",
              "(CAT3K_CAA-UNIVERSALK9-M), Version",
            ];
            listReport.map((log, index) => {
              let item = listExtraItem.includes(log.detected_content)
                ? ":medal: **" + log.detected_content + "**"
                : ":small_orange_diamond: " + log.detected_content;

              log.line?.map((line) => {
                issueFound =
                  issueFound +
                  "\n`" +
                  line +
                  "` " +
                  contentFile[line - 1]?.replace(
                    log.detected_content,
                    "[" +
                      log.detected_content +
                      "](https://logs.danielvu.com/logs/" +
                      fileName +
                      "#" +
                      line +
                      ")"
                  );
              });
              content =
                content +
                "|" +
                (index + 1) +
                "|**" +
                moment(Date.now()).format("HH:mm - DD/MM") +
                "**|" +
                item +
                "|" +
                log.line +
                "|[View](https://logs.danielvu.com/logs/" +
                fileName +
                "#" +
                log.line +
                ")\n";
            });

            let icon =
              important.filter((i) => content.includes(i)).length > 0
                ? "------------\n\n:no_entry: :no_entry:**" +
                  fileName +
                  "**:no_entry: :no_entry:"
                : "------------\n\n:warning: :warning: **" + fileName + "**";
            sendMessToZulip(
              "stream",
              Env.get("ZULIP_STREAM_ALERT"),
              Env.get("ZULIP_TOPIC_ALERT"),
              icon +
                "\n\n" +
                content +
                "\n\n" +
                spoiler +
                "\n\n***Issue found:***\n" +
                issueFound
            );
          }
        }, 3000);
        // console.log(path + " change")
      });

      watcher.on("error", (error) => {
        console.error(`Watcher error: ${error}`);
      });
      // await fileList.slice(0,40)
      //   ?.filter((i) => fileList_old.includes(i) === false)
      //   ?.forEach((fileName) => {
      //     //path file
      //     const filePath = `${folderPath}/${fileName}`;

      // });
    } catch (error) {
      console.log(error);
    }
  }