import Env from "@ioc:Adonis/Core/Env";
import KeyValue from "App/Models/KeyValue";
import LogDetectFile from "App/Models/LogDetectFile";
import LogReport from "App/Models/LogReport";
import chokidar from "chokidar";
import fs from "fs";
import { DateTime } from "luxon";
import moment from "moment";
import { checkIndexSN } from "./checkIndexSN";
import { getListLineByItem } from "./getListLineByItem";
import { sendMessToZulip } from "./sendMessToZulip";
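
/**
 * Watch `folderPath` for today's "<YYYYMMDD>-*.log" files. On every change,
 * scan the new content for configured items/errors (KeyValue keys
 * MODEL_SPECIAL and CATCH_FAULTY, minus EXCLUDE_ERR) and for special
 * software versions, record hits in log_report, and push a summary table
 * to Zulip.
 */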
export async function runtimeCheckLogs(folderPath: string) {
  try {
    let cacheFile: any[] = [];
    let fileList: string[] = [];
    let fileList_old: string[] = [];

    // Refresh the list of files: only ".log" files modified today are kept
    async function updateFileList() {
      // Date now, for comparing against each file's mtime
      let dateNow = moment(Date.now()).format("YYYY/MM/DD");

      // Get the list of files in the folder
      fileList = fs.readdirSync(folderPath);

      // Drop ".log" files that were not modified today
      fileList.forEach((file) => {
        const filePath = `${folderPath}/${file}`;
        if (file?.split(".")[file.split(".").length - 1] === "log") {
          const stats = fs.statSync(filePath);

          // Only scan files from today
          if (dateNow !== moment(stats.mtime).format("YYYY/MM/DD")) {
            fileList = fileList.filter((i) => i !== file);
          }
        }
      });
    }
|
|
const checkSpecialVersion = (paragraph: string): string => {
|
|
try {
|
|
const patterns = [
|
|
/\(CAT[1-9]K.*Version 16\.9\.[2-9]/,
|
|
/\(CAT[1-9]K.*Version 1[7-9]\.[0-9]\.[0-9]/,
|
|
/\(CAT[1-9]K.*Version [2-9][0-9]\.[0-9]\.[0-9]/,
|
|
];
|
|
for (const regex of patterns) {
|
|
const match = paragraph.match(regex);
|
|
if (match) return match[0];
|
|
}
|
|
return "";
|
|
} catch {
|
|
return "";
|
|
}
|
|
};
|
|
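
    // For example, a banner line containing
    // "(CAT3K_CAA-UNIVERSALK9-M), Version 16.9.4" is caught by the first
    // pattern; 17.x-19.x and two-digit (20.x+) releases match the other two.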
    // Watch the folder for new files
    const folderWatcher = chokidar.watch(folderPath);

    folderWatcher.on("add", async (filePath) => {
      // Import a newly added log file.
      // The file name must be "<YYYYMMDD>-*.log" and dated today.
      if (
        filePath?.split(".")[filePath.split(".").length - 1] === "log" &&
        filePath.split("/")[filePath.split("/").length - 1]?.split("-")[0] ===
          // localhost (Windows paths):
          // filePath.split("\\")[filePath.split("\\").length - 1]?.split("-")[0] ===
          moment(Date.now()).format("YYYYMMDD").toString()
      ) {
        // Record the file in the database (idempotent)
        console.log("New file added: ", filePath);
        await LogDetectFile.firstOrCreate(
          { file_name: filePath.split("/")[filePath.split("/").length - 1] },
          { file_name: filePath.split("/")[filePath.split("/").length - 1] }
        );

        // localhost (Windows paths):
        // await LogDetectFile.firstOrCreate(
        //   { file_name: filePath.split("\\")[filePath.split("\\").length - 1] },
        //   { file_name: filePath.split("\\")[filePath.split("\\").length - 1] }
        // );

        fileList_old = fileList;
        await updateFileList();
        await watchFilesInList();
      }
    });
    // Watch for changes in the files listed
    async function watchFilesInList() {
      try {
        // Only watch new files: fileList minus fileList_old
        let listFileWatch = fileList
          ?.filter(
            (i) => fileList_old.includes(i) === false && i.includes(".log")
          )
          .map((file) => folderPath + "/" + file);

        // Poll for changes every 5 minutes (300000 ms)
        const watcher = chokidar.watch(listFileWatch, {
          persistent: true,
          usePolling: true,
          interval: 300000,
        });

        watcher.setMaxListeners(200);
watcher.on("change", async (path) => {
|
|
// fs.watchFile(filePath,{ interval: 15000 },
|
|
// async (eventType) => {
|
|
//check special item, extra RAM, error in log
|
|
const fileName = path.split("/")[path.split("/").length - 1];
|
|
// const fileName = path.split("\\")[path.split("\\").length - 1];
|
|
const filePath = path;
|
|
let lines: any = [];
|
|
const today = DateTime.now().toFormat("yyyy-MM-dd");
|
|
let allFile = await LogDetectFile.query().whereRaw(
|
|
`DATE(created_at) = ?`,
|
|
[today]
|
|
);
|
|
// let allReport = await LogReport.all();
|
|
let allValue = await KeyValue.all();
|
|
const allReport = await LogReport.query().whereRaw(
|
|
`DATE(created_at) = ?`,
|
|
[today]
|
|
);
|
|
|
|
cacheFile = cacheFile.concat(
|
|
allFile.filter(
|
|
(obj2) => !cacheFile.some((obj1) => obj1.id_ldf === obj2.id_ldf)
|
|
)
|
|
);
|
|
|
|
//get information file
|
|
let fileDetect = allFile?.filter(
|
|
(i) => i.$original.file_name === fileName
|
|
)[0];
|
|
|
|
let logsDetect = allReport?.filter(
|
|
(i) => i.$original.id_file === fileDetect?.id_ldf
|
|
);
|
|
//get the last line detected
|
|
let lastLine = Math.max(...logsDetect.map((obj) => obj.line));
|
|
|
|
//get content file in local
|
|
let contentFile = await fs
|
|
.readFileSync(filePath)
|
|
.toString()
|
|
?.split("\n");
|
|
|
|
//get index SN and send to ERP
|
|
|
|
checkIndexSN(contentFile, lastLine, fileName);
|
|
|
|
          // Get the list of items/errors to check for
          let listKeyValues = allValue.filter(
            (i) =>
              i.$original.key === "MODEL_SPECIAL" ||
              i.$original.key === "CATCH_FAULTY"
          );

          // Get the list of excluded errors
          let listExcludeErr = allValue
            .filter((i) => i.$original.key === "EXCLUDE_ERR")
            .map((obj) => obj.$original.value);

          // Get the list of special items
          let listExtraItem = allValue
            .filter((i) => i.$original.key === "MODEL_SPECIAL")
            .map((obj) => obj.$original.value);

          // Process the file content; very large files are scanned in
          // chunks of 1000 lines
          if (contentFile.length > 50000) {
            for (let i = 0; i < contentFile.length; i += 1000) {
              const chunk = contentFile.slice(i, i + 1000);

              chunk.forEach(async (line, index) => {
                // Line number in the whole file, not within the chunk
                const lineNo = i + index + 1;

                // Check the line against the watched values, skipping
                // excluded errors
                listKeyValues
                  .map((obj) => obj.$original.value)
                  .forEach(async (value) => {
                    if (
                      line.includes(value) &&
                      listExcludeErr.filter((err) => line.includes(err))
                        .length === 0
                    ) {
                      let log = allFile?.filter(
                        (i) => i.$original.file_name === fileName
                      )[0];

                      let checkLog = allReport?.filter(
                        (report) =>
                          report.$original.id_file === log?.id_ldf &&
                          report.$original.line === lineNo &&
                          report.$original.detected_content === value
                      );

                      if (!log?.id_ldf) {
                        // File record missing from today's query: fall back
                        // to the cached records
                        let logBackup = cacheFile.filter(
                          (i) => i.file_name === fileName
                        )[0];
                        if (logBackup !== undefined) {
                          let checkLog2 = allReport?.filter(
                            (report) =>
                              report.$original.id_file === logBackup?.id_ldf &&
                              report.$original.line === lineNo &&
                              report.$original.detected_content === value
                          );
                          if (checkLog2?.length === 0) {
                            await LogReport.create({
                              detected_content: value,
                              line: lineNo,
                              id_file: logBackup.id_ldf,
                            });
                            lines.push(lineNo);
                          }
                        } else {
                          console.log("ERROR CHECK ", fileName);
                        }
                      } else if (checkLog?.length === 0) {
                        await LogReport.create({
                          detected_content: value,
                          line: lineNo,
                          id_file: log.id_ldf,
                        });
                        lines.push(lineNo);
                      }
                    }
                  });

                // Check the line for a special software version
                if (
                  checkSpecialVersion(line) !== "" &&
                  listExcludeErr.filter((err) => line.includes(err)).length ===
                    0
                ) {
                  let checkVersion = checkSpecialVersion(line);
                  let log = allFile?.filter(
                    (i) => i.$original.file_name === fileName
                  )[0];
                  if (!log?.id_ldf) {
                    console.log("ERROR CHECK ", fileName);
                  }
                  let checkLog = allReport?.filter(
                    (report) =>
                      report.$original.id_file === log?.id_ldf &&
                      report.$original.line === lineNo &&
                      report.$original.detected_content === checkVersion
                  );

                  if (log?.id_ldf && checkLog?.length === 0) {
                    await LogReport.create({
                      detected_content: checkVersion,
                      line: lineNo,
                      id_file: log.id_ldf,
                    });
                    lines.push(lineNo);
                  }
                }
              });
            }
          } else {
            contentFile.forEach(async (line, index) => {
              // Check the line against the watched values, skipping
              // excluded errors
              listKeyValues
                .map((obj) => obj.$original.value)
                .forEach(async (value) => {
                  if (
                    line.includes(value) &&
                    listExcludeErr.filter((err) => line.includes(err))
                      .length === 0
                  ) {
                    let log = allFile?.filter(
                      (i) => i.$original.file_name === fileName
                    )[0];
                    if (!log?.id_ldf) {
                      console.log("ERROR CHECK ", fileName);
                    }
                    let checkLog = allReport?.filter(
                      (report) =>
                        report.$original.id_file === log?.id_ldf &&
                        report.$original.line === index + 1 &&
                        report.$original.detected_content === value
                    );

                    if (log?.id_ldf && checkLog?.length === 0) {
                      await LogReport.create({
                        detected_content: value,
                        line: index + 1,
                        id_file: log.id_ldf,
                      });
                      lines.push(index + 1);
                    }
                  }
                });

              // Check the line for a special software version
              if (
                checkSpecialVersion(line) !== "" &&
                listExcludeErr.filter((err) => line.includes(err)).length === 0
              ) {
                let checkVersion = checkSpecialVersion(line);
                let log = allFile?.filter(
                  (i) => i.$original.file_name === fileName
                )[0];
                if (!log?.id_ldf) {
                  console.log("ERROR CHECK ", fileName);
                }
                let checkLog = allReport?.filter(
                  (report) =>
                    report.$original.id_file === log?.id_ldf &&
                    report.$original.line === index + 1 &&
                    report.$original.detected_content === checkVersion
                );

                if (log?.id_ldf && checkLog?.length === 0) {
                  await LogReport.create({
                    detected_content: checkVersion,
                    line: index + 1,
                    id_file: log.id_ldf,
                  });
                  lines.push(index + 1);
                }
              }
            });
          }

          // Report: new hits were imported into log_report above; give the
          // async line handlers a moment to settle, then notify Zulip
          setTimeout(async () => {
            if (lines.length === 0) {
              console.log(
                fileName + " has changed (" + contentFile.length + ") ---Good"
              );
            } else {
              console.log(
                fileName +
                  " has changed (" +
                  contentFile.length +
                  ") ---SOS---" +
                  lines.length
              );
              // Re-query so the rows created above are included
              let allReport_new = await LogReport.query().whereRaw(
                `DATE(created_at) = ?`,
                [today]
              );

              let fileDetect = allFile?.filter(
                (i) => i.$original.file_name === fileName
              )[0];

              let logsDetect = allReport_new?.filter(
                (i) => i.$original.id_file === fileDetect?.id_ldf
              );

              // Group the newest reports (past the last detected line)
              // by detected item
              let listReport = await getListLineByItem(
                logsDetect
                  .map((obj) => obj.$original)
                  .filter((l) => l.line > lastLine)
              );

              let content =
                "| |Last updated at | Item/error | Line | Report \n|---|:---:|:---|:---|:-----------:\n";
              let spoiler = "";
              let issueFound = "";
              let important = [
                "Vxx",
                "V00",
                "(CAT3K_CAA-UNIVERSALK9-M), Version",
              ];
              listReport.forEach((log, index) => {
                let item = listExtraItem.includes(log.detected_content)
                  ? ":medal: **" + log.detected_content + "**"
                  : ":small_orange_diamond: " + log.detected_content;

                // Quote each offending line, linking the detected content
                // to the log viewer
                log.line?.forEach((line) => {
                  issueFound =
                    issueFound +
                    "\n`" +
                    line +
                    "` " +
                    contentFile[line - 1]?.replace(
                      log.detected_content,
                      "[" +
                        log.detected_content +
                        "](https://logs.danielvu.com/logs/" +
                        fileName +
                        "#" +
                        line +
                        ")"
                    );
                });
                content =
                  content +
                  "|" +
                  (index + 1) +
                  "|**" +
                  moment(Date.now()).format("HH:mm - DD/MM") +
                  "**|" +
                  item +
                  "|" +
                  log.line +
                  "|[View](https://logs.danielvu.com/logs/" +
                  fileName +
                  "#" +
                  log.line +
                  ")\n";
              });

              // Hard-stop icon for important hits, warning otherwise
              let icon = "";
              if (
                checkSpecialVersion(content) !== "" ||
                important.filter((i) => content.includes(i)).length > 0
              ) {
                icon +=
                  "------------\n\n:no_entry: :no_entry:**" +
                  fileName +
                  "**:no_entry: :no_entry:";
              } else {
                icon +=
                  "------------\n\n:warning: :warning: **" + fileName + "**";
              }
              sendMessToZulip(
                "stream",
                Env.get("ZULIP_STREAM_ALERT"),
                Env.get("ZULIP_TOPIC_ALERT"),
                icon +
                  "\n\n" +
                  content +
                  "\n\n" +
                  spoiler +
                  "\n\n***Issue found:***\n" +
                  issueFound
              );
            }
          }, 3000);
        });
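
        // Illustrative only (values hypothetical): with one FAULTY hit on
        // lines 12 and 40, the Zulip body renders roughly as
        //
        // | |Last updated at | Item/error | Line | Report
        // |---|:---:|:---|:---|:-----------:
        // |1|**08:15 - 02/05**|:small_orange_diamond: FAULTY|12,40|[View](https://logs.danielvu.com/logs/<file>#12,40)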
watcher.on("error", (error) => {
|
|
console.error(`Watcher error: ${error}`);
|
|
});
|
|
} catch (error) {
|
|
console.log(error);
|
|
}
|
|
}
|
|
} catch (error) {
|
|
console.log(error);
|
|
}
|
|
}
|
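
// Usage sketch (the module path, preload file name, and env var below are
// assumptions, not part of this module): the watcher could be started from
// an AdonisJS preload file, e.g. start/watchLogs.ts:
//
//   import Env from "@ioc:Adonis/Core/Env";
//   import { runtimeCheckLogs } from "App/Utils/runtimeCheckLogs";
//
//   runtimeCheckLogs(Env.get("LOG_FOLDER_PATH"));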