Optimize code

nguentrungthat 2025-04-08 16:49:06 +07:00
parent 34bf3142f1
commit ca3efce708
14 changed files with 399 additions and 504 deletions

View File

@ -15,3 +15,5 @@ REDIS_CONNECTION=local
REDIS_HOST=127.0.0.1
REDIS_PORT=6379
REDIS_PASSWORD=
BASE_URL_LOG="http://172.16.5.7:8080"
FOLDER_LOGS="/logs"
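The two new keys point the service at the remote log host and the watched log folder. If the project validates environment variables in env.ts (the standard AdonisJS 5 layout; that file is not part of this diff, so this is an assumption), the matching rules would look roughly like this:

// env.ts: hypothetical addition mirroring the new .env keys
import Env from "@ioc:Adonis/Core/Env";

export default Env.rules({
  BASE_URL_LOG: Env.schema.string(),
  FOLDER_LOGS: Env.schema.string(),
});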

.gitignore
View File

@ -8,3 +8,4 @@ app/utils/indexSN.txt
app/store
tmp
.npm
logs

View File

@ -0,0 +1,9 @@
# LOG SERVICE
1. Copy `.env.example` to `.env`
2. `npm install`
3. `node ace migration:run`
4. `npm run dev`

View File

@ -1,6 +1,11 @@
import fs from "fs";
import type { HttpContextContract } from "@ioc:Adonis/Core/HttpContext";
import axios from "axios";
import Env from "@ioc:Adonis/Core/Env";
import path from "path";
const BASE_URL = Env.get("BASE_URL_LOG");
const BASE_URL_AUTO = `${BASE_URL}/AUTOlog/`;
export default class ErpsController {
/**
@ -17,10 +22,8 @@ export default class ErpsController {
try {
// console.log("check!")
const listLog: string[] = [];
const response = await axios.get("http://172.16.5.7:8080/");
const responseAUTO = await axios.get(
"http://172.16.5.7:8080/AUTOlog/"
);
const response = await axios.get(BASE_URL);
const responseAUTO = await axios.get(BASE_URL_AUTO);
let data1 = response.data
.split("\n")
@ -49,8 +52,8 @@ export default class ErpsController {
.slice(u.search("<a ") + 9, u.search("</a>"))
.split(">")[1]
.includes("AUTO")
? "http://172.16.5.7:8080/AUTOlog/"
: "http://172.16.5.7:8080/") +
? BASE_URL_AUTO
: BASE_URL + "/") +
u.slice(u.search("<a ") + 9, u.search("</a>")).split(">")[1] +
" "
);
@ -63,235 +66,156 @@ export default class ErpsController {
}
};
const fetchWithRetry = async (url) => {
let retries = 0;
const MAX_RETRIES = 10;
while (retries < MAX_RETRIES) {
try {
const response = await axios.get(url);
return response.data;
} catch (error) {
if (error.code) {
retries++;
continue;
} else {
console.error("Error fetching file:", error);
return null;
}
}
}
return null;
};
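// Note: in axios 0.x, `error.code` is normally set only for network-level
// failures (ETIMEDOUT, ECONNRESET, ...), so those are retried up to
// MAX_RETRIES; errors without a code are logged once and resolve to null.
// Newer axios releases also attach codes such as ERR_BAD_RESPONSE to HTTP
// errors, which this check would retry as well.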
const extractInfoFromLine = (line, index, fName) => {
const cleanedLine = line.replace(/\r/g, "");
const results: any = [];
const getWarehouse = (fileName) =>
/(US(?!A)|-US|\.US|US-)/.test(fileName) ? "US" : "AU";
const extractSN = (str) =>
str?.replace(/[!@#$%^&*()_+{}\[\]:;<>,.?~\\/]/g, "").trim();
// PID & SN
if (
cleanedLine.includes("PID:") &&
cleanedLine.includes("SN:") &&
!cleanedLine.includes("%")
) {
const parts = cleanedLine.split(",");
const SN = extractSN(
parts.find((p) => p.includes("SN:"))?.split(":")[1] ?? ""
);
const PID = extractSN(
parts.find((p) => p.includes("PID:"))?.split(":")[1] ?? ""
);
const VID = extractSN(
parts.find((p) => p.includes("VID:"))?.split(":")[1] ?? ""
);
if (SN && SN !== "N/A" && SN.length > 4) {
results.push({
PID,
VID,
SN,
line: [index + 1],
fileName: fName,
warehouse: getWarehouse(fName),
});
}
}
// Serial Number
if (cleanedLine.includes("Serial Number")) {
const PCB_SN = extractSN(cleanedLine.split(":")[1]);
if (PCB_SN) {
results.push({
PID: "",
VID: "",
SN: PCB_SN,
line: [index + 1],
fileName: fName,
warehouse: getWarehouse(fName),
});
}
}
// Processor board ID
if (cleanedLine.includes("Processor board ID")) {
const PBID = extractSN(cleanedLine.split(" ").pop());
if (PBID?.length >= 8) {
results.push({
PID: "",
VID: "",
SN: PBID,
line: [index + 1],
fileName: fName,
warehouse: getWarehouse(fName),
});
}
}
return results;
};
const mergeLines = (output, lineResult) => {
lineResult.forEach((entry) => {
const existing = output.find((o) => o.SN === entry.SN);
if (existing) {
existing.line = [...new Set([...existing.line, ...entry.line])];
if (entry.PID) existing.PID = entry.PID;
if (entry.VID) existing.VID = entry.VID;
} else {
output.push(entry);
}
});
};
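// Illustrative merge (made-up values): if `output` already holds
//   { SN: "FOC12345ABC", PID: "C9300-24T", VID: "V03", line: [4], ... }
// and `lineResult` contributes
//   { SN: "FOC12345ABC", PID: "", VID: "", line: [10], ... }
// the single entry is kept with line: [4, 10]; PID/VID are only
// overwritten when the incoming entry actually carries them.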
const fetchFiles = async (from, to) => {
try {
const urls = await getListLog(from, to);
let report = [];
if (!urls || urls.length === 0) {
console.log("No logs found");
return [];
}
const fileContents = await Promise.all(
urls.map(async (url) => {
const maxRetries = 10;
let retries = 0;
while (retries < maxRetries) {
try {
const response = await axios.get(url?.split(" ")[0]);
return response.data;
} catch (error) {
if (error.code !== "") {
//=== "ETIMEDOUT" || error.code === "ECONNRESET"
// console.log("Connection timed out. Retrying...");
retries++;
} else {
console.error("Error fetching file:", error);
return;
}
}
}
})
urls.map((u) => fetchWithRetry(u?.split(" ")[0]))
);
// Handle the file contents
let report: any = [];
fileContents.forEach((content, index) => {
console.log(`Content of file ${index + 1}:`);
const arrayLine = content?.split("\n");
let output = [];
if (arrayLine !== undefined) {
for (let i = 0; i < arrayLine.length; i++) {
let SN = arrayLine[i]
?.split("SN:")[1]
?.trim()
.replace(/[!@#$%^&*()_+{}\[\]:;<>,.?~\\/]/g, "");
if (
arrayLine[i].search("PID:") !== -1 &&
arrayLine[i].search("SN:") !== -1 &&
arrayLine[i].search("%") === -1 &&
arrayLine[i]
?.split(",")[2]
?.split(":")[1]
?.replace("\r", "")
.trim() !== "" &&
SN !== "N/A" && SN.length>4
) {
if (output.some((u) => u.SN === SN)) {
output.map((u, index) => {
if (u.SN === SN) {
output[index].PID =
arrayLine[i]?.split("VID:")[0] !== undefined
? arrayLine[i]
?.split("VID:")[0]
?.slice(
arrayLine[i]?.split("VID:")[0]?.search("PID")
)
?.split(":")[1]
?.split(",")[0]
?.trim()
: "";
(output[index].VID =
arrayLine[i]?.split("SN:")[0] !== undefined
? arrayLine[i]
?.split("SN:")[0]
?.split("VID:")[1]
?.split(",")[0]
?.trim()
: ""),
(output[index].line = output[index].line.concat([
i + 1,
]));
}
});
} else {
let fName = urls[index]
?.split("/")
[urls[index]?.split("/")?.length - 1]?.trim();
output.push({
PID:
arrayLine[i]?.split("VID:")[0] !== undefined
? arrayLine[i]
?.split("VID:")[0]
?.slice(
arrayLine[i]?.split("VID:")[0]?.search("PID")
)
?.split(":")[1]
?.split(",")[0]
?.trim()
: "",
VID:
arrayLine[i]?.split("SN:")[0] !== undefined
? arrayLine[i]
?.split("SN:")[0]
?.split("VID:")[1]
?.split(",")[0]
?.trim()
: "",
SN:
arrayLine[i].split("SN:")[1] !== undefined
? SN.search(" ") !== -1
? SN?.split(" ")[0]
: SN
: "",
line: [i + 1],
fileName: fName,
warehouse:
(fName.search("-US") !== -1 ||
fName.search(".US") !== -1 ||
fName.search("US-") !== -1) &&
fName.search("AUS") === -1
? "US"
: "AU",
});
}
}
const lines = content?.split("\n");
if (!lines) return;
if (arrayLine[i].search("Serial Number") !== -1) {
let PCB_SN = arrayLine[i]
?.split("Serial Number")[1]
.split(":")[1]
?.replace("\r", "")
.trim()
.replace(/[!@#$%^&*()_+{}\[\]:;<>,.?~\\/]/g, "");
if (
// If this SN is already in the output
output.some((u) => u.SN === PCB_SN)
) {
output.map((u, index) => {
if (u.SN === PCB_SN) {
output[index].line = output[index].line.concat([i + 1]);
}
});
} else {
// if (
// /^[A-Z0-9-]{5,}$/.test(
// PCB_SN
// )
// ) {
let fName = urls[index]
?.split("/")
[urls[index]?.split("/")?.length - 1]?.trim();
output.push({
PID: "",
VID: "",
SN:
PCB_SN?.search(" ") !== -1
? PCB_SN?.split(" ")[0]
: PCB_SN,
line: [i + 1],
fileName: fName,
warehouse:
(fName.search("-US") !== -1 ||
fName.search(".US") !== -1 ||
fName.search("US-") !== -1) &&
fName.search("AUS") === -1
? "US"
: "AU",
});
// }
}
}
const fName = path.basename(urls[index] ?? "").trim();
const output = [];
if (arrayLine[i].search("Processor board ID") !== -1) {
let PBID = arrayLine[i]
?.split(" ")
[arrayLine[i]?.split(" ").length - 1]?.replace("\r", "")
.trim()
.replace(/[!@#$%^&*()_+{}\[\]:;<>,.?~\\/]/g, "");
if (
// If this SN is already in the output
output.some((u) => u.SN === PBID)
) {
output.map((u, index) => {
if (u.SN === PBID) {
output[index].line = output[index].line.concat([i + 1]);
}
});
} else {
if (PBID?.length >= 8) {
let fName = urls[index]
?.split("/")
[urls[index]?.split("/")?.length - 1]?.trim();
output.push({
PID: "",
VID: "",
SN: PBID?.search(" ") !== -1 ? PBID?.split(" ")[0] : PBID,
line: [i + 1],
fileName: fName,
warehouse:
(fName.search("-US") !== -1 ||
fName.search(".US") !== -1 ||
fName.search("US-") !== -1) &&
fName.search("AUS") === -1
? "US"
: "AU",
});
}
}
}
}
report = report.concat(output);
}
lines.forEach((line, i) => {
const lineResult = extractInfoFromLine(line, i, fName);
mergeLines(output, lineResult);
});
report = [...report, ...output];
});
fs.writeFile(
const finalReport = report.filter(
(i) => i.SN && /^[A-Z0-9-]{5,}$/.test(i.SN) && i.PID
);
fs.writeFileSync(
"./app/utils/indexSN.txt",
JSON.stringify(report)
.replace(/,{/g, "\n,{")
.replace(/\\u0000/g, ""),
function (err) {
if (err) {
return console.error(err);
}
console.log("Write loggg !");
}
JSON.stringify(finalReport, null, 2).replace(/\\u0000/g, ""),
"utf-8"
);
// console.log(report);
return report.filter(
(i) => i.SN !== "" && /^[A-Z0-9-]{5,}$/.test(i.SN) === true && i.PID !==""
);
// }, 15000);
console.log("Write loggg !");
return finalReport;
} catch (error) {
response
.status(500)
.send({ mess: "GET INFORMATION FAIL", error: error });
console.error("GET INFORMATION FAIL", error);
throw error;
}
};
const result = await fetchFiles(from, to);
response.status(200).json(result);
}
@ -309,7 +233,7 @@ export default class ErpsController {
try {
let fName =
fileName.search("AUTO") !== -1 ? "AUTOlog/" + fileName : fileName;
const res = await axios.get("http://172.16.5.7:8080/" + fName);
const res = await axios.get(BASE_URL + "/" + fName);
const arrayLine = res?.data?.split("\n");
if (range >= line) {
@ -323,9 +247,7 @@ export default class ErpsController {
}
} catch (error) {
console.log(error);
response
.status(202)
.send({ mess: "FILE NOT FOUND", error: error });
response.status(202).send({ mess: "FILE NOT FOUND", error: error });
}
}
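
A quick sanity check of the new extractInfoFromLine helper (it is defined as a closure inside the handler and is shown standalone here; the PID/SN/file values are made up for illustration):

// Hypothetical show-inventory style line (not taken from a real log)
const sampleLine = "PID: C9300-24T , VID: V03 , SN: FOC12345ABC";
// extractInfoFromLine(sampleLine, 41, "20250408-US-rack1.log") returns:
// [{ PID: "C9300-24T", VID: "V03", SN: "FOC12345ABC",
//    line: [42], fileName: "20250408-US-rack1.log", warehouse: "US" }]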

View File

@ -1,162 +1,142 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext'
import Database from '@ioc:Adonis/Lucid/Database';
import KeyValue from 'App/Models/KeyValue';
import LogDetectFile from 'App/Models/LogDetectFile';
import axios from 'axios';
import type { HttpContextContract } from "@ioc:Adonis/Core/HttpContext";
import Database from "@ioc:Adonis/Lucid/Database";
import KeyValue from "App/Models/KeyValue";
import LogDetectFile from "App/Models/LogDetectFile";
import axios from "axios";
import Env from "@ioc:Adonis/Core/Env";
const BASE_URL = Env.get("BASE_URL_LOG");
// Utility function for version detection
const checkSpecialVersion = (paragraph: string): string => {
try {
const patterns = [
/\(CAT[1-9]K.*Version 16\.9\.[2-9]/,
/\(CAT[1-9]K.*Version 1[7-9]\.[0-9]\.[0-9]/,
/\(CAT[1-9]K.*Version [2-9][0-9]\.[0-9]\.[0-9]/,
];
for (const regex of patterns) {
const match = paragraph.match(regex);
if (match) return match[0];
}
return "";
} catch {
return "";
}
};
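// Example (illustrative): a banner line containing
//   "... (CAT9K_IOSXE), Version 16.9.4 ..."
// matches the first pattern, so the matched fragment
// "(CAT9K_IOSXE), Version 16.9.4" is returned; a line with
// "Version 16.9.1" (or a non-CAT platform) matches none and yields "".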
export default class LogsController {
public async showLog({ request, response }: HttpContextContract) {
try {
const checkSpecialVersion = (paragraph) => {
try {
const regex = /\(CAT[1-9]K.*Version 16\.9\.[2-9]/;
const regex1 =
/\(CAT[1-9]K.*Version 1[7-9]\.[0-9]\.[0-9]/;
const regex2 =
/\(CAT[1-9]K.*Version [2-9][0-9]\.[0-9]\.[0-9]/;
// Use the regular expression to find the match
const match = paragraph.match(regex);
const match1 = paragraph.match(regex1);
const match2 = paragraph.match(regex2);
if (match || match1 || match2) {
if (match) {
return match[0];
}
const fileName = request.params().name;
const fileDetect = await LogDetectFile.findBy("file_name", fileName);
if (match1) {
return match1[0];
}
if (!fileDetect) {
return response.status(203).send("FILE NOT FOUND");
}
if (match2) {
return match2[0];
}
} else {
return "";
}
} catch (error) {
console.log(error);
}
};
let fileDetect = await LogDetectFile.findBy(
"file_name",
request.params().name
);
let logsDetect = await Database.rawQuery(
"select * from log_reports where id_file = " + fileDetect?.id_ldf
const logsDetect = await Database.rawQuery(
"SELECT * FROM log_reports WHERE id_file = ?",
[fileDetect.id_ldf]
);
let modelSpecialDetected = [];
let issueSpecialDetected = [];
let listLine = logsDetect[0]
.map((obj) => obj.line)
.filter((value, index, self) => {
return self.indexOf(value) === index;
})
.sort((a, b) => a - b);
const content = await axios.get(
request.params().name.search("AUTO") !== -1
? "http://172.16.5.7:8080/AUTOlog/" + request.params().name
: "http://172.16.5.7:8080/" + request.params().name
);
let allValue = await KeyValue.all();
let listKeyValues = allValue.filter(
(i) =>
i.$original.key === "MODEL_SPECIAL" ||
i.$original.key === "CATCH_FAULTY"
const lines = [...new Set(logsDetect[0].map((obj) => obj.line))].sort(
(a, b) => a - b
);
let MODEL_SPECIAL = allValue
.filter((i) => i.$original.key === "MODEL_SPECIAL")
.map((obj) => obj.$original.value);
const logUrl = fileName.includes("AUTO")
? `${BASE_URL}/AUTOlog/${fileName}`
: `${BASE_URL}/${fileName}`;
let listExcludeErr = allValue
.filter((i) => i.$original.key === "EXCLUDE_ERR")
.map((obj) => obj.$original.value);
const content = await axios.get(logUrl);
const allKeyValues = await KeyValue.all();
let data = content.data.split("\n");
data.map(async (line, index) => {
data[index] = index + 1 + "|-|" + line;
const keyValueMap = allKeyValues.reduce((acc, { $original }) => {
acc[$original.key] = acc[$original.key] || [];
acc[$original.key].push($original.value);
return acc;
}, {} as Record<string, string[]>);
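// keyValueMap groups every key_values row by its key, e.g. (illustrative values):
//   { MODEL_SPECIAL: ["C9500"], CATCH_FAULTY: ["%PWR-2-FAN_FAIL"], EXCLUDE_ERR: ["%SYS-5-CONFIG_I"] }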
if (checkSpecialVersion(line) !== "") {
const specialVersion = checkSpecialVersion(line)
data[index] =
data[index].slice(0, data[index].indexOf(specialVersion))
+
const MODEL_SPECIAL = keyValueMap["MODEL_SPECIAL"] || [];
const CATCH_FAULTY = keyValueMap["CATCH_FAULTY"] || [];
const EXCLUDE_ERR = keyValueMap["EXCLUDE_ERR"] || [];
const rawData = content.data.split("\n");
const processedData: string[] = [];
const modelSpecialDetected: string[] = [];
const issueSpecialDetected: string[] = [];
for (let i = 0; i < rawData.length; i++) {
let line = rawData[i];
let numberedLine = `${i + 1}|-|${line}`;
const specialVersion = checkSpecialVersion(line);
if (specialVersion) {
const index = numberedLine.indexOf(specialVersion);
numberedLine =
numberedLine.slice(0, index) +
"|-|" +
specialVersion +
"|-|" +
data[index].slice(data[index].indexOf(specialVersion) + specialVersion.length);
numberedLine.slice(index + specialVersion.length);
}
listKeyValues
.map((obj) => obj.$original.value)
.map(async (value) => {
if (
line.indexOf(value) !== -1 &&
listExcludeErr.filter((err) => line.includes(err)).length === 0
) {
data[index] =
data[index].slice(0, data[index].indexOf(value)) +
"|-|" +
value +
"|-|" +
data[index].slice(data[index].indexOf(value) + value.length);
// }
}
});
});
listLine.map(async (u) => {
if (
listExcludeErr.filter((err) => data[u - 1].includes(err)).length === 0
) {
if (MODEL_SPECIAL.filter((i) => data[u - 1].includes(i)).length > 0) {
modelSpecialDetected.push(data[u - 1]);
} else {
if (checkSpecialVersion(data[u - 1]) !== "") {
modelSpecialDetected.push(data[u - 1]);
} else {
issueSpecialDetected.push(data[u - 1]);
}
for (const value of [...MODEL_SPECIAL, ...CATCH_FAULTY]) {
if (
line.includes(value) &&
!EXCLUDE_ERR.some((err) => line.includes(err))
) {
const index = numberedLine.indexOf(value);
numberedLine =
numberedLine.slice(0, index) +
"|-|" +
value +
"|-|" +
numberedLine.slice(index + value.length);
break;
}
}
});
let modelSpecial =
modelSpecialDetected.length > 0 ? modelSpecialDetected.join("\n") : "";
let issueItem =
issueSpecialDetected.length > 0 ? issueSpecialDetected.join("\n") : "";
processedData.push(numberedLine);
}
for (const lineIndex of lines) {
const line = processedData[lineIndex - 1];
if (EXCLUDE_ERR.some((err) => line.includes(err))) continue;
if (
MODEL_SPECIAL.some((model) => line.includes(model)) ||
checkSpecialVersion(line)
) {
modelSpecialDetected.push(line);
} else {
issueSpecialDetected.push(line);
}
}
response.status(200).send({
modelSpecial: modelSpecial,
issueItem: issueItem,
contentFile: data.join("\n"),
modelSpecial: modelSpecialDetected.join("\n"),
issueItem: issueSpecialDetected.join("\n"),
contentFile: processedData.join("\n"),
});
} catch (error) {
// console.error(error);
response.status(203).send("FILE NOT FOUND");
}
}
public async getAllLogDetect({ request, response }: HttpContextContract) {
public async getAllLogDetect({ response }: HttpContextContract) {
try {
let fileDetect = await LogDetectFile.all();
let listFiles = fileDetect.map((obj) => obj.file_name);
response.status(200).send(listFiles);
} catch (error) {
const files = await LogDetectFile.all();
const fileNames = files.map((file) => file.file_name);
response.status(200).send(fileNames);
} catch {
response.status(203).send("NO FILE");
}
}
public async store({ }: HttpContextContract) { }
public async show({ }: HttpContextContract) { }
public async edit({ }: HttpContextContract) { }
public async update({ }: HttpContextContract) { }
public async destroy({ }: HttpContextContract) { }
public async store({}: HttpContextContract) {}
public async show({}: HttpContextContract) {}
public async edit({}: HttpContextContract) {}
public async update({}: HttpContextContract) {}
public async destroy({}: HttpContextContract) {}
}

View File

@ -23,7 +23,9 @@ export default class UsersController {
const data = await request.validate({ schema: userSchema });
const user = await Users.create(data);
return response.status(201).send({ mess: "REGISTER_SUCCESS" });
return response
.status(201)
.send({ mess: "REGISTER_SUCCESS", data: user });
} else {
return response.status(203).send({ mess: "USERNAME EXIST" });
}

View File

@ -14,7 +14,7 @@ export default class ValuesController {
value: request.all().value,
model: "All",
});
response.status(200).send("ADD VALUE SUCCESS!");
response.status(200).send({ mess: "ADD VALUE SUCCESS!", data: value });
} catch (error) {
response.status(500).send("ADD VALUE FAIL!");
}
@ -29,7 +29,7 @@ export default class ValuesController {
const value = await KeyValue.findOrFail(request.all().id);
value.value = request.all().value;
await value.save();
response.status(200).send("EDIT VALUE SUCCESS!");
response.status(200).send({ mess: "EDIT VALUE SUCCESS!", data: value });
} catch (error) {
response.status(500).send("EDIT VALUE FAIL!");
}

View File

@ -11,7 +11,7 @@ import { getListLineByItem } from "./getListLineByItem";
import { sendMessToZulip } from "./sendMessToZulip";
export async function runtimeCheckLogs(folderPath) {
try {
let cacheFile = []
let cacheFile: any = [];
let fileList = Array();
let fileList_old = Array();
// Function to update the list of files
@ -34,34 +34,20 @@ export async function runtimeCheckLogs(folderPath) {
});
}
const checkSpecialVersion = (paragraph) => {
const checkSpecialVersion = (paragraph: string): string => {
try {
const regex = /\(CAT[1-9]K.*Version 16\.9\.[2-9]/;
const regex1 =
/\(CAT[1-9]K.*Version 1[7-9]\.[0-9]\.[0-9]/;
const regex2 =
/\(CAT[1-9]K.*Version [2-9][0-9]\.[0-9]\.[0-9]/;
// Use the regular expression to find the match
const match = paragraph.match(regex);
const match1 = paragraph.match(regex1);
const match2 = paragraph.match(regex2);
if (match || match1 || match2) {
if (match) {
return match[0];
}
if (match1) {
return match1[0];
}
if (match2) {
return match2[0];
}
} else {
return "";
const patterns = [
/\(CAT[1-9]K.*Version 16\.9\.[2-9]/,
/\(CAT[1-9]K.*Version 1[7-9]\.[0-9]\.[0-9]/,
/\(CAT[1-9]K.*Version [2-9][0-9]\.[0-9]\.[0-9]/,
];
for (const regex of patterns) {
const match = paragraph.match(regex);
if (match) return match[0];
}
} catch (error) {
console.log(error);
return "";
} catch {
return "";
}
};
// Watch the folder for new files
@ -73,9 +59,9 @@ export async function runtimeCheckLogs(folderPath) {
if (
filePath?.split(".")[filePath.split(".").length - 1] === "log" &&
filePath.split("/")[filePath.split("/").length - 1]?.split("-")[0] ===
//localhost
// filePath.split("\\")[filePath.split("\\").length - 1]?.split("-")[0] ===
moment(Date.now()).format("YYYYMMDD").toString()
//localhost
// filePath.split("\\")[filePath.split("\\").length - 1]?.split("-")[0] ===
moment(Date.now()).format("YYYYMMDD").toString()
) {
//add information file to database
console.log("New file added: ", filePath);
@ -102,7 +88,9 @@ export async function runtimeCheckLogs(folderPath) {
try {
//only check new file ---> fileList - fileList_old = new file
let listFileWatch = fileList
?.filter((i) => fileList_old.includes(i) === false && i.includes(".log"))
?.filter(
(i) => fileList_old.includes(i) === false && i.includes(".log")
)
.map((file) => folderPath + "/" + file);
const watcher = chokidar.watch(listFileWatch, {
persistent: true,
@ -119,7 +107,7 @@ export async function runtimeCheckLogs(folderPath) {
const fileName = path.split("/")[path.split("/").length - 1];
// const fileName = path.split("\\")[path.split("\\").length - 1];
const filePath = path;
let lines = [];
let lines: any = [];
const today = DateTime.now().toFormat("yyyy-MM-dd");
let allFile = await LogDetectFile.query().whereRaw(
`DATE(created_at) = ?`,
@ -132,7 +120,11 @@ export async function runtimeCheckLogs(folderPath) {
[today]
);
cacheFile = cacheFile.concat(allFile.filter(obj2 => !cacheFile.some(obj1 => obj1.id_ldf === obj2.id_ldf)));
cacheFile = cacheFile.concat(
allFile.filter(
(obj2) => !cacheFile.some((obj1) => obj1.id_ldf === obj2.id_ldf)
)
);
//get information file
let fileDetect = allFile?.filter(
@ -198,13 +190,10 @@ export async function runtimeCheckLogs(folderPath) {
report.$original.detected_content === value
);
if (
log?.id_ldf === "" ||
log?.id_ldf === null ||
log?.id_ldf === undefined
) {
let logBackup = cacheFile.filter((i) => i.file_name === fileName)[0]
if (!log?.id_ldf) {
let logBackup = cacheFile.filter(
(i) => i.file_name === fileName
)[0];
if (logBackup !== undefined) {
let checkLog2 = allReport?.filter(
(report) =>
@ -221,7 +210,7 @@ export async function runtimeCheckLogs(folderPath) {
lines.push(index + 1);
}
} else {
console.log("ERROR CHECK ", fileName)
console.log("ERROR CHECK ", fileName);
}
}
if (checkLog?.length === 0) {
@ -233,24 +222,19 @@ export async function runtimeCheckLogs(folderPath) {
lines.push(index + 1);
}
}
});
if (
checkSpecialVersion(line) !== "" &&
listExcludeErr.filter((err) => line.includes(err)).length ===
0
0
) {
let checkVersion = checkSpecialVersion(line);
let log = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
if (
log?.id_ldf === "" ||
log?.id_ldf === null ||
log?.id_ldf === undefined
) {
console.log("ERROR CHECK ", fileName)
if (!log?.id_ldf) {
console.log("ERROR CHECK ", fileName);
}
let checkLog = allReport?.filter(
(report) =>
@ -276,7 +260,6 @@ export async function runtimeCheckLogs(folderPath) {
listKeyValues
.map((obj) => obj.$original.value)
.map(async (value) => {
if (
line.includes(value) &&
listExcludeErr.filter((err) => line.includes(err))
@ -285,12 +268,8 @@ export async function runtimeCheckLogs(folderPath) {
let log = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
if (
log?.id_ldf === "" ||
log?.id_ldf === null ||
log?.id_ldf === undefined
) {
console.log("ERROR CHECK ", fileName)
if (!log?.id_ldf) {
console.log("ERROR CHECK ", fileName);
}
let checkLog = allReport?.filter(
(report) =>
@ -320,12 +299,8 @@ export async function runtimeCheckLogs(folderPath) {
let log = allFile?.filter(
(i) => i.$original.file_name === fileName
)[0];
if (
log?.id_ldf === "" ||
log?.id_ldf === null ||
log?.id_ldf === undefined
) {
console.log("ERROR CHECK ", fileName)
if (!log?.id_ldf) {
console.log("ERROR CHECK ", fileName);
}
let checkLog = allReport?.filter(
(report) =>
@ -355,10 +330,10 @@ export async function runtimeCheckLogs(folderPath) {
} else {
console.log(
fileName +
"has changed(" +
contentFile.length +
") ---SOS---" +
lines.length
"has changed(" +
contentFile.length +
") ---SOS---" +
lines.length
);
let allReport_new = await LogReport.query().whereRaw(
`DATE(created_at) = ?`,
@ -403,12 +378,12 @@ export async function runtimeCheckLogs(folderPath) {
contentFile[line - 1]?.replace(
log.detected_content,
"[" +
log.detected_content +
"](https://logs.danielvu.com/logs/" +
fileName +
"#" +
line +
")"
log.detected_content +
"](https://logs.danielvu.com/logs/" +
fileName +
"#" +
line +
")"
);
});
content =
@ -428,25 +403,30 @@ export async function runtimeCheckLogs(folderPath) {
")\n";
});
let icon = ""
if (checkSpecialVersion(content) !== "" || important.filter((i) => content.includes(i)).length > 0) {
icon += "------------\n\n:no_entry: :no_entry:**" +
let icon = "";
if (
checkSpecialVersion(content) !== "" ||
important.filter((i) => content.includes(i)).length > 0
) {
icon +=
"------------\n\n:no_entry: :no_entry:**" +
fileName +
"**:no_entry: :no_entry:"
"**:no_entry: :no_entry:";
} else {
icon += "------------\n\n:warning: :warning: **" + fileName + "**";
icon +=
"------------\n\n:warning: :warning: **" + fileName + "**";
}
sendMessToZulip(
"stream",
Env.get("ZULIP_STREAM_ALERT"),
Env.get("ZULIP_TOPIC_ALERT"),
icon +
"\n\n" +
content +
"\n\n" +
spoiler +
"\n\n***Issue found:***\n" +
issueFound
"\n\n" +
content +
"\n\n" +
spoiler +
"\n\n***Issue found:***\n" +
issueFound
);
}
}, 3000);

View File

@ -20,8 +20,8 @@ export const sendDeviceInfora = async () => {
.filter((i) => i.$attributes.key === "MEMORY_DEFAULT")
.map((obj) => obj.$attributes.value);
console.log(memDefault)
let listInformation = [];
console.log(memDefault);
let listInformation: any = [];
let dataFile = await LogDetectFile.all();
let html = "";
@ -195,36 +195,38 @@ export const sendDeviceInfora = async () => {
(line) =>
line.includes("bytes of memory") ||
line.includes("bytes of physical memory")
)
const mathches = stringMem[0]?.match(regexSPE)
);
const mathches = stringMem[0]?.match(regexSPE);
if(mathches && stringMem[0]?.includes("CHASSIS")){
console.log(mathches)
memDefaultForPID =
if (mathches && stringMem[0]?.includes("CHASSIS")) {
console.log(mathches);
memDefaultForPID =
memDefault.filter((i) =>
mathches[0].trim().includes(i.split(":")[0])
mathches[0].trim().includes(i.split(":")[0])
)[0] !== undefined
? memDefault.filter((i) =>
mathches[0].trim().includes(i.split(":")[0])
mathches[0].trim().includes(i.split(":")[0])
)[0]
: mathches[0].trim() + ":N/A:N/A";
PID = mathches[0].trim();
SN = showInventoryContent.filter((i)=>i.includes(mathches[0].trim()))[0]?.split("SN:")[1]?.replace(/,/g, "").trim();
VID = showInventoryContent.filter((i)=>i.includes(mathches[0].trim()))[0]?.split("VID:")[1]
PID = mathches[0].trim();
SN = showInventoryContent
.filter((i) => i.includes(mathches[0].trim()))[0]
?.split("SN:")[1]
?.replace(/,/g, "")
.trim();
VID = showInventoryContent
.filter((i) => i.includes(mathches[0].trim()))[0]
?.split("VID:")[1]
?.split("SN:")[0]
?.replace(/,/g, "")
.trim();
}
}
let RAM =
stringMem
.join("<br>")
.match(regexMemory) !== null
stringMem.join("<br>").match(regexMemory) !== null
? (
parseInt(
stringMem
.join("<br>")
.match(regexMemory)[0]
stringMem.join("<br>").match(regexMemory)[0]
) /
1024 /
1024
@ -312,8 +314,10 @@ export const sendDeviceInfora = async () => {
let specialModel = allKeyValue
.filter((i) => i.key === "MODEL_SPECIAL")
.map((obj) => obj.$attributes.value);
let pattern = /[\x00-\x20\x7F]/g;
listInformation = listInformation.filter((i)=>i.PID.match(pattern)===null)
let pattern = /[\x00-\x20\x7F]/g;
listInformation = listInformation.filter(
(i) => i.PID.match(pattern) === null
);
listInformation = listInformation.filter(
(i) =>
i.RAM !== "" ||

View File

@ -9,8 +9,7 @@ export default class extends BaseSchema {
table.string("username", 50).notNullable();
table.string("password", 300).notNullable();
table.timestamp("created_at", { useTz: true });
table.timestamp("updated_at", { useTz: true });
table.timestamps(true, true);
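// Knex's timestamps(true, true) creates created_at / updated_at as
// timestamp columns that default to the current time, replacing the two
// explicit table.timestamp(...) calls above.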
});
}

View File

@ -1,23 +1,22 @@
import BaseSchema from '@ioc:Adonis/Lucid/Schema'
import BaseSchema from "@ioc:Adonis/Lucid/Schema";
export default class extends BaseSchema {
protected tableName = 'key_values'
protected tableName = "key_values";
public async up () {
public async up() {
this.schema.createTable(this.tableName, (table) => {
table.increments('id_key').primary()
table.string("key", 100).notNullable()
table.string("value", 200).notNullable()
table.string("model", 30).notNullable().defaultTo("All")
table.increments("id_key").primary();
table.string("key", 100).notNullable();
table.string("value", 200).notNullable();
table.string("model", 30).notNullable().defaultTo("All");
/**
* Uses timestamptz for PostgreSQL and DATETIME2 for MSSQL
*/
table.timestamp('created_at', { useTz: true })
table.timestamp('updated_at', { useTz: true })
})
table.timestamps(true, true);
});
}
public async down () {
this.schema.dropTable(this.tableName)
public async down() {
this.schema.dropTable(this.tableName);
}
}
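
The controllers above read rows from this table by key (MODEL_SPECIAL, CATCH_FAULTY, EXCLUDE_ERR). A minimal seeder sketch with illustrative values, assuming the KeyValue Lucid model maps onto this schema:

import BaseSeeder from "@ioc:Adonis/Lucid/Seeder";
import KeyValue from "App/Models/KeyValue";

export default class KeyValueSeeder extends BaseSeeder {
  public async run() {
    // Illustrative patterns only; the real values live in the database
    await KeyValue.createMany([
      { key: "MODEL_SPECIAL", value: "C9500", model: "All" },
      { key: "CATCH_FAULTY", value: "%PWR-2-FAN_FAIL", model: "All" },
      { key: "EXCLUDE_ERR", value: "%SYS-5-CONFIG_I", model: "All" },
    ]);
  }
}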

View File

@ -1,21 +1,20 @@
import BaseSchema from '@ioc:Adonis/Lucid/Schema'
import BaseSchema from "@ioc:Adonis/Lucid/Schema";
export default class extends BaseSchema {
protected tableName = 'log_detect_files'
protected tableName = "log_detect_files";
public async up () {
public async up() {
this.schema.createTable(this.tableName, (table) => {
table.increments('id_ldf').primary()
table.string("file_name", 100).notNullable()
table.increments("id_ldf").primary();
table.string("file_name", 100).notNullable();
/**
* Uses timestamptz for PostgreSQL and DATETIME2 for MSSQL
*/
table.timestamp('created_at', { useTz: true })
table.timestamp('updated_at', { useTz: true })
})
table.timestamps(true, true);
});
}
public async down () {
this.schema.dropTable(this.tableName)
public async down() {
this.schema.dropTable(this.tableName);
}
}

View File

@ -1,24 +1,23 @@
import BaseSchema from '@ioc:Adonis/Lucid/Schema'
import BaseSchema from "@ioc:Adonis/Lucid/Schema";
export default class extends BaseSchema {
protected tableName = 'log_reports'
protected tableName = "log_reports";
public async up () {
public async up() {
this.schema.createTable(this.tableName, (table) => {
table.increments('id_report').primary()
table.string("detected_content", 200).notNullable()
table.integer("line", 6).notNullable()
table.integer("id_file").notNullable()
table.increments("id_report").primary();
table.string("detected_content", 200).notNullable();
table.integer("line", 6).notNullable();
table.integer("id_file").notNullable();
/**
* Uses timestamptz for PostgreSQL and DATETIME2 for MSSQL
*/
table.timestamp('created_at', { useTz: true })
table.timestamp('updated_at', { useTz: true })
})
table.timestamps(true, true);
});
}
public async down () {
this.schema.dropTable(this.tableName)
public async down() {
this.schema.dropTable(this.tableName);
}
}

View File

@ -1,21 +1,20 @@
import BaseSchema from '@ioc:Adonis/Lucid/Schema'
import BaseSchema from "@ioc:Adonis/Lucid/Schema";
export default class extends BaseSchema {
protected tableName = 'info_devices'
protected tableName = "info_devices";
public async up () {
public async up() {
this.schema.createTable(this.tableName, (table) => {
table.increments('id')
table.increments("id");
/**
* Uses timestamptz for PostgreSQL and DATETIME2 for MSSQL
*/
table.timestamp('created_at', { useTz: true })
table.timestamp('updated_at', { useTz: true })
})
table.timestamps(true, true);
});
}
public async down () {
this.schema.dropTable(this.tableName)
public async down() {
this.schema.dropTable(this.tableName);
}
}