diff --git a/package.json b/package.json index baa0129..d2502e7 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,8 @@ }, "type": "module", "scripts": { - "start": "node src/main.js" + "start": "node src/main.js", + "generate-logs": "node scripts/generate-logs.js --output src/commands/logs.txt --lines 500000" }, "repository": { "type": "git", diff --git a/scripts/generate-logs.js b/scripts/generate-logs.js new file mode 100644 index 0000000..2a3f5df --- /dev/null +++ b/scripts/generate-logs.js @@ -0,0 +1,99 @@ +#!/usr/bin/env node +"use strict"; +import fs from "fs"; +import path from "path"; +import {parseArgs} from "util"; + +const {values} = parseArgs({ + options: { + output: {type: "string"}, + lines: {type: "string", default: "100000"}, + seed: {type: "string", default: "123456"}, + }, +}); + +const output = values.output; +const lines = Number(values.lines); +const seed = Number(values.seed); + +if (!output || !Number.isFinite(lines) || lines <= 0) { + process.stderr.write( + "Usage: node scripts/generate-logs.js --output --lines [--seed ]\n", + ); + process.exit(1); +} + +const levels = ["INFO", "WARN", "ERROR"]; +const services = [ + "user-service", + "order-service", + "payment-service", + "search-service", + "email-service", +]; +const methods = ["GET", "POST", "PUT", "DELETE"]; +const paths = [ + "/api/users", + "/api/users/:id", + "/api/orders", + "/api/orders/:id", + "/api/payments", + "/api/search", + "/api/login", + "/api/logout", + "/api/health", +]; + +let state = seed >>> 0; +const rand = () => { + // LCG: deterministic pseudo-random generator + state = (1664525 * state + 1013904223) >>> 0; + return state / 0xffffffff; +}; + +const pick = (arr) => arr[Math.floor(rand() * arr.length)]; + +const start = Date.parse("2026-01-01T00:00:00.000Z"); +let current = start; + +const outPath = path.resolve(process.cwd(), output); +fs.mkdirSync(path.dirname(outPath), {recursive: true}); + +const stream = fs.createWriteStream(outPath, {encoding: "utf8"}); 
+ +let written = 0; +const writeBatch = () => { + let ok = true; + while (written < lines && ok) { + const dt = Math.floor(rand() * 5000); // up to 5s + current += dt; + const iso = new Date(current).toISOString(); + const level = pick(levels); + const service = pick(services); + const method = pick(methods); + const pathVal = pick(paths); + const statusBase = level === "ERROR" ? 500 : level === "WARN" ? 400 : 200; + const status = statusBase + Math.floor(rand() * 50); + const responseTime = 5 + Math.floor(rand() * 2000); + const line = `${iso} ${level} ${service} ${status} ${responseTime} ${method} ${pathVal}\n`; + ok = stream.write(line); + written += 1; + } + + if (written < lines) { + stream.once("drain", writeBatch); + } else { + stream.end(); + } +}; + +stream.on("finish", () => { + process.stdout.write(`Generated ${lines} lines at ${outPath}\n`); +}); + +stream.on("error", (err) => { + process.stderr.write(`Failed to write logs: ${err.message}\n`); + process.exit(1); +}); + +writeBatch(); diff --git a/src/commands/count.js b/src/commands/count.js new file mode 100644 index 0000000..2286a60 --- /dev/null +++ b/src/commands/count.js @@ -0,0 +1,39 @@ +import fs from "fs"; +import path from "path"; +import readline from "readline"; + +export const countFile = async (inputPath) => { + try { + const resolvedInput = path.resolve(process.cwd(), inputPath); + + if (!fs.existsSync(resolvedInput)) { + console.log("Operation failed: input file does not exist"); + return; + } + + let lines = 0; + let words = 0; + let characters = 0; + + const readStream = fs.createReadStream(resolvedInput, "utf-8"); + + const rl = readline.createInterface({ + input: readStream, + crlfDelay: Infinity, + }); + + rl.on("line", (line) => { + lines += 1; + characters += line.length + 1; + words += line.trim().split(/\s+/).filter(Boolean).length; + }); + + await new Promise((resolve) => rl.on("close", resolve)); + + console.log(`Lines: ${lines}`); + console.log(`Words: ${words}`); + 
console.log(`Characters: ${characters}`); + } catch (err) { + console.log("Operation failed:", err.message); + } +}; diff --git a/src/commands/csvToJson.js b/src/commands/csvToJson.js new file mode 100644 index 0000000..a9858a2 --- /dev/null +++ b/src/commands/csvToJson.js @@ -0,0 +1,75 @@ +import fs from "fs"; +import path from "path"; +import {Transform} from "stream"; +import {pipeline} from "stream/promises"; + +export const csvToJson = async (inputPath, outputPath) => { + try { + const resolvedInput = path.resolve(process.cwd(), inputPath); + const resolvedOutput = path.resolve(process.cwd(), outputPath); + + if (!fs.existsSync(resolvedInput)) { + console.log("Operation failed"); + return; + } + + fs.closeSync(fs.openSync(resolvedOutput, "a")); + + const readStream = fs.createReadStream(resolvedInput, "utf-8"); + const writeStream = fs.createWriteStream(resolvedOutput); + + let headers = []; + let isFirstLine = true; + let isFirstObject = true; + // A CSV row can be split across stream chunks; keep the partial tail here. + let remainder = ""; + + const rowsToJson = (inputLines) => { + let output = ""; + + for (const line of inputLines) { + const values = line.split(","); + + if (isFirstLine) { + headers = values; + isFirstLine = false; + output += "["; + continue; + } + + const outPutEntity = {}; + headers.forEach((h, i) => { + outPutEntity[h.trim()] = values[i]?.trim(); + }); + + if (!isFirstObject) output += ","; + output += JSON.stringify(outPutEntity); + isFirstObject = false; + } + + return output; + }; + + const transform = new Transform({ + readableObjectMode: false, + writableObjectMode: false, + + transform(chunk, _, callback) { + remainder += chunk.toString(); + const rows = remainder.split("\n"); + remainder = rows.pop(); + callback(null, rowsToJson(rows.filter(Boolean))); + }, + + flush(callback) { + // Emit any final unterminated row (files often lack a trailing newline), then close the array. + const tail = remainder.trim() ? rowsToJson([remainder]) : ""; + callback(null, isFirstLine ? "[]" : tail + "]"); + }, + }); + + await pipeline(readStream, transform, writeStream); + } catch { + console.log("Operation failed"); + } +}; diff --git a/src/commands/data.csv b/src/commands/data.csv new file mode 100644 index 0000000..9a8b8a3 --- /dev/null +++ b/src/commands/data.csv @@ -0,0 +1,5 @@ +name,age,city +Alice,30,New York +Bob,25,London +Bob,32,Paris 
+Mark,27,Munich \ No newline at end of file diff --git a/src/commands/decrypt.js b/src/commands/decrypt.js new file mode 100644 index 0000000..ab0a04b --- /dev/null +++ b/src/commands/decrypt.js @@ -0,0 +1,65 @@ +import fs from "fs"; +import path from "path"; +import crypto from "crypto"; +import {pipeline} from "stream"; + +const HEADER_SIZE = 28; +const AUTH_TAG_SIZE = 16; +const TOTAL_BUFFER_SIZE = HEADER_SIZE + AUTH_TAG_SIZE; + +export const decrypt = (input, output, password) => { + try { + if (!input || !output || !password) { + console.log("Operation failed: Missing required parameters"); + return; + } + + const inputPath = path.resolve(process.cwd(), input); + const outputPath = path.resolve(process.cwd(), output); + + if (!fs.existsSync(inputPath)) { + console.log("Operation failed: Input file not found"); + return; + } + + const stat = fs.statSync(inputPath); + + if (stat.size < TOTAL_BUFFER_SIZE) { + console.log("Operation failed: File is too small to be valid"); + return; + } + + const fd = fs.openSync(inputPath, "r"); + + const header = Buffer.alloc(HEADER_SIZE); + fs.readSync(fd, header, 0, HEADER_SIZE, 0); + + const salt = header.subarray(0, AUTH_TAG_SIZE); + const iv = header.subarray(AUTH_TAG_SIZE, HEADER_SIZE); + + const authTag = Buffer.alloc(AUTH_TAG_SIZE); + fs.readSync(fd, authTag, 0, AUTH_TAG_SIZE, stat.size - AUTH_TAG_SIZE); + + fs.closeSync(fd); + + const key = crypto.pbkdf2Sync(password, salt, 100000, 32, "sha256"); + + const decipher = crypto.createDecipheriv("aes-256-gcm", key, iv); + decipher.setAuthTag(authTag); + + const inputStream = fs.createReadStream(inputPath, { + start: HEADER_SIZE, + end: stat.size - AUTH_TAG_SIZE - 1, + }); + + const outputStream = fs.createWriteStream(outputPath); + + pipeline(inputStream, decipher, outputStream, (err) => { + if (err) { + console.log("Operation failed"); + } + }); + } catch { + console.log("Operation failed"); + } +}; diff --git a/src/commands/encrypt.js b/src/commands/encrypt.js new file 
mode 100644 index 0000000..43679ee --- /dev/null +++ b/src/commands/encrypt.js @@ -0,0 +1,49 @@ +import fs from "fs"; +import path from "path"; +import crypto from "crypto"; +import {pipeline} from "stream"; + +export const encrypt = (input, output, password) => { + try { + if (!input || !output || !password) { + console.log("Operation failed: Missing required parameters"); + return; + } + + const inputPath = path.resolve(process.cwd(), input); + const outputPath = path.resolve(process.cwd(), output); + + if (!fs.existsSync(inputPath)) { + console.log("Operation failed: Input file not found"); + return; + } + + fs.closeSync(fs.openSync(outputPath, "a")); + + const salt = crypto.randomBytes(16); + const iv = crypto.randomBytes(12); + const key = crypto.pbkdf2Sync(password, salt, 100000, 32, "sha256"); + const cipher = crypto.createCipheriv("aes-256-gcm", key, iv); + + const inputStream = fs.createReadStream(inputPath); + const outputStream = fs.createWriteStream(outputPath); + + outputStream.write(Buffer.concat([salt, iv])); + + pipeline(inputStream, cipher, outputStream, (err) => { + if (err) { + console.log("Operation failed"); + return; + } + + const authTag = cipher.getAuthTag(); + fs.appendFile(outputPath, authTag, (err) => { + if (err) { + console.log("Operation failed"); + } + }); + }); + } catch { + console.log("Operation failed: An error occurred during encryption"); + } +}; diff --git a/src/commands/file.txt b/src/commands/file.txt new file mode 100644 index 0000000..9313ed5 --- /dev/null +++ b/src/commands/file.txt @@ -0,0 +1,18 @@ +Random text and words: +apple +banana +cloud +river +mountain +sky +computer +keyboard +mouse +window +door +light +shadow +book +pen +paper +pineapple diff --git a/src/commands/hash.js b/src/commands/hash.js new file mode 100644 index 0000000..d37ce0d --- /dev/null +++ b/src/commands/hash.js @@ -0,0 +1,50 @@ +import fs from "fs"; +import path from "path"; +import crypto from "crypto"; + +const SUPPORTED_ALGORITHMS = 
["sha256", "md5", "sha512"]; + +export const hashFile = ({input, algorithm = "sha256", save = false}) => { + if (!input) { + console.log("Operation failed: Missing input file"); + return; + } + + if (!SUPPORTED_ALGORITHMS.includes(algorithm)) { + console.log("Operation failed: Unsupported algorithm"); + return; + } + + const resolvedPath = path.resolve(process.cwd(), input); + + if (!fs.existsSync(resolvedPath)) { + console.log("Operation failed: Input file does not exist"); + return; + } + + try { + const hash = crypto.createHash(algorithm); + const stream = fs.createReadStream(resolvedPath); + + stream.on("data", (chunk) => hash.update(chunk)); + + stream.on("end", () => { + const digest = hash.digest("hex"); + console.log(`${algorithm}: ${digest}`); + + if (save) { + const outputFile = `${resolvedPath}.${algorithm}`; + try { + fs.writeFileSync(outputFile, digest); + console.log(`Saved hash to ${outputFile}`); + } catch { + console.log("Operation failed"); + } + } + }); + + stream.on("error", () => console.log("Operation failed")); + } catch { + console.log("Operation failed"); + } +}; diff --git a/src/commands/hashCompare.js b/src/commands/hashCompare.js new file mode 100644 index 0000000..38f7dcc --- /dev/null +++ b/src/commands/hashCompare.js @@ -0,0 +1,50 @@ +import fs from "fs"; +import path from "path"; +import crypto from "crypto"; + +const SUPPORTED_ALGORITHMS = ["sha256", "md5", "sha512"]; + +export const hashCompare = (input, hashProvided, algorithm = "sha256") => { + try { + if (!input || !hashProvided) { + console.log("Operation failed"); + return; + } + + if (!SUPPORTED_ALGORITHMS.includes(algorithm)) { + console.log("Operation failed"); + return; + } + + const inputPath = path.resolve(process.cwd(), input); + const hashPath = path.resolve(process.cwd(), hashProvided); + + if (!fs.existsSync(inputPath) || !fs.existsSync(hashPath)) { + console.log("Operation failed"); + return; + } + + const expectedHash =
fs.readFileSync(hashPath, "utf8").trim().toLowerCase(); + + const hash = crypto.createHash(algorithm); + const stream = fs.createReadStream(inputPath); + + stream.on("data", (chunk) => hash.update(chunk)); + + stream.on("end", () => { + const calculatedHash = hash.digest("hex").toLowerCase(); + + if (calculatedHash === expectedHash) { + console.log("OK"); + } else { + console.log("MISMATCH"); + } + }); + + stream.on("error", () => { + console.log("Operation failed"); + }); + } catch { + console.log("Operation failed"); + } +}; diff --git a/src/commands/jsonToCsv.js b/src/commands/jsonToCsv.js new file mode 100644 index 0000000..6bedc55 --- /dev/null +++ b/src/commands/jsonToCsv.js @@ -0,0 +1,60 @@ +import fs from "fs"; +import path from "path"; +import {Transform} from "stream"; +import {pipeline} from "stream/promises"; + +export const jsonToCsv = async (inputPath, outputPath) => { + try { + const resolvedInput = path.resolve(process.cwd(), inputPath); + const resolvedOutput = path.resolve(process.cwd(), outputPath); + + if (!fs.existsSync(resolvedInput)) { + console.log("Operation failed: input file does not exist"); + return; + } + + fs.closeSync(fs.openSync(resolvedOutput, "a")); + + const readStream = fs.createReadStream(resolvedInput, "utf-8"); + const writeStream = fs.createWriteStream(resolvedOutput); + + let headersWritten = false; + let headers = []; + let leftData = ""; + + const transform = new Transform({ + readableObjectMode: false, + writableObjectMode: false, + + transform(chunk, _, callback) { + leftData += chunk.toString(); + + try { + const jsonArray = JSON.parse(leftData); + + if (!headersWritten && jsonArray.length > 0) { + headers = Object.keys(jsonArray[0]); + writeStream.write(headers.join(",") + "\n"); + headersWritten = true; + } + + for (const obj of jsonArray) { + const line = headers.map((header) => obj[header] ??
"").join(","); + writeStream.write(line + "\n"); + } + + leftData = ""; + callback(); + } catch (err) { + callback(); + } + }, + }); + + await pipeline(readStream, transform, writeStream); + + console.log(`CSV saved to ${resolvedOutput}`); + } catch (err) { + console.log("Operation failed:", err.message); + } +}; diff --git a/src/commands/logStats.js b/src/commands/logStats.js new file mode 100644 index 0000000..7319ba2 --- /dev/null +++ b/src/commands/logStats.js @@ -0,0 +1,108 @@ +import fs from "fs"; +import path from "path"; +import os from "os"; +import {Worker} from "worker_threads"; + +export const logStats = (input, output) => { + try { + if (!input || !output) { + console.log("Operation failed: missing --input or --output"); + return; + } + + const inputPath = path.resolve(process.cwd(), input); + const outputPath = path.resolve(process.cwd(), output); + + if (!fs.existsSync(inputPath)) { + console.log("Operation failed: input file does not exist"); + return; + } + + const fileSize = fs.statSync(inputPath).size; + const cpuCores = os.cpus().length; + + const chunkSize = Math.floor(fileSize / cpuCores); + + const workers = []; + const promises = []; + + for (let i = 0; i < cpuCores; i++) { + const start = i * chunkSize; + const end = i === cpuCores - 1 ? 
fileSize : (i + 1) * chunkSize; + + const worker = new Worker(new URL("../workers/logWorker.js", import.meta.url), { + workerData: { + file: inputPath, + start, + end, + }, + }); + + promises.push( + new Promise((resolve, reject) => { + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) reject(new Error()); + }); + }), + ); + + workers.push(worker); + } + + Promise.all(promises) + .then((results) => { + const finalStats = mergeStats(results); + + fs.writeFileSync(outputPath, JSON.stringify(finalStats, null, 2)); + }) + .catch(() => { + console.log("Operation failed 4"); + }); + } catch { + console.log("Operation failed 5"); + } +}; + +function mergeStats(results) { + const final = { + total: 0, + levels: {}, + status: {"2xx": 0, "3xx": 0, "4xx": 0, "5xx": 0}, + paths: {}, + responseSum: 0, + }; + + for (const result of results) { + final.total += result.total; + final.responseSum += result.responseSum; + + for (const [level, count] of Object.entries(result.levels)) { + final.levels[level] = (final.levels[level] || 0) + count; + } + + for (const [status, count] of Object.entries(result.status)) { + final.status[status] += count; + } + + for (const [path, count] of Object.entries(result.paths)) { + final.paths[path] = (final.paths[path] || 0) + count; + } + } + + const topPaths = Object.entries(final.paths) + .map(([path, count]) => ({path, count})) + .sort((a, b) => b.count - a.count) + .slice(0, 5); + + return { + total: final.total, + levels: final.levels, + status: final.status, + topPaths, + avgResponseTimeMs: final.total + ? 
Number((final.responseSum / final.total).toFixed(2)) + : 0, + }; +} diff --git a/src/main.js b/src/main.js new file mode 100644 index 0000000..e49846d --- /dev/null +++ b/src/main.js @@ -0,0 +1,49 @@ +import readline from "readline"; +import os from "os"; +import process from "process"; +import {processCommand} from "./repl.js"; + +process.chdir(os.homedir()); + +console.log("Welcome to Data Processing CLI!"); +console.log(`You are currently in ${process.cwd()}`); + +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + prompt: "> ", +}); + +rl.prompt(); + +rl.on("line", (input) => { + const commandInput = input.trim(); + + if (!commandInput) { + rl.prompt(); + return; + } + + if (commandInput === ".exit") { + exitProgram(); + return; + } + + try { + processCommand(commandInput); + console.log(`You are currently in ${process.cwd()}`); + } catch (err) { + console.log("Operation failed"); + } + + rl.prompt(); +}); + +rl.on("SIGINT", () => { + exitProgram(); +}); + +function exitProgram() { + console.log("Thank you for using Data Processing CLI!"); + process.exit(0); +} diff --git a/src/navigation.js b/src/navigation.js new file mode 100644 index 0000000..54dcd6f --- /dev/null +++ b/src/navigation.js @@ -0,0 +1,70 @@ +import path from "path"; +import fs from "fs"; + +export const up = () => { + try { + const currentDir = process.cwd(); + const parentDir = path.resolve(currentDir, ".."); + + if (parentDir !== currentDir) { + process.chdir(parentDir); + } + + console.log(`You are now in: ${process.cwd()}`); + } catch { + console.log("Operation failed"); + } +}; + +export const changeDir = (providedPath) => { + try { + if (!providedPath) { + console.log("Operation failed: Missing path"); + return; + } + + const resolvedPath = path.isAbsolute(providedPath) + ?
providedPath + : path.resolve(process.cwd(), providedPath); + + if ( + !fs.existsSync(resolvedPath) || + !fs.statSync(resolvedPath).isDirectory() + ) { + console.log("Operation failed: Invalid directory"); + return; + } + + process.chdir(resolvedPath); + console.log(`You are now in: ${process.cwd()}`); + } catch { + console.log("Operation failed"); + } +}; + +export const listDir = () => { + try { + const currentDir = process.cwd(); + + const entries = fs.readdirSync(currentDir, {withFileTypes: true}); + + const formatted = entries.map((entry) => ({ + name: entry.name, + type: entry.isDirectory() ? "folder" : "file", + })); + + formatted.sort((a, b) => { + if (a.type !== b.type) { + return a.type === "folder" ? -1 : 1; + } + + return a.name.localeCompare(b.name); + }); + + formatted.forEach((item) => { + console.log(`${item.name} [${item.type}]`); + }); + } catch { + console.log("Operation failed"); + } +}; diff --git a/src/repl.js b/src/repl.js new file mode 100644 index 0000000..ab66f49 --- /dev/null +++ b/src/repl.js @@ -0,0 +1,60 @@ +import {changeDir, listDir, up} from "./navigation.js"; +import {csvToJson} from "./commands/csvToJson.js"; +import {argParser} from "./utils/argParser.js"; +import {jsonToCsv} from "./commands/jsonToCsv.js"; +import {countFile} from "./commands/count.js"; +import {hashFile} from "./commands/hash.js"; +import {hashCompare} from "./commands/hashCompare.js"; +import {encrypt} from "./commands/encrypt.js"; +import {decrypt} from "./commands/decrypt.js"; +import {logStats} from "./commands/logStats.js"; + +export const processCommand = (commandInput) => { + const [command, ...args] = commandInput.trim().split(/\s+/); + const input = argParser("--input", args); + const output = argParser("--output", args); + + switch (command) { + case "up": + up(); + break; + case "cd": + changeDir(args.join(" ")); + break; + case "ls": + listDir(); + break; + case "csv-to-json": + csvToJson(input, output); + break; + case "json-to-csv": + 
jsonToCsv(input, output); + break; + case "count": + countFile(input); + break; + case "hash": + hashFile({ + input, + algorithm: argParser("--algorithm", args), + save: args.indexOf("--save") !== -1, + }); break; + case "hash-compare": + hashCompare( + input, + argParser("--hash", args), + argParser("--algorithm", args), + ); + break; + case "encrypt": + encrypt(input, output, argParser("--password", args)); break; + case "decrypt": + decrypt(input, output, argParser("--password", args)); + break; + case "log-stats": + logStats(input, output); + break; + default: + console.log("Invalid input."); + } +}; diff --git a/src/utils/argParser.js b/src/utils/argParser.js new file mode 100644 index 0000000..ba7fc73 --- /dev/null +++ b/src/utils/argParser.js @@ -0,0 +1,4 @@ +export const argParser = (argName, args) => { + const argIndex = args.indexOf(argName); + return argIndex !== -1 ? args[argIndex + 1] : undefined; +}; diff --git a/src/workers/logWorker.js b/src/workers/logWorker.js new file mode 100644 index 0000000..a7acb63 --- /dev/null +++ b/src/workers/logWorker.js @@ -0,0 +1,55 @@ +import fs from "fs"; +import {parentPort, workerData} from "worker_threads"; + +const {file, start, end} = workerData; + +const stream = fs.createReadStream(file, { + start, + end: end - 1, // workerData `end` is an exclusive boundary; createReadStream `end` is inclusive + encoding: "utf8", +}); + +let buffer = ""; + +const stats = { + total: 0, + levels: {}, + status: {"2xx": 0, "3xx": 0, "4xx": 0, "5xx": 0}, + paths: {}, + responseSum: 0, +}; + +stream.on("data", (chunk) => { + buffer += chunk; + + const lines = buffer.split("\n"); + buffer = lines.pop(); + + for (const line of lines) { + if (!line.trim()) continue; + + const parts = line.split(" "); + if (parts.length < 7) continue; + + const level = parts[1]; + const statusCode = parseInt(parts[3], 10); + const responseTime = parseFloat(parts[4]); + const path = parts[6]; + + stats.total++; + stats.responseSum += responseTime; + + stats.levels[level] = (stats.levels[level] || 0) + 1; + + const statusClass = Math.floor(statusCode / 100) +
"xx"; + if (stats.status[statusClass] !== undefined) { + stats.status[statusClass]++; + } + + stats.paths[path] = (stats.paths[path] || 0) + 1; + } +}); + +stream.on("end", () => { + parentPort.postMessage(stats); +});