diff --git a/bin/index.js b/bin/index.js index 6e0d37d9..abbb0644 100755 --- a/bin/index.js +++ b/bin/index.js @@ -124,7 +124,6 @@ prog .command("cache") .option("-l, --list", i18n.getTokenSync("cli.commands.cache.option_list"), false) .option("-c, --clear", i18n.getTokenSync("cli.commands.cache.option_clear"), false) - .option("-f, --full", i18n.getTokenSync("cli.commands.cache.option_full"), false) .describe(i18n.getTokenSync("cli.commands.cache.desc")) .action(commands.cache.main); diff --git a/i18n/english.js b/i18n/english.js index 04d83447..075d94c9 100644 --- a/i18n/english.js +++ b/i18n/english.js @@ -72,9 +72,7 @@ const cli = { missingAction: "No valid action specified. Use --help to see options.", option_list: "List cache files", option_clear: "Clear the cache", - option_full: "Clear or list the full cache, including payloads", cacheTitle: "NodeSecure Cache:", - scannedPayloadsTitle: "Scanned payloads available on disk:", cleared: "Cache cleared successfully!" }, extractIntegrity: { diff --git a/i18n/french.js b/i18n/french.js index 6bd46c7c..ef6339be 100644 --- a/i18n/french.js +++ b/i18n/french.js @@ -72,9 +72,7 @@ const cli = { missingAction: "Aucune action valide spécifiée. Utilisez --help pour voir les options.", option_list: "Lister les fichiers du cache", option_clear: "Nettoyer le cache", - option_full: "Nettoyer ou lister le cache complet, y compris les payloads", cacheTitle: "Cache NodeSecure:", - scannedPayloadsTitle: "Payloads scannés disponibles sur le disque:", cleared: "Cache nettoyé avec succès !" }, extractIntegrity: { diff --git a/public/components/package-navigation/package-navigation.js b/public/components/package-navigation/package-navigation.js new file mode 100644 index 00000000..a6ac733c --- /dev/null +++ b/public/components/package-navigation/package-navigation.js @@ -0,0 +1,260 @@ +// Import Third-party Dependencies +import { LitElement, html, css, nothing } from "lit"; +import { repeat } from "lit/directives/repeat.js"; + +// Import Internal Dependencies +import * as utils from "../../common/utils.js"; +import "../icon/icon.js"; + +/** + * @typedef {Object} PackageMetadata + * @property {string} spec - Package spec (e.g. 
"package@1.0.0") + * @property {string} scanType - Type of scan ("cwd" for local) + * @property {string} locationOnDisk - Path to the package on disk + * @property {number} lastUsedAt - Timestamp of last usage + * @property {string | null} integrity - Package integrity hash + */ + +class PackageNavigation extends LitElement { + static styles = css` + b,p { + margin: 0; + padding: 0; + border: 0; + font: inherit; + font-size: 100%; + } + + :host { + z-index: 30; + display: flex; + justify-content: center; + align-items: center; + height: 30px; + left: 50px; + padding-left: 20px; + max-width: calc(100vw - 70px); + box-sizing: border-box; + background: var(--primary); + box-shadow: 2px 1px 10px #26107f7a; + } + + :host-context(body.dark) { + background: var(--dark-theme-primary-color); + } + + .packages { + height: 30px; + display: flex; + background: var(--primary); + } + + .packages > .package { + height: 30px; + font-family: mononoki; + display: flex; + align-items: center; + background: linear-gradient(to right, rgb(55 34 175 / 100%) 0%, rgb(87 74 173 / 100%) 50%, rgb(59 110 205) 100%); + padding: 0 10px; + border-right: 2px solid #0f041a; + text-shadow: 1px 1px 10px #000; + color: #def7ff; + } + + :host-context(body.dark) .packages > .package { + background: linear-gradient(to right, rgb(11 3 31 / 100%) 0%, rgb(11 3 31 / 80%) 50%, rgb(11 3 31 / 60%) 100%); + } + + .packages > .package > * { + transform: skewX(20deg); + } + + .packages > .package:first-child { + padding-left: 10px; + } + + .packages > .package:not(.active):hover { + background: linear-gradient(to right, rgb(55 34 175 / 100%) 1%, rgb(68 121 218) 100%); + color: #defff9; + cursor: pointer; + } + + :host-context(body.dark) .packages > .package:not(.active):hover { + background: linear-gradient(to right, rgb(11 3 31 / 70%) 1%, rgb(11 3 31 / 50%) 100%); + } + + .packages > .package.active { + background: linear-gradient(to right, rgb(55 34 175 / 100%) 0%, rgb(87 74 173 / 100%) 50%, rgb(59 110 205) 100%); + } + + .packages > .package.active > b { + background: var(--secondary); + } + + .packages > .package.active > .remove { + display: block; + } + + .packages > .package > b:last-of-type:not(:first-of-type) { + background: #f57c00; + } + + .packages > .package > b { + font-weight: bold; + font-size: 12px; + margin-left: 5px; + background: var(--secondary-darker); + padding: 3px 5px; + border-radius: 2px; + font-family: Roboto; + letter-spacing: 1px; + } + + .add { + height: 30px; + font-size: 20px; + border: none; + background: var(--secondary-darker); + cursor: pointer; + padding: 0 7px; + transition: 0.2s all ease; + color: #def7ff; + } + + .add:hover { + background: var(--secondary); + cursor: pointer; + } + + .add > i { + transform: skewX(20deg); + } + + button.remove { + display: none; + border: none; + position: relative; + cursor: pointer; + color: #fff5dc; + background: #ff3434e2; + margin-left: 10px; + border-radius: 50%; + line-height: 16px; + text-shadow: 1px 1px 10px #000; + font-weight: bold; + width: 20px; + } + + button.remove:hover { + cursor: pointer; + background: #ff5353e2; + } + `; + + static properties = { + /** + * Array of package metadata objects + * @type {PackageMetadata[]} + */ + metadata: { type: Array }, + /** + * Currently active package spec + * @type {string} + */ + activePackage: { type: String } + }; + + constructor() { + super(); + /** @type {PackageMetadata[]} */ + this.metadata = []; + /** @type {string} */ + this.activePackage = ""; + } + + /** + * Check if there are at least 2 packages + * 
@returns {boolean} + */ + get #hasAtLeast2Packages() { + return this.metadata.length > 1; + } + + /** + * Handle click on a package to select it + * @param {string} spec + */ + #handlePackageClick(spec) { + if (this.activePackage !== spec) { + window.socket.commands.search(spec); + } + } + + /** + * Handle click on remove button + * @param {Event} event + * @param {string} packageName + */ + #handleRemoveClick(event, packageName) { + event.stopPropagation(); + window.socket.commands.remove(packageName); + } + + #handleAddClick() { + window.navigation.setNavByName("search--view"); + } + + /** + * Render a single package element + * @param {PackageMetadata} param0 + * @returns {import("lit").TemplateResult} + */ + #renderPackage({ spec, scanType }) { + const isLocal = scanType === "cwd"; + const { name, version } = utils.parseNpmSpec(spec); + const isActive = spec === this.activePackage; + + return html` +
+      <div class="package ${isActive ? "active" : ""}" @click=${() => this.#handlePackageClick(spec)}>
+        <p>${name}</p>
+        <b>v${version}</b>
+        ${isLocal ? html`<b>local</b>` : nothing}
+        ${this.#hasAtLeast2Packages
+          ? html`<button class="remove" @click=${(event) => this.#handleRemoveClick(event, spec)}>x</button>`
+          : nothing}
+      </div>
+ `; + } + + render() { + if (this.metadata.length === 0) { + return nothing; + } + + return html` +
+      <div class="packages">
+        ${repeat(
+          this.metadata,
+          (pkg) => pkg.spec,
+          (pkg) => this.#renderPackage(pkg)
+        )}
+      </div>
+ + `; + } +} + +customElements.define("package-navigation", PackageNavigation); diff --git a/public/components/searchbar/searchbar.css b/public/components/searchbar/searchbar.css index 25317c07..69b68d2d 100644 --- a/public/components/searchbar/searchbar.css +++ b/public/components/searchbar/searchbar.css @@ -227,111 +227,3 @@ body.dark #search-nav { padding: 0 10px; font-family: Roboto; } - -#search-nav .packages { - height: inherit; - display: flex; - max-width: calc(100vw - 70px - 264px); - background: var(--primary); -} - -#search-nav .packages>.package { - height: inherit; - font-family: mononoki; - display: flex; - align-items: center; - background: linear-gradient(to right, rgb(55 34 175 / 100%) 0%,rgb(87 74 173 / 100%) 50%,rgb(59 110 205) 100%); - padding: 0 10px; - border-right: 2px solid #0f041a; - text-shadow: 1px 1px 10px #000; - color: #def7ff; -} - -body.dark #search-nav .packages>.package { - background: linear-gradient(to right, rgb(11 3 31 / 100%) 0%, rgb(11 3 31 / 80%) 50%, rgb(11 3 31 / 60%) 100%); -} - -#search-nav .packages>.package>* { - transform: skewX(20deg); -} - -#search-nav .packages>.package:first-child { - padding-left: 10px; -} - -#search-nav .packages>.package:not(.active):hover { - background: linear-gradient(to right, rgb(55 34 175 / 100%) 1%, rgb(68 121 218) 100%); - color: #defff9; - cursor: pointer; -} - -body.dark #search-nav .packages>.package:not(.active):hover { - background: linear-gradient(to right, rgb(11 3 31 / 70%) 1%, rgb(11 3 31 / 50%) 100%); -} - -#search-nav .packages>.package.active { - background: linear-gradient(to right, rgb(55 34 175 / 100%) 0%,rgb(87 74 173 / 100%) 50%,rgb(59 110 205) 100%); -} - -#search-nav .packages>.package.active>b { - background: var(--secondary); -} - -#search-nav .packages>.package.active>.remove { - display: block; -} - -#search-nav .packages>.package>b:last-of-type:not(:first-of-type) { - background: #f57c00; -} - -#search-nav .packages>.package>b{ - font-weight: bold; - font-size: 12px; - margin-left: 5px; - background: var(--secondary-darker); - padding: 3px 5px; - border-radius: 2px; - font-family: Roboto; - letter-spacing: 1px; -} - -#search-nav .add { - height: inherit; - font-size: 20px; - border: none; - background: var(--secondary-darker); - cursor: pointer; - padding: 0 7px; - transition: 0.2s all ease; - color: #def7ff; -} - -#search-nav .add:hover { - background: var(--secondary); - cursor: pointer; -} - -#search-nav .add>i { - transform: skewX(20deg); -} - -#search-nav button.remove { - display: none; - border: none; - position: relative; - cursor: pointer; - color: #fff5dc; - background: #ff3434e2; - margin-left: 10px; - border-radius: 50%; - line-height: 16px; - text-shadow: 1px 1px 10px #000; - font-weight: bold; - width: 20px; -} - -#search-nav button.remove:hover { - cursor: pointer; - background: #ff5353e2; -} diff --git a/public/components/views/search/search.css b/public/components/views/search/search.css index b260cc50..2e60b2a9 100644 --- a/public/components/views/search/search.css +++ b/public/components/views/search/search.css @@ -289,7 +289,7 @@ body.dark .description { height: 20px; } -.cache-packages, .recent-packages { +.cache-packages { display: flex; flex-direction: column; align-items: center; @@ -306,20 +306,20 @@ body.dark .description { overflow: auto; } -body.dark .cache-packages, body.dark .recent-packages { +body.dark .cache-packages { color: var(--secondary-darker); } -.cache-packages h1, .recent-packages h1 { +.cache-packages h1 { font-family: mononoki; color: #546884; 
} -body.dark .cache-packages h1, body.dark .recent-packages h1 { +body.dark .cache-packages h1 { color: white; } -.cache-packages .package-cache-result:has(span:hover), .recent-packages .package-cache-result:has(span:hover) { +.cache-packages .package-cache-result:has(span:hover) { color: var(--secondary-darker); background: #5468842a; cursor: pointer; diff --git a/public/components/views/search/search.html b/public/components/views/search/search.html index 5859489f..c725a3d9 100644 --- a/public/components/views/search/search.html +++ b/public/components/views/search/search.html @@ -13,9 +13,6 @@ > -
-    <div class="recent-packages">
-      <h1>[[=z.token('search.recentPackages')]]</h1>
-    </div>
     <div class="cache-packages">
       <h1>[[=z.token('search.packagesCache')]]</h1>
     </div>
diff --git a/public/components/views/search/search.js b/public/components/views/search/search.js index 9e35cec8..aa4e47c9 100644 --- a/public/components/views/search/search.js +++ b/public/components/views/search/search.js @@ -56,11 +56,7 @@ export class SearchView { this.#initializePackages( ".cache-packages", - window.scannedPackageCache - ); - this.#initializePackages( - ".recent-packages", - window.recentPackageCache + window.cachedSpecs ); } diff --git a/public/core/search-nav.js b/public/core/search-nav.js index 3db36515..7066661e 100644 --- a/public/core/search-nav.js +++ b/public/core/search-nav.js @@ -1,9 +1,15 @@ // Import Internal Dependencies -import { createDOMElement, parseNpmSpec } from "../common/utils"; import { SearchBar } from "../components/searchbar/searchbar"; +import "../components/package-navigation/package-navigation.js"; -export function initSearchNav(data, options) { - const { initFromZero = true, searchOptions = null } = options; +export function initSearchNav( + data, + options +) { + const { + initFromZero = true, + searchOptions = null + } = options; const searchNavElement = document.getElementById("search-nav"); if (!searchNavElement) { @@ -12,9 +18,12 @@ export function initSearchNav(data, options) { if (initFromZero) { searchNavElement.innerHTML = ""; + const element = document.createElement("package-navigation"); searchNavElement.appendChild( - initPackagesNavigation(data) + element ); + element.metadata = data; + element.activePackage = data.length > 0 ? data[0].spec : ""; } if (searchOptions !== null) { @@ -41,92 +50,3 @@ export function initSearchNav(data, options) { window.searchbar = new SearchBar(nsn, secureDataSet.linker); } } - -function initPackagesNavigation(data) { - const fragment = document.createDocumentFragment(); - const packages = data.mru; - - const hasAtLeast2Packages = packages.length > 1; - const hasExactly2Packages = packages.length === 2; - const container = createDOMElement("div", { - classList: ["packages"] - }); - - if (packages.length === 0) { - return fragment; - } - - for (const pkg of packages) { - const { name, version, local } = parseNpmSpec(pkg); - - const childs = [ - createDOMElement("p", { text: name }), - createDOMElement("b", { text: `v${version}` }) - ]; - if (local) { - childs.push(createDOMElement("b", { text: "local" })); - } - const pkgElement = createDOMElement("div", { - classList: ["package"], - childs - }); - pkgElement.dataset.name = pkg; - if (pkg === data.current) { - window.activePackage = pkg; - pkgElement.classList.add("active"); - } - pkgElement.addEventListener("click", () => { - if (window.activePackage !== pkg) { - window.socket.commands.search(pkg); - } - }); - - if (hasAtLeast2Packages && pkg !== data.root) { - pkgElement.appendChild( - renderPackageRemoveButton(pkgElement.dataset.name, { hasExactly2Packages }) - ); - } - - container.appendChild(pkgElement); - } - - const plusButtonElement = createDOMElement("button", { - classList: ["add"], - childs: [ - createDOMElement("p", { text: "+" }) - ] - }); - plusButtonElement.addEventListener("click", () => { - window.navigation.setNavByName("search--view"); - }); - - fragment.append(container, plusButtonElement); - - return fragment; -} - -function renderPackageRemoveButton(packageName, options) { - const { - hasExactly2Packages - } = options; - - // we allow to remove a package when at least 2 packages are present - const removeButton = createDOMElement("button", { - classList: ["remove"], - text: "x" - }); - - removeButton.addEventListener("click", 
(event) => { - event.stopPropagation(); - window.socket.commands.remove(packageName); - - if (hasExactly2Packages) { - document - .getElementById("search-nav") - .querySelectorAll(".package") - .forEach((element) => element.querySelector(".remove")?.remove()); - } - }, { once: true }); - - return removeButton; -} diff --git a/public/main.js b/public/main.js index 6ad77cc1..adc5fdf4 100644 --- a/public/main.js +++ b/public/main.js @@ -26,8 +26,7 @@ let searchview; let packageInfoOpened = false; document.addEventListener("DOMContentLoaded", async() => { - window.scannedPackageCache = []; - window.recentPackageCache = []; + window.cachedSpecs = []; window.locker = null; window.settings = await new Settings().fetchUserConfig(); window.i18n = await new i18n().fetch(); @@ -74,26 +73,12 @@ async function onSocketInitOrReload(event) { const data = event.detail; const { cache } = data; - window.scannedPackageCache = cache.availables; - window.recentPackageCache = cache.lru; + window.cachedSpecs = cache.map((metadata) => metadata.spec); console.log( - "[INFO] Older packages are loaded!", - window.scannedPackageCache - ); - console.log( - "[INFO] Recent packages are loaded!", - window.recentPackageCache + "[INFO] Cached specs are loaded!", + window.cachedSpecs ); - initSearchNav(cache, { - searchOptions: { - nsn, - secureDataSet - } - }); - searchview.mount(); - searchview.initialize(); - const nsnActivePackage = secureDataSet.linker.get(0); const nsnRootPackage = nsnActivePackage ? `${nsnActivePackage.name}@${nsnActivePackage.version}` : @@ -105,34 +90,26 @@ async function onSocketInitOrReload(event) { ) { // it means we removed the previous active package, which is still active in network, so we need to re-init await init(); - - // FIXME: initSearchNav is called twice, we need to fix this - initSearchNav(cache, { - searchOptions: { - nsn, - secureDataSet - } - }); } + else if (window.cachedSpecs.length === 0) { + await loadDataSet(); + } + + initSearchNav(cache, { + searchOptions: { + nsn, + secureDataSet + } + }); + searchview.mount(); + searchview.initialize(); } async function init(options = {}) { const { navigateToNetworkView = false } = options; - secureDataSet = new NodeSecureDataSet({ - flagsToIgnore: window.settings.config.ignore.flags, - warningsToIgnore: window.settings.config.ignore.warnings, - theme: window.settings.config.theme - }); - await secureDataSet.init(); - - if (secureDataSet.data === null) { - window.navigation.hideMenu("network--view"); - window.navigation.hideMenu("home--view"); - window.navigation.setNavByName("search--view"); - - searchview ??= new SearchView(null, null); - + const datasetLoaded = await loadDataSet(); + if (!datasetLoaded) { return; } @@ -190,6 +167,29 @@ async function init(options = {}) { console.log("[INFO] Node-Secure is ready!"); } +async function loadDataSet() { + const config = window.settings.config; + + secureDataSet = new NodeSecureDataSet({ + flagsToIgnore: config.ignore.flags, + warningsToIgnore: config.ignore.warnings, + theme: config.theme + }); + await secureDataSet.init(); + + if (secureDataSet.data === null) { + window.navigation.hideMenu("network--view"); + window.navigation.hideMenu("home--view"); + window.navigation.setNavByName("search--view"); + + searchview ??= new SearchView(null, null); + + return false; + } + + return true; +} + async function updateShowInfoMenu(params) { if (params.nodes.length === 0) { window.networkNav.currentNodeParams = null; diff --git a/src/commands/cache.js b/src/commands/cache.js index 62170ea2..4075272a 
100644 --- a/src/commands/cache.js +++ b/src/commands/cache.js @@ -1,17 +1,16 @@ // Import Node.js Dependencies import { styleText } from "node:util"; -import { setImmediate } from "node:timers/promises"; // Import Third-party Dependencies import prettyJson from "@topcli/pretty-json"; import * as i18n from "@nodesecure/i18n"; -import { cache, config } from "@nodesecure/server"; +import { PayloadCache } from "@nodesecure/cache"; +import { config } from "@nodesecure/server"; export async function main(options) { const { list, - clear, - full + clear } = options; await i18n.getLocalLang(); @@ -22,40 +21,35 @@ export async function main(options) { } if (list) { - listCache(full); + await listCache(); } if (clear) { - await setImmediate(); - await clearCache(full); + await clearCache(); } + console.log(); } -async function listCache(full) { - const paylodsList = await cache.payloadsList(); - console.log(styleText(["underline"], i18n.getTokenSync("cli.commands.cache.cacheTitle"))); - prettyJson(paylodsList); - - if (full) { - console.log(styleText(["underline"], i18n.getTokenSync("cli.commands.cache.scannedPayloadsTitle"))); - try { - const payloads = cache.availablePayloads(); - prettyJson(payloads); - } - catch { - prettyJson([]); - } - } -} +async function listCache() { + const cache = new PayloadCache(); + await cache.load(); + + const metadata = Array.from(cache); + console.log( + styleText(["underline"], i18n.getTokenSync("cli.commands.cache.cacheTitle")) + ); -async function clearCache(full) { - if (full) { - cache.availablePayloads().forEach((pkg) => { - cache.removePayload(pkg); - }); + for (const data of metadata) { + prettyJson(data); } +} +async function clearCache() { await config.setDefault(); - await cache.initPayloadsList({ logging: false, reset: true }); - console.log(styleText("green", i18n.getTokenSync("cli.commands.cache.cleared"))); + const cache = new PayloadCache(); + await cache.clear(); + + console.log( + styleText("green", i18n.getTokenSync("cli.commands.cache.cleared")) + ); } diff --git a/src/commands/http.js b/src/commands/http.js index 3521a68f..1549a46a 100644 --- a/src/commands/http.js +++ b/src/commands/http.js @@ -1,7 +1,6 @@ // Import Node.js Dependencies import fs from "node:fs"; import path from "node:path"; -import crypto from "node:crypto"; // Import Third-party Dependencies import kleur from "kleur"; @@ -9,7 +8,6 @@ import open from "open"; import * as SemVer from "semver"; import * as i18n from "@nodesecure/i18n"; import { - cache, logger, buildServer, WebSocketServerInstanciator @@ -47,11 +45,8 @@ export async function start( if (runFromPayload) { assertScannerVersion(dataFilePath); } - else { - cache.prefix = crypto.randomBytes(4).toString("hex"); - } - const httpServer = buildServer(dataFilePath, { + const { httpServer, cache } = await buildServer(dataFilePath, { port: httpPort, hotReload: enableDeveloperMode, runFromPayload, diff --git a/src/commands/scanner.js b/src/commands/scanner.js index 8ddc1574..2026fa4d 100644 --- a/src/commands/scanner.js +++ b/src/commands/scanner.js @@ -11,7 +11,6 @@ import { Spinner } from "@topcli/spinner"; import ms from "ms"; import * as i18n from "@nodesecure/i18n"; import * as scanner from "@nodesecure/scanner"; -import { cache } from "@nodesecure/server"; // Import Internal Dependencies import * as http from "./http.js"; @@ -215,8 +214,6 @@ async function logAndWrite( console.log(kleur.white().bold(i18n.getTokenSync("cli.successfully_written_json", kleur.green().bold(filePath)))); console.log(""); - await 
cache.setRootPayload(payload, { logging: false, local }); - return filePath; } diff --git a/test/commands/cache.test.js b/test/commands/cache.test.js deleted file mode 100644 index 60f47b13..00000000 --- a/test/commands/cache.test.js +++ /dev/null @@ -1,123 +0,0 @@ -import dotenv from "dotenv"; -dotenv.config({ quiet: true }); - -// Import Node.js Dependencies -import fs from "node:fs"; -import path from "node:path"; -import url from "node:url"; -import assert from "node:assert"; -import childProcess from "node:child_process"; -import { after, before, describe, it } from "node:test"; - -// Import Third-party Dependencies -import * as i18n from "@nodesecure/i18n"; -import { cache } from "@nodesecure/server"; -import { DEFAULT_PAYLOAD_PATH } from "@nodesecure/cache"; - -// Import Internal Dependencies -import { arrayFromAsync } from "../helpers/utils.js"; -import { main } from "../../src/commands/cache.js"; - -// CONSTANTS -const __dirname = path.dirname(url.fileURLToPath(import.meta.url)); - -describe("Cache command", { concurrency: 1 }, () => { - let lang; - let actualCache; - let dummyPayload = null; - - before(async() => { - if (fs.existsSync(DEFAULT_PAYLOAD_PATH) === false) { - dummyPayload = { - rootDependency: { - name: "test_runner", - version: "1.0.0", - integrity: null - }, - dependencies: { - test_runner: { - versions: { - "1.0.0": {} - } - } - } - }; - fs.writeFileSync(DEFAULT_PAYLOAD_PATH, JSON.stringify(dummyPayload)); - } - await i18n.setLocalLang("english"); - await i18n.extendFromSystemPath( - path.join(__dirname, "..", "..", "i18n") - ); - lang = await i18n.getLocalLang(); - - try { - actualCache = await cache.payloadsList(); - } - catch { - await cache.initPayloadsList({ logging: false }); - actualCache = await cache.payloadsList(); - } - - cache.updatePayload("test-package", { foo: "bar" }); - }); - - after(async() => { - await i18n.setLocalLang(lang); - await i18n.getLocalLang(); - - await cache.updatePayloadsList(actualCache, { logging: false }); - cache.removePayload("test-package"); - - if (dummyPayload !== null) { - fs.rmSync(DEFAULT_PAYLOAD_PATH); - } - }); - - it("should list the cache", async() => { - const cp = childProcess.spawn("node", [ - ".", - "cache", - "-l" - ]); - const stdout = await arrayFromAsync(cp.stdout); - const inlinedStdout = stdout.join(""); - assert.ok(inlinedStdout.includes(i18n.getTokenSync("cli.commands.cache.cacheTitle"))); - assert.strictEqual(inlinedStdout.includes(i18n.getTokenSync("cli.commands.cache.scannedPayloadsTitle")), false); - }); - - it("should list the cache and scanned payloads on disk", async() => { - const cp = childProcess.spawn("node", [ - ".", - "cache", - "-lf" - ]); - const stdout = await arrayFromAsync(cp.stdout); - const inlinedStdout = stdout.join(""); - assert.ok(inlinedStdout.includes(i18n.getTokenSync("cli.commands.cache.cacheTitle"))); - assert.ok(inlinedStdout.includes(i18n.getTokenSync("cli.commands.cache.scannedPayloadsTitle"))); - }); - - it("should clear the cache", async(ctx) => { - let rmSyncCalled = false; - ctx.mock.method(fs, "rmSync", () => { - rmSyncCalled = true; - }); - await main({ - clear: true, - full: false - }); - assert.strictEqual(rmSyncCalled, false, "should not have removed payloads on disk without --full option"); - }); - - it("should clear the cache and payloads on disk", async(ctx) => { - let rmSyncCalled = false; - ctx.mock.method(fs, "rmSync", () => { - rmSyncCalled = true; - }); - await main({ - clear: true, - full: true - }); - assert.ok(rmSyncCalled, "should have removed payloads 
on disk with --full option"); - }); -}); diff --git a/workspaces/cache/package.json b/workspaces/cache/package.json index 67f2062b..50e2095c 100644 --- a/workspaces/cache/package.json +++ b/workspaces/cache/package.json @@ -21,9 +21,8 @@ "@types/cacache": "^19.0.0" }, "dependencies": { - "@nodesecure/flags": "3.0.3", - "@nodesecure/js-x-ray": "10.2.1", "@nodesecure/scanner": "8.2.0", - "cacache": "20.0.3" + "cacache": "20.0.3", + "filenamify": "7.0.1" } } diff --git a/workspaces/cache/src/AppCache.ts b/workspaces/cache/src/AppCache.ts deleted file mode 100644 index 3610f0ab..00000000 --- a/workspaces/cache/src/AppCache.ts +++ /dev/null @@ -1,240 +0,0 @@ -// Import Node.js Dependencies -import os from "node:os"; -import path from "node:path"; -import fs from "node:fs"; - -// Import Third-party Dependencies -import cacache from "cacache"; -import type { Payload } from "@nodesecure/scanner"; - -// Import Internal Dependencies -import { type AbstractLogger, createNoopLogger } from "./abstract-logging.ts"; - -// CONSTANTS -const kPayloadsCache = "___payloads"; -const kPayloadsPath = path.join(os.homedir(), ".nsecure", "payloads"); -const kMaxPayloads = 3; -const kSlashReplaceToken = "______"; - -export const CACHE_PATH = path.join(os.tmpdir(), "nsecure-cli"); -export const DEFAULT_PAYLOAD_PATH = path.join(process.cwd(), "nsecure-result.json"); - -export interface PayloadsList { - mru: string[]; - lru: string[]; - current: string; - availables: string[]; - lastUsed: Record; - root: string | null; -} - -export interface LoggingOption { - logging?: boolean; -} - -export interface InitPayloadListOptions extends LoggingOption { - reset?: boolean; -} - -export interface SetRootPayloadOptions extends LoggingOption { - local?: boolean; -} - -export class AppCache { - #logger: AbstractLogger; - - prefix = ""; - startFromZero = false; - - constructor( - logger: AbstractLogger = createNoopLogger() - ) { - this.#logger = logger; - fs.mkdirSync(kPayloadsPath, { recursive: true }); - } - - updatePayload(packageName: string, payload: Payload) { - if (packageName.includes(kSlashReplaceToken)) { - throw new Error(`Invalid package name: ${packageName}`); - } - - const filePath = path.join(kPayloadsPath, packageName.replaceAll("/", kSlashReplaceToken)); - fs.writeFileSync(filePath, JSON.stringify(payload)); - } - - getPayload(packageName: string): Payload { - const filePath = path.join(kPayloadsPath, packageName.replaceAll("/", kSlashReplaceToken)); - - try { - return JSON.parse(fs.readFileSync(filePath, "utf-8")); - } - catch (err) { - this.#logger.error(`[cache|get](pkg: ${packageName}|cache: not found)`); - - throw err; - } - } - - availablePayloads() { - return fs - .readdirSync(kPayloadsPath) - .map((filename) => filename.replaceAll(kSlashReplaceToken, "/")); - } - - getPayloadOrNull(packageName: string): Payload | null { - try { - return this.getPayload(packageName); - } - catch { - return null; - } - } - - async updatePayloadsList(payloadsList: PayloadsList) { - await cacache.put(CACHE_PATH, `${this.prefix}${kPayloadsCache}`, JSON.stringify(payloadsList)); - } - - async payloadsList(): Promise { - try { - const { data } = await cacache.get(CACHE_PATH, `${this.prefix}${kPayloadsCache}`); - - return JSON.parse(data.toString()); - } - catch (err) { - this.#logger.error("[cache|get](cache: not found)"); - - throw err; - } - } - - async #initDefaultPayloadsList(options: LoggingOption = {}) { - const { logging = true } = options; - - if (this.startFromZero) { - const payloadsList = { - mru: [], - lru: [], - 
current: null, - availables: [], - lastUsed: {}, - root: null - }; - - if (logging) { - this.#logger.info("[cache|init](startFromZero)"); - } - await cacache.put(CACHE_PATH, `${this.prefix}${kPayloadsCache}`, JSON.stringify(payloadsList)); - - return; - } - - const payload = JSON.parse(fs.readFileSync(DEFAULT_PAYLOAD_PATH, "utf-8")); - const { name, version } = payload.rootDependency; - - const spec = `${name}@${version}`; - const payloadsList = { - mru: [spec], - lru: [], - current: spec, - availables: [], - lastUsed: { - [spec]: Date.now() - }, - root: spec - }; - - if (logging) { - this.#logger.info(`[cache|init](dep: ${spec})`); - } - await cacache.put(CACHE_PATH, `${this.prefix}${kPayloadsCache}`, JSON.stringify(payloadsList)); - this.updatePayload(spec, payload); - } - - async initPayloadsList(options: InitPayloadListOptions = {}) { - const { - logging = true, - reset = false - } = options; - - if (reset) { - await cacache.rm.all(CACHE_PATH); - } - - try { - // prevent re-initialization of the cache - await cacache.get(CACHE_PATH, `${this.prefix}${kPayloadsCache}`); - - return; - } - catch { - // Do nothing. - } - const packagesInFolder = this.availablePayloads(); - if (packagesInFolder.length === 0) { - await this.#initDefaultPayloadsList({ logging }); - - return; - } - - if (logging) { - this.#logger.info(`[cache|init](packagesInFolder: ${packagesInFolder})`); - } - - await cacache.put(CACHE_PATH, `${this.prefix}${kPayloadsCache}`, JSON.stringify({ - availables: packagesInFolder, - current: null, - mru: [], - lru: [] - })); - } - - removePayload(packageName: string) { - const filePath = path.join(kPayloadsPath, packageName.replaceAll("/", kSlashReplaceToken)); - fs.rmSync(filePath, { force: true }); - } - - async removeLastMRU(): Promise { - const { mru, lastUsed, lru, ...cache } = await this.payloadsList(); - if (mru.length < kMaxPayloads) { - return { - ...cache, - mru, - lru, - lastUsed - }; - } - const packageToBeRemoved = Object.keys(lastUsed) - .filter((key) => mru.includes(key)) - .sort((a, b) => lastUsed[a] - lastUsed[b])[0]; - - return { - ...cache, - mru: mru.filter((pkg) => pkg !== packageToBeRemoved), - lru: [...lru, packageToBeRemoved], - lastUsed - }; - } - - async setRootPayload(payload: Payload, options: SetRootPayloadOptions = {}) { - const { logging = true, local = false } = options; - - const { name, version } = payload.rootDependency; - - const pkg = `${name}@${version}${local ? 
"#local" : ""}`; - this.updatePayload(pkg, payload); - - await this.initPayloadsList({ logging }); - - const { mru, lru, availables, lastUsed } = await this.removeLastMRU(); - - const updatedPayloadsCache = { - mru: [...new Set([...mru, pkg])], - lru, - availables, - lastUsed: { ...lastUsed, [pkg]: Date.now() }, - current: pkg, - root: pkg - }; - await this.updatePayloadsList(updatedPayloadsCache); - } -} diff --git a/workspaces/cache/src/PayloadCache.ts b/workspaces/cache/src/PayloadCache.ts new file mode 100644 index 00000000..b804af59 --- /dev/null +++ b/workspaces/cache/src/PayloadCache.ts @@ -0,0 +1,328 @@ +// Import Node.js Dependencies +import os from "node:os"; +import path from "node:path"; +import fs from "node:fs/promises"; + +// Import Third-party Dependencies +import type { Payload } from "@nodesecure/scanner"; +import filenamify from "filenamify"; + +// Import Internal Dependencies +import { + type BasePersistanceProvider, + FilePersistanceProvider +} from "./FilePersistanceProvider.ts"; + +// CONSTANTS +export type PayloadStorageMap = Map; + +export interface PayloadManifest { + current: string | null; + specs: string[]; +} + +export interface PayloadMetadata { + spec: string; + scanType: "cwd" | "from"; + locationOnDisk: string; + lastUsedAt: number; + integrity: string | null; +} + +export interface PayloadSaveOptions { + /** + * @default false + */ + useAsCurrent?: boolean; + /** + * @default "cwd" + */ + scanType?: "cwd" | "from"; +} + +export interface PayloadCacheOptions { + fsProvider?: typeof fs; + storageProvider?: (spec: string) => BasePersistanceProvider; +} + +export class PayloadCache { + static PATH = path.join(os.homedir(), ".nsecure", "payloads"); + + static getPathBySpec( + spec: string + ): string { + return path.join( + PayloadCache.PATH, + filenamify(spec) + ); + } + + static getPathByPayload( + payload: Payload + ): string { + return PayloadCache.getPathBySpec( + specFromPayload(payload) + ); + } + + #fsProvider: typeof fs; + #manifest: PayloadManifestCache; + #storage = new Map(); + + constructor( + options: PayloadCacheOptions = {} + ) { + this.#fsProvider = options.fsProvider || fs; + this.#manifest = new PayloadManifestCache(options); + } + + setCurrentSpec( + spec: string | null + ): void { + this.#manifest.currentSpec = spec; + if (typeof spec === "string") { + this.updateLastUsedAt(spec); + } + } + + getCurrentSpec(): string | null { + return this.#manifest.currentSpec; + } + + * [Symbol.iterator](): IterableIterator { + yield* [...this.#storage.values()] + .sort((a, b) => b.lastUsedAt - a.lastUsedAt); + } + + async findBySpec( + spec: string + ): Promise { + const filePath = PayloadCache.getPathBySpec(spec); + + try { + return JSON.parse( + await this.#fsProvider.readFile(filePath, "utf-8") + ); + } + catch { + return null; + } + } + + async findByIntegrity( + integrity: string + ): Promise { + for (const [spec, metadata] of this.#storage) { + if (metadata.integrity === integrity) { + return this.findBySpec(spec); + } + } + + return null; + } + + async remove( + specOrPayload: string | Payload + ): Promise { + const spec = typeof specOrPayload === "string" + ? specOrPayload + : specFromPayload(specOrPayload); + this.#storage.delete(spec); + if (this.#storage.size === 0) { + this.setCurrentSpec(null); + } + this.#manifest.lazyPersistOnDisk(this.#storage, { + dirtySpecs: [spec] + }); + + const filePath = typeof specOrPayload === "string" + ? 
PayloadCache.getPathBySpec(specOrPayload) + : PayloadCache.getPathByPayload(specOrPayload); + + await this.#fsProvider.rm(filePath, { force: true }); + } + + async save( + payload: Payload, + options: PayloadSaveOptions = {} + ): Promise { + const { useAsCurrent = false, scanType = "cwd" } = options; + + const spec = specFromPayload(payload); + if (useAsCurrent) { + this.#manifest.currentSpec = spec; + } + + if (this.#storage.has(spec)) { + this.updateLastUsedAt(spec); + + return; + } + + const filePath = PayloadCache.getPathByPayload(payload); + + this.#storage.set( + spec, + { + spec, + scanType, + locationOnDisk: filePath, + lastUsedAt: Date.now(), + integrity: payload.rootDependency.integrity + } + ); + this.#manifest.lazyPersistOnDisk(this.#storage, { + dirtySpecs: [spec] + }); + + await this.#fsProvider.writeFile( + filePath, + JSON.stringify(payload) + ); + } + + updateLastUsedAt( + spec: string + ): void { + const metadata = this.#storage.get(spec); + if (metadata) { + metadata.lastUsedAt = Date.now(); + this.#manifest.lazyPersistOnDisk( + this.#storage, + { dirtySpecs: [spec] } + ); + } + } + + async load() { + await this.#manifest.initialize(); + this.#storage = await this.#manifest.load(); + + return this; + } + + async clear() { + await this.#fsProvider.rm( + PayloadCache.PATH, + { recursive: true, force: true } + ); + + return this; + } +} + +export class PayloadManifestCache { + #pendingStorage: PayloadStorageMap | null = null; + #pendingPersist = false; + #dirtySpecs = new Set(); + #fsProvider: typeof fs; + #storageProvider: (spec: string) => BasePersistanceProvider; + + currentSpec: string | null = null; + + constructor( + options: PayloadCacheOptions = {} + ) { + const { + fsProvider = fs, + storageProvider = (spec: string) => new FilePersistanceProvider(spec) + } = options; + + this.#fsProvider = fsProvider; + this.#storageProvider = storageProvider; + } + + lazyPersistOnDisk( + storage: PayloadStorageMap, + options: { dirtySpecs?: string[]; } = {} + ) { + const { dirtySpecs = [] } = options; + for (const spec of dirtySpecs) { + this.#dirtySpecs.add(spec); + } + + this.#pendingStorage = storage; + + if (this.#pendingPersist) { + return; + } + + this.#pendingPersist = true; + queueMicrotask(() => { + this.#pendingPersist = false; + if (this.#pendingStorage) { + this.persistPartial( + structuredClone(this.#pendingStorage) + ); + this.#pendingStorage = null; + } + }); + } + + async initialize() { + await this.#fsProvider.mkdir( + PayloadCache.PATH, + { recursive: true } + ); + + return this; + } + + async persistPartial( + storage: PayloadStorageMap + ) { + for (const spec of this.#dirtySpecs) { + const metadata = storage.get(spec); + const fileStorage = this.#storageProvider(spec); + if (metadata) { + await fileStorage.set(metadata); + } + else { + await fileStorage.remove(); + } + } + this.#dirtySpecs.clear(); + + await this.#fsProvider.writeFile( + path.join(PayloadCache.PATH, "manifest.json"), + JSON.stringify({ + current: this.currentSpec, + specs: [...storage.keys()] + }) + ); + + return this; + } + + async load() { + const storage = new Map(); + + try { + const manifestContent = await this.#fsProvider.readFile( + path.join(PayloadCache.PATH, "manifest.json"), + "utf-8" + ); + const manifest: PayloadManifest = JSON.parse(manifestContent); + this.currentSpec = manifest.current; + + for (const spec of manifest.specs) { + const fileStorage = this.#storageProvider(spec); + const metadata = await fileStorage.get(); + if (metadata) { + storage.set(spec, metadata); + } + } + 
} + catch { + // No manifest found, skip loading. + } + + return storage; + } +} + +function specFromPayload( + payload: Payload +): string { + return `${payload.rootDependency.name}@${payload.rootDependency.version}`; +} diff --git a/workspaces/cache/src/abstract-logging.ts b/workspaces/cache/src/abstract-logging.ts deleted file mode 100644 index 5fb7f9da..00000000 --- a/workspaces/cache/src/abstract-logging.ts +++ /dev/null @@ -1,25 +0,0 @@ -function noop() { - // VOID -} - -export interface AbstractLogger { - fatal: (...args: any[]) => void; - error: (...args: any[]) => void; - warn: (...args: any[]) => void; - info: (...args: any[]) => void; - debug: (...args: any[]) => void; - trace: (...args: any[]) => void; -} - -export function createNoopLogger(): AbstractLogger { - const logger: AbstractLogger = { - fatal: noop, - error: noop, - warn: noop, - info: noop, - debug: noop, - trace: noop - }; - - return logger; -} diff --git a/workspaces/cache/src/index.ts b/workspaces/cache/src/index.ts index 90d28c3f..2d5a1c93 100644 --- a/workspaces/cache/src/index.ts +++ b/workspaces/cache/src/index.ts @@ -1,2 +1,2 @@ -export * from "./AppCache.ts"; export * from "./FilePersistanceProvider.ts"; +export * from "./PayloadCache.ts"; diff --git a/workspaces/cache/test/PayloadCache.test.ts b/workspaces/cache/test/PayloadCache.test.ts new file mode 100644 index 00000000..8e0c9102 --- /dev/null +++ b/workspaces/cache/test/PayloadCache.test.ts @@ -0,0 +1,771 @@ +// Import Node.js Dependencies +import assert from "node:assert/strict"; +import { describe, it, beforeEach, mock } from "node:test"; +import os from "node:os"; +import path from "node:path"; +import type fs from "node:fs/promises"; + +// Import Third-party Dependencies +import type { Payload } from "@nodesecure/scanner"; + +// Import Internal Dependencies +import { + PayloadCache, + PayloadManifestCache, + type PayloadMetadata, + type PayloadManifest, + type BasePersistanceProvider +} from "../src/index.ts"; + +// Types +type MockedFs = { + [K in keyof typeof fs]: ReturnType>; +}; + +interface MockedStorageProvider { + get: ReturnType Promise>>; + set: ReturnType Promise>>; + remove: ReturnType Promise>>; +} + +// Helpers +function createMockFs(): MockedFs { + return { + readFile: mock.fn(), + writeFile: mock.fn(), + rm: mock.fn(), + mkdir: mock.fn() + } as MockedFs; +} + +function createMockStorageProvider(): MockedStorageProvider { + return { + get: mock.fn<() => Promise>(), + set: mock.fn<(value: PayloadMetadata) => Promise>(), + remove: mock.fn<() => Promise>() + }; +} + +function createMockPayload( + name: string, + version: string, + integrity: string | null = null +): Payload { + return { + rootDependency: { + name, + version, + integrity + }, + dependencies: new Map(), + scannerVersion: "1.0.0" + } as unknown as Payload; +} + +function createMockMetadata( + spec: string, + options: Partial = {} +): PayloadMetadata { + return { + spec, + scanType: "cwd", + locationOnDisk: PayloadCache.getPathBySpec(spec), + lastUsedAt: Date.now(), + integrity: null, + ...options + }; +} + +describe("PayloadCache", () => { + describe("static PATH", () => { + it("should be in os.homedir()/.nsecure/payloads", () => { + assert.equal( + PayloadCache.PATH, + path.join(os.homedir(), ".nsecure", "payloads") + ); + }); + }); + + describe("static getPathBySpec", () => { + it("should return correct path for a spec", () => { + const spec = "express@4.18.2"; + const result = PayloadCache.getPathBySpec(spec); + + assert.equal( + result, + path.join(PayloadCache.PATH, 
"express@4.18.2") + ); + }); + + it("should sanitize invalid filename characters", () => { + const spec = "my-package@1.0.0"; + const result = PayloadCache.getPathBySpec(spec); + + assert.ok(result.startsWith(PayloadCache.PATH)); + }); + }); + + describe("static getPathByPayload", () => { + it("should return correct path from payload", () => { + const payload = createMockPayload("lodash", "4.17.21"); + const result = PayloadCache.getPathByPayload(payload); + + assert.equal( + result, + path.join(PayloadCache.PATH, "lodash@4.17.21") + ); + }); + }); + + describe("getCurrentSpec / setCurrentSpec", () => { + it("should set and get current spec", () => { + const mockFs = createMockFs(); + const cache = new PayloadCache({ fsProvider: mockFs as unknown as typeof fs }); + + assert.equal(cache.getCurrentSpec(), null); + + cache.setCurrentSpec("express@4.18.2"); + assert.equal(cache.getCurrentSpec(), "express@4.18.2"); + + cache.setCurrentSpec(null); + assert.equal(cache.getCurrentSpec(), null); + }); + }); + + describe("[Symbol.iterator]", () => { + let mockFs: MockedFs; + let mockStorageProvider: MockedStorageProvider; + let storageProviderFactory: (spec: string) => BasePersistanceProvider; + + beforeEach(() => { + mockFs = createMockFs(); + mockStorageProvider = createMockStorageProvider(); + storageProviderFactory = () => mockStorageProvider as unknown as BasePersistanceProvider; + }); + + it("should iterate over storage values sorted by lastUsedAt descending", async() => { + const manifest: PayloadManifest = { + current: null, + specs: ["old@1.0.0", "new@2.0.0"] + }; + + mockFs.readFile.mock.mockImplementation( + () => Promise.resolve(JSON.stringify(manifest)) + ); + mockFs.mkdir.mock.mockImplementation(() => Promise.resolve(undefined)); + + const metadataOld = createMockMetadata("old@1.0.0", { lastUsedAt: 1000 }); + const metadataNew = createMockMetadata("new@2.0.0", { lastUsedAt: 2000 }); + + let callCount = 0; + mockStorageProvider.get.mock.mockImplementation(() => { + callCount++; + if (callCount === 1) { + return Promise.resolve(metadataOld); + } + + return Promise.resolve(metadataNew); + }); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: storageProviderFactory + }); + await cache.load(); + + const items = [...cache]; + + assert.equal(items.length, 2); + assert.equal(items[0].spec, "new@2.0.0"); + assert.equal(items[1].spec, "old@1.0.0"); + }); + }); + + describe("findBySpec", () => { + let mockFs: MockedFs; + + beforeEach(() => { + mockFs = createMockFs(); + }); + + it("should return parsed payload on success", async() => { + const expectedPayload = createMockPayload("express", "4.18.2"); + mockFs.readFile.mock.mockImplementation( + () => Promise.resolve(JSON.stringify(expectedPayload)) + ); + + const cache = new PayloadCache({ fsProvider: mockFs as unknown as typeof fs }); + const result = await cache.findBySpec("express@4.18.2"); + + assert.ok(result !== null); + assert.equal(result.rootDependency.name, "express"); + assert.equal(result.rootDependency.version, "4.18.2"); + assert.equal(result.scannerVersion, "1.0.0"); + assert.equal(mockFs.readFile.mock.calls.length, 1); + }); + + it("should return null on read error", async() => { + mockFs.readFile.mock.mockImplementation( + () => Promise.reject(new Error("ENOENT")) + ); + + const cache = new PayloadCache({ fsProvider: mockFs as unknown as typeof fs }); + const result = await cache.findBySpec("nonexistent@1.0.0"); + + assert.equal(result, null); + }); + }); + + 
describe("findByIntegrity", () => { + let mockFs: MockedFs; + let mockStorageProvider: MockedStorageProvider; + let storageProviderFactory: (spec: string) => BasePersistanceProvider; + + beforeEach(() => { + mockFs = createMockFs(); + mockStorageProvider = createMockStorageProvider(); + storageProviderFactory = () => mockStorageProvider as unknown as BasePersistanceProvider; + }); + + it("should find payload by integrity", async() => { + const targetIntegrity = "sha512-abc123"; + const expectedPayload = createMockPayload("express", "4.18.2", targetIntegrity); + const manifest: PayloadManifest = { + current: null, + specs: ["express@4.18.2"] + }; + + mockFs.mkdir.mock.mockImplementation(() => Promise.resolve(undefined)); + mockFs.readFile.mock.mockImplementation((filePath: string) => { + if (String(filePath).includes("manifest.json")) { + return Promise.resolve(JSON.stringify(manifest)); + } + + return Promise.resolve(JSON.stringify(expectedPayload)); + }); + + mockStorageProvider.get.mock.mockImplementation( + () => Promise.resolve(createMockMetadata("express@4.18.2", { integrity: targetIntegrity })) + ); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: storageProviderFactory + }); + await cache.load(); + + const result = await cache.findByIntegrity(targetIntegrity); + + assert.ok(result !== null); + assert.equal(result.rootDependency.name, "express"); + assert.equal(result.rootDependency.version, "4.18.2"); + assert.equal(result.rootDependency.integrity, targetIntegrity); + }); + + it("should return null if integrity not found", async() => { + const manifest: PayloadManifest = { + current: null, + specs: ["express@4.18.2"] + }; + + mockFs.mkdir.mock.mockImplementation(() => Promise.resolve(undefined)); + mockFs.readFile.mock.mockImplementation( + () => Promise.resolve(JSON.stringify(manifest)) + ); + + mockStorageProvider.get.mock.mockImplementation( + () => Promise.resolve(createMockMetadata("express@4.18.2", { integrity: "sha512-different" })) + ); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: storageProviderFactory + }); + await cache.load(); + + const result = await cache.findByIntegrity("sha512-nonexistent"); + + assert.equal(result, null); + }); + }); + + describe("remove", () => { + let mockFs: MockedFs; + let mockStorageProvider: MockedStorageProvider; + + beforeEach(() => { + mockFs = createMockFs(); + mockStorageProvider = createMockStorageProvider(); + }); + + it("should remove payload by spec string", async() => { + mockFs.rm.mock.mockImplementation(() => Promise.resolve(undefined)); + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.remove.mock.mockImplementation(() => Promise.resolve()); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.remove("express@4.18.2"); + + assert.equal(mockFs.rm.mock.calls.length, 1); + const rmCall = mockFs.rm.mock.calls[0]; + assert.equal( + rmCall.arguments[0], + PayloadCache.getPathBySpec("express@4.18.2") + ); + }); + + it("should remove payload by Payload object", async() => { + mockFs.rm.mock.mockImplementation(() => Promise.resolve(undefined)); + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.remove.mock.mockImplementation(() => Promise.resolve()); + + const payload = createMockPayload("lodash", 
"4.17.21"); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.remove(payload); + + assert.equal(mockFs.rm.mock.calls.length, 1); + const rmCall = mockFs.rm.mock.calls[0]; + assert.equal( + rmCall.arguments[0], + PayloadCache.getPathByPayload(payload) + ); + }); + }); + + describe("save", () => { + let mockFs: MockedFs; + let mockStorageProvider: MockedStorageProvider; + + beforeEach(() => { + mockFs = createMockFs(); + mockStorageProvider = createMockStorageProvider(); + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.set.mock.mockImplementation(() => Promise.resolve(true)); + }); + + it("should save payload to disk", async() => { + const payload = createMockPayload("express", "4.18.2", "sha512-integrity"); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.save(payload); + + // Wait for microtask to complete + await new Promise((resolve) => { + queueMicrotask(() => resolve(void 0)); + }); + + assert.equal(mockFs.writeFile.mock.calls.length >= 1, true); + }); + + it("should set current spec when useAsCurrent is true", async() => { + const payload = createMockPayload("express", "4.18.2"); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.save(payload, { useAsCurrent: true }); + + assert.equal(cache.getCurrentSpec(), "express@4.18.2"); + }); + + it("should not duplicate if spec already exists", async() => { + const payload = createMockPayload("express", "4.18.2"); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.save(payload); + const writeCountAfterFirstSave = mockFs.writeFile.mock.calls.length; + + await cache.save(payload); + + // Should not have written payload file again (only manifest updates via microtask) + assert.equal(mockFs.writeFile.mock.calls.length, writeCountAfterFirstSave); + }); + + it("should use scanType option", async() => { + const payload = createMockPayload("express", "4.18.2"); + + let capturedMetadata: PayloadMetadata | null = null; + mockStorageProvider.set.mock.mockImplementation((metadata: PayloadMetadata) => { + capturedMetadata = metadata; + + return Promise.resolve(true); + }); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.save(payload, { scanType: "from" }); + + // Wait for microtask + await new Promise((resolve) => { + queueMicrotask(() => resolve(void 0)); + }); + + assert.equal(capturedMetadata!.scanType, "from"); + }); + }); + + describe("updateLastUsedAt", () => { + let mockFs: MockedFs; + let mockStorageProvider: MockedStorageProvider; + + beforeEach(() => { + mockFs = createMockFs(); + mockStorageProvider = createMockStorageProvider(); + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.set.mock.mockImplementation(() => Promise.resolve(true)); + }); + + it("should update lastUsedAt for existing spec", async() => { + const payload = createMockPayload("express", "4.18.2"); + + 
const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.save(payload); + + const beforeUpdate = Date.now(); + cache.updateLastUsedAt("express@4.18.2"); + + // Wait for microtask + await new Promise((resolve) => { + queueMicrotask(() => resolve(void 0)); + }); + + const items = [...cache]; + assert.equal(items.length, 1); + assert.ok(items[0].lastUsedAt >= beforeUpdate); + }); + + it("should do nothing for non-existing spec", () => { + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + // Should not throw + cache.updateLastUsedAt("nonexistent@1.0.0"); + + const items = [...cache]; + assert.equal(items.length, 0); + }); + }); + + describe("load", () => { + let mockFs: MockedFs; + let mockStorageProvider: MockedStorageProvider; + + beforeEach(() => { + mockFs = createMockFs(); + mockStorageProvider = createMockStorageProvider(); + }); + + it("should load manifest and storage from disk", async() => { + const manifest: PayloadManifest = { + current: "express@4.18.2", + specs: ["express@4.18.2", "lodash@4.17.21"] + }; + + mockFs.mkdir.mock.mockImplementation(() => Promise.resolve(undefined)); + mockFs.readFile.mock.mockImplementation( + () => Promise.resolve(JSON.stringify(manifest)) + ); + + let callIndex = 0; + mockStorageProvider.get.mock.mockImplementation(() => { + const metadata = callIndex === 0 + ? createMockMetadata("express@4.18.2") + : createMockMetadata("lodash@4.17.21"); + callIndex++; + + return Promise.resolve(metadata); + }); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + const result = await cache.load(); + + assert.equal(result, cache); + assert.equal(cache.getCurrentSpec(), "express@4.18.2"); + + const items = [...cache]; + assert.equal(items.length, 2); + }); + + it("should return empty storage when no manifest exists", async() => { + mockFs.mkdir.mock.mockImplementation(() => Promise.resolve(undefined)); + mockFs.readFile.mock.mockImplementation( + () => Promise.reject(new Error("ENOENT")) + ); + + const cache = new PayloadCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + await cache.load(); + + const items = [...cache]; + assert.equal(items.length, 0); + }); + }); + + describe("clear", () => { + it("should remove entire cache directory", async() => { + const mockFs = createMockFs(); + mockFs.rm.mock.mockImplementation(() => Promise.resolve(undefined)); + + const cache = new PayloadCache({ fsProvider: mockFs as unknown as typeof fs }); + const result = await cache.clear(); + + assert.equal(result, cache); + assert.equal(mockFs.rm.mock.calls.length, 1); + assert.deepEqual(mockFs.rm.mock.calls[0].arguments, [ + PayloadCache.PATH, + { recursive: true, force: true } + ]); + }); + }); +}); + +describe("PayloadManifestCache", () => { + describe("currentSpec", () => { + it("should initialize with null", () => { + const manifest = new PayloadManifestCache(); + assert.equal(manifest.currentSpec, null); + }); + + it("should be settable", () => { + const manifest = new PayloadManifestCache(); + manifest.currentSpec = "express@4.18.2"; + assert.equal(manifest.currentSpec, "express@4.18.2"); + }); + }); + + 
describe("initialize", () => { + it("should create cache directory", async() => { + const mockFs = createMockFs(); + mockFs.mkdir.mock.mockImplementation(() => Promise.resolve(undefined)); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs + }); + + const result = await manifest.initialize(); + + assert.equal(result, manifest); + assert.equal(mockFs.mkdir.mock.calls.length, 1); + assert.deepEqual(mockFs.mkdir.mock.calls[0].arguments, [ + PayloadCache.PATH, + { recursive: true } + ]); + }); + }); + + describe("lazyPersistOnDisk", () => { + it("should batch multiple calls into single persist", async() => { + const mockFs = createMockFs(); + const mockStorageProvider = createMockStorageProvider(); + + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.set.mock.mockImplementation(() => Promise.resolve(true)); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + const storage = new Map(); + storage.set("express@4.18.2", createMockMetadata("express@4.18.2")); + + manifest.lazyPersistOnDisk(storage, { dirtySpecs: ["express@4.18.2"] }); + manifest.lazyPersistOnDisk(storage, { dirtySpecs: ["express@4.18.2"] }); + manifest.lazyPersistOnDisk(storage, { dirtySpecs: ["express@4.18.2"] }); + + // Wait for microtask + await new Promise((resolve) => { + queueMicrotask(() => resolve(void 0)); + }); + + // Should only persist once despite multiple calls + assert.equal(mockFs.writeFile.mock.calls.length, 1); + }); + }); + + describe("persistPartial", () => { + it("should persist dirty specs and manifest", async() => { + const mockFs = createMockFs(); + const mockStorageProvider = createMockStorageProvider(); + + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.set.mock.mockImplementation(() => Promise.resolve(true)); + mockStorageProvider.remove.mock.mockImplementation(() => Promise.resolve()); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + manifest.currentSpec = "express@4.18.2"; + + const storage = new Map(); + storage.set("express@4.18.2", createMockMetadata("express@4.18.2")); + + // Trigger lazyPersistOnDisk to populate dirtySpecs + manifest.lazyPersistOnDisk(storage, { dirtySpecs: ["express@4.18.2"] }); + + // Wait for microtask which calls persistPartial + await new Promise((resolve) => { + queueMicrotask(() => resolve(void 0)); + }); + + assert.equal(mockStorageProvider.set.mock.calls.length, 1); + assert.equal(mockFs.writeFile.mock.calls.length, 1); + + const writeCall = mockFs.writeFile.mock.calls[0]; + assert.equal( + writeCall.arguments[0], + path.join(PayloadCache.PATH, "manifest.json") + ); + + const writtenManifest = JSON.parse(writeCall.arguments[1] as string); + assert.deepEqual(writtenManifest, { + current: "express@4.18.2", + specs: ["express@4.18.2"] + }); + }); + + it("should remove storage for deleted specs", async() => { + const mockFs = createMockFs(); + const mockStorageProvider = createMockStorageProvider(); + + mockFs.writeFile.mock.mockImplementation(() => Promise.resolve(undefined)); + mockStorageProvider.remove.mock.mockImplementation(() => Promise.resolve()); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => 
mockStorageProvider as unknown as BasePersistanceProvider + }); + + const storage = new Map(); + // No entry for "deleted@1.0.0" in storage + + manifest.lazyPersistOnDisk(storage, { dirtySpecs: ["deleted@1.0.0"] }); + + // Wait for microtask + await new Promise((resolve) => { + queueMicrotask(() => resolve(void 0)); + }); + + assert.equal(mockStorageProvider.remove.mock.calls.length, 1); + }); + }); + + describe("load", () => { + it("should load manifest and metadata from disk", async() => { + const mockFs = createMockFs(); + const mockStorageProvider = createMockStorageProvider(); + + const manifestData: PayloadManifest = { + current: "express@4.18.2", + specs: ["express@4.18.2"] + }; + + mockFs.readFile.mock.mockImplementation( + () => Promise.resolve(JSON.stringify(manifestData)) + ); + + const expectedMetadata = createMockMetadata("express@4.18.2"); + mockStorageProvider.get.mock.mockImplementation( + () => Promise.resolve(expectedMetadata) + ); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + const storage = await manifest.load(); + + assert.equal(manifest.currentSpec, "express@4.18.2"); + assert.equal(storage.size, 1); + assert.deepEqual(storage.get("express@4.18.2"), expectedMetadata); + }); + + it("should skip specs with no metadata", async() => { + const mockFs = createMockFs(); + const mockStorageProvider = createMockStorageProvider(); + + const manifestData: PayloadManifest = { + current: null, + specs: ["express@4.18.2", "missing@1.0.0"] + }; + + mockFs.readFile.mock.mockImplementation( + () => Promise.resolve(JSON.stringify(manifestData)) + ); + + let callIndex = 0; + mockStorageProvider.get.mock.mockImplementation(() => { + callIndex++; + if (callIndex === 1) { + return Promise.resolve(createMockMetadata("express@4.18.2")); + } + + return Promise.resolve(null); + }); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs, + storageProvider: () => mockStorageProvider as unknown as BasePersistanceProvider + }); + + const storage = await manifest.load(); + + assert.equal(storage.size, 1); + assert.ok(storage.has("express@4.18.2")); + assert.ok(!storage.has("missing@1.0.0")); + }); + + it("should return empty storage when manifest read fails", async() => { + const mockFs = createMockFs(); + + mockFs.readFile.mock.mockImplementation( + () => Promise.reject(new Error("ENOENT")) + ); + + const manifest = new PayloadManifestCache({ + fsProvider: mockFs as unknown as typeof fs + }); + + const storage = await manifest.load(); + + assert.equal(storage.size, 0); + }); + }); +}); diff --git a/workspaces/cache/test/index.test.ts b/workspaces/cache/test/index.test.ts deleted file mode 100644 index bcdfac73..00000000 --- a/workspaces/cache/test/index.test.ts +++ /dev/null @@ -1,345 +0,0 @@ -// Import Node.js Dependencies -import assert from "node:assert/strict"; -import { after, before, describe, it } from "node:test"; -import fs from "node:fs"; -import path from "node:path"; -import os from "node:os"; - -// Import Third-party Dependencies -import cacache from "cacache"; - -// Import Internal Dependencies -import { AppCache } from "../src/index.ts"; - -// CONSTANTS -const kPayloadsPath = path.join(os.homedir(), ".nsecure", "payloads"); - -describe("appCache", () => { - let appCache: AppCache; - - before(() => { - appCache = new AppCache(); - appCache.prefix = "test_runner"; - }); - - after(() => { - 
appCache.availablePayloads().forEach((pkg) => { - appCache.removePayload(pkg); - }); - }); - - it("should write payload into ~/.nsecure/payloads", (t) => { - let writePath = ""; - let writeValue = ""; - t.mock.method(fs, "writeFileSync", (path: string, value: string) => { - writePath = path; - writeValue = value; - }); - - appCache.updatePayload("foo/bar", { foo: "bar" } as any); - - assert.equal(writePath, path.join(kPayloadsPath, "foo______bar")); - assert.equal(writeValue, JSON.stringify({ foo: "bar" })); - }); - - it("should throw given a package name that contains the slash replace token", () => { - assert.throws(() => appCache.updatePayload("foo______bar", { foo: "bar" } as any), { - message: "Invalid package name: foo______bar" - }); - }); - - it("getPayload should return the payload", (t) => { - t.mock.method(fs, "readFileSync", () => JSON.stringify({ foo: "bar" })); - - const payload = appCache.getPayload("foo/bar"); - - assert.deepEqual(payload, { foo: "bar" }); - }); - - it("getPayload should throw", (t) => { - t.mock.method(fs, "readFileSync", () => { - throw new Error("boo"); - }); - - assert.throws(() => appCache.getPayload("foo/bar"), { - message: "boo" - }); - }); - - it("getPayloadOrNull should return payload", (t) => { - t.mock.method(fs, "readFileSync", () => JSON.stringify({ foo: "bar" })); - - const payload = appCache.getPayloadOrNull("foo/bar"); - - assert.deepEqual(payload, { foo: "bar" }); - }); - - it("getPayloadOrNull should return null", (t) => { - t.mock.method(fs, "readFileSync", () => { - throw new Error("boo"); - }); - - const payload = appCache.getPayloadOrNull("foo/bar"); - - assert.equal(payload, null); - }); - - it("availablePayloads should return the list of payloads", (t) => { - t.mock.method(fs, "readdirSync", () => ["foo-bar", "bar-foo"]); - - const payloads = appCache.availablePayloads(); - - assert.deepEqual(payloads, ["foo-bar", "bar-foo"]); - }); - - it("should update and get payloadsList", async() => { - await appCache.updatePayloadsList({ foo: "bar" } as any); - - const updated = await appCache.payloadsList(); - assert.deepEqual(updated, { foo: "bar" }); - }); - - it("payloadList should throw", async(t) => { - t.mock.method(cacache, "get", () => { - throw new Error("boo"); - }); - - await assert.rejects(async() => appCache.payloadsList(), { - message: "boo" - }); - }); - - it("should init payloadsList when starting from zero", async(t) => { - appCache.startFromZero = true; - t.mock.method(fs, "readdirSync", () => []); - t.mock.method(cacache, "get", () => { - throw new Error("boo"); - }); - - await appCache.initPayloadsList(); - - t.mock.reset(); - - const payloadsList = await appCache.payloadsList(); - - assert.deepEqual(payloadsList, { - mru: [], - lru: [], - current: null, - availables: [], - lastUsed: {}, - root: null - }); - }); - - it("should init payloadsList with the root payload json", async(t) => { - appCache.startFromZero = false; - t.mock.method(fs, "readdirSync", () => []); - t.mock.method(fs, "readFileSync", () => JSON.stringify({ - rootDependency: { - name: "test_runner", - version: "1.0.0" - }, - dependencies: { - test_runner: { - versions: { - "1.0.0": {} - } - } - } - })); - t.mock.method(fs, "writeFileSync", () => void 0); - t.mock.method(cacache, "get", () => { - throw new Error("boo"); - }); - t.mock.method(Date, "now", () => 1234567890); - - await appCache.initPayloadsList(); - - t.mock.reset(); - - const payloadsList = await appCache.payloadsList(); - - assert.deepEqual(payloadsList, { - mru: ["test_runner@1.0.0"], - lru: 
[], - current: "test_runner@1.0.0", - availables: [], - lastUsed: { "test_runner@1.0.0": 1234567890 }, - root: "test_runner@1.0.0" - }); - }); - - it("should init payloadsList.older with already scanned payloads", async(t) => { - t.mock.method(fs, "readdirSync", () => ["test_runner@1.0.0", "test_runner@2.0.0"]); - t.mock.method(cacache, "get", () => { - throw new Error("boo"); - }); - - await appCache.initPayloadsList(); - - t.mock.reset(); - - const payloadsList = await appCache.payloadsList(); - - assert.deepEqual(payloadsList, { - availables: ["test_runner@1.0.0", "test_runner@2.0.0"], - current: null, - mru: [], - lru: [] - }); - }); - - it("should remove payload from disk", (t) => { - let removedPath = ""; - t.mock.method(fs, "rmSync", (path: string) => { - removedPath = path; - }); - - appCache.removePayload("foo/bar"); - - assert.equal(removedPath, path.join(kPayloadsPath, "foo______bar")); - }); - - it("should not remove the last MRU when MRU is not full", async(t) => { - t.mock.method(cacache, "get", () => { - return { - data: { - toString: () => JSON.stringify({ - mru: ["foo"], - lru: ["bar"], - availables: [], - lastUsed: { foo: 1234567890 }, - foo: "bar" - }) - } - }; - }); - - const result = await appCache.removeLastMRU(); - - assert.deepEqual(result, { - mru: ["foo"], - lru: ["bar"], - availables: [], - lastUsed: { foo: 1234567890 }, - foo: "bar" - }); - }); - - it("should remove the last MRU when MRU is full", async(t) => { - t.mock.method(cacache, "get", () => { - return { - data: { - toString: () => JSON.stringify({ - mru: ["foo", "foz", "bar"], - lru: ["boz"], - availables: [], - lastUsed: { - foo: 123, - foz: 1234, - bar: 12345 - }, - foo: "bar" - }) - } - }; - }); - - const result = await appCache.removeLastMRU(); - - assert.deepEqual(result, { - mru: ["foz", "bar"], - lru: ["boz", "foo"], - availables: [], - lastUsed: { - foo: 123, - foz: 1234, - bar: 12345 - }, - foo: "bar" - }); - }); - - it("should set local root payload", async(t) => { - t.mock.method(fs, "writeFileSync", () => void 0); - t.mock.method(Date, "now", () => 1234567890); - await appCache.updatePayloadsList({ - mru: [], - lru: [], - current: "", - availables: [], - lastUsed: {}, - root: null - }); - const payload: any = { - rootDependency: { - name: "test_runner-local", - version: "1.0.0", - integrity: null - }, - dependencies: { - "test_runner-local": { - versions: { - "1.0.0": {} - } - } - } - }; - await appCache.setRootPayload(payload, { local: true }); - - const result = await appCache.payloadsList(); - - assert.deepEqual(result, { - mru: ["test_runner-local@1.0.0#local"], - lru: [], - current: "test_runner-local@1.0.0#local", - availables: [], - lastUsed: { - "test_runner-local@1.0.0#local": 1234567890 - }, - root: "test_runner-local@1.0.0#local" - }); - }); - - it("should set normal root payload", async(t) => { - t.mock.method(fs, "writeFileSync", () => void 0); - t.mock.method(Date, "now", () => 1234567890); - await appCache.updatePayloadsList({ - mru: [], - lru: [], - current: "", - availables: [], - lastUsed: {}, - root: null - }); - const payload: any = { - rootDependency: { - name: "test_runner-local", - version: "1.0.0", - integrity: null - }, - dependencies: { - "test_runner-local": { - versions: { - "1.0.0": {} - } - } - } - }; - await appCache.setRootPayload(payload, {}); - - const result = await appCache.payloadsList(); - - assert.deepEqual(result, { - mru: ["test_runner-local@1.0.0"], - lru: [], - current: "test_runner-local@1.0.0", - availables: [], - lastUsed: { - 
"test_runner-local@1.0.0": 1234567890 - }, - root: "test_runner-local@1.0.0" - }); - }); -}); diff --git a/workspaces/server/README.md b/workspaces/server/README.md index 69f41769..4ba64e4c 100644 --- a/workspaces/server/README.md +++ b/workspaces/server/README.md @@ -35,7 +35,7 @@ const kDataFilePath = path.join( "nsecure-result.json" ); -const httpServer = buildServer(kDataFilePath, { +const httpServer = await buildServer(kDataFilePath, { port: 3000, projectRootDir: kProjectRootDir, componentsDir: kComponentsDir @@ -48,7 +48,7 @@ httpServer.listen(port, async() => { ## API -### `buildServer(dataFilePath: string, options: BuildServerOptions): polka` +### `buildServer(dataFilePath: string, options: BuildServerOptions): Promise` Creates and configures a Node.js HTTP server instance for the NodeSecure CLI. diff --git a/workspaces/server/src/ALS.ts b/workspaces/server/src/ALS.ts index ddb67f31..52e2dac7 100644 --- a/workspaces/server/src/ALS.ts +++ b/workspaces/server/src/ALS.ts @@ -1,6 +1,9 @@ // Import Node.js Dependencies import { AsyncLocalStorage } from "node:async_hooks"; +// Import Third-party Dependencies +import type { PayloadCache } from "@nodesecure/cache"; + // Import Internal Dependencies import type { ViewBuilder } from "./ViewBuilder.class.ts"; @@ -9,7 +12,7 @@ export type NestedStringRecord = { }; export interface AsyncStoreContext { - dataFilePath?: string; + cache: PayloadCache; i18n: { english: NestedStringRecord; french: NestedStringRecord; diff --git a/workspaces/server/src/cache.ts b/workspaces/server/src/cache.ts deleted file mode 100644 index 7cef4c42..00000000 --- a/workspaces/server/src/cache.ts +++ /dev/null @@ -1,11 +0,0 @@ -// Import Third-party Dependencies -import { - AppCache -} from "@nodesecure/cache"; - -// Import Internal Dependencies -import { logger } from "./logger.ts"; - -export const cache = new AppCache( - logger -); diff --git a/workspaces/server/src/endpoints/data.ts b/workspaces/server/src/endpoints/data.ts index 67365346..b7a0ee7e 100644 --- a/workspaces/server/src/endpoints/data.ts +++ b/workspaces/server/src/endpoints/data.ts @@ -1,6 +1,4 @@ // Import Node.js Dependencies -import fs from "node:fs"; -import path from "node:path"; import type { IncomingMessage, ServerResponse @@ -9,57 +7,35 @@ import type { // Import Internal Dependencies import { context } from "../ALS.ts"; import { logger } from "../logger.ts"; -import { cache } from "../cache.ts"; import { send } from "./util/send.ts"; -// CONSTANTS -const kDefaultPayloadPath = path.join(process.cwd(), "nsecure-result.json"); - export async function get( _req: IncomingMessage, res: ServerResponse ) { - if (cache.startFromZero) { + const { cache } = context.getStore()!; + + const currentSpec = cache.getCurrentSpec(); + if (currentSpec === null) { logger.info("[data|get](no content)"); res.statusCode = 204; res.end(); - - return; - } - - try { - const { current, mru } = await cache.payloadsList(); - logger.info(`[data|get](current: ${current})`); - logger.debug(`[data|get](lru: ${mru})`); - - send(res, cache.getPayload(current)); } - catch { - logger.error("[data|get](No cache yet. Creating one...)"); - - const { dataFilePath } = context.getStore()!; - - const payloadPath = dataFilePath || kDefaultPayloadPath; - const payload = JSON.parse(fs.readFileSync(payloadPath, "utf-8")); - - const { name, version } = payload.rootDependency; - const formatted = `${name}@${version}${payload.local ? 
"#local" : ""}`; - const payloadsList = { - mru: [formatted], - current: formatted, - lru: [], - availables: cache.availablePayloads().filter((pkg) => pkg !== formatted), - lastUsed: { - [formatted]: Date.now() - }, - root: formatted - }; - logger.info(`[data|get](dep: ${formatted})`); - - await cache.updatePayloadsList(payloadsList); - cache.updatePayload(formatted, payload); - logger.info(`[data|get](cache: created|payloadsList: ${payloadsList.lru})`); - - send(res, payload); + else { + logger.info("[data|get](fetching data for spec=%s)", currentSpec); + + const payload = await cache.findBySpec(currentSpec); + if (payload === null) { + logger.info("[data|get](spec=%s not found)", currentSpec); + res.statusCode = 404; + res.end(); + + return; + } + + send( + res, + payload + ); } } diff --git a/workspaces/server/src/endpoints/report.ts b/workspaces/server/src/endpoints/report.ts index 100a4509..26092ebd 100644 --- a/workspaces/server/src/endpoints/report.ts +++ b/workspaces/server/src/endpoints/report.ts @@ -1,5 +1,4 @@ // Import Node.js Dependencies -import fs from "node:fs"; import type { IncomingMessage, ServerResponse @@ -11,7 +10,6 @@ import type { RC } from "@nodesecure/rc"; // Import Internal Dependencies import { context } from "../ALS.ts"; -import { cache } from "../cache.ts"; import { send } from "./util/send.ts"; import { bodyParser } from "./util/bodyParser.ts"; @@ -62,20 +60,35 @@ export async function post( const body = await bodyParser(req); const { title, includesAllDeps, theme } = body; - const { dataFilePath } = context.getStore()!; + const { cache } = context.getStore()!; - const scannerPayload = dataFilePath ? - JSON.parse(fs.readFileSync(dataFilePath, "utf-8")) : - cache.getPayload((await cache.payloadsList()).current); + const currentSpec = cache.getCurrentSpec(); + if (currentSpec === null) { + console.error("[report|post](no current spec set)"); + res.statusCode = 400; + + return res.end(); + } + + const scannerPayload = await cache.findBySpec(currentSpec); + if (scannerPayload === null) { + console.error( + "[report|post](no payload found for spec=%s)", + currentSpec + ); + res.statusCode = 500; + + return res.end(); + } const name = scannerPayload.rootDependency.name; - const [organizationPrefixOrRepo, repo] = name.split("/"); + const [organizationPrefix, repo] = name.split("/"); const reportPayload = structuredClone({ ...kReportPayload, title, - npm: { - organizationPrefix: repo === undefined ? null : organizationPrefixOrRepo, - packages: [repo === undefined ? organizationPrefixOrRepo : repo] + npm: repo === undefined ? 
undefined : { + organizationPrefix, + packages: [repo] }, theme }); diff --git a/workspaces/server/src/index.ts b/workspaces/server/src/index.ts index 84b7d56f..40b15043 100644 --- a/workspaces/server/src/index.ts +++ b/workspaces/server/src/index.ts @@ -1,10 +1,12 @@ // Import Node.js Dependencies -import fs from "node:fs"; +import fs from "node:fs/promises"; import path from "node:path"; import http from "node:http"; // Import Third-party Dependencies import sirv from "sirv"; +import { PayloadCache } from "@nodesecure/cache"; +import type { Payload } from "@nodesecure/scanner"; // Import Internal Dependencies import { getApiRouter } from "./endpoints/index.ts"; @@ -14,7 +16,6 @@ import { type AsyncStoreContext, type NestedStringRecord } from "./ALS.ts"; -import { cache } from "./cache.ts"; export interface BuildServerOptions { hotReload?: boolean; @@ -27,10 +28,13 @@ export interface BuildServerOptions { }; } -export function buildServer( +export async function buildServer( dataFilePath: string, options: BuildServerOptions -) { +): Promise<{ + httpServer: http.Server; + cache: PayloadCache; + }> { const { hotReload = true, runFromPayload = true, @@ -44,18 +48,24 @@ export function buildServer( projectRootDir, componentsDir }); + const cache = await new PayloadCache().load(); + const store: AsyncStoreContext = { i18n, - viewBuilder + viewBuilder, + cache }; if (runFromPayload) { - fs.accessSync(dataFilePath, fs.constants.R_OK | fs.constants.W_OK); - store.dataFilePath = dataFilePath; + const payloadStr = await fs.readFile(dataFilePath, "utf-8"); + const payload = JSON.parse(payloadStr) as Payload; + + await cache.save(payload, { + useAsCurrent: true + }); } else { - cache.startFromZero = true; + cache.setCurrentSpec(null); } - const apiRouter = getApiRouter(); const serving = sirv( @@ -68,13 +78,12 @@ export function buildServer( }); }); - return httpServer; + return { + httpServer, + cache + }; } export { WebSocketServerInstanciator } from "./websocket/index.ts"; export { logger } from "./logger.ts"; export * as config from "./config.ts"; - -export { - cache -}; diff --git a/workspaces/server/src/websocket/commands/remove.ts b/workspaces/server/src/websocket/commands/remove.ts index 2cd45b5e..19b92263 100644 --- a/workspaces/server/src/websocket/commands/remove.ts +++ b/workspaces/server/src/websocket/commands/remove.ts @@ -1,130 +1,18 @@ -// Import Third-party Dependencies -import type { PayloadsList } from "@nodesecure/cache/dist/AppCache.ts"; - // Import Internal Dependencies import { context } from "../websocket.als.ts"; import type { - WebSocketResponse, - WebSocketContext + WebSocketResponse } from "../websocket.types.ts"; export async function* remove( spec: string ): AsyncGenerator { - const ctx = context.getStore()!; + const { cache } = context.getStore()!; - const cacheList = await ctx.cache.payloadsList(); - let updatedList: PayloadsList; - if (cacheList.availables.includes(spec)) { - updatedList = await removeFromAvailables(spec, ctx, cacheList); - } - else { - updatedList = await removeFromMruOrLru(spec, ctx, cacheList); - } + await cache.remove(spec); yield { status: "RELOAD", - cache: updatedList - }; -} - -async function removeFromAvailables( - spec: string, - context: WebSocketContext, - cacheList: PayloadsList -): Promise { - const { cache, logger } = context; - const { availables, lastUsed, ...rest } = cacheList; - - logger.info("[ws|command.remove] remove from availables"); - const { [spec]: _, ...updatedLastUsed } = lastUsed; - const updatedList: PayloadsList = { - 
...rest, - lastUsed: updatedLastUsed, - availables: availables.filter((iterSpec) => iterSpec !== spec) - }; - - await cache.updatePayloadsList(updatedList); - cache.removePayload(spec); - - return updatedList; -} - -async function removeFromMruOrLru( - spec: string, - context: WebSocketContext, - cacheList: PayloadsList -): Promise { - const { logger, cache } = context; - const { mru, lru, current } = cacheList; - - logger.debug(`[ws|command.remove](lru: ${lru}|current: ${current})`); - if (mru.length === 1 && lru.length === 0) { - throw new Error("Cannot remove the last package."); - } - - const mruIndex = mru.findIndex((iterSpec) => iterSpec === spec); - const lruIndex = lru.findIndex((iterSpec) => iterSpec === spec); - if (mruIndex === -1 && lruIndex === -1) { - throw new Error("Package not found in cache."); - } - - const isInMru = mruIndex > -1; - logger.info(`[ws|command.remove] removing from ${isInMru ? "MRU" : "LRU"}`); - const updatedList = isInMru ? - removeFromMru(spec, cacheList) : - removeFromLru(spec, cacheList); - - await cache.updatePayloadsList(updatedList); - cache.removePayload(spec); - - return updatedList; -} - -function removeFromMru( - spec: string, - cacheList: PayloadsList -): PayloadsList { - const { mru, lru, current, lastUsed, root, availables } = cacheList; - - const updatedMru = mru.filter((iterSpec) => iterSpec !== spec); - let updatedLru = lru; - - if (lru.length > 0) { - const sortedLru = [...lru].sort((a, b) => lastUsed[a] - lastUsed[b]); - const olderLruPkg = sortedLru[0]; - updatedMru.push(olderLruPkg); - updatedLru = lru.filter((iterSpec) => iterSpec !== olderLruPkg); - } - - const { [spec]: _, ...updatedLastUsed } = lastUsed; - const updatedList: PayloadsList = { - mru: updatedMru, - lru: updatedLru, - lastUsed: updatedLastUsed, - current: current === spec ? updatedMru[0] : current, - root, - availables + cache: Array.from(cache) }; - - return updatedList; -} - -function removeFromLru( - spec: string, - cacheList: PayloadsList -): PayloadsList { - const { mru, lru, current, lastUsed, root, availables } = cacheList; - - const { [spec]: _, ...updatedLastUsed } = lastUsed; - const updatedList: PayloadsList = { - mru, - lru: lru.filter((iterSpec) => iterSpec !== spec), - availables, - lastUsed: updatedLastUsed, - current, - root - }; - - return updatedList; } diff --git a/workspaces/server/src/websocket/commands/search.ts b/workspaces/server/src/websocket/commands/search.ts index 6212894e..20d150da 100644 --- a/workspaces/server/src/websocket/commands/search.ts +++ b/workspaces/server/src/websocket/commands/search.ts @@ -1,6 +1,5 @@ // Import Third-party Dependencies import * as scanner from "@nodesecure/scanner"; -import type { PayloadsList, AppCache } from "@nodesecure/cache/dist/AppCache.ts"; // Import Internal Dependencies import { context } from "../websocket.als.ts"; @@ -37,76 +36,23 @@ async function* searchInCache( ): AsyncGenerator { const { logger, cache } = context.getStore()!; - const payload = cache.getPayloadOrNull(spec); + const payload = await cache.findBySpec(spec); if (!payload) { return false; } - - logger.info("[ws|command.search] fetching cache list"); - const cacheList = await cache.payloadsList(); - - const isInMru = cacheList.mru.includes(spec); - logger.info(`[ws|command.search] payload detected in ${isInMru ? 
"MRU" : "LRU/Availables"}`); - - let cachePayloadList: PayloadsList; - if (isInMru) { - cachePayloadList = await handleMruCache(spec, cache, cacheList); - } - else { - cachePayloadList = await handleLruOrAvailableCache(spec, cache); - } + logger.info("[ws|command.search] payload found in cache"); + cache.setCurrentSpec(spec); yield { status: "PAYLOAD" as const, payload }; - if (!isInMru || cache.startFromZero) { - yield { - status: "RELOAD" as const, - cache: cachePayloadList - }; - } - - return true; -} - -async function handleMruCache( - spec: string, - cache: AppCache, - cacheList: PayloadsList -): Promise { - const updatedList: PayloadsList = { - ...cacheList, - current: spec, - lastUsed: { ...cacheList.lastUsed, [spec]: Date.now() } - }; - - await cache.updatePayloadsList(updatedList); - - return updatedList; -} - -async function handleLruOrAvailableCache( - spec: string, - cache: AppCache -): Promise { - const { - mru, lru, availables, lastUsed, - ...updatedCache - } = await cache.removeLastMRU(); - const updatedList: PayloadsList = { - ...updatedCache, - mru: [...new Set([...mru, spec])], - current: spec, - lru: lru.filter((pckg) => pckg !== spec), - availables: availables.filter((pckg) => pckg !== spec), - lastUsed: { ...lastUsed, [spec]: Date.now() } + yield { + status: "RELOAD" as const, + cache: Array.from(cache) }; - await cache.updatePayloadsList(updatedList); - cache.startFromZero = false; - - return updatedList; + return true; } async function* saveInCache( @@ -114,21 +60,11 @@ async function* saveInCache( ): AsyncGenerator { const { logger, cache } = context.getStore()!; - const { name, version } = payload.rootDependency; - const spec = `${name}@${version}`; - - const { mru, lru, availables, lastUsed, ...appCache } = await cache.removeLastMRU(); - mru.push(spec); - cache.updatePayload(spec, payload); - const updatedList: PayloadsList = { - ...appCache, - mru: [...new Set(mru)], - lru, - availables, - lastUsed: { ...lastUsed, [spec]: Date.now() }, - current: spec - }; - await cache.updatePayloadsList(updatedList); + await cache.save(payload, { + useAsCurrent: true, + scanType: "from" + }); + logger.info("[ws|command.search] cache updated"); yield { status: "PAYLOAD" as const, @@ -136,10 +72,6 @@ async function* saveInCache( }; yield { status: "RELOAD" as const, - cache: updatedList + cache: Array.from(cache) }; - - cache.startFromZero = false; - - logger.info("[ws|command.search] cache updated"); } diff --git a/workspaces/server/src/websocket/index.ts b/workspaces/server/src/websocket/index.ts index d2edca7a..a70d98ae 100644 --- a/workspaces/server/src/websocket/index.ts +++ b/workspaces/server/src/websocket/index.ts @@ -2,7 +2,7 @@ import { WebSocketServer, type WebSocket } from "ws"; import { match } from "ts-pattern"; import type { Logger } from "pino"; -import type { AppCache } from "@nodesecure/cache/dist/AppCache.ts"; +import type { PayloadCache } from "@nodesecure/cache"; // Import Internal Dependencies import { search } from "./commands/search.ts"; @@ -16,12 +16,12 @@ import type { export interface WebSocketServerInstanciatorOptions { logger: Logger; - cache: AppCache; + cache: PayloadCache; } export class WebSocketServerInstanciator { #logger: Logger; - #cache: AppCache; + #cache: PayloadCache; constructor( options: WebSocketServerInstanciatorOptions @@ -39,7 +39,7 @@ export class WebSocketServerInstanciator { this.#onMessageHandler(socket, JSON.parse(rawData)); }); - const data = await this.initializeServer(); + const data = this.initializeServer(); 
sendSocketResponse(socket, data); } @@ -76,36 +76,17 @@ export class WebSocketServerInstanciator { }); } - async initializeServer( - stopInitializationOnError = false - ): Promise { - try { - const cached = await this.#cache.payloadsList(); - if ( - cached.mru === void 0 || - cached.current === void 0 - ) { - throw new Error("Payloads list not found in cache."); - } - this.#logger.info( - `[ws|init](current: ${cached.current}|root: ${cached.root})` - ); - - return { - status: "INIT", - cache: cached - }; - } - catch { - if (stopInitializationOnError) { - return null; - } + initializeServer(): WebSocketResponse { + const cached = Array.from(this.#cache); - this.#logger.error("[ws|init] creating new payloads list in cache"); - await this.#cache.initPayloadsList(); + this.#logger.info( + `[ws|init](current: ${this.#cache.getCurrentSpec()})` + ); - return this.initializeServer(true); - } + return { + status: "INIT", + cache: cached + }; } } diff --git a/workspaces/server/src/websocket/websocket.types.ts b/workspaces/server/src/websocket/websocket.types.ts index 435f0a3d..e62e9393 100644 --- a/workspaces/server/src/websocket/websocket.types.ts +++ b/workspaces/server/src/websocket/websocket.types.ts @@ -1,6 +1,6 @@ // Import Third-party Dependencies import type { WebSocket } from "ws"; -import type { AppCache, PayloadsList } from "@nodesecure/cache/dist/AppCache.ts"; +import type { PayloadCache, PayloadMetadata } from "@nodesecure/cache"; import type { Payload } from "@nodesecure/scanner"; // Import Internal Dependencies @@ -24,7 +24,7 @@ type ScanResponse = { */ type CachedResponse = { status: "INIT" | "RELOAD"; - cache: PayloadsList; + cache: PayloadMetadata[]; }; export type WebSocketResponse = @@ -39,6 +39,6 @@ export type WebSocketMessage = { export interface WebSocketContext { socket: WebSocket; - cache: AppCache; + cache: PayloadCache; logger: typeof logger; } diff --git a/workspaces/server/test/httpServer.test.ts b/workspaces/server/test/httpServer.test.ts index abcffa46..8736cae8 100644 --- a/workspaces/server/test/httpServer.test.ts +++ b/workspaces/server/test/httpServer.test.ts @@ -31,7 +31,7 @@ const kBundlephobiaPool = kMockAgent.get("https://bundlephobia.com"); const kProjectRootDir = path.join(import.meta.dirname, "..", "..", ".."); const kComponentsDir = path.join(kProjectRootDir, "public", "components"); -describe("httpServer", { concurrency: 1 }, () => { +describe("httpServer", () => { let httpServer: Server; before(async() => { @@ -40,7 +40,7 @@ describe("httpServer", { concurrency: 1 }, () => { path.join(__dirname, "..", "..", "..", "i18n") ); - httpServer = buildServer(JSON_PATH, { + ({ httpServer } = await buildServer(JSON_PATH, { projectRootDir: kProjectRootDir, componentsDir: kComponentsDir, i18n: { @@ -51,13 +51,13 @@ describe("httpServer", { concurrency: 1 }, () => { ui: {} } } - }); + })); httpServer.listen(kHttpPort); await once(httpServer, "listening"); enableDestroy(httpServer); }, { timeout: 5000 }); - after(() => { + after(async() => { httpServer.destroy(); kBundlephobiaPool.close(); setGlobalDispatcher(kGlobalDispatcher); @@ -314,7 +314,7 @@ describe("httpServer without options", () => { let httpServer: Server; before(async() => { - httpServer = buildServer(JSON_PATH, { + ({ httpServer } = await buildServer(JSON_PATH, { projectRootDir: kProjectRootDir, componentsDir: kComponentsDir, i18n: { @@ -325,7 +325,7 @@ describe("httpServer without options", () => { ui: {} } } - }); + })); httpServer.listen(); await once(httpServer, "listening"); 
enableDestroy(httpServer); diff --git a/workspaces/vis-network/src/dataset.js b/workspaces/vis-network/src/dataset.js index 4987684e..a4426a86 100644 --- a/workspaces/vis-network/src/dataset.js +++ b/workspaces/vis-network/src/dataset.js @@ -73,7 +73,7 @@ export default class NodeSecureDataSet extends EventTarget { this.FLAGS = FLAGS; this.data = data; - if (data === null) { + if (data === null || data === undefined) { return; }
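Note: the hunks above rework `buildServer` into an async function that returns both the HTTP server and the `PayloadCache` it loaded, and the endpoints now resolve the current payload through that cache instead of reading `dataFilePath` directly. The following is a minimal consumer sketch of that reworked API, not a definitive implementation: the `@nodesecure/server` import specifier, the port, the paths, and the minimal `i18n` token maps are assumptions for illustration; only `buildServer`, `getCurrentSpec` and `findBySpec` are taken from the diff itself.

```ts
// Minimal sketch, assuming the API surface shown in the diff above.
// Import Node.js Dependencies
import path from "node:path";

// Import Internal Dependencies (assumed package specifier)
import { buildServer } from "@nodesecure/server";

const kProjectRootDir = path.join(import.meta.dirname, "..");
const kComponentsDir = path.join(kProjectRootDir, "public", "components");
const kDataFilePath = path.join(process.cwd(), "nsecure-result.json");

// buildServer is now async and returns { httpServer, cache } instead of a bare server.
const { httpServer, cache } = await buildServer(kDataFilePath, {
  projectRootDir: kProjectRootDir,
  componentsDir: kComponentsDir,
  // Placeholder token maps, mirroring the shape used in httpServer.test.ts.
  i18n: {
    english: { ui: {} },
    french: { ui: {} }
  }
});

httpServer.listen(3000, async() => {
  // The current payload is resolved from the cache, not from dataFilePath.
  const currentSpec = cache.getCurrentSpec();
  if (currentSpec !== null) {
    const payload = await cache.findBySpec(currentSpec);
    console.log(`serving ${currentSpec}:`, payload !== null);
  }
});
```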