diff --git a/.erb/configs/webpack.config.base.ts b/.erb/configs/webpack.config.base.ts index 8181b675d1..d892d02ca7 100644 --- a/.erb/configs/webpack.config.base.ts +++ b/.erb/configs/webpack.config.base.ts @@ -8,7 +8,6 @@ import webpackPaths from './webpack.paths'; // Only packages that were in release/app/package.json should be external // These are native modules or runtime dependencies that cannot/should not be bundled const nativeExternals = [ - '@gcas/fuse', 'better-sqlite3', 'reflect-metadata', 'typeorm', diff --git a/.erb/scripts/electron-rebuild.js b/.erb/scripts/electron-rebuild.js index 4f715f1f60..11e819578b 100644 --- a/.erb/scripts/electron-rebuild.js +++ b/.erb/scripts/electron-rebuild.js @@ -8,14 +8,11 @@ if ( Object.keys(dependencies || {}).length > 0 && fs.existsSync(path.join(webpackPaths.rootPath, 'node_modules')) ) { - const electronRebuildCmd = - './node_modules/.bin/electron-rebuild --sequential --force --types prod,dev,optional --module-dir .'; - const cmd = - process.platform === 'win32' - ? electronRebuildCmd.replace(/\//g, '\\') - : electronRebuildCmd; - execSync(cmd, { + execSync( + './node_modules/.bin/electron-rebuild --sequential --force --types prod,dev,optional --module-dir .', + { cwd: webpackPaths.rootPath, stdio: 'inherit', - }); + } + ); } diff --git a/.erb/scripts/start-main-dev.js b/.erb/scripts/start-main-dev.js new file mode 100644 index 0000000000..01e115b72a --- /dev/null +++ b/.erb/scripts/start-main-dev.js @@ -0,0 +1,5 @@ +// Electron 29 runs on Node 20, whose ESM entrypoint loader does not understand +// .ts files through the CommonJS ts-node/register hook. Keep the dev main +// process on the repo's CommonJS ts-node path by loading a JS bootstrap first. +require('ts-node/register/transpile-only'); +require('../../src/apps/main/main.ts'); diff --git a/.eslintrc.js b/.eslintrc.js index a81e34f4df..92f3a8cbe4 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,6 +1,6 @@ module.exports = { extends: ['@internxt/eslint-config-internxt'], - ignorePatterns: ['src/infra/schemas.d.ts'], + ignorePatterns: ['src/infra/schemas.d.ts', 'assets/assets.d.ts'], overrides: [ { files: ['*.ts', '*.tsx'], @@ -14,7 +14,7 @@ module.exports = { ], rules: { 'no-await-in-loop': 'warn', - 'no-use-before-define': 'warn', + '@typescript-eslint/no-use-before-define': ['warn', { functions: false, classes: true, variables: true }], 'array-callback-return': 'warn', 'max-len': [ 'warn', // TODO: Change back to 'error' after fixing existing violations diff --git a/.github/workflows/find-dead-code.yml b/.github/workflows/find-dead-code.yml index 87a563cc60..6dc2ff0831 100644 --- a/.github/workflows/find-dead-code.yml +++ b/.github/workflows/find-dead-code.yml @@ -18,7 +18,7 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' - name: Install dependencies run: npm ci --ignore-scripts diff --git a/.github/workflows/go-lint.yml b/.github/workflows/go-lint.yml new file mode 100644 index 0000000000..65d3b9b221 --- /dev/null +++ b/.github/workflows/go-lint.yml @@ -0,0 +1,25 @@ +name: Golang Lint + +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + lint: + name: 🐹 Golang Lint + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v4 + + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version-file: packages/fuse-daemon/go.mod + + - name: Run golangci-lint + uses: golangci/golangci-lint-action@v7 + with: + version: v2.11.4 + working-directory: packages/fuse-daemon diff --git a/.github/workflows/go-test.yml b/.github/workflows/go-test.yml new file mode 100644 index 0000000000..63c6c533fc --- /dev/null +++ b/.github/workflows/go-test.yml @@ -0,0 +1,23 @@ +name: Golang Test + +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + test: + name: 🐹 Golang Test + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v4 + + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version-file: packages/fuse-daemon/go.mod + + - name: Run tests + run: go test ./... + working-directory: packages/fuse-daemon diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 07767ac192..0ee6fa99c7 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -16,7 +16,7 @@ jobs: - name: Use Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' cache: 'npm' - name: Install dependencies @@ -36,7 +36,7 @@ jobs: - name: Use Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' cache: 'npm' - name: Install dependencies @@ -56,7 +56,7 @@ jobs: - name: Use Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' cache: 'npm' - name: Install dependencies diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 8927891443..27d15fc8b8 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -16,13 +16,16 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: '18.x' + node-version-file: '.nvmrc' - name: Create .npmrc file run: | echo "@internxt:registry=https://npm.pkg.github.com/" > .npmrc echo "//npm.pkg.github.com/:_authToken=${{ secrets.GITHUB_TOKEN }}" >> .npmrc + - name: Install rpm build tools + run: sudo apt-get install -y rpm + - name: Install dependencies run: npm ci diff --git a/.github/workflows/sonar-analysis.yml b/.github/workflows/sonar-analysis.yml index a0e07b5a9e..0c90ebb024 100644 --- a/.github/workflows/sonar-analysis.yml +++ b/.github/workflows/sonar-analysis.yml @@ -17,7 +17,7 @@ jobs: - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' - name: Install system dependencies run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b6d6dd9c18..f84186aad6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,7 @@ jobs: - name: Use Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' cache: 'npm' - name: Install system dependencies @@ -47,7 +47,7 @@ jobs: - name: Use Node.js uses: actions/setup-node@v4 with: - node-version: 18 + node-version-file: '.nvmrc' cache: 'npm' - name: Install dependencies diff --git a/.gitignore b/.gitignore index a8204048d8..f938b9ca26 100644 --- a/.gitignore +++ b/.gitignore @@ -56,3 +56,4 @@ enable-sso.sh .github/copilot-instructions.md .github/instructions/contributing.instructions.md .github/instructions/testing.instructions.md +.codex diff --git a/.husky/pre-push b/.husky/pre-push index f416884b76..7691eafd58 100755 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -3,3 +3,4 @@ npm run lint npm run type-check npm run format +cd packages/fuse-daemon && make lint diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000000..209e3ef4b6 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +20 diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 82a345f733..df4279f66f 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -70,10 +70,25 @@ "group": "dev" } }, + { + "label": "Build Daemon", + "type": "npm", + "script": "build:daemon", + "options": { + "cwd": "${workspaceFolder}" + }, + "presentation": { + "reveal": "always", + "panel": "dedicated", + "clear": true, + "group": "dev" + } + }, { "label": "Start Dev Environment", "dependsOrder": "sequence", "dependsOn": [ + "Build Daemon", "Start Renderer (Debug Mode)", "Start Main Process (Debug)" ], diff --git a/README.md b/README.md index c15c763182..e85e662a24 100644 --- a/README.md +++ b/README.md @@ -3,37 +3,11 @@ [![DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/internxt/drive-desktop-linux) ## Compatibility + As of right now, Internxt Drive Desktop for Linux is only compatible with Ubuntu and Debian with the File explorer **Nautilus** (The default file explorer for Gnome). We cannot guarantee that the app will work properly on other Linux distributions or with other file explorers as our development and testing efforts are focused on ensuring the best experience for Ubuntu and Debian users. -### FUSE 2 -This application requires **FUSE 2** for the virtual drive functionality. FUSE 3 is not supported. On recent Ubuntu versions, `libfuse2` may need to be installed manually: - -#### For example, on Debian (>= 13) and Ubuntu (>= 24.04): -```bash -sudo add-apt-repository universe -sudo apt install libfuse2t64 -``` -**Note:** In Ubuntu 24.04, the libfuse2 package was renamed to libfuse2t64. - -#### For example, on Ubuntu (>= 22.04): -```bash -sudo add-apt-repository universe -sudo apt install libfuse2 -``` -**Warning:** While libfuse2 is OK, do not install the fuse package as of 22.04 or you may break your system. If the fuse package did break your system, you can recover as described [here](https://github.com/orgs/AppImage/discussions/1339). - -#### For example, on Ubuntu (<= 21.10): -```bash -sudo apt install fuse libfuse2 -sudo modprobe fuse -sudo groupadd fuse - -user="$(whoami)" -sudo usermod -a -G fuse $user -``` - ## Installation Internxt Drive is available for Linux in two formats: @@ -43,7 +17,7 @@ Internxt Drive is available for Linux in two formats: Download and install the `.deb` package for full compatibility: ```bash -sudo dpkg -i internxt_2.5.1_amd64.deb +sudo dpkg -i internxt_2.6.0_amd64.deb ``` ### AppImage @@ -51,8 +25,8 @@ sudo dpkg -i internxt_2.5.1_amd64.deb Alternatively, you can use the AppImage format: ```bash -chmod +x Internxt-2.5.1.AppImage -./Internxt-2.5.1.AppImage +chmod +x Internxt-2.6.0.AppImage +./Internxt-2.6.0.AppImage ``` #### ⚠️ Important Note about AppImage and SSO Login: @@ -66,7 +40,9 @@ For the best experience with SSO authentication, we recommend using the .deb pac ### Prerequisites - [NVM](https://github.com/nvm-sh/nvm) (Node Version Manager) -- Node.js 18 +- Node.js 20 + +If working on the FUSE daemon (Go), see [packages/fuse-daemon/README.md](packages/fuse-daemon/README.md) for Go and linting tool prerequisites. ### Install @@ -94,11 +70,13 @@ To package apps for the local platform: npm run package ``` -## Login Configuration Using Deeplink +Building the `.rpm` package requires `rpmbuild`. On Ubuntu or Debian, install the `rpm` package before running the packaging command: -To log in via deeplink in development mode, special configuration is required due to limitations in Electron 19. +```bash +sudo apt-get install rpm +``` -### Create Entry-Point Script +## Login Configuration Using Deeplink Create a script in the root of the project named `enable-sso.sh` and add the following content: @@ -108,9 +86,9 @@ export NVM_DIR="$HOME/.nvm" # Load nvm manually [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" -cd "/your-project-path/drive-desktop-linux" -nvm use 18 -npm run start:main "$@" +cd "/your-project-path/drive-desktop-linux/" +nvm use 20 +npm run start:main -- "$@" ``` Use the following command to give the script execution permissions: @@ -150,3 +128,35 @@ Check that the internxt protocol is correctly registered: `gio mime x-scheme-handler/internxt` Verify by logging into the application. + +### Troubleshooting SSO in Development + +If opening an `internxt://` URL launches the development command but Electron exits with an error like this: + +``` +The SUID sandbox helper binary was found, but is not configured correctly. +You need to make sure that node_modules/electron/dist/chrome-sandbox is owned by root and has mode 4755. +``` + +Or the dev app does not open at all, it may be that the deeplink registration is working, but Electron is aborting before the main process starts because Chromium's Linux sandbox helper has the wrong owner or permissions. + +Confirm the current permissions (in the project root): + +```bash +ls -l node_modules/electron/dist/chrome-sandbox +``` + +Fix them from the project root: + +```bash +sudo chown root:root node_modules/electron/dist/chrome-sandbox +sudo chmod 4755 node_modules/electron/dist/chrome-sandbox +``` + +The expected result is that `chrome-sandbox` is owned by `root` and has the setuid bit enabled: + +```bash +-rwsr-xr-x 1 root root ... node_modules/electron/dist/chrome-sandbox +``` + +This may need to be repeated after reinstalling dependencies, because `node_modules/electron` can be recreated with regular user ownership. diff --git a/assets/entitlements.mac.plist b/assets/entitlements.mac.plist deleted file mode 100644 index dad3e20e6a..0000000000 --- a/assets/entitlements.mac.plist +++ /dev/null @@ -1,10 +0,0 @@ - - - - - com.apple.security.cs.allow-unsigned-executable-memory - - com.apple.security.cs.allow-jit - - - diff --git a/beforeBuild.js b/beforeBuild.js index 958220fed4..21d33affa7 100644 --- a/beforeBuild.js +++ b/beforeBuild.js @@ -1,8 +1,13 @@ -const electronRebuild = require('electron-rebuild'); +const electronRebuild = require('@electron/rebuild'); module.exports = async (context) => { const { appDir, electronVersion, arch } = context; - await electronRebuild.rebuild({ buildPath: appDir, electronVersion, arch }); + // Force compilation from source so native modules are built against the + // exact Electron V8 headers rather than using a generic prebuilt. Without + // this, prebuild-install downloads an Electron-v116 prebuilt compiled for + // Electron 24 (V8 11.0) which segfaults under Electron 25+ (V8 11.4). + process.env.npm_config_build_from_source = 'true'; + await electronRebuild.rebuild({ buildPath: appDir, electronVersion, arch, force: true }); return false; }; diff --git a/env.d.ts b/env.d.ts index 2b989ee366..10dca8a3fc 100644 --- a/env.d.ts +++ b/env.d.ts @@ -14,6 +14,7 @@ declare global { NOTIFICATIONS_URL: string; LOCK_REFRESH_INTERVAL: string; DRIVE_API_URL: string; + ENABLE_ANTIVIRUS?: string; } } } diff --git a/knip.json b/knip.json index 871eb93f28..683d35d97a 100644 --- a/knip.json +++ b/knip.json @@ -4,7 +4,7 @@ "ignore": ["**/*.test.{ts,tsx}"], "ignoreBinaries": ["esbuild"], "ignoreExportsUsedInFile": true, - "project": ["src/**/*.{ts,tsx}"], + "project": ["src/**/*.{ts,tsx}", "!src/**/*.test.{ts,tsx}"], "vitest": false, "webpack": false, "rules": { diff --git a/package-lock.json b/package-lock.json index 2d0c7e9d90..8b35baf72a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,16 +1,15 @@ { "name": "internxt", - "version": "2.5.4", + "version": "2.6.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "internxt", - "version": "2.5.4", + "version": "2.6.0", "hasInstallScript": true, "license": "AGPL-3.0", "dependencies": { - "@gcas/fuse": "^2.4.2", "@internxt/drive-desktop-core": "0.1.7", "@internxt/inxt-js": "^2.2.13", "@internxt/scan": "^1.0.7", @@ -30,6 +29,7 @@ "uuid": "^8.3.2" }, "devDependencies": { + "@electron/rebuild": "^3.7.2", "@headlessui/react": "^1.4.2", "@iconscout/react-unicons": "^1.1.6", "@internxt/eslint-config-internxt": "^1.0.9", @@ -74,12 +74,10 @@ "detect-port": "^1.3.0", "dotenv": "^10.0.0", "dotenv-webpack": "^7.0.3", - "electron": "^19.1.9", + "electron": "^29.0.0", "electron-builder": "^23.6.0", "electron-debug": "^3.2.0", "electron-fetch": "^1.9.1", - "electron-notarize": "^1.1.1", - "electron-rebuild": "^3.2.9", "electron-store": "^8.0.1", "eslint": "^8.5.0", "eslint-import-resolver-typescript": "^2.5.0", @@ -100,7 +98,6 @@ "mini-css-extract-plugin": "^2.4.5", "ms": "^2.1.3", "openapi-typescript": "^7.6.1", - "opencollective-postinstall": "^2.0.3", "phosphor-react": "^1.4.1", "postcss": "^8.4.5", "postcss-loader": "^6.2.1", @@ -134,7 +131,7 @@ "webpack-merge": "^5.8.0" }, "engines": { - "node": ">=18.0.0 <19.0.0" + "node": ">=20.0.0 <21.0.0" } }, "node_modules/@adobe/css-tools": { @@ -2124,26 +2121,244 @@ } }, "node_modules/@electron/get": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@electron/get/-/get-1.14.1.tgz", - "integrity": "sha512-BrZYyL/6m0ZXz/lDxy/nlVhQz+WF+iPS6qXolEU8atw7h6v1aYkjwJZ63m+bJMBTxDE66X+r2tPS4a/8C82sZw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@electron/get/-/get-2.0.3.tgz", + "integrity": "sha512-Qkzpg2s9GnVV2I2BjRksUi43U5e6+zaQMcjoJy0C+C5oxaKl+fmckGDQFtRpZpZV0NQekuZZ+tGz7EA9TVnQtQ==", "dev": true, "license": "MIT", "dependencies": { "debug": "^4.1.1", "env-paths": "^2.2.0", "fs-extra": "^8.1.0", - "got": "^9.6.0", + "got": "^11.8.5", "progress": "^2.0.3", "semver": "^6.2.0", "sumchecker": "^3.0.1" }, "engines": { - "node": ">=8.6" + "node": ">=12" + }, + "optionalDependencies": { + "global-agent": "^3.0.0" + } + }, + "node_modules/@electron/node-gyp": { + "version": "10.2.0-electron.1", + "resolved": "git+ssh://git@github.com/electron/node-gyp.git#06b29aafb7708acef8b3669835c8a7857ebc92d2", + "integrity": "sha512-CrYo6TntjpoMO1SHjl5Pa/JoUsECNqNdB7Kx49WLQpWzPw53eEITJ2Hs9fh/ryUYDn4pxZz11StaBYBrLFJdqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^8.1.0", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^10.2.1", + "nopt": "^6.0.0", + "proc-log": "^2.0.1", + "semver": "^7.3.5", + "tar": "^6.2.1", + "which": "^2.0.2" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": ">=12.13.0" + } + }, + "node_modules/@electron/node-gyp/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@electron/node-gyp/node_modules/minimatch": { + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@electron/node-gyp/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@electron/rebuild": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/@electron/rebuild/-/rebuild-3.7.2.tgz", + "integrity": "sha512-19/KbIR/DAxbsCkiaGMXIdPnMCJLkcf8AvGnduJtWBs/CBwiAjY1apCqOLVxrXg+rtXFCngbXhBanWjxLUt1Mg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@electron/node-gyp": "git+https://github.com/electron/node-gyp.git#06b29aafb7708acef8b3669835c8a7857ebc92d2", + "@malept/cross-spawn-promise": "^2.0.0", + "chalk": "^4.0.0", + "debug": "^4.1.1", + "detect-libc": "^2.0.1", + "fs-extra": "^10.0.0", + "got": "^11.7.0", + "node-abi": "^3.45.0", + "node-api-version": "^0.2.0", + "ora": "^5.1.0", + "read-binary-file-arch": "^1.0.6", + "semver": "^7.3.5", + "tar": "^6.0.5", + "yargs": "^17.0.1" + }, + "bin": { + "electron-rebuild": "lib/cli.js" + }, + "engines": { + "node": ">=12.13.0" + } + }, + "node_modules/@electron/rebuild/node_modules/@malept/cross-spawn-promise": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@malept/cross-spawn-promise/-/cross-spawn-promise-2.0.0.tgz", + "integrity": "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/malept" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/subscription/pkg/npm-.malept-cross-spawn-promise?utm_medium=referral&utm_source=npm_fund" + } + ], + "license": "Apache-2.0", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/@electron/rebuild/node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@electron/rebuild/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@electron/rebuild/node_modules/jsonfile": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.1.tgz", + "integrity": "sha512-zwOTdL3rFQ/lRdBnntKVOX6k5cKJwEc1HdilT71BWEu7J41gXIB2MRp+vxduPSwZJPWBxEzv4yH1wYLJGUHX4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" }, "optionalDependencies": { - "global-agent": "^3.0.0", - "global-tunnel-ng": "^2.7.1" + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@electron/rebuild/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@electron/rebuild/node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@electron/rebuild/node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@electron/rebuild/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" } }, "node_modules/@electron/universal": { @@ -2881,23 +3096,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@gcas/fuse": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/@gcas/fuse/-/fuse-2.4.2.tgz", - "integrity": "sha512-l/vVd2eXAuzKG4QilN1VRa8za5glndSK+jxcLrzMiXRrvbbCJthwqcBZUE3VzoRL0T/l7197tW+MKR9YeQCtZQ==", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "nanoresource": "^1.3.0", - "napi-macros": "^2.0.0" - }, - "bin": { - "fuse": "bin.js" - }, - "engines": { - "node": ">=18.15.0" - } - }, "node_modules/@headlessui/react": { "version": "1.7.19", "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.19.tgz", @@ -3726,9 +3924,9 @@ } }, "node_modules/@npmcli/fs/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -3753,19 +3951,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/@npmcli/move-file/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@oxc-resolver/binding-android-arm-eabi": { "version": "11.16.2", "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.16.2.tgz", @@ -5484,13 +5669,16 @@ "license": "MIT" }, "node_modules/@sindresorhus/is": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", - "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", + "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", "dev": true, "license": "MIT", "engines": { - "node": ">=6" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" } }, "node_modules/@so-ric/colorspace": { @@ -5788,16 +5976,16 @@ } }, "node_modules/@szmarczak/http-timer": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", - "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", + "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", "dev": true, "license": "MIT", "dependencies": { - "defer-to-connect": "^1.0.1" + "defer-to-connect": "^2.0.0" }, "engines": { - "node": ">=6" + "node": ">=10" } }, "node_modules/@tanstack/react-virtual": { @@ -6042,19 +6230,6 @@ "node": "*" } }, - "node_modules/@ts-morph/common/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@tsconfig/node10": { "version": "1.0.12", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", @@ -6681,6 +6856,17 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/yauzl": { + "version": "2.10.3", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", + "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "6.21.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.21.0.tgz", @@ -7478,6 +7664,20 @@ "node": ">= 8.0.0" } }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -7766,43 +7966,6 @@ "node": ">= 6.0.0" } }, - "node_modules/aproba": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz", - "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==", - "dev": true, - "license": "ISC" - }, - "node_modules/are-we-there-yet": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", - "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", - "deprecated": "This package is no longer supported.", - "dev": true, - "license": "ISC", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/are-we-there-yet/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/arg": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", @@ -8274,13 +8437,16 @@ "license": "MIT" }, "node_modules/baseline-browser-mapping": { - "version": "2.8.31", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.31.tgz", - "integrity": "sha512-a28v2eWrrRWPpJSzxc+mKwm0ZtVx/G8SepdQZDArnXYU/XS+IF6mp8aB/4E+hH1tyGCoDo3KlUCdlSxGDsRkAw==", + "version": "2.10.25", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.25.tgz", + "integrity": "sha512-QO/VHsXCQdnzADMfmkeOPvHdIAkoB7i0/rGjINPJEetLx75hNttVWGQ/jycHUDP9zZ9rupbm60WRxcwViB0MiA==", "dev": true, "license": "Apache-2.0", "bin": { - "baseline-browser-mapping": "dist/cli.js" + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" } }, "node_modules/batch": { @@ -8734,35 +8900,11 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, - "node_modules/cacache/node_modules/aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cacache/node_modules/clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/cacache/node_modules/glob": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", - "deprecated": "Glob versions prior to v9 are no longer supported", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, "license": "ISC", "dependencies": { @@ -8790,9 +8932,9 @@ } }, "node_modules/cacache/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", "dev": true, "license": "ISC", "dependencies": { @@ -8802,35 +8944,6 @@ "node": ">=10" } }, - "node_modules/cacache/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/cacache/node_modules/p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/cacheable-lookup": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", @@ -8842,63 +8955,20 @@ } }, "node_modules/cacheable-request": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", - "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", + "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", "dev": true, "license": "MIT", "dependencies": { "clone-response": "^1.0.2", "get-stream": "^5.1.0", "http-cache-semantics": "^4.0.0", - "keyv": "^3.0.0", + "keyv": "^4.0.0", "lowercase-keys": "^2.0.0", - "normalize-url": "^4.1.0", - "responselike": "^1.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cacheable-request/node_modules/get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "license": "MIT", - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=8" + "normalize-url": "^6.0.1", + "responselike": "^2.0.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cacheable-request/node_modules/json-buffer": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", - "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/cacheable-request/node_modules/keyv": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", - "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", - "dev": true, - "license": "MIT", - "dependencies": { - "json-buffer": "3.0.0" - } - }, - "node_modules/cacheable-request/node_modules/lowercase-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", - "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", - "dev": true, - "license": "MIT", "engines": { "node": ">=8" } @@ -9008,9 +9078,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001757", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001757.tgz", - "integrity": "sha512-r0nnL/I28Zi/yjk1el6ilj27tKcdjLsNqAOZr0yVjWPrSQyHgKI2INaEWw21bAQSv2LXRt1XuCS/GomNpWOxsQ==", + "version": "1.0.30001791", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001791.tgz", + "integrity": "sha512-yk0l/YSrOnFZk3UROpDLQD9+kC1l4meK/wed583AXrzoarMGJcbRi2Q4RaUYbKxYAsZ8sWmaSa/DsLmdBeI1vQ==", "dev": true, "funding": [ { @@ -9179,6 +9249,16 @@ "node": ">=0.10.0" } }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/cli-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", @@ -9339,16 +9419,6 @@ "node": ">=12.20" } }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true, - "license": "ISC", - "bin": { - "color-support": "bin.js" - } - }, "node_modules/color/node_modules/color-convert": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz", @@ -9499,22 +9569,6 @@ "dev": true, "license": "MIT" }, - "node_modules/concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", - "dev": true, - "engines": [ - "node >= 0.8" - ], - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^2.2.2", - "typedarray": "^0.0.6" - } - }, "node_modules/concurrently": { "version": "6.5.1", "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-6.5.1.tgz", @@ -9616,18 +9670,6 @@ "node": ">=10" } }, - "node_modules/config-chain": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", - "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "ini": "^1.3.4", - "proto-list": "~1.2.1" - } - }, "node_modules/connect-history-api-fallback": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", @@ -9638,13 +9680,6 @@ "node": ">=0.8" } }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", - "dev": true, - "license": "ISC" - }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -10337,16 +10372,30 @@ "license": "MIT" }, "node_modules/decompress-response": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", - "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", - "dev": true, + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "license": "MIT", "dependencies": { - "mimic-response": "^1.0.0" + "mimic-response": "^3.1.0" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/dedent": { @@ -10466,11 +10515,14 @@ } }, "node_modules/defer-to-connect": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", - "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", "dev": true, - "license": "MIT" + "license": "MIT", + "engines": { + "node": ">=10" + } }, "node_modules/define-data-property": { "version": "1.1.4", @@ -10526,13 +10578,6 @@ "node": ">=0.4.0" } }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", - "dev": true, - "license": "MIT" - }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -10985,13 +11030,6 @@ "dev": true, "license": "MIT" }, - "node_modules/duplexer3": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", - "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==", - "dev": true, - "license": "BSD-3-Clause" - }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -11031,22 +11069,22 @@ } }, "node_modules/electron": { - "version": "19.1.9", - "resolved": "https://registry.npmjs.org/electron/-/electron-19.1.9.tgz", - "integrity": "sha512-XT5LkTzIHB+ZtD3dTmNnKjVBWrDWReCKt9G1uAFLz6uJMEVcIUiYO+fph5pLXETiBw/QZBx8egduMEfIccLx+g==", + "version": "29.0.0", + "resolved": "https://registry.npmjs.org/electron/-/electron-29.0.0.tgz", + "integrity": "sha512-HhrRC5vWb6fAbWXP3A6ABwKUO9JvYSC4E141RzWFgnDBqNiNtabfmgC8hsVeCR65RQA2MLSDgC8uP52I9zFllQ==", "dev": true, "hasInstallScript": true, "license": "MIT", "dependencies": { - "@electron/get": "^1.14.1", - "@types/node": "^16.11.26", - "extract-zip": "^1.0.3" + "@electron/get": "^2.0.0", + "@types/node": "^20.9.0", + "extract-zip": "^2.0.1" }, "bin": { "electron": "cli.js" }, "engines": { - "node": ">= 8.6" + "node": ">= 12.20.55" } }, "node_modules/electron-builder": { @@ -11213,60 +11251,6 @@ "keyboardevents-areequal": "^0.2.1" } }, - "node_modules/electron-notarize": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/electron-notarize/-/electron-notarize-1.2.2.tgz", - "integrity": "sha512-ZStVWYcWI7g87/PgjPJSIIhwQXOaw4/XeXU+pWqMMktSLHaGMLHdyPPN7Cmao7+Cr7fYufA16npdtMndYciHNw==", - "deprecated": "Please use @electron/notarize moving forward. There is no API change, just a package name change", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.1.1", - "fs-extra": "^9.0.1" - }, - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/electron-notarize/node_modules/fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/electron-notarize/node_modules/jsonfile": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", - "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/electron-notarize/node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/electron-osx-sign": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/electron-osx-sign/-/electron-osx-sign-0.6.0.tgz", @@ -11282,380 +11266,89 @@ "minimist": "^1.2.0", "plist": "^3.0.1" }, - "bin": { - "electron-osx-flat": "bin/electron-osx-flat.js", - "electron-osx-sign": "bin/electron-osx-sign.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/electron-osx-sign/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/electron-osx-sign/node_modules/isbinaryfile": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-3.0.3.tgz", - "integrity": "sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-alloc": "^1.2.0" - }, - "engines": { - "node": ">=0.6.0" - } - }, - "node_modules/electron-osx-sign/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/electron-publish": { - "version": "23.6.0", - "resolved": "https://registry.npmjs.org/electron-publish/-/electron-publish-23.6.0.tgz", - "integrity": "sha512-jPj3y+eIZQJF/+t5SLvsI5eS4mazCbNYqatv5JihbqOstIM13k0d1Z3vAWntvtt13Itl61SO6seicWdioOU5dg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/fs-extra": "^9.0.11", - "builder-util": "23.6.0", - "builder-util-runtime": "9.1.1", - "chalk": "^4.1.1", - "fs-extra": "^10.0.0", - "lazy-val": "^1.0.5", - "mime": "^2.5.2" - } - }, - "node_modules/electron-publish/node_modules/fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/electron-publish/node_modules/jsonfile": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", - "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/electron-publish/node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/electron-rebuild": { - "version": "3.2.9", - "resolved": "https://registry.npmjs.org/electron-rebuild/-/electron-rebuild-3.2.9.tgz", - "integrity": "sha512-FkEZNFViUem3P0RLYbZkUjC8LUFIK+wKq09GHoOITSJjfDAVQv964hwaNseTTWt58sITQX3/5fHNYcTefqaCWw==", - "deprecated": "Please use @electron/rebuild moving forward. There is no API change, just a package name change", - "dev": true, - "license": "MIT", - "dependencies": { - "@malept/cross-spawn-promise": "^2.0.0", - "chalk": "^4.0.0", - "debug": "^4.1.1", - "detect-libc": "^2.0.1", - "fs-extra": "^10.0.0", - "got": "^11.7.0", - "lzma-native": "^8.0.5", - "node-abi": "^3.0.0", - "node-api-version": "^0.1.4", - "node-gyp": "^9.0.0", - "ora": "^5.1.0", - "semver": "^7.3.5", - "tar": "^6.0.5", - "yargs": "^17.0.1" - }, - "bin": { - "electron-rebuild": "lib/src/cli.js" - }, - "engines": { - "node": ">=12.13.0" - } - }, - "node_modules/electron-rebuild/node_modules/@malept/cross-spawn-promise": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@malept/cross-spawn-promise/-/cross-spawn-promise-2.0.0.tgz", - "integrity": "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/malept" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/subscription/pkg/npm-.malept-cross-spawn-promise?utm_medium=referral&utm_source=npm_fund" - } - ], - "license": "Apache-2.0", - "dependencies": { - "cross-spawn": "^7.0.1" - }, - "engines": { - "node": ">= 12.13.0" - } - }, - "node_modules/electron-rebuild/node_modules/@sindresorhus/is": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" - } - }, - "node_modules/electron-rebuild/node_modules/@szmarczak/http-timer": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", - "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", - "dev": true, - "license": "MIT", - "dependencies": { - "defer-to-connect": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/electron-rebuild/node_modules/cacheable-request": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", - "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", - "dev": true, - "license": "MIT", - "dependencies": { - "clone-response": "^1.0.2", - "get-stream": "^5.1.0", - "http-cache-semantics": "^4.0.0", - "keyv": "^4.0.0", - "lowercase-keys": "^2.0.0", - "normalize-url": "^6.0.1", - "responselike": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/electron-rebuild/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/electron-rebuild/node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/electron-rebuild/node_modules/defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/electron-rebuild/node_modules/fs-extra": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", - "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/electron-rebuild/node_modules/get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "license": "MIT", - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/electron-rebuild/node_modules/got": { - "version": "11.8.6", - "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", - "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@sindresorhus/is": "^4.0.0", - "@szmarczak/http-timer": "^4.0.5", - "@types/cacheable-request": "^6.0.1", - "@types/responselike": "^1.0.0", - "cacheable-lookup": "^5.0.3", - "cacheable-request": "^7.0.2", - "decompress-response": "^6.0.0", - "http2-wrapper": "^1.0.0-beta.5.2", - "lowercase-keys": "^2.0.0", - "p-cancelable": "^2.0.0", - "responselike": "^2.0.0" - }, - "engines": { - "node": ">=10.19.0" - }, - "funding": { - "url": "https://github.com/sindresorhus/got?sponsor=1" - } - }, - "node_modules/electron-rebuild/node_modules/jsonfile": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", - "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" + "bin": { + "electron-osx-flat": "bin/electron-osx-flat.js", + "electron-osx-sign": "bin/electron-osx-sign.js" }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" + "engines": { + "node": ">=4.0.0" } }, - "node_modules/electron-rebuild/node_modules/lowercase-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", - "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "node_modules/electron-osx-sign/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dev": true, "license": "MIT", - "engines": { - "node": ">=8" + "dependencies": { + "ms": "2.0.0" } }, - "node_modules/electron-rebuild/node_modules/mimic-response": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "node_modules/electron-osx-sign/node_modules/isbinaryfile": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-3.0.3.tgz", + "integrity": "sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw==", "dev": true, "license": "MIT", - "engines": { - "node": ">=10" + "dependencies": { + "buffer-alloc": "^1.2.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": ">=0.6.0" } }, - "node_modules/electron-rebuild/node_modules/normalize-url": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", - "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", + "node_modules/electron-osx-sign/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } + "license": "MIT" }, - "node_modules/electron-rebuild/node_modules/p-cancelable": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", - "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", + "node_modules/electron-publish": { + "version": "23.6.0", + "resolved": "https://registry.npmjs.org/electron-publish/-/electron-publish-23.6.0.tgz", + "integrity": "sha512-jPj3y+eIZQJF/+t5SLvsI5eS4mazCbNYqatv5JihbqOstIM13k0d1Z3vAWntvtt13Itl61SO6seicWdioOU5dg==", "dev": true, "license": "MIT", - "engines": { - "node": ">=8" + "dependencies": { + "@types/fs-extra": "^9.0.11", + "builder-util": "23.6.0", + "builder-util-runtime": "9.1.1", + "chalk": "^4.1.1", + "fs-extra": "^10.0.0", + "lazy-val": "^1.0.5", + "mime": "^2.5.2" } }, - "node_modules/electron-rebuild/node_modules/responselike": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", - "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", + "node_modules/electron-publish/node_modules/fs-extra": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", "dev": true, "license": "MIT", "dependencies": { - "lowercase-keys": "^2.0.0" + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": ">=12" } }, - "node_modules/electron-rebuild/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "node_modules/electron-publish/node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" }, - "engines": { - "node": ">=10" + "optionalDependencies": { + "graceful-fs": "^4.1.6" } }, - "node_modules/electron-rebuild/node_modules/universalify": { + "node_modules/electron-publish/node_modules/universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", @@ -11665,35 +11358,6 @@ "node": ">= 10.0.0" } }, - "node_modules/electron-rebuild/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/electron-rebuild/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, "node_modules/electron-store": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/electron-store/-/electron-store-8.2.0.tgz", @@ -11792,9 +11456,19 @@ } }, "node_modules/electron/node_modules/@types/node": { - "version": "16.18.126", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.126.tgz", - "integrity": "sha512-OTcgaiwfGFBKacvfwuHzzn1KLxH/er8mluiy8/uM3sGXHaRe73RrSIj01jow9t4kJEW633Ov+cOexXeiApTyAw==", + "version": "20.19.40", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.40.tgz", + "integrity": "sha512-xxx6M2IpSTnnKcR0cMvIiohkiCx20/oRPtWGbenFygKCGl3zqUzdNjQ/1V4solq1LU+dgv0nQzeGOuqkqZGg0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/electron/node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "dev": true, "license": "MIT" }, @@ -12869,38 +12543,26 @@ "license": "MIT" }, "node_modules/extract-zip": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-1.7.0.tgz", - "integrity": "sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "concat-stream": "^1.6.2", - "debug": "^2.6.9", - "mkdirp": "^0.5.4", + "debug": "^4.1.1", + "get-stream": "^5.1.0", "yauzl": "^2.10.0" }, "bin": { "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" } }, - "node_modules/extract-zip/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/extract-zip/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, "node_modules/extsprintf": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.4.1.tgz", @@ -13487,27 +13149,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gauge": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", - "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", - "deprecated": "This package is no longer supported.", - "dev": true, - "license": "ISC", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.3", - "console-control-strings": "^1.1.0", - "has-unicode": "^2.0.1", - "signal-exit": "^3.0.7", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/generator-function": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", @@ -13585,16 +13226,19 @@ } }, "node_modules/get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", "dev": true, "license": "MIT", "dependencies": { "pump": "^3.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/get-symbol-description": { @@ -13707,9 +13351,9 @@ } }, "node_modules/global-agent/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "optional": true, @@ -13720,34 +13364,6 @@ "node": ">=10" } }, - "node_modules/global-tunnel-ng": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/global-tunnel-ng/-/global-tunnel-ng-2.7.1.tgz", - "integrity": "sha512-4s+DyciWBV0eK148wqXxcmVAbFVPqtc3sEtUE/GTQfuU80rySLcMhUmHKSHI7/LDj8q0gDYI1lIhRRB7ieRAqg==", - "dev": true, - "license": "BSD-3-Clause", - "optional": true, - "dependencies": { - "encodeurl": "^1.0.2", - "lodash": "^4.17.10", - "npm-conf": "^1.1.3", - "tunnel": "^0.0.6" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/global-tunnel-ng/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/globals": { "version": "13.24.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", @@ -13838,26 +13454,29 @@ } }, "node_modules/got": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", - "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", + "version": "11.8.6", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", + "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", "dev": true, "license": "MIT", "dependencies": { - "@sindresorhus/is": "^0.14.0", - "@szmarczak/http-timer": "^1.1.2", - "cacheable-request": "^6.0.0", - "decompress-response": "^3.3.0", - "duplexer3": "^0.1.4", - "get-stream": "^4.1.0", - "lowercase-keys": "^1.0.1", - "mimic-response": "^1.0.1", - "p-cancelable": "^1.0.0", - "to-readable-stream": "^1.0.0", - "url-parse-lax": "^3.0.0" + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.2", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" }, "engines": { - "node": ">=8.6" + "node": ">=10.19.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" } }, "node_modules/graceful-fs": { @@ -14004,13 +13623,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", - "dev": true, - "license": "ISC" - }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -14644,9 +14256,9 @@ } }, "node_modules/ip-address": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", - "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.2.0.tgz", + "integrity": "sha512-/+S6j4E9AHvW9SWMSEY9Xfy66O5PWvVEJ08O0y5JGyEKQpojb0K0GKpz/v5HJ/G0vi3D2sjGK78119oXZeE0qA==", "dev": true, "license": "MIT", "engines": { @@ -16084,13 +15696,13 @@ } }, "node_modules/lowercase-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", - "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", "dev": true, "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/lru-cache": { @@ -16108,59 +15720,18 @@ "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz", "integrity": "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==", "license": "MIT", - "engines": { - "node": ">=12" - } - }, - "node_modules/lz-string": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", - "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", - "dev": true, - "license": "MIT", - "bin": { - "lz-string": "bin/bin.js" - } - }, - "node_modules/lzma-native": { - "version": "8.0.6", - "resolved": "https://registry.npmjs.org/lzma-native/-/lzma-native-8.0.6.tgz", - "integrity": "sha512-09xfg67mkL2Lz20PrrDeNYZxzeW7ADtpYFbwSQh9U8+76RIzx5QsJBMy8qikv3hbUPfpy6hqwxt6FcGK81g9AA==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "node-addon-api": "^3.1.0", - "node-gyp-build": "^4.2.1", - "readable-stream": "^3.6.0" - }, - "bin": { - "lzmajs": "bin/lzmajs" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/lzma-native/node_modules/node-addon-api": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", - "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", - "dev": true, - "license": "MIT" + "engines": { + "node": ">=12" + } }, - "node_modules/lzma-native/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", "dev": true, "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" + "bin": { + "lz-string": "bin/bin.js" } }, "node_modules/magic-string": { @@ -16599,11 +16170,11 @@ } }, "node_modules/minipass-flush": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", - "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.7.tgz", + "integrity": "sha512-TbqTz9cUwWyHS2Dy89P3ocAGUGxKjjLuR9z8w4WUTGAVgEj17/4nhgo2Du56i0Fm3Pm30g4iA8Lcqctc76jCzA==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { "minipass": "^3.0.0" }, @@ -16666,16 +16237,16 @@ "license": "ISC" }, "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", "dev": true, "license": "MIT", - "dependencies": { - "minimist": "^1.2.6" - }, "bin": { "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/mkdirp-classic": { @@ -16745,27 +16316,12 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/nanoresource": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/nanoresource/-/nanoresource-1.3.0.tgz", - "integrity": "sha512-OI5dswqipmlYfyL3k/YMm7mbERlh4Bd1KuKdMHpeoVD1iVxqxaTMKleB4qaA2mbQZ6/zMNSxCXv9M9P/YbqTuQ==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.4" - } - }, "node_modules/napi-build-utils": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz", "integrity": "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==", "license": "MIT" }, - "node_modules/napi-macros": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/napi-macros/-/napi-macros-2.2.2.tgz", - "integrity": "sha512-hmEVtAGYzVQpCKdbQea4skABsdXW4RUh5t5mJ2zzqowJS2OyXZTU1KhDVFhx+NlWZ4ap9mqR9TcDO3LTTttd+g==", - "license": "MIT" - }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -16840,9 +16396,9 @@ "optional": true }, "node_modules/node-api-version": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/node-api-version/-/node-api-version-0.1.4.tgz", - "integrity": "sha512-KGXihXdUChwJAOHO53bv9/vXcLmdUsZ6jIptbvYvkpKfth+r7jw44JkVxQFA3kX5nQjzjmGu1uAu/xNNLNlI5g==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-api-version/-/node-api-version-0.2.1.tgz", + "integrity": "sha512-2xP/IGGMmmSQpI1+O/k72jF/ykvZ89JeuKX3TLJAYPDVLUalrshrLHkeVcCCZqG/eEa635cr8IBYzgnDvM2O8Q==", "dev": true, "license": "MIT", "dependencies": { @@ -16850,9 +16406,9 @@ } }, "node_modules/node-api-version/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -16872,57 +16428,6 @@ "node": ">= 6.13.0" } }, - "node_modules/node-gyp": { - "version": "9.4.1", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-9.4.1.tgz", - "integrity": "sha512-OQkWKbjQKbGkMf/xqI1jjy3oCTgMKJac58G2+bjZb3fza6gW2YrCSdMQYaoTb70crvE//Gngr4f0AgVHmqHvBQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "env-paths": "^2.2.0", - "exponential-backoff": "^3.1.1", - "glob": "^7.1.4", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^10.0.3", - "nopt": "^6.0.0", - "npmlog": "^6.0.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.2", - "which": "^2.0.2" - }, - "bin": { - "node-gyp": "bin/node-gyp.js" - }, - "engines": { - "node": "^12.13 || ^14.13 || >=16" - } - }, - "node_modules/node-gyp-build": { - "version": "4.8.4", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", - "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==", - "dev": true, - "license": "MIT", - "bin": { - "node-gyp-build": "bin.js", - "node-gyp-build-optional": "optional.js", - "node-gyp-build-test": "build-test.js" - } - }, - "node_modules/node-gyp/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/node-releases": { "version": "2.0.27", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", @@ -16981,28 +16486,16 @@ } }, "node_modules/normalize-url": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", - "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" - } - }, - "node_modules/npm-conf": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/npm-conf/-/npm-conf-1.1.3.tgz", - "integrity": "sha512-Yic4bZHJOt9RCFbRP3GgpqhScOY4HH3V2P8yBj6CeYq118Qr+BLXqT2JvpJ00mryLESpgOxf5XlFv4ZjXxLScw==", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "config-chain": "^1.1.11", - "pify": "^3.0.0" + "node": ">=10" }, - "engines": { - "node": ">=4" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/npm-run-path": { @@ -17018,23 +16511,6 @@ "node": ">=8" } }, - "node_modules/npmlog": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", - "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", - "deprecated": "This package is no longer supported.", - "dev": true, - "license": "ISC", - "dependencies": { - "are-we-there-yet": "^3.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^4.0.3", - "set-blocking": "^2.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", @@ -17353,16 +16829,6 @@ "node": ">=12" } }, - "node_modules/opencollective-postinstall": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz", - "integrity": "sha512-8AV/sCtuzUeTo8gQK5qDZzARrulB3egtLzFgteqB2tcT4Mw7B8Kt7JcDHmltjz6FOAHsvTevk70gZEbhM4ZS9Q==", - "dev": true, - "license": "MIT", - "bin": { - "opencollective-postinstall": "index.js" - } - }, "node_modules/opener": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", @@ -17466,13 +16932,13 @@ } }, "node_modules/p-cancelable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", - "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", + "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", "dev": true, "license": "MIT", "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/p-limit": { @@ -17507,6 +16973,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/p-retry": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", @@ -17783,17 +17265,6 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=4" - } - }, "node_modules/pirates": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", @@ -18614,19 +18085,6 @@ "postcss": "^8.2.15" } }, - "node_modules/postcss-normalize-url/node_modules/normalize-url": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", - "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/postcss-normalize-whitespace": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz", @@ -18783,16 +18241,6 @@ "node": ">= 0.8.0" } }, - "node_modules/prepend-http": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", - "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/prettier": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.1.tgz", @@ -18935,6 +18383,16 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/proc-log": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-2.0.1.tgz", + "integrity": "sha512-Kcmo2FhfDTXdcbfDH76N7uBYHINxc/8GW7UAVuVP9I+Va3uHSerrnKV6dLooga/gh7GlgzuCCr/eoldnL1muGw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -18992,14 +18450,6 @@ "dev": true, "license": "MIT" }, - "node_modules/proto-list": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", - "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", - "dev": true, - "license": "ISC", - "optional": true - }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -19419,6 +18869,19 @@ } } }, + "node_modules/read-binary-file-arch": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/read-binary-file-arch/-/read-binary-file-arch-1.0.6.tgz", + "integrity": "sha512-BNg9EN3DD3GsDXX7Aa8O4p92sryjkmzYYgmgTAc6CA4uGLEDzFfxOxugu21akOxpcXHiEgsYkC6nPsQvLLLmEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4" + }, + "bin": { + "read-binary-file-arch": "cli.js" + } + }, "node_modules/read-cache": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", @@ -19759,13 +19222,16 @@ } }, "node_modules/responselike": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", - "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", + "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", "dev": true, "license": "MIT", "dependencies": { - "lowercase-keys": "^1.0.0" + "lowercase-keys": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/restore-cursor": { @@ -20397,13 +19863,6 @@ "node": ">= 0.8.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", - "dev": true, - "license": "ISC" - }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", @@ -20656,33 +20115,6 @@ "simple-concat": "^1.0.0" } }, - "node_modules/simple-get/node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "license": "MIT", - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/simple-get/node_modules/mimic-response": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/simple-update-notifier": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz", @@ -20851,13 +20283,13 @@ } }, "node_modules/socks": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz", - "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==", + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.8.tgz", + "integrity": "sha512-NlGELfPrgX2f1TAAcz0WawlLn+0r3FyhhCRpFFK2CemXenPYvzMWWZINv3eDNo9ucdwme7oCHRY0Jnbs4aIkog==", "dev": true, "license": "MIT", "dependencies": { - "ip-address": "^10.0.1", + "ip-address": "^10.1.1", "smart-buffer": "^4.2.0" }, "engines": { @@ -21652,19 +21084,6 @@ "node": ">=8" } }, - "node_modules/tar/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/tar/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -22065,16 +21484,6 @@ "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", "license": "MIT" }, - "node_modules/to-readable-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", - "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -22423,17 +21832,6 @@ "dev": true, "license": "0BSD" }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" - } - }, "node_modules/tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -22562,13 +21960,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/typedarray": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", - "dev": true, - "license": "MIT" - }, "node_modules/typeorm": { "version": "0.3.28", "resolved": "https://registry.npmjs.org/typeorm/-/typeorm-0.3.28.tgz", @@ -22987,19 +22378,6 @@ "punycode": "^2.1.0" } }, - "node_modules/url-parse-lax": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", - "integrity": "sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "prepend-http": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/use-callback-ref": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", @@ -24366,16 +23744,6 @@ "node": ">=8" } }, - "node_modules/wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, "node_modules/wildcard": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", diff --git a/package.json b/package.json index 5ce8305b2d..9597c53787 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "internxt", - "version": "2.5.4", + "version": "2.6.0", "author": "Internxt ", "description": "Internxt Drive client UI", "main": "./dist/main/main.js", @@ -9,22 +9,24 @@ "url": "https://github.com/internxt/drive-desktop-linux" }, "scripts": { - "build": "concurrently \"npm run build:main\" \"npm run build:renderer\"", + "build": "concurrently \"npm run build:main\" \"npm run build:renderer\" \"npm run build:daemon\"", + "build:daemon": "cd packages/fuse-daemon && go build -ldflags='-s -w' -o ../../dist/fuse-daemon ./cmd/daemon", "build:main": "cross-env NODE_ENV=production TS_NODE_TRANSPILE_ONLY=true webpack --config ./.erb/configs/webpack.config.main.prod.ts", "build:renderer": "cross-env NODE_ENV=production TS_NODE_TRANSPILE_ONLY=true webpack --config ./.erb/configs/webpack.config.renderer.prod.ts", "rebuild": "electron-rebuild --parallel --types prod,dev,optional --module-dir .", "reinstall:nautilus-extension": "NODE_ENV=development ts-node src/apps/nautilus-extension/reload.ts", - "lint": "cross-env NODE_ENV=development eslint . --ext .ts,.tsx --max-warnings=210", + "lint": "cross-env NODE_ENV=development eslint . --ext .ts,.tsx --max-warnings=50", "lint:fix": "npm run lint --fix", "format": "prettier src --check", "format:fix": "prettier src --write", "fix:format": "npm run format:fix", "package": "ts-node ./.erb/scripts/clean.js dist && npm run build && electron-builder build --publish never", "publish": "ts-node ./.erb/scripts/clean.js dist && npm run build && electron-builder build --publish always", - "postinstall": "electron-builder install-app-deps && cross-env NODE_ENV=development TS_NODE_TRANSPILE_ONLY=true webpack --config ./.erb/configs/webpack.config.renderer.dev.dll.ts && opencollective-postinstall", + "postinstall": "electron-builder install-app-deps && cross-env NODE_ENV=development TS_NODE_TRANSPILE_ONLY=true webpack --config ./.erb/configs/webpack.config.renderer.dev.dll.ts", + "prestart": "npm run build:daemon", "start": "ts-node ./.erb/scripts/check-port-in-use.js && npm run start:renderer", - "start:main": "cross-env NODE_ENV=development electron -r ts-node/register/transpile-only ./src/apps/main/main.ts", - "start:main:debug": "cross-env NODE_ENV=development electron --inspect=5858 -r ts-node/register/transpile-only ./src/apps/main/main.ts", + "start:main": "cross-env NODE_ENV=development electron ./.erb/scripts/start-main-dev.js", + "start:main:debug": "cross-env NODE_ENV=development electron --inspect=5858 ./.erb/scripts/start-main-dev.js", "start:renderer": "cross-env NODE_ENV=development TS_NODE_TRANSPILE_ONLY=true webpack serve --config ./.erb/configs/webpack.config.renderer.dev.ts", "test": "concurrently \"npm:test:main\" \"npm:test:renderer\"", "test:main": "vitest --config vitest.config.main.ts", @@ -65,7 +67,8 @@ "linux": { "target": [ "AppImage", - "deb" + "deb", + "rpm" ], "category": "Development" }, @@ -75,6 +78,11 @@ "python3-nautilus" ] }, + "rpm": { + "depends": [ + "fuse-libs" + ] + }, "directories": { "buildResources": "assets", "output": "build" @@ -82,13 +90,15 @@ "extraResources": [ "./assets/**", "./clamAV/**", - "./src/apps/nautilus-extension/internxt-virtual-drive.py" + "./src/apps/nautilus-extension/internxt-virtual-drive.py", + "./dist/fuse-daemon" ], "publish": { "provider": "github" } }, "devDependencies": { + "@electron/rebuild": "^3.7.2", "@headlessui/react": "^1.4.2", "@iconscout/react-unicons": "^1.1.6", "@internxt/eslint-config-internxt": "^1.0.9", @@ -133,12 +143,10 @@ "detect-port": "^1.3.0", "dotenv": "^10.0.0", "dotenv-webpack": "^7.0.3", - "electron": "^19.1.9", + "electron": "^29.0.0", "electron-builder": "^23.6.0", "electron-debug": "^3.2.0", "electron-fetch": "^1.9.1", - "electron-notarize": "^1.1.1", - "electron-rebuild": "^3.2.9", "electron-store": "^8.0.1", "eslint": "^8.5.0", "eslint-import-resolver-typescript": "^2.5.0", @@ -159,7 +167,6 @@ "mini-css-extract-plugin": "^2.4.5", "ms": "^2.1.3", "openapi-typescript": "^7.6.1", - "opencollective-postinstall": "^2.0.3", "phosphor-react": "^1.4.1", "postcss": "^8.4.5", "postcss-loader": "^6.2.1", @@ -193,7 +200,6 @@ "webpack-merge": "^5.8.0" }, "dependencies": { - "@gcas/fuse": "^2.4.2", "@internxt/drive-desktop-core": "0.1.7", "@internxt/inxt-js": "^2.2.13", "@internxt/scan": "^1.0.7", @@ -213,10 +219,10 @@ "uuid": "^8.3.2" }, "devEngines": { - "node": ">=18.0.0 <19.0.0", - "npm": ">=7.x" + "node": ">=20.0.0 <21.0.0", + "npm": ">=10.0.0 <11.0.0" }, "engines": { - "node": ">=18.0.0 <19.0.0" + "node": ">=20.0.0 <21.0.0" } } diff --git a/packages/fuse-daemon/Makefile b/packages/fuse-daemon/Makefile new file mode 100644 index 0000000000..cd7e985560 --- /dev/null +++ b/packages/fuse-daemon/Makefile @@ -0,0 +1,10 @@ +.PHONY: build test lint + +build: + go build -ldflags="-s -w" -o ../../dist/fuse-daemon ./cmd/daemon + +test: + go test ./... + +lint: + golangci-lint run ./... diff --git a/packages/fuse-daemon/README.md b/packages/fuse-daemon/README.md new file mode 100644 index 0000000000..b4ab07380d --- /dev/null +++ b/packages/fuse-daemon/README.md @@ -0,0 +1,104 @@ +# FUSE Daemon + +The Go daemon that mounts a FUSE filesystem and forwards all operations to the Electron app over a Unix domain socket. + +## Prerequisites + +### Installing Go + +The daemon requires Go 1.26+ to build from source. +```bash +wget https://go.dev/dl/go1.26.1.linux-amd64.tar.gz +sudo rm -rf /usr/local/go +sudo tar -C /usr/local -xzf go1.26.1.linux-amd64.tar.gz +``` + +Add to your `~/.bashrc`: + +```bash +export PATH=$PATH:/usr/local/go/bin +``` + +Then reload: + +```bash +source ~/.bashrc +``` + +## Development + +### Installing dev tools + +Install the linter (required for `make lint` and the pre-push hook): + +```bash +go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.11.4 +``` + +This installs the binary to `$GOPATH/bin` (usually `~/go/bin`). If `golangci-lint` is not found after installing, add this to your `~/.bashrc`: + +```bash +export PATH=$PATH:$(go env GOPATH)/bin +``` + +Then reload: + +```bash +source ~/.bashrc +``` + +### Available commands + +```bash +make build # compile the daemon binary to ../../dist/fuse-daemon +make test # run all Go tests +make lint # run golangci-lint +``` + +## Building + +From the repository root: + +```bash +npm run build:daemon +``` + +Or directly: + +```bash +cd packages/fuse-daemon +go build -ldflags="-s -w" -o ../../dist/fuse-daemon ./cmd/daemon +``` + +## Running + +The daemon is spawned automatically by Electron, you do not need to run it manually during normal development. Starting the app via `npm run start` or the VS Code `Debug Electron (Full)` launch config handles everything. + +If you need to run it manually for debugging: + +```bash +INTERNXT_MOUNT=/home/[user]/Internxt \ +INTERNXT_SOCKET=/run/[user]/1000/internxt-fuse.sock \ +INTERNXT_LOG_FILE=~/.config/internxt/logs/fuse-daemon.log \ +./dist/fuse-daemon +``` + +## Configuration + +| Config | Env var | Required | +|--------|---------|----------| +| Mount point | `INTERNXT_MOUNT` | Yes | +| Unix socket path | `INTERNXT_SOCKET` | Yes | +| Log file path | `INTERNXT_LOG_FILE` | Yes | + +## Logs + +Logs are written to `~/.config/internxt/logs/fuse-daemon.log` alongside other Internxt application logs. + +## Unmounting + +If the daemon is killed uncleanly and the mount is left orphaned: + +```bash +fusermount -u /home/[user]/Internxt +``` diff --git a/packages/fuse-daemon/cmd/daemon/main.go b/packages/fuse-daemon/cmd/daemon/main.go new file mode 100644 index 0000000000..c9e1b5be61 --- /dev/null +++ b/packages/fuse-daemon/cmd/daemon/main.go @@ -0,0 +1,58 @@ +package main + +import ( + "os" + "os/signal" + "syscall" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" + "internxt/drive-desktop-linux/fuse-daemon/internal/config" + "internxt/drive-desktop-linux/fuse-daemon/internal/filesystem" + "internxt/drive-desktop-linux/fuse-daemon/internal/logger" +) + +func main() { + config := config.ParseConfig() + + logger := logger.New(config.LogFile) + + logger.Info("starting fuse daemon", "mount", config.MountPoint, "socket", config.SocketPath) + + client := client.NewClient(config.SocketPath) + + server, done, err := filesystem.Mount(config.MountPoint, logger, client) + if err != nil { + logger.Error("failed to mount fuse filesystem", "error", err) + os.Exit(1) + } + + logger.Info("fuse filesystem mounted", "mount", config.MountPoint) + + if err := client.NotifyReady(logger); err != nil { + logger.Error("failed to notify electron of readiness", "error", err) + if err := server.Unmount(); err != nil { + logger.Error("failed to unmount fuse filesystem", "error", err) + } + os.Exit(1) + } + + stop := make(chan os.Signal, 1) + signal.Notify(stop, syscall.SIGINT, syscall.SIGTERM) + + select { + case <-stop: + logger.Info("received shutdown signal") + case <-done: + logger.Warn("fuse filesystem was unmounted externally") + } + + if err := server.Unmount(); err != nil { + logger.Error("failed to unmount fuse filesystem", "error", err) + } + + logger.Info("fuse daemon stopped") +} + + + + diff --git a/packages/fuse-daemon/go.mod b/packages/fuse-daemon/go.mod new file mode 100644 index 0000000000..176107f46a --- /dev/null +++ b/packages/fuse-daemon/go.mod @@ -0,0 +1,7 @@ +module internxt/drive-desktop-linux/fuse-daemon + +go 1.26.1 + +require github.com/hanwen/go-fuse/v2 v2.9.0 + +require golang.org/x/sys v0.28.0 // indirect diff --git a/packages/fuse-daemon/go.sum b/packages/fuse-daemon/go.sum new file mode 100644 index 0000000000..b30d22cb7c --- /dev/null +++ b/packages/fuse-daemon/go.sum @@ -0,0 +1,6 @@ +github.com/hanwen/go-fuse/v2 v2.9.0 h1:0AOGUkHtbOVeyGLr0tXupiid1Vg7QB7M6YUcdmVdC58= +github.com/hanwen/go-fuse/v2 v2.9.0/go.mod h1:yE6D2PqWwm3CbYRxFXV9xUd8Md5d6NG0WBs5spCswmI= +github.com/moby/sys/mountinfo v0.7.2 h1:1shs6aH5s4o5H2zQLn796ADW1wMrIwHsyJ2v9KouLrg= +github.com/moby/sys/mountinfo v0.7.2/go.mod h1:1YOa8w8Ih7uW0wALDUgT1dTTSBrZ+HiBLGws92L2RU4= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= diff --git a/packages/fuse-daemon/internal/client/client.go b/packages/fuse-daemon/internal/client/client.go new file mode 100644 index 0000000000..8ec2cb2027 --- /dev/null +++ b/packages/fuse-daemon/internal/client/client.go @@ -0,0 +1,182 @@ +package client + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "log/slog" + "net" + "net/http" + "time" + + "github.com/hanwen/go-fuse/v2/fuse" +) + + +type Client struct { + http *http.Client + socketPath string +} + +func NewClient(socketPath string) *Client { + return &Client{ + http: NewUnixSocketClient(socketPath), + socketPath: socketPath, + } +} + +func NewUnixSocketClient(socketPath string) *http.Client { + return &http.Client{ + Transport: &http.Transport{ + DialContext: func(ctx context.Context, _, _ string) (net.Conn, error) { + return (&net.Dialer{}).DialContext(ctx, "unix", socketPath) + }, + }, + } +} + +// NotifyReady sends POST /daemon/ready to Electron to signal the daemon is up. +func (client *Client) NotifyReady(logger *slog.Logger) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://localhost/daemon/ready", nil) + if err != nil { + return fmt.Errorf("creating ready request: %w", err) + } + + resp, err := client.http.Do(req) + if err != nil { + return fmt.Errorf("sending ready request: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("unexpected status from ready endpoint: %d", resp.StatusCode) + } + + logger.Info("notified electron of readiness") + return nil +} + +// Post sends a JSON body to the given operation path and returns an errno. +// A non-200 HTTP response means a transport failure so we return fuse.EIO without reading the body. +// uppon 200, the response always contains an errno field: non-zero means the operation failed with that errno, +// zero means success and the remaining fields are the operation's data, unmarshalled into out if non-nil. +func (client *Client) Post(context context.Context, path OperationPath, in any, out any) fuse.Status { + body, err := json.Marshal(in) + if err != nil { + return fuse.EIO + } + url := serverURL + string(path) + req, err := http.NewRequestWithContext(context, http.MethodPost, url, bytes.NewBuffer(body)) + if err != nil { + return fuse.EIO + } + req.Header.Set("Content-Type", "application/json") + + resp, err := client.http.Do(req) + if err != nil { + return fuse.EIO + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return fuse.EIO + } + + resBody, err := io.ReadAll(resp.Body) + if err != nil { + return fuse.EIO + } + + var errResp ErrorResponse + if err = json.Unmarshal(resBody, &errResp); err == nil && errResp.Errno != 0 { + return fuse.Status(errResp.Errno) + } + + if out != nil { + if err = json.Unmarshal(resBody, out); err != nil { + return fuse.EIO + } + } + + return fuse.OK +} + +// PostBinary sends a JSON body to the given operation path and returns raw binary data. +// Errors are signaled via the X-Errno response header (non-zero = fuse.Status error code). +// On success (X-Errno: 0) the response body is raw bytes copied into dest. +// Returns the number of bytes read and a fuse.Status. +func (client *Client) PostBinary(ctx context.Context, path OperationPath, in any, dest []byte) (int, fuse.Status) { + body, err := json.Marshal(in) + if err != nil { + return 0, fuse.EIO + } + url := serverURL + string(path) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(body)) + if err != nil { + return 0, fuse.EIO + } + req.Header.Set("Content-Type", "application/json") + + resp, err := client.http.Do(req) + if err != nil { + return 0, fuse.EIO + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return 0, fuse.EIO + } + + if errnoStr := resp.Header.Get("X-Errno"); errnoStr != "" && errnoStr != "0" { + var errno int32 + if _, err := fmt.Sscanf(errnoStr, "%d", &errno); err == nil && errno != 0 { + return 0, fuse.Status(errno) + } + } + + bytesRead, err := io.ReadFull(resp.Body, dest) + if err != nil && err != io.ErrUnexpectedEOF { + return 0, fuse.EIO + } + return bytesRead, fuse.OK +} + +// PostBinaryRequest sends raw binary payload to the given operation path. +// Errors are signaled via the X-Errno response header (non-zero = fuse.Status error code). +// On success it returns fuse.OK. +func (client *Client) PostSendBinary(ctx context.Context, path OperationPath, payload []byte, headers map[string]string) fuse.Status { + url := serverURL + string(path) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(payload)) + if err != nil { + return fuse.EIO + } + req.Header.Set("Content-Type", "application/octet-stream") + for key, value := range headers { + req.Header.Set(key, value) + } + + resp, err := client.http.Do(req) + if err != nil { + return fuse.EIO + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return fuse.EIO + } + + if errnoStr := resp.Header.Get("X-Errno"); errnoStr != "" && errnoStr != "0" { + var errno int32 + if _, err := fmt.Sscanf(errnoStr, "%d", &errno); err == nil && errno != 0 { + return fuse.Status(errno) + } + return fuse.EIO + } + + return fuse.OK +} diff --git a/packages/fuse-daemon/internal/client/operation_paths.go b/packages/fuse-daemon/internal/client/operation_paths.go new file mode 100644 index 0000000000..6e8be98434 --- /dev/null +++ b/packages/fuse-daemon/internal/client/operation_paths.go @@ -0,0 +1,25 @@ +package client + +type OperationPath string + +type ErrorResponse struct { + Errno int32 `json:"errno"` +} + +const ( + OperationGetAttr OperationPath = "/op/getattributes" + OperationOpen OperationPath = "/op/open" + OperationOpenDir OperationPath = "/op/opendir" + OperationRead OperationPath = "/op/read" + OperationTruncate OperationPath = "/op/truncate" + OperationCreate OperationPath = "/op/create" + OperationWrite OperationPath = "/op/write" + OperationRelease OperationPath = "/op/release" + OperationMkdir OperationPath = "/op/mkdir" + OperationRename OperationPath = "/op/rename" + OperationUnlink OperationPath = "/op/unlink" + OperationRmdir OperationPath = "/op/rmdir" + OperationStatFs OperationPath = "/op/statfs" +) + +const serverURL = "http://localhost" diff --git a/packages/fuse-daemon/internal/config/config.go b/packages/fuse-daemon/internal/config/config.go new file mode 100644 index 0000000000..27f2a8d845 --- /dev/null +++ b/packages/fuse-daemon/internal/config/config.go @@ -0,0 +1,39 @@ +package config + +import ( + "fmt" + "os" +) + +type Config struct { + MountPoint string + SocketPath string + LogFile string +} +func ParseConfig() Config { + config := Config{ + MountPoint: os.Getenv("INTERNXT_MOUNT"), + SocketPath: os.Getenv("INTERNXT_SOCKET"), + LogFile: os.Getenv("INTERNXT_LOG_FILE"), + } + + var missing []string + if config.MountPoint == "" { + missing = append(missing, "INTERNXT_MOUNT") + } + if config.SocketPath == "" { + missing = append(missing, "INTERNXT_SOCKET") + } + if config.LogFile == "" { + missing = append(missing, "INTERNXT_LOG_FILE") + } + + if len(missing) > 0 { + for _, envVar := range missing { + fmt.Fprintf(os.Stderr, "missing required environment variable: %s\n", envVar) + } + os.Exit(1) + } + + return config +} diff --git a/packages/fuse-daemon/internal/filesystem/file.go b/packages/fuse-daemon/internal/filesystem/file.go new file mode 100644 index 0000000000..943aec36ad --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/file.go @@ -0,0 +1,108 @@ +package filesystem + +import ( + "context" + "encoding/base64" + "strconv" + "log/slog" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" + + "github.com/hanwen/go-fuse/v2/fuse" + "github.com/hanwen/go-fuse/v2/fuse/nodefs" +) + +type WriteCallbackData struct { + Written uint32 `json:"written"` +} + +// InternxtFile is the file handle returned by Open. +// It holds the context needed for future Read/Write implementation. +// Operations with a path-based fallback (GetAttr, Chmod, Chown, Truncate, Utimens) +// are intentionally not overridden — DefaultFile returns ENOSYS which triggers +// the fallback to InternxtFilesystem automatically. +type InternxtFile struct { + nodefs.File + path string + flag uint32 + processName string + logger *slog.Logger + client *client.Client +} + +func NewInternxtFile(path string, flag uint32, processName string, logger *slog.Logger, c *client.Client) *InternxtFile { + return &InternxtFile{ + File: nodefs.NewDefaultFile(), + path: path, + flag: flag, + processName: processName, + logger: logger, + client: c, + } +} + +func (f *InternxtFile) String() string { + return "InternxtFile(" + f.path + ")" +} + +func (f *InternxtFile) Read(dest []byte, off int64) (fuse.ReadResult, fuse.Status) { + f.logger.Debug("Received Read call", "path", f.path, "offset", off, "length", len(dest)) + body := struct { + Path string `json:"path"` + Offset int64 `json:"offset"` + Length int `json:"length"` + ProcessName string `json:"processName"` + }{Path: f.path, Offset: off, Length: len(dest), ProcessName: f.processName} + + bytesRead, status := f.client.PostBinary(context.Background(), client.OperationRead, body, dest) + if status != fuse.OK { + f.logger.Error("Error occurred while reading file", "status", status) + return nil, status + } + return fuse.ReadResultData(dest[:bytesRead]), fuse.OK +} + +func (f *InternxtFile) Write(data []byte, off int64) (uint32, fuse.Status) { + f.logger.Debug("Received Write call", "path", f.path, "offset", off, "length", len(data)) + headers := map[string]string{ + "X-Path-B64": base64.StdEncoding.EncodeToString([]byte(f.path)), + "X-Offset": strconv.FormatInt(off, 10), + } + + if status := f.client.PostSendBinary(context.Background(), client.OperationWrite, data, headers); status != fuse.OK { + f.logger.Error("Error occurred while writing file", "status", status) + return 0, status + } + + return uint32(len(data)), fuse.OK +} + +// v.2.6.0 +// Esteban Galvis Triana +// Flush is called on each close(2) of the file descriptor. +// Multiple flushes may occur if the file descriptor was duplicated. +// Data is already persisted to the temporal file via Write, so no action is needed here. +func (f *InternxtFile) Flush() fuse.Status { + return fuse.OK +} + +func (f *InternxtFile) Release() { + f.logger.Debug("Received Release call:", "path", f.path) + body := struct { + Path string `json:"path"` + ProcessName string `json:"processName"` + }{Path: f.path, ProcessName: f.processName} + if status := f.client.Post(context.Background(), client.OperationRelease, body, nil); status != fuse.OK { + f.logger.Warn("Release call failed", "path", f.path, "status", status) + } +} + +// v.2.6.0 +// Esteban Galvis Triana +// Fsync is called when the application requests a data flush (fsync/fdatasync). +// Data is already persisted to the temporal file on each Write call, so there is +// nothing extra to flush. Returning OK satisfies the caller without triggering ENOSYS. +func (f *InternxtFile) Fsync(flags int) fuse.Status { + f.logger.Debug("Received Fsync call", "path", f.path) + return fuse.OK +} diff --git a/packages/fuse-daemon/internal/filesystem/file_test.go b/packages/fuse-daemon/internal/filesystem/file_test.go new file mode 100644 index 0000000000..ed9c8b0f5e --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/file_test.go @@ -0,0 +1,267 @@ +package filesystem + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "syscall" + "testing" + "time" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" +) + +type releaseRequest struct { + Path string `json:"path"` + ProcessName string `json:"processName"` +} + +func respondBinary(w http.ResponseWriter, data []byte) { + w.Header().Set("X-Errno", "0") + w.Header().Set("Content-Type", "application/octet-stream") + _, _ = w.Write(data) +} + +func nonEmptyFileAttrHandler(size uint64) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, map[string]any{ + "errno": 0, + "mode": 0o100644, + "size": size, + "mtime": time.Now(), + "ctime": time.Now(), + "uid": uint32(os.Getuid()), + "gid": uint32(os.Getgid()), + "nlink": 1, + }) + } +} + +func TestRelease(t *testing.T) { + t.Run("sends path and processName to electron on close", func(t *testing.T) { + var received releaseRequest + + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRelease: func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &received) + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + }) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + f, err := os.Open(filepath.Join(sharedMount.mountPoint, fileName)) + if err != nil { + t.Fatalf("open: %v", err) + } + _ = f.Close() + + // give the async Release call time to reach the mock server + time.Sleep(50 * time.Millisecond) + + if received.Path != fileName { + t.Errorf("path: got %q, want %q", received.Path, fileName) + } + }) + + t.Run("does not block when electron returns an error", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRelease: func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + }, + }) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + f, err := os.Open(filepath.Join(sharedMount.mountPoint, fileName)) + if err != nil { + t.Fatalf("open: %v", err) + } + + done := make(chan struct{}) + go func() { + _ = f.Close() + close(done) + }() + + select { + case <-done: + case <-time.After(2 * time.Second): + t.Fatal("Close() blocked — Release did not return") + } + }) +} + +func TestFsync(t *testing.T) { + noopHandlers := map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRelease: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + } + + t.Run("returns OK without calling the backend", func(t *testing.T) { + fsyncCalled := false + handlers := make(map[client.OperationPath]http.HandlerFunc, len(noopHandlers)) + for k, v := range noopHandlers { + handlers[k] = v + } + handlers["/op/fsync"] = func(w http.ResponseWriter, r *http.Request) { + fsyncCalled = true + respondJSON(w, client.ErrorResponse{Errno: 0}) + } + sharedMount.mockServer.setHandlers(handlers) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + f, err := os.OpenFile(filepath.Join(sharedMount.mountPoint, fileName), os.O_WRONLY, 0) + if err != nil { + t.Fatalf("open: %v", err) + } + defer func() { _ = f.Close() }() + + if err := f.Sync(); err != nil { + t.Fatalf("fsync: %v", err) + } + + if fsyncCalled { + t.Error("expected Fsync to be handled locally — backend should not be called") + } + }) +} + +func TestRead(t *testing.T) { + noopRelease := func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + } + + t.Run("returns file contents", func(t *testing.T) { + content := []byte("hello from internxt") + + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: nonEmptyFileAttrHandler(uint64(len(content))), + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRead: func(w http.ResponseWriter, r *http.Request) { + respondBinary(w, content) + }, + client.OperationRelease: noopRelease, + }) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + data, err := os.ReadFile(filepath.Join(sharedMount.mountPoint, fileName)) + if err != nil { + t.Fatalf("read: %v", err) + } + + if string(data) != string(content) { + t.Errorf("content: got %q, want %q", string(data), string(content)) + } + }) + + t.Run("sends path, offset, length and processName to electron", func(t *testing.T) { + type readRequest struct { + Path string `json:"path"` + Offset int64 `json:"offset"` + Length int `json:"length"` + ProcessName string `json:"processName"` + } + var received readRequest + + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: nonEmptyFileAttrHandler(1024), + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRead: func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &received) + respondBinary(w, []byte{}) + }, + client.OperationRelease: noopRelease, + }) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + _, _ = os.ReadFile(filepath.Join(sharedMount.mountPoint, fileName)) + + if received.Path != fileName { + t.Errorf("path: got %q, want %q", received.Path, fileName) + } + if received.Offset != 0 { + t.Errorf("offset: got %d, want 0", received.Offset) + } + if received.Length <= 0 { + t.Errorf("length: got %d, want > 0", received.Length) + } + }) + + t.Run("returns EIO on transport failure", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: nonEmptyFileAttrHandler(1024), + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRead: func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + }, + client.OperationRelease: noopRelease, + }) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + _, err := os.ReadFile(filepath.Join(sharedMount.mountPoint, fileName)) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.EIO { + t.Errorf("expected EIO, got %v", pathErr.Err) + } + }) + + t.Run("returns ENOENT when errno is 2", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: nonEmptyFileAttrHandler(1024), + client.OperationOpen: func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, client.ErrorResponse{Errno: 0}) + }, + client.OperationRead: func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("X-Errno", "2") + w.WriteHeader(http.StatusOK) + }, + client.OperationRelease: noopRelease, + }) + + fileName := fmt.Sprintf("file-%d.txt", time.Now().UnixNano()) + _, err := os.ReadFile(filepath.Join(sharedMount.mountPoint, fileName)) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.ENOENT { + t.Errorf("expected ENOENT, got %v", pathErr.Err) + } + }) +} diff --git a/packages/fuse-daemon/internal/filesystem/mount.go b/packages/fuse-daemon/internal/filesystem/mount.go new file mode 100644 index 0000000000..51c7b03405 --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/mount.go @@ -0,0 +1,46 @@ +package filesystem + +import ( + "log/slog" + "os/exec" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" + + "github.com/hanwen/go-fuse/v2/fuse" + "github.com/hanwen/go-fuse/v2/fuse/nodefs" + "github.com/hanwen/go-fuse/v2/fuse/pathfs" +) + +// Mount attaches InternxtFilesystem to mountPoint and starts serving FUSE operations. +// Returns the server (for unmounting on shutdown) and a done channel that closes +// when the server stops — either via Unmount or external fusermount -u. +func Mount(mountPoint string, logger *slog.Logger, client *client.Client) (*fuse.Server, <-chan struct{}, error) { + fileSystem := NewInternxtFilesystem(logger, client) + + nodeFileSystem := pathfs.NewPathNodeFs(fileSystem, nil) + + mountOptions := &fuse.MountOptions{ + AllowOther: false, + MaxReadAhead: 128 * 1024, + DisableXAttrs: false, + Debug: false, + DirectMount: true, + } + + // Clear any stale FUSE mount left from a previous crash or unclean shutdown. + // fusermount3 -uz works as a regular user; errors are ignored since the mount may not exist. + _ = exec.Command("fusermount3", "-uz", mountPoint).Run() + + server, _, err := nodefs.Mount(mountPoint, nodeFileSystem.Root(), mountOptions, nil) + if err != nil { + return nil, nil, err + } + + done := make(chan struct{}) + go func() { + server.Serve() + close(done) + }() + + return server, done, nil +} diff --git a/packages/fuse-daemon/internal/filesystem/operations.go b/packages/fuse-daemon/internal/filesystem/operations.go new file mode 100644 index 0000000000..3fcc6e1a29 --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/operations.go @@ -0,0 +1,215 @@ +package filesystem + +import ( + "context" + "fmt" + "log/slog" + "os" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" + + "github.com/hanwen/go-fuse/v2/fuse" + "github.com/hanwen/go-fuse/v2/fuse/nodefs" + "github.com/hanwen/go-fuse/v2/fuse/pathfs" +) + +// readProcessName returns the process name for a given PID by reading /proc//comm. +func readProcessName(pid uint32) string { + data, err := os.ReadFile(fmt.Sprintf("/proc/%d/comm", pid)) + if err != nil { + return "" + } + return string(data[:len(data)-1]) // trim trailing newline +} + +// InternxtFilesystem is the FUSE filesystem implementation. +// Each method corresponds to a FUSE operation forwarded to Electron over HTTP. +// +// To implement an operation: +// 1. Add the method below with its correct signature +// 2. Remove the log line and ENOSYS return +// 3. Call the corresponding endpoint via the HTTP client: fs.client.Post("/op/", ...) +// 4. Map the HTTP response back to the correct fuse.Status +type InternxtFilesystem struct { + pathfs.FileSystem + logger *slog.Logger + client *client.Client +} + +func NewInternxtFilesystem(logger *slog.Logger, client *client.Client) *InternxtFilesystem { + return &InternxtFilesystem{ + FileSystem: pathfs.NewDefaultFileSystem(), + logger: logger, + client: client, + } +} + +func (fs *InternxtFilesystem) GetAttr(name string, context *fuse.Context) (*fuse.Attr, fuse.Status) { + fs.logger.Debug("Received GetAttr call: ", "name", name) + body := struct { + Path string `json:"path"` + }{Path: name} + response := GetAttributesCallbackData{} + if status := fs.client.Post(context, client.OperationGetAttr, body, &response); status != fuse.OK { + fs.logger.Error("Error occurred while fetching attributes", "status", status) + return nil, status + } + var atime uint64 + if response.Atime != nil { + atime = uint64(response.Atime.Unix()) + } + attr := &fuse.Attr{ + Mode: response.Mode, + Size: response.Size, + Mtime: uint64(response.Mtime.Unix()), + Ctime: uint64(response.Ctime.Unix()), + Atime: atime, + Owner: fuse.Owner{Uid: response.Uid, Gid: response.Gid}, + Nlink: response.Nlink, + } + return attr, fuse.OK +} + +func (fs *InternxtFilesystem) OpenDir(name string, context *fuse.Context) ([]fuse.DirEntry, fuse.Status) { + fs.logger.Debug("Received OpenDir call", "name", name) + body := struct { + Path string `json:"path"` + }{Path: name} + response := OpenDirCallbackData{} + if status := fs.client.Post(context, client.OperationOpenDir, body, &response); status != fuse.OK { + fs.logger.Error("Error occurred while opening directory", "status", status) + return nil, status + } + entries := make([]fuse.DirEntry, 0, len(response.Entries)) + for _, entry := range response.Entries { + entries = append(entries, fuse.DirEntry{Name: entry.Name, Mode: entry.Mode}) + } + return entries, fuse.OK +} + +func (fs *InternxtFilesystem) Open(name string, flags uint32, context *fuse.Context) (nodefs.File, fuse.Status) { + fs.logger.Debug("Received Open call", "name", name, "flags", flags) + processName := readProcessName(context.Pid) + body := struct { + Path string `json:"path"` + Flag uint32 `json:"flag"` + ProcessName string `json:"processName"` + }{Path: name, Flag: flags, ProcessName: processName} + if status := fs.client.Post(context, client.OperationOpen, body, nil); status != fuse.OK { + fs.logger.Error("Error occurred while opening file", "status", status) + return nil, status + } + return NewInternxtFile(name, flags, processName, fs.logger, fs.client), fuse.OK +} + +// Create creates a new file and returns a file handle. +// When implementing: return a nodefs.File handle for the new file. +func (fs *InternxtFilesystem) Create(name string, flags uint32, mode uint32, context *fuse.Context) (nodefs.File, fuse.Status) { + fs.logger.Debug("Received Create call", "name", name, "flags", flags, "mode", mode) + body := struct { + Path string `json:"path"` + Flag uint32 `json:"flag"` + Mode uint32 `json:"mode"` + }{Path: name, Flag: flags, Mode: mode} + + if status := fs.client.Post(context, client.OperationCreate, body, nil); status != fuse.OK { + fs.logger.Error("Error occurred while creating file", "status", status) + return nil, status + } + + processName := readProcessName(context.Pid) + return NewInternxtFile(name, flags, processName, fs.logger, fs.client), fuse.OK +} + +func (fs *InternxtFilesystem) Mkdir(name string, mode uint32, context *fuse.Context) fuse.Status { + fs.logger.Debug("Received Mkdir call", "path", name) + body := struct { + Path string `json:"path"` + }{Path: name} + return fs.client.Post(context, client.OperationMkdir, body, nil) +} + +func (fs *InternxtFilesystem) Rename(oldName string, newName string, context *fuse.Context) fuse.Status { + fs.logger.Debug("Received Rename call", "oldPath", oldName, "newPath", newName) + body := struct { + OldPath string `json:"oldPath"` + NewPath string `json:"newPath"` + }{OldPath: oldName, NewPath: newName} + + status := fs.client.Post(context, client.OperationRename, body, nil) + if status != fuse.OK { + return status + } + + return fuse.OK +} + +func (fs *InternxtFilesystem) Unlink(name string, context *fuse.Context) fuse.Status { + fs.logger.Debug("Received Unlink call", "path", name) + body := struct { + Path string `json:"path"` + }{Path: name} + + status := fs.client.Post(context, client.OperationUnlink, body, nil) + if status != fuse.OK { + return status + } + + return fuse.OK +} + +func (fs *InternxtFilesystem) Rmdir(name string, context *fuse.Context) fuse.Status { + fs.logger.Debug("Received Rmdir call", "path", name) + body := struct { + Path string `json:"path"` + }{Path: name} + + status := fs.client.Post(context, client.OperationRmdir, body, nil) + if status != fuse.OK { + return status + } + + return fuse.OK +} + +func (fs *InternxtFilesystem) Truncate(name string, size uint64, context *fuse.Context) fuse.Status { + fs.logger.Debug("Received Truncate call", "path", name, "size", size) + body := struct { + Path string `json:"path"` + Size uint64 `json:"size"` + }{Path: name, Size: size} + + return fs.client.Post(context, client.OperationTruncate, body, nil) +} + +func (fs *InternxtFilesystem) GetXAttr(name string, attr string, context *fuse.Context) ([]byte, fuse.Status) { + fs.logger.Warn("not implemented", "op", "GetXAttr", "path", name, "attr", attr) + return nil, fuse.ENOSYS +} + +// v.2.6.0 +// Esteban Galvis Triana +// StatFs returns filesystem-level statistics (total/free/available blocks and inodes). +// These values are used by applications (vim, cp, df) to determine whether +// there is sufficient space before writing. The backend queries the local +// disk where temporal files are stored and returns the real available space. +func (fs *InternxtFilesystem) StatFs(name string) *fuse.StatfsOut { + fs.logger.Debug("Received StatFs call", "name", name) + body := struct { + Path string `json:"path"` + }{Path: name} + response := StatFsCallbackData{} + if status := fs.client.Post(context.Background(), client.OperationStatFs, body, &response); status != fuse.OK { + fs.logger.Error("Error occurred while getting filesystem stats", "status", status) + return nil + } + return &fuse.StatfsOut{ + Blocks: response.Blocks, + Bfree: response.Bfree, + Bavail: response.Bavail, + Files: response.Files, + Ffree: response.Ffree, + Bsize: response.Bsize, + NameLen: response.NameLen, + } +} diff --git a/packages/fuse-daemon/internal/filesystem/operations_test.go b/packages/fuse-daemon/internal/filesystem/operations_test.go new file mode 100644 index 0000000000..febee4461f --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/operations_test.go @@ -0,0 +1,553 @@ +package filesystem + +import ( + "fmt" + "net/http" + "os" + "path/filepath" + "syscall" + "testing" + "time" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" +) + +func TestGetAttr(t *testing.T) { + t.Run("returns file attributes", func(t *testing.T) { + now := time.Now().Truncate(time.Second) + + sharedMount.mockServer.setHandler(client.OperationGetAttr, func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, map[string]any{ + "errno": 0, + "mode": 0o100644, + "size": 1234, + "mtime": now, + "ctime": now, + "uid": uint32(os.Getuid()), + "gid": uint32(os.Getgid()), + "nlink": 1, + }) + }) + + info, err := os.Stat(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("file-%d.txt", time.Now().UnixNano()))) + if err != nil { + t.Fatalf("stat: %v", err) + } + + if info.Size() != 1234 { + t.Errorf("size: got %d, want 1234", info.Size()) + } + + if info.Mode().Perm() != 0o644 { + t.Errorf("mode: got %v, want 0644", info.Mode().Perm()) + } + + if info.ModTime().Unix() != now.Unix() { + t.Errorf("mtime: got %v, want %v", info.ModTime().Unix(), now.Unix()) + } + }) + + t.Run("returns directory attributes", func(t *testing.T) { + sharedMount.mockServer.setHandler(client.OperationGetAttr, func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, map[string]any{ + "errno": 0, + "mode": 0o040755, + "size": 4096, + "mtime": time.Now(), + "ctime": time.Now(), + "uid": uint32(os.Getuid()), + "gid": uint32(os.Getgid()), + "nlink": 2, + }) + }) + + info, err := os.Stat(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("mydir-%d", time.Now().UnixNano()))) + if err != nil { + t.Fatalf("stat: %v", err) + } + + if !info.IsDir() { + t.Errorf("expected directory, got mode %v", info.Mode()) + } + }) + + t.Run("returns EIO on transport failure", func(t *testing.T) { + sharedMount.mockServer.setHandler(client.OperationGetAttr, func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusInternalServerError) + }) + + _, err := os.Stat(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("broken-%d.txt", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.EIO { + t.Errorf("expected EIO, got %v", pathErr.Err) + } + }) + + t.Run("returns ENOENT when errno is 2", func(t *testing.T) { + sharedMount.mockServer.setHandler(client.OperationGetAttr, func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, client.ErrorResponse{Errno: 2}) + }) + + _, err := os.Stat(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("missing-%d.txt", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.ENOENT { + t.Errorf("expected ENOENT, got %v", pathErr.Err) + } + }) +} + +func dirAttrHandler(response http.ResponseWriter, request *http.Request) { + respondJSON(response, map[string]any{ + "errno": 0, + "mode": 0o040755, + "size": 0, + "mtime": time.Now(), + "ctime": time.Now(), + "uid": uint32(os.Getuid()), + "gid": uint32(os.Getgid()), + "nlink": 2, + }) +} + +func fileAttrHandler(response http.ResponseWriter, request *http.Request) { + respondJSON(response, map[string]any{ + "errno": 0, + "mode": 0o100644, + "size": 0, + "mtime": time.Now(), + "ctime": time.Now(), + "uid": uint32(os.Getuid()), + "gid": uint32(os.Getgid()), + "nlink": 1, + }) +} + +func TestOpenDir(t *testing.T) { + t.Run("returns directory entries", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: dirAttrHandler, + client.OperationOpenDir: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, map[string]any{ + "errno": 0, + "entries": []map[string]any{ + {"name": "file.txt", "mode": 0o100644}, + {"name": "subdir", "mode": 0o040755}, + }, + }) + }, + }) + + entries, err := os.ReadDir(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("mydir-%d", time.Now().UnixNano()))) + if err != nil { + t.Fatalf("readdir: %v", err) + } + + if len(entries) != 2 { + t.Fatalf("entries: got %d, want 2", len(entries)) + } + + if entries[0].Name() != "file.txt" { + t.Errorf("entry[0]: got %q, want %q", entries[0].Name(), "file.txt") + } + + if entries[1].Name() != "subdir" { + t.Errorf("entry[1]: got %q, want %q", entries[1].Name(), "subdir") + } + }) + + t.Run("returns EIO on transport failure", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: dirAttrHandler, + client.OperationOpenDir: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusInternalServerError) + }, + }) + + _, err := os.ReadDir(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("broken-%d", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.EIO { + t.Errorf("expected EIO, got %v", pathErr.Err) + } + }) + + t.Run("returns ENOENT when errno is 2", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: dirAttrHandler, + client.OperationOpenDir: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, client.ErrorResponse{Errno: 2}) + }, + }) + + _, err := os.ReadDir(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("missing-%d", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.ENOENT { + t.Errorf("expected ENOENT, got %v", pathErr.Err) + } + }) +} + +func TestOpen(t *testing.T) { + t.Run("opens file successfully", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationOpen: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, client.ErrorResponse{Errno: 0}) + }, + }) + + f, err := os.Open(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("file-%d.txt", time.Now().UnixNano()))) + if err != nil { + t.Fatalf("open: %v", err) + } + _ = f.Close() + }) + + t.Run("returns EIO on transport failure", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationOpen: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusInternalServerError) + }, + }) + + _, err := os.Open(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("broken-%d.txt", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.EIO { + t.Errorf("expected EIO, got %v", pathErr.Err) + } + }) + + t.Run("returns ENOENT when errno is 2", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationOpen: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, client.ErrorResponse{Errno: 2}) + }, + }) + + _, err := os.Open(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("missing-%d.txt", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.ENOENT { + t.Errorf("expected ENOENT, got %v", pathErr.Err) + } + }) +} + +func TestUnlink(t *testing.T) { + t.Run("returns OK when backend returns 200", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, GetAttributesCallbackData{ + Mode: 0o100644, + Size: 0, + Mtime: time.Now(), + Ctime: time.Now(), + Uid: uint32(os.Getuid()), + Gid: uint32(os.Getgid()), + Nlink: 1, + }) + }, + client.OperationUnlink: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusOK) + }, + }) + + err := syscall.Unlink(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("unlink-ok-%d.txt", time.Now().UnixNano()))) + if err != nil { + t.Fatalf("unlink: %v", err) + } + }) + + t.Run("returns EIO when backend returns 404", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, GetAttributesCallbackData{ + Mode: 0o100644, + Size: 0, + Mtime: time.Now(), + Ctime: time.Now(), + Uid: uint32(os.Getuid()), + Gid: uint32(os.Getgid()), + Nlink: 1, + }) + }, + client.OperationUnlink: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusNotFound) + }, + }) + + err := syscall.Unlink(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("unlink-missing-%d.txt", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + if err != syscall.EIO { + t.Errorf("expected EIO, got %v", err) + } + }) + + t.Run("returns EIO when backend returns 500", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, GetAttributesCallbackData{ + Mode: 0o100644, + Size: 0, + Mtime: time.Now(), + Ctime: time.Now(), + Uid: uint32(os.Getuid()), + Gid: uint32(os.Getgid()), + Nlink: 1, + }) + }, + client.OperationUnlink: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusInternalServerError) + }, + }) + + err := syscall.Unlink(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("unlink-eio-%d.txt", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + if err != syscall.EIO { + t.Errorf("expected EIO, got %v", err) + } + }) +} + +func TestRmdir(t *testing.T) { + t.Run("returns OK when backend returns 200", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, GetAttributesCallbackData{ + Mode: 0o040755, + Size: 0, + Mtime: time.Now(), + Ctime: time.Now(), + Uid: uint32(os.Getuid()), + Gid: uint32(os.Getgid()), + Nlink: 2, + }) + }, + client.OperationRmdir: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusOK) + }, + }) + + err := syscall.Rmdir(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("rmdir-ok-%d", time.Now().UnixNano()))) + if err != nil { + t.Fatalf("rmdir: %v", err) + } + }) + + t.Run("returns EIO when backend returns 404", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, GetAttributesCallbackData{ + Mode: 0o040755, + Size: 0, + Mtime: time.Now(), + Ctime: time.Now(), + Uid: uint32(os.Getuid()), + Gid: uint32(os.Getgid()), + Nlink: 2, + }) + }, + client.OperationRmdir: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusNotFound) + }, + }) + + err := syscall.Rmdir(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("rmdir-missing-%d", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + if err != syscall.EIO { + t.Errorf("expected EIO, got %v", err) + } + }) + + t.Run("returns EIO when backend returns 500", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, GetAttributesCallbackData{ + Mode: 0o040755, + Size: 0, + Mtime: time.Now(), + Ctime: time.Now(), + Uid: uint32(os.Getuid()), + Gid: uint32(os.Getgid()), + Nlink: 2, + }) + }, + client.OperationRmdir: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusInternalServerError) + }, + }) + + err := syscall.Rmdir(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("rmdir-eio-%d", time.Now().UnixNano()))) + if err == nil { + t.Fatal("expected error, got nil") + } + + if err != syscall.EIO { + t.Errorf("expected EIO, got %v", err) + } + }) +} + +func TestTruncate(t *testing.T) { + t.Run("returns OK when backend returns 200", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationTruncate: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusOK) + }, + }) + + err := os.Truncate(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("truncate-ok-%d.txt", time.Now().UnixNano())), 0) + if err != nil { + t.Fatalf("truncate: %v", err) + } + }) + + t.Run("returns EIO on transport failure", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationTruncate: func(response http.ResponseWriter, request *http.Request) { + response.WriteHeader(http.StatusInternalServerError) + }, + }) + + err := os.Truncate(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("truncate-eio-%d.txt", time.Now().UnixNano())), 0) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.EIO { + t.Errorf("expected EIO, got %v", pathErr.Err) + } + }) + + t.Run("returns ENOENT when backend returns errno 2", func(t *testing.T) { + sharedMount.mockServer.setHandlers(map[client.OperationPath]http.HandlerFunc{ + client.OperationGetAttr: fileAttrHandler, + client.OperationTruncate: func(response http.ResponseWriter, request *http.Request) { + respondJSON(response, client.ErrorResponse{Errno: 2}) + }, + }) + + err := os.Truncate(filepath.Join(sharedMount.mountPoint, fmt.Sprintf("truncate-missing-%d.txt", time.Now().UnixNano())), 0) + if err == nil { + t.Fatal("expected error, got nil") + } + + pathErr, ok := err.(*os.PathError) + if !ok { + t.Fatalf("expected *os.PathError, got %T", err) + } + + if pathErr.Err != syscall.ENOENT { + t.Errorf("expected ENOENT, got %v", pathErr.Err) + } + }) +} + +func TestStatFs(t *testing.T) { + t.Run("returns filesystem stats from backend", func(t *testing.T) { + sharedMount.mockServer.setHandler(client.OperationStatFs, func(w http.ResponseWriter, r *http.Request) { + respondJSON(w, map[string]any{ + "blocks": uint64(1000000), + "bfree": uint64(500000), + "bavail": uint64(490000), + "files": uint64(100000), + "ffree": uint64(90000), + "bsize": uint32(4096), + "nameLen": uint32(255), + }) + }) + + var stat syscall.Statfs_t + if err := syscall.Statfs(sharedMount.mountPoint, &stat); err != nil { + t.Fatalf("statfs: %v", err) + } + + if stat.Blocks != 1000000 { + t.Errorf("blocks: got %d, want 1000000", stat.Blocks) + } + if stat.Bfree != 500000 { + t.Errorf("bfree: got %d, want 500000", stat.Bfree) + } + if stat.Bavail != 490000 { + t.Errorf("bavail: got %d, want 490000", stat.Bavail) + } + if stat.Namelen != 255 { + t.Errorf("namelen: got %d, want 255", stat.Namelen) + } + }) + + t.Run("returns zeroed stats on transport failure", func(t *testing.T) { + sharedMount.mockServer.setHandler(client.OperationStatFs, func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + }) + + // go-fuse falls back to zero-filled StatfsOut when StatFs returns nil. + // The syscall itself still succeeds (kernel-level fallback). + var stat syscall.Statfs_t + _ = syscall.Statfs(sharedMount.mountPoint, &stat) + }) +} diff --git a/packages/fuse-daemon/internal/filesystem/responses.go b/packages/fuse-daemon/internal/filesystem/responses.go new file mode 100644 index 0000000000..465edeed69 --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/responses.go @@ -0,0 +1,33 @@ +package filesystem + +import "time" + +type GetAttributesCallbackData struct { + Mode uint32 `json:"mode"` + Size uint64 `json:"size"` + Mtime time.Time `json:"mtime"` + Ctime time.Time `json:"ctime"` + Atime *time.Time `json:"atime,omitempty"` + Uid uint32 `json:"uid"` + Gid uint32 `json:"gid"` + Nlink uint32 `json:"nlink"` +} + +type OpenDirEntry struct { + Name string `json:"name"` + Mode uint32 `json:"mode"` +} + +type OpenDirCallbackData struct { + Entries []OpenDirEntry `json:"entries"` +} + +type StatFsCallbackData struct { + Blocks uint64 `json:"blocks"` + Bfree uint64 `json:"bfree"` + Bavail uint64 `json:"bavail"` + Files uint64 `json:"files"` + Ffree uint64 `json:"ffree"` + Bsize uint32 `json:"bsize"` + NameLen uint32 `json:"nameLen"` +} diff --git a/packages/fuse-daemon/internal/filesystem/setup_test.go b/packages/fuse-daemon/internal/filesystem/setup_test.go new file mode 100644 index 0000000000..19356064b4 --- /dev/null +++ b/packages/fuse-daemon/internal/filesystem/setup_test.go @@ -0,0 +1,109 @@ +package filesystem + +import ( + "encoding/json" + "fmt" + "log/slog" + "net" + "net/http" + "os" + "path/filepath" + "testing" + + "internxt/drive-desktop-linux/fuse-daemon/internal/client" +) + +// sharedMount holds the single FUSE mount shared across all tests. +// TestMain initializes it once before any test runs. +// Each test sets its own mock handler via sharedMount.mockServer.setHandler() +// and stats a unique path to avoid kernel attribute cache hits between tests. +var sharedMount struct { + mountPoint string + server interface{ Unmount() error } + mockServer *mockServer +} + +type mockServer struct { + socket net.Listener + server *http.Server +} + +func newMockServer(socketPath string) (*mockServer, error) { + socket, err := net.Listen("unix", socketPath) + if err != nil { + return nil, fmt.Errorf("listen on unix socket: %w", err) + } + + serverMock := &mockServer{ + socket: socket, + server: &http.Server{}, + } + + go serverMock.server.Serve(socket) //nolint:errcheck + + return serverMock, nil +} + +// setHandler replaces the current request handler with one that responds to +// the given path using the provided HandlerFunc. Call this at the start of +// each test to control what the daemon receives back from the mock server. +func (serverMock *mockServer) setHandler(path client.OperationPath, handler http.HandlerFunc) { + serverMock.setHandlers(map[client.OperationPath]http.HandlerFunc{path: handler}) +} + +func (serverMock *mockServer) setHandlers(handlers map[client.OperationPath]http.HandlerFunc) { + router := http.NewServeMux() + for path, handler := range handlers { + router.HandleFunc(string(path), handler) + } + serverMock.server.Handler = router +} + +func (serverMock *mockServer) close() { + _ = serverMock.server.Close() + _ = serverMock.socket.Close() +} + +// respondJSON writes body as a JSON response. +func respondJSON(response http.ResponseWriter, body any) { + response.Header().Set("Content-Type", "application/json") + json.NewEncoder(response).Encode(body) //nolint:errcheck +} + +// TestMain runs once before all tests. It sets up: +// 1. A temp directory as the FUSE mount point +// 2. A mock http server on a Unix socket +// 3. A real FUSE mount pointing at that socket +// +// All tests share this single mount and swap the mock handler per-test. +func TestMain(runner *testing.M) { + mountPoint, err := os.MkdirTemp("", "fuse-test-mount-*") + if err != nil { + panic("create mount dir: " + err.Error()) + } + defer func() { _ = os.RemoveAll(mountPoint) }() + + socketPath := filepath.Join(os.TempDir(), "fuse-test.sock") + _ = os.Remove(socketPath) + + mockServer, err := newMockServer(socketPath) + if err != nil { + panic("start mock electron: " + err.Error()) + } + defer mockServer.close() + + logger := slog.New(slog.NewTextHandler(os.Stderr, nil)) + daemonClient := client.NewClient(socketPath) + + fuseServer, _, err := Mount(mountPoint, logger, daemonClient) + if err != nil { + panic("mount fuse: " + err.Error()) + } + defer fuseServer.Unmount() //nolint:errcheck + + sharedMount.mountPoint = mountPoint + sharedMount.server = fuseServer + sharedMount.mockServer = mockServer + + os.Exit(runner.Run()) +} diff --git a/packages/fuse-daemon/internal/logger/logger.go b/packages/fuse-daemon/internal/logger/logger.go new file mode 100644 index 0000000000..d7fc166593 --- /dev/null +++ b/packages/fuse-daemon/internal/logger/logger.go @@ -0,0 +1,18 @@ +package logger + +import ( + "fmt" + "log/slog" + "os" +) +func New(logFilePath string) *slog.Logger { + f, err := os.OpenFile(logFilePath, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644) + if err != nil { + fmt.Fprintf(os.Stderr, "failed to open log file %s: %v\n", logFilePath, err) + os.Exit(1) + } + + return slog.New(slog.NewJSONHandler(f, &slog.HandlerOptions{ + Level: slog.LevelDebug, + })) +} diff --git a/src/apps/antivirus/ipc/AntivirusIPCHandler.ts b/src/apps/antivirus/ipc/AntivirusIPCHandler.ts index dc03e41de8..fd77d86080 100644 --- a/src/apps/antivirus/ipc/AntivirusIPCHandler.ts +++ b/src/apps/antivirus/ipc/AntivirusIPCHandler.ts @@ -1,7 +1,7 @@ import { logger } from '@internxt/drive-desktop-core/build/backend'; import { ipcMain } from 'electron'; import { AntivirusIPCMain } from './AntivirusIPCMain'; -import { getMultiplePathsFromDialog } from '../../main/device/service'; +import { getMultiplePathsFromDialog } from '../../../core/utils/get-multiple-paths-from-dialog'; import { AntivirusScanService } from '../../main/antivirus/AntivirusScanService'; import { getAntivirusManager } from '../../main/antivirus/antivirusManager'; import configStore from '../../main/config'; @@ -98,7 +98,7 @@ export class AntivirusIPCHandler { }); const shouldGetFiles = Boolean(getFiles); - const result = await getMultiplePathsFromDialog(shouldGetFiles); + const result = await getMultiplePathsFromDialog({ allowFiles: shouldGetFiles }); if (!result || !Array.isArray(result)) { return []; diff --git a/src/apps/backups/BackupService.test.ts b/src/apps/backups/BackupService.test.ts index 3c526bb824..2a21f7d8dd 100644 --- a/src/apps/backups/BackupService.test.ts +++ b/src/apps/backups/BackupService.test.ts @@ -1,3 +1,4 @@ +import { Environment } from '@internxt/inxt-js'; import { Mock } from 'vitest'; import { mockDeep } from 'vitest-mock-extended'; import { BackupService } from './BackupService'; @@ -16,6 +17,7 @@ import { BackupProgressTracker } from '../../backend/features/backup/backup-prog import * as executeAsyncQueueModule from '../../backend/common/async-queue/execute-async-queue'; import * as addFileToTrashModule from '../../infra/drive-server/services/files/services/add-file-to-trash'; import { partialSpyOn } from '../../../tests/vitest/utils.helper'; +import { AbsolutePath } from '../../context/local/localFile/infrastructure/AbsolutePath'; vi.mock(import('../../backend/features/usage/usage.module')); @@ -27,12 +29,13 @@ describe('BackupService', () => { let localTreeBuilder: LocalTreeBuilder; let remoteTreeBuilder: RemoteTreeBuilder; let simpleFolderCreator: SimpleFolderCreator; + let environment: Environment; let mockValidateSpace: Mock; let abortController: AbortController; let tracker: BackupProgressTracker; const info: BackupInfo = { - pathname: '/path/to/backup', + pathname: '/path/to/backup' as AbsolutePath, folderId: 123, folderUuid: 'uuid', tmpPath: '/tmp/path', @@ -44,6 +47,7 @@ describe('BackupService', () => { localTreeBuilder = mockDeep(); remoteTreeBuilder = mockDeep(); simpleFolderCreator = mockDeep(); + environment = mockDeep(); tracker = mockDeep(); mockValidateSpace = vi.mocked(UsageModule.validateSpace); @@ -57,7 +61,7 @@ describe('BackupService', () => { localTreeBuilder, remoteTreeBuilder, simpleFolderCreator, - {} as any, + environment, 'backups-bucket', ); @@ -77,7 +81,6 @@ describe('BackupService', () => { expect(result).toBeUndefined(); expect(localTreeBuilder.run).toHaveBeenCalledWith(info.pathname); expect(remoteTreeBuilder.run).toHaveBeenCalledWith(info.folderId, info.folderUuid); - expect(tracker.addToTotal).toHaveBeenCalled(); expect(tracker.incrementProcessed).toHaveBeenCalled(); }); diff --git a/src/apps/drive/__mocks__/ContainerMock.ts b/src/apps/drive/__mocks__/ContainerMock.ts index caa40d61d9..088670289d 100644 --- a/src/apps/drive/__mocks__/ContainerMock.ts +++ b/src/apps/drive/__mocks__/ContainerMock.ts @@ -5,11 +5,10 @@ export class ContainerMock implements Partial { get = vi.fn((service) => this.services.get(service)); - set(service: any, implementation: T): void { + set(service: Identifier, implementation: T): void { this.services.set(service, implementation); } - // @ts-ignore // eslint-disable-next-line @typescript-eslint/no-unused-vars findTaggedServiceIdentifiers(tag: string): Array> { return [] as Array>; diff --git a/src/apps/drive/dependency-injection/offline-drive/registerStorageFilesServices.ts b/src/apps/drive/dependency-injection/offline-drive/registerStorageFilesServices.ts index a267ec30ba..196d683340 100644 --- a/src/apps/drive/dependency-injection/offline-drive/registerStorageFilesServices.ts +++ b/src/apps/drive/dependency-injection/offline-drive/registerStorageFilesServices.ts @@ -1,6 +1,5 @@ import { Environment } from '@internxt/inxt-js'; import { ContainerBuilder } from 'diod'; -import { StorageClearer } from '../../../../context/storage/StorageFiles/application/delete/StorageClearer'; import { StorageFileDeleter } from '../../../../context/storage/StorageFiles/application/delete/StorageFileDeleter'; import { MakeStorageFileAvaliableOffline } from '../../../../context/storage/StorageFiles/application/offline/MakeStorageFileAvaliableOffline'; import { StorageFileIsAvailableOffline } from '../../../../context/storage/StorageFiles/application/offline/StorageFileIsAvailableOffline'; @@ -36,6 +35,5 @@ export async function registerStorageFilesServices(builder: ContainerBuilder): P builder.registerAndUse(StorageFileIsAvailableOffline); builder.registerAndUse(MakeStorageFileAvaliableOffline); builder.registerAndUse(StorageFileDeleter); - builder.registerAndUse(StorageClearer); builder.registerAndUse(StorageRemoteChangesSyncher); } diff --git a/src/apps/drive/dependency-injection/offline-drive/registerTemporalFilesServices.ts b/src/apps/drive/dependency-injection/offline-drive/registerTemporalFilesServices.ts index dc9742681e..99b19e45a1 100644 --- a/src/apps/drive/dependency-injection/offline-drive/registerTemporalFilesServices.ts +++ b/src/apps/drive/dependency-injection/offline-drive/registerTemporalFilesServices.ts @@ -8,6 +8,7 @@ import { TemporalFileDeleter } from '../../../../context/storage/TemporalFiles/a import { TemporalFileByFolderFinder } from '../../../../context/storage/TemporalFiles/application/find/TemporalFileByFolderFinder'; import { TemporalFileByPathFinder } from '../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; import { TemporalFilePathsByFolderFinder } from '../../../../context/storage/TemporalFiles/application/find/TemporalFilePathsByFolderFinder'; +import { TemporalFileTruncater } from '../../../../context/storage/TemporalFiles/application/truncate/TemporalFileTruncater'; import { TemporalFileUploader } from '../../../../context/storage/TemporalFiles/application/upload/TemporalFileUploader'; import { TemporalFileWriter } from '../../../../context/storage/TemporalFiles/application/write/TemporalFileWriter'; import { TemporalFileRepository } from '../../../../context/storage/TemporalFiles/domain/TemporalFileRepository'; @@ -30,7 +31,6 @@ export async function registerTemporalFilesServices(builder: ContainerBuilder) { return repo; }) - .private() .asSingleton(); builder @@ -49,6 +49,7 @@ export async function registerTemporalFilesServices(builder: ContainerBuilder) { builder.registerAndUse(TemporalFileByPathFinder); builder.registerAndUse(TemporalFileUploader); builder.registerAndUse(TemporalFileWriter); + builder.registerAndUse(TemporalFileTruncater); builder.registerAndUse(TemporalFileByteByByteComparator); builder.registerAndUse(TemporalFileByFolderFinder); diff --git a/src/apps/drive/fuse/FuseApp.test.ts b/src/apps/drive/fuse/FuseApp.test.ts deleted file mode 100644 index 01a42f2cbb..0000000000 --- a/src/apps/drive/fuse/FuseApp.test.ts +++ /dev/null @@ -1,263 +0,0 @@ -import { Container } from 'diod'; -import { FuseApp } from './FuseApp'; -import { VirtualDrive } from '../virtual-drive/VirtualDrive'; -import { StorageClearer } from '../../../context/storage/StorageFiles/application/delete/StorageClearer'; -import { FileRepositorySynchronizer } from '../../../context/virtual-drive/files/application/FileRepositorySynchronizer'; -import { FolderRepositorySynchronizer } from '../../../context/virtual-drive/folders/application/FolderRepositorySynchronizer/FolderRepositorySynchronizer'; -import { RemoteTreeBuilder } from '../../../context/virtual-drive/remoteTree/application/RemoteTreeBuilder'; -import { StorageRemoteChangesSyncher } from '../../../context/storage/StorageFiles/application/sync/StorageRemoteChangesSyncher'; -import * as helpersModule from './helpers'; -import * as hydrationModule from '../../../backend/features/fuse/on-read/hydration-registry'; -import * as childProcess from 'child_process'; -import { partialSpyOn } from 'tests/vitest/utils.helper'; -import { loggerMock } from 'tests/vitest/mocks.helper'; -import { Abstract } from 'diod'; -import { ChildProcess, ExecFileException } from 'child_process'; - -type ExecFileCallback = (error: ExecFileException | null) => void; - -vi.mock('child_process', () => ({ - execFile: vi.fn(), -})); - -const mountPromiseMock = partialSpyOn(helpersModule, 'mountPromise'); -const destroyAllHydrationsMock = partialSpyOn(hydrationModule, 'destroyAllHydrations'); -const execFileMock = vi.mocked(childProcess.execFile); - -function createMockContainer() { - const services = new Map, unknown>(); - - const register = (token: Abstract, mock: unknown) => { - services.set(token, mock); - }; - - const container = { - get: vi.fn((token: Abstract) => { - return services.get(token) ?? { run: vi.fn() }; - }), - } as unknown as Container; - - return { container, register }; -} - -function createFuseApp(container: Container) { - const virtualDrive = {} as VirtualDrive; - return new FuseApp(virtualDrive, container, '/tmp/test-mount', 1, 'root-uuid'); -} - -describe('FuseApp', () => { - let container: Container; - let register: (token: Abstract, mock: unknown) => void; - let fuseApp: FuseApp; - - beforeEach(() => { - ({ container, register } = createMockContainer()); - fuseApp = createFuseApp(container); - }); - - describe('getStatus', () => { - it('should return UNMOUNTED initially', () => { - expect(fuseApp.getStatus()).toBe('UNMOUNTED'); - }); - }); - - describe('mount', () => { - it('should return UNMOUNTED if fuse is not initialized', async () => { - const status = await fuseApp.mount(); - - expect(status).toBe('UNMOUNTED'); - expect(loggerMock.error).toBeCalledWith({ - msg: '[FUSE] Cannot mount: FUSE instance not initialized', - }); - }); - - it('should mount successfully and emit mounted event', async () => { - mountPromiseMock.mockResolvedValueOnce(undefined); - const mountedHandler = vi.fn(); - fuseApp.on('mounted', mountedHandler); - - await fuseApp.start(); - - expect(fuseApp.getStatus()).toBe('MOUNTED'); - expect(mountedHandler).toHaveBeenCalled(); - }); - - it('should return MOUNTED without remounting if already mounted', async () => { - mountPromiseMock.mockResolvedValueOnce(undefined); - await fuseApp.start(); - - const status = await fuseApp.mount(); - - expect(status).toBe('MOUNTED'); - expect(loggerMock.debug).toBeCalledWith({ - msg: '[FUSE] Already mounted', - }); - }); - - it('should set status to ERROR if mount fails', async () => { - vi.useFakeTimers(); - mountPromiseMock.mockRejectedValue(new Error('mount failed')); - - const startPromise = fuseApp.start(); - // eslint-disable-next-line no-await-in-loop - for (let i = 0; i < 5; i++) { - await vi.advanceTimersByTimeAsync(3000); - } - await startPromise; - - expect(fuseApp.getStatus()).toBe('ERROR'); - vi.useRealTimers(); - }); - }); - - describe('start', () => { - it('should emit mount-error after all retries fail', async () => { - vi.useFakeTimers(); - mountPromiseMock.mockRejectedValue(new Error('mount failed')); - const mountErrorHandler = vi.fn(); - fuseApp.on('mount-error', mountErrorHandler); - - const startPromise = fuseApp.start(); - // eslint-disable-next-line no-await-in-loop - for (let i = 0; i < 5; i++) { - await vi.advanceTimersByTimeAsync(3000); - } - await startPromise; - - expect(mountErrorHandler).toHaveBeenCalled(); - expect(loggerMock.error).toBeCalledWith({ - msg: '[FUSE] mount error after max retries', - }); - vi.useRealTimers(); - }); - - it('should call update after successful mount', async () => { - const tree = { files: [], folders: [] }; - const remoteTreeBuilder = { run: vi.fn().mockResolvedValue(tree) }; - const fileSynchronizer = { run: vi.fn().mockResolvedValue(undefined) }; - const folderSynchronizer = { run: vi.fn().mockResolvedValue(undefined) }; - const storageSyncher = { run: vi.fn().mockResolvedValue(undefined) }; - - register(RemoteTreeBuilder, remoteTreeBuilder); - register(FileRepositorySynchronizer, fileSynchronizer); - register(FolderRepositorySynchronizer, folderSynchronizer); - register(StorageRemoteChangesSyncher, storageSyncher); - - mountPromiseMock.mockResolvedValueOnce(undefined); - - await fuseApp.start(); - - expect(remoteTreeBuilder.run).toBeCalledWith(1, 'root-uuid'); - }); - }); - - describe('stop', () => { - it('should do nothing if fuse is not initialized', async () => { - await fuseApp.stop(); - - expect(execFileMock).not.toHaveBeenCalled(); - }); - - it('should unmount fuse and reset status', async () => { - mountPromiseMock.mockResolvedValueOnce(undefined); - execFileMock.mockImplementation((_cmd, _args, ...rest) => { - const cb = rest.pop() as ExecFileCallback; - cb(null); - return {} as ChildProcess; - }); - - await fuseApp.start(); - expect(fuseApp.getStatus()).toBe('MOUNTED'); - - await fuseApp.stop(); - - expect(fuseApp.getStatus()).toBe('UNMOUNTED'); - expect(execFileMock).toBeCalledWith('/usr/bin/fusermount', ['-u', '/tmp/test-mount'], expect.any(Function)); - }); - - it('should fall back to lazy unmount when non-lazy fails', async () => { - mountPromiseMock.mockResolvedValueOnce(undefined); - - let callCount = 0; - execFileMock.mockImplementation((_cmd, _args, ...rest) => { - const cb = rest.pop() as ExecFileCallback; - callCount++; - if (callCount === 1) { - cb(new Error('device busy')); - } else { - cb(null); - } - return {} as ChildProcess; - }); - - await fuseApp.start(); - await fuseApp.stop(); - - expect(execFileMock).toHaveBeenCalledTimes(2); - expect(execFileMock).toBeCalledWith('/usr/bin/fusermount', ['-uz', '/tmp/test-mount'], expect.any(Function)); - }); - - it('should resolve even when both unmount attempts fail', async () => { - mountPromiseMock.mockResolvedValueOnce(undefined); - execFileMock.mockImplementation((_cmd, _args, ...rest) => { - const cb = rest.pop() as ExecFileCallback; - cb(new Error('unmount failed')); - return {} as ChildProcess; - }); - - await fuseApp.start(); - await fuseApp.stop(); - - expect(fuseApp.getStatus()).toBe('UNMOUNTED'); - expect(loggerMock.error).toBeCalledWith(expect.objectContaining({ msg: '[FUSE] lazy unmount failed:' })); - }); - }); - - describe('clearCache', () => { - it('should destroy hydrations and clear storage', async () => { - const storageClearer = { run: vi.fn().mockResolvedValue(undefined) }; - register(StorageClearer, storageClearer); - destroyAllHydrationsMock.mockResolvedValue(undefined); - - await fuseApp.clearCache(); - - expect(destroyAllHydrationsMock).toHaveBeenCalled(); - expect(storageClearer.run).toHaveBeenCalled(); - }); - }); - - describe('update', () => { - it('should build remote tree and synchronize', async () => { - const tree = { files: ['file1'], folders: ['folder1'] }; - const remoteTreeBuilder = { run: vi.fn().mockResolvedValue(tree) }; - const fileSynchronizer = { run: vi.fn().mockResolvedValue(undefined) }; - const folderSynchronizer = { run: vi.fn().mockResolvedValue(undefined) }; - const storageSyncher = { run: vi.fn().mockResolvedValue(undefined) }; - - register(RemoteTreeBuilder, remoteTreeBuilder); - register(FileRepositorySynchronizer, fileSynchronizer); - register(FolderRepositorySynchronizer, folderSynchronizer); - register(StorageRemoteChangesSyncher, storageSyncher); - - await fuseApp.update(); - - expect(remoteTreeBuilder.run).toBeCalledWith(1, 'root-uuid'); - expect(fileSynchronizer.run).toBeCalledWith(['file1']); - expect(folderSynchronizer.run).toBeCalledWith(['folder1']); - expect(storageSyncher.run).toHaveBeenCalled(); - }); - - it('should log error when tree building fails', async () => { - const error = new Error('network error'); - const remoteTreeBuilder = { run: vi.fn().mockRejectedValue(error) }; - register(RemoteTreeBuilder, remoteTreeBuilder); - - await fuseApp.update(); - - expect(loggerMock.error).toBeCalledWith({ - msg: '[FUSE] Error Updating the tree:', - error, - }); - }); - }); -}); diff --git a/src/apps/drive/fuse/FuseApp.ts b/src/apps/drive/fuse/FuseApp.ts index a93aff91cc..ab9e9eb0da 100644 --- a/src/apps/drive/fuse/FuseApp.ts +++ b/src/apps/drive/fuse/FuseApp.ts @@ -1,10 +1,5 @@ import { Container } from 'diod'; import { logger } from '@internxt/drive-desktop-core/build/backend'; -import { StorageClearer } from '../../../context/storage/StorageFiles/application/delete/StorageClearer'; -import { destroyAllHydrations } from '../../../backend/features/fuse/on-read/hydration-registry'; -import { FileRepositorySynchronizer } from '../../../context/virtual-drive/files/application/FileRepositorySynchronizer'; -import { FolderRepositorySynchronizer } from '../../../context/virtual-drive/folders/application/FolderRepositorySynchronizer/FolderRepositorySynchronizer'; -import { RemoteTreeBuilder } from '../../../context/virtual-drive/remoteTree/application/RemoteTreeBuilder'; import { VirtualDrive } from '../virtual-drive/VirtualDrive'; import { FuseDriveStatus } from './FuseDriveStatus'; import { CreateCallback } from './callbacks/CreateCallback'; @@ -12,31 +7,29 @@ import { GetAttributesCallback } from './callbacks/GetAttributesCallback'; import { GetXAttributeCallback } from './callbacks/GetXAttributeCallback'; import { MakeDirectoryCallback } from './callbacks/MakeDirectoryCallback'; import { OpenCallback } from './callbacks/OpenCallback'; -import { ReadCallback } from './callbacks/ReadCallback'; +// import { ReadCallback } from './callbacks/ReadCallback'; import { ReaddirCallback } from './callbacks/ReaddirCallback'; import { ReleaseCallback } from './callbacks/ReleaseCallback'; import { RenameMoveOrTrashCallback } from './callbacks/RenameOrMoveCallback'; import { TrashFileCallback } from './callbacks/TrashFileCallback'; import { TrashFolderCallback } from './callbacks/TrashFolderCallback'; import { WriteCallback } from './callbacks/WriteCallback'; -import { mountPromise } from './helpers'; -import { StorageRemoteChangesSyncher } from '../../../context/storage/StorageFiles/application/sync/StorageRemoteChangesSyncher'; +// import { mountPromise } from './helpers'; import { execFile } from 'node:child_process'; import { EventEmitter } from 'stream'; -import Fuse from '@gcas/fuse'; export class FuseApp extends EventEmitter { private status: FuseDriveStatus = 'UNMOUNTED'; private static readonly MAX_INT_32 = 2147483647; private static readonly MAX_RETRIES = 5; - private _fuse: Fuse | undefined; + // private _fuse: Fuse | undefined; constructor( private readonly virtualDrive: VirtualDrive, private readonly container: Container, private readonly localRoot: string, - private readonly remoteRoot: number, - private readonly remoteRootUuid: string, + // private readonly remoteRoot: number, + // private readonly remoteRootUuid: string, ) { super(); } @@ -45,7 +38,7 @@ export class FuseApp extends EventEmitter { const readdir = new ReaddirCallback(this.container); const getattr = new GetAttributesCallback(this.container); const open = new OpenCallback(this.virtualDrive, this.container); - const read = new ReadCallback(this.container); + // const read = new ReadCallback(this.container); const renameOrMove = new RenameMoveOrTrashCallback(this.container); const create = new CreateCallback(this.container); const makeDirectory = new MakeDirectoryCallback(this.container); @@ -59,7 +52,7 @@ export class FuseApp extends EventEmitter { getattr: getattr.handle.bind(getattr), readdir: readdir.handle.bind(readdir), open: open.handle.bind(open), - read: read.execute.bind(read), + // read: read.execute.bind(read), rename: renameOrMove.handle.bind(renameOrMove), create: create.handle.bind(create), write: write.execute.bind(write), @@ -71,40 +64,37 @@ export class FuseApp extends EventEmitter { }; } - async start() { - const ops = this.getOpt(); + // async start() { + // const ops = this.getOpt(); - this._fuse = new Fuse(this.localRoot, ops, { - debug: false, - force: true, - autoUnmount: true, - maxRead: FuseApp.MAX_INT_32, - }); + // this._fuse = new Fuse(this.localRoot, ops, { + // debug: false, + // force: true, + // autoUnmount: true, + // maxRead: FuseApp.MAX_INT_32, + // }); - const mountSuccessful = await this.mountWithRetries(); - if (!mountSuccessful) { - logger.error({ msg: '[FUSE] mount error after max retries' }); - this.emit('mount-error'); - return; - } + // const mountSuccessful = await this.mountWithRetries(); + // if (!mountSuccessful) { + // logger.error({ msg: '[FUSE] mount error after max retries' }); + // this.emit('mount-error'); + // return; + // } - await this.update(); - } + // await this.update(); + // } async stop() { - if (!this._fuse) { - return; - } + // if (!this._fuse) { + // return; + // } await this.unmountFuse(); - this._fuse = undefined; + // this._fuse = undefined; this.status = 'UNMOUNTED'; } private unmountFuse(): Promise { - // It is not possible to implement this method during logout while @gcas/fuse is still in use. - // For more information, see this issue. https://inxt.atlassian.net/browse/PB-5389 - const fusermount = '/usr/bin/fusermount'; return new Promise((resolve) => { execFile(fusermount, ['-u', this.localRoot], (err) => { @@ -123,26 +113,27 @@ export class FuseApp extends EventEmitter { }); } - async clearCache(): Promise { - await destroyAllHydrations(); - await this.container.get(StorageClearer).run(); - } + // async clearCache(): Promise { + // clearHydrationState(); + // await this.container.get(StorageClearer).run(); + // } - async update() { - try { - const tree = await this.container.get(RemoteTreeBuilder).run(this.remoteRoot, this.remoteRootUuid); + // async update() { + // try { + // const tree = await this.container.get(RemoteTreeBuilder) + // .run(this.remoteRoot, this.remoteRootUuid); - Promise.all([ - this.container.get(FileRepositorySynchronizer).run(tree.files), - this.container.get(FolderRepositorySynchronizer).run(tree.folders), - this.container.get(StorageRemoteChangesSyncher).run(), - ]); + // Promise.all([ + // this.container.get(FileRepositorySynchronizer).run(tree.files), + // this.container.get(FolderRepositorySynchronizer).run(tree.folders), + // this.container.get(StorageRemoteChangesSyncher).run(), + // ]); - logger.debug({ msg: '[FUSE] Tree updated successfully' }); - } catch (err) { - logger.error({ msg: '[FUSE] Error Updating the tree:', error: err }); - } - } + // logger.debug({ msg: '[FUSE] Tree updated successfully' }); + // } catch (err) { + // logger.error({ msg: '[FUSE] Error Updating the tree:', error: err }); + // } + // } getStatus() { return this.status; @@ -154,13 +145,13 @@ export class FuseApp extends EventEmitter { return this.status; } - if (!this._fuse) { - logger.error({ msg: '[FUSE] Cannot mount: FUSE instance not initialized' }); - return this.status; - } + // if (!this._fuse) { + // logger.error({ msg: '[FUSE] Cannot mount: FUSE instance not initialized' }); + // return this.status; + // } try { - await mountPromise(this._fuse); + // await mountPromise(this._fuse); this.status = 'MOUNTED'; this.emit('mounted'); } catch (err) { @@ -171,18 +162,18 @@ export class FuseApp extends EventEmitter { return this.status; } - private async mountWithRetries(): Promise { - for (let attempt = 1; attempt <= FuseApp.MAX_RETRIES; attempt++) { - const status = await this.mount(); + // private async mountWithRetries(): Promise { + // for (let attempt = 1; attempt <= FuseApp.MAX_RETRIES; attempt++) { + // const status = await this.mount(); - if (status === 'MOUNTED') return true; + // if (status === 'MOUNTED') return true; - if (attempt < FuseApp.MAX_RETRIES) { - const delay = Math.min(1000 * attempt, 3000); - await new Promise((resolve) => setTimeout(resolve, delay)); - } - } + // if (attempt < FuseApp.MAX_RETRIES) { + // const delay = Math.min(1000 * attempt, 3000); + // await new Promise((resolve) => setTimeout(resolve, delay)); + // } + // } - return false; - } + // return false; + // } } diff --git a/src/apps/drive/fuse/callbacks/CreateCallback.ts b/src/apps/drive/fuse/callbacks/CreateCallback.ts index 766aea95d0..272342884e 100644 --- a/src/apps/drive/fuse/callbacks/CreateCallback.ts +++ b/src/apps/drive/fuse/callbacks/CreateCallback.ts @@ -7,7 +7,7 @@ export class CreateCallback extends NotifyFuseCallback { super('Create'); } - async execute(path: string, _mode: number) { + async execute(path: string) { await this.container.get(TemporalFileCreator).run(path); return this.right(); diff --git a/src/apps/drive/fuse/callbacks/FuseCallback.ts b/src/apps/drive/fuse/callbacks/FuseCallback.ts index 82b44b9db7..9c762411df 100644 --- a/src/apps/drive/fuse/callbacks/FuseCallback.ts +++ b/src/apps/drive/fuse/callbacks/FuseCallback.ts @@ -31,7 +31,7 @@ export abstract class FuseCallback { }, ) {} - protected async executeAndCatch(params: any[]): Promise> { + protected async executeAndCatch(params: unknown[]): Promise> { // Ensure that an Either is always returned const stopwatch = new Stopwatch(); @@ -103,10 +103,10 @@ export abstract class FuseCallback { logger.debug({ msg: `${this.name}: `, message }); } - async handle(...params: any[]): Promise { + async handle(...params: unknown[]): Promise { const callback = params.pop() as CallbackWithData; - if (PathsToIgnore.some((regex) => regex.test(params[0]))) { + if (typeof params[0] === 'string' && PathsToIgnore.some((regex) => regex.test(params[0] as string))) { return callback(FuseCodes.EINVAL); } @@ -126,7 +126,7 @@ export abstract class FuseCallback { callback(FuseCallback.OK, data); } - abstract execute(...params: any[]): Promise>; + abstract execute(...params: unknown[]): Promise>; } export abstract class NotifyFuseCallback extends FuseCallback { @@ -134,7 +134,7 @@ export abstract class NotifyFuseCallback extends FuseCallback { return right(undefined); } - async handle(...params: any[]): Promise { + async handle(...params: unknown[]): Promise { const callback = params.pop() as Callback; if (this.debug.input) { diff --git a/src/apps/drive/fuse/callbacks/FuseCodes.ts b/src/apps/drive/fuse/callbacks/FuseCodes.ts index 9c3fdf2b98..db0a7823ba 100644 --- a/src/apps/drive/fuse/callbacks/FuseCodes.ts +++ b/src/apps/drive/fuse/callbacks/FuseCodes.ts @@ -1,25 +1,24 @@ -// eslint-disable-next-line @typescript-eslint/no-var-requires -import Fuse from '@gcas/fuse'; - -export enum FuseCodes { +export const FuseCodes = { // Operation not supported (Functionality not implemented) - ENOSYS = Fuse.ENOSYS, + ENOSYS: 38, // No such file or directory - ENOENT = Fuse.ENOENT, + ENOENT: 2, // File or directory already exists - EEXIST = Fuse.EEXIST, + EEXIST: 17, // Input/output error - EIO = Fuse.EIO, + EIO: 5, // Invalid argument - EINVAL = Fuse.EINVAL, + EINVAL: 22, // Permission denied - EACCES = Fuse.EACCES, + EACCES: 13, // Network is down - ENETDOWN = Fuse.ENETDOWN, -} + ENETDOWN: 100, +} as const; + +export type FuseCode = (typeof FuseCodes)[keyof typeof FuseCodes]; diff --git a/src/apps/drive/fuse/callbacks/FuseErrors.ts b/src/apps/drive/fuse/callbacks/FuseErrors.ts index 1feead2251..1ff7dbf469 100644 --- a/src/apps/drive/fuse/callbacks/FuseErrors.ts +++ b/src/apps/drive/fuse/callbacks/FuseErrors.ts @@ -1,10 +1,10 @@ -import { FuseCodes } from './FuseCodes'; +import { FuseCode, FuseCodes } from './FuseCodes'; export class FuseError extends Error { public readonly code: number; public readonly timestamp: Date; - constructor(code: FuseCodes, message: string) { + constructor(code: FuseCode, message: string) { super(message); this.code = code; this.timestamp = new Date(); diff --git a/src/apps/drive/fuse/callbacks/GetXAttributeCallback.ts b/src/apps/drive/fuse/callbacks/GetXAttributeCallback.ts index bb52d5fae9..2e6095365e 100644 --- a/src/apps/drive/fuse/callbacks/GetXAttributeCallback.ts +++ b/src/apps/drive/fuse/callbacks/GetXAttributeCallback.ts @@ -16,7 +16,7 @@ export class GetXAttributeCallback extends FuseCallback { return path === '/'; } - async execute(path: string, _name: string, _size: string) { + async execute(path: string) { if (this.isRootFolder(path)) { return this.left(new FuseError(FuseCodes.ENOSYS, 'Cannot get the status of root folder')); } diff --git a/src/apps/drive/fuse/callbacks/MakeDirectoryCallback.ts b/src/apps/drive/fuse/callbacks/MakeDirectoryCallback.ts index 84eca27ca7..a7e365ac8a 100644 --- a/src/apps/drive/fuse/callbacks/MakeDirectoryCallback.ts +++ b/src/apps/drive/fuse/callbacks/MakeDirectoryCallback.ts @@ -9,7 +9,7 @@ export class MakeDirectoryCallback extends NotifyFuseCallback { super('Make Directory'); } - async execute(path: string, _mode: number) { + async execute(path: string) { if (path.startsWith('/.Trash')) { return this.right(); } diff --git a/src/apps/drive/fuse/callbacks/ReadCallback.test.ts b/src/apps/drive/fuse/callbacks/ReadCallback.test.ts index 0c1e6a4f61..ae35711c93 100644 --- a/src/apps/drive/fuse/callbacks/ReadCallback.test.ts +++ b/src/apps/drive/fuse/callbacks/ReadCallback.test.ts @@ -1,68 +1,58 @@ -import Fuse from '@gcas/fuse'; -import { ReadCallback } from './ReadCallback'; -import * as handleReadModule from '../../../../backend/features/fuse/on-read/handle-read-callback'; -import { partialSpyOn } from '../../../../../tests/vitest/utils.helper'; -import { left, right } from '../../../../context/shared/domain/Either'; -import { FuseNoSuchFileOrDirectoryError } from './FuseErrors'; - -const handleReadCallbackMock = partialSpyOn(handleReadModule, 'handleReadCallback'); - -function createMockContainer() { - return { - get: vi.fn().mockReturnValue({ - run: vi.fn(), - exists: vi.fn(), - register: vi.fn(), - downloadStarted: vi.fn(), - downloadUpdate: vi.fn(), - downloadFinished: vi.fn(), - elapsedTime: vi.fn(), - }), - } as any; -} - -describe('ReadCallback', () => { - it('should copy chunk into buf and call cb with chunk length on success', async () => { - const chunk = Buffer.from('hello'); - handleReadCallbackMock.mockResolvedValue(right(chunk)); - const buf = Buffer.alloc(10); - const cb = vi.fn(); - const callback = new ReadCallback(createMockContainer()); - - await callback.execute('/file.txt', 0, buf, 5, 0, cb); - - expect(buf.subarray(0, 5).toString()).toBe('hello'); - expect(cb).toHaveBeenCalledWith(5); - }); - - it('should call cb with error code when result is left', async () => { - const error = new FuseNoSuchFileOrDirectoryError('/file.txt'); - handleReadCallbackMock.mockResolvedValue(left(error)); - const cb = vi.fn(); - const callback = new ReadCallback(createMockContainer()); - - await callback.execute('/file.txt', 0, Buffer.alloc(10), 10, 0, cb); - - expect(cb).toHaveBeenCalledWith(error.code); - }); - - it('should call cb with Fuse.EIO when an exception is thrown', async () => { - handleReadCallbackMock.mockRejectedValue(new Error('unexpected')); - const cb = vi.fn(); - const callback = new ReadCallback(createMockContainer()); - - await callback.execute('/file.txt', 0, Buffer.alloc(10), 10, 0, cb); - - expect(cb).toHaveBeenCalledWith(Fuse.EIO); - }); - - it('should call cb with 0 when result is an empty buffer', async () => { - handleReadCallbackMock.mockResolvedValue(right(Buffer.alloc(0))); - const cb = vi.fn(); - const callback = new ReadCallback(createMockContainer()); - - await callback.execute('/file.txt', 0, Buffer.alloc(10), 10, 0, cb); - - expect(cb).toHaveBeenCalledWith(0); - }); +// import Fuse from '@gcas/fuse'; +// import { ReadCallback } from './ReadCallback'; +// import * as handleReadModule from '../../../../backend/features/fuse/on-read/handle-read-callback'; +// import { partialSpyOn } from '../../../../../tests/vitest/utils.helper'; +// import { left, right } from '../../../../context/shared/domain/Either'; +// import { FuseNoSuchFileOrDirectoryError } from './FuseErrors'; +// import { type Container } from 'diod'; + +// const handleReadCallbackMock = partialSpyOn(handleReadModule, 'handleReadCallback'); + +// function createMockContainer() { +// return { +// get: vi.fn().mockReturnValue({ +// run: vi.fn(), +// exists: vi.fn(), +// register: vi.fn(), +// downloadStarted: vi.fn(), +// downloadUpdate: vi.fn(), +// downloadFinished: vi.fn(), +// elapsedTime: vi.fn(), +// }), +// } as Partial as Container; +// } + +describe.skip('ReadCallback', () => { + // it('should copy chunk into buf and call cb with chunk length on success', async () => { + // const chunk = Buffer.from('hello'); + // handleReadCallbackMock.mockResolvedValue(right(chunk)); + // const buf = Buffer.alloc(10); + // const cb = vi.fn(); + // const callback = new ReadCallback(createMockContainer()); + // await callback.execute('/file.txt', 0, buf, 5, 0, cb); + // expect(buf.subarray(0, 5).toString()).toBe('hello'); + // expect(cb).toHaveBeenCalledWith(5); + // }); + // it('should call cb with error code when result is left', async () => { + // const error = new FuseNoSuchFileOrDirectoryError('/file.txt'); + // handleReadCallbackMock.mockResolvedValue(left(error)); + // const cb = vi.fn(); + // const callback = new ReadCallback(createMockContainer()); + // await callback.execute('/file.txt', 0, Buffer.alloc(10), 10, 0, cb); + // expect(cb).toHaveBeenCalledWith(error.code); + // }); + // it('should call cb with Fuse.EIO when an exception is thrown', async () => { + // handleReadCallbackMock.mockRejectedValue(new Error('unexpected')); + // const cb = vi.fn(); + // const callback = new ReadCallback(createMockContainer()); + // await callback.execute('/file.txt', 0, Buffer.alloc(10), 10, 0, cb); + // expect(cb).toHaveBeenCalledWith(Fuse.EIO); + // }); + // it('should call cb with 0 when result is an empty buffer', async () => { + // handleReadCallbackMock.mockResolvedValue(right(Buffer.alloc(0))); + // const cb = vi.fn(); + // const callback = new ReadCallback(createMockContainer()); + // await callback.execute('/file.txt', 0, Buffer.alloc(10), 10, 0, cb); + // expect(cb).toHaveBeenCalledWith(0); + // }); }); diff --git a/src/apps/drive/fuse/callbacks/ReadCallback.ts b/src/apps/drive/fuse/callbacks/ReadCallback.ts index b56e40d073..055810e5f9 100644 --- a/src/apps/drive/fuse/callbacks/ReadCallback.ts +++ b/src/apps/drive/fuse/callbacks/ReadCallback.ts @@ -1,78 +1,73 @@ -import { Container } from 'diod'; -import { logger } from '@internxt/drive-desktop-core/build/backend'; -import { TemporalFileByPathFinder } from '../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; -import { FirstsFileSearcher } from '../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; -import { StorageFilesRepository } from '../../../../context/storage/StorageFiles/domain/StorageFilesRepository'; -import { StorageFileId } from '../../../../context/storage/StorageFiles/domain/StorageFileId'; -import { StorageFile } from '../../../../context/storage/StorageFiles/domain/StorageFile'; -import { StorageFileDownloader } from '../../../../context/storage/StorageFiles/application/download/StorageFileDownloader/StorageFileDownloader'; -import { DownloadProgressTracker } from '../../../../context/shared/domain/DownloadProgressTracker'; -import { type File } from '../../../../context/virtual-drive/files/domain/File'; -import { - handleReadCallback, - type HandleReadCallbackDeps, -} from '../../../../backend/features/fuse/on-read/handle-read-callback'; +// import { Container } from 'diod'; +// import { logger } from '@internxt/drive-desktop-core/build/backend'; +// import { TemporalFileByPathFinder } from '../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +// import { TemporalFileChunkReader } from '../../../../context/storage/TemporalFiles/application/read/TemporalFileChunkReader'; +// import { FirstsFileSearcher } from '../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +// import { StorageFilesRepository } from '../../../../context/storage/StorageFiles/domain/StorageFilesRepository'; +// import { StorageFile } from '../../../../context/storage/StorageFiles/domain/StorageFile'; +// import { DownloadProgressTracker } from '../../../../context/shared/domain/DownloadProgressTracker'; +// import { +// handleReadCallback, +// type HandleReadCallbackDeps, +// } from '../../../../backend/features/fuse/on-read/handle-read-callback'; +// import { buildNetworkClient } from '../../../../infra/environment/download-file/build-network-client'; +// import { getCredentials } from '../../../main/auth/get-credentials'; +// import { DependencyInjectionUserProvider } from '../../../shared/dependency-injection/DependencyInjectionUserProvider'; -import Fuse from '@gcas/fuse'; +// export class ReadCallback { +// constructor(private readonly container: Container) {} -export class ReadCallback { - constructor(private readonly container: Container) {} +// async execute( +// path: string, +// _fd: unknown, +// buf: Buffer, +// len: number, +// pos: number, +// cb: (code: number, params?: unknown) => void, +// ) { +// try { +// const { mnemonic } = getCredentials(); +// const user = DependencyInjectionUserProvider.get(); +// const network = buildNetworkClient({ bridgeUser: user.bridgeUser, userId: user.userId }); +// const repo = this.container.get(StorageFilesRepository); +// const tracker = this.container.get(DownloadProgressTracker); - async execute( - path: string, - _fd: any, - buf: Buffer, - len: number, - pos: number, - cb: (code: number, params?: any) => void, - ) { - try { - const repo = this.container.get(StorageFilesRepository); - const downloader = this.container.get(StorageFileDownloader); - const tracker = this.container.get(DownloadProgressTracker); +// const deps: HandleReadCallbackDeps = { +// findVirtualFile: (p: string) => this.container.get(FirstsFileSearcher).run({ path: p }), +// findTemporalFile: (p: string) => this.container.get(TemporalFileByPathFinder).run(p), +// readTemporalFileChunk: async (p: string, length: number, position: number) => { +// const result = await this.container.get(TemporalFileChunkReader).run(p, length, position); +// return result.isPresent() ? result.get() : undefined; +// }, +// onDownloadProgress: (name, extension, bytesDownloaded, fileSize, elapsedTime) => { +// tracker.downloadUpdate(name, extension, { +// percentage: Math.min(bytesDownloaded / fileSize, 1), +// elapsedTime, +// }); +// }, +// saveToRepository: async (contentsId, size, uuid, name, extension) => { +// const storage = StorageFile.from({ id: contentsId, virtualId: uuid, size }); +// await repo.register(storage); +// tracker.downloadFinished(name, extension); +// }, +// bucketId: user.bucket, +// mnemonic, +// network, +// }; - const deps: HandleReadCallbackDeps = { - findVirtualFile: (p: string) => this.container.get(FirstsFileSearcher).run({ path: p }), - findTemporalFile: (p: string) => this.container.get(TemporalFileByPathFinder).run(p), - existsOnDisk: (contentsId: string) => repo.exists(new StorageFileId(contentsId)), +// const result = await handleReadCallback(deps, path, len, pos); - startDownload: async (virtualFile: File) => { - const storage = StorageFile.from({ - id: virtualFile.contentsId, - virtualId: virtualFile.uuid, - size: virtualFile.size, - }); - tracker.downloadStarted(virtualFile.name, virtualFile.type); - const { stream, handler } = await downloader.run(storage, virtualFile); - return { stream, elapsedTime: () => handler.elapsedTime() }; - }, - onDownloadProgress: (name, extension, progress) => { - tracker.downloadUpdate(name, extension, progress); - }, - saveToRepository: async (contentsId, size, uuid, name, extension) => { - const storage = StorageFile.from({ - id: contentsId, - virtualId: uuid, - size, - }); - await repo.register(storage); - tracker.downloadFinished(name, extension); - }, - }; +// if (result.isLeft()) { +// cb(result.getLeft().code); +// return; +// } - const result = await handleReadCallback(deps, path, len, pos); - - if (result.isLeft()) { - cb(result.getLeft().code); - return; - } - - const chunk = result.getRight(); - chunk.copy(buf as unknown as Uint8Array); - cb(chunk.length); - } catch (err) { - logger.error({ msg: '[ReadCallback] Error reading file:', error: err, path }); - cb(Fuse.EIO); - } - } -} +// const chunk = result.getRight(); +// chunk.copy(buf as unknown as Uint8Array); +// cb(chunk.length); +// } catch (err) { +// logger.error({ msg: '[ReadCallback] Error reading file:', error: err, path }); +// cb(Fuse.EIO); +// } +// } +// } diff --git a/src/apps/drive/fuse/callbacks/ReleaseCallback.test.ts b/src/apps/drive/fuse/callbacks/ReleaseCallback.test.ts index 32cbe5545c..c28d0bf1a2 100644 --- a/src/apps/drive/fuse/callbacks/ReleaseCallback.test.ts +++ b/src/apps/drive/fuse/callbacks/ReleaseCallback.test.ts @@ -5,6 +5,7 @@ import { right } from '../../../../context/shared/domain/Either'; import * as openFlagsTracker from './../../../../backend/features/fuse/on-open/open-flags-tracker'; import * as handleReleaseModule from '../../../../backend/features/fuse/on-release/handle-release-callback'; import { partialSpyOn } from '../../../../../tests/vitest/utils.helper'; +import { Container } from 'diod'; vi.mock(import('@internxt/drive-desktop-core/build/backend')); @@ -12,7 +13,7 @@ describe('ReleaseCallback', () => { const onReleaseSpy = partialSpyOn(openFlagsTracker, 'onRelease'); const handleReleaseSpy = partialSpyOn(handleReleaseModule, 'handleReleaseCallback'); - const container = { get: vi.fn() } as any; + const container = { get: vi.fn() } as unknown as Container; const releaseCallback = new ReleaseCallback(container); it('should call onRelease to clean up open flags tracker', async () => { handleReleaseSpy.mockResolvedValue(right(undefined)); diff --git a/src/apps/drive/fuse/callbacks/TrashFolderCallback.test.ts b/src/apps/drive/fuse/callbacks/TrashFolderCallback.test.ts new file mode 100644 index 0000000000..ab64869527 --- /dev/null +++ b/src/apps/drive/fuse/callbacks/TrashFolderCallback.test.ts @@ -0,0 +1,103 @@ +import { FolderDeleter } from '../../../../context/virtual-drive/folders/application/FolderDeleter'; +import { SingleFolderMatchingFinder } from '../../../../context/virtual-drive/folders/application/SingleFolderMatchingFinder'; +import { FolderMother } from '../../../../context/virtual-drive/folders/domain/__test-helpers__/FolderMother'; +import { FolderStatuses } from '../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { SyncFolderMessenger } from '../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { ContainerMock } from '../../__mocks__/ContainerMock'; +import { TrashFolderCallback } from './TrashFolderCallback'; + +describe('TrashFolderCallback', () => { + it('returns success even when folder deletion exceeds callback timeout', async () => { + vi.useFakeTimers(); + + try { + const container = new ContainerMock(); + const folder = FolderMother.any(); + + const folderFinder = { + run: vi.fn(async () => { + return folder; + }), + } as unknown as SingleFolderMatchingFinder; + + const folderDeleter = { + run: vi.fn(() => { + return new Promise((resolve) => { + setTimeout(resolve, 5_000); + }); + }), + } as unknown as FolderDeleter; + + container.set(SingleFolderMatchingFinder, folderFinder); + container.set(FolderDeleter, folderDeleter); + + const callback = new TrashFolderCallback(container as never); + const resultPromise = callback.execute('/Files/SlowFolder'); + + await vi.advanceTimersByTimeAsync(1_600); + + const result = await resultPromise; + + expect(result.isRight()).toBe(true); + expect(folderFinder.run).toHaveBeenCalledWith({ + path: '/Files/SlowFolder', + status: FolderStatuses.EXISTS, + }); + expect(folderDeleter.run).toHaveBeenCalledWith(folder.uuid); + } finally { + vi.useRealTimers(); + } + }); + + it('reports issue when background deletion fails after timeout', async () => { + vi.useFakeTimers(); + + try { + const container = new ContainerMock(); + const folder = FolderMother.any(); + + const folderFinder = { + run: vi.fn(async () => { + return folder; + }), + } as unknown as SingleFolderMatchingFinder; + + const folderDeleter = { + run: vi.fn(() => { + return new Promise((_resolve, reject) => { + setTimeout(() => { + reject(new Error('slow-delete-failed')); + }, 5_000); + }); + }), + } as unknown as FolderDeleter; + + const syncFolderMessenger = { + issue: vi.fn(async () => undefined), + } as unknown as SyncFolderMessenger; + + container.set(SingleFolderMatchingFinder, folderFinder); + container.set(FolderDeleter, folderDeleter); + container.set(SyncFolderMessenger, syncFolderMessenger); + + const callback = new TrashFolderCallback(container as never); + const resultPromise = callback.execute('/Files/SlowFolder'); + + await vi.advanceTimersByTimeAsync(1_600); + + const result = await resultPromise; + expect(result.isRight()).toBe(true); + + await vi.advanceTimersByTimeAsync(3_500); + await Promise.resolve(); + + expect(syncFolderMessenger.issue).toHaveBeenCalledWith({ + error: 'FOLDER_TRASH_ERROR', + cause: 'UNKNOWN', + name: 'SlowFolder', + }); + } finally { + vi.useRealTimers(); + } + }); +}); diff --git a/src/apps/drive/fuse/callbacks/TrashFolderCallback.ts b/src/apps/drive/fuse/callbacks/TrashFolderCallback.ts index 89fed0cb47..a93c7beb46 100644 --- a/src/apps/drive/fuse/callbacks/TrashFolderCallback.ts +++ b/src/apps/drive/fuse/callbacks/TrashFolderCallback.ts @@ -1,11 +1,30 @@ import { Container } from 'diod'; import { basename } from 'path'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; import { FolderDeleter } from '../../../../context/virtual-drive/folders/application/FolderDeleter'; import { SingleFolderMatchingFinder } from '../../../../context/virtual-drive/folders/application/SingleFolderMatchingFinder'; import { FolderStatuses } from '../../../../context/virtual-drive/folders/domain/FolderStatus'; import { SyncFolderMessenger } from '../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; import { NotifyFuseCallback } from './FuseCallback'; +const FOLDER_TRASH_CALLBACK_TIMEOUT_MS = 1_500; + +type WaitWithTimeoutPops = { + promise: Promise; + timeoutMs: number; +}; + +async function waitWithTimeout({ promise, timeoutMs }: WaitWithTimeoutPops) { + const completion = promise.then(() => true); + const timeout = new Promise((resolve) => { + setTimeout(() => { + resolve(false); + }, timeoutMs); + }); + + return Promise.race([completion, timeout]); +} + export class TrashFolderCallback extends NotifyFuseCallback { constructor(private readonly container: Container) { super('Trash Folder'); @@ -18,7 +37,33 @@ export class TrashFolderCallback extends NotifyFuseCallback { status: FolderStatuses.EXISTS, }); - await this.container.get(FolderDeleter).run(folder.uuid); + const deletionPromise = this.container.get(FolderDeleter).run(folder.uuid); + const deletionCompletedInTime = await waitWithTimeout({ + promise: deletionPromise, + timeoutMs: FOLDER_TRASH_CALLBACK_TIMEOUT_MS, + }); + + if (!deletionCompletedInTime) { + logger.warn({ + msg: 'Folder deletion exceeded callback timeout. Continuing deletion in background.', + path, + timeoutMs: FOLDER_TRASH_CALLBACK_TIMEOUT_MS, + }); + + void deletionPromise.catch(async (error) => { + logger.error({ + msg: 'Background folder deletion failed after callback timeout', + path, + error, + }); + + await this.container.get(SyncFolderMessenger).issue({ + error: 'FOLDER_TRASH_ERROR', + cause: 'UNKNOWN', + name: basename(path), + }); + }); + } return this.right(); } catch (throwed: unknown) { diff --git a/src/apps/drive/fuse/gcas-fuse.d.ts b/src/apps/drive/fuse/gcas-fuse.d.ts deleted file mode 100644 index 944be1a529..0000000000 --- a/src/apps/drive/fuse/gcas-fuse.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -declare module '@gcas/fuse' { - import FuseClass from 'fuse'; - export * from 'fuse'; - export default FuseClass; -} diff --git a/src/apps/drive/fuse/helpers.ts b/src/apps/drive/fuse/helpers.ts deleted file mode 100644 index c16518401f..0000000000 --- a/src/apps/drive/fuse/helpers.ts +++ /dev/null @@ -1,14 +0,0 @@ -import Fuse from '@gcas/fuse'; - -export function mountPromise(fuse: Fuse): Promise { - return new Promise((resolve, reject) => { - fuse.mount((err: unknown) => { - if (err) { - reject(err); - return; - } - - resolve(); - }); - }); -} diff --git a/src/apps/drive/hydration-api/HydrationApi.test.ts b/src/apps/drive/hydration-api/HydrationApi.test.ts index 96f55e42af..067c4e254a 100644 --- a/src/apps/drive/hydration-api/HydrationApi.test.ts +++ b/src/apps/drive/hydration-api/HydrationApi.test.ts @@ -55,7 +55,7 @@ describe('HydrationApi', () => { await hydrationApi.start({ debug: true, timeElapsed: false }); - const response = await fetch('http://localhost:4567/hydration/test'); + await fetch('http://localhost:4567/hydration/test'); // The request itself may 404, but the debug middleware should have logged expect(loggerMock.debug).toBeCalledWith( expect.objectContaining({ diff --git a/src/apps/drive/hydration-api/controllers/contents.ts b/src/apps/drive/hydration-api/controllers/contents.ts index 3480e33f71..4468d73d82 100644 --- a/src/apps/drive/hydration-api/controllers/contents.ts +++ b/src/apps/drive/hydration-api/controllers/contents.ts @@ -1,5 +1,4 @@ import { Container } from 'diod'; -import { logger } from '@internxt/drive-desktop-core/build/backend'; import { NextFunction, Request, Response } from 'express'; import { extname } from 'path'; import { StorageFileDeleter } from '../../../../context/storage/StorageFiles/application/delete/StorageFileDeleter'; diff --git a/src/apps/drive/hydration-api/controllers/files.ts b/src/apps/drive/hydration-api/controllers/files.ts index 2e002fa1ba..02745291bf 100644 --- a/src/apps/drive/hydration-api/controllers/files.ts +++ b/src/apps/drive/hydration-api/controllers/files.ts @@ -14,17 +14,9 @@ export function buildFilesControllers(container: Container) { }; const filter = async (req: Request, res: Response) => { - const filter = Object.entries(req.query) - - .map(([key, param]) => { - return { key, value: param }; - }) - .reduce((partial: Partial, { key, value }: any) => { - return { - ...partial, - [key]: value.toString(), - }; - }, {}); + const filter = Object.fromEntries( + Object.entries(req.query).filter((entry): entry is [string, string] => typeof entry[1] === 'string'), + ) as Partial; const files = await container.get(FilesSearcherByPartialMatch).run(filter); diff --git a/src/apps/drive/index.ts b/src/apps/drive/index.ts index c48f372b68..6ac0f286c1 100644 --- a/src/apps/drive/index.ts +++ b/src/apps/drive/index.ts @@ -1,79 +1,69 @@ -import { getRootVirtualDrive } from '../main/virtual-root-folder/service'; -import { broadcastToWindows } from '../main/windows'; -import { DependencyInjectionUserProvider } from '../shared/dependency-injection/DependencyInjectionUserProvider'; -import { VirtualDrive } from './virtual-drive/VirtualDrive'; -import { DriveDependencyContainerFactory } from './dependency-injection/DriveDependencyContainerFactory'; -import { FuseApp } from './fuse/FuseApp'; -import { HydrationApi } from './hydration-api/HydrationApi'; -import { logger } from '@internxt/drive-desktop-core/build/backend'; - -let fuseApp: FuseApp; -let hydrationApi: HydrationApi; - -export async function startVirtualDrive() { - const localRoot = getRootVirtualDrive(); - - const container = await DriveDependencyContainerFactory.build(); - - const user = DependencyInjectionUserProvider.get(); - - const virtualDrive = new VirtualDrive(container); - - hydrationApi = new HydrationApi(container); - - fuseApp = new FuseApp(virtualDrive, container, localRoot, user.root_folder_id, user.rootFolderId); - - fuseApp.on('mounted', () => broadcastToWindows('virtual-drive-status-change', 'MOUNTED')); - - fuseApp.on('mount-error', () => broadcastToWindows('virtual-drive-status-change', 'ERROR')); - - await hydrationApi.start({ debug: false, timeElapsed: false }); - - await fuseApp.start(); -} - -export async function stopAndClearFuseApp() { - await stopHydrationApi(); - await stopFuseApp(); -} - -export async function updateFuseApp() { - await fuseApp.update(); -} - -export function getFuseDriveState() { - if (!fuseApp) { - return 'UNMOUNTED'; - } - return fuseApp.getStatus(); -} - -async function stopFuseApp() { - if (!fuseApp) { - logger.debug({ msg: 'FuseApp not initialized, skipping stop.' }); - return; - } - - try { - await stopHydrationApi(); - logger.debug({ msg: 'Stopping and clearing FuseApp...' }); - await fuseApp.clearCache(); - await fuseApp.stop(); - } catch (error) { - logger.error({ msg: 'Error stopping and clearing FUSE app:', error }); - } -} - -export async function stopHydrationApi() { - if (!hydrationApi) { - logger.debug({ msg: 'HydrationApi not initialized, skipping stop.' }); - return; - } - - try { - logger.debug({ msg: 'Stopping HydrationApi...' }); - await hydrationApi.stop(); - } catch (error) { - logger.error({ msg: 'Error stopping HydrationApi:', error }); - } -} +// import { getRootVirtualDrive } from '../main/virtual-root-folder/service'; +// import { broadcastToWindows } from '../main/windows'; +// import { DependencyInjectionUserProvider } from '../shared/dependency-injection/DependencyInjectionUserProvider'; +// import { VirtualDrive } from './virtual-drive/VirtualDrive'; +// import { DriveDependencyContainerFactory } from './dependency-injection/DriveDependencyContainerFactory'; +// import { FuseApp } from './fuse/FuseApp'; +// import { HydrationApi } from './hydration-api/HydrationApi'; +// import { logger } from '@internxt/drive-desktop-core/build/backend'; +// import { +// startFuseDaemonServer, +// stopFuseDaemonServer, +// startDaemon, +// stopDaemon, +// } from '../../backend/features/virtual-drive'; + +// let fuseApp: FuseApp; +// let hydrationApi: HydrationApi; + +// export async function startVirtualDrive() { +// const localRoot = getRootVirtualDrive(); + +// const container = await DriveDependencyContainerFactory.build(); +// const user = DependencyInjectionUserProvider.get(); +// const virtualDrive = new VirtualDrive(container); +// hydrationApi = new HydrationApi(container); +// fuseApp = new FuseApp(virtualDrive, container, localRoot, user.root_folder_id, user.rootFolderId); +// fuseApp.on('mounted', () => broadcastToWindows('virtual-drive-status-change', 'MOUNTED')); +// fuseApp.on('mount-error', () => broadcastToWindows('virtual-drive-status-change', 'ERROR')); +// await hydrationApi.start({ debug: false, timeElapsed: false }); +// await fuseApp.start(); + +// await startFuseDaemonServer(container); +// await startDaemon(localRoot); + +// broadcastToWindows('virtual-drive-status-change', 'MOUNTED'); +// logger.debug({ msg: '[FUSE DAEMON] virtual drive mounted and ready' }); +// } + +// export async function stopAndClearFuseApp() { +// await stopHydrationApi(); +// await stopDaemon(); +// await stopFuseDaemonServer(); +// } + +// export async function updateFuseApp() { +// await fuseApp.update(); +// } + +// export function getFuseDriveState() { +// if (!fuseApp) { +// return 'UNMOUNTED'; +// } +// return fuseApp.getStatus(); +// return 'MOUNTED'; +// } + +// export async function stopHydrationApi() { +// if (!hydrationApi) { +// logger.debug({ msg: 'HydrationApi not initialized, skipping stop.' }); +// return; +// } + +// try { +// logger.debug({ msg: 'Stopping HydrationApi...' }); +// await hydrationApi.stop(); +// } catch (error) { +// logger.error({ msg: 'Error stopping HydrationApi:', error }); +// } +// } diff --git a/src/apps/main/antivirus/AntivirusScanService.ts b/src/apps/main/antivirus/AntivirusScanService.ts index b53eb7194b..95ca9361bf 100644 --- a/src/apps/main/antivirus/AntivirusScanService.ts +++ b/src/apps/main/antivirus/AntivirusScanService.ts @@ -1,8 +1,8 @@ +import { homedir } from 'node:os'; import { logger } from '@internxt/drive-desktop-core/build/backend'; import { shell } from 'electron'; import { ScanOrchestrator } from './ScanOrchestrator'; import { SelectedItemToScanProps } from './Antivirus'; -import { getUserSystemPath } from '../../main/device/service'; import { cancelBackgroundScan } from './scanCronJob'; let currentScan: ScanOrchestrator | null = null; @@ -32,14 +32,14 @@ export class AntivirusScanService { } private static async getSystemScanPaths(): Promise { - const userSystemPath = await getUserSystemPath(); + const userSystemPath = homedir(); if (!userSystemPath) { logger.error({ tag: 'ANTIVIRUS', msg: 'Could not get user system path' }); return []; } - return [userSystemPath.path]; + return [userSystemPath]; } private static extractPaths(items: SelectedItemToScanProps[]): string[] { diff --git a/src/apps/main/antivirus/ClamAVDaemon.ts b/src/apps/main/antivirus/ClamAVDaemon.ts index c1d10e4f78..cdf11b1796 100644 --- a/src/apps/main/antivirus/ClamAVDaemon.ts +++ b/src/apps/main/antivirus/ClamAVDaemon.ts @@ -27,6 +27,126 @@ const MAX_SERVER_START_ATTEMPTS = 3; let lastRestartTime = 0; const MIN_RESTART_INTERVAL = 30000; // 30 seconds minimum between restarts +export const ensureDirectories = () => { + const dirs = [configDir, logDir, dbDir]; + for (const dir of dirs) { + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true, mode: DIRECTORY_MODE }); + logger.debug({ + tag: 'ANTIVIRUS', + msg: `[CLAM_AVD] Created directory: ${dir}`, + }); + } + } + + if (!fs.existsSync(logFilePath)) { + fs.writeFileSync(logFilePath, '', { mode: FILE_MODE }); + logger.debug({ + tag: 'ANTIVIRUS', + msg: `[CLAM_AVD] Created log file: ${logFilePath}`, + }); + } + + const resourceDbDir = path.join(RESOURCES_PATH, 'db'); + if (fs.existsSync(resourceDbDir)) { + const files = fs.readdirSync(resourceDbDir); + for (const file of files) { + const srcPath = path.join(resourceDbDir, file); + const destPath = path.join(dbDir, file); + if (!fs.existsSync(destPath)) { + fs.copyFileSync(srcPath, destPath); + logger.debug({ + tag: 'ANTIVIRUS', + msg: `[CLAM_AVD] Copied database file: ${file}`, + }); + } + } + } +}; + +/** + * Prepares the configuration files by replacing placeholder variables with actual paths + * This allows config files to be portable and work in any user environment + */ +export const prepareConfigFiles = (): { + clamdConfigPath: string; + freshclamConfigPath: string; +} => { + // Create temporary modified configs in the user's config directory + const tempClamdConfigPath = path.join(configDir, 'clamd.conf'); + const tempFreshclamConfigPath = path.join(configDir, 'freshclam.conf'); + + // Read the original config files from resources + const originalClamdConfig = fs.readFileSync(path.join(RESOURCES_PATH, '/etc/clamd.conf'), 'utf8'); + const originalFreshclamConfig = fs.readFileSync(path.join(RESOURCES_PATH, '/etc/freshclam.conf'), 'utf8'); + + const modifiedClamdConfig = originalClamdConfig + .replace('LOGFILE_PATH', logFilePath) + .replace('DATABASE_DIRECTORY', dbDir); + + const modifiedFreshclamConfig = originalFreshclamConfig + .replace('DATABASE_DIRECTORY', dbDir) + .replace('FRESHCLAM_LOG_PATH', freshclamLogPath); + + fs.writeFileSync(tempClamdConfigPath, modifiedClamdConfig); + fs.writeFileSync(tempFreshclamConfigPath, modifiedFreshclamConfig); + + logger.debug({ + tag: 'ANTIVIRUS', + msg: `[CLAM_AVD] Created modified config files in ${configDir}`, + }); + + return { + clamdConfigPath: tempClamdConfigPath, + freshclamConfigPath: tempFreshclamConfigPath, + }; +}; + +export const getEnvWithLibraryPath = () => { + const env = { ...process.env }; + const libPath = path.join(RESOURCES_PATH, 'lib'); + + env.LD_LIBRARY_PATH = `${libPath}:${env.LD_LIBRARY_PATH || ''}`; + + logger.debug({ + tag: 'ANTIVIRUS', + msg: `[CLAM_AVD] Setting library path to: ${libPath}`, + }); + return env; +}; + +export const checkClamdAvailability = (host = SERVER_HOST, port = SERVER_PORT): Promise => { + return new Promise((resolve) => { + const client = new net.Socket(); + + client.connect(port, host, () => { + client.end(); + resolve(true); + }); + + client.on('error', () => { + client.destroy(); + resolve(false); + }); + }); +}; + +const stopClamdServer = (): void => { + if (clamdProcess) { + logger.debug({ + tag: 'ANTIVIRUS', + msg: '[CLAM_AVD] Stopping clamd server...', + }); + clamdProcess.kill(); + clamdProcess = null; + } + + if (timer) { + clearTimeout(timer); + timer = null; + } +}; + const startClamdServer = async (): Promise => { logger.debug({ tag: 'ANTIVIRUS', @@ -145,97 +265,6 @@ const startClamdServer = async (): Promise => { } }; -const stopClamdServer = (): void => { - if (clamdProcess) { - logger.debug({ - tag: 'ANTIVIRUS', - msg: '[CLAM_AVD] Stopping clamd server...', - }); - clamdProcess.kill(); - clamdProcess = null; - } - - if (timer) { - clearTimeout(timer); - timer = null; - } -}; - -export const ensureDirectories = () => { - const dirs = [configDir, logDir, dbDir]; - for (const dir of dirs) { - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true, mode: DIRECTORY_MODE }); - logger.debug({ - tag: 'ANTIVIRUS', - msg: `[CLAM_AVD] Created directory: ${dir}`, - }); - } - } - - if (!fs.existsSync(logFilePath)) { - fs.writeFileSync(logFilePath, '', { mode: FILE_MODE }); - logger.debug({ - tag: 'ANTIVIRUS', - msg: `[CLAM_AVD] Created log file: ${logFilePath}`, - }); - } - - const resourceDbDir = path.join(RESOURCES_PATH, 'db'); - if (fs.existsSync(resourceDbDir)) { - const files = fs.readdirSync(resourceDbDir); - for (const file of files) { - const srcPath = path.join(resourceDbDir, file); - const destPath = path.join(dbDir, file); - if (!fs.existsSync(destPath)) { - fs.copyFileSync(srcPath, destPath); - logger.debug({ - tag: 'ANTIVIRUS', - msg: `[CLAM_AVD] Copied database file: ${file}`, - }); - } - } - } -}; - -/** - * Prepares the configuration files by replacing placeholder variables with actual paths - * This allows config files to be portable and work in any user environment - */ -export const prepareConfigFiles = (): { - clamdConfigPath: string; - freshclamConfigPath: string; -} => { - // Create temporary modified configs in the user's config directory - const tempClamdConfigPath = path.join(configDir, 'clamd.conf'); - const tempFreshclamConfigPath = path.join(configDir, 'freshclam.conf'); - - // Read the original config files from resources - const originalClamdConfig = fs.readFileSync(path.join(RESOURCES_PATH, '/etc/clamd.conf'), 'utf8'); - const originalFreshclamConfig = fs.readFileSync(path.join(RESOURCES_PATH, '/etc/freshclam.conf'), 'utf8'); - - const modifiedClamdConfig = originalClamdConfig - .replace('LOGFILE_PATH', logFilePath) - .replace('DATABASE_DIRECTORY', dbDir); - - const modifiedFreshclamConfig = originalFreshclamConfig - .replace('DATABASE_DIRECTORY', dbDir) - .replace('FRESHCLAM_LOG_PATH', freshclamLogPath); - - fs.writeFileSync(tempClamdConfigPath, modifiedClamdConfig); - fs.writeFileSync(tempFreshclamConfigPath, modifiedFreshclamConfig); - - logger.debug({ - tag: 'ANTIVIRUS', - msg: `[CLAM_AVD] Created modified config files in ${configDir}`, - }); - - return { - clamdConfigPath: tempClamdConfigPath, - freshclamConfigPath: tempFreshclamConfigPath, - }; -}; - const restartClamdServerIfNeeded = async (): Promise => { const now = Date.now(); @@ -283,35 +312,6 @@ const restartClamdServerIfNeeded = async (): Promise => { } }; -export const checkClamdAvailability = (host = SERVER_HOST, port = SERVER_PORT): Promise => { - return new Promise((resolve) => { - const client = new net.Socket(); - - client.connect(port, host, () => { - client.end(); - resolve(true); - }); - - client.on('error', () => { - client.destroy(); - resolve(false); - }); - }); -}; - -export const getEnvWithLibraryPath = () => { - const env = { ...process.env }; - const libPath = path.join(RESOURCES_PATH, 'lib'); - - env.LD_LIBRARY_PATH = `${libPath}:${env.LD_LIBRARY_PATH || ''}`; - - logger.debug({ - tag: 'ANTIVIRUS', - msg: `[CLAM_AVD] Setting library path to: ${libPath}`, - }); - return env; -}; - const waitForClamd = async ( timeout = DEFAULT_CLAMD_WAIT_TIMEOUT, interval = DEFAULT_CLAMD_CHECK_INTERVAL, diff --git a/src/apps/main/antivirus/ManualSystemScan.test.ts b/src/apps/main/antivirus/ManualSystemScan.test.ts index 8da2487760..d1e9f86521 100644 --- a/src/apps/main/antivirus/ManualSystemScan.test.ts +++ b/src/apps/main/antivirus/ManualSystemScan.test.ts @@ -7,9 +7,6 @@ import eventBus from '../event-bus'; import { Mock, Mocked } from 'vitest'; vi.mock('./Antivirus'); -vi.mock('../device/service', () => ({ - getUserSystemPath: vi.fn(() => '/home/user/Documents'), -})); vi.mock('./utils/getFilesFromDirectory', () => ({ getFilesFromDirectory: vi.fn(({ cb }: { dir: string; cb: (file: string) => Promise; signal: AbortSignal }) => { cb('/path/to/file.txt'); diff --git a/src/apps/main/antivirus/ManualSystemScan.ts b/src/apps/main/antivirus/ManualSystemScan.ts index 54d579e13e..1f16e654b7 100644 --- a/src/apps/main/antivirus/ManualSystemScan.ts +++ b/src/apps/main/antivirus/ManualSystemScan.ts @@ -1,6 +1,6 @@ /* eslint-disable max-len */ +import { homedir } from 'node:os'; import { ScannedItem } from '../database/entities/ScannedItem'; -import { getUserSystemPath } from '../device/service'; import { queue, QueueObject } from 'async'; import eventBus from '../event-bus'; import { Antivirus } from './Antivirus'; @@ -470,7 +470,7 @@ export class ManualSystemScan { msg: '[SYSTEM_SCAN] Starting full system scan', }); - const userSystemPath = await getUserSystemPath(); + const userSystemPath = homedir(); if (!userSystemPath) { logger.error({ tag: 'ANTIVIRUS', @@ -481,13 +481,13 @@ export class ManualSystemScan { logger.debug({ tag: 'ANTIVIRUS', - msg: `[SYSTEM_SCAN] Using user system path: ${userSystemPath.path}`, + msg: `[SYSTEM_SCAN] Using user system path: ${userSystemPath}`, }); this.manualQueue = queue(scan, 10); try { - const total = await countSystemFiles(userSystemPath.path); + const total = await countSystemFiles(userSystemPath); this.totalItemsToScan = total; logger.debug({ @@ -505,12 +505,12 @@ export class ManualSystemScan { ); if (total === 0) { - this.emitEmptyDirProgressEvent(userSystemPath.path, currentSession); + this.emitEmptyDirProgressEvent(userSystemPath, currentSession); return; } await getFilesFromDirectory({ - dir: userSystemPath.path, + dir: userSystemPath, cb: (filePath: string) => this.manualQueue!.pushAsync(filePath), signal: this.abortController.signal, }); diff --git a/src/apps/main/antivirus/scanCronJob.ts b/src/apps/main/antivirus/scanCronJob.ts index 49918f68fa..f396b8294b 100644 --- a/src/apps/main/antivirus/scanCronJob.ts +++ b/src/apps/main/antivirus/scanCronJob.ts @@ -1,4 +1,4 @@ -import { getUserSystemPath } from '../device/service'; +import { homedir } from 'node:os'; import { Antivirus } from './Antivirus'; import { getFilesFromDirectory } from './utils/getFilesFromDirectory'; import { transformItem } from './utils/transformItem'; @@ -23,7 +23,7 @@ async function scanInBackground() { const database = new DBScannerConnection(hashedFilesAdapter); const antivirus = await Antivirus.createInstance(); - const userSystemPath = await getUserSystemPath(); + const userSystemPath = homedir(); if (!userSystemPath) return; const scan = async (filePath: string) => { @@ -67,7 +67,7 @@ async function scanInBackground() { const backgroundQueue: QueueObject = queue(scan, BACKGROUND_MAX_CONCURRENCY); await getFilesFromDirectory({ - dir: userSystemPath.path, + dir: userSystemPath, cb: (file: string) => backgroundQueue.pushAsync(file), signal: abortController.signal, }); diff --git a/src/apps/main/antivirus/utils/errorUtils.ts b/src/apps/main/antivirus/utils/errorUtils.ts index 3b5056b85a..3b6a5ade98 100644 --- a/src/apps/main/antivirus/utils/errorUtils.ts +++ b/src/apps/main/antivirus/utils/errorUtils.ts @@ -16,14 +16,14 @@ export function getErrorMessage(error: unknown): string { if (error && typeof error === 'object') { if ('data' in error) { - const data = (error as any).data; - if (data && data.err) { + const data = (error as Record).data; + if (data && typeof data === 'object' && 'err' in data) { return `ClamAV Error: ${getErrorMessage(data.err)}`; } } - if ('message' in error && typeof (error as any).message === 'string') { - return (error as any).message; + if ('message' in error && typeof (error as Record).message === 'string') { + return (error as Record).message as string; } try { diff --git a/src/apps/main/antivirus/utils/isPermissionError.test.ts b/src/apps/main/antivirus/utils/isPermissionError.test.ts index fb3fba30bb..b0ba2d22cb 100644 --- a/src/apps/main/antivirus/utils/isPermissionError.test.ts +++ b/src/apps/main/antivirus/utils/isPermissionError.test.ts @@ -49,14 +49,14 @@ describe('isPermissionError', () => { nestedError.code = 'EACCES'; const clamError = new NodeClamError('Clam error'); - (clamError as any).data = { err: nestedError }; + clamError.data = { err: nestedError }; expect(isPermissionError(clamError)).toBe(true); }); it('should handle NodeClamError without nested error', () => { const clamError = new NodeClamError('Clam error'); - (clamError as any).data = {}; + clamError.data = {}; expect(isPermissionError(clamError)).toBe(false); }); diff --git a/src/apps/main/antivirus/utils/isPermissionError.ts b/src/apps/main/antivirus/utils/isPermissionError.ts index 9bbc212a04..8d0c397464 100644 --- a/src/apps/main/antivirus/utils/isPermissionError.ts +++ b/src/apps/main/antivirus/utils/isPermissionError.ts @@ -5,15 +5,16 @@ const PERMISSION_ERROR_CODES = ['EACCES', 'EPERM', 'EBUSY', 'ENOENT', 'ENOFILE', const PERMISSION_ERROR_MESSAGES = ['operation not permitted', 'access denied', 'access is denied']; export const isPermissionError = (err: unknown) => { - let error = err as any; if (!err || typeof err !== 'object') return false; - if (err instanceof NodeClamError && (err as any).data?.err instanceof Error) { - error = (err as any).data.err; + let error: { message?: string; code?: string } = err as { message?: string; code?: string }; + + if (err instanceof NodeClamError && err.data?.err instanceof Error) { + error = err.data.err; } - const msg = error.message?.toLowerCase() || ''; - const hasPermissionErrorCode = PERMISSION_ERROR_CODES.includes(error.code); + const msg = error.message?.toLowerCase() ?? ''; + const hasPermissionErrorCode = PERMISSION_ERROR_CODES.includes(error.code ?? ''); const hasPermissionErrorMessage = PERMISSION_ERROR_MESSAGES.some((m) => msg.includes(m)); return hasPermissionErrorCode || hasPermissionErrorMessage; diff --git a/src/apps/main/auth/deeplink/handle-deeplink.ts b/src/apps/main/auth/deeplink/handle-deeplink.ts index b2df9769a3..bdc1aa2ddb 100644 --- a/src/apps/main/auth/deeplink/handle-deeplink.ts +++ b/src/apps/main/auth/deeplink/handle-deeplink.ts @@ -8,6 +8,7 @@ import { setupRootFolder } from '../../virtual-root-folder/service'; import { processDeeplink } from './proccess-deeplink'; import { initializeCurrentUser } from './initialize_current_user'; import configStore from '../../config'; +import { PATHS } from '../../../../core/electron/paths'; type Props = { url: string; @@ -34,7 +35,7 @@ export async function handleDeeplink({ url }: Props) { logger.debug({ tag: 'AUTH', msg: 'Config restoration attempt on login', restored, uuid: userData.uuid }); } - await setupRootFolder(); + setupRootFolder(PATHS.ROOT_DRIVE_FOLDER); setIsLoggedIn(true); diff --git a/src/apps/main/auth/deeplink/setup-appimage-deeplink.ts b/src/apps/main/auth/deeplink/setup-appimage-deeplink.ts index f36907ef73..f0c5caf925 100644 --- a/src/apps/main/auth/deeplink/setup-appimage-deeplink.ts +++ b/src/apps/main/auth/deeplink/setup-appimage-deeplink.ts @@ -9,22 +9,6 @@ const execAsync = promisify(exec); const DESKTOP_FILE = join(homedir(), '.local/share/applications/internxt-appimage.desktop'); -export async function setupAppImageDeeplink() { - const appImagePath = process.env.APPIMAGE; - if (!appImagePath) return; - - await ensureDotDesktopUpdated(appImagePath); -} - -async function ensureDotDesktopUpdated(currentPath: string) { - const previousPath = await extractExecPath(); - - if (previousPath !== currentPath) { - await installDesktopFile(currentPath); - await registerProtocol(); - } -} - async function extractExecPath() { try { await access(DESKTOP_FILE); @@ -59,3 +43,19 @@ async function registerProtocol() { logger.error({ tag: 'AUTH', msg: 'Failed to register protocol:', err }); } } + +async function ensureDotDesktopUpdated(currentPath: string) { + const previousPath = await extractExecPath(); + + if (previousPath !== currentPath) { + await installDesktopFile(currentPath); + await registerProtocol(); + } +} + +export async function setupAppImageDeeplink() { + const appImagePath = process.env.APPIMAGE; + if (!appImagePath) return; + + await ensureDotDesktopUpdated(appImagePath); +} diff --git a/src/apps/main/auth/service.ts b/src/apps/main/auth/service.ts index 72d9ba8691..10ca32cb60 100644 --- a/src/apps/main/auth/service.ts +++ b/src/apps/main/auth/service.ts @@ -74,8 +74,8 @@ export function logout() { const { uuid } = user; saveConfig({ uuid }); + void driveServerModule.auth.logout(); resetConfig(); resetCredentials(); - void driveServerModule.auth.logout(); logger.debug({ msg: '[AUTH] User logged out' }); } diff --git a/src/apps/main/auto-launch/handlers.ts b/src/apps/main/auto-launch/handlers.ts deleted file mode 100644 index b7c4260e50..0000000000 --- a/src/apps/main/auto-launch/handlers.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { ipcMain } from 'electron'; - -import { isAutoLaunchEnabled, toggleAutoLaunch } from './service'; - -ipcMain.handle('is-auto-launch-enabled', isAutoLaunchEnabled); - -ipcMain.handle('toggle-auto-launch', toggleAutoLaunch); diff --git a/src/apps/main/auto-launch/linux-desktop-entry.ts b/src/apps/main/auto-launch/linux-desktop-entry.ts deleted file mode 100644 index afba1ab0e3..0000000000 --- a/src/apps/main/auto-launch/linux-desktop-entry.ts +++ /dev/null @@ -1,43 +0,0 @@ -import fs from 'fs'; -import os from 'os'; - -import packageJson from '../../../../package.json'; - -const fileName = `${packageJson.name}.desktop`; -const desktopFilePath = `${os.homedir()}/.config/autostart`; -const desktopFile = `${desktopFilePath}/${fileName}`; - -function createDesktopEntry() { - const fileContent = `[Desktop Entry] - Type=Application - Version=${packageJson.version} - Name=${packageJson.name} - Comment=${packageJson.name} startup script - Exec=${packageJson.name} --process-start-args --hidden - StartupNotify=false - Terminal=false - `; - - if (!fs.existsSync(desktopFilePath)) { - fs.mkdirSync(desktopFilePath); - } - - fs.writeFileSync(desktopFile, fileContent); -} - -function deleteDesktopEntry() { - fs.unlinkSync(desktopFile); -} - -export function desktopEntryIsPresent(): boolean { - return fs.existsSync(desktopFile); -} - -export function toggleDesktopEntry() { - if (desktopEntryIsPresent()) { - deleteDesktopEntry(); - - return; - } - createDesktopEntry(); -} diff --git a/src/apps/main/auto-launch/service.ts b/src/apps/main/auto-launch/service.ts deleted file mode 100644 index 022eaf013a..0000000000 --- a/src/apps/main/auto-launch/service.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { app } from 'electron'; -import Path from 'path'; - -import { desktopEntryIsPresent, toggleDesktopEntry } from './linux-desktop-entry'; - -const appFolder = Path.dirname(process.execPath); -const appExe = Path.resolve(appFolder, 'Internxt Drive.exe'); -const exeName = Path.basename(process.execPath); - -const path = process.platform === 'win32' ? appExe : undefined; -const args = - process.platform === 'win32' ? ['--processStart', `"${exeName}"`, '--process-start-args', '"--hidden"'] : undefined; - -export function isAutoLaunchEnabled() { - if (process.platform !== 'linux') { - const loginItem = app.getLoginItemSettings({ path, args }); - - return loginItem.openAtLogin; - } - - return desktopEntryIsPresent(); -} - -function toggleAppSettings() { - const currentSetting = isAutoLaunchEnabled(); - - app.setLoginItemSettings({ - path, - args, - openAtLogin: !currentSetting, - openAsHidden: true, - }); -} - -export function toggleAutoLaunch() { - if (process.platform !== 'linux') { - toggleAppSettings(); - } else { - toggleDesktopEntry(); - } -} diff --git a/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.test.ts b/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.test.ts new file mode 100644 index 0000000000..c3b7df3a87 --- /dev/null +++ b/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.test.ts @@ -0,0 +1,85 @@ +import * as setupAntivirusIPCModule from './setupAntivirusIPC'; +import * as antivirusManagerModule from '../../antivirus/antivirusManager'; +import { partialSpyOn, call, calls } from 'tests/vitest/utils.helper'; +import { trySetupAntivirusIpcAndInitialize } from './try-setup-antivirus-ipc-and-initialize'; +import { loggerMock } from 'tests/vitest/mocks.helper'; + +vi.mock('./setupAntivirusIPC', () => ({ + setupAntivirusIpc: vi.fn(), +})); +vi.mock('../../antivirus/antivirusManager', () => ({ + getAntivirusManager: vi.fn(), +})); + +const setupAntivirusIpcSpy = partialSpyOn(setupAntivirusIPCModule, 'setupAntivirusIpc'); +const getAntivirusManagerSpy = partialSpyOn(antivirusManagerModule, 'getAntivirusManager'); +const initializeMock = vi.fn(); + +describe('try-setup-antivirus-ipc-and-initialize', () => { + beforeEach(() => { + delete process.env.ENABLE_ANTIVIRUS; + setupAntivirusIpcSpy.mockReturnValue({}); + initializeMock.mockResolvedValue(undefined); + getAntivirusManagerSpy.mockReturnValue({ initialize: initializeMock }); + }); + + describe('when ENABLE_ANTIVIRUS is not set', () => { + it('sets up IPC and initializes antivirus', async () => { + // When + await trySetupAntivirusIpcAndInitialize(); + // Then + calls(setupAntivirusIpcSpy).toHaveLength(1); + calls(initializeMock).toHaveLength(1); + }); + }); + + describe('when ENABLE_ANTIVIRUS=true', () => { + it('sets up IPC and initializes antivirus', async () => { + // Given + process.env.ENABLE_ANTIVIRUS = 'true'; + // When + await trySetupAntivirusIpcAndInitialize(); + // Then + calls(setupAntivirusIpcSpy).toHaveLength(1); + calls(initializeMock).toHaveLength(1); + }); + }); + + describe('when ENABLE_ANTIVIRUS=false', () => { + it('skips IPC setup and initialization', async () => { + // Given + process.env.ENABLE_ANTIVIRUS = 'false'; + // When + await trySetupAntivirusIpcAndInitialize(); + // Then + calls(setupAntivirusIpcSpy).toHaveLength(0); + calls(initializeMock).toHaveLength(0); + }); + }); + + describe('when setupAntivirusIpc throws', () => { + it('logs the error without throwing', async () => { + // Given + const error = new Error('ipc-setup-error'); + setupAntivirusIpcSpy.mockImplementation(() => { + throw error; + }); + // When + await expect(trySetupAntivirusIpcAndInitialize()).resolves.toBeUndefined(); + // Then + call(loggerMock.error).toMatchObject({ tag: 'ANTIVIRUS', error }); + }); + }); + + describe('when initialize throws', () => { + it('logs the error without throwing', async () => { + // Given + const error = new Error('initialize-error'); + initializeMock.mockRejectedValue(error); + // When + await expect(trySetupAntivirusIpcAndInitialize()).resolves.toBeUndefined(); + // Then + call(loggerMock.error).toMatchObject({ tag: 'ANTIVIRUS', error }); + }); + }); +}); diff --git a/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.ts b/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.ts index 199b7b59b2..9ffa8477b9 100644 --- a/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.ts +++ b/src/apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize.ts @@ -3,10 +3,15 @@ import { setupAntivirusIpc } from './setupAntivirusIPC'; import { getAntivirusManager } from '../../antivirus/antivirusManager'; export async function trySetupAntivirusIpcAndInitialize() { + if (process.env.ENABLE_ANTIVIRUS === 'false') { + logger.debug({ tag: 'ANTIVIRUS', msg: '[Main] Antivirus is disabled (ENABLE_ANTIVIRUS=false), skipping setup' }); + return; + } + try { logger.debug({ tag: 'ANTIVIRUS', msg: '[Main] Setting up antivirus IPC handlers' }); setupAntivirusIpc(); - logger.debug({ msg: '[Main] Antivirus IPC handlers setup complete' }); + logger.debug({ tag: 'ANTIVIRUS', msg: '[Main] Antivirus IPC handlers setup complete' }); await getAntivirusManager().initialize(); } catch (error) { logger.error({ tag: 'ANTIVIRUS', msg: '[Main] Error setting up antivirus:', error }); diff --git a/src/apps/main/backups/add-backup.ts b/src/apps/main/backups/add-backup.ts deleted file mode 100644 index ac7f87cd76..0000000000 --- a/src/apps/main/backups/add-backup.ts +++ /dev/null @@ -1,26 +0,0 @@ -import configStore from '../config'; -import { createBackup } from './create-backup'; -import { DeviceModule } from '../../../backend/features/device/device.module'; -import { logger } from '@internxt/drive-desktop-core/build/backend'; -import { enableExistingBackup } from './enable-existing-backup'; -import { getPathFromDialog } from '../../../backend/features/backup/get-path-from-dialog'; - -export async function addBackup() { - const { error, data } = await DeviceModule.getOrCreateDevice(); - if (error) { - throw logger.error({ tag: 'BACKUPS', msg: 'Error adding backup: No device found' }); - } - - const chosenItem = await getPathFromDialog(); - if (!chosenItem || !chosenItem.path) return; - - const chosenPath = chosenItem.path; - const backupList = configStore.get('backupList'); - const existingBackup = backupList[chosenPath]; - - if (!existingBackup) { - return await createBackup({ pathname: chosenPath, device: data }); - } else { - return await enableExistingBackup(chosenPath, data); - } -} diff --git a/src/apps/main/backups/enable-existing-backup.test.ts b/src/apps/main/backups/enable-existing-backup.test.ts index f2c3152018..894614f26c 100644 --- a/src/apps/main/backups/enable-existing-backup.test.ts +++ b/src/apps/main/backups/enable-existing-backup.test.ts @@ -1,22 +1,22 @@ -import { enableExistingBackup } from './enable-existing-backup'; +import { enableExistingBackup } from '../../../backend/features/backup/enable-existing-backup'; import configStore from '../config'; import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; -import { createBackup } from './create-backup'; -import { migrateBackupEntryIfNeeded } from '../device/migrate-backup-entry-if-needed'; -import { app } from 'electron'; +import { createBackup } from '../../../backend/features/backup/create-backup'; +import { migrateBackupEntryIfNeeded } from '../../../backend/features/backup/migrate-backup-entry-if-needed'; +import { PATHS } from '../../../core/electron/paths'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; vi.mock('../config'); vi.mock('../../../infra/drive-server/services/folder/services/fetch-folder'); -vi.mock('./create-backup'); -vi.mock('../device/migrate-backup-entry-if-needed'); +vi.mock('../../../backend/features/backup/create-backup'); +vi.mock('../../../backend/features/backup/migrate-backup-entry-if-needed'); const mockedConfigStore = vi.mocked(configStore); const mockedFetchFolder = vi.mocked(fetchFolder); const mockedCreateBackup = vi.mocked(createBackup); const mockedMigrateBackupEntryIfNeeded = vi.mocked(migrateBackupEntryIfNeeded); -const mockedApp = vi.mocked(app); -describe('enableExistingBackup', () => { +describe('enable-existing-backup', () => { const mockDevice = { id: 123, bucket: 'test-bucket', @@ -26,7 +26,7 @@ describe('enableExistingBackup', () => { hasBackups: false, }; - const pathname = '/path/to/backup'; + const pathname = createAbsolutePath('/path/to/backup'); const existingBackupData = { folderUuid: 'existing-uuid', folderId: 456, @@ -48,51 +48,47 @@ describe('enableExistingBackup', () => { }; mockedConfigStore.get.mockReturnValue({ [pathname]: existingBackupData }); - mockedMigrateBackupEntryIfNeeded.mockResolvedValue(existingBackupData); - mockedFetchFolder.mockResolvedValue({ error: new Error('Folder not found') } as any); - mockedCreateBackup.mockResolvedValue(mockNewBackupInfo); + mockedFetchFolder.mockResolvedValue({ error: new Error('Folder not found') } as unknown as Awaited< + ReturnType + >); + mockedCreateBackup.mockResolvedValue({ data: mockNewBackupInfo } as unknown as Awaited< + ReturnType + >); - const result = await enableExistingBackup(pathname, mockDevice); + const result = await enableExistingBackup({ pathname, device: mockDevice }); - expect(mockedMigrateBackupEntryIfNeeded).toBeCalledWith(pathname, existingBackupData); + expect(mockedMigrateBackupEntryIfNeeded).not.toBeCalled(); expect(mockedFetchFolder).toBeCalledWith(existingBackupData.folderUuid); expect(mockedCreateBackup).toBeCalledWith({ pathname, device: mockDevice }); - expect(result).toStrictEqual(mockNewBackupInfo); + expect(result).toStrictEqual({ data: mockNewBackupInfo }); }); it('should enable existing backup when folder still exists', async () => { - const migratedBackup = { - folderUuid: 'migrated-uuid', - folderId: 456, - enabled: false, - }; - - const updatedBackupList = { - [pathname]: { ...migratedBackup, enabled: true }, - }; - mockedConfigStore.get .mockReturnValueOnce({ [pathname]: existingBackupData }) - .mockReturnValueOnce(updatedBackupList); + .mockReturnValueOnce({ [pathname]: existingBackupData }); - mockedMigrateBackupEntryIfNeeded.mockResolvedValue(migratedBackup); - mockedFetchFolder.mockResolvedValue({ data: { id: migratedBackup.folderId } } as any); - mockedApp.getPath.mockReturnValue('/tmp'); + mockedFetchFolder.mockResolvedValue({ data: { id: existingBackupData.folderId } } as unknown as Awaited< + ReturnType + >); - const result = await enableExistingBackup(pathname, mockDevice); + const result = await enableExistingBackup({ pathname, device: mockDevice }); - expect(mockedMigrateBackupEntryIfNeeded).toBeCalledWith(pathname, existingBackupData); - expect(mockedFetchFolder).toBeCalledWith(migratedBackup.folderUuid); - expect(mockedConfigStore.set).toBeCalledWith('backupList', updatedBackupList); - expect(mockedApp.getPath).toBeCalledWith('temp'); + expect(mockedMigrateBackupEntryIfNeeded).not.toBeCalled(); + expect(mockedFetchFolder).toBeCalledWith(existingBackupData.folderUuid); + expect(mockedConfigStore.set).toBeCalledWith('backupList', { + [pathname]: { ...existingBackupData, enabled: true }, + }); - expect(result).toEqual({ - folderUuid: migratedBackup.folderUuid, - folderId: migratedBackup.folderId, - pathname, - name: 'backup', - tmpPath: '/tmp', - backupsBucket: mockDevice.bucket, + expect(result).toStrictEqual({ + data: { + folderUuid: existingBackupData.folderUuid, + folderId: existingBackupData.folderId, + pathname, + name: 'backup', + tmpPath: PATHS.TEMPORAL_FOLDER, + backupsBucket: mockDevice.bucket, + }, }); }); }); diff --git a/src/apps/main/backups/enable-existing-backup.ts b/src/apps/main/backups/enable-existing-backup.ts deleted file mode 100644 index dd708c42a2..0000000000 --- a/src/apps/main/backups/enable-existing-backup.ts +++ /dev/null @@ -1,37 +0,0 @@ -import configStore from '../config'; -import { BackupInfo } from 'src/apps/backups/BackupInfo'; -import path from 'node:path'; -import { app } from 'electron'; -import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; -import { createBackup } from './create-backup'; -import { migrateBackupEntryIfNeeded } from '../device/migrate-backup-entry-if-needed'; -import { Device } from '../device/service'; - -export async function enableExistingBackup(pathname: string, device: Device) { - const backupList = configStore.get('backupList'); - const existingBackup = backupList[pathname]; - - const migratedBackup = await migrateBackupEntryIfNeeded(pathname, existingBackup); - - const { error } = await fetchFolder(migratedBackup.folderUuid); - - if (error) { - return await createBackup({ pathname, device }); - } - - const updatedBackupList = configStore.get('backupList'); - updatedBackupList[pathname].enabled = true; - configStore.set('backupList', updatedBackupList); - - const { base } = path.parse(pathname); - const backupInfo: BackupInfo = { - folderUuid: migratedBackup.folderUuid, - folderId: migratedBackup.folderId, - pathname, - name: base, - tmpPath: app.getPath('temp'), - backupsBucket: device.bucket, - }; - - return backupInfo; -} diff --git a/src/apps/main/database/collections/DriveFileCollection.ts b/src/apps/main/database/collections/DriveFileCollection.ts index a1fd5085f7..d988bb770a 100644 --- a/src/apps/main/database/collections/DriveFileCollection.ts +++ b/src/apps/main/database/collections/DriveFileCollection.ts @@ -26,7 +26,7 @@ export class DriveFilesCollection implements DatabaseCollectionAdapter getDevices()); - -ipcMain.handle('get-or-create-device', DeviceModule.getOrCreateDevice); - -ipcMain.handle('rename-device', (_, v) => DeviceModule.renameDevice(v)); - -ipcMain.handle('get-backups-from-device', (_, d, c?) => DeviceModule.getBackupsFromDevice(d, c)); - -ipcMain.handle('add-backup', () => addBackup()); - -ipcMain.handle('add-multiple-backups', (_, folderPaths) => createBackupsFromLocalPaths(folderPaths)); - -ipcMain.handle('download-backup', (_, v) => downloadBackup(v)); - -ipcMain.handle('delete-backup', (_, v, c?) => deleteBackup(v, c)); - -ipcMain.handle('delete-backups-from-device', (_, v, c?) => deleteBackupsFromDevice(v, c)); - -ipcMain.handle('disable-backup', (_, v) => disableBackup(v)); - -ipcMain.handle('change-backup-path', (_, v) => changeBackupPath(v)); - -ipcMain.on('add-device-issue', (_, e) => DeviceModule.addUnknownDeviceIssue(e)); - -ipcMain.handle('get-folder-path', () => getPathFromDialog()); diff --git a/src/apps/main/device/migrate-backup-entry-if-needed.ts b/src/apps/main/device/migrate-backup-entry-if-needed.ts deleted file mode 100644 index 4455c98382..0000000000 --- a/src/apps/main/device/migrate-backup-entry-if-needed.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { logger } from '@internxt/drive-desktop-core/build/backend/core/logger/logger'; -import configStore from '../config'; -import { getBackupFolderUuid } from '../../../infra/drive-server/services/folder/services/fetch-backup-folder-uuid'; - -export async function migrateBackupEntryIfNeeded( - pathname: string, - backup: { - enabled: boolean; - folderId: number; - folderUuid: string; - }, -): Promise<{ - enabled: boolean; - folderId: number; - folderUuid: string; -}> { - if (backup.folderUuid) return backup; - - try { - const getFolderUuidResponse = await getBackupFolderUuid({ folderId: String(backup.folderId) }); - if (getFolderUuidResponse.error) { - logger.error({ - tag: 'BACKUPS', - msg: `Failed to migrate backup entry for ${pathname}`, - error: getFolderUuidResponse.error, - }); - throw getFolderUuidResponse.error; - } - const { data: folderUuid } = getFolderUuidResponse; - backup.folderUuid = folderUuid; - - const backupList = configStore.get('backupList'); - backupList[pathname] = backup; - configStore.set('backupList', backupList); - logger.debug({ - tag: 'BACKUPS', - msg: `Successfully migrated backup entry for ${pathname} with UUID ${folderUuid}`, - }); - return backup; - } catch (error) { - logger.error({ - tag: 'BACKUPS', - msg: `Error migrating backup entry for ${pathname}`, - error, - }); - throw error; - } -} diff --git a/src/apps/main/device/service.ts b/src/apps/main/device/service.ts deleted file mode 100644 index cbf0acd667..0000000000 --- a/src/apps/main/device/service.ts +++ /dev/null @@ -1,359 +0,0 @@ -import { aes } from '@internxt/lib'; -import { dialog, IpcMainEvent } from 'electron'; -import { logger } from '@internxt/drive-desktop-core/build/backend'; -import os from 'os'; -import path from 'path'; -import fs, { PathLike } from 'fs'; -import { getUser } from '../auth/service'; -import configStore from '../config'; -import { BackupInfo } from '../../backups/BackupInfo'; -import { downloadFolderAsZip } from '../network/download'; -import { FolderTree } from '@internxt/sdk/dist/drive/storage/types'; -import { broadcastToWindows } from '../windows'; -import { ipcMain } from 'electron'; -import { PathTypeChecker } from '../../shared/fs/PathTypeChecker '; -import { driveServerModule } from '../../../infra/drive-server/drive-server.module'; -import { DeviceModule } from '../../../backend/features/device/device.module'; -import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; -import { getBackupFolderUuid } from '../../../infra/drive-server/services/folder/services/fetch-backup-folder-uuid'; -import { migrateBackupEntryIfNeeded } from './migrate-backup-entry-if-needed'; -import { createBackup } from '../backups/create-backup'; -import { addFolderToTrash } from '../../../infra/drive-server/services/folder/services/add-folder-to-trash'; -import { renameFolder } from '../../../infra/drive-server/services/folder/services/rename-folder'; -import { fetchFolderTreeByUuid } from '../../../infra/drive-server/services/folder/services/fetch-folder-tree-by-uuid'; -import { getPathFromDialog } from '../../../backend/features/backup/get-path-from-dialog'; -import { getCredentials } from '../auth/get-credentials'; - -export type Device = { - id: number; - uuid: string; - name: string; - bucket: string; - removed: boolean; - hasBackups: boolean; -}; - -export async function getDevices(): Promise> { - try { - const response = await driveServerModule.backup.getDevices(); - if (response.isLeft()) { - return []; - } else { - const devices = response.getRight(); - return devices.filter(({ removed, hasBackups }) => !removed && hasBackups).map((device) => device); - } - } catch { - return []; - } -} - -export async function fetchFolderTree(folderUuid: string): Promise<{ - tree: FolderTree; - folderDecryptedNames: Record; - fileDecryptedNames: Record; - size: number; -}> { - const { data, error } = await fetchFolderTreeByUuid({ uuid: folderUuid }); - - if (error) { - throw new Error('Unsuccesful request to fetch folder tree'); - } - - const { tree } = data as { tree: FolderTree }; - - let size = 0; - const folderDecryptedNames: Record = {}; - const fileDecryptedNames: Record = {}; - - // ! Decrypts folders and files names - const pendingFolders = [tree]; - while (pendingFolders.length > 0) { - const currentTree = pendingFolders[0]; - const { folders, files } = { - folders: currentTree.children, - files: currentTree.files, - }; - - folderDecryptedNames[currentTree.id] = currentTree.plainName; - - for (const file of files) { - fileDecryptedNames[file.id] = aes.decrypt(file.name, `${process.env.NEW_CRYPTO_KEY}-${file.folderId}`); - size += Number(file.size); - } - - pendingFolders.shift(); - - // * Adds current folder folders to pending - pendingFolders.push(...folders); - } - - return { tree, folderDecryptedNames, fileDecryptedNames, size }; -} - -export async function downloadBackup(device: Device): Promise { - const chosenItem = await getPathFromDialog(); - if (!chosenItem || !chosenItem.path) { - return; - } - - const chosenPath = chosenItem.path; - logger.debug({ - tag: 'BACKUPS', - msg: '[BACKUPS] Downloading Device', - deviceName: device.name, - chosenPath, - }); - - const date = new Date(); - const now = - String(date.getFullYear()) + - String(date.getMonth() + 1) + - String(date.getDay()) + - String(date.getHours()) + - String(date.getMinutes()) + - String(date.getSeconds()); - const zipFilePath = chosenPath + 'Backup_' + now + '.zip'; - - const abortController = new AbortController(); - - const abortListener = (_: IpcMainEvent, abortDeviceUuid: string) => { - if (abortDeviceUuid === device.uuid) { - abortController.abort(); - } - }; - - const listenerName = 'abort-download-backups-' + device.uuid; - - const removeListenerIpc = ipcMain.on(listenerName, abortListener); - - try { - await downloadDeviceBackupZip(device, zipFilePath, { - updateProgress: (progress: number) => { - if (abortController?.signal.aborted) return; - broadcastToWindows('backup-download-progress', { - id: device.uuid, - progress, - }); - }, - abortController, - }); - } catch (_) { - // Try to delete zip if download backup has failed - try { - fs.unlinkSync(zipFilePath); - } catch (_) { - /* noop */ - } - } - - removeListenerIpc.removeListener(listenerName, abortListener); -} - -async function downloadDeviceBackupZip( - device: Device, - path: PathLike, - { - updateProgress, - abortController, - }: { - updateProgress: (progress: number) => void; - abortController?: AbortController; - }, -): Promise { - if (!device.id) { - throw new Error('This backup has not been uploaded yet'); - } - - const user = getUser(); - if (!user) { - throw new Error('No saved user'); - } - - const { data: folder, error } = await fetchFolder(device.uuid); - if (error) { - throw new Error('Unsuccesful request to fetch folder'); - } - if (!folder || !folder.uuid || folder.uuid.length === 0) { - throw new Error('No backup data found'); - } - - const networkApiUrl = process.env.BRIDGE_URL; - const bridgeUser = user.bridgeUser; - const bridgePass = user.userId; - const { mnemonic } = getCredentials(); - - await downloadFolderAsZip( - device.name, - networkApiUrl!, - folder.uuid, - path, - { - bridgeUser, - bridgePass, - encryptionKey: mnemonic, - }, - { - abortController, - updateProgress, - }, - ); -} - -export async function deleteBackup(backup: BackupInfo, isCurrent?: boolean): Promise { - const { error } = await addFolderToTrash(backup.folderUuid); - if (error) { - throw new Error('Request to delete backup wasnt succesful'); - } - - if (isCurrent) { - const backupsList = configStore.get('backupList'); - - const entriesFiltered = Object.entries(backupsList).filter(([, b]) => b.folderId !== backup.folderId); - - const backupListFiltered = Object.fromEntries(entriesFiltered); - - configStore.set('backupList', backupListFiltered); - } -} - -export async function deleteBackupsFromDevice(device: Device, isCurrent?: boolean): Promise { - const backups = await DeviceModule.getBackupsFromDevice(device, isCurrent); - logger.debug({ tag: 'BACKUPS', msg: '[BACKUPS] Deleting backups from device', count: backups.length }); - logger.debug({ tag: 'BACKUPS', msg: '[BACKUPS] Backups details', backups }); - - let deletionPromises: Promise[] = backups.map((backup) => deleteBackup(backup, isCurrent)); - await Promise.all(deletionPromises); - - // delete backups that are not in the backup list - const { tree } = await fetchFolderTree(device.uuid); - const foldersToDelete = tree.children.filter((folder) => !backups.some((backup) => backup.folderId === folder.id)); - deletionPromises = foldersToDelete.map((folder) => addFolderToTrash(folder.uuid)); - await Promise.all(deletionPromises); -} - -export async function disableBackup(backup: BackupInfo): Promise { - const backupsList = configStore.get('backupList'); - const pathname = findBackupPathnameFromId(backup.folderId)!; - - try { - backupsList[pathname].enabled = false; - configStore.set('backupList', backupsList); - - const { size } = await fetchFolderTree(backup.folderUuid); - - if (size === 0) { - await deleteBackup(backup, true); - } - } catch (error) { - logger.error({ tag: 'BACKUPS', msg: 'Error disabling backup folder', error }); - } -} - -export async function changeBackupPath(currentPath: string): Promise { - const backupsList = configStore.get('backupList'); - const existingBackup = backupsList[currentPath]; - - if (!existingBackup) { - throw new Error('Backup no longer exists'); - } - - const chosen = await getPathFromDialog(); - - if (!chosen || !chosen.path) { - return false; - } - - const chosenPath = chosen.path; - if (backupsList[chosenPath]) { - throw new Error('A backup with this path already exists'); - } - const oldFolderName = path.basename(currentPath); - const newFolderName = path.basename(chosenPath); - if (oldFolderName !== newFolderName) { - logger.debug({ tag: 'BACKUPS', msg: 'Renaming backup', existingBackup }); - const getFolderUuidResponse = await getBackupFolderUuid({ folderId: String(existingBackup.folderId) }); - if (getFolderUuidResponse.error) { - throw getFolderUuidResponse.error; - } - const { data: folderUuid } = getFolderUuidResponse; - - const res = await renameFolder({ uuid: folderUuid, plainName: newFolderName }); - - if (res.error) { - throw new Error('Error in the request to rename a backup'); - } - - delete backupsList[currentPath]; - - const migratedExistingBackup = await migrateBackupEntryIfNeeded(chosenPath, existingBackup); - backupsList[chosenPath] = migratedExistingBackup; - - configStore.set('backupList', backupsList); - - return true; - } - return false; -} - -export function findBackupPathnameFromId(id: number): string | undefined { - const backupsList = configStore.get('backupList'); - const entryfound = Object.entries(backupsList).find(([, b]) => b.folderId === id); - - return entryfound?.[0]; -} - -export async function createBackupsFromLocalPaths(folderPaths: string[]) { - configStore.set('backupsEnabled', true); - - const { error, data } = await DeviceModule.getOrCreateDevice(); - if (error) { - throw error; - } - const operations = folderPaths.map((folderPath) => createBackup({ pathname: folderPath, device: data })); - - await Promise.all(operations); -} - -export type PathInfo = { - path: string; - itemName: string; - isDirectory?: boolean; -}; - -export async function getMultiplePathsFromDialog(allowFiles = false): Promise { - const result = await dialog.showOpenDialog({ - properties: ['multiSelections' as const, ...(allowFiles ? (['openFile'] as const) : ['openDirectory' as const])], - }); - - if (result.canceled || result.filePaths.length === 0) { - return null; - } - - const paths = await Promise.all( - result.filePaths.map(async (filePath) => { - const isFolder = await PathTypeChecker.isFolder(filePath); - const itemName = path.basename(filePath); - return { - path: filePath, - itemName, - isDirectory: isFolder, - }; - }), - ); - - return paths; -} - -export async function getUserSystemPath(): Promise { - const filePath = os.homedir(); - if (!filePath) return; - - const isFolder = await PathTypeChecker.isFolder(filePath); - const itemName = path.basename(filePath); - - return { - path: filePath, - itemName, - isDirectory: isFolder, - }; -} diff --git a/src/apps/main/event-bus.ts b/src/apps/main/event-bus.ts index d2d0fa05b4..8f5bced9c6 100644 --- a/src/apps/main/event-bus.ts +++ b/src/apps/main/event-bus.ts @@ -2,8 +2,6 @@ import { EventEmitter } from 'events'; import { ProgressData } from './antivirus/types'; import { UserAvailableProducts } from '@internxt/drive-desktop-core/build/backend'; -class EventBus extends EventEmitter {} - interface Events { APP_IS_READY: () => void; @@ -37,10 +35,14 @@ interface Events { USER_AVAILABLE_PRODUCTS_UPDATED: (products: UserAvailableProducts) => void; } -declare interface EventBus { - on(event: U, listener: Events[U]): this; +class EventBus extends EventEmitter { + on(event: U, listener: Events[U]): this { + return super.on(event, listener); + } - emit(event: U, ...args: Parameters): boolean; + emit(event: U, ...args: Parameters): boolean { + return super.emit(event, ...args); + } } const eventBus = new EventBus(); diff --git a/src/apps/main/interface.d.ts b/src/apps/main/interface.d.ts index 9b59155dea..6b62365cd0 100644 --- a/src/apps/main/interface.d.ts +++ b/src/apps/main/interface.d.ts @@ -2,7 +2,7 @@ import { BackupInfo } from './../backups/BackupInfo'; import { Usage } from '../../backend/features/usage/usage.types'; import { Result } from './../../context/shared/domain/Result'; import { UserAvailableProducts } from '@internxt/drive-desktop-core/build/backend'; -import { Device } from './device/service'; +import { Device } from '../../backend/features/backup/types/Device'; import { AuthAccessResponseViewModel, AuthLoginResponseViewModel, @@ -11,6 +11,7 @@ import { import { TLoggerBody } from '@internxt/drive-desktop-core/build/backend'; import { CleanerReport, CleanerViewModel, CleanupProgress } from '../../backend/features/cleaner/cleaner.types'; import { BackupErrorRecord } from '../../backend/features/backup/backup.types'; +import { AbsolutePath } from '../../context/local/localFile/infrastructure/AbsolutePath'; import { StoredValues } from './config/service.types'; import { AppStore } from './config'; import { ConfigTheme } from '../shared/types/Theme'; @@ -37,13 +38,27 @@ export interface IElectronAPI { getBackupsFromDevice: (device: Device, isCurrent?: boolean) => Promise>; - addBackup: () => Promise; + addBackup: () => Promise>; + + changeBackupPath: ({ + currentPath, + newPath, + }: { + currentPath: AbsolutePath; + newPath: AbsolutePath; + }) => Promise>; + + startBackupsProcess: () => void; + + getFolderPath: () => Promise<{ path: AbsolutePath; itemName: string } | null>; + + addBackupsFromLocalPaths: (folderPaths: string[]) => Promise>; deleteBackupsFromDevice: (device: Device, isCurrent?: boolean) => Promise; disableBackup: (backup: BackupInfo) => Promise; - downloadBackup: (device: Device) => Promise; + downloadBackup: (device: Device, pathname: AbsolutePath) => Promise; abortDownloadBackups: (deviceId: string) => void; @@ -64,12 +79,18 @@ export interface IElectronAPI { onUserLoggedInChanged(func: (value: boolean) => void): void; + closeWindow(): void; + + minimizeWindow(): void; + onRemoteChanges(func: (value: import('../main/realtime').EventPayload) => void): () => void; openVirtualDriveFolder(): Promise; openProcessIssuesWindow(): void; + openLogs(): void; + openSettingsWindow(section?: 'BACKUPS' | 'GENERAL' | 'ACCOUNT' | 'ANTIVIRUS' | 'CLEANER'): void; logout(): void; @@ -138,10 +159,10 @@ export interface IElectronAPI { onUpdateAvailable(callback: (info: { version: string }) => void): () => void; getRemoteSyncStatus(): Promise; onRemoteSyncStatusChange(callback: (status: import('./remote-sync/helpers').RemoteSyncStatus) => void): () => void; - - pathChanged(path: string): void; - isUserLoggedIn(): Promise; - onUserLoggedInChanged(func: (value: boolean) => void): void; + getVirtualDriveStatus(): Promise; + onVirtualDriveStatusChange( + callback: (event: { status: import('../drive/fuse/FuseDriveStatus').FuseDriveStatus }) => void, + ): () => void; } declare global { diff --git a/src/apps/main/logging/setup-app-log-routing.test.ts b/src/apps/main/logging/setup-app-log-routing.test.ts new file mode 100644 index 0000000000..62120f9460 --- /dev/null +++ b/src/apps/main/logging/setup-app-log-routing.test.ts @@ -0,0 +1,76 @@ +import { resolveAppLogFilePath } from './setup-app-log-routing'; + +type Pops = { + header: string; + msg: string; +}; + +function createSerializedLogMessage({ header, msg }: Pops) { + return `{ header: '${header}', msg: '${msg}' }`; +} + +describe('setup-app-log-routing', () => { + const logsPath = '/tmp/internxt-logs'; + + describe('resolveAppLogFilePath', () => { + it('should route antivirus debug logs to the dedicated antivirus file', () => { + // When + const result = resolveAppLogFilePath({ + logsPath, + message: { + level: 'debug', + data: [createSerializedLogMessage({ header: ' - b - anti', msg: '[CLAM_AVD] Starting clamd server...' })], + }, + }); + + // Then + expect(result).toBe('/tmp/internxt-logs/drive-antivirus.log'); + }); + + it('should keep important logs in the important file even for antivirus entries', () => { + // When + const result = resolveAppLogFilePath({ + logsPath, + message: { + level: 'error', + data: [ + createSerializedLogMessage({ header: 'E - b - anti', msg: '[CLAM_AVD] clamd process unexpectedly exited' }), + ], + }, + }); + + // Then + expect(result).toBe('/tmp/internxt-logs/drive-important.log'); + }); + + it('should keep non-antivirus logs in the main log file', () => { + // When + const result = resolveAppLogFilePath({ + logsPath, + message: { + level: 'debug', + data: [createSerializedLogMessage({ header: ' - b - auth', msg: 'Starting app' })], + }, + }); + + // Then + expect(result).toBe('/tmp/internxt-logs/drive.log'); + }); + + it('should route antivirus messages even when the serialized header is missing the antivirus tag', () => { + // When + const result = resolveAppLogFilePath({ + logsPath, + message: { + level: 'debug', + data: [ + createSerializedLogMessage({ header: ' - b - ', msg: '[Main] Antivirus IPC handlers setup complete' }), + ], + }, + }); + + // Then + expect(result).toBe('/tmp/internxt-logs/drive-antivirus.log'); + }); + }); +}); diff --git a/src/apps/main/logging/setup-app-log-routing.ts b/src/apps/main/logging/setup-app-log-routing.ts new file mode 100644 index 0000000000..39703545f1 --- /dev/null +++ b/src/apps/main/logging/setup-app-log-routing.ts @@ -0,0 +1,84 @@ +import { createRequire } from 'node:module'; +import { join } from 'node:path'; + +type Pops = { + logsPath: string; +}; + +type LogMessage = { + data?: unknown[]; + level?: string; +}; + +type ElectronLogModule = { + transports: { + file: { + resolvePathFn: (variables: unknown, message?: LogMessage) => string; + resolvePath?: (variables: unknown, message?: LogMessage) => string; + }; + }; +}; + +const DEFAULT_LOG_FILE_NAME = 'drive.log'; +const IMPORTANT_LOG_FILE_NAME = 'drive-important.log'; +const ANTIVIRUS_LOG_FILE_NAME = 'drive-antivirus.log'; +const ANTIVIRUS_HEADER_PATTERN = /header:\s'[^']*-\santi'/; +const ANTIVIRUS_MESSAGE_PATTERNS = [ + /\[CLAM_AVD\]/, + /\[freshclam/i, + /\[ANTIVIRUS_MANAGER\]/, + /window\.electron\.antivirus/i, + /\bantivirus\b/i, +]; +const ELECTRON_LOG_MODULE_IDS = ['electron-log', '@internxt/drive-desktop-core/node_modules/electron-log']; +const moduleRequire = createRequire(__filename); + +function isSerializedAntivirusLogEntry({ value }: { value: unknown }) { + if (typeof value !== 'string') { + return false; + } + + return ANTIVIRUS_HEADER_PATTERN.test(value) || ANTIVIRUS_MESSAGE_PATTERNS.some((pattern) => pattern.test(value)); +} + +function isAntivirusLogMessage({ message }: { message?: LogMessage }) { + return message?.data?.some((value) => isSerializedAntivirusLogEntry({ value })) ?? false; +} + +export function resolveAppLogFilePath({ logsPath, message }: Pops & { message?: LogMessage }) { + if (message?.level === 'error') { + return join(logsPath, IMPORTANT_LOG_FILE_NAME); + } + + if (isAntivirusLogMessage({ message })) { + return join(logsPath, ANTIVIRUS_LOG_FILE_NAME); + } + + return join(logsPath, DEFAULT_LOG_FILE_NAME); +} + +function getElectronLogModules() { + const modules = new Map(); + + for (const moduleId of ELECTRON_LOG_MODULE_IDS) { + try { + const electronLog = moduleRequire(moduleId) as ElectronLogModule; + const resolvedModulePath = moduleRequire.resolve(moduleId); + modules.set(resolvedModulePath, electronLog); + } catch { + continue; + } + } + + return [...modules.values()]; +} + +export function setupAppLogRouting({ logsPath }: Pops) { + for (const electronLog of getElectronLogModules()) { + electronLog.transports.file.resolvePathFn = (_, message) => { + return resolveAppLogFilePath({ logsPath, message }); + }; + + electronLog.transports.file.resolvePath = electronLog.transports.file.resolvePathFn; + } +} diff --git a/src/apps/main/main.ts b/src/apps/main/main.ts index 1bb88a869d..d443d3e035 100644 --- a/src/apps/main/main.ts +++ b/src/apps/main/main.ts @@ -2,29 +2,25 @@ import 'reflect-metadata'; import 'core-js/stable'; import 'regenerator-runtime/runtime'; -// Only effective during development -// the variables are injectedif (process.env.NODE_ENV === 'production') { - -// via webpack in prod import 'dotenv/config'; -// ***** APP BOOTSTRAPPING ****************************************************** // + import { PATHS } from '../../core/electron/paths'; import { setupElectronLog } from '@internxt/drive-desktop-core/build/backend'; +import { setupAppLogRouting } from './logging/setup-app-log-routing'; -setupElectronLog({ - logsPath: PATHS.LOGS, -}); +setupElectronLog({ logsPath: PATHS.LOGS }); +setupAppLogRouting({ logsPath: PATHS.LOGS }); +// Side-effect handlers registration. import './virtual-root-folder/handlers'; -import './auto-launch/handlers'; +import '../../core/auto-launch/handlers'; import './auth/handlers'; import './windows/settings'; import './windows/process-issues'; import './issues/virtual-drive'; -import './device/handlers'; +import '../../backend/features/backup/ipc/device-ipc-handlers'; import './../../backend/features/usage/handlers/handlers'; import './realtime'; -import './tray/tray'; import './tray/handlers'; import './fordwardToWindows'; import './analytics/handlers'; @@ -33,36 +29,15 @@ import './config/handlers'; import './app-info/handlers'; import './remote-sync/handlers'; import './../../backend/features/cleaner/ipc/handlers'; -import './virtual-drive'; -import { app, ipcMain } from 'electron'; -import eventBus from './event-bus'; -import { AppDataSource, resetAppDataSourceOnLogout } from './database/data-source'; -import { getIsLoggedIn } from './auth/handlers'; -import { getOrCreateWidged, getWidget, setBoundsOfWidgetByPath } from './windows/widget'; -import { createAuthWindow, getAuthWindow } from './windows/auth'; -import configStore from './config'; -import { getTray, setTrayStatus } from './tray/tray'; -import { broadcastToWindows } from './windows'; -import { openOnboardingWindow } from './windows/onboarding'; -import { setupThemeListener, getTheme } from '../../core/theme'; -// import { installNautilusExtension } from './nautilus-extension/install'; -// import { uninstallNautilusExtension } from './nautilus-extension/uninstall'; -import dns from 'node:dns'; -import { registerAvailableUserProductsHandlers } from '../../backend/features/payments/ipc/register-available-user-products-handlers'; -import { getAntivirusManager } from './antivirus/antivirusManager'; +import { app } from 'electron'; import { registerAuthIPCHandlers } from '../../infra/ipc/auth-ipc-handlers'; import { registerQuitHandler } from '../../core/quit/quit.handler'; import { logger } from '@internxt/drive-desktop-core/build/backend'; -import { trySetupAntivirusIpcAndInitialize } from './background-processes/antivirus/try-setup-antivirus-ipc-and-initialize'; -import { getUserAvailableProductsAndStore } from '../../backend/features/payments/services/get-user-available-products-and-store'; -import { handleDeeplink } from './auth/deeplink/handle-deeplink'; -import { setupAppImageDeeplink } from './auth/deeplink/setup-appimage-deeplink'; import { version, release } from 'node:os'; import { INTERNXT_VERSION } from '../../core/utils/utils'; -import { registerBackupHandlers } from '../../backend/features/backup/register-backup-handlers'; -import { startBackupsIfAvailable } from '../../backend/features/backup/start-backups-if-available'; -import { checkForUpdates } from './auto-update/check-for-updates'; +import { bootstrapMainProcess } from '../../core/bootstrap/main-process-bootstrap'; +import { registerVirtualDriveHandlers } from '../../backend/features/virtual-drive/ipc/handlers'; const gotTheLock = app.requestSingleInstanceLock(); app.setAsDefaultProtocolClient('internxt'); @@ -73,6 +48,7 @@ if (!gotTheLock) { registerAuthIPCHandlers(); registerQuitHandler(); +registerVirtualDriveHandlers(); logger.debug({ msg: 'Starting app', @@ -82,165 +58,4 @@ logger.debug({ osRelease: release(), }); -let pendingUpdateInfo: { version: string } | null = null; - -ipcMain.handle('get-update-status', () => pendingUpdateInfo); - -if (process.env.NODE_ENV === 'production') { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const sourceMapSupport = require('source-map-support'); - sourceMapSupport.install(); -} - -if (process.env.NODE_ENV === 'development') { - // eslint-disable-next-line @typescript-eslint/no-var-requires - require('electron-debug')({ showDevTools: false }); -} - -app - .whenReady() - .then(async () => { - /** - * v.2.5.1 - * Esteban Galvis Triana - * .AppImage users may experience login issues because the deeplink protocol - * is not registered automatically, unlike with .deb packages. - * This function manually registers the protocol handler for .AppImage installations. - */ - await setupAppImageDeeplink(); - /** - * TODO: Nautilus extension disabled temporarily - * v.2.5.4 - * Esteban Galvis Triana - * The Nautilus extension will be temporarily disabled - * while the exact behavior of the context menu options is being determined. - */ - // await installNautilusExtension(); - setupThemeListener(); - - eventBus.emit('APP_IS_READY'); - const isLoggedIn = getIsLoggedIn(); - - if (!isLoggedIn) { - await createAuthWindow(); - setTrayStatus('IDLE'); - } - - await checkForUpdates({ - currentVersion: INTERNXT_VERSION, - onUpdateAvailable: (updateInfo) => { - pendingUpdateInfo = updateInfo; - broadcastToWindows('update-available', updateInfo); - }, - }); - registerAvailableUserProductsHandlers(); - }) - .catch((exc) => logger.error({ msg: 'Error starting app', exc })); - -app.on('second-instance', async (_, argv) => { - logger.debug({ tag: 'AUTH', msg: 'Deeplink received on second instance, processing...' }); - const deeplinkArg = argv.find((arg) => arg.startsWith('internxt://')); - if (!deeplinkArg) return; - - try { - await handleDeeplink({ url: deeplinkArg }); - } catch (error) { - logger.error({ tag: 'AUTH', msg: 'Error handling deeplink', error }); - } -}); - -eventBus.on('WIDGET_IS_READY', () => { - registerBackupHandlers(); - startBackupsIfAvailable(); -}); - -eventBus.on('USER_LOGGED_IN', async () => { - try { - if (!AppDataSource.isInitialized) { - await AppDataSource.initialize(); - eventBus.emit('APP_DATA_SOURCE_INITIALIZED'); - } - - getAuthWindow()?.hide(); - - getTheme(); - - setTrayStatus('IDLE'); - const widget = await getOrCreateWidged(); - const tray = getTray(); - if (widget && tray) { - setBoundsOfWidgetByPath(widget, tray); - } - - setTimeout(() => { - const authWin = getAuthWindow(); - if (authWin && !authWin.isDestroyed()) { - authWin.destroy(); - } - }, 300); - - const lastOnboardingShown = configStore.get('lastOnboardingShown'); - - if (!lastOnboardingShown) { - openOnboardingWindow(); - } else if (widget) { - widget.show(); - } - await getUserAvailableProductsAndStore(); - await trySetupAntivirusIpcAndInitialize(); - } catch (error) { - logger.error({ - msg: 'Error on main process while handling USER_LOGGED_IN event:', - error, - }); - } -}); - -eventBus.on('USER_LOGGED_OUT', async () => { - setTrayStatus('IDLE'); - const widget = getWidget(); - - if (widget) { - widget?.hide(); - - void getAntivirusManager().shutdown(); - } - - await createAuthWindow(); - - if (widget) { - widget.destroy(); - } - await resetAppDataSourceOnLogout(); - - // await uninstallNautilusExtension(); -}); - -process.on('uncaughtException', (error) => { - /** - * v.2.5.1 - * Esteban Galvis Triana - * EPIPE errors close stdout, so they must be handled specially to avoid infinite logging loops. - */ - if ('code' in error && error.code === 'EPIPE') return; - - if (error.name === 'AbortError') { - logger.debug({ msg: 'Fetch request was aborted' }); - } else { - try { - logger.error({ msg: 'Uncaught exception in main process: ', error }); - } catch { - return; - } - } -}); - -ipcMain.handle('check-internet-connection', async () => { - return new Promise((resolve) => { - dns.lookup('google.com', (err) => { - resolve(!err); - }); - - setTimeout(() => resolve(false), 3000); - }); -}); +bootstrapMainProcess(); diff --git a/src/apps/main/nautilus-extension/service.ts b/src/apps/main/nautilus-extension/service.ts index 483ccf3d32..335f8e6d7a 100644 --- a/src/apps/main/nautilus-extension/service.ts +++ b/src/apps/main/nautilus-extension/service.ts @@ -15,13 +15,7 @@ function extensionFile() { if (process.env.NODE_ENV === 'development') { return path.join(__dirname, '../../../../assets/python-nautilus', name); } else { - return path.join( - //@ts-ignore - process.resourcesPath, - 'assets', - 'python-nautilus', - name, - ); + return path.join(process.resourcesPath, 'assets', 'python-nautilus', name); } } diff --git a/src/apps/main/network/NetworkFacade.ts b/src/apps/main/network/NetworkFacade.ts index 9923790202..3f96d71a7a 100644 --- a/src/apps/main/network/NetworkFacade.ts +++ b/src/apps/main/network/NetworkFacade.ts @@ -1,5 +1,6 @@ import { Environment } from '@internxt/inxt-js'; import { Network as NetworkModule } from '@internxt/sdk'; +import { BinaryData } from '@internxt/sdk/dist/network/types'; import { createDecipheriv, randomBytes } from 'crypto'; import { validateMnemonic } from 'bip39'; import { downloadFile } from '@internxt/sdk/dist/network/download'; @@ -15,6 +16,27 @@ interface DownloadOptions { downloadingCallback?: DownloadProgressCallback; } +export function convertToReadableStream(readStream: Readable): ReadableStream { + return new ReadableStream({ + start(controller) { + readStream.on('data', (chunk) => { + controller.enqueue(new Uint8Array(chunk)); + }); + + readStream.on('end', () => { + controller.close(); + }); + + readStream.on('error', (err) => { + controller.error(err); + }); + }, + cancel() { + readStream.destroy(); + }, + }); +} + /** * The entry point for interacting with the network */ @@ -67,9 +89,12 @@ export class NetworkFacade { } }, async (_, key, iv, fileSize) => { + const toUint8Array = (data: BinaryData | Buffer): Uint8Array => + Uint8Array.from(Buffer.isBuffer(data) ? data : Buffer.from(data.toString('hex'), 'hex')); + const cipherKey = options?.key ?? key; const decryptedStream = getDecryptedStream( encryptedContentStreams, - createDecipheriv('aes-256-ctr', options?.key || (key as Buffer), iv as Buffer), + createDecipheriv('aes-256-ctr', toUint8Array(cipherKey), toUint8Array(iv)), ); fileStream = buildProgressStream(decryptedStream, (readBytes) => { @@ -83,28 +108,3 @@ export class NetworkFacade { return fileStream!; } } - -export function convertToReadableStream(readStream: Readable): ReadableStream { - return new ReadableStream({ - start(controller) { - readStream.on('data', (chunk) => { - // Convertir el chunk a Uint8Array y pasarlo al controller - controller.enqueue(new Uint8Array(chunk)); - }); - - readStream.on('end', () => { - // Señalar que la transmisión ha finalizado - controller.close(); - }); - - readStream.on('error', (err) => { - // Señalar un error al controller - controller.error(err); - }); - }, - cancel() { - // Abortar la lectura del ReadStream de fs - readStream.destroy(); - }, - }); -} diff --git a/src/apps/main/network/download.ts b/src/apps/main/network/download.ts index 77939fc094..04c9456c2c 100644 --- a/src/apps/main/network/download.ts +++ b/src/apps/main/network/download.ts @@ -13,92 +13,33 @@ import { import { GenerateFileKey } from '@internxt/inxt-js/build/lib/utils/crypto'; import { createDecipheriv, Decipher } from 'crypto'; import downloadFileV2 from './downloadv2'; -import { fetchFolderTree } from '../device/service'; +import { getBackupFolderTreeSnapshot } from '../../../backend/features/backup/get-backup-folder-tree-snapshot'; import { FolderTree } from '@internxt/sdk/dist/drive/storage/types'; import { ReadableStream, WritableStream } from 'node:stream/web'; import { Readable } from 'node:stream'; import fetch from 'electron-fetch'; import { convertToReadableStream } from './NetworkFacade'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; -export async function downloadFolderAsZip( - deviceName: string, - networkApiUrl: string, - folderUuid: string, - fullPath: PathLike, - environment: { - bridgeUser: string; - bridgePass: string; - encryptionKey: string; - }, - opts: { - abortController?: AbortController; - updateProgress?: (progress: number) => void; - }, -) { - const writeStream = fs.createWriteStream(fullPath); - const destination = convertToWritableStream(writeStream); - - const { abortController, updateProgress } = opts; - const { bridgeUser, bridgePass, encryptionKey } = environment; - const { tree, folderDecryptedNames, fileDecryptedNames, size } = await fetchFolderTree(folderUuid); - tree.plainName = deviceName; - folderDecryptedNames[tree.id] = deviceName; - const pendingFolders: { path: string; data: FolderTree }[] = [{ path: '', data: tree }]; - - const zip = new FlatFolderZip(destination, { - abortController: opts.abortController, - // possible zip corruption caused by progress ?? - progress: (loadedBytes) => updateProgress?.(loadedBytes / size), - }); - - while (pendingFolders.length > 0 && !abortController?.signal.aborted) { - const currentFolder = pendingFolders.shift() as { - path: string; - data: FolderTree; - }; - const folderPath = - currentFolder.path + (currentFolder.path === '' ? '' : '/') + folderDecryptedNames[currentFolder.data.id]; - - zip.addFolder(folderPath); - - const { files, children: folders } = currentFolder.data; - - for (const file of files) { - if (abortController?.signal.aborted) { - throw new Error('Download cancelled'); - } - - const displayFilename = items.getItemDisplayName({ - name: fileDecryptedNames[file.id], - type: file.type, - }); - - const fileStreamPromise = downloadFile({ - networkApiUrl, - bucketId: file.bucket, - fileId: file.fileId, - creds: { - pass: bridgePass, - user: bridgeUser, - }, - mnemonic: encryptionKey, - options: { - notifyProgress: () => null, - abortController: opts.abortController, - }, - }); - - zip.addFile(folderPath + '/' + displayFilename, await fileStreamPromise); - } - - pendingFolders.push(...folders.map((tree) => ({ path: folderPath, data: tree }))); - } +interface MetadataRequiredForDownload { + mirrors: Mirror[]; + fileMeta: FileInfo; +} - if (abortController?.signal.aborted) { - throw new Error('Download cancelled'); - } +export type DownloadProgressCallback = (totalBytes: number, downloadedBytes: number) => void; - return zip.close(); +export interface IDownloadParams { + networkApiUrl: string; + bucketId: string; + fileId: string; + creds?: NetworkCredentials; + mnemonic?: string; + encryptionKey?: Buffer; + token?: string; + options?: { + notifyProgress: DownloadProgressCallback; + abortController?: AbortController; + }; } function convertToWritableStream(writeStream: fs.WriteStream): WritableStream { @@ -132,99 +73,74 @@ function convertToWritableStream(writeStream: fs.WriteStream): WritableStream void; -export interface IDownloadParams { - networkApiUrl: string; - bucketId: string; - fileId: string; - creds?: NetworkCredentials; - mnemonic?: string; - encryptionKey?: Buffer; - token?: string; - options?: { - notifyProgress: DownloadProgressCallback; - abortController?: AbortController; - }; -} +function joinReadableBinaryStreams(streams: ReadableStream[]): ReadableStream { + const streamsCopy = streams.map((s) => s); + let keepReading = true; -interface MetadataRequiredForDownload { - mirrors: Mirror[]; - fileMeta: FileInfo; -} + const flush = () => streamsCopy.forEach((s) => s.cancel()); -async function getRequiredFileMetadataWithToken( - networkApiUrl: string, - bucketId: string, - fileId: string, - token: string, -): Promise { - const fileMeta: FileInfo = await getFileInfoWithToken(networkApiUrl, bucketId, fileId, token); - const mirrors: Mirror[] = await getMirrors(networkApiUrl, bucketId, fileId, null, token); + const stream = new ReadableStream({ + async pull(controller) { + if (!keepReading) return flush(); - return { fileMeta, mirrors }; -} + const downStream = streamsCopy.shift(); -async function getRequiredFileMetadataWithAuth( - networkApiUrl: string, - bucketId: string, - fileId: string, - creds: NetworkCredentials, -): Promise { - const fileMeta: FileInfo = await getFileInfoWithAuth(networkApiUrl, bucketId, fileId, creds); - const mirrors: Mirror[] = await getMirrors(networkApiUrl, bucketId, fileId, creds); + if (!downStream) { + return controller.close(); + } - return { fileMeta, mirrors }; -} + const reader = downStream.getReader(); + let done = false; -async function downloadFile(params: IDownloadParams): Promise> { - const downloadFileV2Promise = downloadFileV2(params as any); + while (!done && keepReading) { + const status = await reader.read(); - return downloadFileV2Promise.catch((err: Error) => { - if (err instanceof FileVersionOneError) { - return _downloadFile(params); - } + if (!status.done) { + controller.enqueue(status.value); + } - throw err; - }); -} + done = status.done; + } -async function _downloadFile(params: IDownloadParams): Promise> { - const { networkApiUrl, bucketId, fileId, token, creds } = params; + reader.releaseLock(); + }, + cancel() { + keepReading = false; + }, + }); - let metadata: MetadataRequiredForDownload; + return stream; +} - if (creds) { - metadata = await getRequiredFileMetadataWithAuth(networkApiUrl, bucketId, fileId, creds); - } else if (token) { - metadata = await getRequiredFileMetadataWithToken(networkApiUrl, bucketId, fileId, token); - } else { - throw new Error('Download error 1'); - } +export function getDecryptedStream( + encryptedContentSlices: ReadableStream[], + decipher: Decipher, +): ReadableStream { + const encryptedStream = joinReadableBinaryStreams(encryptedContentSlices); - const { mirrors, fileMeta } = metadata; - const downloadUrls: string[] = mirrors.map((m) => m.url); + let keepReading = true; - const index = Buffer.from(fileMeta.index, 'hex'); - const iv = index.slice(0, 16); - let key: Buffer; + const decryptedStream = new ReadableStream({ + async pull(controller) { + if (!keepReading) return; - if (params.encryptionKey) { - key = params.encryptionKey; - } else if (params.mnemonic) { - key = await GenerateFileKey(params.mnemonic, bucketId, index); - } else { - throw new Error('Download error code 1'); - } + const reader = encryptedStream.getReader(); + const status = await reader.read(); - const downloadStream = await getFileDownloadStream( - downloadUrls, - createDecipheriv('aes-256-ctr', key, iv), - params.options?.abortController, - ); + if (status.done) { + controller.close(); + } else { + controller.enqueue(decipher.update(status.value)); + } - return buildProgressStream(downloadStream, (readBytes) => { - params.options?.notifyProgress(metadata.fileMeta.size, readBytes); + reader.releaseLock(); + }, + cancel() { + keepReading = false; + }, }); + + return decryptedStream; } async function getFileDownloadStream( @@ -250,6 +166,30 @@ async function getFileDownloadStream( return getDecryptedStream(encryptedContentParts, decipher); } +async function getRequiredFileMetadataWithAuth( + networkApiUrl: string, + bucketId: string, + fileId: string, + creds: NetworkCredentials, +): Promise { + const fileMeta: FileInfo = await getFileInfoWithAuth(networkApiUrl, bucketId, fileId, creds); + const mirrors: Mirror[] = await getMirrors(networkApiUrl, bucketId, fileId, creds); + + return { fileMeta, mirrors }; +} + +async function getRequiredFileMetadataWithToken( + networkApiUrl: string, + bucketId: string, + fileId: string, + token: string, +): Promise { + const fileMeta: FileInfo = await getFileInfoWithToken(networkApiUrl, bucketId, fileId, token); + const mirrors: Mirror[] = await getMirrors(networkApiUrl, bucketId, fileId, null, token); + + return { fileMeta, mirrors }; +} + export function buildProgressStream( source: ReadableStream, onRead: (readBytes: number) => void, @@ -276,72 +216,136 @@ export function buildProgressStream( }); } -function joinReadableBinaryStreams(streams: ReadableStream[]): ReadableStream { - const streamsCopy = streams.map((s) => s); - let keepReading = true; - - const flush = () => streamsCopy.forEach((s) => s.cancel()); - - const stream = new ReadableStream({ - async pull(controller) { - if (!keepReading) return flush(); +async function _downloadFile(params: IDownloadParams): Promise> { + const { networkApiUrl, bucketId, fileId, token, creds } = params; - const downStream = streamsCopy.shift(); + let metadata: MetadataRequiredForDownload; - if (!downStream) { - return controller.close(); - } + if (creds) { + metadata = await getRequiredFileMetadataWithAuth(networkApiUrl, bucketId, fileId, creds); + } else if (token) { + metadata = await getRequiredFileMetadataWithToken(networkApiUrl, bucketId, fileId, token); + } else { + throw new Error('Download error 1'); + } - const reader = downStream.getReader(); - let done = false; + const { mirrors, fileMeta } = metadata; + const downloadUrls: string[] = mirrors.map((m) => m.url); - while (!done && keepReading) { - const status = await reader.read(); + const index = Buffer.from(fileMeta.index, 'hex'); + const iv = index.slice(0, 16); + let key: Buffer; - if (!status.done) { - controller.enqueue(status.value); - } + if (params.encryptionKey) { + key = params.encryptionKey; + } else if (params.mnemonic) { + key = await GenerateFileKey(params.mnemonic, bucketId, index); + } else { + throw new Error('Download error code 1'); + } - done = status.done; - } + const downloadStream = await getFileDownloadStream( + downloadUrls, + createDecipheriv('aes-256-ctr', Uint8Array.from(key), Uint8Array.from(iv)), + params.options?.abortController, + ); - reader.releaseLock(); - }, - cancel() { - keepReading = false; - }, + return buildProgressStream(downloadStream, (readBytes) => { + params.options?.notifyProgress(metadata.fileMeta.size, readBytes); }); +} - return stream; +async function downloadFile(params: IDownloadParams): Promise> { + return downloadFileV2(params).catch((err: Error) => { + if (err instanceof FileVersionOneError) { + return _downloadFile(params); + } + throw err; + }); } -export function getDecryptedStream( - encryptedContentSlices: ReadableStream[], - decipher: Decipher, -): ReadableStream { - const encryptedStream = joinReadableBinaryStreams(encryptedContentSlices); +export async function downloadFolderAsZip( + deviceName: string, + networkApiUrl: string, + folderUuid: string, + fullPath: PathLike, + environment: { + bridgeUser: string; + bridgePass: string; + encryptionKey: string; + }, + opts: { + abortController?: AbortController; + updateProgress?: (progress: number) => void; + }, +) { + const writeStream = fs.createWriteStream(fullPath); + const destination = convertToWritableStream(writeStream); - let keepReading = true; + const { abortController, updateProgress } = opts; + const { bridgeUser, bridgePass, encryptionKey } = environment; + const { data, error } = await getBackupFolderTreeSnapshot({ folderUuid }); + if (error) { + throw logger.error({ tag: 'BACKUPS', msg: 'Error fetching backup folder tree snapshot', error }); + } - const decryptedStream = new ReadableStream({ - async pull(controller) { - if (!keepReading) return; + const { tree, folderDecryptedNames, fileDecryptedNames, size } = data; + tree.plainName = deviceName; + folderDecryptedNames[tree.id] = deviceName; + const pendingFolders: { path: string; data: FolderTree }[] = [{ path: '', data: tree }]; - const reader = encryptedStream.getReader(); - const status = await reader.read(); + const zip = new FlatFolderZip(destination, { + abortController: opts.abortController, + // possible zip corruption caused by progress ?? + progress: (loadedBytes) => updateProgress?.(loadedBytes / size), + }); - if (status.done) { - controller.close(); - } else { - controller.enqueue(decipher.update(status.value)); + while (pendingFolders.length > 0 && !abortController?.signal.aborted) { + const currentFolder = pendingFolders.shift() as { + path: string; + data: FolderTree; + }; + const folderPath = + currentFolder.path + (currentFolder.path === '' ? '' : '/') + folderDecryptedNames[currentFolder.data.id]; + + zip.addFolder(folderPath); + + const { files, children: folders } = currentFolder.data; + + for (const file of files) { + if (abortController?.signal.aborted) { + throw new Error('Download cancelled'); } - reader.releaseLock(); - }, - cancel() { - keepReading = false; - }, - }); + const displayFilename = items.getItemDisplayName({ + name: fileDecryptedNames[file.id], + type: file.type, + }); - return decryptedStream; + const fileStreamPromise = downloadFile({ + networkApiUrl, + bucketId: file.bucket, + fileId: file.fileId, + creds: { + pass: bridgePass, + user: bridgeUser, + }, + mnemonic: encryptionKey, + options: { + notifyProgress: () => null, + abortController: opts.abortController, + }, + }); + + zip.addFile(folderPath + '/' + displayFilename, await fileStreamPromise); + } + + pendingFolders.push(...folders.map((tree) => ({ path: folderPath, data: tree }))); + } + + if (abortController?.signal.aborted) { + throw new Error('Download cancelled'); + } + + return zip.close(); } diff --git a/src/apps/main/network/downloadv2.ts b/src/apps/main/network/downloadv2.ts index b928ccdd28..ec4ad3b60e 100644 --- a/src/apps/main/network/downloadv2.ts +++ b/src/apps/main/network/downloadv2.ts @@ -30,12 +30,21 @@ interface DownloadSharedFileParams extends DownloadFileParams { creds?: never; mnemonic?: never; token: string; - encryptionKey: string; + encryptionKey: Buffer | string; } type DownloadSharedFileFunction = (params: DownloadSharedFileParams) => DownloadFileResponse; type DownloadOwnFileFunction = (params: DownloadOwnFileParams) => DownloadFileResponse; -type DownloadFileFunction = (params: DownloadSharedFileParams | DownloadOwnFileParams) => DownloadFileResponse; +type DownloadFileRawParams = { + bucketId: string; + fileId: string; + creds?: NetworkCredentials; + mnemonic?: string; + token?: string; + encryptionKey?: Buffer | string; + options?: DownloadFileOptions; +}; +type DownloadFileFunction = (params: DownloadFileRawParams) => DownloadFileResponse; const downloadSharedFile: DownloadSharedFileFunction = (params) => { const { bucketId, fileId, encryptionKey, token, options } = params; @@ -55,7 +64,7 @@ const downloadSharedFile: DownloadSharedFileFunction = (params) => { }, ), ).download(bucketId, fileId, '', { - key: Buffer.from(encryptionKey, 'hex'), + key: typeof encryptionKey === 'string' ? Buffer.from(encryptionKey, 'hex') : encryptionKey, token, downloadingCallback: options?.notifyProgress, abortController: options?.abortController, @@ -95,9 +104,10 @@ const downloadOwnFile: DownloadOwnFileFunction = (params) => { const downloadFileV2: DownloadFileFunction = (params) => { if (params.token && params.encryptionKey) { - return downloadSharedFile(params); + // This is de facto dead code as its never called with params.token + return downloadSharedFile(params as DownloadSharedFileParams); } else if (params.creds && params.mnemonic) { - return downloadOwnFile(params); + return downloadOwnFile(params as DownloadOwnFileParams); } else { throw new Error('DOWNLOAD ERRNO. 0'); } diff --git a/src/apps/main/network/requests.ts b/src/apps/main/network/requests.ts index 1adc232cda..69998791f7 100644 --- a/src/apps/main/network/requests.ts +++ b/src/apps/main/network/requests.ts @@ -1,6 +1,12 @@ import axios, { AxiosBasicCredentials, AxiosRequestConfig } from 'axios'; import { createHash } from 'crypto'; +/** + * v.2.5.5 + * Esteban Galvis Triana + * TODO: Move this request logic to driveServerClient + */ + export interface FileInfo { bucket: string; mimetype: string; @@ -99,6 +105,61 @@ export interface Mirror { operation: string; } +function isFarmerOk(farmer?: Partial) { + return farmer && farmer.nodeID && farmer.port && farmer.address; +} + +function getFileMirrors( + networkApiUrl: string, + bucketId: string, + fileId: string, + limit: number | 3, + skip: number | 0, + excludeNodes: string[] = [], + opts?: AxiosRequestConfig, +): Promise { + const excludeNodeIds: string = excludeNodes.join(','); + const path = `${networkApiUrl}/buckets/${bucketId}/files/${fileId}`; + const queryParams = `?limit=${limit}&skip=${skip}&exclude=${excludeNodeIds}`; + + const defaultOpts: AxiosRequestConfig = { + responseType: 'json', + url: path + queryParams, + }; + + return axios + .request({ ...defaultOpts, ...opts }) + .then((res) => { + return res.data; + }) + .catch((err) => { + throw err; + }); +} + +async function replaceMirror( + networkApiUrl: string, + bucketId: string, + fileId: string, + pointerIndex: number, + excludeNodes: string[] = [], + opts?: AxiosRequestConfig, +): Promise { + let mirrorIsOk = false; + let mirror: Mirror; + + while (!mirrorIsOk) { + const [newMirror] = await getFileMirrors(networkApiUrl, bucketId, fileId, 1, pointerIndex, excludeNodes, opts); + + mirror = newMirror; + mirrorIsOk = + newMirror.farmer && newMirror.farmer.nodeID && newMirror.farmer.port && newMirror.farmer.address ? true : false; + } + + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return mirror!; +} + export async function getMirrors( networkApiUrl: string, bucketId: string, @@ -145,58 +206,3 @@ export async function getMirrors( return mirrors; } - -async function replaceMirror( - networkApiUrl: string, - bucketId: string, - fileId: string, - pointerIndex: number, - excludeNodes: string[] = [], - opts?: AxiosRequestConfig, -): Promise { - let mirrorIsOk = false; - let mirror: Mirror; - - while (!mirrorIsOk) { - const [newMirror] = await getFileMirrors(networkApiUrl, bucketId, fileId, 1, pointerIndex, excludeNodes, opts); - - mirror = newMirror; - mirrorIsOk = - newMirror.farmer && newMirror.farmer.nodeID && newMirror.farmer.port && newMirror.farmer.address ? true : false; - } - - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - return mirror!; -} - -function getFileMirrors( - networkApiUrl: string, - bucketId: string, - fileId: string, - limit: number | 3, - skip: number | 0, - excludeNodes: string[] = [], - opts?: AxiosRequestConfig, -): Promise { - const excludeNodeIds: string = excludeNodes.join(','); - const path = `${networkApiUrl}/buckets/${bucketId}/files/${fileId}`; - const queryParams = `?limit=${limit}&skip=${skip}&exclude=${excludeNodeIds}`; - - const defaultOpts: AxiosRequestConfig = { - responseType: 'json', - url: path + queryParams, - }; - - return axios - .request({ ...defaultOpts, ...opts }) - .then((res) => { - return res.data; - }) - .catch((err) => { - throw err; - }); -} - -function isFarmerOk(farmer?: Partial) { - return farmer && farmer.nodeID && farmer.port && farmer.address; -} diff --git a/src/apps/main/network/zip.service.ts b/src/apps/main/network/zip.service.ts index 4eddb1d51c..c3a484440d 100644 --- a/src/apps/main/network/zip.service.ts +++ b/src/apps/main/network/zip.service.ts @@ -1,5 +1,6 @@ import { AsyncZipDeflate, Zip } from 'fflate'; import { ReadableStream, WritableStream } from 'node:stream/web'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; type FlatFolderZipOpts = { abortController?: AbortController; @@ -17,48 +18,6 @@ export interface ZipStream { end: () => void; } -export class FlatFolderZip { - private finished!: Promise; - private zip: ZipStream; - private passThrough: ReadableStream; - private abortController?: AbortController; - - constructor(destination: WritableStream, opts: FlatFolderZipOpts) { - this.zip = createFolderWithFilesWritable(opts.progress); - this.abortController = opts.abortController; - - this.passThrough = this.zip.stream; - - this.finished = this.passThrough.pipeTo(destination, { - signal: opts.abortController?.signal, - }); - } - - addFile(name: string, source: ReadableStream): void { - if (this.abortController?.signal.aborted) return; - - this.zip.addFile(name, source); - } - - addFolder(name: string): void { - if (this.abortController?.signal.aborted) return; - - this.zip.addFolder(name); - } - - async close(): Promise { - if (this.abortController?.signal.aborted) return; - - this.zip.end(); - - await this.finished; - } - - abort(): void { - this.abortController?.abort(); - } -} - export function createFolderWithFilesWritable(progress?: FlatFolderZipOpts['progress']): ZipStream { const zip = new Zip(); let passthroughController: ReadableStreamDefaultController | null = null; @@ -81,7 +40,7 @@ export function createFolderWithFilesWritable(progress?: FlatFolderZipOpts['prog zip.ondata = (err, data, final) => { if (err) { - console.error('Error in ZIP data event:', err); + logger.error({ msg: 'Error in ZIP data event', err }); return; } @@ -134,3 +93,45 @@ export function createFolderWithFilesWritable(progress?: FlatFolderZipOpts['prog }, }; } + +export class FlatFolderZip { + private finished!: Promise; + private zip: ZipStream; + private passThrough: ReadableStream; + private abortController?: AbortController; + + constructor(destination: WritableStream, opts: FlatFolderZipOpts) { + this.zip = createFolderWithFilesWritable(opts.progress); + this.abortController = opts.abortController; + + this.passThrough = this.zip.stream; + + this.finished = this.passThrough.pipeTo(destination, { + signal: opts.abortController?.signal, + }); + } + + addFile(name: string, source: ReadableStream): void { + if (this.abortController?.signal.aborted) return; + + this.zip.addFile(name, source); + } + + addFolder(name: string): void { + if (this.abortController?.signal.aborted) return; + + this.zip.addFolder(name); + } + + async close(): Promise { + if (this.abortController?.signal.aborted) return; + + this.zip.end(); + + await this.finished; + } + + abort(): void { + this.abortController?.abort(); + } +} diff --git a/src/apps/main/platform/DesktopPlatform.ts b/src/apps/main/platform/DesktopPlatform.ts deleted file mode 100644 index 1b83ebb8ad..0000000000 --- a/src/apps/main/platform/DesktopPlatform.ts +++ /dev/null @@ -1 +0,0 @@ -export type DesktopPlatform = 'linux' | 'win32' | 'darwin'; diff --git a/src/apps/main/platform/handlers.ts b/src/apps/main/platform/handlers.ts index 50ee9084b5..8c77004496 100644 --- a/src/apps/main/platform/handlers.ts +++ b/src/apps/main/platform/handlers.ts @@ -1,22 +1,12 @@ -import { ipcMain, shell } from 'electron'; +import { ipcMain } from 'electron'; import { exec } from 'child_process'; -ipcMain.handle('get-platform', () => { - return process.platform; -}); - ipcMain.handle('open-url', (_, url: string) => { - if (process.platform === 'linux') { - // shell.openExternal is not working as intended on the current verions of electron - // this is only a workaround to fix it - return new Promise((resolve, reject) => { - exec(`xdg-open ${url} &`, (error) => { - if (error) reject(error); + return new Promise((resolve, reject) => { + exec(`xdg-open ${url} &`, (error) => { + if (error) reject(error); - resolve(); - }); + resolve(); }); - } - - return shell.openExternal(url); + }); }); diff --git a/src/apps/main/preload.d.ts b/src/apps/main/preload.d.ts index 11a06ee726..aca484dcf5 100644 --- a/src/apps/main/preload.d.ts +++ b/src/apps/main/preload.d.ts @@ -2,10 +2,13 @@ import { UserAvailableProducts } from '@internxt/drive-desktop-core/build/backen import { AuthLoginResponseViewModel } from '../../infra/drive-server/services/auth/auth.types'; import { CleanerReport } from '../../backend/features/cleaner/cleaner.types'; import { BackupErrorRecord } from '../../backend/features/backup/backup.types'; +import type { Device } from '../../backend/features/backup/types/Device'; declare interface Window { electron: { - getConfigKey(key: import('./config/service.types').StoredValues): Promise; + getConfigKey( + key: T, + ): Promise; listenToConfigKeyChange(key: import('./config/service.types').StoredValues, fn: (value: T) => void): () => void; @@ -102,26 +105,29 @@ declare interface Window { renameDevice: typeof import('../../backend/features/device/device.module').DeviceModule.renameDevice; devices: { - getDevices: () => Promise>; + getDevices: () => Promise>; }; - onDeviceCreated(func: (value: import('../main/device/service').Device) => void): () => void; + onDeviceCreated(func: (value: Device) => void): () => void; getBackupsFromDevice: typeof import('../../backend/features/device/device.module').DeviceModule.getBackupsFromDevice; - addBackup: typeof import('../main/backups/add-backup').addBackup; + addBackup: typeof import('../../backend/features/backup/add-backup').addBackup; - downloadBackup: typeof import('../main/device/service').downloadBackup; + downloadBackup: ( + device: import('../../backend/features/backup/types/Device').Device, + pathname: import('../../context/local/localFile/infrastructure/AbsolutePath').AbsolutePath, + ) => Promise; abortDownloadBackups: (deviceId: string) => void; - addBackupsFromLocalPaths: typeof import('../main/device/service').createBackupsFromLocalPaths; + addBackupsFromLocalPaths: typeof import('../../backend/features/backup/create-backups-from-local-paths').createBackupsFromLocalPaths; - deleteBackup: typeof import('../main/device/service').deleteBackup; + deleteBackup: typeof import('../../backend/features/backup/delete-backup').deleteBackup; - deleteBackupsFromDevice: typeof import('../main/device/service').deleteBackupsFromDevice; + deleteBackupsFromDevice: typeof import('../../backend/features/backup/delete-device-backups').deleteDeviceBackups; - disableBackup: typeof import('../main/device/service').disableBackup; + disableBackup: typeof import('../../backend/features/backup/disable-backup').disableBackup; getBackupsEnabled: () => Promise; @@ -135,15 +141,14 @@ declare interface Window { onBackupFatalErrorsChanged(fn: (backupErrors: Array) => void): () => void; - changeBackupPath: typeof import('../main/device/service').changeBackupPath; + changeBackupPath: typeof import('../../backend/features/backup/change-backup-path').changeBackupPath; - getFolderPath: typeof import('../../backend/features/backup/get-path-from-dialog').getPathFromDialog; + getFolderPath: typeof import('../../core/utils/get-path-from-dialog').getPathFromDialog; onRemoteChanges(func: (value: import('../main/realtime').EventPayload) => void): () => void; getUsage: () => Promise; - getPlatform: () => Promise; onRemoteSyncStatusChange(callback: (status: import('./remote-sync/helpers').RemoteSyncStatus) => void): () => void; getRemoteSyncStatus(): Promise; getVirtualDriveStatus(): Promise; diff --git a/src/apps/main/preload.js b/src/apps/main/preload.js index a4f3965764..170be45c3c 100644 --- a/src/apps/main/preload.js +++ b/src/apps/main/preload.js @@ -164,8 +164,8 @@ contextBridge.exposeInMainWorld('electron', { addBackup() { return ipcRenderer.invoke('add-backup'); }, - downloadBackup(backup) { - return ipcRenderer.invoke('download-backup', backup); + downloadBackup(backup, pathname) { + return ipcRenderer.invoke('download-backup', backup, pathname); }, addBackupsFromLocalPaths(localPaths) { return ipcRenderer.invoke('add-multiple-backups', localPaths); @@ -223,8 +223,8 @@ contextBridge.exposeInMainWorld('electron', { getLastBackupHadIssues() { return ipcRenderer.invoke('get-last-backup-had-issues'); }, - changeBackupPath(currentPath) { - return ipcRenderer.invoke('change-backup-path', currentPath); + changeBackupPath({ currentPath, newPath }) { + return ipcRenderer.invoke('change-backup-path', { currentPath, newPath }); }, getFolderPath() { return ipcRenderer.invoke('get-folder-path'); @@ -239,9 +239,6 @@ contextBridge.exposeInMainWorld('electron', { getUsage() { return ipcRenderer.invoke('get-usage'); }, - getPlatform() { - return ipcRenderer.invoke('get-platform'); - }, resizeWindow(dimensions) { return ipcRenderer.invoke('resize-focused-window', dimensions); }, diff --git a/src/apps/main/realtime.ts b/src/apps/main/realtime.ts index 6475c9d676..f88985bd28 100644 --- a/src/apps/main/realtime.ts +++ b/src/apps/main/realtime.ts @@ -22,6 +22,13 @@ export type EventPayload = { let user = getUser(); +function stopRemoteNotifications() { + if (socket) { + socket.close(); + socket = undefined; + } +} + function cleanAndStartRemoteNotifications() { stopRemoteNotifications(); const { newToken } = getCredentials(); @@ -124,12 +131,5 @@ function cleanAndStartRemoteNotifications() { }); } -function stopRemoteNotifications() { - if (socket) { - socket.close(); - socket = undefined; - } -} - eventBus.on('USER_LOGGED_IN', cleanAndStartRemoteNotifications); eventBus.on('USER_LOGGED_OUT', stopRemoteNotifications); diff --git a/src/apps/main/remote-sync/RemoteSyncManager.ts b/src/apps/main/remote-sync/RemoteSyncManager.ts index ffc1e2f884..a07a57aa1d 100644 --- a/src/apps/main/remote-sync/RemoteSyncManager.ts +++ b/src/apps/main/remote-sync/RemoteSyncManager.ts @@ -373,35 +373,29 @@ export class RemoteSyncManager { }; } - private patchDriveFolderResponseItem = (payload: any): RemoteSyncedFolder => { - // We will assume that we received an status - let status: RemoteSyncedFolder['status'] = payload.status; - - if (!status && !payload.removed) { - status = 'EXISTS'; - } - - if (!status && payload.removed) { - status = 'REMOVED'; - } - - if (!status && payload.deleted) { - status = 'DELETED'; - } + private patchDriveFolderResponseItem = (payload: Record): RemoteSyncedFolder => { + const status = this.resolveFolderStatus(payload); return { - ...payload, + ...(payload as Omit), status, - name: payload.name ?? undefined, + name: typeof payload.name === 'string' ? payload.name : undefined, }; }; - private patchDriveFileResponseItem = (payload: any): RemoteSyncedFile => { + private resolveFolderStatus(payload: Record): RemoteSyncedFolder['status'] { + if (typeof payload.status === 'string' && payload.status) return payload.status; + if (payload.removed) return 'REMOVED'; + if (payload.deleted) return 'DELETED'; + return 'EXISTS'; + } + + private readonly patchDriveFileResponseItem = (payload: Record): RemoteSyncedFile => { return { - ...payload, - fileId: payload.fileId ?? '', - size: typeof payload.size === 'string' ? parseInt(payload.size) : payload.size, - name: payload.name ?? undefined, + ...(payload as Omit), + fileId: typeof payload.fileId === 'string' ? payload.fileId : '', + size: typeof payload.size === 'string' ? Number.parseInt(payload.size) : (payload.size as number), + name: typeof payload.name === 'string' ? payload.name : undefined, }; }; } diff --git a/src/apps/main/remote-sync/errors.ts b/src/apps/main/remote-sync/errors.ts index f02bc7cb96..e6ac4bd04e 100644 --- a/src/apps/main/remote-sync/errors.ts +++ b/src/apps/main/remote-sync/errors.ts @@ -2,10 +2,10 @@ * Base class for RemoteSync errors. */ export class RemoteSyncError extends Error { - public context?: any; + public context?: Record; public code?: string; - constructor(message: string, code?: string, context?: any) { + constructor(message: string, code?: string, context?: Record) { super(message); this.name = 'RemoteSyncError'; this.code = code; @@ -17,7 +17,7 @@ export class RemoteSyncError extends Error { * Error thrown when the response does not contain an array of files. */ export class RemoteSyncInvalidResponseError extends RemoteSyncError { - constructor(response: any) { + constructor(response: unknown) { super(`Expected an array of files, but received: ${JSON.stringify(response, null, 2)}`, 'INVALID_RESPONSE', { response, }); @@ -39,7 +39,7 @@ export class RemoteSyncNetworkError extends RemoteSyncError { * Error thrown when the server responds with an error status (example, status 500). */ export class RemoteSyncServerError extends RemoteSyncError { - constructor(status: number, data: any) { + constructor(status: number, data: unknown) { super(`Server error: request failed with status code ${status} while sync`, 'SERVER_ERROR', { status, data }); this.name = 'RemoteSyncServerError'; } diff --git a/src/apps/main/tray/handlers.ts b/src/apps/main/tray/handlers.ts index 0cc05647bf..136646eb90 100644 --- a/src/apps/main/tray/handlers.ts +++ b/src/apps/main/tray/handlers.ts @@ -1,5 +1,5 @@ import { MainProcessSyncEngineIPC } from '../MainProcessSyncEngineIPC'; -import { setTrayStatus } from './tray'; +import { setTrayStatus } from './tray-setup'; MainProcessSyncEngineIPC.on('FOLDER_CREATING', () => { setTrayStatus('SYNCING'); diff --git a/src/apps/main/tray/tray-menu.test.ts b/src/apps/main/tray/tray-menu.test.ts new file mode 100644 index 0000000000..07d7a1417c --- /dev/null +++ b/src/apps/main/tray/tray-menu.test.ts @@ -0,0 +1,99 @@ +import PackageJson from '../../../../package.json'; + +const { trayHandlers, trayInstance, buildFromTemplateMock, createFromPathMock, TrayMock } = vi.hoisted(() => { + const trayHandlers = new Map unknown>(); + const trayInstance = { + getBounds: vi.fn(() => ({ x: 1, y: 2, width: 3, height: 4 })), + setIgnoreDoubleClickEvents: vi.fn(), + on: vi.fn((event: string, handler: (...args: unknown[]) => unknown) => { + trayHandlers.set(event, handler); + }), + setContextMenu: vi.fn(), + setImage: vi.fn(), + setToolTip: vi.fn(), + destroy: vi.fn(), + }; + + const buildFromTemplateMock = vi.fn((template) => ({ template })); + const createFromPathMock = vi.fn((imagePath: string) => ({ imagePath })); + const TrayMock = vi.fn(() => trayInstance); + + return { + trayHandlers, + trayInstance, + buildFromTemplateMock, + createFromPathMock, + TrayMock, + }; +}); + +vi.mock('electron', () => ({ + Menu: { + buildFromTemplate: buildFromTemplateMock, + }, + nativeImage: { + createFromPath: createFromPathMock, + }, + Tray: TrayMock, +})); + +import { TrayMenu } from './tray-menu'; + +describe('tray-menu', () => { + beforeEach(() => { + trayHandlers.clear(); + }); + + it('should initialize the tray with context menu in loading state', () => { + // Given + const onClick = vi.fn(); + const onQuit = vi.fn(); + + // When + new TrayMenu('/icons', onClick, onQuit); + const expectedContextMenu = [ + { + label: `Internxt ${PackageJson.version}`, + click: expect.any(Function), + }, + { + label: 'Quit', + click: expect.any(Function), + }, + ]; + // Then + expect(TrayMock).toBeCalledWith('/icons/loading.png'); + expect(createFromPathMock).toBeCalledWith('/icons/loading.png'); + expect(trayInstance.setImage).toBeCalledWith({ imagePath: '/icons/loading.png' }); + expect(trayInstance.setToolTip).toBeCalledWith('Loading Internxt...'); + expect(buildFromTemplateMock).toBeCalledWith(expectedContextMenu); + expect(trayInstance.setContextMenu).toBeCalledWith({ + template: expectedContextMenu, + }); + }); + + it('should invoke onClick when the context menu Open app item is clicked', async () => { + // Given + const onClick = vi.fn().mockResolvedValue(undefined); + const onQuit = vi.fn(); + new TrayMenu('/icons', onClick, onQuit); + + // When – simulate clicking the first (only) menu item + const [[menuTemplate]] = buildFromTemplateMock.mock.calls as [[Electron.MenuItemConstructorOptions[]]]; + await (menuTemplate[0].click as () => Promise)(); + + // Then + expect(onClick).toBeCalled(); + }); + + it('should update the tooltip for idle state', () => { + // Given + const trayMenu = new TrayMenu('/icons', vi.fn().mockResolvedValue(undefined), vi.fn()); + + // When + trayMenu.setState('IDLE'); + + // Then + expect(trayInstance.setToolTip).toBeCalledWith(`Internxt ${PackageJson.version}`); + }); +}); diff --git a/src/apps/main/tray/tray-menu.ts b/src/apps/main/tray/tray-menu.ts new file mode 100644 index 0000000000..d71378abea --- /dev/null +++ b/src/apps/main/tray/tray-menu.ts @@ -0,0 +1,74 @@ +import { Menu, nativeImage, Tray } from 'electron'; +import path from 'node:path'; +import PackageJson from '../../../../package.json'; +import { TrayMenuState } from './types'; + +export class TrayMenu { + private readonly tray: Tray; + + get bounds() { + return this.tray.getBounds(); + } + + constructor( + private readonly iconsPath: string, + private readonly onClick: () => Promise, + private readonly onQuit: () => void, + ) { + const trayIcon = this.getIconPath('LOADING'); + + this.tray = new Tray(trayIcon); + + this.setState('LOADING'); + + const contextMenu = Menu.buildFromTemplate([ + { + label: `Internxt ${PackageJson.version}`, + click: () => { + this.onClick(); + }, + }, + { + label: 'Quit', + click: () => { + this.onQuit(); + }, + }, + ]); + this.tray.setContextMenu(contextMenu); + } + + getIconPath(state: TrayMenuState) { + return path.join(this.iconsPath, `${state.toLowerCase()}.png`); + } + + setState(state: TrayMenuState) { + const iconPath = this.getIconPath(state); + this.setImage(iconPath); + + this.setTooltip(state); + } + + setImage(imagePath: string) { + const image = nativeImage.createFromPath(imagePath); + this.tray.setImage(image); + } + + setTooltip(state: TrayMenuState) { + const messages: Record = { + SYNCING: 'Sync in process', + IDLE: `Internxt ${PackageJson.version}`, + ALERT: 'There are some issues with your sync', + LOADING: 'Loading Internxt...', + }; + + const message = messages[state]; + this.tray.setToolTip(message); + } + + destroy() { + if (this.tray) { + this.tray.destroy(); + } + } +} diff --git a/src/apps/main/tray/tray-setup.test.ts b/src/apps/main/tray/tray-setup.test.ts new file mode 100644 index 0000000000..fb04987c36 --- /dev/null +++ b/src/apps/main/tray/tray-setup.test.ts @@ -0,0 +1,149 @@ +import { call, calls } from 'tests/vitest/utils.helper'; + +const { + mockApp, + mockGetIsLoggedIn, + mockGetOrCreateWidged, + mockSetBoundsOfWidgetByPath, + mockToggleWidgetVisibility, + mockShowAuthWindow, + mockGetAuthWindow, + trayMenuInstance, + TrayMenuMock, +} = vi.hoisted(() => { + const mockApp = { + isPackaged: false, + quit: vi.fn(), + }; + + const mockGetIsLoggedIn = vi.fn(); + const mockGetOrCreateWidged = vi.fn(); + const mockSetBoundsOfWidgetByPath = vi.fn(); + const mockToggleWidgetVisibility = vi.fn(); + const mockShowAuthWindow = vi.fn(); + const mockGetAuthWindow = vi.fn(() => ({ show: mockShowAuthWindow })); + + const trayMenuInstance = { + setState: vi.fn(), + bounds: { x: 1, y: 2, width: 3, height: 4 }, + }; + + const TrayMenuMock = vi.fn(() => trayMenuInstance); + + return { + mockApp, + mockGetIsLoggedIn, + mockGetOrCreateWidged, + mockSetBoundsOfWidgetByPath, + mockToggleWidgetVisibility, + mockShowAuthWindow, + mockGetAuthWindow, + trayMenuInstance, + TrayMenuMock, + }; +}); + +vi.mock('./tray-menu', () => ({ + TrayMenu: TrayMenuMock, +})); + +vi.mock('../windows/widget', () => ({ + getOrCreateWidged: mockGetOrCreateWidged, + setBoundsOfWidgetByPath: mockSetBoundsOfWidgetByPath, + toggleWidgetVisibility: mockToggleWidgetVisibility, +})); + +vi.mock('../auth/handlers', () => ({ + getIsLoggedIn: mockGetIsLoggedIn, +})); + +vi.mock('../windows/auth', () => ({ + getAuthWindow: mockGetAuthWindow, +})); + +describe('tray-setup', () => { + beforeEach(() => { + vi.resetModules(); + mockApp.isPackaged = false; + mockGetAuthWindow.mockReturnValue({ show: mockShowAuthWindow }); + }); + + async function importTraySetup() { + return import('./tray-setup'); + } + + function getTrayClickHandler() { + const firstCall = TrayMenuMock.mock.calls[0] as unknown[] | undefined; + + if (!firstCall) { + throw new Error('TrayMenu was not created'); + } + + const onClick = firstCall[1]; + + if (typeof onClick !== 'function') { + throw new Error('TrayMenu onClick handler was not registered'); + } + + return onClick as () => Promise; + } + + it('should create the tray only once', async () => { + // Given + const traySetup = await importTraySetup(); + + // When + const firstTray = traySetup.setupTrayIcon(); + const secondTray = traySetup.setupTrayIcon(); + + // Then + calls(TrayMenuMock).toHaveLength(1); + expect(secondTray).toBe(firstTray); + expect(traySetup.getTray()).toBe(firstTray); + }); + + it('should update tray status through the singleton instance', async () => { + // Given + const traySetup = await importTraySetup(); + traySetup.setupTrayIcon(); + + // When + traySetup.setTrayStatus('SYNCING'); + + // Then + call(trayMenuInstance.setState).toBe('SYNCING'); + }); + + it('should show auth window when clicking the tray while logged out', async () => { + // Given + mockGetIsLoggedIn.mockReturnValue(false); + const traySetup = await importTraySetup(); + traySetup.setupTrayIcon(); + const onClick = getTrayClickHandler(); + + // When + await onClick(); + + // Then + calls(mockShowAuthWindow).toHaveLength(1); + calls(mockGetOrCreateWidged).toHaveLength(0); + calls(mockToggleWidgetVisibility).toHaveLength(0); + }); + + it('should align and toggle the widget when clicking the tray while logged in', async () => { + // Given + const widgetWindow = { id: 'widget' }; + mockGetIsLoggedIn.mockReturnValue(true); + mockGetOrCreateWidged.mockResolvedValue(widgetWindow); + const traySetup = await importTraySetup(); + const tray = traySetup.setupTrayIcon(); + const onClick = getTrayClickHandler(); + + // When + await onClick(); + + // Then + call(mockSetBoundsOfWidgetByPath).toStrictEqual([widgetWindow, tray]); + calls(mockToggleWidgetVisibility).toHaveLength(1); + }); +}); diff --git a/src/apps/main/tray/tray-setup.ts b/src/apps/main/tray/tray-setup.ts new file mode 100644 index 0000000000..c9df6e7077 --- /dev/null +++ b/src/apps/main/tray/tray-setup.ts @@ -0,0 +1,47 @@ +import { app } from 'electron'; +import path from 'node:path'; +import { TrayMenu } from './tray-menu'; +import { getOrCreateWidged, setBoundsOfWidgetByPath, toggleWidgetVisibility } from '../windows/widget'; +import { getIsLoggedIn } from '../auth/handlers'; +import { getAuthWindow } from '../windows/auth'; +import { TrayMenuState } from './types'; +import { PATHS } from '../../../core/electron/paths'; + +let tray: TrayMenu | null = null; + +export function getTray() { + return tray; +} + +export function setTrayStatus(status: TrayMenuState) { + tray?.setState(status); +} + +async function onTrayClick() { + const isLoggedIn = getIsLoggedIn(); + if (!isLoggedIn) { + getAuthWindow()?.show(); + return; + } + + const widgetWindow = await getOrCreateWidged(); + if (tray && widgetWindow) { + setBoundsOfWidgetByPath(widgetWindow, tray); + } + + if (widgetWindow) toggleWidgetVisibility(); +} + +async function onQuitClick() { + app.quit(); +} + +export function setupTrayIcon() { + if (tray) return tray; + + const iconsPath = path.join(PATHS.RESOURCES_PATH, 'tray'); + + tray = new TrayMenu(iconsPath, onTrayClick, onQuitClick); + + return tray; +} diff --git a/src/apps/main/tray/tray.ts b/src/apps/main/tray/tray.ts deleted file mode 100644 index 3492abebd1..0000000000 --- a/src/apps/main/tray/tray.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { app, Menu, nativeImage, Tray } from 'electron'; -import path from 'path'; -import PackageJson from '../../../../package.json'; -import eventBus from '../event-bus'; -import { getOrCreateWidged, setBoundsOfWidgetByPath, toggleWidgetVisibility } from '../windows/widget'; -import { getIsLoggedIn } from '../auth/handlers'; -import { getAuthWindow } from '../windows/auth'; -import { logger } from '@internxt/drive-desktop-core/build/backend'; - -type TrayMenuState = 'IDLE' | 'SYNCING' | 'ALERT' | 'LOADING'; - -export class TrayMenu { - private tray: Tray; - - get bounds() { - return this.tray.getBounds(); - } - - constructor( - private readonly iconsPath: string, - private readonly onClick: () => Promise, - private readonly onQuit: () => void, - ) { - const trayIcon = this.getIconPath('LOADING'); - - this.tray = new Tray(trayIcon); - - this.setState('LOADING'); - - this.tray.setIgnoreDoubleClickEvents(true); - - this.tray.on('click', async () => { - await this.onClick(); - this.tray.setContextMenu(null); - }); - if (process.platform !== 'linux') { - this.tray.on('right-click', () => { - this.updateContextMenu(); - this.tray.popUpContextMenu(); - }); - } - } - - getIconPath(state: TrayMenuState) { - const isDarwin = process.platform === 'darwin'; - const templatePart = isDarwin ? 'Template' : ''; - - return path.join(this.iconsPath, `${state.toLowerCase()}${templatePart}.png`); - } - - generateContextMenu() { - const contextMenuTemplate: Electron.MenuItemConstructorOptions[] = []; - contextMenuTemplate.push( - { - label: 'Show/Hide', - click: () => { - this.onClick(); - }, - }, - { - label: 'Quit', - click: this.onQuit, - }, - ); - - return Menu.buildFromTemplate(contextMenuTemplate); - } - - updateContextMenu() { - const ctxMenu = this.generateContextMenu(); - this.tray.setContextMenu(ctxMenu); - } - - setState(state: TrayMenuState) { - const iconPath = this.getIconPath(state); - this.setImage(iconPath); - - this.setTooltip(state); - } - - setImage(imagePath: string) { - const image = nativeImage.createFromPath(imagePath); - this.tray.setImage(image); - } - - setTooltip(state: TrayMenuState) { - const messages: Record = { - SYNCING: 'Sync in process', - IDLE: `Internxt ${PackageJson.version}`, - ALERT: 'There are some issues with your sync', - LOADING: 'Loading Internxt...', - }; - - const message = messages[state]; - this.tray.setToolTip(message); - } - - destroy() { - if (this.tray) { - this.tray.destroy(); - } - } -} - -let tray: TrayMenu | null = null; -export const getTray = () => tray; - -export const setTrayStatus = (status: TrayMenuState) => { - tray?.setState(status); -}; - -export function setupTrayIcon() { - const RESOURCES_PATH = app.isPackaged - ? path.join(process.resourcesPath, 'assets') - : path.join(__dirname, '../../../../assets'); - - const iconsPath = path.join(RESOURCES_PATH, 'tray'); - - async function onTrayClick() { - const isLoggedIn = getIsLoggedIn(); - if (!isLoggedIn) { - getAuthWindow()?.show(); - return; - } - - const widgetWindow = await getOrCreateWidged(); - if (tray && widgetWindow) { - setBoundsOfWidgetByPath(widgetWindow, tray); - } - - if (widgetWindow) { - toggleWidgetVisibility(); - } else { - logger.warn({ msg: '[ON TRAY ICON CLICK] window is undefined' }); - } - } - - async function onQuitClick() { - app.quit(); - } - - tray = new TrayMenu(iconsPath, onTrayClick, onQuitClick); -} - -eventBus.on('APP_IS_READY', setupTrayIcon); diff --git a/src/apps/main/tray/types.ts b/src/apps/main/tray/types.ts new file mode 100644 index 0000000000..5055a55e20 --- /dev/null +++ b/src/apps/main/tray/types.ts @@ -0,0 +1 @@ +export type TrayMenuState = 'IDLE' | 'SYNCING' | 'ALERT' | 'LOADING'; diff --git a/src/apps/main/virtual-drive.ts b/src/apps/main/virtual-drive.ts deleted file mode 100644 index 830e8ce201..0000000000 --- a/src/apps/main/virtual-drive.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { ipcMain } from 'electron'; -import { getFuseDriveState, startVirtualDrive, updateFuseApp, stopHydrationApi } from '../drive'; -import eventBus from './event-bus'; - -eventBus.on('USER_LOGGED_OUT', stopHydrationApi); -eventBus.on('INITIAL_SYNC_READY', startVirtualDrive); -eventBus.on('REMOTE_CHANGES_SYNCHED', updateFuseApp); - -ipcMain.handle('get-virtual-drive-status', () => { - return getFuseDriveState(); -}); diff --git a/src/apps/main/virtual-root-folder/service.ts b/src/apps/main/virtual-root-folder/service.ts index e90086038a..4f31ed8242 100644 --- a/src/apps/main/virtual-root-folder/service.ts +++ b/src/apps/main/virtual-root-folder/service.ts @@ -9,16 +9,6 @@ import { PATHS } from '../../../core/electron/paths'; const VIRTUAL_DRIVE_FOLDER = PATHS.ROOT_DRIVE_FOLDER; -async function existsFolder(pathname: string): Promise { - try { - await fs.access(pathname); - - return true; - } catch { - return false; - } -} - export async function clearDirectory(pathname: string): Promise { try { await fs.rm(pathname, { recursive: true }); @@ -30,13 +20,7 @@ export async function clearDirectory(pathname: string): Promise { } } -async function isEmptyFolder(pathname: string): Promise { - const filesInFolder = await fs.readdir(pathname); - - return filesInFolder.length === 0; -} - -function setSyncRoot(pathname: string): void { +export function setupRootFolder(pathname: string): void { const pathNameWithSepInTheEnd = pathname[pathname.length - 1] === path.sep ? pathname : pathname + path.sep; configStore.set('syncRoot', pathNameWithSepInTheEnd); configStore.set('lastSavedListing', ''); @@ -47,23 +31,18 @@ export function getRootVirtualDrive(): string { ensureFolderExists(current); if (current !== VIRTUAL_DRIVE_FOLDER) { - setupRootFolder(); + setupRootFolder(VIRTUAL_DRIVE_FOLDER); } return configStore.get('syncRoot'); } -export async function setupRootFolder(n = 0): Promise { - setSyncRoot(VIRTUAL_DRIVE_FOLDER); - return; -} - export async function chooseSyncRootWithDialog(): Promise { const result = await dialog.showOpenDialog({ properties: ['openDirectory'] }); if (!result.canceled) { const chosenPath = result.filePaths[0]; - setSyncRoot(chosenPath); + setupRootFolder(chosenPath); eventBus.emit('SYNC_ROOT_CHANGED', chosenPath); return chosenPath; diff --git a/src/apps/main/windows/auth.ts b/src/apps/main/windows/auth.ts index e4b119adc4..72cedc918f 100644 --- a/src/apps/main/windows/auth.ts +++ b/src/apps/main/windows/auth.ts @@ -1,4 +1,4 @@ -import { BrowserWindow } from 'electron'; +import { app, BrowserWindow } from 'electron'; import { preloadPath, resolveHtmlPath } from '../util'; import { setUpCommonWindowHandlers } from '.'; @@ -27,8 +27,7 @@ export const createAuthWindow = async () => { nodeIntegration: true, devTools: isDev(), }, - titleBarStyle: process.platform === 'darwin' ? 'hidden' : undefined, - frame: process.platform !== 'darwin' ? false : undefined, + frame: false, resizable: false, maximizable: false, skipTaskbar: true, @@ -42,6 +41,7 @@ export const createAuthWindow = async () => { authWindow.on('closed', () => { authWindow = null; + if (!getIsLoggedIn()) app.quit(); }); authWindow.on('blur', () => { const isLoggedIn = getIsLoggedIn(); diff --git a/src/apps/main/windows/index.ts b/src/apps/main/windows/index.ts index 7b507e92d7..0e98a75000 100644 --- a/src/apps/main/windows/index.ts +++ b/src/apps/main/windows/index.ts @@ -16,7 +16,7 @@ function closeAuxWindows() { eventBus.on('USER_LOGGED_OUT', closeAuxWindows); -export function broadcastToWindows(eventName: string, data: any) { +export function broadcastToWindows(eventName: string, data: unknown) { const renderers = [getWidget(), getProcessIssuesWindow(), getSettingsWindow(), getOnboardingWindow()]; renderers.forEach((r) => r?.webContents.send(eventName, data)); diff --git a/src/apps/main/windows/onboarding.ts b/src/apps/main/windows/onboarding.ts index dd678b0b03..b3f1d9a57c 100644 --- a/src/apps/main/windows/onboarding.ts +++ b/src/apps/main/windows/onboarding.ts @@ -8,8 +8,6 @@ import isDev from '../../../core/isDev/isDev'; let onboardingWindow: BrowserWindow | null = null; export const getOnboardingWindow = () => (onboardingWindow?.isDestroyed() ? null : onboardingWindow); -ipcMain.on('open-onboarding-window', () => openOnboardingWindow()); - export const openOnboardingWindow = () => { if (onboardingWindow) { onboardingWindow.focus(); @@ -26,8 +24,7 @@ export const openOnboardingWindow = () => { nodeIntegration: true, devTools: isDev(), }, - titleBarStyle: process.platform === 'darwin' ? 'hidden' : undefined, - frame: process.platform !== 'darwin' ? false : undefined, + frame: false, resizable: false, maximizable: false, }); @@ -45,3 +42,5 @@ export const openOnboardingWindow = () => { setUpCommonWindowHandlers(onboardingWindow); }; + +ipcMain.on('open-onboarding-window', () => openOnboardingWindow()); diff --git a/src/apps/main/windows/process-issues.ts b/src/apps/main/windows/process-issues.ts index f6262ded9d..d3655d2ed8 100644 --- a/src/apps/main/windows/process-issues.ts +++ b/src/apps/main/windows/process-issues.ts @@ -7,13 +7,6 @@ import isDev from '../../../core/isDev/isDev'; let processIssuesWindow: BrowserWindow | null = null; export const getProcessIssuesWindow = () => (processIssuesWindow?.isDestroyed() ? null : processIssuesWindow); -ipcMain.on('open-process-issues-window', openProcessIssuesWindow); -ipcMain.handle('open-process-issues-window', async () => { - await openProcessIssuesWindow(); - - return true; -}); - async function openProcessIssuesWindow() { if (processIssuesWindow) { processIssuesWindow.focus(); @@ -30,8 +23,7 @@ async function openProcessIssuesWindow() { nodeIntegration: true, devTools: isDev(), }, - titleBarStyle: process.platform === 'darwin' ? 'hidden' : undefined, - frame: process.platform !== 'darwin' ? false : undefined, + frame: false, resizable: false, maximizable: false, }); @@ -48,3 +40,10 @@ async function openProcessIssuesWindow() { setUpCommonWindowHandlers(processIssuesWindow); } + +ipcMain.on('open-process-issues-window', openProcessIssuesWindow); +ipcMain.handle('open-process-issues-window', async () => { + await openProcessIssuesWindow(); + + return true; +}); diff --git a/src/apps/main/windows/settings.ts b/src/apps/main/windows/settings.ts index 36178bf562..5a7f4ce238 100644 --- a/src/apps/main/windows/settings.ts +++ b/src/apps/main/windows/settings.ts @@ -9,8 +9,6 @@ import isDev from '../../../core/isDev/isDev'; let settingsWindow: BrowserWindow | null = null; export const getSettingsWindow = () => (settingsWindow?.isDestroyed() ? null : settingsWindow); -ipcMain.on('open-settings-window', (_, section) => openSettingsWindow(section)); - async function openSettingsWindow(section?: string) { if (settingsWindow) { settingsWindow.focus(); @@ -35,8 +33,7 @@ async function openSettingsWindow(section?: string) { nodeIntegration: true, devTools: isDev(), }, - titleBarStyle: process.platform === 'darwin' ? 'hidden' : undefined, - frame: process.platform !== 'darwin' ? false : undefined, + frame: false, resizable: false, maximizable: false, }); @@ -55,6 +52,8 @@ async function openSettingsWindow(section?: string) { setUpCommonWindowHandlers(settingsWindow); } +ipcMain.on('open-settings-window', (_, section) => openSettingsWindow(section)); + ipcMain.on('settings-window-resized', (_, { height }: { width: number; height: number }) => { if (settingsWindow) { // Not truncating the height makes this function throw diff --git a/src/apps/main/windows/widget.ts b/src/apps/main/windows/widget.ts index 5a63e13d20..c922419e3e 100644 --- a/src/apps/main/windows/widget.ts +++ b/src/apps/main/windows/widget.ts @@ -1,11 +1,12 @@ import { BrowserWindow, ipcMain, screen } from 'electron'; import eventBus from '../event-bus'; -import { TrayMenu } from '../tray/tray'; +import { TrayMenu } from '../tray/tray-menu'; import { preloadPath, resolveHtmlPath } from '../util'; import { setUpCommonWindowHandlers } from '.'; import { getIsLoggedIn } from '../auth/handlers'; import isDev from '../../../core/isDev/isDev'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; const widgetConfig: { width: number; height: number; placeUnderTray: boolean } = { width: 330, @@ -66,7 +67,7 @@ export const createWidget = async () => { widget.webContents.on('ipc-message', (_, channel, payload) => { // Current widget pathname if (channel === 'path-changed') { - console.log('Renderer navigated to ', payload); + logger.error({ msg: '[RENDERER] Renderer navigated', payload }); } }); diff --git a/src/apps/renderer/assets/icons/getIcon.tsx b/src/apps/renderer/assets/icons/getIcon.tsx index ee54ac7a4f..b4d186d609 100644 --- a/src/apps/renderer/assets/icons/getIcon.tsx +++ b/src/apps/renderer/assets/icons/getIcon.tsx @@ -20,12 +20,10 @@ import Zip from './zip.svg'; interface iconLibrary { id: string; - icon: any; + icon: JSX.Element; extensions: string[]; } -// const getSVG = (svg: any) => svg as React.SVGAttributes; - const file_type: iconLibrary[] = [ { id: 'audio', diff --git a/src/apps/renderer/components/Backups/BackupsFoldersSelector.tsx b/src/apps/renderer/components/Backups/BackupsFoldersSelector.tsx index 7ec8d43155..f93c3ab404 100644 --- a/src/apps/renderer/components/Backups/BackupsFoldersSelector.tsx +++ b/src/apps/renderer/components/Backups/BackupsFoldersSelector.tsx @@ -41,8 +41,10 @@ export const BackupsFoldersSelector: React.FC = (pr const folder = await window.electron.getFolderPath(); if (!folder?.path) { - // eslint-disable-next-line no-console - return console.warn('No folder selected by the user'); + return window.electron.logger.warn({ + tag: 'BACKUPS', + msg: '[RENDERER] No folder selected by the user', + }); } const match = backupFolders.find((backupFolder) => backupFolder.path === folder.path); @@ -53,7 +55,11 @@ export const BackupsFoldersSelector: React.FC = (pr setBackupFolders(backupFolders.concat(folder)); } catch (error) { - reportError(error); + window.electron.logger.error({ + tag: 'BACKUPS', + msg: '[RENDERER] Failed to add backup folder', + error, + }); } finally { setIsLoading(false); } @@ -116,7 +122,7 @@ export const BackupsFoldersSelector: React.FC = (pr

{translate('settings.backups.title')}

- {translate('settings.backups.selected-folders', { + {translate('settings.backups.selected-folder', { count: backupFolders.length, })}

@@ -154,7 +160,7 @@ export const BackupsFoldersSelector: React.FC = (pr
diff --git a/src/apps/renderer/components/WindowTopBar.tsx b/src/apps/renderer/components/WindowTopBar.tsx index 72b6e6c9d8..c18d78c075 100644 --- a/src/apps/renderer/components/WindowTopBar.tsx +++ b/src/apps/renderer/components/WindowTopBar.tsx @@ -2,19 +2,14 @@ import { X } from '@phosphor-icons/react'; export default function WindowTopBar({ title, className }: { title: string; className?: string }) { return ( -
- {process.env.platform !== 'darwin' && ( -
- -
- )} +
+
+ +

diff --git a/src/apps/renderer/context/CleanerContext.tsx b/src/apps/renderer/context/CleanerContext.tsx index 4bb7475d86..bd01ea78fb 100644 --- a/src/apps/renderer/context/CleanerContext.tsx +++ b/src/apps/renderer/context/CleanerContext.tsx @@ -70,7 +70,10 @@ export function CleanerProvider({ children }: { children: ReactNode }) { try { window.electron.cleaner.startCleanup(viewModel); } catch (error) { - console.error('Failed to start cleanup:', error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to start cleanup', + error, + }); } }; @@ -78,7 +81,10 @@ export function CleanerProvider({ children }: { children: ReactNode }) { try { window.electron.cleaner.stopCleanup(); } catch (error) { - console.error('Failed to stop cleanup:', error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to stop cleanup', + error, + }); } }; diff --git a/src/apps/renderer/context/DeviceContext.tsx b/src/apps/renderer/context/DeviceContext.tsx index a354355a4f..dc7a1e3aa9 100644 --- a/src/apps/renderer/context/DeviceContext.tsx +++ b/src/apps/renderer/context/DeviceContext.tsx @@ -1,5 +1,5 @@ import { createContext, Dispatch, ReactNode, SetStateAction, useEffect, useState } from 'react'; -import { Device } from '../../main/device/service'; +import { Device } from '../../../backend/features/backup/types/Device'; import { useDevices } from '../hooks/devices/useDevices'; export type DeviceState = { status: 'LOADING' | 'ERROR' } | { status: 'SUCCESS'; device: Device }; @@ -25,14 +25,15 @@ export function DeviceProvider({ children }: { children: ReactNode }) { const [selected, setSelected] = useState(); const { devices, getDevices } = useDevices(); - useEffect(() => { - refreshDevice(); - - const removeDeviceCreatedListener = window.electron.onDeviceCreated(setCurrentDevice); - return () => { - removeDeviceCreatedListener(); - }; - }, []); + const setCurrentDevice = (newDevice: Device) => { + try { + setDeviceState({ status: 'SUCCESS', device: newDevice }); + setCurrent(newDevice); + setSelected(newDevice); + } catch { + setDeviceState({ status: 'ERROR' }); + } + }; const refreshDevice = () => { setDeviceState({ status: 'LOADING' }); @@ -45,15 +46,14 @@ export function DeviceProvider({ children }: { children: ReactNode }) { }); }; - const setCurrentDevice = (newDevice: Device) => { - try { - setDeviceState({ status: 'SUCCESS', device: newDevice }); - setCurrent(newDevice); - setSelected(newDevice); - } catch { - setDeviceState({ status: 'ERROR' }); - } - }; + useEffect(() => { + refreshDevice(); + + const removeDeviceCreatedListener = window.electron.onDeviceCreated(setCurrentDevice); + return () => { + removeDeviceCreatedListener(); + }; + }, []); const deviceRename = async (deviceName: string) => { setDeviceState({ status: 'LOADING' }); @@ -64,7 +64,10 @@ export function DeviceProvider({ children }: { children: ReactNode }) { setCurrent(updatedDevice); setSelected(updatedDevice); } catch (err) { - console.log(err); + window.electron.logger.error({ + msg: '[RENDERER] Failed to rename device', + error: err, + }); setDeviceState({ status: 'ERROR' }); } }; diff --git a/src/apps/renderer/hooks/ClientPlatform.tsx b/src/apps/renderer/hooks/ClientPlatform.tsx deleted file mode 100644 index 1f7e7336c2..0000000000 --- a/src/apps/renderer/hooks/ClientPlatform.tsx +++ /dev/null @@ -1,13 +0,0 @@ -import { useEffect, useState } from 'react'; - -import { DesktopPlatform } from '../../main/platform/DesktopPlatform'; - -export default function useClientPlatform(): DesktopPlatform | undefined { - const [clientPlatform, setPlatform] = useState(); - - useEffect(() => { - window.electron.getPlatform().then(setPlatform); - }, []); - - return clientPlatform; -} diff --git a/src/apps/renderer/hooks/antivirus/useAntivirus.tsx b/src/apps/renderer/hooks/antivirus/useAntivirus.tsx index a60ff093ee..352ec1d30e 100644 --- a/src/apps/renderer/hooks/antivirus/useAntivirus.tsx +++ b/src/apps/renderer/hooks/antivirus/useAntivirus.tsx @@ -38,6 +38,41 @@ export const useAntivirus = (): AntivirusContext => { const [showErrorState, setShowErrorState] = useState(false); const [view, setView] = useState('loading'); + const handleProgress = (progress: { + scanId?: string; + currentScanPath?: string; + infectedFiles?: string[]; + progress?: number; + totalScannedFiles?: number; + done?: boolean; + }) => { + if (!progress) return; + + if (progress.currentScanPath) { + setCurrentScanPath(progress.currentScanPath); + } + + if (typeof progress.totalScannedFiles === 'number') { + setCountScannedFiles(progress.totalScannedFiles); + } + + if (typeof progress.progress === 'number') { + setProgressRatio(progress.progress); + } + + if (Array.isArray(progress.infectedFiles) && progress.infectedFiles.length > 0) { + setInfectedFiles(progress.infectedFiles); + } + + if (progress.done) { + setProgressRatio(100); + setTimeout(() => { + setIsScanning(false); + setIsScanCompleted(true); + }, 500); + } + }; + useEffect(() => { window.electron.antivirus.onScanProgress(handleProgress); return () => { @@ -103,41 +138,6 @@ export const useAntivirus = (): AntivirusContext => { } }; - const handleProgress = (progress: { - scanId?: string; - currentScanPath?: string; - infectedFiles?: string[]; - progress?: number; - totalScannedFiles?: number; - done?: boolean; - }) => { - if (!progress) return; - - if (progress.currentScanPath) { - setCurrentScanPath(progress.currentScanPath); - } - - if (typeof progress.totalScannedFiles === 'number') { - setCountScannedFiles(progress.totalScannedFiles); - } - - if (typeof progress.progress === 'number') { - setProgressRatio(progress.progress); - } - - if (Array.isArray(progress.infectedFiles) && progress.infectedFiles.length > 0) { - setInfectedFiles(progress.infectedFiles); - } - - if (progress.done) { - setProgressRatio(100); - setTimeout(() => { - setIsScanning(false); - setIsScanCompleted(true); - }, 500); - } - }; - const resetStates = () => { setCurrentScanPath(''); setCountScannedFiles(0); @@ -192,9 +192,9 @@ export const useAntivirus = (): AntivirusContext => { const isDirectory = scanType === 'folders' || !seemsLikeFile; return { - path: path, + path, itemName: cleanPath.split('/').pop() || cleanPath, - isDirectory: isDirectory, + isDirectory, }; } return item; diff --git a/src/apps/renderer/hooks/backups/useBackupFatalIssue.tsx b/src/apps/renderer/hooks/backups/useBackupFatalIssue.tsx index c0b27f288b..f0630ce6b7 100644 --- a/src/apps/renderer/hooks/backups/useBackupFatalIssue.tsx +++ b/src/apps/renderer/hooks/backups/useBackupFatalIssue.tsx @@ -4,6 +4,38 @@ import { BackupInfo } from '../../../backups/BackupInfo'; import { useTranslationContext } from '../../context/LocalContext'; import { shortMessages } from '../../messages/virtual-drive-error'; +type Action = { + name: string; + fn: undefined | ((backup: BackupInfo) => Promise); +}; + +type BackupErrorActionMap = Record; + +export const backupsErrorActions: BackupErrorActionMap = { + BASE_DIRECTORY_DOES_NOT_EXIST: { + name: 'issues.actions.find-folder', + fn: findBackupFolder, + }, + NOT_EXISTS: undefined, + NO_INTERNET: undefined, + NO_REMOTE_CONNECTION: undefined, + BAD_RESPONSE: undefined, + EMPTY_FILE: undefined, + FILE_TOO_BIG: undefined, + FILE_NON_EXTENSION: undefined, + UNKNOWN: undefined, + DUPLICATED_NODE: undefined, + ACTION_NOT_PERMITTED: undefined, + FILE_ALREADY_EXISTS: undefined, + COULD_NOT_ENCRYPT_NAME: undefined, + BAD_REQUEST: undefined, + INSUFFICIENT_PERMISSION: undefined, + NOT_ENOUGH_SPACE: undefined, + ABORTED: undefined, + RATE_LIMITED: undefined, + INTERNAL_SERVER_ERROR: undefined, +}; + type FixAction = { name: string; fn: () => Promise; @@ -48,35 +80,9 @@ export function useBackupFatalIssue(backup: BackupInfo) { } async function findBackupFolder(backup: BackupInfo) { - const result = await window.electron.changeBackupPath(backup.pathname); - if (result) window.electron.startBackupsProcess(); -} + const chosen = await window.electron.getFolderPath(); + if (!chosen) return; -type Action = { - name: string; - fn: undefined | ((backup: BackupInfo) => Promise); -}; - -type BackupErrorActionMap = Record; - -export const backupsErrorActions: BackupErrorActionMap = { - BASE_DIRECTORY_DOES_NOT_EXIST: { - name: 'issues.actions.find-folder', - fn: findBackupFolder, - }, - NOT_EXISTS: undefined, - NO_INTERNET: undefined, - NO_REMOTE_CONNECTION: undefined, - BAD_RESPONSE: undefined, - EMPTY_FILE: undefined, - FILE_TOO_BIG: undefined, - FILE_NON_EXTENSION: undefined, - UNKNOWN: undefined, - DUPLICATED_NODE: undefined, - ACTION_NOT_PERMITTED: undefined, - FILE_ALREADY_EXISTS: undefined, - COULD_NOT_ENCRYPT_NAME: undefined, - BAD_REQUEST: undefined, - INSUFFICIENT_PERMISSION: undefined, - NOT_ENOUGH_SPACE: undefined, -}; + const { data } = await window.electron.changeBackupPath({ currentPath: backup.pathname, newPath: chosen.path }); + if (data) window.electron.startBackupsProcess(); +} diff --git a/src/apps/renderer/hooks/backups/useBackups.tsx b/src/apps/renderer/hooks/backups/useBackups.tsx index 400cdf3cd8..9a169ec57c 100644 --- a/src/apps/renderer/hooks/backups/useBackups.tsx +++ b/src/apps/renderer/hooks/backups/useBackups.tsx @@ -1,7 +1,8 @@ import { useContext, useEffect, useState } from 'react'; import { BackupInfo } from '../../../backups/BackupInfo'; import { DeviceContext } from '../../context/DeviceContext'; -import { Device } from '../../../main/device/service'; +import { Device } from '../../../../backend/features/backup/types/Device'; +import { AbsolutePath } from '../../../../context/local/localFile/infrastructure/AbsolutePath'; export type BackupsState = 'LOADING' | 'ERROR' | 'SUCCESS'; @@ -11,7 +12,7 @@ export interface BackupContextProps { disableBackup: (backup: BackupInfo) => Promise; addBackup: () => Promise; deleteBackups: (device: Device, isCurrent?: boolean) => Promise; - downloadBackups: (device: Device) => Promise; + downloadBackups: (device: Device, pathName: AbsolutePath) => Promise; abortDownloadBackups: (device: Device) => void; hasExistingBackups: boolean; } @@ -56,8 +57,8 @@ export function useBackups(): BackupContextProps { }, [selected, devices]); async function addBackup() { - const newBackup = await window.electron.addBackup(); - if (!newBackup) return; + const { data: newBackup, error } = await window.electron.addBackup(); + if (error) return; setBackups((prevBackups) => { const existingIndex = prevBackups.findIndex((backup) => backup.folderId === newBackup.folderId); @@ -91,15 +92,13 @@ export function useBackups(): BackupContextProps { } } - async function downloadBackups(device: Device) { - try { - await window.electron.downloadBackup(device); - } catch (error) { - reportError(error); - } + async function downloadBackups(device: Device, pathName: AbsolutePath) { + if (!selected) return; + await window.electron.downloadBackup(device, pathName); } function abortDownloadBackups(device: Device) { + if (!selected) return; return window.electron.abortDownloadBackups(device.uuid); } diff --git a/src/apps/renderer/hooks/devices/useDevices.tsx b/src/apps/renderer/hooks/devices/useDevices.tsx index 958a0741f9..5b3de5d94b 100644 --- a/src/apps/renderer/hooks/devices/useDevices.tsx +++ b/src/apps/renderer/hooks/devices/useDevices.tsx @@ -1,5 +1,5 @@ import { useEffect, useState } from 'react'; -import { Device } from '../../../main/device/service'; +import { Device } from '../../../../backend/features/backup/types/Device'; export function useDevices() { const [devices, setDevices] = useState>([]); diff --git a/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.test.ts b/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.test.ts index 7b5970aa8a..1274ea6e54 100644 --- a/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.test.ts +++ b/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.test.ts @@ -10,6 +10,7 @@ describe('useUserAvailableProducts', () => { antivirus: false, cleaner: true, }; + const loggerErrorMock = vi.mocked(window.electron.logger.error); beforeEach(() => { vi.clearAllMocks(); @@ -82,17 +83,16 @@ describe('useUserAvailableProducts', () => { const error = new Error('Failed to fetch products'); vi.mocked(window.electron.userAvailableProducts.get).mockRejectedValue(error); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const { result } = renderHook(() => useUserAvailableProducts()); // Wait for the promise to reject and be handled await vi.waitFor(() => { - expect(consoleErrorSpy).toHaveBeenCalledWith('Failed to fetch user available products:', error); + expect(loggerErrorMock).toHaveBeenCalledWith({ + msg: '[RENDERER] Failed to fetch user available products', + error, + }); }); expect(result.current.products).toBeUndefined(); - - consoleErrorSpy.mockRestore(); }); }); diff --git a/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.ts b/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.ts index 57bd789962..58d659a167 100644 --- a/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.ts +++ b/src/apps/renderer/hooks/useUserAvailableProducts/useUserAvailableProducts.ts @@ -10,7 +10,10 @@ export function useUserAvailableProducts() { .get() .then(setProducts) .catch((error) => { - console.error('Failed to fetch user available products:', error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to fetch user available products', + error, + }); }); userAvailableProducts.subscribe(); diff --git a/src/apps/renderer/hooks/useVirtualDriveStatus.tsx b/src/apps/renderer/hooks/useVirtualDriveStatus.tsx index caa4e37efb..47f266bc8e 100644 --- a/src/apps/renderer/hooks/useVirtualDriveStatus.tsx +++ b/src/apps/renderer/hooks/useVirtualDriveStatus.tsx @@ -9,13 +9,19 @@ export default function useVirtualDriveStatus() { .getVirtualDriveStatus() .then((status: FuseDriveStatus) => setVirtualDriveStatus(status)) .catch((err) => { - reportError(err); + window.electron.logger.error({ + msg: '[RENDERER] Failed to fetch virtual drive status', + error: err, + }); }); }, []); useEffect(() => { const removeListener = window.electron.onVirtualDriveStatusChange((status) => { - console.debug('status changed'); + window.electron.logger.debug({ + msg: '[RENDERER] Virtual drive status changed', + status, + }); setVirtualDriveStatus(status.status); }); diff --git a/src/apps/renderer/localize/locales/en.json b/src/apps/renderer/localize/locales/en.json index f98187a7a9..ec57078c4e 100644 --- a/src/apps/renderer/localize/locales/en.json +++ b/src/apps/renderer/localize/locales/en.json @@ -1,35 +1,11 @@ { "login": { - "email": { - "section": "Email address" - }, - "password": { - "section": "Password", - "placeholder": "Password", - "forgotten": "Forgot your password?", - "hide": "Hide", - "show": "Show" - }, "action": { - "login": "Log in", - "is-logging-in": "Logging you in...", "login-in-browser": "Log in with browser" }, "create-account": "Create account", "welcome": "Welcome to Internxt", - "no-account": "Don't have an account?", - "2fa": { - "section": "Authentication code", - "description": "You have configured two factor authentication, please enter the 6 digit code", - "change-account": "Change account", - "wrong-code": "Incorrect code, try again" - }, - "error": { - "empty-fields": "Incorrect password or email" - }, - "warning": { - "no-internet": "No internet connection" - } + "no-account": "Don't have an account?" }, "onboarding": { "slides": { @@ -68,48 +44,7 @@ "skip": "Skip", "open-drive": "Open Internxt Drive", "new": "New", - "platform-phrase": { - "windows": "file explorer", - "linux": "file browser", - "macos": "Finder" - } - } - }, - "migration": { - "slides": { - "welcome": { - "title": "Internxt’s new desktop app is ready to go!", - "features": { - "title": "Fresh updates:", - "feature-1": "Select what you want to download and save hard drive space.", - "feature-2": "Native OS look and feel for managing your files and folders." - } - }, - "migration": { - "title": "Let's make sure all your files are safe", - "in-progress": "Uploading pending files", - "item-progress": "{{processed_items}} of {{total_items}} items uploaded" - }, - "migration-failed": { - "title": "Let's make sure all your files are safe", - "message": "Some files could not be uploaded", - "description": "We’ve moved these files to your desktop, drag and drop them to your Internxt Drive", - "show-files": "Show files" - }, - "delete-old-drive-folder": { - "title": "Same Internxt Drive, new location", - "message": "Your personal Internxt Drive folder is located in your {{platform_app}} sidebar" - }, - "new-widget": { - "title": "Be more productive with our redesigned widget", - "message": "We've reimagined and rebuilt our widget to reduce clutter, add convenience, and boost speed.", - "message-2": "All changes now update in real time." - } - }, - "common": { - "continue": "Continue", - "cancel": "Cancel", - "open-drive": "Open Internxt Drive" + "platform-phrase": "file explorer" } }, "widget": { @@ -121,11 +56,9 @@ "dropdown": { "preferences": "Preferences", "issues": "Issues", - "send-feedback": "Send feedback", "support": "Support", "logout": "Log out", "quit": "Quit", - "antivirus": "Antivirus", "cleaner": "Cleaner", "new": "New", "sync": "Sync" @@ -146,22 +79,12 @@ "renamed": "Renamed" } }, - "no-activity": { - "title": "There is no recent activity", - "description": "Information will show up here when changes are made to sync your local folder with Internxt Drive" - }, "upToDate": { "title": "Your files are up to date", "subtitle": "Sync activity will show up here" }, "errors": { - "sync": {}, - "backups": { - "folder-not-found": { - "text": "Can't upload backup, missing folder", - "action": "View error" - } - } + "sync": {} } }, "footer": { @@ -171,7 +94,6 @@ "failed": "Sync failed" }, "errors": { - "lock": "Sync locked by other device", "offline": "Not connected to the internet" } }, @@ -182,9 +104,7 @@ }, "virtual-drive-error": { "title": "Can't mount your drive", - "message": "We are having issues mounting your Internxt Drive, try unmounting it manually and starting the app again", - "mounting": "Mounting...", - "button": "Mount" + "message": "We are having issues mounting your Internxt Drive, try unmounting it manually and starting the app again" }, "banners": { "update-available": { @@ -234,13 +154,8 @@ "dark": "Dark" } }, - "sync": { - "folder": "Internxt Drive Folder", - "change-folder": "Change folder" - }, "app-info": { "open-logs": "Open logs", - "open-migration": "Start migration", "more": "Learn more about Internxt" } }, @@ -250,17 +165,11 @@ "display": "Used {{used}} of {{total}}", "upgrade": "Upgrade", "change": "Change", - "plan": "Current plan", "free": "Free", "loadError": { "title": "Couldn't fetch your usage details", "action": "Retry" }, - "current": { - "used": "Used", - "of": "of", - "in-use": "in use" - }, "full": { "title": "Your storage is full", "subtitle": "You can't upload, sync, or backup files. Upgrade now your plan or remove files to save up space." @@ -278,20 +187,17 @@ "selected-folder_one": "{{count}} folder", "selected-folder_other": "{{count}} folders", "add-folders": "Click + to select the folders\n you want to back up", - "activate": "Back up your folders and files", "view-backups": "Browse files", "selected-folders-title": "Selected folders", "select-folders": "Change folders", "last-backup-had-issues": "Last backup had some issues", "see-issues": "See issues", - "backing-up": "Backing up...", "backups-help": "Backups Help", "this-device": "This device", "devices": "Devices", "action": { "start": "Backup now", "stop": "Stop backup", - "running": "Backup in progress {{progress}}", "last-run": "Last updated" }, "frequency": { @@ -334,12 +240,6 @@ "title": "Something went wrong while scanning the directory", "button": "Try again" }, - "deactivateAntivirus": { - "title": "Windows Defender is active", - "description": "Please disable Windows Defender to be able to use Internxt Antivirus. To do this, open Windows Security > Virus and Threat Protection > Manage settings > disable Real-time protection.", - "retry": "Retry", - "cancel": "Cancel" - }, "realtimeProtection": { "title": "Real-time protection", "infoAriaLabel": "About real-time protection", @@ -380,8 +280,7 @@ }, "securityWarning": { "title": "Security warning", - "description": "Malware is still present, and your device is at risk.", - "confirmToCancel": "Are you sure you want to cancel?" + "description": "Malware is still present, and your device is at risk." } } }, @@ -389,7 +288,6 @@ "scanning": "Scanning...", "scannedFiles": "Scanned files", "detectedFiles": "Detected files", - "errorWhileScanning": "An error occurred while scanning the items. Please try again.", "noFilesFound": { "title": "No threats were found", "subtitle": "No further actions are necessary" @@ -404,11 +302,7 @@ "filesContainingMalwareModal": { "title": "Files containing malware", "selectedItems": "Selected {{selectedFiles}} out of {{totalFiles}}", - "selectAll": "Select all", - "actions": { - "cancel": "Cancel", - "remove": "Remove" - } + "selectAll": "Select all" } }, "cleaner": { @@ -465,9 +359,7 @@ }, "no-issues": "No issues found", "actions": { - "select-folder": "Select folder", - "find-folder": "Locate folder", - "try-again": "Try again" + "find-folder": "Locate folder" }, "short-error-messages": { "unknown": "Unknown error", @@ -492,41 +384,9 @@ "insufficient-permission-accessing-base-directory": "Internxt App does not have permission to access your sync folder", "cannot-access-base-directory": "We could not access your local folder", "cannot-access-tmp-directory": "We could not access your local folder", - "unknown": "An unknown error ocurred while trying to sync your files", - "empty-file": "We don't support files with a size of 0 bytes because of our processes of sharding and encryption", - "bad-response": "We got a bad response from our servers while processing this file. Please, try starting the sync process again.", - "file-does-not-exist": "This file was present when we compared your local folder with your Internxt drive but disappeared when we tried to access it. If you deleted this file, don't worry, this error should dissapear the next time the sync process starts.", - "file-too-big": "Max upload size is 20GB. Please try smaller files.", - "file-non-extension": "Files without extensions are not supported. Not synchronized.", - "duplicated-node": "There are two elements (file or folder) with the same name on a folder. Rename one of them to sync them both", - "action-not-permitted": "The operation could not be completed, possibly due to a conflict with another file.", - "file-already-exists": "Unable to complete the operation. The file already exists on Internxt servers", - "not-enough-space": "You have not enough space to complete the operation" - }, - "report-modal": { - "actions": { - "close": "Close", - "cancel": "Cancel", - "report": "Report", - "send": "Send" - }, - "help-url": "To get help visit", - "report": "You can also send a report about this error.", - "user-comments": "Comments", - "include-logs": "Include the logs of this sync process for debug purposes" + "unknown": "An unknown error ocurred while trying to sync your files" } }, - "feedback": { - "window-title": "Internxt Desktop feedback", - "title": "Share feedback with Internxt", - "description": "Your feedback makes Internxt improve and helps us to create better product experiences", - "placeholder": "Let us know what's in your mind, what you'd like to improve or describe the bug or issue", - "characters-count": "{{character_count}}/{{character_limit}}", - "send-feedback": "Send feedback", - "sent-title": "Thank you for sharing your feedback", - "sent-message": "We really appreciate your time and effort to help us improve our services.", - "close": "Close" - }, "common": { "cancel": "Cancel" }, diff --git a/src/apps/renderer/localize/locales/es.json b/src/apps/renderer/localize/locales/es.json index 78daef474e..f6cb17abfe 100644 --- a/src/apps/renderer/localize/locales/es.json +++ b/src/apps/renderer/localize/locales/es.json @@ -1,35 +1,11 @@ { "login": { - "email": { - "section": "Correo electrónico" - }, - "password": { - "section": "Contraseña", - "placeholder": "Contraseña", - "forgotten": "¿Has olvidado tu contraseña?", - "hide": "Ocultar", - "show": "Mostrar" - }, "action": { - "login": "Iniciar sesión", - "is-logging-in": "Iniciando sesión...", "login-in-browser": "Iniciar sesión con el navegador" }, "create-account": "Crear cuenta", "welcome": "Bienvenido a Internxt", - "no-account": "¿No tienes cuenta?", - "2fa": { - "section": "Código de autenticación", - "description": "Has configurado la autenticación en dos pasos, por favor introduce el código de 6 dígitos", - "change-account": "Cambiar cuenta", - "wrong-code": "Código incorrecto, inténtalo de nuevo" - }, - "error": { - "empty-fields": "Contraseña o correo electrónico incorrectos" - }, - "warning": { - "no-internet": "Sin conexión a internet" - } + "no-account": "¿No tienes cuenta?" }, "onboarding": { "slides": { @@ -68,48 +44,7 @@ "open-drive": "Abrir Internxt Drive", "skip": "Saltar", "new": "Nuevo", - "platform-phrase": { - "windows": "explorador de archivos", - "linux": "buscador de archivos", - "macos": "Finder" - } - } - }, - "migration": { - "slides": { - "welcome": { - "title": "Nueva actualización de la aplicación de escritorio de Internxt!", - "features": { - "title": "Novedades en esta versión:", - "feature-1": "Selecciona lo que desees descargar y ahorra espacio en el disco duro.", - "feature-2": "Apariencia y sensación nativa del sistema operativo para gestionar tus archivos y carpetas." - } - }, - "migration": { - "title": "Nos aseguramos de que todos tus archivos están a salvo", - "in-progress": "Subiendo archivos pendientes", - "item-progress": "{{processed_items}} de {{total_items}} archivos subidos" - }, - "migration-failed": { - "title": "Nos aseguramos de que todos tus archivos están a salvo", - "message": "No se han podido cargar algunos archivos", - "description": "Hemos movido esos archivos a tu escritorio, arrástralos y suéltalos en tu Internxt Drive", - "show-files": "Mostrar archivos" - }, - "delete-old-drive-folder": { - "title": "Tu Internxt Drive de siempre, en una nueva ubicación", - "message": "Tu carpeta personal de Internxt Drive se encuentra en la barra lateral {{platform_app}}." - }, - "new-widget": { - "title": "Sé más productivo con nuestro widget rediseñado", - "message": "Hemos rediseñado y reconstruido nuestro widget para aumentar la productividad, la comodidad y la velocidad.", - "message-2": "Todos los cambios se actualizan en tiempo real." - } - }, - "common": { - "continue": "Continuar", - "cancel": "Cancelar", - "open-drive": "Abrir Internxt Drive" + "platform-phrase": "explorador de archivos" } }, "widget": { @@ -121,11 +56,9 @@ "dropdown": { "preferences": "Preferencias", "issues": "Lista de errores", - "send-feedback": "Enviar feedback", "support": "Ayuda", "logout": "Cerrar sesión", "quit": "Salir", - "antivirus": "Antivirus", "cleaner": "Cleaner", "new": "Nuevo", "sync": "Sincronizar" @@ -146,22 +79,12 @@ "renamed": "Renombrado" } }, - "no-activity": { - "title": "No hay actividad reciente", - "description": "La información aparecerá aquí cuando hagas cambios, para sincronizar tu carpeta local con Internxt Drive" - }, "upToDate": { "title": "Tus archivos están actualizados", "subtitle": "La actividad de sincronización se mostrará aquí" }, "errors": { - "sync": {}, - "backups": { - "folder-not-found": { - "text": "No se pudo realizar la copia, no se encuentra la carpeta", - "action": "Ver error" - } - } + "sync": {} } }, "footer": { @@ -171,7 +94,6 @@ "failed": "Sincronización fallida" }, "errors": { - "lock": "Sincronización bloqueada por otro dispositivo", "offline": "No hay conexión a internet" } }, @@ -182,9 +104,7 @@ }, "virtual-drive-error": { "title": "No se puede montar tu Drive", - "message": "Estamos teniendo problemas al montar tu unidad Internxt. Intenta desmontarla manualmente y luego reiniciar la aplicación.", - "mounting": "Montando..", - "button": "Montar" + "message": "Estamos teniendo problemas al montar tu unidad Internxt. Intenta desmontarla manualmente y luego reiniciar la aplicación." }, "banners": { "update-available": { @@ -234,13 +154,8 @@ "dark": "Oscuro" } }, - "sync": { - "folder": "Carpeta Internxt Drive", - "change-folder": "Cambiar carpeta" - }, "app-info": { "open-logs": "Abrir registros", - "open-migration": "Empezar migración", "more": "Más información sobre Internxt" } }, @@ -250,17 +165,11 @@ "display": "Usado {{used}} de {{total}}", "upgrade": "Comprar espacio", "change": "Cambiar", - "plan": "Plan actual", "free": "Gratis", "loadError": { "title": "No se han podido obtener tus datos de uso", "action": "Reintentar" }, - "current": { - "used": "usado", - "of": "de", - "in-use": "usado" - }, "full": { "title": "Tu almacenamiento está lleno", "subtitle": "No puedes subir, sincronizar ni hacer copias de seguridad de archivos. Amplía ahora tu plan o elimina archivos para ahorrar espacio." @@ -278,20 +187,17 @@ "add-folders": "Haz clic en + para hacer una copia de seguridad de tus carpetas", "selected-folder_one": "{{count}} carpeta", "selected-folder_other": "{{count}} carpetas", - "activate": "Hacer copia de seguridad de tus carpetas", "view-backups": "Explorar archivos", "selected-folders-title": "Carpetas seleccionadas", "select-folders": "Cambiar carpetas", "last-backup-had-issues": "La última copia de seguridad tuvo algunos problemas", "see-issues": "Ver problemas", - "backing-up": "Haciendo la copia", "backups-help": "Ayuda sobre copias de seguridad", "this-device": "Este dispositivo", "devices": "Dispositivos", "action": { "start": "Hacer copia", "stop": "Stop backup", - "running": "Subiendo backup {{progress}}", "last-run": "Última ejecución" }, "frequency": { @@ -334,12 +240,6 @@ "title": "Algo salió mal al escanear el directorio", "button": "Intentar de nuevo" }, - "deactivateAntivirus": { - "title": "Windows Defender está activo", - "description": "Por favor, desactiva Windows Defender para poder usar Internxt Antivirus. Para hacerlo, abre Seguridad de Windows > Protección contra virus y amenazas > Administrar configuración > desactiva la Protección en tiempo real.", - "retry": "Reintentar", - "cancel": "Cancelar" - }, "realtimeProtection": { "title": "Protección en tiempo real", "infoAriaLabel": "Acerca de la protección en tiempo real", @@ -380,8 +280,7 @@ }, "securityWarning": { "title": "Advertencia de seguridad", - "description": "El malware sigue presente y tu dispositivo está en riesgo.", - "confirmToCancel": "¿Estás seguro de querer cancelar?" + "description": "El malware sigue presente y tu dispositivo está en riesgo." } } }, @@ -389,7 +288,6 @@ "scanning": "Escaneando...", "scannedFiles": "Archivos escaneados", "detectedFiles": "Archivos detectados", - "errorWhileScanning": "Ocurrió un error al escanear los elementos. Por favor, intenta nuevamente.", "noFilesFound": { "title": "No se encontraron amenazas", "subtitle": "No es necesario realizar más acciones" @@ -404,11 +302,7 @@ "filesContainingMalwareModal": { "title": "Archivos que contienen malware", "selectedItems": "Seleccionados {{selectedFiles}} de {{totalFiles}}", - "selectAll": "Seleccionar todo", - "actions": { - "cancel": "Cancelar", - "remove": "Eliminar" - } + "selectAll": "Seleccionar todo" } }, "cleaner": { @@ -432,12 +326,6 @@ "saveUpTo": "Ahorra hasta", "ofYourSpace": "de tu espacio" }, - "cleanupConfirmDialog": { - "title": "Confirmar borrado", - "description": "Esta acción eliminará permanentemente los archivos seleccionados de tu dispositivo. Esta acción no se puede deshacer. Confirme para continuar.", - "cancelButton": "Cancelar", - "confirmButton": "Eliminar archivos" - }, "cleanupConfirmDialogView": { "title": "Confirmar limpieza", "description": "Esta acción eliminará de forma permanente los archivos seleccionados de tu dispositivo. Esta acción no se puede deshacer. Confirma para continuar.", @@ -471,9 +359,7 @@ }, "no-issues": "No se han encontrado errores", "actions": { - "select-folder": "Seleccionar carpeta", - "find-folder": "Buscar la carpeta", - "try-again": "Volver a intentar" + "find-folder": "Buscar la carpeta" }, "short-error-messages": { "unknown": "Error desconocido", @@ -498,41 +384,9 @@ "insufficient-permission-accessing-base-directory": "Internxt App no tiene permiso para acceder a su carpeta de sincronización", "cannot-access-base-directory": "No hemos podido acceder a su carpeta local", "cannot-access-tmp-directory": "No hemos podido acceder a su carpeta local", - "unknown": "Error desconocido al intentar sincronizar sus archivos", - "empty-file": "No admitimos archivos con un tamaño de 0 bytes debido a nuestros procesos de cifrado", - "bad-response": "Error de servidor al procesar este archivo. Por favor, intente iniciar de nuevo el proceso de sincronización", - "file-does-not-exist": "Este archivo estaba presente cuando comparamos su carpeta local con su unidad Internxt, pero desapareció cuando intentamos acceder a él. Si has eliminado este archivo, no te preocupes, este error debería desaparecer la próxima vez que se inicie el proceso de sincronización", - "file-too-big": "El tamaño máximo de carga es de 20GB. Por favor, intenta con archivos más pequeños.", - "file-non-extension": "Los archivos sin extensiones no son soportados. No sincronizado", - "duplicated-node": "Hay dos elementos (archivo o carpeta) con el mismo nombre en una carpeta. Cambia el nombre de uno de ellos para sincronizar ambos.", - "action-not-permitted": "La operación no pudo completarse, posiblemente debido a un conflicto con otro archivo.", - "file-already-exists": "No se puede completar la operación. El archivo ya existe en los servidores de Internxt.", - "not-enough-space": "No tienes suficiente espacio para completar la operación." - }, - "report-modal": { - "actions": { - "close": "Cerrar", - "cancel": "Cancelar", - "report": "Informar", - "send": "Enviar" - }, - "help-url": "Para obtener ayuda, visita", - "report": "También puedes enviar un informe sobre este error", - "user-comments": "Comentarios", - "include-logs": "Incluir los registros de este proceso de sincronización con fines de solucionar el error" + "unknown": "Error desconocido al intentar sincronizar sus archivos" } }, - "feedback": { - "window-title": "Comentarios sobre Internxt para Escritorio", - "title": "Comparte tus opiniones con Internxt", - "description": "Tus comentarios hacen que mejoremos y creemos mejores experiencias de producto", - "placeholder": "Haznos saber lo que tienes en mente, lo que te gustaría mejorar o describe el error o problema", - "characters-count": "{{character_count}}/{{character_limit}}", - "send-feedback": "Enviar comentarios", - "sent-title": "Gracias por compartir tus comentarios", - "sent-message": "Apreciamos tu tiempo y esfuerzo para ayudarnos a mejorar nuestros servicios.", - "close": "Cerrar" - }, "common": { "cancel": "Cancelar" }, diff --git a/src/apps/renderer/localize/locales/fr.json b/src/apps/renderer/localize/locales/fr.json index cdc4faafa0..5c26d57114 100644 --- a/src/apps/renderer/localize/locales/fr.json +++ b/src/apps/renderer/localize/locales/fr.json @@ -1,35 +1,11 @@ { "login": { - "email": { - "section": "Adresse électronique" - }, - "password": { - "section": "Mot de passe", - "placeholder": "Mot de passe", - "forgotten": "Vous avez oublié votre mot de passe?", - "hide": "Cacher", - "show": "Afficher" - }, "action": { - "login": "S'identifier", - "is-logging-in": "Se connecter...", "login-in-browser": "Se connecter avec le navigateur" }, "create-account": "Créer un compte", "welcome": "Bienvenue chez Internxt", - "no-account": "Vous n'avez pas de compte ?", - "2fa": { - "section": "Code d'authentification", - "description": "Vous avez configuré l'authentification en deux étapes (2FA), veuillez saisir le code à 6 chiffres", - "change-account": "Changer de compte", - "wrong-code": "Code incorrect, veuillez réessayer" - }, - "error": { - "empty-fields": "Mot de passe ou courriel incorrect" - }, - "warning": { - "no-internet": "Pas de connexion internet" - } + "no-account": "Vous n'avez pas de compte ?" }, "onboarding": { "slides": { @@ -68,48 +44,7 @@ "continue": "Continuer", "skip": "Sauter", "new": "Nouveau", - "platform-phrase": { - "windows": "navigateur de fichiers", - "linux": "navigateur de fichiers", - "macos": "Finder" - } - } - }, - "migration": { - "slides": { - "welcome": { - "title": "Nouvelle mise à jour de l'application de bureau d’Internxt !", - "features": { - "title": "Nouvelles mises à jour:", - "feature-1": "Sélectionnez ce que vous voulez télécharger et économisez de l'espace sur votre disque dur.", - "feature-2": "Un système d'exploitation natif disponible pour gérer vos fichiers et dossiers." - } - }, - "migration": { - "title": "Nous nous assurons que tous vos fichiers sont en sécurité", - "in-progress": "Téléchargement de fichiers en attente", - "item-progress": "{{processed_items}} sur {{total_items}} éléments téléchargés" - }, - "migration-failed": { - "title": "Nous nous assurons que tous vos fichiers sont en sécurité", - "message": "Certains fichiers n'ont pas pu être téléchargés", - "description": "Nous avons déplacé ces fichiers sur votre bureau, faites-les glisser et déposez-les sur votre disque interne.", - "show-files": "Afficher les fichiers" - }, - "delete-old-drive-folder": { - "title": "Même Internxt Drive, nouvel emplacement", - "message": "Votre dossier personnel Internxt Drive est situé dans le barre latérale {{platform_app}}." - }, - "new-widget": { - "title": "Soyez plus productif grâce à notre widget redessiné", - "message": "Nous avons repensé et reconstruit notre widget afin d'accroître la productivité, la commodité et la rapidité.", - "message-2": "Tous les changements sont désormais mis à jour en temps réel." - } - }, - "common": { - "continue": "Continuer", - "cancel": "Annuler", - "open-drive": "Ouvrir Internxt Drive" + "platform-phrase": "navigateur de fichiers" } }, "widget": { @@ -121,11 +56,9 @@ "dropdown": { "preferences": "Préférences", "issues": "Liste d'erreurs", - "send-feedback": "Envoyer des commentaires", "support": "Aide", "logout": "Déconnecter", "quit": "Fermer", - "antivirus": "Antivirus", "cleaner": "Cleaner", "new": "Nouveau", "sync": "Synchroniser" @@ -146,22 +79,12 @@ "renamed": "Renommé" } }, - "no-activity": { - "title": "Aucune activité récente", - "description": "Les informations apparaîtront ici lorsque vous effectuerez des modifications, pour synchroniser votre dossier local avec Internxt Drive" - }, "upToDate": { "title": "Vos fichiers sont à jour", "subtitle": "L'activité de synchronisation s'affichera ici" }, "errors": { - "sync": {}, - "backups": { - "folder-not-found": { - "text": "Impossible de copier, dossier non trouvé", - "action": "Afficher l'erreur" - } - } + "sync": {} } }, "footer": { @@ -171,7 +94,6 @@ "failed": "Échec de la synchronisation" }, "errors": { - "lock": "Synchronisation bloquée par un autre dispositif", "offline": "Pas de connexion à internet" } }, @@ -182,9 +104,7 @@ }, "virtual-drive-error": { "title": "Impossible de créer le lecteur", - "message": "Nous rencontrons des problèmes pour monter votre disque Internxt. Essayez de le démonter manuellement et de relancer l'application. ", - "mounting": "Montage...", - "button": "Monter" + "message": "Nous rencontrons des problèmes pour monter votre disque Internxt. Essayez de le démonter manuellement et de relancer l'application. " }, "banners": { "update-available": { @@ -234,13 +154,8 @@ "dark": "Sombre" } }, - "sync": { - "folder": "Dossier Internxt Drive", - "change-folder": "Changer de dossier" - }, "app-info": { "open-logs": "Ouvrir les registres", - "open-migration": "Démarrer la migration", "more": "Plus d'informations sur Internxt" } }, @@ -250,17 +165,11 @@ "display": "Utilisé {{used}} sur {{total}}", "upgrade": "Acheter", "change": "Changement", - "plan": "Plan actuel", "free": "Gratuit", "loadError": { "title": "Impossible d'obtenir les détails de votre utilisation", "action": "Réessayer" }, - "current": { - "used": "utilisés", - "of": "de", - "in-use": "utilisé" - }, "full": { "title": "Votre espace de stockage est plein", "subtitle": "Vous ne pouvez pas télécharger, synchroniser ou sauvegarder des fichiers. Mettez votre forfait à niveau ou supprimez des fichiers pour économiser de l'espace." @@ -278,20 +187,17 @@ "add-folders": "Cliquez sur + pour sélectionner les dossiers que vous souhaitez sauvegarder", "selected-folder_one": "{{count}} dossier", "selected-folder_other": "{{count}} dossiers", - "activate": "Sauvegarder vos dossiers", "view-backups": "Parcourir les fichiers", "selected-folders-title": "Dossiers sélectionnés", "select-folders": "Changer les dossiers", "last-backup-had-issues": "La dernière sauvegarde a rencontré quelques problèmes", "see-issues": "Voir des problèmes", - "backing-up": "Sauvegarde...", "backups-help": "Aide sur les sauvegardes", "this-device": "Cet appareil", "devices": "Appareils", "action": { "start": "Faire une copie ", "stop": "Arrêter la sauvegarde", - "running": "Sauvegarde en cours {{progress}}", "last-run": "Dernière exécution" }, "frequency": { @@ -334,12 +240,6 @@ "title": "Une erreur s'est produite lors de l'analyse du répertoire", "button": "Réessayer" }, - "deactivateAntivirus": { - "title": "Windows Defender est actif", - "description": "Veuillez désactiver Windows Defender afin de pouvoir utiliser Internxt Antivirus. Pour ce faire, ouvrez Sécurité Windows > Protection contre les virus et menaces > Gérer les paramètres > désactivez la protection en temps réel.", - "retry": "Réessayer", - "cancel": "Annuler" - }, "realtimeProtection": { "title": "Protection en temps réel", "infoAriaLabel": "À propos de la protection en temps réel", @@ -380,8 +280,7 @@ }, "securityWarning": { "title": "Attention de sécurité", - "description": "Le malware est toujours présent et votre appareil est en danger.", - "confirmToCancel": "Êtes-vous sûr de vouloir annuler ?" + "description": "Le malware est toujours présent et votre appareil est en danger." } } }, @@ -389,7 +288,6 @@ "scanning": "Analyse en cours...", "scannedFiles": "Fichiers analysés", "detectedFiles": "Fichiers détectés", - "errorWhileScanning": "Une erreur s'est produite lors de l'analyse des éléments. Veuillez réessayer.", "noFilesFound": { "title": "Aucune menace détectée", "subtitle": "Aucune action supplémentaire requise" @@ -404,11 +302,7 @@ "filesContainingMalwareModal": { "title": "Fichiers contenant des malwares", "selectedItems": "Sélectionné {{selectedFiles}} sur {{totalFiles}}", - "selectAll": "Tout sélectionner", - "actions": { - "cancel": "Annuler", - "remove": "Supprimer" - } + "selectAll": "Tout sélectionner" } }, "cleaner": { @@ -432,12 +326,6 @@ "saveUpTo": "Économisez jusqu'à", "ofYourSpace": "de votre espace" }, - "cleanupConfirmDialog": { - "title": "Confirmer le nettoyage", - "description": "Cette action supprimera définitivement les fichiers sélectionnés de votre appareil. Cette action ne peut pas être annulée. Veuillez confirmer pour continuer.", - "cancelButton": "Annuler", - "confirmButton": "Supprimer les fichiers " - }, "cleanupConfirmDialogView": { "title": "Confirmer le nettoyage", "description": "Cette action supprimera définitivement les fichiers sélectionnés de votre appareil. Cette action ne peut pas être annulée. Veuillez confirmer pour continuer.", @@ -471,9 +359,7 @@ }, "no-issues": "Aucune erreur trouvée", "actions": { - "select-folder": "Sélectionner un dossier", - "find-folder": "Trouver un dossier", - "try-again": "Essayer à nouveau" + "find-folder": "Trouver un dossier" }, "short-error-messages": { "unknown": "Erreur inconnue", @@ -498,41 +384,9 @@ "insufficient-permission-accessing-base-directory": "Internxt App n'a pas la permission d'accéder à votre dossier de synchronisation", "cannot-access-base-directory": "Nous n'avons pas pu accéder à votre dossier local", "cannot-access-tmp-directory": "Nous n'avons pas pu accéder à votre dossier local", - "unknown": "Une erreur inconnue s'est produite lors de la synchronisation de vos fichiers", - "empty-file": "Nous ne prenons pas en charge les fichiers d'une taille de 0 octet en raison de nos processus de chiffrement", - "bad-response": "Nous avons reçu une mauvaise réponse de nos serveurs lors du traitement de ce fichier. Veuillez essayer de relancer le processus de synchronisation.", - "file-does-not-exist": "Ce fichier était présent lorsque nous avons comparé votre dossier local avec votre disque interne, mais il a disparu lorsque nous avons essayé d'y accéder. Si vous avez supprimé ce fichier, ne vous inquiétez pas, cette erreur devrait disparaître au prochain démarrage du processus de synchronisation.", - "file-too-big": "La taille maximale de téléchargement est de 20 GB. Veuillez essayer des fichiers plus petits.", - "file-non-extension": "Les archives sans extensions ne sont pas supportées. Non synchronisées", - "duplicated-node": "Il y a deux éléments (fichier ou dossier) avec le même nom dans un dossier. Renommez l'un d'eux pour les synchroniser tous les deux.", - "action-not-permitted": "L'opération n'a pas pu être complétée, probablement en raison d'un conflit avec un autre fichier.", - "file-already-exists": "Impossible de terminer l'opération. Le fichier existe déjà sur les serveurs Internxt.", - "not-enough-space": "Vous n'avez pas assez d'espace pour compléter l'opération." - }, - "report-modal": { - "actions": { - "close": "Fermer", - "cancel": "Annuler", - "report": "Rapport", - "send": "Envoyer" - }, - "help-url": "Pour obtenir de l'aide, visitez", - "report": "Vous pouvez également envoyer un rapport sur cette erreur", - "user-comments": "Commentaires", - "include-logs": "Inclure les logs de ce processus de synchronisation à des fins de diagnostic" + "unknown": "Une erreur inconnue s'est produite lors de la synchronisation de vos fichiers" } }, - "feedback": { - "window-title": "Commentaires sur Internxt for Desktop", - "title": "Faites-nous part de vos commentaires sur Internxt", - "description": "Vos commentaires nous aident à améliorer et à créer de meilleures expériences de produits.", - "placeholder": "Laissez-nous savoir ce qui vous préoccupe, ce que vous aimeriez améliorer ou décrivez l'erreur ou le problème.", - "characters-count": "{{character_count}}/{{character_limit}}", - "send-feedback": "Envoyer les commentaire", - "sent-title": "Merci de nous avoir fait part de vos commentaires", - "sent-message": "Nous apprécions le temps et les efforts que vous consacrez à l'amélioration de nos services.", - "close": "Fermer" - }, "common": { "cancel": "Annuler" }, diff --git a/src/apps/renderer/pages/Login/index.tsx b/src/apps/renderer/pages/Login/index.tsx index e9ff0c0535..a665250630 100644 --- a/src/apps/renderer/pages/Login/index.tsx +++ b/src/apps/renderer/pages/Login/index.tsx @@ -12,7 +12,10 @@ export default function Login() { setIsLoading(true); await window.electron.openUrl(URL); } catch (error) { - console.error('Error opening URL:', error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open URL from login screen', + error, + }); } finally { setIsLoading(false); } @@ -28,7 +31,7 @@ export default function Login() { return (

- +
diff --git a/src/apps/renderer/pages/Onboarding/helpers.tsx b/src/apps/renderer/pages/Onboarding/helpers.tsx index 6bf237459d..f20919608b 100644 --- a/src/apps/renderer/pages/Onboarding/helpers.tsx +++ b/src/apps/renderer/pages/Onboarding/helpers.tsx @@ -9,7 +9,6 @@ export type OnboardingSlideProps = { backupFolders: BackupFolder[]; currentSlide: number; totalSlides: number; - platform: string; }; export type OnboardingSlide = { diff --git a/src/apps/renderer/pages/Onboarding/index.tsx b/src/apps/renderer/pages/Onboarding/index.tsx index b7975673b3..db0c4c9505 100644 --- a/src/apps/renderer/pages/Onboarding/index.tsx +++ b/src/apps/renderer/pages/Onboarding/index.tsx @@ -1,7 +1,6 @@ import { useMemo, useState } from 'react'; import { SLIDES } from './config'; import { BackupFolder, BackupsFoldersSelector } from '../../components/Backups/BackupsFoldersSelector'; -import useClientPlatform from '../../hooks/ClientPlatform'; // Slide 1 is welcome slide, last slide is summary, doesn't count const totalSlides = SLIDES.length - 2; @@ -10,7 +9,6 @@ export default function Onboarding() { const [backupFolders, setBackupFolders] = useState([]); const [slideIndex, setSlideIndex] = useState(0); const [backupsModalOpen, setBackupsModalOpen] = useState(false); - const desktopPlatform = useClientPlatform(); const finish = () => { if (backupFolders?.length) { @@ -19,9 +17,13 @@ export default function Onboarding() { * if this fails, the user can fix this * from the Desktop settings */ - window.electron.addBackupsFromLocalPaths(backupFolders.map((backupFolder) => backupFolder.path)).catch((err) => { - reportError(err); - }); + window.electron + .addBackupsFromLocalPaths(backupFolders.map((backupFolder) => backupFolder.path)) + .then(({ error }) => { + if (error) { + window.electron.logger.error({ msg: 'Failed to add backup folders during onboarding', error }); + } + }); } window.electron.finishOnboarding(); @@ -64,12 +66,10 @@ export default function Onboarding() { }, 300); }; - if (!desktopPlatform) return <>; return (
= () => {

{translate('onboarding.slides.drive.description', { - platform_app: translate('onboarding.common.platform-phrase.windows'), + platform_app: translate('onboarding.common.platform-phrase'), })}

diff --git a/src/apps/renderer/pages/Onboarding/slides/onboarding-completed-slide.tsx b/src/apps/renderer/pages/Onboarding/slides/onboarding-completed-slide.tsx index e916aea62a..2a8558886f 100644 --- a/src/apps/renderer/pages/Onboarding/slides/onboarding-completed-slide.tsx +++ b/src/apps/renderer/pages/Onboarding/slides/onboarding-completed-slide.tsx @@ -23,7 +23,7 @@ export const OnboardingCompletedSlide: React.FC = () => {

{translate('onboarding.slides.onboarding-completed.desktop-ready.description', { - platform_phrase: translate('onboarding.common.platform-phrase.windows'), + platform_phrase: translate('onboarding.common.platform-phrase'), })}

diff --git a/src/apps/renderer/pages/Settings/Account/Usage.tsx b/src/apps/renderer/pages/Settings/Account/Usage.tsx index c563fcbbf3..0a48d9ab84 100644 --- a/src/apps/renderer/pages/Settings/Account/Usage.tsx +++ b/src/apps/renderer/pages/Settings/Account/Usage.tsx @@ -13,9 +13,9 @@ export default function Usage({ isInfinite, offerUpgrade, usageInBytes, limitInB if (isInfinite) { return { amount: '∞', unit: '' }; } else { - const amount = bytes.format(limitInBytes).match(/\d+/g)?.[0] ?? ''; - const unit = bytes.format(limitInBytes).match(/[a-zA-Z]+/g)?.[0] ?? ''; - return { amount: amount, unit: unit }; + const amount = bytes.format(limitInBytes)?.match(/\d+/g)?.[0] ?? ''; + const unit = bytes.format(limitInBytes)?.match(/[a-zA-Z]+/g)?.[0] ?? ''; + return { amount, unit }; } }; @@ -23,7 +23,10 @@ export default function Usage({ isInfinite, offerUpgrade, usageInBytes, limitInB try { await window.electron.openUrl('https://drive.internxt.com/preferences?tab=plans'); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open upgrade URL from usage section', + error, + }); } }; @@ -52,8 +55,8 @@ export default function Usage({ isInfinite, offerUpgrade, usageInBytes, limitInB

{translate('settings.account.usage.display', { - used: bytes.format(usageInBytes), - total: bytes.format(limitInBytes), + used: bytes.format(usageInBytes) || '0 B', + total: bytes.format(limitInBytes) || '0 B', })}

diff --git a/src/apps/renderer/pages/Settings/Antivirus/components/CustomScanItemsSelectorDropdown.test.tsx b/src/apps/renderer/pages/Settings/Antivirus/components/CustomScanItemsSelectorDropdown.test.tsx index 067132b28c..7644d6879c 100644 --- a/src/apps/renderer/pages/Settings/Antivirus/components/CustomScanItemsSelectorDropdown.test.tsx +++ b/src/apps/renderer/pages/Settings/Antivirus/components/CustomScanItemsSelectorDropdown.test.tsx @@ -3,7 +3,7 @@ import { CustomScanItemsSelectorDropdown } from './CustomScanItemsSelectorDropdo // Mock the DropdownItem component vi.mock('./DropdownItem', () => ({ - DropdownItem: ({ children, onClick }: any) => ( + DropdownItem: ({ children, onClick }: { children: React.ReactNode; onClick: () => void }) => ( diff --git a/src/apps/renderer/pages/Settings/Antivirus/views/LockedState.tsx b/src/apps/renderer/pages/Settings/Antivirus/views/LockedState.tsx index 4f19bfe081..00d27e3ea2 100644 --- a/src/apps/renderer/pages/Settings/Antivirus/views/LockedState.tsx +++ b/src/apps/renderer/pages/Settings/Antivirus/views/LockedState.tsx @@ -9,7 +9,10 @@ export const LockedState = () => { try { await window.electron.openUrl('https://internxt.com/pricing'); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open antivirus pricing page', + error, + }); } }; diff --git a/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.test.tsx b/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.test.tsx index 0ee708653e..e47902afe4 100644 --- a/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.test.tsx +++ b/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.test.tsx @@ -1,4 +1,4 @@ -import { Device } from '../../../../../main/device/service'; +import { Device } from '../../../../../../backend/features/backup/types/Device'; import { screen, render, fireEvent } from '@testing-library/react'; import DevicePill from './DevicePill'; @@ -19,7 +19,7 @@ const mockDevice: Device = { describe('DevicePill', () => { afterAll(() => { - // @ts-ignore + // @ts-expect-error - window.electron is defined by preload and not deletable by type delete window.electron; }); diff --git a/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.tsx b/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.tsx index 9e50f60ecd..a9a3950c1d 100644 --- a/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.tsx +++ b/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicePill.tsx @@ -1,4 +1,4 @@ -import { Device } from '../../../../../main/device/service'; +import { Device } from '../../../../../../backend/features/backup/types/Device'; import { type FC } from 'react'; import { useTranslationContext } from '../../../../context/LocalContext'; diff --git a/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicesList.test.tsx b/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicesList.test.tsx index e715577e66..a3706d94cb 100644 --- a/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicesList.test.tsx +++ b/src/apps/renderer/pages/Settings/Backups/DevicesList/DevicesList.test.tsx @@ -1,4 +1,4 @@ -import { Device } from '../../../../../main/device/service'; +import { Device } from '../../../../../../backend/features/backup/types/Device'; import { fireEvent, render, screen } from '@testing-library/react'; import { DeviceContext, DeviceState } from '../../../../context/DeviceContext'; import { DevicesList } from './DevicesList'; @@ -73,7 +73,7 @@ describe('DevicesList', () => { }); afterAll(() => { - // @ts-ignore + // @ts-expect-error - window.electron is defined by preload and not deletable by type delete window.electron; }); diff --git a/src/apps/renderer/pages/Settings/Backups/DevicesList/Help.tsx b/src/apps/renderer/pages/Settings/Backups/DevicesList/Help.tsx index d8888be670..462f385a62 100644 --- a/src/apps/renderer/pages/Settings/Backups/DevicesList/Help.tsx +++ b/src/apps/renderer/pages/Settings/Backups/DevicesList/Help.tsx @@ -10,7 +10,10 @@ const Help: FC = () => { 'https://help.internxt.com/en/articles/6583477-how-do-backups-work-on-internxt-drive', ); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open backups help URL', + error, + }); } }; diff --git a/src/apps/renderer/pages/Settings/Backups/DownloadBackups.tsx b/src/apps/renderer/pages/Settings/Backups/DownloadBackups.tsx index 76a04fb2f2..fec9360841 100644 --- a/src/apps/renderer/pages/Settings/Backups/DownloadBackups.tsx +++ b/src/apps/renderer/pages/Settings/Backups/DownloadBackups.tsx @@ -11,18 +11,23 @@ export function DownloadBackups({ className }: ViewBackupsProps) { useContext(BackupContext); const handleDownloadBackup = async () => { + if (!selected) return; + if (!thereIsDownloadProgress) { - await downloadBackups(selected!); - } else { - try { - abortDownloadBackups(selected!); - } catch (err) { - // error while aborting (aborting also throws an exception itself) - } finally { - setTimeout(() => { - clearBackupDownloadProgress(selected!.uuid); - }, 600); - } + const chosenFolder = await window.electron.getFolderPath(); + if (!chosenFolder) return; + await downloadBackups(selected, chosenFolder.path); + return; + } + + try { + abortDownloadBackups(selected); + } catch (err) { + // error while aborting (aborting also throws an exception itself) + } finally { + setTimeout(() => { + clearBackupDownloadProgress(selected.uuid); + }, 600); } }; diff --git a/src/apps/renderer/pages/Settings/Backups/ViewBackups.tsx b/src/apps/renderer/pages/Settings/Backups/ViewBackups.tsx index 8ac5c49dd9..c17a375d91 100644 --- a/src/apps/renderer/pages/Settings/Backups/ViewBackups.tsx +++ b/src/apps/renderer/pages/Settings/Backups/ViewBackups.tsx @@ -10,7 +10,10 @@ export function ViewBackups({ className }: ViewBackupsProps) { try { await window.electron.openUrl('https://drive.internxt.com/app/backups'); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open backups page URL', + error, + }); } }; diff --git a/src/apps/renderer/pages/Settings/General/AppInfo.tsx b/src/apps/renderer/pages/Settings/General/AppInfo.tsx index 551677fa80..06740d0e56 100644 --- a/src/apps/renderer/pages/Settings/General/AppInfo.tsx +++ b/src/apps/renderer/pages/Settings/General/AppInfo.tsx @@ -8,7 +8,10 @@ export default function AppInfo() { try { await window.electron.openUrl(URL); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open URL from app info', + error, + }); } }; diff --git a/src/apps/renderer/pages/Widget/AccountSection.test.tsx b/src/apps/renderer/pages/Widget/AccountSection.test.tsx index c7c4aaefa1..9c918ec35e 100644 --- a/src/apps/renderer/pages/Widget/AccountSection.test.tsx +++ b/src/apps/renderer/pages/Widget/AccountSection.test.tsx @@ -3,6 +3,7 @@ import { type Mock } from 'vitest'; import { useTranslationContext } from '../../context/LocalContext'; import { useUsage } from '../../context/UsageContext/useUsage'; import { AccountSection } from './AccountSection'; +import { type User } from '../../../main/types'; vi.mock('../../context/LocalContext'); vi.mock('../../context/UsageContext/useUsage'); @@ -13,7 +14,7 @@ describe('AccountSection', () => { beforeEach(() => { vi.clearAllMocks(); (useTranslationContext as Mock).mockReturnValue({ translate: (key: string) => key }); - getUserMock.mockResolvedValue(null as any); + getUserMock.mockResolvedValue(null); }); it('renders the account section container', () => { @@ -26,7 +27,11 @@ describe('AccountSection', () => { it('shows user initials when user is loaded', async () => { (useUsage as Mock).mockReturnValue({ status: 'ready', usage: null }); - getUserMock.mockResolvedValue({ name: 'John', lastname: 'Doe', email: 'john@example.com' } as any); + getUserMock.mockResolvedValue({ + name: 'John', + lastname: 'Doe', + email: 'john@example.com', + } as Partial as User); render(); @@ -35,7 +40,11 @@ describe('AccountSection', () => { it('shows user email when user is loaded', async () => { (useUsage as Mock).mockReturnValue({ status: 'ready', usage: null }); - getUserMock.mockResolvedValue({ name: 'John', lastname: 'Doe', email: 'john@example.com' } as any); + getUserMock.mockResolvedValue({ + name: 'John', + lastname: 'Doe', + email: 'john@example.com', + } as Partial as User); render(); diff --git a/src/apps/renderer/pages/Widget/Header.tsx b/src/apps/renderer/pages/Widget/Header.tsx index ff010b430a..e6bb877f12 100644 --- a/src/apps/renderer/pages/Widget/Header.tsx +++ b/src/apps/renderer/pages/Widget/Header.tsx @@ -20,7 +20,10 @@ export default function Header() { try { await window.electron.openUrl(URL); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open URL from widget header', + error, + }); } }; diff --git a/src/apps/renderer/pages/Widget/ItemsSection.tsx b/src/apps/renderer/pages/Widget/ItemsSection.tsx index 2a84b21dab..b2025a0ce6 100644 --- a/src/apps/renderer/pages/Widget/ItemsSection.tsx +++ b/src/apps/renderer/pages/Widget/ItemsSection.tsx @@ -20,7 +20,12 @@ export function ItemsSection({ numberOfIssues, numberOfIssuesDisplay, onQuitClic const handleManualSync = () => { if (isSyncing) return; - window.electron.startRemoteSync().catch(reportError); + window.electron.startRemoteSync().catch((error) => { + window.electron.logger.error({ + msg: '[RENDERER] Failed to start manual sync from widget menu', + error, + }); + }); }; return ( diff --git a/src/apps/renderer/pages/Widget/SyncAction.tsx b/src/apps/renderer/pages/Widget/SyncAction.tsx index ab10bd42c8..1051c9b636 100644 --- a/src/apps/renderer/pages/Widget/SyncAction.tsx +++ b/src/apps/renderer/pages/Widget/SyncAction.tsx @@ -20,7 +20,10 @@ export default function SyncAction(props: { syncStatus: SyncStatus }) { try { await window.electron.openUrl('https://drive.internxt.com/preferences?tab=plans'); } catch (error) { - reportError(error); + window.electron.logger.error({ + msg: '[RENDERER] Failed to open upgrade URL from widget sync action', + error, + }); } }; diff --git a/src/apps/renderer/pages/Widget/index.tsx b/src/apps/renderer/pages/Widget/index.tsx index e49df69d51..2c130af03b 100644 --- a/src/apps/renderer/pages/Widget/index.tsx +++ b/src/apps/renderer/pages/Widget/index.tsx @@ -11,7 +11,10 @@ import { InfoBanners } from './InfoBanners/InfoBanners'; const handleRetrySync = () => { window.electron.startRemoteSync().catch((err) => { - reportError(err); + window.electron.logger.error({ + msg: '[RENDERER] Failed to retry sync from widget', + error: err, + }); }); }; diff --git a/src/apps/shared/IPC/TypedIPC.ts b/src/apps/shared/IPC/TypedIPC.ts index 7b2f1829af..733b1e5d5e 100644 --- a/src/apps/shared/IPC/TypedIPC.ts +++ b/src/apps/shared/IPC/TypedIPC.ts @@ -1,6 +1,6 @@ import { IpcMainEvent } from 'electron'; -type EventHandler = (...args: any) => any; +type EventHandler = (...args: unknown[]) => unknown; type CustomIPCEvents = Record; diff --git a/src/apps/main/backups/add-backup.test.ts b/src/backend/features/backup/add-backup.test.ts similarity index 76% rename from src/apps/main/backups/add-backup.test.ts rename to src/backend/features/backup/add-backup.test.ts index a9300526a0..be01661f5d 100644 --- a/src/apps/main/backups/add-backup.test.ts +++ b/src/backend/features/backup/add-backup.test.ts @@ -1,9 +1,10 @@ -import * as getPathFromDialogModule from '../../../backend/features/backup/get-path-from-dialog'; +import * as getPathFromDialogModule from '../../../core/utils/get-path-from-dialog'; import * as createBackupModule from './create-backup'; import * as DeviceModuleModule from './../../../backend/features/device/device.module'; import * as enableExistingBackupModule from './enable-existing-backup'; import * as fetchDeviceModule from '../../../backend/features/device/fetchDevice'; -import configStoreModule from '../config'; +import configStoreModule from '../../../apps/main/config'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; import { addBackup } from './add-backup'; import { loggerMock } from 'tests/vitest/mocks.helper'; import { call, partialSpyOn } from 'tests/vitest/utils.helper'; @@ -33,9 +34,11 @@ describe('addBackup', () => { const mockError = new Error('Device not found'); mockedGetOrCreateDevice.mockResolvedValue({ error: mockError, data: undefined }); - await expect(addBackup()).rejects.toThrow('Error message'); + const result = await addBackup(); + + expect(result).toMatchObject({ error: expect.any(Error) }); call(loggerMock.error).toMatchObject({ - msg: 'Error adding backup: No device found', + msg: 'Error fetching or creating device', }); }); @@ -45,11 +48,11 @@ describe('addBackup', () => { const result = await addBackup(); - expect(result).toBeUndefined(); + expect(result).toMatchObject({ error: expect.any(Error) }); }); it('should create new backup when backup does not exist', async () => { - const chosenPath = '/path/to/backup'; + const chosenPath = createAbsolutePath('/path/to/backup'); const mockBackupInfo = { folderUuid: 'folder-uuid', folderId: 123, @@ -62,7 +65,7 @@ describe('addBackup', () => { mockedGetOrCreateDevice.mockResolvedValue({ error: undefined, data: mockDevice }); mockedGetPathFromDialog.mockResolvedValue({ path: chosenPath, itemName: 'backup' }); mockedConfigStoreGet.mockReturnValue({}); - mockedCreateBackup.mockResolvedValue(mockBackupInfo); + mockedCreateBackup.mockResolvedValue({ data: mockBackupInfo } as never); const result = await addBackup(); @@ -70,11 +73,11 @@ describe('addBackup', () => { pathname: chosenPath, device: mockDevice, }); - expect(result).toStrictEqual(mockBackupInfo); + expect(result).toStrictEqual({ data: mockBackupInfo }); }); it('should enable existing backup when backup exists', async () => { - const chosenPath = '/path/to/existing'; + const chosenPath = createAbsolutePath('/path/to/existing'); const existingBackupData = { folderUuid: 'existing-uuid', folderId: 456, @@ -92,11 +95,14 @@ describe('addBackup', () => { mockedGetOrCreateDevice.mockResolvedValue({ error: undefined, data: mockDevice }); mockedGetPathFromDialog.mockResolvedValue({ path: chosenPath, itemName: 'existing' }); mockedConfigStoreGet.mockReturnValue({ [chosenPath]: existingBackupData }); - mockedEnableExistingBackup.mockResolvedValue(mockBackupInfo); + mockedEnableExistingBackup.mockResolvedValue({ data: mockBackupInfo } as never); const result = await addBackup(); - call(mockedEnableExistingBackup).toMatchObject([chosenPath, mockDevice]); - expect(result).toStrictEqual(mockBackupInfo); + call(mockedEnableExistingBackup).toMatchObject({ + pathname: chosenPath, + device: mockDevice, + }); + expect(result).toStrictEqual({ data: mockBackupInfo }); }); }); diff --git a/src/backend/features/backup/add-backup.ts b/src/backend/features/backup/add-backup.ts new file mode 100644 index 0000000000..6db7e40ea2 --- /dev/null +++ b/src/backend/features/backup/add-backup.ts @@ -0,0 +1,43 @@ +import configStore from '../../../apps/main/config'; +import { createBackup } from './create-backup'; +import { DeviceModule } from '../../../backend/features/device/device.module'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { enableExistingBackup } from './enable-existing-backup'; +import { getPathFromDialog } from '../../../core/utils/get-path-from-dialog'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { Result } from '../../../context/shared/domain/Result'; +import { BackupInfo } from '../../../apps/backups/BackupInfo'; + +export async function addBackup(): Promise> { + const { error, data } = await DeviceModule.getOrCreateDevice(); + if (error) { + logger.error({ tag: 'BACKUPS', msg: 'Error fetching or creating device', error }); + return { error: new Error('Error adding backup: No device found') }; + } + + const chosenItem = await getPathFromDialog(); + if (!chosenItem) return { error: new Error('No path chosen') }; + + const chosenPath = createAbsolutePath(chosenItem.path); + const backupList = configStore.get('backupList'); + const existingBackup = backupList[chosenPath]; + + if (!existingBackup) { + const { data: newBackup, error: createError } = await createBackup({ pathname: chosenPath, device: data }); + if (createError) { + logger.error({ tag: 'BACKUPS', msg: 'Error creating backup', error: createError }); + return { error: createError }; + } + return { data: newBackup }; + } else { + const { data: existingBackupInfo, error: enableError } = await enableExistingBackup({ + pathname: chosenPath, + device: data, + }); + if (enableError) { + logger.error({ tag: 'BACKUPS', msg: 'Error enabling existing backup', error: enableError }); + return { error: enableError }; + } + return { data: existingBackupInfo }; + } +} diff --git a/src/backend/features/backup/build-backup-folder-tree-snapshot.test.ts b/src/backend/features/backup/build-backup-folder-tree-snapshot.test.ts new file mode 100644 index 0000000000..6d85dfde59 --- /dev/null +++ b/src/backend/features/backup/build-backup-folder-tree-snapshot.test.ts @@ -0,0 +1,27 @@ +import { buildBackupFolderTreeSnapshot } from './build-backup-folder-tree-snapshot'; + +describe('build-backup-folder-tree-snapshot', () => { + it('should accumulate all file sizes and decrypted names across tree', () => { + const decryptFileName = vi.fn((name: string) => `dec:${name}`); + const tree = { + id: 1, + plainName: 'root', + files: [{ id: 101, name: 'f1', folderId: 1, size: '2' }], + children: [ + { + id: 2, + plainName: 'child', + files: [{ id: 102, name: 'f2', folderId: 2, size: '3' }], + children: [], + }, + ], + }; + + const result = buildBackupFolderTreeSnapshot({ tree: tree as never, decryptFileName }); + + expect(result.size).toBe(5); + expect(result.folderDecryptedNames).toStrictEqual({ 1: 'root', 2: 'child' }); + expect(result.fileDecryptedNames).toStrictEqual({ 101: 'dec:f1', 102: 'dec:f2' }); + expect(decryptFileName).toBeCalledTimes(2); + }); +}); diff --git a/src/backend/features/backup/build-backup-folder-tree-snapshot.ts b/src/backend/features/backup/build-backup-folder-tree-snapshot.ts new file mode 100644 index 0000000000..8c6ab7be2f --- /dev/null +++ b/src/backend/features/backup/build-backup-folder-tree-snapshot.ts @@ -0,0 +1,52 @@ +import { FolderTree } from '@internxt/sdk/dist/drive/storage/types'; +import { BackupFolderTreeSnapshot } from './types/BackupFolderTreeSnapshot'; + +type NodeSnapshot = { + folderId: number; + folderName: string; + fileNames: Record; + size: number; +}; + +type SnapshotProps = { + node: FolderTree; + decryptFileName: (name: string, folderId: number) => string; +}; + +function snapshotNode({ node, decryptFileName }: SnapshotProps): NodeSnapshot { + const fileNames: Record = {}; + let size = 0; + + for (const file of node.files) { + fileNames[file.id] = decryptFileName(file.name, file.folderId); + size += Number(file.size); + } + + return { folderId: node.id, folderName: node.plainName, fileNames, size }; +} + +type Props = { + tree: FolderTree; + decryptFileName: (name: string, folderId: number) => string; +}; + +export function buildBackupFolderTreeSnapshot({ tree, decryptFileName }: Props): BackupFolderTreeSnapshot { + let size = 0; + const folderDecryptedNames: Record = {}; + const fileDecryptedNames: Record = {}; + + const stack = [tree]; + + while (stack.length > 0) { + const currentNode = stack.pop()!; + const { folderId, folderName, fileNames, size: nodeSize } = snapshotNode({ node: currentNode, decryptFileName }); + + folderDecryptedNames[folderId] = folderName; + Object.assign(fileDecryptedNames, fileNames); + size += nodeSize; + + stack.push(...currentNode.children); + } + + return { tree, folderDecryptedNames, fileDecryptedNames, size }; +} diff --git a/src/backend/features/backup/change-backup-path.test.ts b/src/backend/features/backup/change-backup-path.test.ts new file mode 100644 index 0000000000..d0cccc8e97 --- /dev/null +++ b/src/backend/features/backup/change-backup-path.test.ts @@ -0,0 +1,112 @@ +import * as getBackupFolderUuidModule from '../../../infra/drive-server/services/folder/services/fetch-backup-folder-uuid'; +import * as renameFolderModule from '../../../infra/drive-server/services/folder/services/rename-folder'; +import * as migrateBackupEntryIfNeededModule from './migrate-backup-entry-if-needed'; +import configStoreModule from '../../../apps/main/config'; +import { DriveServerError } from '../../../infra/drive-server/drive-server.error'; +import { changeBackupPath } from './change-backup-path'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; + +describe('change-backup-path', () => { + const mockedConfigStoreGet = partialSpyOn(configStoreModule, 'get'); + const mockedConfigStoreSet = partialSpyOn(configStoreModule, 'set'); + const mockedGetBackupFolderUuid = partialSpyOn(getBackupFolderUuidModule, 'getBackupFolderUuid'); + const mockedRenameFolder = partialSpyOn(renameFolderModule, 'renameFolder'); + const mockedMigrateBackupEntryIfNeeded = partialSpyOn(migrateBackupEntryIfNeededModule, 'migrateBackupEntryIfNeeded'); + + const currentPath = createAbsolutePath('/home/dev/Documents/current-backup'); + const newPath = createAbsolutePath('/home/dev/Documents/new-backup'); + + it('should return error when backup no longer exists', async () => { + mockedConfigStoreGet.mockReturnValue({}); + + const result = await changeBackupPath({ currentPath, newPath }); + + expect(result).toMatchObject({ error: new Error('No backup found with the provided path') }); + }); + + it('should return error when new path already exists as backup', async () => { + const existingBackup = { folderId: 12, folderUuid: 'folder-uuid', enabled: true }; + + mockedConfigStoreGet.mockReturnValue({ + [currentPath]: existingBackup, + [newPath]: { folderId: 99, folderUuid: 'another-folder-uuid', enabled: true }, + }); + + const result = await changeBackupPath({ currentPath, newPath }); + + expect(result).toMatchObject({ error: new Error('A backup with this path already exists') }); + expect(mockedGetBackupFolderUuid).not.toBeCalled(); + expect(mockedRenameFolder).not.toBeCalled(); + expect(mockedConfigStoreSet).not.toBeCalled(); + }); + + it('should return false when folder names are equal', async () => { + const currentPathWithSameName = createAbsolutePath('/home/dev/Documents/project'); + const newPathWithSameName = createAbsolutePath('/mnt/external/project'); + + mockedConfigStoreGet.mockReturnValue({ + [currentPathWithSameName]: { folderId: 12, folderUuid: 'folder-uuid', enabled: true }, + }); + + const result = await changeBackupPath({ currentPath: currentPathWithSameName, newPath: newPathWithSameName }); + + expect(result).toStrictEqual({ data: false }); + expect(mockedGetBackupFolderUuid).not.toBeCalled(); + expect(mockedRenameFolder).not.toBeCalled(); + expect(mockedConfigStoreSet).not.toBeCalled(); + }); + + it('should rename backup folder and move backup entry to the new path', async () => { + const existingBackup = { folderId: 12, folderUuid: 'folder-uuid', enabled: true }; + const migratedBackup = { folderId: 12, folderUuid: 'folder-uuid', enabled: true }; + const backupList = { + [currentPath]: existingBackup, + }; + + mockedConfigStoreGet.mockReturnValue(backupList); + mockedGetBackupFolderUuid.mockResolvedValue({ data: 'remote-folder-uuid' }); + mockedRenameFolder.mockResolvedValue({ data: {} }); + mockedMigrateBackupEntryIfNeeded.mockResolvedValue(migratedBackup); + + const result = await changeBackupPath({ currentPath, newPath }); + + expect(result).toStrictEqual({ data: true }); + call(mockedGetBackupFolderUuid).toStrictEqual({ folderId: '12' }); + call(mockedRenameFolder).toStrictEqual({ + uuid: 'remote-folder-uuid', + plainName: 'new-backup', + }); + call(mockedMigrateBackupEntryIfNeeded).toStrictEqual({ pathname: newPath, backup: existingBackup }); + call(mockedConfigStoreSet).toStrictEqual([ + 'backupList', + { + [newPath]: migratedBackup, + }, + ]); + }); + + it('should return error when resolving remote backup folder uuid fails', async () => { + const existingBackup = { folderId: 12, folderUuid: 'folder-uuid', enabled: true }; + const error = new DriveServerError('UNKNOWN', undefined, 'uuid lookup failed'); + + mockedConfigStoreGet.mockReturnValue({ [currentPath]: existingBackup }); + mockedGetBackupFolderUuid.mockResolvedValue({ error }); + + const result = await changeBackupPath({ currentPath, newPath }); + + expect(result).toStrictEqual({ error }); + }); + + it('should return error when rename request fails', async () => { + const existingBackup = { folderId: 12, folderUuid: 'folder-uuid', enabled: true }; + + mockedConfigStoreGet.mockReturnValue({ [currentPath]: existingBackup }); + mockedGetBackupFolderUuid.mockResolvedValue({ data: 'remote-folder-uuid' }); + mockedRenameFolder.mockResolvedValue({ error: new DriveServerError('UNKNOWN', undefined, 'rename failed') }); + + const result = await changeBackupPath({ currentPath, newPath }); + + expect(result).toMatchObject({ error: new Error('Error in the request to rename a backup') }); + }); +}); diff --git a/src/backend/features/backup/change-backup-path.ts b/src/backend/features/backup/change-backup-path.ts new file mode 100644 index 0000000000..9a2879c58d --- /dev/null +++ b/src/backend/features/backup/change-backup-path.ts @@ -0,0 +1,57 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { basename } from 'node:path'; +import configStore from '../../../apps/main/config'; +import { getBackupFolderUuid } from '../../../infra/drive-server/services/folder/services/fetch-backup-folder-uuid'; +import { renameFolder } from '../../../infra/drive-server/services/folder/services/rename-folder'; +import { migrateBackupEntryIfNeeded } from './migrate-backup-entry-if-needed'; +import { AbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { Result } from '../../../context/shared/domain/Result'; + +type Props = { + currentPath: AbsolutePath; + newPath: AbsolutePath; +}; + +export async function changeBackupPath({ currentPath, newPath }: Props): Promise> { + const backupsList = configStore.get('backupList'); + const existingBackup = backupsList[currentPath]; + + if (!existingBackup) { + return { error: new Error('No backup found with the provided path') }; + } + + if (backupsList[newPath]) { + return { error: new Error('A backup with this path already exists') }; + } + + const oldFolderName = basename(currentPath); + const newFolderName = basename(newPath); + if (oldFolderName !== newFolderName) { + logger.debug({ tag: 'BACKUPS', msg: 'Renaming backup', existingBackup }); + + const getFolderUuidResponse = await getBackupFolderUuid({ folderId: String(existingBackup.folderId) }); + if (getFolderUuidResponse.error) { + return { error: getFolderUuidResponse.error }; + } + const { data: folderUuid } = getFolderUuidResponse; + + const res = await renameFolder({ uuid: folderUuid, plainName: newFolderName }); + if (res.error) { + return { error: new Error('Error in the request to rename a backup') }; + } + + delete backupsList[currentPath]; + + const migratedExistingBackup = await migrateBackupEntryIfNeeded({ + pathname: newPath, + backup: existingBackup, + }); + backupsList[newPath] = migratedExistingBackup; + + configStore.set('backupList', backupsList); + + return { data: true }; + } + + return { data: false }; +} diff --git a/src/apps/main/backups/create-backup-folder.test.ts b/src/backend/features/backup/create-backup-folder.test.ts similarity index 97% rename from src/apps/main/backups/create-backup-folder.test.ts rename to src/backend/features/backup/create-backup-folder.test.ts index fc02618bec..3c49ff8571 100644 --- a/src/apps/main/backups/create-backup-folder.test.ts +++ b/src/backend/features/backup/create-backup-folder.test.ts @@ -4,7 +4,7 @@ import { logger } from '@internxt/drive-desktop-core/build/backend'; import { DriveServerError } from '../../../infra/drive-server/drive-server.error'; import { call } from '../../../../tests/vitest/utils.helper'; import { partialSpyOn } from '../../../../tests/vitest/utils.helper'; -import * as findBackupFolderByNameModule from './find-backup-folder-by-name'; +import * as findBackupFolderByNameModule from '../../../apps/main/backups/find-backup-folder-by-name'; vi.mock(import('@internxt/drive-desktop-core/build/backend')); diff --git a/src/apps/main/backups/create-backup-folder.ts b/src/backend/features/backup/create-backup-folder.ts similarity index 84% rename from src/apps/main/backups/create-backup-folder.ts rename to src/backend/features/backup/create-backup-folder.ts index d628777ba7..03f51aa5a8 100644 --- a/src/apps/main/backups/create-backup-folder.ts +++ b/src/backend/features/backup/create-backup-folder.ts @@ -1,8 +1,8 @@ -import { Device } from '../device/service'; -import { Backup } from './types'; +import { Device } from './types/Device'; +import { Backup } from '../../../apps/main/backups/types'; import { logger } from '@internxt/drive-desktop-core/build/backend'; import { createFolder } from '../../../infra/drive-server/services/folder/services/create-folder'; -import { findBackupFolderByName } from './find-backup-folder-by-name'; +import { findBackupFolderByName } from '../../../apps/main/backups/find-backup-folder-by-name'; type Props = { folderName: string; diff --git a/src/apps/main/backups/create-backup.test.ts b/src/backend/features/backup/create-backup.test.ts similarity index 68% rename from src/apps/main/backups/create-backup.test.ts rename to src/backend/features/backup/create-backup.test.ts index da6482d826..106917fb81 100644 --- a/src/apps/main/backups/create-backup.test.ts +++ b/src/backend/features/backup/create-backup.test.ts @@ -1,11 +1,13 @@ import { createBackup } from './create-backup'; -import { createBackupFolder } from './create-backup-folder'; -import configStore from '../config'; +import { createBackupFolder } from '../../../backend/features/backup/create-backup-folder'; +import configStore from '../../../apps/main/config'; +import { AbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; import { app } from 'electron'; import path from 'node:path'; +import { DriveServerError } from 'src/infra/drive-server/drive-server.error'; vi.mock('./create-backup-folder'); -vi.mock('../config'); +vi.mock('../../../apps/main/config'); vi.mock('node:path'); const mockPostBackup = vi.mocked(createBackupFolder); @@ -48,7 +50,7 @@ describe('createBackup', () => { }); const result = await createBackup({ - pathname: '/home/user/TestFolder', + pathname: '/home/user/TestFolder' as AbsolutePath, device: mockDevice, }); @@ -66,26 +68,28 @@ describe('createBackup', () => { }); expect(result).toStrictEqual({ - folderUuid: 'backup-uuid-456', - folderId: 123, - pathname: '/home/user/TestFolder', - name: 'TestFolder', - tmpPath: '/tmp', - backupsBucket: 'test-bucket', + data: { + folderUuid: 'backup-uuid-456', + folderId: 123, + pathname: '/home/user/TestFolder', + name: 'TestFolder', + tmpPath: '/tmp', + backupsBucket: 'test-bucket', + }, }); }); it('should return undefined when createBackupFolder fails', async () => { mockPostBackup.mockResolvedValue({ - error: new Error('Failed to create backup folder') as any, + error: new DriveServerError('NOT_FOUND'), }); const result = await createBackup({ - pathname: '/home/user/FailedFolder', + pathname: '/home/user/FailedFolder' as AbsolutePath, device: mockDevice, }); - expect(result).toBeUndefined(); + expect(result).toStrictEqual({ error: expect.any(Error) }); expect(mockConfigStore.set).not.toBeCalled(); }); }); diff --git a/src/apps/main/backups/create-backup.ts b/src/backend/features/backup/create-backup.ts similarity index 50% rename from src/apps/main/backups/create-backup.ts rename to src/backend/features/backup/create-backup.ts index df43b8240e..4645ffe57e 100644 --- a/src/apps/main/backups/create-backup.ts +++ b/src/backend/features/backup/create-backup.ts @@ -1,19 +1,21 @@ import path from 'node:path'; -import { Device } from '../device/service'; -import configStore from '../config'; -import { BackupInfo } from 'src/apps/backups/BackupInfo'; +import { Device } from './types/Device'; +import configStore from '../../../apps/main/config'; +import { BackupInfo } from '../../../apps/backups/BackupInfo'; import { app } from 'electron'; -import { createBackupFolder } from './create-backup-folder'; +import { createBackupFolder } from '../../../backend/features/backup/create-backup-folder'; +import { AbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { Result } from '../../../context/shared/domain/Result'; type Props = { - pathname: string; + pathname: AbsolutePath; device: Device; }; -export async function createBackup({ pathname, device }: Props) { +export async function createBackup({ pathname, device }: Props): Promise> { const { base } = path.parse(pathname); const { error, data: newBackup } = await createBackupFolder({ folderName: base, device }); - if (error) return; + if (error) return { error }; const backupList = configStore.get('backupList'); backupList[pathname] = { @@ -27,11 +29,11 @@ export async function createBackup({ pathname, device }: Props) { const createdBackup: BackupInfo = { folderUuid: newBackup.uuid, folderId: newBackup.id, - pathname: pathname, + pathname, name: base, tmpPath: app.getPath('temp'), backupsBucket: device.bucket, }; - return createdBackup; + return { data: createdBackup }; } diff --git a/src/backend/features/backup/create-backups-from-local-paths.test.ts b/src/backend/features/backup/create-backups-from-local-paths.test.ts new file mode 100644 index 0000000000..2048fe2376 --- /dev/null +++ b/src/backend/features/backup/create-backups-from-local-paths.test.ts @@ -0,0 +1,69 @@ +import * as createBackupModule from './create-backup'; +import * as DeviceModuleModule from '../device/device.module'; +import configStoreModule from '../../../apps/main/config'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { call, calls, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { createBackupsFromLocalPaths } from './create-backups-from-local-paths'; + +describe('create-backups-from-local-paths', () => { + const createBackupMock = partialSpyOn(createBackupModule, 'createBackup'); + const getOrCreateDeviceMock = partialSpyOn(DeviceModuleModule.DeviceModule, 'getOrCreateDevice'); + const configStoreSetMock = partialSpyOn(configStoreModule, 'set'); + + it('should enable backups and create one backup per local path', async () => { + const device = { + id: 1, + uuid: 'device-uuid', + name: 'Device', + bucket: 'bucket', + removed: false, + hasBackups: true, + }; + + const folderPaths = [createAbsolutePath('/home/dev/Documents'), createAbsolutePath('/home/dev/Pictures')]; + + getOrCreateDeviceMock.mockResolvedValue({ data: device }); + createBackupMock.mockResolvedValue(undefined as never); + + const result = await createBackupsFromLocalPaths({ folderPaths }); + + expect(result).toStrictEqual({ data: true }); + call(configStoreSetMock).toStrictEqual(['backupsEnabled', true]); + call(getOrCreateDeviceMock).toStrictEqual([]); + calls(createBackupMock).toStrictEqual([ + { pathname: folderPaths[0], device }, + { pathname: folderPaths[1], device }, + ]); + }); + + it('should return an error when no device can be created or fetched', async () => { + const error = new Error('Device error'); + const folderPaths = [createAbsolutePath('/home/dev/Documents')]; + + getOrCreateDeviceMock.mockResolvedValue({ error }); + + await expect(createBackupsFromLocalPaths({ folderPaths })).resolves.toStrictEqual({ error }); + calls(createBackupMock).toHaveLength(0); + calls(configStoreSetMock).toHaveLength(0); + }); + + it('should return an error when creating a backup fails', async () => { + const error = new Error('Backup error'); + const device = { + id: 1, + uuid: 'device-uuid', + name: 'Device', + bucket: 'bucket', + removed: false, + hasBackups: true, + }; + const folderPaths = [createAbsolutePath('/home/dev/Documents')]; + + getOrCreateDeviceMock.mockResolvedValue({ data: device }); + createBackupMock.mockRejectedValue(error); + + await expect(createBackupsFromLocalPaths({ folderPaths })).rejects.toThrow('Backup error'); + call(createBackupMock).toStrictEqual({ pathname: folderPaths[0], device }); + calls(configStoreSetMock).toHaveLength(0); + }); +}); diff --git a/src/backend/features/backup/create-backups-from-local-paths.ts b/src/backend/features/backup/create-backups-from-local-paths.ts new file mode 100644 index 0000000000..98fe1411ac --- /dev/null +++ b/src/backend/features/backup/create-backups-from-local-paths.ts @@ -0,0 +1,22 @@ +import configStore from '../../../apps/main/config'; +import { createBackup } from './create-backup'; +import { DeviceModule } from '../device/device.module'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { Result } from '../../../context/shared/domain/Result'; + +type Props = { + folderPaths: string[]; +}; + +export async function createBackupsFromLocalPaths({ folderPaths }: Props): Promise> { + const { error, data } = await DeviceModule.getOrCreateDevice(); + if (error) return { error }; + + const operations = folderPaths.map((folderPath) => + createBackup({ pathname: createAbsolutePath(folderPath), device: data }), + ); + await Promise.all(operations); + + configStore.set('backupsEnabled', true); + return { data: true }; +} diff --git a/src/backend/features/backup/delete-backup.test.ts b/src/backend/features/backup/delete-backup.test.ts new file mode 100644 index 0000000000..ad592c6cec --- /dev/null +++ b/src/backend/features/backup/delete-backup.test.ts @@ -0,0 +1,68 @@ +import * as addFolderToTrashModule from '../../../infra/drive-server/services/folder/services/add-folder-to-trash'; +import configStoreModule from '../../../apps/main/config'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { DriveServerError } from '../../../infra/drive-server/drive-server.error'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { deleteBackup } from './delete-backup'; + +describe('delete-backup', () => { + const addFolderToTrashMock = partialSpyOn(addFolderToTrashModule, 'addFolderToTrash'); + const configStoreGetMock = partialSpyOn(configStoreModule, 'get'); + const configStoreSetMock = partialSpyOn(configStoreModule, 'set'); + + const backup = { + folderUuid: 'folder-uuid', + folderId: 1, + tmpPath: '/tmp', + backupsBucket: 'bucket', + pathname: createAbsolutePath('/home/dev/Documents'), + name: 'Documents', + }; + + it('should return an error when request to trash folder fails', async () => { + addFolderToTrashMock.mockResolvedValue({ error: new DriveServerError('UNKNOWN', undefined, 'request failed') }); + + const result = await deleteBackup({ backup }); + + expect(result).toMatchObject({ error: { message: 'Request to delete backup wasnt succesful' } }); + }); + + it('should not update backup list when isCurrent is false', async () => { + addFolderToTrashMock.mockResolvedValue({ data: undefined as never }); + + await deleteBackup({ backup, isCurrent: false }); + + call(addFolderToTrashMock).toBe('folder-uuid'); + expect(configStoreGetMock).not.toBeCalled(); + expect(configStoreSetMock).not.toBeCalled(); + }); + + it('should remove backup from local list when isCurrent is true', async () => { + addFolderToTrashMock.mockResolvedValue({ data: undefined as never }); + configStoreGetMock.mockReturnValue({ + '/home/dev/Documents': backup, + '/home/dev/Pictures': { + ...backup, + folderId: 2, + folderUuid: 'folder-uuid-2', + pathname: createAbsolutePath('/home/dev/Pictures'), + name: 'Pictures', + }, + } as never); + + await deleteBackup({ backup, isCurrent: true }); + + call(configStoreSetMock).toStrictEqual([ + 'backupList', + { + '/home/dev/Pictures': { + ...backup, + folderId: 2, + folderUuid: 'folder-uuid-2', + pathname: createAbsolutePath('/home/dev/Pictures'), + name: 'Pictures', + }, + }, + ]); + }); +}); diff --git a/src/backend/features/backup/delete-backup.ts b/src/backend/features/backup/delete-backup.ts new file mode 100644 index 0000000000..ee3969930c --- /dev/null +++ b/src/backend/features/backup/delete-backup.ts @@ -0,0 +1,26 @@ +import configStore from '../../../apps/main/config'; +import { BackupInfo } from '../../../apps/backups/BackupInfo'; +import { addFolderToTrash } from '../../../infra/drive-server/services/folder/services/add-folder-to-trash'; +import { Result } from '../../../context/shared/domain/Result'; + +type Props = { + backup: BackupInfo; + isCurrent?: boolean; +}; + +export async function deleteBackup({ backup, isCurrent }: Props): Promise> { + const { error } = await addFolderToTrash(backup.folderUuid); + if (error) { + return { error: new Error('Request to delete backup wasnt succesful') }; + } + + if (isCurrent) { + const backupsList = configStore.get('backupList'); + const entriesFiltered = Object.entries(backupsList).filter(([, b]) => b.folderId !== backup.folderId); + const backupListFiltered = Object.fromEntries(entriesFiltered); + + configStore.set('backupList', backupListFiltered); + } + + return { data: true }; +} diff --git a/src/backend/features/backup/delete-device-backups.test.ts b/src/backend/features/backup/delete-device-backups.test.ts new file mode 100644 index 0000000000..f036bceebb --- /dev/null +++ b/src/backend/features/backup/delete-device-backups.test.ts @@ -0,0 +1,81 @@ +import * as addFolderToTrashModule from '../../../infra/drive-server/services/folder/services/add-folder-to-trash'; +import * as getBackupFolderTreeSnapshotModule from './get-backup-folder-tree-snapshot'; +import * as deleteBackupModule from './delete-backup'; +import * as DeviceModuleModule from '../device/device.module'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { calls, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { deleteDeviceBackups } from './delete-device-backups'; + +describe('delete-device-backups', () => { + const getBackupsFromDeviceMock = partialSpyOn(DeviceModuleModule.DeviceModule, 'getBackupsFromDevice'); + const deleteBackupMock = partialSpyOn(deleteBackupModule, 'deleteBackup'); + const getBackupFolderTreeSnapshotMock = partialSpyOn( + getBackupFolderTreeSnapshotModule, + 'getBackupFolderTreeSnapshot', + ); + const addFolderToTrashMock = partialSpyOn(addFolderToTrashModule, 'addFolderToTrash'); + + const device = { + id: 1, + uuid: 'device-uuid', + name: 'Desktop', + bucket: 'bucket', + removed: false, + hasBackups: true, + }; + + it('should delete each backup and trash only stale folders from backup tree', async () => { + const backups = [ + { + folderUuid: 'folder-uuid-1', + folderId: 10, + tmpPath: '/tmp', + backupsBucket: 'bucket', + pathname: createAbsolutePath('/home/dev/Documents'), + name: 'Documents', + }, + ]; + + getBackupsFromDeviceMock.mockResolvedValue(backups); + deleteBackupMock.mockResolvedValue(undefined); + getBackupFolderTreeSnapshotMock.mockResolvedValue({ + data: { + tree: { + children: [ + { id: 10, uuid: 'folder-uuid-1' }, + { id: 20, uuid: 'folder-uuid-2' }, + ], + }, + }, + } as never); + addFolderToTrashMock.mockResolvedValue({ data: undefined as never }); + + await deleteDeviceBackups({ device, isCurrent: true }); + + calls(deleteBackupMock).toStrictEqual([{ backup: backups[0], isCurrent: true }]); + calls(addFolderToTrashMock).toStrictEqual(['folder-uuid-2']); + }); + + it('should not trash any folder when all tree children belong to backups', async () => { + const backups = [ + { + folderUuid: 'folder-uuid-1', + folderId: 10, + tmpPath: '/tmp', + backupsBucket: 'bucket', + pathname: createAbsolutePath('/home/dev/Documents'), + name: 'Documents', + }, + ]; + + getBackupsFromDeviceMock.mockResolvedValue(backups); + deleteBackupMock.mockResolvedValue(undefined); + getBackupFolderTreeSnapshotMock.mockResolvedValue({ + data: { tree: { children: [{ id: 10, uuid: 'folder-uuid-1' }] } }, + } as never); + + await deleteDeviceBackups({ device, isCurrent: false }); + + expect(addFolderToTrashMock).not.toBeCalled(); + }); +}); diff --git a/src/backend/features/backup/delete-device-backups.ts b/src/backend/features/backup/delete-device-backups.ts new file mode 100644 index 0000000000..a821caf833 --- /dev/null +++ b/src/backend/features/backup/delete-device-backups.ts @@ -0,0 +1,33 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import type { Device } from './types/Device'; +import { DeviceModule } from '../device/device.module'; +import { addFolderToTrash } from '../../../infra/drive-server/services/folder/services/add-folder-to-trash'; +import { getBackupFolderTreeSnapshot } from './get-backup-folder-tree-snapshot'; +import { deleteBackup } from './delete-backup'; + +type Props = { + device: Device; + isCurrent?: boolean; +}; + +export async function deleteDeviceBackups({ device, isCurrent }: Props) { + const backups = await DeviceModule.getBackupsFromDevice(device, isCurrent); + logger.debug({ tag: 'BACKUPS', msg: '[BACKUPS] Deleting backups from device', count: backups.length }); + logger.debug({ tag: 'BACKUPS', msg: '[BACKUPS] Backups details', backups }); + + const backupDeletionPromises = backups.map((backup) => deleteBackup({ backup, isCurrent })); + await Promise.all(backupDeletionPromises); + + const { error, data } = await getBackupFolderTreeSnapshot({ folderUuid: device.uuid }); + if (error) { + logger.error({ tag: 'BACKUPS', msg: 'Error fetching backup folder tree snapshot', error }); + return; + } + + const { tree } = data; + const foldersToDelete = tree.children.filter((folder) => !backups.some((backup) => backup.folderId === folder.id)); + const folderDeletionPromises = foldersToDelete.map(async (folder) => { + await addFolderToTrash(folder.uuid); + }); + await Promise.all(folderDeletionPromises); +} diff --git a/src/backend/features/backup/disable-backup.test.ts b/src/backend/features/backup/disable-backup.test.ts new file mode 100644 index 0000000000..b48b9e6d47 --- /dev/null +++ b/src/backend/features/backup/disable-backup.test.ts @@ -0,0 +1,75 @@ +import * as findBackupPathnameFromIdModule from './find-backup-pathname-from-id'; +import * as getBackupFolderTreeSnapshotModule from './get-backup-folder-tree-snapshot'; +import * as deleteBackupModule from './delete-backup'; +import configStoreModule from '../../../apps/main/config'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { loggerMock } from '../../../../tests/vitest/mocks.helper'; +import { disableBackup } from './disable-backup'; + +describe('disable-backup', () => { + const findBackupPathnameFromIdMock = partialSpyOn(findBackupPathnameFromIdModule, 'findBackupPathnameFromId'); + const getBackupFolderTreeSnapshotMock = partialSpyOn( + getBackupFolderTreeSnapshotModule, + 'getBackupFolderTreeSnapshot', + ); + const deleteBackupMock = partialSpyOn(deleteBackupModule, 'deleteBackup'); + const configStoreGetMock = partialSpyOn(configStoreModule, 'get'); + const configStoreSetMock = partialSpyOn(configStoreModule, 'set'); + + const backup = { + folderUuid: 'folder-uuid', + folderId: 1, + tmpPath: '/tmp', + backupsBucket: 'bucket', + pathname: createAbsolutePath('/home/dev/Documents'), + name: 'Documents', + }; + + it('should throw when backup pathname is not found', async () => { + configStoreGetMock.mockReturnValue({}); + findBackupPathnameFromIdMock.mockReturnValue(undefined); + + await expect(disableBackup({ backup })).rejects.toBeUndefined(); + + expect(configStoreSetMock).not.toBeCalled(); + expect(getBackupFolderTreeSnapshotMock).not.toBeCalled(); + }); + + it('should disable backup and delete it when tree size is zero', async () => { + const backupList = { + '/home/dev/Documents': { folderId: 1, folderUuid: 'folder-uuid', enabled: true }, + }; + + configStoreGetMock.mockReturnValue(backupList); + findBackupPathnameFromIdMock.mockReturnValue('/home/dev/Documents'); + getBackupFolderTreeSnapshotMock.mockResolvedValue({ data: { size: 0 } } as never); + deleteBackupMock.mockResolvedValue({ data: true }); + + await disableBackup({ backup }); + + call(configStoreSetMock).toStrictEqual([ + 'backupList', + { + '/home/dev/Documents': { folderId: 1, folderUuid: 'folder-uuid', enabled: false }, + }, + ]); + call(deleteBackupMock).toStrictEqual({ backup, isCurrent: true }); + }); + + it('should log error when fetching the backup folder tree snapshot fails', async () => { + const error = new Error('snapshot failed'); + configStoreGetMock.mockReturnValue({ + '/home/dev/Documents': { folderId: 1, folderUuid: 'folder-uuid', enabled: true }, + }); + findBackupPathnameFromIdMock.mockReturnValue('/home/dev/Documents'); + getBackupFolderTreeSnapshotMock.mockResolvedValue({ error } as never); + + await expect(disableBackup({ backup })).rejects.toBeUndefined(); + + call(loggerMock.error).toMatchObject({ + tag: 'BACKUPS', + msg: 'Error fetching backup folder tree snapshot', + }); + }); +}); diff --git a/src/backend/features/backup/disable-backup.ts b/src/backend/features/backup/disable-backup.ts new file mode 100644 index 0000000000..318fc533fd --- /dev/null +++ b/src/backend/features/backup/disable-backup.ts @@ -0,0 +1,35 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import configStore from '../../../apps/main/config'; +import { BackupInfo } from '../../../apps/backups/BackupInfo'; +import { findBackupPathnameFromId } from './find-backup-pathname-from-id'; +import { getBackupFolderTreeSnapshot } from './get-backup-folder-tree-snapshot'; +import { deleteBackup } from './delete-backup'; + +type Props = { + backup: BackupInfo; +}; + +export async function disableBackup({ backup }: Props): Promise { + const backupsList = configStore.get('backupList'); + const pathname = findBackupPathnameFromId({ id: backup.folderId }); + + if (!pathname) { + throw logger.error({ tag: 'BACKUPS', msg: 'Error finding backup pathname to disable backup' }); + } + + backupsList[pathname].enabled = false; + configStore.set('backupList', backupsList); + + const { error, data } = await getBackupFolderTreeSnapshot({ folderUuid: backup.folderUuid }); + if (error) { + throw logger.error({ tag: 'BACKUPS', msg: 'Error fetching backup folder tree snapshot', error }); + } + + const { size } = data; + if (size === 0) { + const { error } = await deleteBackup({ backup, isCurrent: true }); + if (error) { + throw logger.error({ tag: 'BACKUPS', msg: 'Error deleting backup after disabling it', error }); + } + } +} diff --git a/src/backend/features/backup/download-backup.test.ts b/src/backend/features/backup/download-backup.test.ts new file mode 100644 index 0000000000..0c4f5d6efa --- /dev/null +++ b/src/backend/features/backup/download-backup.test.ts @@ -0,0 +1,122 @@ +import path from 'node:path'; +import { rm } from 'node:fs/promises'; +import { ipcMain } from 'electron'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { loggerMock } from '../../../../tests/vitest/mocks.helper'; +import * as windowsModule from '../../../apps/main/windows'; +import * as downloadDeviceBackupZipModule from './download-device-backup-zip'; +import * as authServiceModule from '../../../apps/main/auth/service'; +import { downloadBackup } from './download-backup'; + +vi.mock('node:fs/promises', () => ({ + rm: vi.fn(), +})); + +describe('download-backup', () => { + const broadcastToWindowsMock = partialSpyOn(windowsModule, 'broadcastToWindows'); + const downloadDeviceBackupZipMock = partialSpyOn(downloadDeviceBackupZipModule, 'downloadDeviceBackupZip'); + const getUserMock = partialSpyOn(authServiceModule, 'getUser'); + + const ipcMainOnMock = vi.mocked(ipcMain.on); + const rmMock = vi.mocked(rm); + + const user = { bridgeUser: 'bridge-user', userId: 'user-id' }; + + const device = { + id: 1, + uuid: 'device-uuid', + name: 'Desktop', + bucket: 'bucket', + removed: false, + hasBackups: true, + }; + + const pathname = createAbsolutePath('/home/dev/Downloads'); + + let removeListenerMock: ReturnType; + + beforeEach(() => { + vi.useFakeTimers(); + vi.setSystemTime(new Date(2026, 3, 21, 9, 8, 7)); + + removeListenerMock = vi.fn(); + ipcMainOnMock.mockReturnValue({ removeListener: removeListenerMock } as never); + rmMock.mockResolvedValue(undefined as never); + getUserMock.mockReturnValue(user as never); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it('should download backup and broadcast progress when not aborted', async () => { + downloadDeviceBackupZipMock.mockImplementation(async ({ updateProgress }) => { + updateProgress(33); + }); + + await downloadBackup({ device, pathname }); + + call(loggerMock.debug).toMatchObject({ + tag: 'BACKUPS', + msg: '[BACKUPS] Downloading Device', + deviceName: device.name, + pathname, + }); + + call(downloadDeviceBackupZipMock).toMatchObject({ + device, + path: path.join(pathname, 'Backup_2026421987.zip'), + }); + + call(broadcastToWindowsMock).toStrictEqual([ + 'backup-download-progress', + { + id: device.uuid, + progress: 33, + }, + ]); + + expect(rmMock).not.toHaveBeenCalled(); + expect(removeListenerMock).toHaveBeenCalledWith('abort-download-backups-' + device.uuid, expect.any(Function)); + }); + + it('should skip broadcasting progress when aborted for the same device', async () => { + downloadDeviceBackupZipMock.mockImplementation(async ({ updateProgress }) => { + const abortListener = ipcMainOnMock.mock.calls[0]?.[1]; + abortListener?.({} as never, device.uuid); + updateProgress(90); + }); + + await downloadBackup({ device, pathname }); + + expect(broadcastToWindowsMock).not.toHaveBeenCalled(); + }); + + it('should keep broadcasting when abort event is for another device', async () => { + downloadDeviceBackupZipMock.mockImplementation(async ({ updateProgress }) => { + const abortListener = ipcMainOnMock.mock.calls[0]?.[1]; + abortListener?.({} as never, 'other-device-uuid'); + updateProgress(12); + }); + + await downloadBackup({ device, pathname }); + + call(broadcastToWindowsMock).toStrictEqual([ + 'backup-download-progress', + { + id: device.uuid, + progress: 12, + }, + ]); + }); + + it('should remove generated zip file when download fails', async () => { + downloadDeviceBackupZipMock.mockRejectedValue(new Error('download failed')); + + await downloadBackup({ device, pathname }); + + call(rmMock).toStrictEqual([path.join(pathname, 'Backup_2026421987.zip'), { force: true }]); + expect(removeListenerMock).toHaveBeenCalledWith('abort-download-backups-' + device.uuid, expect.any(Function)); + }); +}); diff --git a/src/backend/features/backup/download-backup.ts b/src/backend/features/backup/download-backup.ts new file mode 100644 index 0000000000..8eef91919c --- /dev/null +++ b/src/backend/features/backup/download-backup.ts @@ -0,0 +1,78 @@ +import { rm } from 'node:fs/promises'; +import { IpcMainEvent, ipcMain } from 'electron'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import type { Device } from './types/Device'; +import { broadcastToWindows } from '../../../apps/main/windows'; +import { downloadDeviceBackupZip } from './download-device-backup-zip'; +import { AbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import path from 'node:path'; +import { getUser } from '../../../apps/main/auth/service'; + +function createBackupZipFilePath({ pathname }: { pathname: AbsolutePath }) { + const date = new Date(); + const timestamp = [ + String(date.getFullYear()), + String(date.getMonth() + 1), + String(date.getDate()), + String(date.getHours()), + String(date.getMinutes()), + String(date.getSeconds()), + ].join(''); + + return path.join(pathname, `Backup_${timestamp}.zip`); +} + +type Props = { + device: Device; + pathname: AbsolutePath; +}; + +export async function downloadBackup({ device, pathname }: Props): Promise { + const user = getUser(); + if (!user) { + throw logger.error({ tag: 'BACKUPS', msg: 'No user found when trying to download backup' }); + } + + logger.debug({ + tag: 'BACKUPS', + msg: '[BACKUPS] Downloading Device', + deviceName: device.name, + pathname, + }); + + const zipFilePath = createBackupZipFilePath({ pathname }); + const abortController = new AbortController(); + + const abortListener = (_: IpcMainEvent, abortDeviceUuid: string) => { + if (abortDeviceUuid === device.uuid) { + abortController.abort(); + } + }; + + const listenerName = 'abort-download-backups-' + device.uuid; + const removeListenerIpc = ipcMain.on(listenerName, abortListener); + + try { + await downloadDeviceBackupZip({ + user, + device, + path: zipFilePath, + updateProgress: (progress) => { + if (abortController.signal.aborted) { + return; + } + + broadcastToWindows('backup-download-progress', { + id: device.uuid, + progress, + }); + }, + abortController, + }); + } catch (error) { + logger.error({ tag: 'BACKUPS', msg: 'Error downloading backup for device', deviceName: device.name, error }); + await rm(zipFilePath, { force: true }); + } + + removeListenerIpc.removeListener(listenerName, abortListener); +} diff --git a/src/backend/features/backup/download-device-backup-zip.test.ts b/src/backend/features/backup/download-device-backup-zip.test.ts new file mode 100644 index 0000000000..1943860ef2 --- /dev/null +++ b/src/backend/features/backup/download-device-backup-zip.test.ts @@ -0,0 +1,58 @@ +import * as fetchFolderModule from '../../../infra/drive-server/services/folder/services/fetch-folder'; +import * as getCredentialsModule from '../../../apps/main/auth/get-credentials'; +import * as downloadModule from '../../../apps/main/network/download'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { downloadDeviceBackupZip } from './download-device-backup-zip'; +import { User } from '../../../apps/main/types'; + +describe('download-device-backup-zip', () => { + const fetchFolderMock = partialSpyOn(fetchFolderModule, 'fetchFolder'); + const getCredentialsMock = partialSpyOn(getCredentialsModule, 'getCredentials'); + const downloadFolderAsZipMock = partialSpyOn(downloadModule, 'downloadFolderAsZip'); + + const updateProgress = vi.fn(); + const abortController = new AbortController(); + const user = { bridgeUser: 'bridge-user', userId: 'user-id' } as unknown as User; + + const device = { + id: 1, + uuid: 'device-uuid', + name: 'Laptop', + bucket: 'bucket', + removed: false, + hasBackups: true, + }; + + it('should return error when folder fetch fails', async () => { + fetchFolderMock.mockResolvedValue({ error: new Error('fetch failed') } as never); + + const result = await downloadDeviceBackupZip({ user, device, path: '/tmp/backup.zip', updateProgress }); + + expect(result.error?.message).toBe('Unsuccesful request to fetch folder'); + }); + + it('should download backup zip with credentials and progress hooks', async () => { + process.env.BRIDGE_URL = 'https://bridge.local'; + fetchFolderMock.mockResolvedValue({ data: { uuid: 'folder-uuid' } } as never); + getCredentialsMock.mockReturnValue({ mnemonic: 'mnemonic' } as never); + downloadFolderAsZipMock.mockResolvedValue(undefined as never); + + await downloadDeviceBackupZip({ user, device, path: '/tmp/backup.zip', updateProgress, abortController }); + + call(downloadFolderAsZipMock).toStrictEqual([ + 'Laptop', + 'https://bridge.local', + 'folder-uuid', + '/tmp/backup.zip', + { + bridgeUser: 'bridge-user', + bridgePass: 'user-id', + encryptionKey: 'mnemonic', + }, + { + abortController, + updateProgress, + }, + ]); + }); +}); diff --git a/src/backend/features/backup/download-device-backup-zip.ts b/src/backend/features/backup/download-device-backup-zip.ts new file mode 100644 index 0000000000..9dd0affa1a --- /dev/null +++ b/src/backend/features/backup/download-device-backup-zip.ts @@ -0,0 +1,58 @@ +import { PathLike } from 'node:fs'; +import type { Device } from './types/Device'; +import { User } from '../../../apps/main/types'; +import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; +import { getCredentials } from '../../../apps/main/auth/get-credentials'; +import { downloadFolderAsZip } from '../../../apps/main/network/download'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Result } from '../../../context/shared/domain/Result'; + +type Props = { + user: User; + device: Device; + path: PathLike; + updateProgress: (progress: number) => void; + abortController?: AbortController; +}; + +export async function downloadDeviceBackupZip({ + user, + device, + path, + updateProgress, + abortController, +}: Props): Promise> { + const { data: folder, error } = await fetchFolder(device.uuid); + if (error) { + logger.error({ tag: 'BACKUPS', msg: 'Unsuccesful request to fetch folder', error }); + return { error: new Error('Unsuccesful request to fetch folder') }; + } + + if (!folder || folder.uuid.length === 0) { + logger.error({ tag: 'BACKUPS', msg: 'No backup data found' }); + return { error: new Error('No backup data found') }; + } + + const networkApiUrl = process.env.BRIDGE_URL; + const bridgeUser = user.bridgeUser; + const bridgePass = user.userId; + const { mnemonic } = getCredentials(); + + await downloadFolderAsZip( + device.name, + networkApiUrl, + folder.uuid, + path, + { + bridgeUser, + bridgePass, + encryptionKey: mnemonic, + }, + { + abortController, + updateProgress, + }, + ); + + return { data: true }; +} diff --git a/src/backend/features/backup/enable-existing-backup.test.ts b/src/backend/features/backup/enable-existing-backup.test.ts new file mode 100644 index 0000000000..ef1942df3b --- /dev/null +++ b/src/backend/features/backup/enable-existing-backup.test.ts @@ -0,0 +1,92 @@ +import { enableExistingBackup } from './enable-existing-backup'; +import configStore from '../../../apps/main/config'; +import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; +import { createBackup } from './create-backup'; +import { migrateBackupEntryIfNeeded } from './migrate-backup-entry-if-needed'; +import { PATHS } from '../../../core/electron/paths'; +import { createAbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { DriveServerError } from 'src/infra/drive-server/drive-server.error'; +import { GetFolderContentDto } from 'src/infra/drive-server/out/dto'; + +vi.mock('../../../apps/main/config'); +vi.mock('../../../infra/drive-server/services/folder/services/fetch-folder'); +vi.mock('./create-backup'); +vi.mock('../../../backend/features/backup/migrate-backup-entry-if-needed'); + +const mockedConfigStore = vi.mocked(configStore); +const mockedFetchFolder = vi.mocked(fetchFolder); +const mockedCreateBackup = vi.mocked(createBackup); +const mockedMigrateBackupEntryIfNeeded = vi.mocked(migrateBackupEntryIfNeeded); + +describe('enable-existing-backup', () => { + const mockDevice = { + id: 123, + bucket: 'test-bucket', + uuid: 'device-uuid', + name: 'Test Device', + removed: false, + hasBackups: false, + }; + + const pathname = createAbsolutePath('/path/to/backup'); + const existingBackupData = { + folderUuid: 'existing-uuid', + folderId: 456, + enabled: false, + }; + + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('should create new backup when folder no longer exists', async () => { + const mockNewBackupInfo = { + folderUuid: 'new-folder-uuid', + folderId: 789, + pathname, + name: 'backup', + tmpPath: '/tmp', + backupsBucket: 'test-bucket', + }; + + mockedConfigStore.get.mockReturnValue({ [pathname]: existingBackupData }); + mockedFetchFolder.mockResolvedValue({ error: new DriveServerError('NOT_FOUND', 400, 'Folder not found') }); + mockedCreateBackup.mockResolvedValue({ data: mockNewBackupInfo }); + + const result = await enableExistingBackup({ pathname, device: mockDevice }); + + expect(mockedMigrateBackupEntryIfNeeded).not.toBeCalled(); + expect(mockedFetchFolder).toBeCalledWith(existingBackupData.folderUuid); + expect(mockedCreateBackup).toBeCalledWith({ pathname, device: mockDevice }); + expect(result).toStrictEqual({ data: mockNewBackupInfo }); + }); + + it('should enable existing backup when folder still exists', async () => { + mockedConfigStore.get + .mockReturnValueOnce({ [pathname]: existingBackupData }) + .mockReturnValueOnce({ [pathname]: existingBackupData }); + + mockedFetchFolder.mockResolvedValue({ + data: { id: existingBackupData.folderId } as unknown as GetFolderContentDto, + }); + + const result = await enableExistingBackup({ pathname, device: mockDevice }); + + expect(mockedMigrateBackupEntryIfNeeded).not.toBeCalled(); + expect(mockedFetchFolder).toBeCalledWith(existingBackupData.folderUuid); + expect(mockedConfigStore.set).toBeCalledWith('backupList', { + [pathname]: { ...existingBackupData, enabled: true }, + }); + + expect(result).toStrictEqual({ + data: { + folderUuid: existingBackupData.folderUuid, + folderId: existingBackupData.folderId, + pathname, + name: 'backup', + tmpPath: PATHS.TEMPORAL_FOLDER, + backupsBucket: mockDevice.bucket, + }, + }); + }); +}); diff --git a/src/backend/features/backup/enable-existing-backup.ts b/src/backend/features/backup/enable-existing-backup.ts new file mode 100644 index 0000000000..84dcacb2f8 --- /dev/null +++ b/src/backend/features/backup/enable-existing-backup.ts @@ -0,0 +1,76 @@ +import configStore from '../../../apps/main/config'; +import { BackupInfo } from '../../../apps/backups/BackupInfo'; +import { parse } from 'node:path'; +import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; +import { createBackup } from './create-backup'; +import { migrateBackupEntryIfNeeded } from './migrate-backup-entry-if-needed'; +import { Device } from './types/Device'; +import { AbsolutePath } from '../../../context/local/localFile/infrastructure/AbsolutePath'; +import { PATHS } from '../../../core/electron/paths'; +import { Result } from '../../../context/shared/domain/Result'; +import { BackupEntry } from './types/BackupEntry'; + +type Props = { + pathname: AbsolutePath; + device: Device; +}; + +async function resolveBackupEntry({ + pathname, + backup, +}: { + pathname: AbsolutePath; + backup: BackupEntry; +}): Promise> { + if (backup.folderUuid) { + return { data: backup }; + } + + return migrateBackupEntryIfNeeded({ pathname, backup }); +} + +function markBackupAsEnabled({ pathname }: { pathname: AbsolutePath }) { + const backupList = configStore.get('backupList'); + configStore.set('backupList', { ...backupList, [pathname]: { ...backupList[pathname], enabled: true } }); +} + +function buildBackupInfo({ + pathname, + backup, + device, +}: { + pathname: AbsolutePath; + backup: BackupEntry; + device: Device; +}): BackupInfo { + const { base } = parse(pathname); + return { + folderUuid: backup.folderUuid, + folderId: backup.folderId, + pathname, + name: base, + tmpPath: PATHS.TEMPORAL_FOLDER, + backupsBucket: device.bucket, + }; +} + +export async function enableExistingBackup({ pathname, device }: Props): Promise> { + const backupList = configStore.get('backupList'); + const rawBackup = backupList[pathname]; + + const { data: backup, error } = await resolveBackupEntry({ pathname, backup: rawBackup }); + if (error) return { error }; + + const { error: fetchError } = await fetchFolder(backup.folderUuid); + if (fetchError) { + const { data, error } = await createBackup({ pathname, device }); + if (error) return { error }; + + return { data }; + } + + markBackupAsEnabled({ pathname }); + const backupInfo = buildBackupInfo({ pathname, backup, device }); + + return { data: backupInfo }; +} diff --git a/src/backend/features/backup/find-backup-pathname-from-id.test.ts b/src/backend/features/backup/find-backup-pathname-from-id.test.ts new file mode 100644 index 0000000000..744a20f1bf --- /dev/null +++ b/src/backend/features/backup/find-backup-pathname-from-id.test.ts @@ -0,0 +1,28 @@ +import configStoreModule from '../../../apps/main/config'; +import { partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { findBackupPathnameFromId } from './find-backup-pathname-from-id'; + +describe('find-backup-pathname-from-id', () => { + const configStoreGetMock = partialSpyOn(configStoreModule, 'get'); + + it('should return pathname when backup id exists', () => { + configStoreGetMock.mockReturnValue({ + '/home/dev/Documents': { folderId: 1, enabled: true, folderUuid: 'uuid-1' }, + '/home/dev/Pictures': { folderId: 2, enabled: true, folderUuid: 'uuid-2' }, + }); + + const result = findBackupPathnameFromId({ id: 2 }); + + expect(result).toBe('/home/dev/Pictures'); + }); + + it('should return undefined when backup id does not exist', () => { + configStoreGetMock.mockReturnValue({ + '/home/dev/Documents': { folderId: 1, enabled: true, folderUuid: 'uuid-1' }, + }); + + const result = findBackupPathnameFromId({ id: 99 }); + + expect(result).toBeUndefined(); + }); +}); diff --git a/src/backend/features/backup/find-backup-pathname-from-id.ts b/src/backend/features/backup/find-backup-pathname-from-id.ts new file mode 100644 index 0000000000..d2366b7ece --- /dev/null +++ b/src/backend/features/backup/find-backup-pathname-from-id.ts @@ -0,0 +1,12 @@ +import configStore from '../../../apps/main/config'; + +type Props = { + id: number; +}; + +export function findBackupPathnameFromId({ id }: Props): string | undefined { + const backupsList = configStore.get('backupList'); + const entryfound = Object.entries(backupsList).find(([, backup]) => backup.folderId === id); + + return entryfound?.[0]; +} diff --git a/src/backend/features/backup/get-backup-folder-tree-snapshot.test.ts b/src/backend/features/backup/get-backup-folder-tree-snapshot.test.ts new file mode 100644 index 0000000000..0ed04d8663 --- /dev/null +++ b/src/backend/features/backup/get-backup-folder-tree-snapshot.test.ts @@ -0,0 +1,40 @@ +import { aes } from '@internxt/lib'; +import * as fetchFolderTreeByUuidModule from '../../../infra/drive-server/services/folder/services/fetch-folder-tree-by-uuid'; +import * as buildBackupFolderTreeSnapshotModule from './build-backup-folder-tree-snapshot'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { getBackupFolderTreeSnapshot } from './get-backup-folder-tree-snapshot'; + +describe('get-backup-folder-tree-snapshot', () => { + const fetchFolderTreeByUuidMock = partialSpyOn(fetchFolderTreeByUuidModule, 'fetchFolderTreeByUuid'); + const buildBackupFolderTreeSnapshotMock = partialSpyOn( + buildBackupFolderTreeSnapshotModule, + 'buildBackupFolderTreeSnapshot', + ); + const aesDecryptMock = partialSpyOn(aes, 'decrypt'); + + it('should return an error when fetching folder tree fails', async () => { + const error = new Error('Unsuccesful request to fetch folder tree'); + fetchFolderTreeByUuidMock.mockResolvedValue({ error: new Error('fetch failed') } as never); + + await expect(getBackupFolderTreeSnapshot({ folderUuid: 'folder-uuid' })).resolves.toStrictEqual({ error }); + }); + + it('should build backup tree snapshot and provide decrypt function', async () => { + process.env.NEW_CRYPTO_KEY = 'crypto-key'; + const tree = { id: 10, children: [], files: [], plainName: 'Root' }; + const expectedSnapshot = { tree, size: 0, folderDecryptedNames: {}, fileDecryptedNames: {} }; + + fetchFolderTreeByUuidMock.mockResolvedValue({ data: { tree } } as never); + buildBackupFolderTreeSnapshotMock.mockImplementation(({ decryptFileName }) => { + decryptFileName('encrypted-name', 10); + return expectedSnapshot as never; + }); + aesDecryptMock.mockReturnValue('decrypted-name'); + + const result = await getBackupFolderTreeSnapshot({ folderUuid: 'folder-uuid' }); + + call(fetchFolderTreeByUuidMock).toStrictEqual({ uuid: 'folder-uuid' }); + call(aesDecryptMock).toStrictEqual(['encrypted-name', 'crypto-key-10']); + expect(result).toStrictEqual({ data: expectedSnapshot }); + }); +}); diff --git a/src/backend/features/backup/get-backup-folder-tree-snapshot.ts b/src/backend/features/backup/get-backup-folder-tree-snapshot.ts new file mode 100644 index 0000000000..1325f5cb00 --- /dev/null +++ b/src/backend/features/backup/get-backup-folder-tree-snapshot.ts @@ -0,0 +1,27 @@ +import { aes } from '@internxt/lib'; +import { fetchFolderTreeByUuid } from '../../../infra/drive-server/services/folder/services/fetch-folder-tree-by-uuid'; +import { buildBackupFolderTreeSnapshot } from './build-backup-folder-tree-snapshot'; +import { BackupFolderTreeSnapshot } from './types/BackupFolderTreeSnapshot'; +import { Result } from '../../../context/shared/domain/Result'; + +type Props = { + folderUuid: string; +}; + +export async function getBackupFolderTreeSnapshot({ + folderUuid, +}: Props): Promise> { + const { data, error } = await fetchFolderTreeByUuid({ uuid: folderUuid }); + + if (error) { + return { error: new Error('Unsuccesful request to fetch folder tree') }; + } + + const { tree } = data; + const backupFolderTreeSnapshot = buildBackupFolderTreeSnapshot({ + tree, + decryptFileName: (name, folderId) => aes.decrypt(name, `${process.env.NEW_CRYPTO_KEY}-${folderId}`), + }); + + return { data: backupFolderTreeSnapshot }; +} diff --git a/src/backend/features/backup/ipc/device-ipc-handlers.ts b/src/backend/features/backup/ipc/device-ipc-handlers.ts new file mode 100644 index 0000000000..379d8e4637 --- /dev/null +++ b/src/backend/features/backup/ipc/device-ipc-handlers.ts @@ -0,0 +1,37 @@ +import { ipcMain } from 'electron'; +import { DeviceModule } from '../../device/device.module'; +import { addBackup } from '../add-backup'; +import { getPathFromDialog } from '../../../../core/utils/get-path-from-dialog'; +import { getActiveBackupDevices } from '../../device/get-active-backup-devices'; +import { createBackupsFromLocalPaths } from '../create-backups-from-local-paths'; +import { deleteBackup } from '../delete-backup'; +import { deleteDeviceBackups } from '../delete-device-backups'; +import { disableBackup } from '../disable-backup'; +import { changeBackupPath } from '../change-backup-path'; +import { downloadBackup } from '../download-backup'; + +ipcMain.handle('devices.get-all', () => getActiveBackupDevices()); + +ipcMain.handle('get-or-create-device', DeviceModule.getOrCreateDevice); + +ipcMain.handle('rename-device', (_, v) => DeviceModule.renameDevice(v)); + +ipcMain.handle('get-backups-from-device', (_, d, c?) => DeviceModule.getBackupsFromDevice(d, c)); + +ipcMain.handle('add-backup', () => addBackup()); + +ipcMain.handle('add-multiple-backups', (_, folderPaths) => createBackupsFromLocalPaths({ folderPaths })); + +ipcMain.handle('download-backup', (_, device, pathname) => downloadBackup({ device, pathname })); + +ipcMain.handle('delete-backup', (_, v, c?) => deleteBackup({ backup: v, isCurrent: c })); + +ipcMain.handle('delete-backups-from-device', (_, v, c?) => deleteDeviceBackups({ device: v, isCurrent: c })); + +ipcMain.handle('disable-backup', (_, v) => disableBackup({ backup: v })); + +ipcMain.handle('change-backup-path', (_, { currentPath, newPath }) => changeBackupPath({ currentPath, newPath })); + +ipcMain.on('add-device-issue', (_, e) => DeviceModule.addUnknownDeviceIssue(e)); + +ipcMain.handle('get-folder-path', () => getPathFromDialog()); diff --git a/src/backend/features/backup/migrate-backup-entry-if-needed.test.ts b/src/backend/features/backup/migrate-backup-entry-if-needed.test.ts new file mode 100644 index 0000000000..eeef0aa7eb --- /dev/null +++ b/src/backend/features/backup/migrate-backup-entry-if-needed.test.ts @@ -0,0 +1,38 @@ +import configStoreModule from '../../../apps/main/config'; +import * as getBackupFolderUuidModule from '../../../infra/drive-server/services/folder/services/fetch-backup-folder-uuid'; +import { logger } from '@internxt/drive-desktop-core/build/backend/core/logger/logger'; +import { call, partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { migrateBackupEntryIfNeeded } from './migrate-backup-entry-if-needed'; + +describe('migrate-backup-entry-if-needed', () => { + const getBackupFolderUuidMock = partialSpyOn(getBackupFolderUuidModule, 'getBackupFolderUuid'); + const configStoreGetMock = partialSpyOn(configStoreModule, 'get'); + const configStoreSetMock = partialSpyOn(configStoreModule, 'set'); + const loggerErrorMock = partialSpyOn(logger, 'error'); + + it('should migrate backup by fetching folder uuid and persisting it', async () => { + const pathname = '/home/dev/Documents'; + const backup = { folderId: 1, folderUuid: '', enabled: true }; + const backupList = { [pathname]: backup }; + + getBackupFolderUuidMock.mockResolvedValue({ data: 'new-folder-uuid' }); + configStoreGetMock.mockReturnValue(backupList); + + const result = await migrateBackupEntryIfNeeded({ pathname, backup }); + + expect(result.data?.folderUuid).toBe('new-folder-uuid'); + call(configStoreSetMock).toStrictEqual(['backupList', backupList]); + }); + + it('should return error when folder uuid retrieval fails', async () => { + const error = new Error('uuid request failed'); + const backup = { folderId: 1, folderUuid: '', enabled: true }; + + getBackupFolderUuidMock.mockResolvedValue({ error } as never); + + const result = await migrateBackupEntryIfNeeded({ pathname: '/home/dev/Documents', backup }); + + expect(result.error?.message).toBe(error.message); + expect(loggerErrorMock).toBeCalled(); + }); +}); diff --git a/src/backend/features/backup/migrate-backup-entry-if-needed.ts b/src/backend/features/backup/migrate-backup-entry-if-needed.ts new file mode 100644 index 0000000000..d84446cbcc --- /dev/null +++ b/src/backend/features/backup/migrate-backup-entry-if-needed.ts @@ -0,0 +1,35 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend/core/logger/logger'; +import configStore from '../../../apps/main/config'; +import { getBackupFolderUuid } from '../../../infra/drive-server/services/folder/services/fetch-backup-folder-uuid'; +import { Result } from '../../../context/shared/domain/Result'; +import { BackupEntry } from './types/BackupEntry'; + +type Props = { + pathname: string; + backup: BackupEntry; +}; + +export async function migrateBackupEntryIfNeeded({ pathname, backup }: Props): Promise> { + const { error, data: folderUuid } = await getBackupFolderUuid({ folderId: String(backup.folderId) }); + if (error) { + logger.error({ + tag: 'BACKUPS', + msg: `Failed to migrate backup entry for ${pathname}`, + error, + }); + return { error }; + } + + backup.folderUuid = folderUuid; + + const backupList = configStore.get('backupList'); + backupList[pathname] = backup; + configStore.set('backupList', backupList); + + logger.debug({ + tag: 'BACKUPS', + msg: `Successfully migrated backup entry for ${pathname} with UUID ${folderUuid}`, + }); + + return { data: backup }; +} diff --git a/src/backend/features/backup/types/BackupEntry.ts b/src/backend/features/backup/types/BackupEntry.ts new file mode 100644 index 0000000000..2b653e264c --- /dev/null +++ b/src/backend/features/backup/types/BackupEntry.ts @@ -0,0 +1,5 @@ +export type BackupEntry = { + enabled: boolean; + folderId: number; + folderUuid: string; +}; diff --git a/src/backend/features/backup/types/BackupFolderTreeSnapshot.ts b/src/backend/features/backup/types/BackupFolderTreeSnapshot.ts new file mode 100644 index 0000000000..41a119e5ee --- /dev/null +++ b/src/backend/features/backup/types/BackupFolderTreeSnapshot.ts @@ -0,0 +1,8 @@ +import { FolderTree } from '@internxt/sdk/dist/drive/storage/types'; + +export type BackupFolderTreeSnapshot = { + tree: FolderTree; + folderDecryptedNames: Record; + fileDecryptedNames: Record; + size: number; +}; diff --git a/src/backend/features/backup/types/Device.ts b/src/backend/features/backup/types/Device.ts new file mode 100644 index 0000000000..7983391bfa --- /dev/null +++ b/src/backend/features/backup/types/Device.ts @@ -0,0 +1,8 @@ +export type Device = { + id: number; + uuid: string; + name: string; + bucket: string; + removed: boolean; + hasBackups: boolean; +}; diff --git a/src/backend/features/backup/upload/create-backup-update-executor.test.ts b/src/backend/features/backup/upload/create-backup-update-executor.test.ts index bd9d683623..03e3ef2406 100644 --- a/src/backend/features/backup/upload/create-backup-update-executor.test.ts +++ b/src/backend/features/backup/upload/create-backup-update-executor.test.ts @@ -4,6 +4,7 @@ import { FileMother } from '../../../../context/virtual-drive/files/domain/__tes import { LocalFileMother } from '../../../../context/local/localFile/domain/__test-helpers__/LocalFileMother'; import { BackupProgressTracker } from '../backup-progress-tracker'; import { mockDeep } from 'vitest-mock-extended'; +import { Environment } from '@internxt/inxt-js'; import { createBackupUpdateExecutor, ModifiedFilePair } from './create-backup-update-executor'; import * as updateFileToBackupModule from './update-file-to-backup'; import * as backupErrorsTrackerModule from '..'; @@ -14,14 +15,16 @@ describe('createBackupUpdateExecutor', () => { let tracker: BackupProgressTracker; let abortController: AbortController; + let environment: Environment; beforeEach(() => { tracker = mockDeep(); abortController = new AbortController(); + environment = mockDeep(); }); function createExecutor() { - return createBackupUpdateExecutor('bucket', {} as any, tracker); + return createBackupUpdateExecutor('bucket', environment, tracker); } function createPair(): ModifiedFilePair { @@ -82,7 +85,7 @@ describe('createBackupUpdateExecutor', () => { size: localFile.size, bucket: 'bucket', fileUuid: remoteFile.uuid, - environment: {}, + environment, signal: abortController.signal, }); }); diff --git a/src/backend/features/cleaner/web-cache/utils/scan-firefox-cache-profiles.ts b/src/backend/features/cleaner/web-cache/utils/scan-firefox-cache-profiles.ts index 061d5a56b0..91aa469cad 100644 --- a/src/backend/features/cleaner/web-cache/utils/scan-firefox-cache-profiles.ts +++ b/src/backend/features/cleaner/web-cache/utils/scan-firefox-cache-profiles.ts @@ -25,7 +25,7 @@ export async function scanFirefoxCacheProfiles(firefoxCacheDir: string): Promise const profileDirsChecks = await Promise.allSettled( entries.map(async (entry) => { const isProfileDir = await isFirefoxProfileDirectory(entry, firefoxCacheDir); - return { entry: entry, isProfileDir }; + return { entry, isProfileDir }; }), ); diff --git a/src/backend/features/device/createNewDevice.ts b/src/backend/features/device/createNewDevice.ts index 026ffb2942..d094f5e987 100644 --- a/src/backend/features/device/createNewDevice.ts +++ b/src/backend/features/device/createNewDevice.ts @@ -1,5 +1,5 @@ import { Either, right } from './../../../context/shared/domain/Either'; -import { Device } from '../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; import { createUniqueDevice } from './createUniqueDevice'; import { saveDeviceToConfig } from './saveDeviceToConfig'; import { DeviceIdentifierDTO } from './device.types'; diff --git a/src/backend/features/device/createUniqueDevice.ts b/src/backend/features/device/createUniqueDevice.ts index e90de230d0..5cc6304044 100644 --- a/src/backend/features/device/createUniqueDevice.ts +++ b/src/backend/features/device/createUniqueDevice.ts @@ -1,5 +1,5 @@ -import { Device } from '../../../apps/main/device/service'; -import os from 'os'; +import { Device } from '../backup/types/Device'; +import { hostname } from 'node:os'; import { logger } from '@internxt/drive-desktop-core/build/backend'; import { tryCreateDevice } from './tryCreateDevice'; import { Either, left, right } from '../../../context/shared/domain/Either'; @@ -14,7 +14,7 @@ export async function createUniqueDevice( deviceIdentifier: DeviceIdentifierDTO, attempts = 1000, ): Promise> { - const baseName = os.hostname(); + const baseName = hostname(); const nameVariants = [baseName, ...Array.from({ length: attempts }, (_, i) => `${baseName} (${i + 1})`)]; for (const name of nameVariants) { diff --git a/src/backend/features/device/get-active-backup-devices.test.ts b/src/backend/features/device/get-active-backup-devices.test.ts new file mode 100644 index 0000000000..ed92ddcbb1 --- /dev/null +++ b/src/backend/features/device/get-active-backup-devices.test.ts @@ -0,0 +1,30 @@ +import { driveServerModule } from '../../../infra/drive-server/drive-server.module'; +import { partialSpyOn } from '../../../../tests/vitest/utils.helper'; +import { getActiveBackupDevices } from './get-active-backup-devices'; + +describe('get-active-backup-devices', () => { + const getDevicesMock = partialSpyOn(driveServerModule.backup, 'getDevices'); + + it('should return only active devices with backups', async () => { + getDevicesMock.mockResolvedValue({ + isLeft: () => false, + getRight: () => [ + { id: 1, uuid: '1', name: 'a', bucket: 'b', removed: false, hasBackups: true }, + { id: 2, uuid: '2', name: 'b', bucket: 'b', removed: true, hasBackups: true }, + { id: 3, uuid: '3', name: 'c', bucket: 'b', removed: false, hasBackups: false }, + ], + } as never); + + const result = await getActiveBackupDevices(); + + expect(result).toStrictEqual([{ id: 1, uuid: '1', name: 'a', bucket: 'b', removed: false, hasBackups: true }]); + }); + + it('should return empty array when service returns left response', async () => { + getDevicesMock.mockResolvedValue({ isLeft: () => true, getLeft: () => new Error('left error') } as never); + + const result = await getActiveBackupDevices(); + + expect(result).toStrictEqual([]); + }); +}); diff --git a/src/backend/features/device/get-active-backup-devices.ts b/src/backend/features/device/get-active-backup-devices.ts new file mode 100644 index 0000000000..f21bac6b0e --- /dev/null +++ b/src/backend/features/device/get-active-backup-devices.ts @@ -0,0 +1,14 @@ +import { driveServerModule } from '../../../infra/drive-server/drive-server.module'; +import type { Device } from '../backup/types/Device'; +import { logger } from '@internxt/drive-desktop-core/build/backend/core/logger/logger'; + +export async function getActiveBackupDevices(): Promise> { + const response = await driveServerModule.backup.getDevices(); + if (response.isLeft()) { + logger.error({ tag: 'BACKUPS', msg: 'Failed to fetch devices for backup', error: response.getLeft() }); + return []; + } + + const devices = response.getRight(); + return devices.filter(({ removed, hasBackups }) => !removed && hasBackups).map((device) => device); +} diff --git a/src/backend/features/device/getBackupsFromDevice.ts b/src/backend/features/device/getBackupsFromDevice.ts index 1154a008cc..460a3c307f 100644 --- a/src/backend/features/device/getBackupsFromDevice.ts +++ b/src/backend/features/device/getBackupsFromDevice.ts @@ -2,9 +2,10 @@ import { FolderDtoWithPathname } from './device.types'; import { fetchFolder } from '../../../infra/drive-server/services/folder/services/fetch-folder'; import configStore from '../../../apps/main/config'; import { BackupInfo } from './../../../apps/backups/BackupInfo'; -import { Device, findBackupPathnameFromId } from './../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; import { FolderDto } from '../../../infra/drive-server/out/dto'; import { mapFolderDtoToBackupInfo } from './utils/mapFolderDtoToBackupInfo'; +import { findBackupPathnameFromId } from '../backup/find-backup-pathname-from-id'; export async function getBackupsFromDevice(device: Device, isCurrent?: boolean): Promise> { const { data: folder, error } = await fetchFolder(device.uuid); @@ -16,7 +17,7 @@ export async function getBackupsFromDevice(device: Device, isCurrent?: boolean): const result = folder.children .map((backup: FolderDto) => ({ ...backup, - pathname: findBackupPathnameFromId(backup.id), + pathname: findBackupPathnameFromId({ id: backup.id }), })) .filter((backup): backup is FolderDtoWithPathname => { return !!(backup.pathname && backupsList[backup.pathname]?.enabled); diff --git a/src/backend/features/device/getOrCreateDevice.ts b/src/backend/features/device/getOrCreateDevice.ts index 101b4f69bb..170292fce4 100644 --- a/src/backend/features/device/getOrCreateDevice.ts +++ b/src/backend/features/device/getOrCreateDevice.ts @@ -1,4 +1,4 @@ -import { Device } from '../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; import configStore from '../../../apps/main/config'; import { logger } from '@internxt/drive-desktop-core/build/backend'; import { addUnknownDeviceIssue } from './addUnknownDeviceIssue'; diff --git a/src/backend/features/device/migrateLegacyDeviceIdentifier.ts b/src/backend/features/device/migrateLegacyDeviceIdentifier.ts index 05854de1db..217966be7b 100644 --- a/src/backend/features/device/migrateLegacyDeviceIdentifier.ts +++ b/src/backend/features/device/migrateLegacyDeviceIdentifier.ts @@ -1,6 +1,6 @@ import { logger } from '@internxt/drive-desktop-core/build/backend'; import { driveServerModule } from './../../../infra/drive-server/drive-server.module'; -import { Device } from './../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; import { getDeviceIdentifier } from './getDeviceIdentifier'; import configStore from './../../../apps/main/config'; import { BackupError } from '../../../infra/drive-server/services/backup/backup.error'; diff --git a/src/backend/features/device/renameDevice.ts b/src/backend/features/device/renameDevice.ts index 2c05eddcbe..e2e86f3b80 100644 --- a/src/backend/features/device/renameDevice.ts +++ b/src/backend/features/device/renameDevice.ts @@ -1,14 +1,14 @@ -import { Device } from '../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; import { driveServerModule } from '../../../infra/drive-server/drive-server.module'; import { getDeviceIdentifier } from './getDeviceIdentifier'; export async function renameDevice(deviceName: string): Promise { const deviceIdentifier = getDeviceIdentifier(); - if (deviceIdentifier.isLeft()) { + if (deviceIdentifier.error) { throw new Error('Error in the request to rename a device'); } - const response = await driveServerModule.backup.updateDeviceByIdentifier(deviceIdentifier.getRight().key, deviceName); + const response = await driveServerModule.backup.updateDeviceByIdentifier(deviceIdentifier.data.key, deviceName); if (response.isRight()) { return response.getRight(); } else { diff --git a/src/backend/features/device/saveDeviceToConfig.ts b/src/backend/features/device/saveDeviceToConfig.ts index 50d7899356..da116eea71 100644 --- a/src/backend/features/device/saveDeviceToConfig.ts +++ b/src/backend/features/device/saveDeviceToConfig.ts @@ -1,5 +1,5 @@ import configStore from '../../../apps/main/config'; -import { Device } from '../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; export function saveDeviceToConfig(device: Device) { configStore.set('deviceId', -1); diff --git a/src/backend/features/device/tryCreateDevice.ts b/src/backend/features/device/tryCreateDevice.ts index 536835048e..bf15078ce5 100644 --- a/src/backend/features/device/tryCreateDevice.ts +++ b/src/backend/features/device/tryCreateDevice.ts @@ -1,4 +1,4 @@ -import { Device } from './../../../apps/main/device/service'; +import { Device } from '../backup/types/Device'; import { left, right } from './../../../context/shared/domain/Either'; import { driveServerModule } from './../../../infra/drive-server/drive-server.module'; import { logger } from '@internxt/drive-desktop-core/build/backend'; diff --git a/src/backend/features/device/utils/deviceMapper.ts b/src/backend/features/device/utils/deviceMapper.ts index 9a1c84c833..09dbb45cd7 100644 --- a/src/backend/features/device/utils/deviceMapper.ts +++ b/src/backend/features/device/utils/deviceMapper.ts @@ -1,5 +1,5 @@ import { components } from '../../../../infra/schemas'; -import { Device } from '../../../../apps/main/device/service'; +import { Device } from '../../backup/types/Device'; /** * Maps a DeviceAsFolder from the API to the internal Device type diff --git a/src/backend/features/fuse/on-read/constants.ts b/src/backend/features/fuse/on-read/constants.ts new file mode 100644 index 0000000000..d0b25712aa --- /dev/null +++ b/src/backend/features/fuse/on-read/constants.ts @@ -0,0 +1 @@ +export const EMPTY = Buffer.alloc(0); diff --git a/src/backend/features/fuse/on-read/download-cache/allocate-file.test.ts b/src/backend/features/fuse/on-read/download-cache/allocate-file.test.ts new file mode 100644 index 0000000000..88bed40bf3 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/allocate-file.test.ts @@ -0,0 +1,54 @@ +import fs from 'node:fs/promises'; +import { allocateFile } from './allocate-file'; + +vi.mock('node:fs/promises', () => ({ + default: { + open: vi.fn(), + }, +})); + +const fsMock = vi.mocked(fs); + +function createHandle() { + return { + truncate: vi.fn().mockResolvedValue(undefined), + close: vi.fn().mockResolvedValue(undefined), + }; +} + +describe('allocateFile', () => { + it('opens the file for writing and truncates it to the requested size', async () => { + const handle = createHandle(); + fsMock.open.mockResolvedValue(handle as unknown as Awaited>); + + await allocateFile('/tmp/cache-file', 1024); + + expect(fsMock.open).toHaveBeenCalledWith('/tmp/cache-file', 'w'); + expect(handle.truncate).toHaveBeenCalledWith(1024); + }); + + it('closes the file handle after successful allocation', async () => { + const handle = createHandle(); + fsMock.open.mockResolvedValue(handle as unknown as Awaited>); + + await allocateFile('/tmp/cache-file', 1024); + + expect(handle.close).toHaveBeenCalledOnce(); + }); + + it('closes the file handle when truncate fails', async () => { + const handle = createHandle(); + handle.truncate.mockRejectedValue(new Error('truncate failed')); + fsMock.open.mockResolvedValue(handle as unknown as Awaited>); + + await expect(allocateFile('/tmp/cache-file', 1024)).rejects.toThrow('truncate failed'); + + expect(handle.close).toHaveBeenCalledOnce(); + }); + + it('propagates open failures', async () => { + fsMock.open.mockRejectedValue(new Error('open failed')); + + await expect(allocateFile('/tmp/cache-file', 1024)).rejects.toThrow('open failed'); + }); +}); diff --git a/src/backend/features/fuse/on-read/download-cache/allocate-file.ts b/src/backend/features/fuse/on-read/download-cache/allocate-file.ts new file mode 100644 index 0000000000..e8a1c1ae00 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/allocate-file.ts @@ -0,0 +1,21 @@ +import fs from 'node:fs/promises'; + +/** + * Pre-allocates a file on disk to the full expected size before any ranges are downloaded. + * + * This is necessary for random-access writes: since FUSE reads can arrive in any order, + * we need the file to exist at its full size so we can write each range at its correct + * byte offset. Without pre-allocation, writing at offset 500MB would fail because the + * file doesn't exist yet. + * + * The file is filled with zeros initially, the {@link rangeRegistry} tracks which regions + * contain real downloaded bytes vs unfilled zeros. + */ +export async function allocateFile(filePath: string, size: number): Promise { + const handle = await fs.open(filePath, 'w'); + try { + await handle.truncate(size); + } finally { + await handle.close(); + } +} diff --git a/src/backend/features/fuse/on-read/download-cache/constants.ts b/src/backend/features/fuse/on-read/download-cache/constants.ts new file mode 100644 index 0000000000..1fc635e970 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/constants.ts @@ -0,0 +1,7 @@ +/** + * 4MB blocks — matches the chunk size used by the legacy downloader, proven to work well + * for this codebase. Each block is downloaded in full on first access regardless of how + * small the FUSE read is, so subsequent reads within the same block are served from disk. + */ +export const BLOCK_SIZE = 4 * 1024 * 1024; +export const BITS_PER_BYTE = 8; diff --git a/src/backend/features/fuse/on-read/download-cache/download-and-save-block.test.ts b/src/backend/features/fuse/on-read/download-cache/download-and-save-block.test.ts new file mode 100644 index 0000000000..f082fda5c4 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/download-and-save-block.test.ts @@ -0,0 +1,214 @@ +import { type File } from '../../../../../context/virtual-drive/files/domain/File'; +import { downloadFileRange } from '../../../../../infra/environment/download-file/download-file'; +import { writeChunkToDisk } from '../read-chunk-from-disk'; +import { BLOCK_SIZE } from './constants'; +import { downloadAndCacheBlock } from './download-and-save-block'; +import { + clearHydrationState, + getOrCreateHydrationState, + isRangeHydrated, + markBlocksInRangeDownloaded, + type FileHydrationState, +} from './hydration-state'; + +vi.mock('../../../../../infra/environment/download-file/download-file', () => ({ + downloadFileRange: vi.fn(), +})); + +vi.mock('../read-chunk-from-disk', () => ({ + writeChunkToDisk: vi.fn(), +})); + +const downloadFileRangeMock = vi.mocked(downloadFileRange); +const writeChunkToDiskMock = vi.mocked(writeChunkToDisk); + +const virtualFile = { + contentsId: 'contents-id', + name: 'video', + nameWithExtension: 'video.mp4', + type: 'mp4', + uuid: 'uuid', + size: 1024, +} as unknown as File; + +function createState(): FileHydrationState { + return getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); +} + +function createVirtualFile(overrides: Partial = {}): File { + return { + ...virtualFile, + ...overrides, + } as File; +} + +function createProps(overrides: Partial[0]> = {}) { + return { + bucketId: 'bucket-id', + mnemonic: 'mnemonic', + network: {} as Parameters[0]['network'], + onDownloadProgress: vi.fn(), + virtualFile, + filePath: '/tmp/cache-file', + state: createState(), + blockStart: 100, + blockLength: 50, + ...overrides, + }; +} + +describe('downloadAndCacheBlock', () => { + beforeEach(() => { + clearHydrationState(); + downloadFileRangeMock.mockResolvedValue({ data: Buffer.from('downloaded') }); + writeChunkToDiskMock.mockResolvedValue(undefined); + }); + + it('downloads the requested range and writes it to the cache file offset', async () => { + const props = createProps(); + + await downloadAndCacheBlock(props); + + expect(downloadFileRangeMock).toHaveBeenCalledWith({ + fileId: virtualFile.contentsId, + bucketId: props.bucketId, + mnemonic: props.mnemonic, + network: props.network, + range: { position: props.blockStart, length: props.blockLength }, + signal: props.state.abortController.signal, + }); + expect(writeChunkToDiskMock).toHaveBeenCalledWith('/tmp/cache-file', Buffer.from('downloaded'), 100); + }); + + it('marks the block hydrated only after download and disk write succeed', async () => { + const props = createProps(); + + await downloadAndCacheBlock(props); + + expect(isRangeHydrated(props.state, { position: props.blockStart, length: props.blockLength })).toBe(true); + }); + + it('emits progress from hydrated bytes after the block is written and marked hydrated', async () => { + const onDownloadProgress = vi.fn(); + const hydratedFile = createVirtualFile({ contentsId: 'first-block-file', size: BLOCK_SIZE * 2 }); + const state = getOrCreateHydrationState(hydratedFile.contentsId, hydratedFile.size); + state.stopwatch = { elapsedTime: vi.fn(() => 123) } as unknown as FileHydrationState['stopwatch']; + + await downloadAndCacheBlock( + createProps({ + state, + onDownloadProgress, + virtualFile: hydratedFile, + blockStart: 0, + blockLength: BLOCK_SIZE, + }), + ); + + expect(onDownloadProgress).toHaveBeenCalledWith('video', 'mp4', BLOCK_SIZE, hydratedFile.size, 123); + }); + + it('does not report full progress for a random EOF block when earlier blocks are missing', async () => { + const onDownloadProgress = vi.fn(); + const eofFile = createVirtualFile({ contentsId: 'eof-file', size: BLOCK_SIZE * 3 + 123 }); + const state = getOrCreateHydrationState(eofFile.contentsId, eofFile.size); + + await downloadAndCacheBlock( + createProps({ + state, + onDownloadProgress, + virtualFile: eofFile, + blockStart: BLOCK_SIZE * 3, + blockLength: 123, + }), + ); + + expect(onDownloadProgress).toHaveBeenCalledWith('video', 'mp4', 123, eofFile.size, 0); + }); + + it('counts the final block by its actual length', async () => { + const onDownloadProgress = vi.fn(); + const fileWithPartialFinalBlock = createVirtualFile({ + contentsId: 'partial-final-block-file', + size: BLOCK_SIZE + 123, + }); + const state = getOrCreateHydrationState(fileWithPartialFinalBlock.contentsId, fileWithPartialFinalBlock.size); + + await downloadAndCacheBlock( + createProps({ + state, + onDownloadProgress, + virtualFile: fileWithPartialFinalBlock, + blockStart: BLOCK_SIZE, + blockLength: 123, + }), + ); + + expect(onDownloadProgress).toHaveBeenCalledWith('video', 'mp4', 123, fileWithPartialFinalBlock.size, 0); + }); + + it('reports 100% progress when every block is hydrated', async () => { + const onDownloadProgress = vi.fn(); + const fullyHydratedFile = createVirtualFile({ contentsId: 'fully-hydrated-file', size: BLOCK_SIZE + 123 }); + const state = getOrCreateHydrationState(fullyHydratedFile.contentsId, fullyHydratedFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: BLOCK_SIZE }); + + await downloadAndCacheBlock( + createProps({ + state, + onDownloadProgress, + virtualFile: fullyHydratedFile, + blockStart: BLOCK_SIZE, + blockLength: 123, + }), + ); + + expect(onDownloadProgress).toHaveBeenCalledWith('video', 'mp4', fullyHydratedFile.size, fullyHydratedFile.size, 0); + }); + + it('does not write, mark hydrated, or emit progress when the range download fails', async () => { + const props = createProps(); + downloadFileRangeMock.mockResolvedValue({ error: new Error('network failed') }); + + await expect(downloadAndCacheBlock(props)).resolves.toStrictEqual({ error: new Error('network failed') }); + + expect(writeChunkToDiskMock).not.toHaveBeenCalled(); + expect(isRangeHydrated(props.state, { position: props.blockStart, length: props.blockLength })).toBe(false); + expect(props.onDownloadProgress).not.toHaveBeenCalled(); + }); + + it('does not mark hydrated or emit progress when the disk write fails', async () => { + const props = createProps(); + writeChunkToDiskMock.mockRejectedValue(new Error('write failed')); + + await expect(downloadAndCacheBlock(props)).resolves.toStrictEqual({ error: new Error('write failed') }); + + expect(isRangeHydrated(props.state, { position: props.blockStart, length: props.blockLength })).toBe(false); + expect(props.onDownloadProgress).not.toHaveBeenCalled(); + }); + + it('does not start a download when hydration is already aborted', async () => { + const props = createProps(); + props.state.abortController.abort(); + + await expect(downloadAndCacheBlock(props)).resolves.toStrictEqual({ data: undefined }); + + expect(downloadFileRangeMock).not.toHaveBeenCalled(); + expect(writeChunkToDiskMock).not.toHaveBeenCalled(); + expect(isRangeHydrated(props.state, { position: props.blockStart, length: props.blockLength })).toBe(false); + expect(props.onDownloadProgress).not.toHaveBeenCalled(); + }); + + it('does not write, mark hydrated, or emit progress when hydration aborts after download', async () => { + const props = createProps(); + downloadFileRangeMock.mockImplementation(async () => { + props.state.abortController.abort(); + return { data: Buffer.from('downloaded') }; + }); + + await expect(downloadAndCacheBlock(props)).resolves.toStrictEqual({ data: undefined }); + + expect(writeChunkToDiskMock).not.toHaveBeenCalled(); + expect(isRangeHydrated(props.state, { position: props.blockStart, length: props.blockLength })).toBe(false); + expect(props.onDownloadProgress).not.toHaveBeenCalled(); + }); +}); diff --git a/src/backend/features/fuse/on-read/download-cache/download-and-save-block.ts b/src/backend/features/fuse/on-read/download-cache/download-and-save-block.ts new file mode 100644 index 0000000000..ccfccc5521 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/download-and-save-block.ts @@ -0,0 +1,62 @@ +import { type HandleReadDeps } from '../types'; +import { writeChunkToDisk } from '../read-chunk-from-disk'; +import { getHydratedBytes, type FileHydrationState, markBlocksInRangeDownloaded } from './hydration-state'; +import { type File } from '../../../../../context/virtual-drive/files/domain/File'; +import { downloadFileRange } from '../../../../../infra/environment/download-file/download-file'; +import { type Result } from '../../../../../context/shared/domain/Result'; +type Props = { + bucketId: HandleReadDeps['bucketId']; + mnemonic: HandleReadDeps['mnemonic']; + network: HandleReadDeps['network']; + onDownloadProgress: HandleReadDeps['onDownloadProgress']; + virtualFile: File; + filePath: string; + state: FileHydrationState; + blockStart: number; + blockLength: number; +}; + +/** + * Downloads a block range, writes it to disk at the correct offset, and marks it as downloaded. + */ +export async function downloadAndCacheBlock({ + bucketId, + mnemonic, + network, + onDownloadProgress, + virtualFile, + filePath, + state, + blockStart, + blockLength, +}: Props): Promise> { + if (isAborted(state)) return { data: undefined }; + + try { + const download = await downloadFileRange({ + fileId: virtualFile.contentsId, + bucketId, + mnemonic, + network, + range: { position: blockStart, length: blockLength }, + signal: state.abortController.signal, + }); + if (isAborted(state)) return { data: undefined }; + if (download.error) return { error: download.error }; + + await writeChunkToDisk(filePath, download.data, blockStart); + if (isAborted(state)) return { data: undefined }; + + markBlocksInRangeDownloaded(state, { position: blockStart, length: blockLength }); + const elapsedTime = state.stopwatch?.elapsedTime() ?? 0; + onDownloadProgress(virtualFile.name, virtualFile.type, getHydratedBytes(state), virtualFile.size, elapsedTime); + return { data: undefined }; + } catch (error) { + if (isAborted(state)) return { data: undefined }; + return { error: error instanceof Error ? error : new Error('Unknown error occurred') }; + } +} + +function isAborted(state: FileHydrationState): boolean { + return state.abortController.signal.aborted; +} diff --git a/src/backend/features/fuse/on-read/download-cache/expand-to-block-boundaries.test.ts b/src/backend/features/fuse/on-read/download-cache/expand-to-block-boundaries.test.ts new file mode 100644 index 0000000000..8c7e5f9600 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/expand-to-block-boundaries.test.ts @@ -0,0 +1,43 @@ +import { BLOCK_SIZE } from './constants'; +import { expandToBlockBoundaries } from './expand-to-block-boundaries'; + +describe('expandToBlockBoundaries', () => { + it('expands a small read inside the first block to the full first block', () => { + const result = expandToBlockBoundaries({ + range: { position: 100, length: 4096 }, + fileSize: BLOCK_SIZE * 3, + }); + + expect(result).toStrictEqual({ blockStart: 0, blockLength: BLOCK_SIZE }); + }); + + it('starts at the containing block boundary for reads after the first block', () => { + const result = expandToBlockBoundaries({ + range: { position: BLOCK_SIZE + 100, length: 4096 }, + fileSize: BLOCK_SIZE * 3, + }); + + expect(result).toStrictEqual({ blockStart: BLOCK_SIZE, blockLength: BLOCK_SIZE }); + }); + + it('expands reads crossing a block boundary to cover every touched block', () => { + const result = expandToBlockBoundaries({ + range: { position: BLOCK_SIZE - 100, length: 200 }, + fileSize: BLOCK_SIZE * 3, + }); + + expect(result).toStrictEqual({ blockStart: 0, blockLength: BLOCK_SIZE * 2 }); + }); + + it('expands a read inside a partial last block to that whole partial block', () => { + const partialLastBlockLength = 500; + const fileSize = BLOCK_SIZE + partialLastBlockLength; + + const result = expandToBlockBoundaries({ + range: { position: BLOCK_SIZE + 100, length: 100 }, + fileSize, + }); + + expect(result).toStrictEqual({ blockStart: BLOCK_SIZE, blockLength: partialLastBlockLength }); + }); +}); diff --git a/src/backend/features/fuse/on-read/download-cache/expand-to-block-boundaries.ts b/src/backend/features/fuse/on-read/download-cache/expand-to-block-boundaries.ts new file mode 100644 index 0000000000..ebfc009bd5 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/expand-to-block-boundaries.ts @@ -0,0 +1,17 @@ +import { ReadRange } from '../types'; +import { BLOCK_SIZE } from './constants'; + +/** + * Given a position and length, rounds up to 4MB block boundaries so that every + * request downloads complete blocks. Ensuring correct bitmap tracking, prefetching, + * and preventing double downloads. + */ +export function expandToBlockBoundaries({ range, fileSize }: { range: ReadRange; fileSize: number }): { + blockStart: number; + blockLength: number; +} { + const blockStart = Math.floor(range.position / BLOCK_SIZE) * BLOCK_SIZE; + const end = range.position + range.length; + const blockEnd = Math.min(Math.ceil(end / BLOCK_SIZE) * BLOCK_SIZE, fileSize); + return { blockStart, blockLength: blockEnd - blockStart }; +} diff --git a/src/backend/features/fuse/on-read/download-cache/file-exists-on-disk.test.ts b/src/backend/features/fuse/on-read/download-cache/file-exists-on-disk.test.ts new file mode 100644 index 0000000000..2b5318f6f1 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/file-exists-on-disk.test.ts @@ -0,0 +1,26 @@ +import fs from 'node:fs/promises'; +import { fileExistsOnDisk } from './file-exists-on-disk'; + +vi.mock('node:fs/promises', () => ({ + default: { + stat: vi.fn(), + }, +})); + +const fsMock = vi.mocked(fs); + +describe('fileExistsOnDisk', () => { + it('returns true when fs.stat succeeds', async () => { + fsMock.stat.mockResolvedValue({} as Awaited>); + + await expect(fileExistsOnDisk('/tmp/cache-file')).resolves.toBe(true); + + expect(fsMock.stat).toHaveBeenCalledWith('/tmp/cache-file'); + }); + + it('returns false when fs.stat rejects', async () => { + fsMock.stat.mockRejectedValue(new Error('missing')); + + await expect(fileExistsOnDisk('/tmp/cache-file')).resolves.toBe(false); + }); +}); diff --git a/src/backend/features/fuse/on-read/download-cache/file-exists-on-disk.ts b/src/backend/features/fuse/on-read/download-cache/file-exists-on-disk.ts new file mode 100644 index 0000000000..a8237fbd3a --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/file-exists-on-disk.ts @@ -0,0 +1,7 @@ +import fs from 'node:fs/promises'; +export async function fileExistsOnDisk(filePath: string): Promise { + return fs + .stat(filePath) + .then(() => true) + .catch(() => false); +} diff --git a/src/backend/features/fuse/on-read/download-cache/hydration-state.test.ts b/src/backend/features/fuse/on-read/download-cache/hydration-state.test.ts new file mode 100644 index 0000000000..255461e2ba --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/hydration-state.test.ts @@ -0,0 +1,233 @@ +import { + abortAllHydrations, + abortHydrationState, + clearHydrationState, + ensureAllocatedOnce, + finalizeIfNeeded, + getExistingHydrationState, + getHydratedBytes, + getOrCreateHydrationState, + isFileHydrated, + markBlocksInRangeDownloaded, + markFinalized, +} from './hydration-state'; +import { allocateFile } from './allocate-file'; +import { BLOCK_SIZE } from './constants'; + +vi.mock('./allocate-file', () => ({ + allocateFile: vi.fn(), +})); + +const allocateFileMock = vi.mocked(allocateFile); + +describe('hydration-state lifecycle', () => { + beforeEach(() => { + clearHydrationState(); + }); + + it('reads an existing state without creating a new one', () => { + const created = getOrCreateHydrationState('contents-id', 1024); + + const existing = getExistingHydrationState('contents-id'); + + expect(existing).toBe(created); + }); + + it('does not create state when reading a missing contents id', () => { + const missing = getExistingHydrationState('missing'); + + expect(missing).toBeUndefined(); + expect(getExistingHydrationState('missing')).toBeUndefined(); + }); + + it('creates state once per contents id', () => { + const first = getOrCreateHydrationState('contents-id', 1024); + const second = getOrCreateHydrationState('contents-id', 2048); + + expect(second).toBe(first); + }); + + it('creates new states with a fresh AbortController and unfinished finalization state', () => { + const first = getOrCreateHydrationState('first', 1024); + const second = getOrCreateHydrationState('second', 1024); + + expect(first.abortController).toBeInstanceOf(AbortController); + expect(second.abortController).toBeInstanceOf(AbortController); + expect(first.abortController).not.toBe(second.abortController); + expect(first.fileSize).toBe(1024); + expect(first.hydratedBytes).toBe(0); + expect(first.finalized).toBe(false); + expect(first.finalization).toBeUndefined(); + }); + + it('aborts one hydration state without aborting another', () => { + const first = getOrCreateHydrationState('first', 1024); + const second = getOrCreateHydrationState('second', 1024); + + abortHydrationState(first); + + expect(first.abortController.signal.aborted).toBe(true); + expect(second.abortController.signal.aborted).toBe(false); + }); + + it('aborts every hydration state', () => { + const first = getOrCreateHydrationState('first', 1024); + const second = getOrCreateHydrationState('second', 1024); + + expect(first.abortController.signal.aborted).toBe(false); + expect(second.abortController.signal.aborted).toBe(false); + abortAllHydrations(); + + expect(first.abortController.signal.aborted).toBe(true); + expect(second.abortController.signal.aborted).toBe(true); + }); + + it('reuses the in-flight repository registration for concurrent finalization attempts', () => { + const state = getOrCreateHydrationState('contents-id', 1024); + const promise = new Promise(() => undefined); + + const first = finalizeIfNeeded(state, () => promise); + const second = finalizeIfNeeded(state, () => Promise.resolve()); + + expect(second).toBe(first); + expect(state.finalization).toBe(first); + expect(state.finalized).toBe(false); + }); + + it('allows failed finalization to be retried', async () => { + const state = getOrCreateHydrationState('contents-id', 1024); + + await expect(finalizeIfNeeded(state, () => Promise.reject(new Error('register failed')))).rejects.toThrow( + 'register failed', + ); + + expect(state.finalization).toBeUndefined(); + expect(state.finalized).toBe(false); + + await finalizeIfNeeded(state, () => Promise.resolve()); + + expect(state.finalized).toBe(true); + }); + + it('marks successful finalization as finalized', async () => { + const state = getOrCreateHydrationState('contents-id', 1024); + + await finalizeIfNeeded(state, () => Promise.resolve()); + + expect(state.finalized).toBe(true); + expect(state.finalization).toBeUndefined(); + }); + + it('can mark a state as finalized directly', () => { + const state = getOrCreateHydrationState('contents-id', 1024); + + markFinalized(state); + + expect(state.finalized).toBe(true); + }); + + it('reports hydrated bytes from completed blocks only', () => { + const fileSize = BLOCK_SIZE * 2 + 123; + const state = getOrCreateHydrationState('contents-id', fileSize); + + markBlocksInRangeDownloaded(state, { position: 0, length: BLOCK_SIZE }); + + expect(getHydratedBytes(state)).toBe(BLOCK_SIZE); + }); + + it('counts the final block by its actual byte length', () => { + const fileSize = BLOCK_SIZE * 2 + 123; + const state = getOrCreateHydrationState('contents-id', fileSize); + + markBlocksInRangeDownloaded(state, { position: BLOCK_SIZE * 2, length: 123 }); + + expect(getHydratedBytes(state)).toBe(123); + }); + + it('reports full file size when every block is hydrated', () => { + const fileSize = BLOCK_SIZE + 123; + const state = getOrCreateHydrationState('contents-id', fileSize); + + markBlocksInRangeDownloaded(state, { position: 0, length: BLOCK_SIZE }); + markBlocksInRangeDownloaded(state, { position: BLOCK_SIZE, length: 123 }); + + expect(getHydratedBytes(state)).toBe(fileSize); + }); + + it('counts hydrated bytes only once when the same block is marked again', () => { + const fileSize = BLOCK_SIZE + 123; + const state = getOrCreateHydrationState('contents-id', fileSize); + + markBlocksInRangeDownloaded(state, { position: 0, length: BLOCK_SIZE }); + markBlocksInRangeDownloaded(state, { position: 10, length: 10 }); + + expect(getHydratedBytes(state)).toBe(BLOCK_SIZE); + }); + + it('treats an empty file as fully hydrated without marking any blocks', () => { + const state = getOrCreateHydrationState('empty-contents-id', 0); + + expect(isFileHydrated(state)).toBe(true); + expect(getHydratedBytes(state)).toBe(0); + }); + + describe('file allocation', () => { + it('allocates a file only once for concurrent callers', async () => { + const state = getOrCreateHydrationState('contents-id', 1024); + let resolveAllocation: () => void = () => undefined; + allocateFileMock.mockReturnValue( + new Promise((resolve) => { + resolveAllocation = resolve; + }), + ); + + const first = ensureAllocatedOnce(state, '/tmp/cache-file', 1024); + const second = ensureAllocatedOnce(state, '/tmp/cache-file', 1024); + + expect(first).toBe(second); + expect(allocateFileMock).toHaveBeenCalledOnce(); + expect(allocateFileMock).toHaveBeenCalledWith('/tmp/cache-file', 1024); + + resolveAllocation(); + await expect(first).resolves.toStrictEqual({ data: undefined }); + await expect(second).resolves.toStrictEqual({ data: undefined }); + }); + + it('keeps successful allocation in state so later callers reuse it', async () => { + const state = getOrCreateHydrationState('contents-id', 1024); + allocateFileMock.mockResolvedValue(undefined); + + const first = ensureAllocatedOnce(state, '/tmp/cache-file', 1024); + await expect(first).resolves.toStrictEqual({ data: undefined }); + const second = ensureAllocatedOnce(state, '/tmp/cache-file', 1024); + + expect(second).toBe(first); + expect(allocateFileMock).toHaveBeenCalledOnce(); + }); + + it('allows failed allocation to be retried', async () => { + const state = getOrCreateHydrationState('contents-id', 1024); + allocateFileMock.mockRejectedValueOnce(new Error('allocation failed')).mockResolvedValueOnce(undefined); + + await expect(ensureAllocatedOnce(state, '/tmp/cache-file', 1024)).resolves.toStrictEqual({ + error: new Error('allocation failed'), + }); + + expect(state.allocation).toBeUndefined(); + + await expect(ensureAllocatedOnce(state, '/tmp/cache-file', 1024)).resolves.toStrictEqual({ data: undefined }); + + expect(allocateFileMock).toHaveBeenCalledTimes(2); + }); + + it('starts the state stopwatch when allocation begins', async () => { + const state = getOrCreateHydrationState('contents-id', 1024); + allocateFileMock.mockResolvedValue(undefined); + + await expect(ensureAllocatedOnce(state, '/tmp/cache-file', 1024)).resolves.toStrictEqual({ data: undefined }); + + expect(state.stopwatch).toBeDefined(); + expect(state.stopwatch?.elapsedTime()).not.toBe(-1); + }); + }); +}); diff --git a/src/backend/features/fuse/on-read/download-cache/hydration-state.ts b/src/backend/features/fuse/on-read/download-cache/hydration-state.ts new file mode 100644 index 0000000000..51dc4e9281 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/hydration-state.ts @@ -0,0 +1,245 @@ +import { BITS_PER_BYTE, BLOCK_SIZE } from './constants'; +import { allocateFile } from './allocate-file'; +import { Stopwatch } from '../../../../../apps/shared/types/Stopwatch'; +import { type Result } from '../../../../../context/shared/domain/Result'; +import { ReadRange } from '../types'; + +/** + * Tracks which byte ranges of a file have been downloaded and written to disk. + * + * Uses a bitmap where each bit represents one 4MB block of the file. + * A set bit means that block has been FULLY downloaded and written to disk. + * An unset bit means that block contains pre-allocation zeros — not real data. + * + * This is necessary because files are pre-allocated to their full size before any + * data is downloaded, making it impossible to distinguish real bytes from zeros + * by inspecting the file alone. + * + * A block is only marked after its full write to disk succeeds — never partially. + * A hard kill mid-write is handled by wiping the download cache on startup. + * + * Concurrent reads for the same block share the in-flight block download promise + * instead of starting duplicate downloads. + */ + +export type FileHydrationState = { + bitmap: Buffer; + fileSize: number; + totalBlocks: number; + hydratedBytes: number; + blocksBeingDownloaded: Map>>; + allocation?: Promise>; + stopwatch?: Stopwatch; + finalized: boolean; + finalization?: Promise; + abortController: AbortController; +}; + +const hydrationState = new Map(); + +export function getExistingHydrationState(contentsId: string): FileHydrationState | undefined { + return hydrationState.get(contentsId); +} + +export function getOrCreateHydrationState(contentsId: string, fileSize: number): FileHydrationState { + const existing = getExistingHydrationState(contentsId); + if (existing) return existing; + + const totalBlocks = Math.ceil(fileSize / BLOCK_SIZE); + const size = Math.ceil(totalBlocks / BITS_PER_BYTE); + const state: FileHydrationState = { + bitmap: Buffer.alloc(size, 0), + fileSize, + totalBlocks, + hydratedBytes: 0, + blocksBeingDownloaded: new Map(), + finalized: false, + abortController: new AbortController(), + }; + hydrationState.set(contentsId, state); + return state; +} + +export function ensureAllocatedOnce( + state: FileHydrationState, + filePath: string, + fileSize: number, +): Promise> { + if (state.allocation) return state.allocation; + + state.stopwatch = new Stopwatch(); + state.stopwatch.start(); + + const allocation = allocateFile(filePath, fileSize).then( + (): Result => ({ data: undefined }), + (error): Result => { + if (state.allocation === allocation) { + state.allocation = undefined; + state.stopwatch = undefined; + } + return { error: error instanceof Error ? error : new Error('Unknown error occurred') }; + }, + ); + + state.allocation = allocation; + return allocation; +} + +function blockIndexForByte(byte: number): number { + return Math.floor(byte / BLOCK_SIZE); +} + +/** + * Creates a bitmask: a number where exactly ONE bit is turned on. + * + * Think of a byte as 8 switches: + * [bit7][bit6][bit5][bit4][bit3][bit2][bit1][bit0] + * + * The mask selects exactly one of those switches. + * + * Examples: + * bitIndexInByte = 0 is 0b00000001 (selects bit 0) + * bitIndexInByte = 2 is 0b00000100 (selects bit 2) + * bitIndexInByte = 7 is 0b10000000 (selects bit 7) + * + * Why we need this: + * - AND (&) with the mask → checks if that bit is set + * - OR (|) with the mask → sets that bit + * + * Implementation: + * Start with 1 (0b00000001) and shift it left N times. + */ +function bitMask(bitIndexInByte: number): number { + return 1 << bitIndexInByte; +} + +function getBit(bitmap: Buffer, blockIndex: number): boolean { + const byteIndex = Math.floor(blockIndex / BITS_PER_BYTE); + const bitIndexInByte = blockIndex % BITS_PER_BYTE; + return (bitmap[byteIndex] & bitMask(bitIndexInByte)) !== 0; +} + +function setBit(bitmap: Buffer, blockIndex: number): void { + const byteIndex = Math.floor(blockIndex / BITS_PER_BYTE); + const bitIndexInByte = blockIndex % BITS_PER_BYTE; + bitmap[byteIndex] = bitmap[byteIndex] | bitMask(bitIndexInByte); +} + +export function isFileHydrated(state: FileHydrationState): boolean { + return state.hydratedBytes === state.fileSize; +} + +export function getHydratedBytes(state: FileHydrationState): number { + return state.hydratedBytes; +} + +function blocksWithinRange({ position, length }: ReadRange): Array { + const first = blockIndexForByte(position); + const last = blockIndexForByte(position + length - 1); + const blocks: number[] = []; + for (let block = first; block <= last; block++) { + blocks.push(block); + } + return blocks; +} + +export function isRangeHydrated(state: FileHydrationState, { position, length }: ReadRange): boolean { + return blocksWithinRange({ position, length }).every((block) => getBit(state.bitmap, block)); +} + +export function markBlocksInRangeDownloaded(state: FileHydrationState, { position, length }: ReadRange): void { + for (const block of blocksWithinRange({ position, length })) { + if (!getBit(state.bitmap, block)) { + setBit(state.bitmap, block); + state.hydratedBytes += blockByteLength(state, block); + } + } +} + +function blockByteLength(state: FileHydrationState, block: number): number { + const blockStart = block * BLOCK_SIZE; + return Math.min(BLOCK_SIZE, state.fileSize - blockStart); +} + +/** + * Returns block indices within the range that are neither hydrated nor already downloading. + * Call after waiting for existing in-flight blocks to identify the remaining work. + */ +export function getMissingBlocks(state: FileHydrationState, { position, length }: ReadRange): number[] { + return blocksWithinRange({ position, length }).filter( + (block) => !getBit(state.bitmap, block) && !state.blocksBeingDownloaded.has(block), + ); +} + +export function getBlocksBeingDownloaded( + state: FileHydrationState, + { position, length }: ReadRange, +): Map>> { + const blocksBeingDownloadedWithinRange = new Map>>(); + for (const block of blocksWithinRange({ position, length })) { + const existing = state.blocksBeingDownloaded.get(block); + if (existing) blocksBeingDownloadedWithinRange.set(block, existing); + } + return blocksBeingDownloadedWithinRange; +} + +export function setBlockDownloadInFlight( + state: FileHydrationState, + block: number, + promise: Promise>, +): void { + state.blocksBeingDownloaded.set(block, promise); +} + +export function clearBlockDownloadInFlight( + state: FileHydrationState, + block: number, + promise: Promise>, +): void { + if (state.blocksBeingDownloaded.get(block) === promise) { + state.blocksBeingDownloaded.delete(block); + } +} + +export function finalizeIfNeeded(state: FileHydrationState, finalize: () => Promise): Promise { + if (state.finalized) return Promise.resolve(); + if (state.finalization) return state.finalization; + + const finalization = Promise.resolve() + .then(finalize) + .then(() => { + markFinalized(state); + }) + .finally(() => { + if (state.finalization === finalization) { + state.finalization = undefined; + } + }); + state.finalization = finalization; + return finalization; +} + +export function markFinalized(state: FileHydrationState): void { + state.finalized = true; +} + +export function abortHydrationState(state: FileHydrationState): void { + state.abortController.abort(); +} + +export function abortAllHydrations(): void { + for (const state of hydrationState.values()) { + abortHydrationState(state); + } +} + +/** + * Removes the bitmap for a file — call when the file is deleted or cache is cleared. + */ +export function deleteHydrationState(contentsId: string): void { + hydrationState.delete(contentsId); +} + +export function clearHydrationState(): void { + hydrationState.clear(); +} diff --git a/src/backend/features/fuse/on-read/download-cache/read-if-hydrated.test.ts b/src/backend/features/fuse/on-read/download-cache/read-if-hydrated.test.ts new file mode 100644 index 0000000000..033b9d6639 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/read-if-hydrated.test.ts @@ -0,0 +1,53 @@ +import { readIfHydrated } from './read-if-hydrated'; +import { + clearHydrationState, + getExistingHydrationState, + getOrCreateHydrationState, + markBlocksInRangeDownloaded, +} from './hydration-state'; +import { readChunkFromDisk } from '../read-chunk-from-disk'; + +vi.mock('../read-chunk-from-disk', () => ({ + readChunkFromDisk: vi.fn(), +})); + +const readChunkFromDiskMock = vi.mocked(readChunkFromDisk); + +describe('readIfHydrated', () => { + beforeEach(() => { + clearHydrationState(); + }); + + it('returns undefined when no hydration state exists', async () => { + const result = await readIfHydrated('/tmp/cache-file', 'contents-id', { position: 0, length: 10 }); + + expect(result).toBeUndefined(); + }); + + it('does not create hydration state when no hydration state exists', async () => { + await readIfHydrated('/tmp/cache-file', 'contents-id', { position: 0, length: 10 }); + + expect(getExistingHydrationState('contents-id')).toBeUndefined(); + }); + + it('returns undefined when the requested range is not hydrated', async () => { + getOrCreateHydrationState('contents-id', 1024); + + const result = await readIfHydrated('/tmp/cache-file', 'contents-id', { position: 0, length: 10 }); + + expect(result).toBeUndefined(); + expect(readChunkFromDiskMock).not.toHaveBeenCalled(); + }); + + it('reads bytes from disk when the requested range is hydrated', async () => { + const chunk = Buffer.from('cached'); + const state = getOrCreateHydrationState('contents-id', 1024); + markBlocksInRangeDownloaded(state, { position: 0, length: 10 }); + readChunkFromDiskMock.mockResolvedValue(chunk); + + const result = await readIfHydrated('/tmp/cache-file', 'contents-id', { position: 0, length: 10 }); + + expect(result).toBe(chunk); + expect(readChunkFromDiskMock).toHaveBeenCalledWith('/tmp/cache-file', 10, 0); + }); +}); diff --git a/src/backend/features/fuse/on-read/download-cache/read-if-hydrated.ts b/src/backend/features/fuse/on-read/download-cache/read-if-hydrated.ts new file mode 100644 index 0000000000..a204506a80 --- /dev/null +++ b/src/backend/features/fuse/on-read/download-cache/read-if-hydrated.ts @@ -0,0 +1,15 @@ +import { readChunkFromDisk } from '../read-chunk-from-disk'; +import { getExistingHydrationState, isRangeHydrated } from './hydration-state'; + +type Range = { + position: number; + length: number; +}; + +export async function readIfHydrated(filePath: string, contentsId: string, range: Range): Promise { + const state = getExistingHydrationState(contentsId); + if (!state) return undefined; + if (!isRangeHydrated(state, range)) return undefined; + + return readChunkFromDisk(filePath, range.length, range.position); +} diff --git a/src/backend/features/fuse/on-read/handle-read-callback.test.ts b/src/backend/features/fuse/on-read/handle-read-callback.test.ts index 5b9ad9b08a..7766e4c2c3 100644 --- a/src/backend/features/fuse/on-read/handle-read-callback.test.ts +++ b/src/backend/features/fuse/on-read/handle-read-callback.test.ts @@ -1,18 +1,26 @@ -import { PassThrough } from 'node:stream'; -import { handleReadCallback, type HandleReadCallbackDeps } from './handle-read-callback'; +import { handleReadCallback, type HandleReadCallbackProps } from './handle-read-callback'; import * as readChunkModule from './read-chunk-from-disk'; -import * as createDownloadModule from './create-download-to-disk'; -import * as hydrationRegistryModule from './hydration-registry'; -import * as openFlagsTrackerModule from '../on-open/open-flags-tracker'; +import * as processBlocklistModule from '../../../features/virtual-drive/utils/process-blocklist'; +import * as fileExistsModule from './download-cache/file-exists-on-disk'; +import * as allocateFileModule from './download-cache/allocate-file'; +import * as downloadAndSaveBlockModule from './download-cache/download-and-save-block'; +import * as downloadFileModule from '../../../../infra/environment/download-file/download-file'; +import { + clearHydrationState, + getExistingHydrationState, + getOrCreateHydrationState, + markBlocksInRangeDownloaded, +} from './download-cache/hydration-state'; import { partialSpyOn, call } from '../../../../../tests/vitest/utils.helper'; import { type File } from '../../../../context/virtual-drive/files/domain/File'; -import { FuseNoSuchFileOrDirectoryError } from '../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseIOError, FuseNoSuchFileOrDirectoryError } from '../../../../apps/drive/fuse/callbacks/FuseErrors'; const readChunkFromDiskMock = partialSpyOn(readChunkModule, 'readChunkFromDisk'); -const createDownloadToDiskMock = partialSpyOn(createDownloadModule, 'createDownloadToDisk'); -const getHydrationMock = partialSpyOn(hydrationRegistryModule, 'getHydration'); -const setHydrationMock = partialSpyOn(hydrationRegistryModule, 'setHydration'); -const shouldDownloadMock = partialSpyOn(openFlagsTrackerModule, 'shouldDownload'); +const isBlocklistedProcessMock = partialSpyOn(processBlocklistModule, 'isBlocklistedProcess'); +const fileExistsOnDiskMock = partialSpyOn(fileExistsModule, 'fileExistsOnDisk'); +const allocateFileMock = partialSpyOn(allocateFileModule, 'allocateFile'); +const downloadAndCacheBlockMock = partialSpyOn(downloadAndSaveBlockModule, 'downloadAndCacheBlock'); +const downloadFileRangeMock = partialSpyOn(downloadFileModule, 'downloadFileRange'); const virtualFile = { contentsId: 'contents-123', @@ -23,32 +31,31 @@ const virtualFile = { size: 1000, } as unknown as File; -function createDeps(overrides: Partial = {}): HandleReadCallbackDeps { +function createDeps(overrides: Partial = {}): HandleReadCallbackProps { return { findVirtualFile: vi.fn().mockResolvedValue(virtualFile), findTemporalFile: vi.fn().mockResolvedValue(undefined), - existsOnDisk: vi.fn().mockResolvedValue(false), - startDownload: vi.fn().mockResolvedValue({ stream: new PassThrough(), elapsedTime: () => 0 }), onDownloadProgress: vi.fn(), saveToRepository: vi.fn().mockResolvedValue(undefined), + bucketId: 'bucket-id', + mnemonic: 'mnemonic', + network: {} as HandleReadCallbackProps['network'], + path: '/file.mp4', + range: { position: 0, length: 10 }, + processName: 'vlc', ...overrides, }; } -function createWriterMock(bytesAvailable = 0) { - return { - waitForBytes: vi.fn().mockResolvedValue(undefined), - getBytesAvailable: vi.fn().mockReturnValue(bytesAvailable), - destroy: vi.fn().mockResolvedValue(undefined), - }; -} - describe('handleReadCallback', () => { beforeEach(() => { - shouldDownloadMock.mockReturnValue(true); - getHydrationMock.mockReturnValue(undefined); + clearHydrationState(); + vi.clearAllMocks(); + isBlocklistedProcessMock.mockReturnValue(false); + fileExistsOnDiskMock.mockResolvedValue(true); + allocateFileMock.mockResolvedValue(undefined); + downloadAndCacheBlockMock.mockResolvedValue({ data: undefined }); readChunkFromDiskMock.mockResolvedValue(Buffer.from('data')); - createDownloadToDiskMock.mockReturnValue(createWriterMock()); }); describe('when virtual file is not found', () => { @@ -58,10 +65,9 @@ describe('handleReadCallback', () => { findTemporalFile: vi.fn().mockResolvedValue(undefined), }); - const result = await handleReadCallback(deps, '/file.txt', 10, 0); + const result = await handleReadCallback({ ...deps, path: '/file.txt' }); - expect(result.isLeft()).toBe(true); - expect(result.getLeft()).toBeInstanceOf(FuseNoSuchFileOrDirectoryError); + expect(result.error).toBeInstanceOf(FuseNoSuchFileOrDirectoryError); }); it('should read from temporal file when virtual file is not found but temporal exists', async () => { @@ -75,10 +81,9 @@ describe('handleReadCallback', () => { }), }); - const result = await handleReadCallback(deps, '/file.txt', 13, 0); + const result = await handleReadCallback({ ...deps, path: '/file.txt', range: { position: 0, length: 13 } }); - expect(result.isRight()).toBe(true); - expect(result.getRight()).toBe(chunk); + expect(result.data).toBe(chunk); call(readChunkFromDiskMock).toStrictEqual(['/tmp/internxt-drive-tmp/uuid', 13, 0]); }); @@ -88,84 +93,156 @@ describe('handleReadCallback', () => { findTemporalFile: vi.fn().mockResolvedValue({ path: { value: '/virtual/file.txt' } }), }); - const result = await handleReadCallback(deps, '/file.txt', 10, 0); + const result = await handleReadCallback({ ...deps, path: '/file.txt' }); - expect(result.isLeft()).toBe(true); - expect(result.getLeft()).toBeInstanceOf(FuseNoSuchFileOrDirectoryError); + expect(result.error).toBeInstanceOf(FuseNoSuchFileOrDirectoryError); }); }); - describe('when shouldDownload returns false', () => { - it('should return empty buffer', async () => { - shouldDownloadMock.mockReturnValue(false); - const deps = createDeps(); + describe.skip('when process is blocklisted', () => { + it('should return empty buffer without side effects when the requested range is not cached', async () => { + isBlocklistedProcessMock.mockReturnValue(true); + fileExistsOnDiskMock.mockResolvedValue(false); + const deps = createDeps({ processName: 'pool-org.gnome.' }); + + const result = await handleReadCallback(deps); + + expect(result.data).toHaveLength(0); + expect(getExistingHydrationState(virtualFile.contentsId)).toBeUndefined(); + expect(fileExistsOnDiskMock).not.toHaveBeenCalled(); + expect(allocateFileMock).not.toHaveBeenCalled(); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); + expect(deps.onDownloadProgress).not.toHaveBeenCalled(); + expect(deps.saveToRepository).not.toHaveBeenCalled(); + expect(readChunkFromDiskMock).not.toHaveBeenCalled(); + }); - const result = await handleReadCallback(deps, '/file.mp4', 10, 0); + it('should return empty buffer when hydration state exists but the requested range is not cached', async () => { + isBlocklistedProcessMock.mockReturnValue(true); + getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + const deps = createDeps({ processName: 'pool-org.gnome.' }); - expect(result.isRight()).toBe(true); - expect(result.getRight()).toHaveLength(0); + const result = await handleReadCallback(deps); + + expect(result.data).toHaveLength(0); + expect(allocateFileMock).not.toHaveBeenCalled(); + expect(readChunkFromDiskMock).not.toHaveBeenCalled(); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); + expect(deps.onDownloadProgress).not.toHaveBeenCalled(); + expect(deps.saveToRepository).not.toHaveBeenCalled(); + }); + + it('should return requested bytes when the range is already cached', async () => { + isBlocklistedProcessMock.mockReturnValue(true); + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: 10 }); + const cached = Buffer.from('cached'); + readChunkFromDiskMock.mockResolvedValue(cached); + const deps = createDeps({ processName: 'pool-org.gnome.' }); + + const result = await handleReadCallback(deps); + + expect(result.data).toBe(cached); + expect(fileExistsOnDiskMock).not.toHaveBeenCalled(); + expect(allocateFileMock).not.toHaveBeenCalled(); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); + expect(deps.onDownloadProgress).not.toHaveBeenCalled(); + expect(deps.saveToRepository).not.toHaveBeenCalled(); + expect(readChunkFromDiskMock).toHaveBeenCalledWith(expect.stringContaining(virtualFile.contentsId), 10, 0); }); }); - describe('when file already exists on disk', () => { - it('should read chunk directly from disk', async () => { - const chunk = Buffer.from('cached'); - readChunkFromDiskMock.mockResolvedValue(chunk); - const deps = createDeps({ - existsOnDisk: vi.fn().mockResolvedValue(true), + describe('when process is a thumbnail generator', () => { + it('should download the exact requested range without block expansion', async () => { + const chunk = Buffer.from('image-header'); + downloadFileRangeMock.mockResolvedValue({ data: chunk }); + const deps = createDeps({ processName: 'pool-org.gnome.', range: { position: 0, length: 32768 } }); + + const result = await handleReadCallback(deps); + + expect(result.data).toBe(chunk); + call(downloadFileRangeMock).toMatchObject({ + fileId: virtualFile.contentsId, + bucketId: deps.bucketId, + mnemonic: deps.mnemonic, + range: { position: 0, length: 32768 }, }); + }); + + it('should not allocate a cache file or download blocks', async () => { + downloadFileRangeMock.mockResolvedValue({ data: Buffer.from('bytes') }); + const deps = createDeps({ processName: 'pool-org.gnome.' }); - const result = await handleReadCallback(deps, '/file.mp4', 6, 100); + await handleReadCallback(deps); - expect(result.isRight()).toBe(true); - expect(result.getRight()).toBe(chunk); - expect(deps.startDownload).not.toHaveBeenCalled(); + expect(fileExistsOnDiskMock).not.toHaveBeenCalled(); + expect(allocateFileMock).not.toHaveBeenCalled(); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); }); - }); - describe('when file needs to be downloaded', () => { - it('should start a new hydration when none exists', async () => { - const writer = createWriterMock(); - createDownloadToDiskMock.mockReturnValue(writer); - const deps = createDeps(); + it('should not emit download progress or register the file', async () => { + downloadFileRangeMock.mockResolvedValue({ data: Buffer.from('bytes') }); + const deps = createDeps({ processName: 'pool-org.gnome.' }); + + await handleReadCallback(deps); + + expect(deps.onDownloadProgress).not.toHaveBeenCalled(); + expect(deps.saveToRepository).not.toHaveBeenCalled(); + }); + + it('should return EIO when the ranged download fails', async () => { + downloadFileRangeMock.mockResolvedValue({ error: new Error('network error') }); + const deps = createDeps({ processName: 'pool-org.gnome.' }); - await handleReadCallback(deps, '/file.mp4', 10, 50); + const result = await handleReadCallback(deps); - expect(deps.startDownload).toHaveBeenCalledWith(virtualFile); - expect(setHydrationMock).toHaveBeenCalledOnce(); - expect(writer.waitForBytes).toHaveBeenCalledWith(50, 10); + expect(result.error).toBeInstanceOf(FuseIOError); }); + }); - it('should reuse existing hydration when one exists', async () => { - const writer = createWriterMock(); - getHydrationMock.mockReturnValue({ writer }); + describe('when allocating the cache file', () => { + it('returns EIO and does not download when allocation fails', async () => { + fileExistsOnDiskMock.mockResolvedValue(false); + allocateFileMock.mockRejectedValueOnce(new Error('disk full')); const deps = createDeps(); - await handleReadCallback(deps, '/file.mp4', 10, 50); + const result = await handleReadCallback(deps); - expect(deps.startDownload).not.toHaveBeenCalled(); - expect(writer.waitForBytes).toHaveBeenCalledWith(50, 10); + expect(result.error).toBeInstanceOf(FuseIOError); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); + expect(readChunkFromDiskMock).not.toHaveBeenCalled(); + expect(deps.onDownloadProgress).not.toHaveBeenCalled(); + expect(deps.saveToRepository).not.toHaveBeenCalled(); }); - it('should read chunk from disk after waitForBytes resolves', async () => { - const chunk = Buffer.from('downloaded'); - readChunkFromDiskMock.mockResolvedValue(chunk); + it('retries allocation on a later read after allocation fails', async () => { + fileExistsOnDiskMock.mockResolvedValue(false); + allocateFileMock.mockRejectedValueOnce(new Error('disk full')).mockResolvedValueOnce(undefined); const deps = createDeps(); - const result = await handleReadCallback(deps, '/file.mp4', 10, 0); + const first = await handleReadCallback(deps); + const second = await handleReadCallback(deps); - expect(result.isRight()).toBe(true); - expect(result.getRight()).toBe(chunk); + expect(first.error).toBeInstanceOf(FuseIOError); + expect(second.data).toStrictEqual(Buffer.from('data')); + expect(allocateFileMock).toHaveBeenCalledTimes(2); + expect(downloadAndCacheBlockMock).toHaveBeenCalledOnce(); }); + }); - it('should skip waitForBytes when bytes are already available', async () => { - const writer = createWriterMock(1000); - getHydrationMock.mockReturnValue({ writer }); + describe('when file needs to be downloaded', () => { + it('allocates missing cache file, downloads missing blocks, then reads from disk', async () => { + fileExistsOnDiskMock.mockResolvedValue(false); + const chunk = Buffer.from('downloaded'); + readChunkFromDiskMock.mockResolvedValue(chunk); const deps = createDeps(); - await handleReadCallback(deps, '/file.mp4', 10, 50); + const result = await handleReadCallback(deps); - expect(writer.waitForBytes).toHaveBeenCalledWith(50, 10); + expect(result.data).toBe(chunk); + expect(allocateFileMock).toHaveBeenCalledWith(expect.stringContaining(virtualFile.contentsId), virtualFile.size); + expect(downloadAndCacheBlockMock).toHaveBeenCalledOnce(); + expect(readChunkFromDiskMock).toHaveBeenCalledWith(expect.stringContaining(virtualFile.contentsId), 10, 0); }); }); }); diff --git a/src/backend/features/fuse/on-read/handle-read-callback.ts b/src/backend/features/fuse/on-read/handle-read-callback.ts index da7e0e17b1..fdc3dc6ff1 100644 --- a/src/backend/features/fuse/on-read/handle-read-callback.ts +++ b/src/backend/features/fuse/on-read/handle-read-callback.ts @@ -1,127 +1,113 @@ import { logger } from '@internxt/drive-desktop-core/build/backend'; -import { type Readable } from 'stream'; import { type TemporalFile } from '../../../../context/storage/TemporalFiles/domain/TemporalFile'; import { type File } from '../../../../context/virtual-drive/files/domain/File'; -import { left, right, type Either } from '../../../../context/shared/domain/Either'; -import { type FuseError, FuseNoSuchFileOrDirectoryError } from '../../../../apps/drive/fuse/callbacks/FuseErrors'; -import { tryCatch } from '../../../../shared/try-catch'; -import { createDownloadToDisk } from './create-download-to-disk'; -import { deleteHydration, getHydration, HydrationEntry, setHydration } from './hydration-registry'; +import { + type FuseError, + FuseIOError, + FuseNoSuchFileOrDirectoryError, +} from '../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { downloadFileRange } from '../../../../infra/environment/download-file/download-file'; +import { type Result } from '../../../../context/shared/domain/Result'; import { readChunkFromDisk } from './read-chunk-from-disk'; -import { shouldDownload } from '../on-open/open-flags-tracker'; import nodePath from 'node:path'; import { PATHS } from '../../../../core/electron/paths'; -import { formatBytes } from '../../../../shared/format-bytes'; - -export type HandleReadCallbackDeps = { +import { EMPTY } from './constants'; +import { readOrHydrate } from './read-or-hydrate'; +import { type HandleReadDeps, type ReadRange } from './types'; +import { isThumbnailProcess } from './thumbnail-processes'; +export type HandleReadCallbackProps = HandleReadDeps & { findVirtualFile: (path: string) => Promise; findTemporalFile: (path: string) => Promise; - existsOnDisk: (contentsId: string) => Promise; - startDownload: (virtualFile: File) => Promise<{ stream: Readable; elapsedTime: () => number }>; - onDownloadProgress: (name: string, extension: string, progress: { percentage: number; elapsedTime: number }) => void; - saveToRepository: (contentsId: string, size: number, uuid: string, name: string, extension: string) => Promise; + path: string; + range: ReadRange; + processName: string; }; -const EMPTY = Buffer.alloc(0); - -async function startHydration( - deps: HandleReadCallbackDeps, - virtualFile: File, - filePath: string, -): Promise { - const { stream, elapsedTime } = await deps.startDownload(virtualFile); - const writer = createDownloadToDisk(stream, filePath, { - onProgress: (bytesWritten) => { - deps.onDownloadProgress(virtualFile.name, virtualFile.type, { - percentage: Math.min(bytesWritten / virtualFile.size, 1), - elapsedTime: elapsedTime(), - }); - }, - onFinished: () => { - deleteHydration(virtualFile.contentsId); - deps.saveToRepository( - virtualFile.contentsId, - virtualFile.size, - virtualFile.uuid, - virtualFile.name, - virtualFile.type, - ); - }, - onError: (err) => { - logger.error({ msg: '[startHydration] onError', error: err }); - tryCatch(() => writer.destroy()); - deleteHydration(virtualFile.contentsId); - }, - }); - - setHydration(virtualFile.contentsId, { writer }); - return { writer }; -} -export async function handleReadCallback( - deps: HandleReadCallbackDeps, - path: string, - length: number, - position: number, -): Promise> { - const virtualFile = await deps.findVirtualFile(path); +/** + * Routes reads between virtual-drive files and temporal local files. + * + * Virtual-file reads enforce process policy: blocklisted processes are cache-only + * readers, while normal processes may hydrate missing cache blocks and finalize the + * file once the full contents are available. + */ +export async function handleReadCallback({ + findVirtualFile, + findTemporalFile, + onDownloadProgress, + saveToRepository, + bucketId, + mnemonic, + network, + path, + range, + processName, +}: HandleReadCallbackProps): Promise> { + const virtualFile = await findVirtualFile(path); if (!virtualFile) { - const temporalFile = await deps.findTemporalFile(path); - - if (!temporalFile || !temporalFile.contentFilePath) { - logger.error({ msg: '[ReadCallback] File not found', path }); - return left(new FuseNoSuchFileOrDirectoryError(path)); - } - - const chunk = await readChunkFromDisk(temporalFile.contentFilePath, length, position); - return right(chunk); + return readFromTemporalFile(findTemporalFile, path, range.length, range.position); } - if (!shouldDownload(path)) { - logger.debug({ msg: '[ReadCallback] Download blocked - system open', path }); - return right(EMPTY); + if (isThumbnailProcess(processName)) { + logger.debug({ + msg: '[ReadCallback] thumbnail process, downloading exact range', + process: processName, + file: virtualFile.nameWithExtension, + }); + return readExactRangeForThumbnail({ bucketId, mnemonic, network, virtualFile, range }); } const filePath = nodePath.join(PATHS.DOWNLOADED, virtualFile.contentsId); - logger.debug({ - msg: '[ReadCallback] read request:', - file: virtualFile.nameWithExtension, - position, - length, - targetByte: position + length, + return readOrHydrate({ + bucketId, + mnemonic, + network, + onDownloadProgress, + saveToRepository, + virtualFile, + filePath, + range, }); +} - if (await deps.existsOnDisk(virtualFile.contentsId)) { - const chunk = await readChunkFromDisk(filePath, length, position); - return right(chunk); - } - - const hydration = getHydration(virtualFile.contentsId) ?? (await startHydration(deps, virtualFile, filePath)); - const targetByte = position + length; - const bytesAvailable = hydration.writer.getBytesAvailable(); - const waitStart = Date.now(); +async function readFromTemporalFile( + findTemporalFile: HandleReadCallbackProps['findTemporalFile'], + path: string, + length: number, + position: number, +): Promise> { + const temporalFile = await findTemporalFile(path); - if (bytesAvailable < targetByte) { - logger.debug({ - msg: '[ReadCallback] waiting for download to catch up', - file: virtualFile.nameWithExtension, - position: formatBytes(position), - targetByte: formatBytes(targetByte), - bytesAvailable: formatBytes(bytesAvailable), - bytesAhead: formatBytes(targetByte - bytesAvailable), - }); + if (!temporalFile || !temporalFile.contentFilePath) { + logger.error({ msg: '[ReadCallback] File not found', path }); + return { error: new FuseNoSuchFileOrDirectoryError(path) }; } - await hydration.writer.waitForBytes(position, length); + const chunk = await readChunkFromDisk(temporalFile.contentFilePath, length, position); + return { data: chunk ?? EMPTY }; +} + +type ThumbnailRangeProps = Pick & { + virtualFile: File; +}; - logger.debug({ - msg: '[ReadCallback] wait resolved', - file: virtualFile.nameWithExtension, - position: formatBytes(position), - waitedMs: Date.now() - waitStart, +async function readExactRangeForThumbnail({ + bucketId, + mnemonic, + network, + virtualFile, + range, +}: ThumbnailRangeProps): Promise> { + const { signal } = new AbortController(); + const result = await downloadFileRange({ + fileId: virtualFile.contentsId, + bucketId, + mnemonic, + network, + range, + signal, }); - - const chunk = await readChunkFromDisk(filePath, length, position); - return right(chunk); + if (result.error) return { error: new FuseIOError(result.error.message) }; + return { data: result.data }; } diff --git a/src/backend/features/fuse/on-read/read-chunk-from-disk.test.ts b/src/backend/features/fuse/on-read/read-chunk-from-disk.test.ts index cf3088a286..aa7b47f172 100644 --- a/src/backend/features/fuse/on-read/read-chunk-from-disk.test.ts +++ b/src/backend/features/fuse/on-read/read-chunk-from-disk.test.ts @@ -1,17 +1,18 @@ import fs from 'fs/promises'; import { readChunkFromDisk } from './read-chunk-from-disk'; import { call } from '../../../../../tests/vitest/utils.helper'; +import { mockDeep } from 'vitest-mock-extended'; vi.mock(import('fs/promises')); -const fsMock = vi.mocked(fs); +const fsMock = mockDeep(fs); describe('readChunkFromDisk', () => { const closeMock = vi.fn(); const readMock = vi.fn(); beforeEach(() => { - fsMock.open.mockResolvedValue({ read: readMock, close: closeMock } as any); + fsMock.open.mockResolvedValue({ read: readMock, close: closeMock } as unknown as fs.FileHandle); }); it('should open the file in read mode', async () => { diff --git a/src/backend/features/fuse/on-read/read-chunk-from-disk.ts b/src/backend/features/fuse/on-read/read-chunk-from-disk.ts index c8e41fd3b6..d2e206834a 100644 --- a/src/backend/features/fuse/on-read/read-chunk-from-disk.ts +++ b/src/backend/features/fuse/on-read/read-chunk-from-disk.ts @@ -1,4 +1,13 @@ import fs from 'node:fs/promises'; +// TODO: Rename chunk -> block +export async function writeChunkToDisk(filePath: string, buffer: Buffer, position: number): Promise { + const handle = await fs.open(filePath, 'r+'); + try { + await handle.write(new Uint8Array(buffer), 0, buffer.length, position); + } finally { + await handle.close(); + } +} export async function readChunkFromDisk(filePath: string, length: number, position: number): Promise { const handle = await fs.open(filePath, 'r'); diff --git a/src/backend/features/fuse/on-read/read-or-hydrate.test.ts b/src/backend/features/fuse/on-read/read-or-hydrate.test.ts new file mode 100644 index 0000000000..4fbd395dc6 --- /dev/null +++ b/src/backend/features/fuse/on-read/read-or-hydrate.test.ts @@ -0,0 +1,496 @@ +import { type File } from '../../../../context/virtual-drive/files/domain/File'; +import { FuseIOError } from '../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { call, partialSpyOn, testSleep } from '../../../../../tests/vitest/utils.helper'; +import * as readChunkModule from './read-chunk-from-disk'; +import * as fileExistsModule from './download-cache/file-exists-on-disk'; +import * as allocateFileModule from './download-cache/allocate-file'; +import * as downloadAndSaveBlockModule from './download-cache/download-and-save-block'; +import { + clearHydrationState, + getExistingHydrationState, + getOrCreateHydrationState, + isRangeHydrated, + markBlocksInRangeDownloaded, +} from './download-cache/hydration-state'; +import { BLOCK_SIZE } from './download-cache/constants'; +import { readOrHydrate, type ReadOrHydrateDeps } from './read-or-hydrate'; + +const readChunkFromDiskMock = partialSpyOn(readChunkModule, 'readChunkFromDisk'); +const fileExistsOnDiskMock = partialSpyOn(fileExistsModule, 'fileExistsOnDisk'); +const allocateFileMock = partialSpyOn(allocateFileModule, 'allocateFile'); +const downloadAndCacheBlockMock = partialSpyOn(downloadAndSaveBlockModule, 'downloadAndCacheBlock'); + +const virtualFile = { + contentsId: 'contents-123', + name: 'video', + nameWithExtension: 'video.mp4', + type: 'mp4', + uuid: 'uuid-123', + size: 1000, +} as unknown as File; + +function createDeps(overrides: Partial = {}): ReadOrHydrateDeps { + return { + onDownloadProgress: vi.fn(), + saveToRepository: vi.fn().mockResolvedValue(undefined), + bucketId: 'bucket-id', + mnemonic: 'mnemonic', + network: {} as ReadOrHydrateDeps['network'], + ...overrides, + }; +} + +describe('readOrHydrate', () => { + beforeEach(() => { + clearHydrationState(); + vi.clearAllMocks(); + fileExistsOnDiskMock.mockResolvedValue(true); + allocateFileMock.mockResolvedValue(undefined); + downloadAndCacheBlockMock.mockResolvedValue({ data: undefined }); + readChunkFromDiskMock.mockResolvedValue(Buffer.from('data')); + }); + + it('reads an already hydrated range from disk without downloading', async () => { + const chunk = Buffer.from('cached'); + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: 10 }); + readChunkFromDiskMock.mockResolvedValue(chunk); + + const result = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.data).toBe(chunk); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); + expect(readChunkFromDiskMock).toHaveBeenCalledWith('/tmp/cache-file', 10, 0); + }); + + it('downloads a missing range then reads requested bytes from disk', async () => { + const chunk = Buffer.from('downloaded'); + readChunkFromDiskMock.mockResolvedValue(chunk); + const deps = createDeps(); + + const result = await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.data).toBe(chunk); + expect(downloadAndCacheBlockMock).toHaveBeenCalledOnce(); + expect(readChunkFromDiskMock).toHaveBeenCalledWith('/tmp/cache-file', 10, 0); + }); + + it('creates hydration state for normal reads', async () => { + await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(getExistingHydrationState(virtualFile.contentsId)).toBeDefined(); + }); + + it('allocates the cache file when it is missing', async () => { + fileExistsOnDiskMock.mockResolvedValue(false); + + await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(allocateFileMock).toHaveBeenCalledWith('/tmp/cache-file', virtualFile.size); + }); + + it('passes progress reporting through to block hydration', async () => { + const deps = createDeps(); + downloadAndCacheBlockMock.mockImplementation(async ({ onDownloadProgress }) => { + onDownloadProgress(virtualFile.name, virtualFile.type, 10, virtualFile.size, 1); + return { data: undefined }; + }); + + await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + call(deps.onDownloadProgress).toStrictEqual([virtualFile.name, virtualFile.type, 10, virtualFile.size, 1]); + }); + + it('returns empty when in-flight hydration is aborted before the read resolves', async () => { + downloadAndCacheBlockMock.mockImplementation(async () => { + const state = getExistingHydrationState(virtualFile.contentsId); + state?.abortController.abort(); + return { data: undefined }; + }); + + const result = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.data).toStrictEqual(Buffer.alloc(0)); + expect(readChunkFromDiskMock).not.toHaveBeenCalled(); + }); + + it('returns non-abort download errors', async () => { + downloadAndCacheBlockMock.mockResolvedValue({ error: new Error('network failed') }); + + const result = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.error).toBeInstanceOf(FuseIOError); + expect(result.error?.message).toContain('network failed'); + }); + + it('returns allocation errors as Fuse IO errors', async () => { + fileExistsOnDiskMock.mockResolvedValue(false); + allocateFileMock.mockRejectedValue(new Error('disk full')); + + const result = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.error).toBeInstanceOf(FuseIOError); + }); + + it('downloads one block once for overlapping concurrent reads', async () => { + let resolveDownload: () => void = () => undefined; + downloadAndCacheBlockMock.mockImplementation( + ({ state, blockStart, blockLength }) => + new Promise<{ data: undefined }>((resolve) => { + resolveDownload = () => { + markBlocksInRangeDownloaded(state, { position: blockStart, length: blockLength }); + resolve({ data: undefined }); + }; + }), + ); + + const first = readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + const second = readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 5, length: 10 }, + }); + + await testSleep(0); + expect(downloadAndCacheBlockMock).toHaveBeenCalledOnce(); + + resolveDownload(); + + await expect(first).resolves.toHaveProperty('data', Buffer.from('data')); + await expect(second).resolves.toHaveProperty('data', Buffer.from('data')); + expect(getExistingHydrationState(virtualFile.contentsId)?.blocksBeingDownloaded.size).toBe(0); + expect(downloadAndCacheBlockMock).toHaveBeenCalledOnce(); + }); + + it('keeps overlapping reads waiting on the existing block download promise', async () => { + let resolveDownload: () => void = () => undefined; + let secondSettled = false; + downloadAndCacheBlockMock.mockImplementation( + ({ state, blockStart, blockLength }) => + new Promise<{ data: undefined }>((resolve) => { + resolveDownload = () => { + markBlocksInRangeDownloaded(state, { position: blockStart, length: blockLength }); + resolve({ data: undefined }); + }; + }), + ); + + const first = readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + const second = readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 5, length: 10 }, + }).then((result) => { + secondSettled = true; + return result; + }); + + await testSleep(0); + expect(secondSettled).toBe(false); + + resolveDownload(); + await Promise.all([first, second]); + + expect(secondSettled).toBe(true); + }); + + it('settles overlapping waiters with the failed block download result', async () => { + let resolveDownload: (result: { error: Error }) => void = () => undefined; + downloadAndCacheBlockMock.mockImplementation( + () => + new Promise((resolve) => { + resolveDownload = resolve; + }), + ); + + const first = readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + const second = readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 5, length: 10 }, + }); + + await testSleep(0); + resolveDownload({ error: new Error('range request failed') }); + + const firstResult = await first; + const secondResult = await second; + const state = getExistingHydrationState(virtualFile.contentsId); + + expect(firstResult.error).toBeInstanceOf(FuseIOError); + expect(secondResult.error).toBeInstanceOf(FuseIOError); + expect(firstResult.error?.message).toContain('range request failed'); + expect(secondResult.error?.message).toContain('range request failed'); + expect(downloadAndCacheBlockMock).toHaveBeenCalledOnce(); + expect(state?.blocksBeingDownloaded.size).toBe(0); + expect(state && isRangeHydrated(state, { position: 0, length: 10 })).toBe(false); + }); + + it('does not mark failed block downloads as hydrated and allows a later retry', async () => { + downloadAndCacheBlockMock + .mockResolvedValueOnce({ error: new Error('range request failed') }) + .mockImplementationOnce(async ({ state, blockStart, blockLength }) => { + markBlocksInRangeDownloaded(state, { position: blockStart, length: blockLength }); + return { data: undefined }; + }); + + const first = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + const state = getExistingHydrationState(virtualFile.contentsId); + + expect(first.error).toBeInstanceOf(FuseIOError); + expect(state?.hydratedBytes).toBe(0); + expect(state?.blocksBeingDownloaded.size).toBe(0); + + const second = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(second.data).toStrictEqual(Buffer.from('data')); + expect(downloadAndCacheBlockMock).toHaveBeenCalledTimes(2); + expect(state?.hydratedBytes).toBe(virtualFile.size); + }); + + it('does not mark aborted block downloads as hydrated and clears the in-flight entry', async () => { + downloadAndCacheBlockMock.mockImplementation(async ({ state }) => { + state.abortController.abort(); + return { data: undefined }; + }); + + const result = await readOrHydrate({ + ...createDeps(), + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + const state = getExistingHydrationState(virtualFile.contentsId); + + expect(result.data).toStrictEqual(Buffer.alloc(0)); + expect(state?.hydratedBytes).toBe(0); + expect(state?.blocksBeingDownloaded.size).toBe(0); + }); + + it('does not finalize partially hydrated files', async () => { + const partialFile = { ...virtualFile, size: BLOCK_SIZE + 100 } as unknown as File; + const deps = createDeps(); + const state = getOrCreateHydrationState(partialFile.contentsId, partialFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: BLOCK_SIZE }); + + const result = await readOrHydrate({ + ...deps, + virtualFile: partialFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.data).toStrictEqual(Buffer.from('data')); + expect(deps.saveToRepository).not.toHaveBeenCalled(); + expect(state.finalized).toBe(false); + }); + + it('treats empty files as already hydrated and finalizes without downloading blocks', async () => { + const emptyFile = { ...virtualFile, contentsId: 'empty-contents-id', size: 0 } as unknown as File; + const deps = createDeps(); + readChunkFromDiskMock.mockResolvedValue(Buffer.alloc(0)); + + const result = await readOrHydrate({ + ...deps, + virtualFile: emptyFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 0 }, + }); + const state = getExistingHydrationState(emptyFile.contentsId); + + expect(result.data).toStrictEqual(Buffer.alloc(0)); + expect(downloadAndCacheBlockMock).not.toHaveBeenCalled(); + expect(deps.saveToRepository).toHaveBeenCalledOnce(); + expect(state?.hydratedBytes).toBe(0); + expect(state?.finalized).toBe(true); + }); + + it('finalizes fully hydrated files', async () => { + const deps = createDeps(); + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: virtualFile.size }); + + const result = await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(result.data).toStrictEqual(Buffer.from('data')); + expect(deps.saveToRepository).toHaveBeenCalledOnce(); + expect(deps.saveToRepository).toHaveBeenCalledWith( + virtualFile.contentsId, + virtualFile.size, + virtualFile.uuid, + virtualFile.name, + virtualFile.type, + ); + expect(state.finalized).toBe(true); + }); + + it('registers once for concurrent full-hydration reads and shares in-flight finalization', async () => { + let resolveRegistration: () => void = () => undefined; + const saveToRepository = vi.fn( + () => + new Promise((resolve) => { + resolveRegistration = resolve; + }), + ); + const deps = createDeps({ saveToRepository }); + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: virtualFile.size }); + + const first = readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + const second = readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 5, length: 10 }, + }); + + await testSleep(0); + expect(saveToRepository).toHaveBeenCalledOnce(); + expect(state.finalization).toBeDefined(); + expect(state.finalized).toBe(false); + + resolveRegistration(); + + await expect(first).resolves.toHaveProperty('data', Buffer.from('data')); + await expect(second).resolves.toHaveProperty('data', Buffer.from('data')); + expect(state.finalization).toBeUndefined(); + expect(state.finalized).toBe(true); + }); + + it('allows failed finalization to be retried by a later normal read', async () => { + const saveToRepository = vi + .fn() + .mockRejectedValueOnce(new Error('register failed')) + .mockResolvedValueOnce(undefined); + const deps = createDeps({ saveToRepository }); + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: virtualFile.size }); + + const first = await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(first.error).toBeInstanceOf(FuseIOError); + expect(first.error?.message).toContain('register failed'); + expect(state.finalization).toBeUndefined(); + expect(state.finalized).toBe(false); + + const second = await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(second.data).toStrictEqual(Buffer.from('data')); + expect(saveToRepository).toHaveBeenCalledTimes(2); + expect(state.finalized).toBe(true); + }); + + it('fires downloadFinished once after successful finalization', async () => { + const downloadFinished = vi.fn(); + const saveToRepository = vi.fn().mockImplementation(async () => { + downloadFinished(); + }); + const deps = createDeps({ saveToRepository }); + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + markBlocksInRangeDownloaded(state, { position: 0, length: virtualFile.size }); + + await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + await readOrHydrate({ + ...deps, + virtualFile, + filePath: '/tmp/cache-file', + range: { position: 0, length: 10 }, + }); + + expect(saveToRepository).toHaveBeenCalledOnce(); + expect(downloadFinished).toHaveBeenCalledOnce(); + }); +}); diff --git a/src/backend/features/fuse/on-read/read-or-hydrate.ts b/src/backend/features/fuse/on-read/read-or-hydrate.ts new file mode 100644 index 0000000000..0b02e20180 --- /dev/null +++ b/src/backend/features/fuse/on-read/read-or-hydrate.ts @@ -0,0 +1,199 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { type File } from '../../../../context/virtual-drive/files/domain/File'; +import { FuseError, FuseIOError } from '../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { type Result } from '../../../../context/shared/domain/Result'; +import { formatBytes } from '../../../../shared/format-bytes'; +import { readChunkFromDisk } from './read-chunk-from-disk'; +import { EMPTY } from './constants'; +import { BLOCK_SIZE } from './download-cache/constants'; +import { downloadAndCacheBlock } from './download-cache/download-and-save-block'; +import { expandToBlockBoundaries } from './download-cache/expand-to-block-boundaries'; +import { fileExistsOnDisk } from './download-cache/file-exists-on-disk'; +import { + ensureAllocatedOnce, + finalizeIfNeeded, + type FileHydrationState, + getBlocksBeingDownloaded, + getMissingBlocks, + getOrCreateHydrationState, + isFileHydrated, + isRangeHydrated, + clearBlockDownloadInFlight, + setBlockDownloadInFlight, +} from './download-cache/hydration-state'; +import { type HandleReadDeps, type ReadRange } from './types'; +export type ReadOrHydrateDeps = HandleReadDeps; + +type Props = HandleReadDeps & { + virtualFile: File; + filePath: string; + range: ReadRange; +}; + +export async function readOrHydrate({ + onDownloadProgress, + saveToRepository, + bucketId, + mnemonic, + network, + virtualFile, + filePath, + range, +}: Props): Promise> { + logger.debug({ + msg: '[ReadCallback] read request:', + file: virtualFile.nameWithExtension, + position: formatBytes(range.position), + length: formatBytes(range.length), + }); + + const state = await ensureFileAllocated(filePath, virtualFile); + if (state.error) return { error: state.error }; + if (wasAborted(state.data)) return { data: EMPTY }; + + try { + if (isRangeHydrated(state.data, range)) { + logger.debug({ msg: '[ReadCallback] serving from disk cache', file: virtualFile.nameWithExtension }); + } else { + logger.debug({ msg: '[ReadCallback] downloading range', file: virtualFile.nameWithExtension }); + const downloadResult = await ensureRangeDownloaded({ + onDownloadProgress, + bucketId, + mnemonic, + network, + virtualFile, + filePath, + state: state.data, + range, + }); + if (wasAborted(state.data)) return { data: EMPTY }; + if (downloadResult.error) return { error: fuseIOErrorFrom(downloadResult.error) }; + } + + await finalizeFullyHydratedFileIfNeeded(saveToRepository, virtualFile, state.data); + if (wasAborted(state.data)) return { data: EMPTY }; + + return { data: await readChunkFromDisk(filePath, range.length, range.position) }; + } catch (error) { + if (wasAborted(state.data)) return { data: EMPTY }; + return { error: fuseIOErrorFrom(error) }; + } +} + +async function ensureFileAllocated( + filePath: string, + virtualFile: File, +): Promise> { + const state = getOrCreateHydrationState(virtualFile.contentsId, virtualFile.size); + const allocated = await fileExistsOnDisk(filePath); + if (wasAborted(state)) return { data: state }; + + if (!allocated) { + const { error } = await ensureAllocatedOnce(state, filePath, virtualFile.size); + if (error) { + return { error: new FuseIOError('Unable to allocate cache file.') }; + } + } + return { data: state }; +} + +async function ensureRangeDownloaded({ + bucketId, + mnemonic, + network, + onDownloadProgress, + virtualFile, + filePath, + state, + range, +}: { + onDownloadProgress: HandleReadDeps['onDownloadProgress']; + bucketId: HandleReadDeps['bucketId']; + mnemonic: HandleReadDeps['mnemonic']; + network: HandleReadDeps['network']; + virtualFile: File; + filePath: string; + range: ReadRange; + state: FileHydrationState; +}): Promise> { + const { blockStart, blockLength } = expandToBlockBoundaries({ range, fileSize: virtualFile.size }); + + const blocksBeingDownloaded = getBlocksBeingDownloaded(state, { position: blockStart, length: blockLength }); + if (blocksBeingDownloaded.size > 0) { + logger.debug({ + msg: '[ReadCallback] waiting for requested blocks being downloaded', + file: virtualFile.nameWithExtension, + }); + const result = await waitForBlockDownloads([...blocksBeingDownloaded.values()]); + if (result.error) return { error: result.error }; + } + + if (wasAborted(state)) return { data: undefined }; + + const missingBlocks = getMissingBlocks(state, { position: blockStart, length: blockLength }); + if (missingBlocks.length > 0) { + logger.debug({ + msg: '[ReadCallback] downloading missing blocks', + file: virtualFile.nameWithExtension, + blocks: missingBlocks, + }); + const downloads = missingBlocks.map((block) => { + const start = block * BLOCK_SIZE; + const end = Math.min(start + BLOCK_SIZE, virtualFile.size); + const download = downloadAndCacheBlock({ + bucketId, + mnemonic, + network, + onDownloadProgress, + virtualFile, + filePath, + state, + blockStart: start, + blockLength: end - start, + }); + setBlockDownloadInFlight(state, block, download); + download.finally(() => clearBlockDownloadInFlight(state, block, download)); + return download; + }); + const result = await waitForBlockDownloads(downloads); + if (result.error) return { error: result.error }; + } + + return { data: undefined }; +} + +async function waitForBlockDownloads(downloads: Array>>): Promise> { + const results = await Promise.all(downloads); + const failed = results.find((result) => result.error); + if (failed?.error) return { error: failed.error }; + return { data: undefined }; +} + +async function finalizeFullyHydratedFileIfNeeded( + saveToRepository: HandleReadDeps['saveToRepository'], + virtualFile: File, + state: FileHydrationState, +): Promise { + if (!isFileHydrated(state)) return; + + await finalizeIfNeeded(state, async () => { + await saveToRepository( + virtualFile.contentsId, + virtualFile.size, + virtualFile.uuid, + virtualFile.name, + virtualFile.type, + ); + state.stopwatch = undefined; + }); +} + +function fuseIOErrorFrom(error: unknown): FuseError { + if (error instanceof FuseError) return error; + const details = error instanceof Error ? error.message : 'Unknown error occurred'; + return new FuseIOError(details); +} + +function wasAborted(state: FileHydrationState): boolean { + return state.abortController.signal.aborted; +} diff --git a/src/backend/features/fuse/on-read/thumbnail-processes.ts b/src/backend/features/fuse/on-read/thumbnail-processes.ts new file mode 100644 index 0000000000..34cb36b43d --- /dev/null +++ b/src/backend/features/fuse/on-read/thumbnail-processes.ts @@ -0,0 +1,24 @@ +/** + * Linux's /proc/PID/comm truncates process names to 15 characters. + * The names below are the known thumbnailer process names after that truncation. + * + * Examples of full → truncated names: + * pool-org.gnome.N… → pool-org.gnome. (GNOME/Nautilus thumbnailer thread pool) + * gnome-thumbnail-factory → gnome-thumbnail + * evince-thumbnailer → evince-thumbnai + * totem-video-thumbnailer → totem-video-thu + * ffmpegthumbnailer → ffmpegthumbnaile + * tumbler-1 → tumbler-1 + */ +const THUMBNAIL_PROCESS_NAMES = new Set([ + 'pool-org.gnome.', // GNOME/Nautilus thumbnailer thread pool (pool-org.gnome.NautilusThumbnailFactory, etc.) + 'gnome-thumbnail', // gnome-thumbnail-factory (GNOME Files) + 'evince-thumbnai', // evince-thumbnailer (Evince document viewer) + 'totem-video-thu', // totem-video-thumbnailer (Totem video) + 'ffmpegthumbnaile', // ffmpegthumbnailer + 'tumbler-1', // xfce tumbler +]); + +export function isThumbnailProcess(processName: string): boolean { + return THUMBNAIL_PROCESS_NAMES.has(processName); +} diff --git a/src/backend/features/fuse/on-read/types.ts b/src/backend/features/fuse/on-read/types.ts new file mode 100644 index 0000000000..85b6b6b8d8 --- /dev/null +++ b/src/backend/features/fuse/on-read/types.ts @@ -0,0 +1,20 @@ +import { type Network } from '@internxt/sdk'; + +export type ReadRange = { + position: number; + length: number; +}; + +export type HandleReadDeps = { + onDownloadProgress: ( + name: string, + extension: string, + bytesDownloaded: number, + fileSize: number, + elapsedTime: number, + ) => void; + saveToRepository: (contentsId: string, size: number, uuid: string, name: string, extension: string) => Promise; + bucketId: string; + mnemonic: string; + network: Network.Network; +}; diff --git a/src/backend/features/virtual-drive/constants.ts b/src/backend/features/virtual-drive/constants.ts new file mode 100644 index 0000000000..f09c992f71 --- /dev/null +++ b/src/backend/features/virtual-drive/constants.ts @@ -0,0 +1,48 @@ +export const DAEMON_ROUTE = '/daemon'; +export const OPERATIONS_ROUTE = '/op'; +export const DAEMON_PATHS = { + READY: '/ready', +} as const; + +export const OPERATION_PATHS = { + GET_ATTR: '/getattributes', + OPEN: '/open', + OPEN_DIR: '/opendir', + READ: '/read', + TRUNCATE: '/truncate', + CREATE: '/create', + WRITE: '/write', + RELEASE: '/release', + MKDIR: '/mkdir', + RENAME: '/rename', + UNLINK: '/unlink', + RMDIR: '/rmdir', + STAT_FS: '/statfs', +} as const; +/** + * property to define a regular file when requesting in the get attributes fuse request. + * encodes both file type and permissions + */ + +export const FILE_MODE = 33188; +/** + * property to define a folder when requesting in the get attributes fuse request. + * encodes both file type and permissions + */ +export const FOLDER_MODE = 16877; + +export type GetAttributesCallbackData = { + mode: number; + size: number; + mtime: Date; + ctime: Date; + atime?: Date; + uid: number; + gid: number; + /** this property tells the kernel the number of hard links + * for directories this is at least 2 + * when nlink reaches 0 and no process has the file open, the kernel interprets + * its a deleted file/folder + * */ + nlink: number; +}; diff --git a/src/backend/features/virtual-drive/controllers/daemon.controller.test.ts b/src/backend/features/virtual-drive/controllers/daemon.controller.test.ts new file mode 100644 index 0000000000..2bdc4b09be --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/daemon.controller.test.ts @@ -0,0 +1,19 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { daemonReadyController } from './daemon.controller'; +import * as daemonServiceModule from '../services/daemon.service'; +import { partialSpyOn } from '../../../../../tests/vitest/utils.helper'; + +describe('daemonReadyController', () => { + const resolveDaemonReadyMock = partialSpyOn(daemonServiceModule, 'resolveDaemonReady'); + + it('should resolve the daemon ready signal and return 200', () => { + const req = mockDeep(); + const res = mockDeep(); + + daemonReadyController(req, res); + + expect(resolveDaemonReadyMock).toHaveBeenCalledOnce(); + expect(res.sendStatus).toHaveBeenCalledWith(200); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/daemon.controller.ts b/src/backend/features/virtual-drive/controllers/daemon.controller.ts new file mode 100644 index 0000000000..cd4f5f4fda --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/daemon.controller.ts @@ -0,0 +1,9 @@ +import { Request, Response } from 'express'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { resolveDaemonReady } from '../services/daemon.service'; + +export function daemonReadyController(_: Request, res: Response): void { + logger.debug({ msg: '[FUSE DAEMON] daemon ready signal received' }); + resolveDaemonReady(); + res.sendStatus(200); +} diff --git a/src/backend/features/virtual-drive/controllers/ensure-leading-slash.ts b/src/backend/features/virtual-drive/controllers/ensure-leading-slash.ts new file mode 100644 index 0000000000..9c4b08c72c --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/ensure-leading-slash.ts @@ -0,0 +1,4 @@ +export function ensureLeadingSlash(rawPath: string): string { + if (rawPath === '' || rawPath === '/') return '/'; + return rawPath.startsWith('/') ? rawPath : `/${rawPath}`; +} diff --git a/src/backend/features/virtual-drive/controllers/operations/create.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/create.controller.test.ts new file mode 100644 index 0000000000..129aa75a2e --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/create.controller.test.ts @@ -0,0 +1,39 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { createController } from './create.controller'; +import * as createServiceModule from '../../services/operations/create.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('createController', () => { + const createMock = partialSpyOn(createServiceModule, 'create'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 when file is created successfully', async () => { + req.body = { path: '/some/file.txt' }; + createMock.mockResolvedValue({ data: undefined }); + + await createController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); + + it('should return errno EIO when create fails', async () => { + req.body = { path: '/some/file.txt' }; + createMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'io error') }); + + await createController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/create.controller.ts b/src/backend/features/virtual-drive/controllers/operations/create.controller.ts new file mode 100644 index 0000000000..4266d2ec43 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/create.controller.ts @@ -0,0 +1,20 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { create } from '../../services/operations/create.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function createController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + logger.debug({ msg: `[FUSE DAEMON] Create signal received for path: ${rawPath}` }); + + const normalizedPath = ensureLeadingSlash(rawPath); + const result = await create(normalizedPath, container); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/get-attributes.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/get-attributes.controller.test.ts new file mode 100644 index 0000000000..6c729f5254 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/get-attributes.controller.test.ts @@ -0,0 +1,62 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { getAttributesController } from './get-attributes.controller'; +import * as getAttributesServiceModule from '../../services/operations/get-attributes.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FILE_MODE } from '../../constants'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +vi.mock('@internxt/drive-desktop-core/build/backend'); +vi.mock('../../services/operations/get-attributes.service'); + +describe('getAttributesController', () => { + const getAttributesMock = partialSpyOn(getAttributesServiceModule, 'getAttributes'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno ENOENT when getAttributes returns a not found error', async () => { + req.body = { path: '/missing.txt' }; + getAttributesMock.mockResolvedValue({ + error: new FuseError(FuseCodes.ENOENT, 'File not found'), + }); + + await getAttributesController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.ENOENT }); + }); + + it('should return errno 0 with attributes when file is found', async () => { + const now = new Date(); + req.body = { path: '/some/file.txt' }; + getAttributesMock.mockResolvedValue({ + data: { + mode: FILE_MODE, + size: 1234, + mtime: now, + ctime: now, + uid: 1000, + gid: 1000, + nlink: 1, + }, + }); + + await getAttributesController(req, res, container); + + expect(res.json).toHaveBeenCalledWith( + expect.objectContaining({ + errno: 0, + mode: FILE_MODE, + size: 1234, + }), + ); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/get-attributes.controller.ts b/src/backend/features/virtual-drive/controllers/operations/get-attributes.controller.ts new file mode 100644 index 0000000000..fb9e36e9f4 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/get-attributes.controller.ts @@ -0,0 +1,15 @@ +import { getAttributes } from '../../services/operations/get-attributes.service'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function getAttributesController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + const normalizedPath = ensureLeadingSlash(rawPath); + const result = await getAttributes(normalizedPath, container); + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + res.json({ errno: 0, ...result.data }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/mkdir.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/mkdir.controller.test.ts new file mode 100644 index 0000000000..fc8ac37793 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/mkdir.controller.test.ts @@ -0,0 +1,39 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { mkdirController } from './mkdir.controller'; +import * as mkdirServiceModule from '../../services/operations/mkdir.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('mkdirController', () => { + const mkdirMock = partialSpyOn(mkdirServiceModule, 'mkdir'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 when folder is created successfully', async () => { + req.body = { path: '/Documents/NewFolder' }; + mkdirMock.mockResolvedValue({ data: undefined }); + + await mkdirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); + + it('should return errno EIO when folder creation fails', async () => { + req.body = { path: '/Documents/NewFolder' }; + mkdirMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'io error') }); + + await mkdirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/mkdir.controller.ts b/src/backend/features/virtual-drive/controllers/operations/mkdir.controller.ts new file mode 100644 index 0000000000..986e831307 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/mkdir.controller.ts @@ -0,0 +1,20 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { mkdir } from '../../services/operations/mkdir.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function mkdirController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + logger.debug({ msg: `[FUSE DAEMON] Mkdir signal received for path: ${rawPath}` }); + + const normalizedPath = ensureLeadingSlash(rawPath); + const result = await mkdir(normalizedPath, container); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/open.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/open.controller.test.ts new file mode 100644 index 0000000000..1907c14870 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/open.controller.test.ts @@ -0,0 +1,48 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { openController } from './open.controller'; +import * as openServiceModule from '../../services/operations/open.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('openController', () => { + const openMock = partialSpyOn(openServiceModule, 'open'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 when file is opened successfully', async () => { + req.body = { path: '/some/file.txt', flags: 0, processName: 'cat' }; + openMock.mockResolvedValue({ data: undefined }); + + await openController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); + + it('should return errno ENOENT when file is not found', async () => { + req.body = { path: '/missing.txt', flags: 0, processName: 'cat' }; + openMock.mockResolvedValue({ error: new FuseError(FuseCodes.ENOENT, 'File not found') }); + + await openController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.ENOENT }); + }); + + it('should return errno EEXIST when path is an auxiliary file conflict', async () => { + req.body = { path: '/some/.tmp', flags: 0, processName: 'cat' }; + openMock.mockResolvedValue({ error: new FuseError(FuseCodes.EEXIST, 'Conflict') }); + + await openController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EEXIST }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/open.controller.ts b/src/backend/features/virtual-drive/controllers/operations/open.controller.ts new file mode 100644 index 0000000000..882add8f95 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/open.controller.ts @@ -0,0 +1,23 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { open } from '../../services/operations/open.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function openController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + logger.debug({ + msg: `[FUSE DAEMON] Open signal received for path: ${rawPath} by process: ${req.body.processName ?? ''}`, + }); + const processName: string = req.body.processName ?? ''; + const normalizedPath = ensureLeadingSlash(rawPath); + + const result = await open(normalizedPath, processName, container); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/opendir.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/opendir.controller.test.ts new file mode 100644 index 0000000000..fe24b51ecb --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/opendir.controller.test.ts @@ -0,0 +1,53 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { openDirController } from './opendir.controller'; +import * as openDirServiceModule from '../../services/operations/opendir.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FILE_MODE, FOLDER_MODE } from '../../constants'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('openDirController', () => { + const opendirMock = partialSpyOn(openDirServiceModule, 'opendir'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 with entries when directory is opened successfully', async () => { + req.body = { path: '/some/folder' }; + opendirMock.mockResolvedValue({ + data: { + entries: [ + { name: 'file.txt', mode: FILE_MODE }, + { name: 'subdir', mode: FOLDER_MODE }, + ], + }, + }); + + await openDirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ + errno: 0, + entries: [ + { name: 'file.txt', mode: FILE_MODE }, + { name: 'subdir', mode: FOLDER_MODE }, + ], + }); + }); + + it('should return errno EIO when directory read fails', async () => { + req.body = { path: '/broken/folder' }; + opendirMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'IO error') }); + + await openDirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/opendir.controller.ts b/src/backend/features/virtual-drive/controllers/operations/opendir.controller.ts new file mode 100644 index 0000000000..0b445e2415 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/opendir.controller.ts @@ -0,0 +1,21 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { opendir } from '../../services/operations/opendir.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function openDirController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + logger.debug({ msg: `[FUSE DAEMON] OpenDir signal received for path: ${rawPath}` }); + const normalizedPath = ensureLeadingSlash(rawPath); + + const { data, error } = await opendir(normalizedPath, container); + + if (error) { + logger.error({ msg: error.message }); + res.json({ errno: error.code }); + return; + } + + res.json({ errno: 0, ...data }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/read.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/read.controller.test.ts new file mode 100644 index 0000000000..d36c14fd5c --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/read.controller.test.ts @@ -0,0 +1,72 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { readController } from './read.controller'; +import * as readServiceModule from '../../services/operations/read.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('readController', () => { + const readMock = partialSpyOn(readServiceModule, 'read'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + res.set.mockReturnValue(res); + }); + + it('should send buffer and X-Errno 0 on success', async () => { + const chunk = Buffer.from('file data'); + req.body = { path: '/file.mp4', length: 10, offset: 0, processName: 'vlc' }; + readMock.mockResolvedValue({ data: chunk }); + + await readController(req, res, container); + + expect(res.set).toHaveBeenCalledWith('X-Errno', '0'); + expect(res.set).toHaveBeenCalledWith('Content-Type', 'application/octet-stream'); + expect(res.send).toHaveBeenCalledWith(chunk); + }); + + it('should send X-Errno with error code and empty buffer on error', async () => { + req.body = { path: '/file.mp4', length: 10, offset: 0, processName: 'vlc' }; + readMock.mockResolvedValue({ error: new FuseError(FuseCodes.ENOENT, 'not found') }); + + await readController(req, res, container); + + expect(res.set).toHaveBeenCalledWith('X-Errno', String(FuseCodes.ENOENT)); + expect(res.send).toHaveBeenCalledWith(Buffer.alloc(0)); + }); + + it('should send EINVAL and empty buffer when required fields are missing', async () => { + req.body = { path: '/file.mp4' }; + + await readController(req, res, container); + + expect(res.set).toHaveBeenCalledWith('X-Errno', String(FuseCodes.EINVAL)); + expect(res.send).toHaveBeenCalledWith(Buffer.alloc(0)); + expect(readMock).not.toHaveBeenCalled(); + }); + + it('should normalize path by adding leading slash', async () => { + req.body = { path: 'file.mp4', length: 10, offset: 0, processName: 'vlc' }; + readMock.mockResolvedValue({ data: Buffer.alloc(0) }); + + await readController(req, res, container); + + expect(readMock).toHaveBeenCalledWith('/file.mp4', 10, 0, 'vlc', container); + }); + + it('should default processName to empty string when not a string', async () => { + req.body = { path: '/file.mp4', length: 10, offset: 0, processName: 123 }; + readMock.mockResolvedValue({ data: Buffer.alloc(0) }); + + await readController(req, res, container); + + expect(readMock).toHaveBeenCalledWith('/file.mp4', 10, 0, '', container); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/read.controller.ts b/src/backend/features/virtual-drive/controllers/operations/read.controller.ts new file mode 100644 index 0000000000..45372638cc --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/read.controller.ts @@ -0,0 +1,36 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { read } from '../../services/operations/read.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function readController(req: Request, res: Response, container: Container) { + const { path: rawPath, length, offset } = req.body; + const processName = typeof req.body.processName === 'string' ? req.body.processName : ''; + + if (rawPath === undefined || length === undefined || offset === undefined) { + logger.error({ msg: '[FUSE DAEMON] Read: missing required fields', body: req.body }); + res.set('X-Errno', String(FuseCodes.EINVAL)); + res.send(Buffer.alloc(0)); + return; + } + + const normalizedPath = ensureLeadingSlash(rawPath); + + logger.debug({ + msg: `[FUSE DAEMON] Read signal received for path: ${normalizedPath} by process: ${processName} and length: ${length} offset: ${offset}`, + }); + + const result = await read(normalizedPath, length, offset, processName, container); + + if (result.error) { + res.set('X-Errno', String(result.error.code)); + res.send(Buffer.alloc(0)); + return; + } + + res.set('X-Errno', '0'); + res.set('Content-Type', 'application/octet-stream'); + res.send(result.data); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/release.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/release.controller.test.ts new file mode 100644 index 0000000000..1ddfe04b1b --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/release.controller.test.ts @@ -0,0 +1,69 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { releaseController } from './release.controller'; +import * as releaseServiceModule from '../../services/operations/release.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +vi.mock(import('@internxt/drive-desktop-core/build/backend')); + +describe('releaseController', () => { + const releaseMock = partialSpyOn(releaseServiceModule, 'release'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 when release succeeds', async () => { + req.body = { path: '/some/file.txt', processName: 'cat' }; + releaseMock.mockResolvedValue({ data: undefined }); + + await releaseController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); + + it('should return errno EIO when release fails', async () => { + req.body = { path: '/some/file.txt', processName: 'cat' }; + releaseMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'Upload failed') }); + + await releaseController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EIO }); + }); + + it('should normalize path by adding leading slash', async () => { + req.body = { path: 'some/file.txt', processName: 'cat' }; + releaseMock.mockResolvedValue({ data: undefined }); + + await releaseController(req, res, container); + + expect(releaseMock).toHaveBeenCalledWith(expect.objectContaining({ path: '/some/file.txt' })); + }); + + it('should forward processName to the service', async () => { + req.body = { path: '/file.txt', processName: 'pool-org.gnome.' }; + releaseMock.mockResolvedValue({ data: undefined }); + + await releaseController(req, res, container); + + expect(releaseMock).toHaveBeenCalledWith(expect.objectContaining({ processName: 'pool-org.gnome.' })); + }); + + it('should handle missing body fields gracefully', async () => { + req.body = {}; + releaseMock.mockResolvedValue({ data: undefined }); + + await releaseController(req, res, container); + + expect(releaseMock).toHaveBeenCalledWith(expect.objectContaining({ path: '/', processName: '' })); + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/release.controller.ts b/src/backend/features/virtual-drive/controllers/operations/release.controller.ts new file mode 100644 index 0000000000..ee4315dbf0 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/release.controller.ts @@ -0,0 +1,22 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { release } from '../../services/operations/release.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; +export async function releaseController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + const processName: string = req.body.processName ?? ''; + logger.debug({ + msg: `[FUSE DAEMON] Release signal received for path: ${rawPath} by process: ${processName}`, + }); + const normalizedPath = ensureLeadingSlash(rawPath); + + const result = await release({ path: normalizedPath, processName, container }); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/rename.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/rename.controller.test.ts new file mode 100644 index 0000000000..74bf601015 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/rename.controller.test.ts @@ -0,0 +1,57 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { renameController } from './rename.controller'; +import * as renameServiceModule from '../../services/operations/rename.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; + +describe('renameController', () => { + const renameMock = partialSpyOn(renameServiceModule, 'rename'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno EINVAL when newPath is missing', async () => { + req.body = { oldPath: '/a/path' }; + + await renameController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: FuseCodes.EINVAL }); + expect(renameMock).not.toBeCalled(); + }); + + it('should return errno EINVAL when oldPath is missing', async () => { + req.body = { newPath: '/a/path' }; + + await renameController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: FuseCodes.EINVAL }); + expect(renameMock).not.toBeCalled(); + }); + + it('should return errno 0 when rename succeeds', async () => { + req.body = { oldPath: '/old/path', newPath: '/new/path' }; + renameMock.mockResolvedValue({ data: undefined }); + + await renameController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: 0 }); + }); + + it('should return errno from service when rename fails', async () => { + req.body = { oldPath: '/old/path', newPath: '/new/path' }; + renameMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'rename error') }); + + await renameController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/rename.controller.ts b/src/backend/features/virtual-drive/controllers/operations/rename.controller.ts new file mode 100644 index 0000000000..70c0a22e4e --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/rename.controller.ts @@ -0,0 +1,31 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { rename } from '../../services/operations/rename.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function renameController(req: Request, res: Response, container: Container) { + const rawOldPath = req.body.oldPath; + const rawNewPath = req.body.newPath; + + if (typeof rawOldPath !== 'string' || typeof rawNewPath !== 'string') { + logger.error({ msg: '[FUSE DAEMON] Rename: missing required fields', body: req.body }); + res.json({ errno: FuseCodes.EINVAL }); + return; + } + + const oldPath = ensureLeadingSlash(rawOldPath); + const newPath = ensureLeadingSlash(rawNewPath); + + logger.debug({ msg: '[FUSE DAEMON] Rename signal received', oldPath, newPath }); + + const result = await rename({ src: oldPath, dest: newPath, container }); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/rmdir.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/rmdir.controller.test.ts new file mode 100644 index 0000000000..a9904e09d8 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/rmdir.controller.test.ts @@ -0,0 +1,51 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { rmdirController } from './rmdir.controller'; +import * as rmdirServiceModule from '../../services/operations/rmdir.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +vi.mock('@internxt/drive-desktop-core/build/backend'); +vi.mock('../../services/operations/rmdir.service'); + +describe('rmdirController', () => { + const rmdirMock = partialSpyOn(rmdirServiceModule, 'rmdir'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 when rmdir succeeds', async () => { + req.body = { path: '/some/folder' }; + rmdirMock.mockResolvedValue({ data: undefined }); + + await rmdirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); + + it('should return errno ENOENT when rmdir returns ENOENT', async () => { + req.body = { path: '/missing/folder' }; + rmdirMock.mockResolvedValue({ error: new FuseError(FuseCodes.ENOENT, 'not found') }); + + await rmdirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.ENOENT }); + }); + + it('should return errno EIO when rmdir returns non-ENOENT error', async () => { + req.body = { path: '/some/folder' }; + rmdirMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'io error') }); + + await rmdirController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/rmdir.controller.ts b/src/backend/features/virtual-drive/controllers/operations/rmdir.controller.ts new file mode 100644 index 0000000000..a4395da67d --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/rmdir.controller.ts @@ -0,0 +1,22 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { Request, Response } from 'express'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; +import { rmdir } from '../../services/operations/rmdir.service'; + +export async function rmdirController(req: Request, res: Response, container: Container) { + logger.debug({ msg: '[FUSE DAEMON] Rmdir signal received' }); + + const rawPath: string = req.body.path ?? ''; + const normalizedPath = ensureLeadingSlash(rawPath); + const response = await rmdir(normalizedPath, container); + + if (response.error) { + logger.error({ msg: response.error.message }); + + res.json({ errno: response.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/statfs.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/statfs.controller.test.ts new file mode 100644 index 0000000000..3da6edb2af --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/statfs.controller.test.ts @@ -0,0 +1,46 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { statfsController } from './statfs.controller'; +import * as statfsServiceModule from '../../services/operations/statfs.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('statfsController', () => { + const statfsMock = partialSpyOn(statfsServiceModule, 'statfs'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 and stats on success', async () => { + const stats = { + blocks: 1000000, + bfree: 500000, + bavail: 490000, + files: 100000, + ffree: 90000, + bsize: 4096, + nameLen: 255, + }; + statfsMock.mockResolvedValue({ data: stats }); + + await statfsController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: 0, ...stats }); + }); + + it('should return errno EIO when service fails', async () => { + statfsMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'disk error') }); + + await statfsController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/statfs.controller.ts b/src/backend/features/virtual-drive/controllers/operations/statfs.controller.ts new file mode 100644 index 0000000000..5c64ea5ceb --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/statfs.controller.ts @@ -0,0 +1,14 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { statfs } from '../../services/operations/statfs.service'; + +export async function statfsController(req: Request, res: Response, container: Container) { + const result = await statfs({ container }); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0, ...result.data }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/truncate.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/truncate.controller.test.ts new file mode 100644 index 0000000000..6681c4dd51 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/truncate.controller.test.ts @@ -0,0 +1,48 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { truncateController } from './truncate.controller'; +import * as truncateServiceModule from '../../services/operations/truncate.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('truncateController', () => { + const truncateMock = partialSpyOn(truncateServiceModule, 'truncate'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno EINVAL when payload is invalid', async () => { + req.body = { path: '/some/file.txt', size: -1 }; + + await truncateController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: FuseCodes.EINVAL }); + expect(truncateMock).not.toHaveBeenCalled(); + }); + + it('should return errno 0 when truncate succeeds', async () => { + req.body = { path: '/some/file.txt', size: 0 }; + truncateMock.mockResolvedValue({ data: undefined }); + + await truncateController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: 0 }); + }); + + it('should return errno EIO when truncate fails', async () => { + req.body = { path: '/some/file.txt', size: 0 }; + truncateMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'io error') }); + + await truncateController(req, res, container); + + expect(res.json).toBeCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/truncate.controller.ts b/src/backend/features/virtual-drive/controllers/operations/truncate.controller.ts new file mode 100644 index 0000000000..6bf6b341a7 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/truncate.controller.ts @@ -0,0 +1,27 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { truncate } from '../../services/operations/truncate.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function truncateController(req: Request, res: Response, container: Container) { + const rawPath: string = req.body.path ?? ''; + const rawSize = req.body.size; + + const size = Number.isInteger(rawSize) && rawSize >= 0 ? rawSize : Number.NaN; + + if (!rawPath || Number.isNaN(size)) { + res.json({ errno: FuseCodes.EINVAL }); + return; + } + + const normalizedPath = ensureLeadingSlash(rawPath); + const result = await truncate({ path: normalizedPath, size, container }); + + if (result.error) { + res.json({ errno: result.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/unlink.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/unlink.controller.test.ts new file mode 100644 index 0000000000..267e21c952 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/unlink.controller.test.ts @@ -0,0 +1,51 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { unlinkController } from './unlink.controller'; +import * as unlinkServiceModule from '../../services/operations/unlink.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +vi.mock('@internxt/drive-desktop-core/build/backend'); +vi.mock('../../services/operations/unlink.service'); + +describe('unlinkController', () => { + const unlinkMock = partialSpyOn(unlinkServiceModule, 'unlink'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + }); + + it('should return errno 0 when unlink succeeds', async () => { + req.body = { path: '/some/file.txt' }; + unlinkMock.mockResolvedValue({ data: undefined }); + + await unlinkController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: 0 }); + }); + + it('should return errno ENOENT when unlink returns ENOENT', async () => { + req.body = { path: '/missing.txt' }; + unlinkMock.mockResolvedValue({ error: new FuseError(FuseCodes.ENOENT, 'not found') }); + + await unlinkController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.ENOENT }); + }); + + it('should return errno EIO when unlink returns non-ENOENT error', async () => { + req.body = { path: '/some/file.txt' }; + unlinkMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'io error') }); + + await unlinkController(req, res, container); + + expect(res.json).toHaveBeenCalledWith({ errno: FuseCodes.EIO }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/unlink.controller.ts b/src/backend/features/virtual-drive/controllers/operations/unlink.controller.ts new file mode 100644 index 0000000000..b2d1944c26 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/unlink.controller.ts @@ -0,0 +1,22 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { Request, Response } from 'express'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; +import { unlink } from '../../services/operations/unlink.service'; + +export async function unlinkController(req: Request, res: Response, container: Container) { + logger.debug({ msg: '[FUSE DAEMON] Unlink signal received' }); + + const rawPath: string = req.body.path ?? ''; + const normalizedPath = ensureLeadingSlash(rawPath); + const response = await unlink(normalizedPath, container); + + if (response.error) { + logger.error({ msg: response.error.message }); + + res.json({ errno: response.error.code }); + return; + } + + res.json({ errno: 0 }); +} diff --git a/src/backend/features/virtual-drive/controllers/operations/write.controller.test.ts b/src/backend/features/virtual-drive/controllers/operations/write.controller.test.ts new file mode 100644 index 0000000000..8134e97341 --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/write.controller.test.ts @@ -0,0 +1,94 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { writeController } from './write.controller'; +import * as writeServiceModule from '../../services/operations/write.service'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +describe('writeController', () => { + const writeMock = partialSpyOn(writeServiceModule, 'write'); + let req: ReturnType>; + let res: ReturnType>; + let container: ReturnType>; + + beforeEach(() => { + req = mockDeep(); + res = mockDeep(); + container = mockDeep(); + res.set.mockReturnValue(res); + }); + + it('should return errno EINVAL when payload is invalid', async () => { + req.header.calledWith('X-Path-B64').mockReturnValue(Buffer.from('/some/file.txt', 'utf8').toString('base64')); + req.header.calledWith('X-Offset').mockReturnValue('wrong'); + req.body = Buffer.from('hello'); + + await writeController(req, res, container); + + expect(res.set).toHaveBeenCalledWith('X-Errno', String(FuseCodes.EINVAL)); + expect(res.send).toHaveBeenCalledWith(Buffer.alloc(0)); + expect(writeMock).not.toHaveBeenCalled(); + }); + + it('should return errno 0 and written bytes when write succeeds', async () => { + req.header.calledWith('X-Path-B64').mockReturnValue(Buffer.from('/some/file.txt', 'utf8').toString('base64')); + req.header.calledWith('X-Offset').mockReturnValue('0'); + req.body = Buffer.from('hello'); + writeMock.mockResolvedValue({ data: 5 }); + + await writeController(req, res, container); + + expect(res.set).toHaveBeenCalledWith('X-Errno', '0'); + expect(res.set).toHaveBeenCalledWith('X-Written', '5'); + expect(res.send).toHaveBeenCalledWith(Buffer.alloc(0)); + }); + + it('should return errno EIO when write fails', async () => { + req.header.calledWith('X-Path-B64').mockReturnValue(Buffer.from('/some/file.txt', 'utf8').toString('base64')); + req.header.calledWith('X-Offset').mockReturnValue('0'); + req.body = Buffer.from('hello'); + writeMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'io error') }); + + await writeController(req, res, container); + + expect(res.set).toHaveBeenCalledWith('X-Errno', String(FuseCodes.EIO)); + expect(res.send).toHaveBeenCalledWith(Buffer.alloc(0)); + }); + + it('should decode UTF-8 path from base64 header before write', async () => { + const encodedPath = Buffer.from('/тестовое изображение.jpeg', 'utf8').toString('base64'); + req.header.calledWith('X-Path-B64').mockReturnValue(encodedPath); + req.header.calledWith('X-Offset').mockReturnValue('0'); + req.body = Buffer.from('hello'); + writeMock.mockResolvedValue({ data: 5 }); + + await writeController(req, res, container); + + expect(writeMock).toHaveBeenCalledWith({ + path: '/тестовое изображение.jpeg', + content: Buffer.from('hello'), + offset: 0, + container, + }); + }); + + it('should decode base64 path from header when filename contains newline', async () => { + const newlinePath = '/nombre\narchivo.txt'; + const encodedPath = Buffer.from(newlinePath, 'utf8').toString('base64'); + req.header.calledWith('X-Path-B64').mockReturnValue(encodedPath); + req.header.calledWith('X-Offset').mockReturnValue('0'); + req.body = Buffer.from('hello'); + writeMock.mockResolvedValue({ data: 5 }); + + await writeController(req, res, container); + + expect(writeMock).toHaveBeenCalledWith({ + path: newlinePath, + content: Buffer.from('hello'), + offset: 0, + container, + }); + }); +}); diff --git a/src/backend/features/virtual-drive/controllers/operations/write.controller.ts b/src/backend/features/virtual-drive/controllers/operations/write.controller.ts new file mode 100644 index 0000000000..0c4591420d --- /dev/null +++ b/src/backend/features/virtual-drive/controllers/operations/write.controller.ts @@ -0,0 +1,45 @@ +import { Request, Response } from 'express'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { write } from '../../services/operations/write.service'; +import { ensureLeadingSlash } from '../ensure-leading-slash'; + +export async function writeController(req: Request, res: Response, container: Container) { + const rawBase64Path = req.header('X-Path-B64'); + const rawOffset = req.header('X-Offset'); + const body = req.body; + const content = Buffer.isBuffer(body) ? body : undefined; + const offset = rawOffset ? Number.parseInt(rawOffset, 10) : Number.NaN; + + if (typeof rawBase64Path !== 'string' || Number.isNaN(offset) || !content) { + logger.error({ + msg: '[FUSE DAEMON] Write: missing required fields', + headers: req.headers, + bodyType: typeof req.body, + }); + res.set('X-Errno', String(FuseCodes.EINVAL)); + res.send(Buffer.alloc(0)); + return; + } + const normalizedPath = ensureLeadingSlash(Buffer.from(rawBase64Path, 'base64').toString('utf8')); + + logger.debug({ + msg: '[FUSE DAEMON] Write signal received', + path: normalizedPath, + offset, + length: content.length, + }); + + const result = await write({ path: normalizedPath, content, offset, container }); + + if (result.error) { + res.set('X-Errno', String(result.error.code)); + res.send(Buffer.alloc(0)); + return; + } + + res.set('X-Errno', '0'); + res.set('X-Written', String(result.data)); + res.send(Buffer.alloc(0)); +} diff --git a/src/backend/features/virtual-drive/index.ts b/src/backend/features/virtual-drive/index.ts new file mode 100644 index 0000000000..e6b0f884aa --- /dev/null +++ b/src/backend/features/virtual-drive/index.ts @@ -0,0 +1,2 @@ +export { startFuseDaemonServer, stopFuseDaemonServer } from './services/server.service'; +export { startDaemon, stopDaemon } from './services/daemon.service'; diff --git a/src/backend/features/virtual-drive/ipc/handlers.ts b/src/backend/features/virtual-drive/ipc/handlers.ts new file mode 100644 index 0000000000..40c9c2841e --- /dev/null +++ b/src/backend/features/virtual-drive/ipc/handlers.ts @@ -0,0 +1,22 @@ +import { ipcMain } from 'electron'; +import eventBus from '../../../../apps/main/event-bus'; +import { getVirtualDriveContainer, startVirtualDrive, stopVirtualDrive } from '../services/virtual-drive.service'; +import { updateVirtualDriveContainer } from '../services/update-virtual-drive-container.service'; +import { DependencyInjectionUserProvider } from '../../../../apps/shared/dependency-injection/DependencyInjectionUserProvider'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { getVirtualDriveState } from '../services/daemon.service'; + +function remoteChangesSyncedHandler() { + const container = getVirtualDriveContainer(); + if (container) { + updateVirtualDriveContainer({ container, user: DependencyInjectionUserProvider.get() }); + } else { + logger.warn({ msg: '[FUSE] updateVirtualDriveContainer called before container was initialized' }); + } +} + +export function registerVirtualDriveHandlers() { + eventBus.on('INITIAL_SYNC_READY', startVirtualDrive); + eventBus.on('REMOTE_CHANGES_SYNCHED', remoteChangesSyncedHandler); + ipcMain.handle('get-virtual-drive-status', getVirtualDriveState); +} diff --git a/src/backend/features/virtual-drive/routes/daemon.routes.test.ts b/src/backend/features/virtual-drive/routes/daemon.routes.test.ts new file mode 100644 index 0000000000..9e3096f5cf --- /dev/null +++ b/src/backend/features/virtual-drive/routes/daemon.routes.test.ts @@ -0,0 +1,22 @@ +import { daemonReadyController } from '../controllers/daemon.controller'; +import { buildDaemonRouter } from './daemon.routes'; +import { DAEMON_PATHS } from '../constants'; + +vi.mock('../services/daemon.service'); + +describe('buildDaemonRouter', () => { + it('should register POST /ready and attach daemonReadyController', () => { + const router = buildDaemonRouter(); + + const postRoutes = router.stack + .filter((layer) => layer.route) + .map((layer) => ({ + path: layer.route!.path, + handler: layer.route!.stack[0].handle, + })); + + const readyRoute = postRoutes.find((r) => r.path === DAEMON_PATHS.READY); + expect(readyRoute).toBeDefined(); + expect(readyRoute?.handler).toBe(daemonReadyController); + }); +}); diff --git a/src/backend/features/virtual-drive/routes/daemon.routes.ts b/src/backend/features/virtual-drive/routes/daemon.routes.ts new file mode 100644 index 0000000000..c48cfcc3a6 --- /dev/null +++ b/src/backend/features/virtual-drive/routes/daemon.routes.ts @@ -0,0 +1,11 @@ +import { Router } from 'express'; +import { daemonReadyController } from '../controllers/daemon.controller'; +import { DAEMON_PATHS } from '../constants'; + +export function buildDaemonRouter(): Router { + const router = Router(); + + router.post(DAEMON_PATHS.READY, daemonReadyController); + + return router; +} diff --git a/src/backend/features/virtual-drive/routes/operations.routes.test.ts b/src/backend/features/virtual-drive/routes/operations.routes.test.ts new file mode 100644 index 0000000000..d0b3ad3456 --- /dev/null +++ b/src/backend/features/virtual-drive/routes/operations.routes.test.ts @@ -0,0 +1,37 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { buildOperationsRouter } from './operations.routes'; +import { OPERATION_PATHS } from '../constants'; + +describe('buildOperationsRouter', () => { + let routes: string[]; + + beforeEach(() => { + const container = mockDeep(); + const router = buildOperationsRouter(container); + routes = router.stack.filter((layer) => layer.route).map((layer) => layer.route!.path); + }); + + it('should register POST /getattributes', () => { + expect(routes).toContain(OPERATION_PATHS.GET_ATTR); + expect(routes).toContain(OPERATION_PATHS.CREATE); + expect(routes).toContain(OPERATION_PATHS.WRITE); + expect(routes).toContain(OPERATION_PATHS.RENAME); + }); + + it('should register POST /open', () => { + expect(routes).toContain(OPERATION_PATHS.OPEN); + }); + + it('should register POST /opendir', () => { + expect(routes).toContain(OPERATION_PATHS.OPEN_DIR); + }); + + it('should register POST /read', () => { + expect(routes).toContain(OPERATION_PATHS.READ); + }); + + it('should register POST /release', () => { + expect(routes).toContain(OPERATION_PATHS.RELEASE); + }); +}); diff --git a/src/backend/features/virtual-drive/routes/operations.routes.ts b/src/backend/features/virtual-drive/routes/operations.routes.ts new file mode 100644 index 0000000000..d404145629 --- /dev/null +++ b/src/backend/features/virtual-drive/routes/operations.routes.ts @@ -0,0 +1,45 @@ +import { raw, Router } from 'express'; +import { Container } from 'diod'; +import { OPERATION_PATHS } from '../constants'; +import { getAttributesController } from '../controllers/operations/get-attributes.controller'; +import { openController } from '../controllers/operations/open.controller'; +import { openDirController } from '../controllers/operations/opendir.controller'; +import { readController } from '../controllers/operations/read.controller'; +import { truncateController } from '../controllers/operations/truncate.controller'; +import { createController } from '../controllers/operations/create.controller'; +import { writeController } from '../controllers/operations/write.controller'; +import { releaseController } from '../controllers/operations/release.controller'; +import { mkdirController } from '../controllers/operations/mkdir.controller'; +import { renameController } from '../controllers/operations/rename.controller'; +import { unlinkController } from '../controllers/operations/unlink.controller'; +import { rmdirController } from '../controllers/operations/rmdir.controller'; +import { statfsController } from '../controllers/operations/statfs.controller'; + +// Routes for FUSE operation endpoints (POST /op/). +// Each operation will be registered here as it is implemented in PB-6161. +export function buildOperationsRouter(container: Container): Router { + const router = Router(); + router.post(OPERATION_PATHS.GET_ATTR, (req, res) => getAttributesController(req, res, container)); + router.post(OPERATION_PATHS.OPEN, (req, res) => openController(req, res, container)); + router.post(OPERATION_PATHS.OPEN_DIR, (req, res) => openDirController(req, res, container)); + router.post(OPERATION_PATHS.READ, (req, res) => readController(req, res, container)); + router.post(OPERATION_PATHS.TRUNCATE, (req, res) => truncateController(req, res, container)); + router.post(OPERATION_PATHS.CREATE, (req, res) => createController(req, res, container)); + /** + * v.2.6.0 + * Esteban Galvis Triana + * FUSE write operations can send chunks up to 128 KB. + * We keep the parser limit at 1 MB to avoid PayloadTooLarge errors + * and provide safe headroom for binary write payload handling. + */ + router.post(OPERATION_PATHS.WRITE, raw({ type: 'application/octet-stream', limit: '1mb' }), (req, res) => + writeController(req, res, container), + ); + router.post(OPERATION_PATHS.RELEASE, (req, res) => releaseController(req, res, container)); + router.post(OPERATION_PATHS.MKDIR, (req, res) => mkdirController(req, res, container)); + router.post(OPERATION_PATHS.UNLINK, (req, res) => unlinkController(req, res, container)); + router.post(OPERATION_PATHS.RMDIR, (req, res) => rmdirController(req, res, container)); + router.post(OPERATION_PATHS.RENAME, (req, res) => renameController(req, res, container)); + router.post(OPERATION_PATHS.STAT_FS, (req, res) => statfsController(req, res, container)); + return router; +} diff --git a/src/backend/features/virtual-drive/services/daemon.service.test.ts b/src/backend/features/virtual-drive/services/daemon.service.test.ts new file mode 100644 index 0000000000..2bbbe7bd73 --- /dev/null +++ b/src/backend/features/virtual-drive/services/daemon.service.test.ts @@ -0,0 +1,78 @@ +import { EventEmitter } from 'node:events'; +import { spawn } from 'node:child_process'; +import { resolveDaemonReady, daemonReady, stopDaemon, startDaemon } from './daemon.service'; + +vi.mock('node:child_process', () => ({ + spawn: vi.fn(), +})); + +describe('daemon.service', () => { + describe('resolveDaemonReady', () => { + it('should resolve the daemonReady promise', async () => { + resolveDaemonReady(); + + await expect(daemonReady).resolves.toBeUndefined(); + }); + }); + + describe('startDaemon', () => { + let fakeDaemon: EventEmitter & { kill: ReturnType; stderr: EventEmitter }; + + beforeEach(() => { + fakeDaemon = Object.assign(new EventEmitter(), { + kill: vi.fn(), + stderr: new EventEmitter(), + }); + vi.mocked(spawn).mockReturnValue(fakeDaemon as unknown as ReturnType); + }); + + afterEach(async () => { + const stopPromise = stopDaemon(); + fakeDaemon.emit('exit', 0); + await stopPromise; + }); + + it('should spawn the daemon with the correct environment variables', () => { + startDaemon('/mock/mount'); + + expect(spawn).toHaveBeenCalledWith( + expect.any(String), + [], + expect.objectContaining({ + env: expect.objectContaining({ + INTERNXT_MOUNT: '/mock/mount', + }), + }), + ); + }); + + it('should reject if the daemon exits with a non-zero code', async () => { + const startPromise = startDaemon('/mock/mount'); + fakeDaemon.emit('exit', 1); + + await expect(startPromise).rejects.toThrow('fuse daemon exited before ready with code 1'); + }); + }); + + describe('stopDaemon', () => { + it('should resolve immediately when no daemon is running', async () => { + await expect(stopDaemon()).resolves.toBeUndefined(); + }); + + it('should send SIGTERM and resolve when daemon exits', async () => { + const fakeDaemon = Object.assign(new EventEmitter(), { + kill: vi.fn(), + stderr: new EventEmitter(), + }); + vi.mocked(spawn).mockReturnValue(fakeDaemon as unknown as ReturnType); + + startDaemon('/mock/mount'); + + const stopPromise = stopDaemon(); + fakeDaemon.emit('exit', 0); + + await expect(stopPromise).resolves.toBeUndefined(); + expect(fakeDaemon.kill).toHaveBeenCalledWith('SIGTERM'); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/daemon.service.ts b/src/backend/features/virtual-drive/services/daemon.service.ts new file mode 100644 index 0000000000..501b5d0e2d --- /dev/null +++ b/src/backend/features/virtual-drive/services/daemon.service.ts @@ -0,0 +1,88 @@ +import { spawn, ChildProcess } from 'node:child_process'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { PATHS } from '../../../../core/electron/paths'; +import { FuseDriveStatus } from '../../../../apps/drive/fuse/FuseDriveStatus'; +import { broadcastToWindows } from '../../../../apps/main/windows'; + +let resolveReady: () => void; +let daemon: ChildProcess | null = null; +let status: FuseDriveStatus = 'UNMOUNTED'; +const SIGKILL_TIMEOUT_MS = 5_000; + +export const daemonReady = new Promise((resolve) => { + resolveReady = resolve; +}); + +export function resolveDaemonReady(): void { + resolveReady(); +} + +export function getVirtualDriveState(): FuseDriveStatus { + return status; +} + +export function startDaemon(mountPoint: string): Promise { + const spawnedDaemon = spawn(PATHS.FUSE_DAEMON_BINARY, [], { + env: { + ...process.env, + INTERNXT_MOUNT: mountPoint, + INTERNXT_SOCKET: PATHS.FUSE_DAEMON_SOCKET, + INTERNXT_LOG_FILE: PATHS.FUSE_DAEMON_LOG, + }, + }); + + daemon = spawnedDaemon; + + spawnedDaemon.stderr?.on('data', (data: Buffer) => { + logger.debug({ msg: `[FUSE DAEMON] ${data.toString().trim()}` }); + }); + + spawnedDaemon.once('exit', (code: number | null) => { + if (code !== 0 && code !== null) { + status = 'ERROR'; + broadcastToWindows('virtual-drive-status-change', 'ERROR'); + } else { + status = 'UNMOUNTED'; + } + daemon = null; + }); + + return new Promise((resolve, reject) => { + spawnedDaemon.once('exit', (code: number) => { + if (code !== 0) { + reject(new Error(`fuse daemon exited before ready with code ${code}`)); + } + }); + + daemonReady.then(() => { + logger.debug({ msg: '[VIRTUAL DRIVE] virtual drive mounted and ready' }); + status = 'MOUNTED'; + broadcastToWindows('virtual-drive-status-change', 'MOUNTED'); + resolve(); + }); + }); +} + +export function stopDaemon(): Promise { + return new Promise((resolve) => { + if (!daemon) { + status = 'UNMOUNTED'; + resolve(); + return; + } + + const timeout = setTimeout(() => { + logger.warn({ msg: '[FUSE DAEMON] daemon did not exit after SIGTERM, sending SIGKILL' }); + daemon?.kill('SIGKILL'); + }, SIGKILL_TIMEOUT_MS); + + daemon.once('exit', () => { + clearTimeout(timeout); + daemon = null; + status = 'UNMOUNTED'; + resolve(); + }); + + daemon.kill('SIGTERM'); + }); +} diff --git a/src/backend/features/virtual-drive/services/operations/create.service.test.ts b/src/backend/features/virtual-drive/services/operations/create.service.test.ts new file mode 100644 index 0000000000..73eddbe800 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/create.service.test.ts @@ -0,0 +1,32 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { create } from './create.service'; + +describe('create', () => { + let container: ReturnType>; + const temporalFileCreator = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(TemporalFileCreator).mockReturnValue(temporalFileCreator); + }); + + it('should create temporal file when request is valid', async () => { + const { data, error } = await create('/some/file.txt', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(temporalFileCreator.run).toHaveBeenCalledWith('/some/file.txt'); + }); + + it('should return EIO when temporal file creation fails', async () => { + temporalFileCreator.run.mockRejectedValue(new Error('boom')); + + const { data, error } = await create('/some/file.txt', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/create.service.ts b/src/backend/features/virtual-drive/services/operations/create.service.ts new file mode 100644 index 0000000000..b9be2a2ade --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/create.service.ts @@ -0,0 +1,16 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; + +export async function create(path: string, container: Container): Promise> { + try { + await container.get(TemporalFileCreator).run(path); + return { data: undefined }; + } catch (error: unknown) { + logger.error({ msg: '[FUSE - Create] Unable to create temporal file', error, path }); + return { error: new FuseError(FuseCodes.EIO, `[FUSE - Create] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/ensure-temporal-file-exists-for-auxiliary-path.test.ts b/src/backend/features/virtual-drive/services/operations/ensure-temporal-file-exists-for-auxiliary-path.test.ts new file mode 100644 index 0000000000..51129d8781 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/ensure-temporal-file-exists-for-auxiliary-path.test.ts @@ -0,0 +1,59 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { ensureTemporalFileExistsForAuxiliaryPath } from './ensure-temporal-file-exists-for-auxiliary-path'; + +describe('ensure-temporal-file-exists-for-auxiliary-path', () => { + let container: ReturnType>; + const temporalFileByPathFinder = mockDeep(); + const temporalFileCreator = mockDeep(); + + beforeEach(() => { + vi.restoreAllMocks(); + container = mockDeep(); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFileByPathFinder); + container.get.calledWith(TemporalFileCreator).mockReturnValue(temporalFileCreator); + temporalFileByPathFinder.run.mockResolvedValue(undefined); + }); + + it('should skip when path is not auxiliary', async () => { + // Given + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(false); + + // When + await ensureTemporalFileExistsForAuxiliaryPath({ path: '/some/file.txt', container }); + + // Then + expect(temporalFileByPathFinder.run).not.toHaveBeenCalled(); + expect(temporalFileCreator.run).not.toHaveBeenCalled(); + }); + + it('should skip creation when auxiliary temporal file already exists', async () => { + // Given + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(true); + temporalFileByPathFinder.run.mockResolvedValue( + {} as unknown as Awaited>, + ); + + // When + await ensureTemporalFileExistsForAuxiliaryPath({ path: '/.test-file.txt.swp', container }); + + // Then + expect(temporalFileByPathFinder.run).toHaveBeenCalledWith('/.test-file.txt.swp'); + expect(temporalFileCreator.run).not.toHaveBeenCalled(); + }); + + it('should create temporal file when auxiliary path has no temporal file yet', async () => { + // Given + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(true); + + // When + await ensureTemporalFileExistsForAuxiliaryPath({ path: '/.test-file.txt.swp', container }); + + // Then + expect(temporalFileByPathFinder.run).toHaveBeenCalledWith('/.test-file.txt.swp'); + expect(temporalFileCreator.run).toHaveBeenCalledWith('/.test-file.txt.swp'); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/ensure-temporal-file-exists-for-auxiliary-path.ts b/src/backend/features/virtual-drive/services/operations/ensure-temporal-file-exists-for-auxiliary-path.ts new file mode 100644 index 0000000000..270ea475e1 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/ensure-temporal-file-exists-for-auxiliary-path.ts @@ -0,0 +1,23 @@ +import { Container } from 'diod'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; + +type Props = { + path: string; + container: Container; +}; + +export async function ensureTemporalFileExistsForAuxiliaryPath({ path, container }: Props): Promise { + if (!TemporalFile.isTemporaryPath(path)) { + return; + } + + const temporalFile = await container.get(TemporalFileByPathFinder).run(path); + + if (temporalFile) { + return; + } + + await container.get(TemporalFileCreator).run(path); +} diff --git a/src/backend/features/virtual-drive/services/operations/get-attributes.service.test.ts b/src/backend/features/virtual-drive/services/operations/get-attributes.service.test.ts new file mode 100644 index 0000000000..762ce3f533 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/get-attributes.service.test.ts @@ -0,0 +1,96 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { getAttributes } from './get-attributes.service'; +import { FILE_MODE, FOLDER_MODE } from '../../constants'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { SingleFolderMatchingSearcher } from '../../../../../context/virtual-drive/folders/application/SingleFolderMatchingSearcher'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import type { File } from '../../../../../context/virtual-drive/files/domain/File'; +import type { Folder } from '../../../../../context/virtual-drive/folders/domain/Folder'; +import type { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; + +vi.mock('@internxt/drive-desktop-core/build/backend'); + +describe('getAttributes', () => { + let now: Date; + let container: ReturnType>; + const fileSearcher = mockDeep(); + const folderSearcher = mockDeep(); + const temporalFinder = mockDeep(); + + beforeEach(() => { + now = new Date(); + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(fileSearcher); + fileSearcher.run.mockResolvedValue(undefined); + folderSearcher.run.mockResolvedValue(undefined); + }); + + describe('when path is root', () => { + it('should return folder attributes for "/"', async () => { + const { data, error } = await getAttributes('/', container); + + expect(error).toBeUndefined(); + expect(data).toMatchObject({ mode: FOLDER_MODE, size: 0, nlink: 2 }); + }); + + it('should return folder attributes for empty string', async () => { + const { data, error } = await getAttributes('', container); + + expect(error).toBeUndefined(); + expect(data).toMatchObject({ mode: FOLDER_MODE, size: 0, nlink: 2 }); + }); + }); + + describe('when a file is found', () => { + it('should return file attributes', async () => { + fileSearcher.run.mockResolvedValue({ size: 4096, createdAt: now, updatedAt: now } as unknown as File); + + const { data, error } = await getAttributes('/some/file.txt', container); + + expect(error).toBeUndefined(); + expect(data).toMatchObject({ mode: FILE_MODE, size: 4096, nlink: 1 }); + }); + }); + + describe('when a folder is found', () => { + it('should return folder attributes', async () => { + folderSearcher.run.mockResolvedValue({ createdAt: now, updatedAt: now } as unknown as Folder); + container.get.calledWith(SingleFolderMatchingSearcher).mockReturnValue(folderSearcher); + + const { data, error } = await getAttributes('/some/folder', container); + + expect(error).toBeUndefined(); + expect(data).toMatchObject({ mode: FOLDER_MODE, size: 0, nlink: 2 }); + }); + }); + + describe('when a temporal file is found', () => { + it('should return file attributes', async () => { + container.get.calledWith(SingleFolderMatchingSearcher).mockReturnValue(folderSearcher); + + temporalFinder.run.mockResolvedValue({ size: { value: 2048 }, createdAt: now } as unknown as TemporalFile); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFinder); + + const { data, error } = await getAttributes('/some/temp.txt', container); + + expect(error).toBeUndefined(); + expect(data).toMatchObject({ mode: FILE_MODE, size: 2048, nlink: 1 }); + }); + }); + + describe('when nothing is found', () => { + it('should return ENOENT error', async () => { + container.get.calledWith(SingleFolderMatchingSearcher).mockReturnValue(folderSearcher); + + temporalFinder.run.mockResolvedValue(undefined); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFinder); + + const { data, error } = await getAttributes('/missing/file.txt', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.ENOENT); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/get-attributes.service.ts b/src/backend/features/virtual-drive/services/operations/get-attributes.service.ts new file mode 100644 index 0000000000..eb376112cd --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/get-attributes.service.ts @@ -0,0 +1,83 @@ +import { Container } from 'diod'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { FILE_MODE, FOLDER_MODE, GetAttributesCallbackData } from '../../constants'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FileStatuses } from '../../../../../context/virtual-drive/files/domain/FileStatus'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { SingleFolderMatchingSearcher } from '../../../../../context/virtual-drive/folders/application/SingleFolderMatchingSearcher'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +export async function getAttributes( + path: string, + container: Container, +): Promise> { + if (path === '/' || path === '') { + return { + data: { + mode: FOLDER_MODE, + size: 0, + mtime: new Date(), + ctime: new Date(), + atime: undefined, + uid: process.getuid?.() || 0, + gid: process.getgid?.() || 0, + nlink: 2, + }, + }; + } + + const file = await container.get(FirstsFileSearcher).run({ + path, + status: FileStatuses.EXISTS, + }); + if (file) { + return { + data: { + mode: FILE_MODE, + size: file.size, + ctime: file.createdAt, + mtime: file.updatedAt, + atime: new Date(), + uid: process.getuid?.() || 0, + gid: process.getgid?.() || 0, + nlink: 1, + }, + }; + } + const folder = await container.get(SingleFolderMatchingSearcher).run({ + path, + }); + if (folder) { + return { + data: { + mode: FOLDER_MODE, + size: 0, + ctime: folder.createdAt, + mtime: folder.updatedAt, + atime: folder.createdAt, + uid: process.getuid?.() || 0, + gid: process.getgid?.() || 0, + nlink: 2, + }, + }; + } + const document = await container.get(TemporalFileByPathFinder).run(path); + + if (document) { + return { + data: { + mode: FILE_MODE, + size: document.size.value, + mtime: new Date(), + ctime: document.createdAt, + atime: document.createdAt, + uid: process.getuid?.() || 0, + gid: process.getgid?.() || 0, + nlink: 1, + }, + }; + } + const msg = `[FUSE - GetAttributes] File not found: ${path}`; + return { error: new FuseError(FuseCodes.ENOENT, msg) }; +} diff --git a/src/backend/features/virtual-drive/services/operations/mkdir.service.test.ts b/src/backend/features/virtual-drive/services/operations/mkdir.service.test.ts new file mode 100644 index 0000000000..713fd1af1f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/mkdir.service.test.ts @@ -0,0 +1,48 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FolderCreator } from '../../../../../context/virtual-drive/folders/application/create/FolderCreator'; +import { SyncFolderMessenger } from '../../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { mkdir } from './mkdir.service'; + +describe('mkdir', () => { + let container: ReturnType>; + const folderCreator = mockDeep(); + const syncFolderMessenger = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(FolderCreator).mockReturnValue(folderCreator); + container.get.calledWith(SyncFolderMessenger).mockReturnValue(syncFolderMessenger); + }); + + it('should create folder and notify when path is valid', async () => { + const { data, error } = await mkdir('/Documents/NewFolder', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(syncFolderMessenger.creating).toBeCalledWith('/Documents/NewFolder'); + expect(folderCreator.run).toBeCalledWith('/Documents/NewFolder'); + expect(syncFolderMessenger.created).toBeCalledWith('/Documents/NewFolder'); + }); + + it('should return success without calling FolderCreator when path starts with /.Trash', async () => { + const { data, error } = await mkdir('/.Trash-1000/files/doc.txt', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(folderCreator.run).not.toBeCalled(); + }); + + it('should return EIO and notify issue when FolderCreator throws', async () => { + folderCreator.run.mockRejectedValue(new Error('remote error')); + + const { data, error } = await mkdir('/Documents/NewFolder', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + expect(syncFolderMessenger.issue).toBeCalledWith( + expect.objectContaining({ error: 'FOLDER_CREATE_ERROR', cause: 'UNKNOWN' }), + ); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/mkdir.service.ts b/src/backend/features/virtual-drive/services/operations/mkdir.service.ts new file mode 100644 index 0000000000..2dff8ec1dc --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/mkdir.service.ts @@ -0,0 +1,33 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { basename } from 'node:path'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { FolderCreator } from '../../../../../context/virtual-drive/folders/application/create/FolderCreator'; +import { SyncFolderMessenger } from '../../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { VirtualDriveFolderIssue } from '../../../../../shared/issues/VirtualDriveIssue'; + +export async function mkdir(path: string, container: Container): Promise> { + if (path.startsWith('/.Trash')) { + return { data: undefined }; + } + + try { + await container.get(SyncFolderMessenger).creating(path); + await container.get(FolderCreator).run(path); + await container.get(SyncFolderMessenger).created(path); + return { data: undefined }; + } catch (error: unknown) { + logger.error({ msg: '[FUSE - Mkdir] Unable to create folder', error, path }); + + const issue: VirtualDriveFolderIssue = { + error: 'FOLDER_CREATE_ERROR', + cause: 'UNKNOWN', + name: basename(path), + }; + await container.get(SyncFolderMessenger).issue(issue); + + return { error: new FuseError(FuseCodes.EIO, `[FUSE - Mkdir] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/open.service.test.ts b/src/backend/features/virtual-drive/services/operations/open.service.test.ts new file mode 100644 index 0000000000..69bf8a5f76 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/open.service.test.ts @@ -0,0 +1,76 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { open } from './open.service'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import type { File } from '../../../../../context/virtual-drive/files/domain/File'; + +describe('open', () => { + let container: ReturnType>; + const fileSearcher = mockDeep(); + const temporalFinder = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(fileSearcher); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFinder); + fileSearcher.run.mockResolvedValue(undefined); + temporalFinder.run.mockResolvedValue(undefined); + }); + + describe('when a virtual file is found', () => { + it('should return success', async () => { + fileSearcher.run.mockResolvedValue({} as unknown as File); + + const { data, error } = await open('/some/file.txt', 'cat', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + }); + }); + + describe('when a temporal file is found', () => { + it('should return success', async () => { + temporalFinder.run.mockResolvedValue({} as unknown as TemporalFile); + + const { data, error } = await open('/some/file.txt', 'cat', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + }); + }); + + describe('when no file is found', () => { + it('should return ENOENT', async () => { + const { data, error } = await open('/missing/file.txt', 'cat', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.ENOENT); + }); + }); + + describe('when an unexpected error is thrown on a non-temporary path', () => { + it('should return EIO', async () => { + fileSearcher.run.mockRejectedValue(new Error('unexpected')); + + const { data, error } = await open('/some/file.txt', 'cat', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); + }); + + describe('when an unexpected error is thrown on a temporary path', () => { + it('should return EEXIST', async () => { + fileSearcher.run.mockRejectedValue(new Error('unexpected')); + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(true); + + const { data, error } = await open('/some/.tmp123', 'cat', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EEXIST); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/open.service.ts b/src/backend/features/virtual-drive/services/operations/open.service.ts new file mode 100644 index 0000000000..f7d357d458 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/open.service.ts @@ -0,0 +1,36 @@ +import { Container } from 'diod'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; + +export async function open(path: string, processName: string, container: Container): Promise> { + try { + const virtualFile = await container.get(FirstsFileSearcher).run({ path }); + + if (virtualFile) { + return { data: undefined }; + } + + const temporalFile = await container.get(TemporalFileByPathFinder).run(path); + + if (temporalFile) { + return { data: undefined }; + } + + const msg = `[FUSE - Open] File not found: ${path}`; + logger.error({ msg, processName }); + return { error: new FuseError(FuseCodes.ENOENT, msg) }; + } catch (err) { + if (TemporalFile.isTemporaryPath(path)) { + const msg = `[FUSE - Open] Auxiliary path conflict: ${path}`; + return { error: new FuseError(FuseCodes.EEXIST, msg) }; + } + + logger.error({ msg: '[FUSE - Open] Unexpected error', error: err, path }); + return { error: new FuseError(FuseCodes.EIO, `[FUSE - Open] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/opendir.service.test.ts b/src/backend/features/virtual-drive/services/operations/opendir.service.test.ts new file mode 100644 index 0000000000..45401df69f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/opendir.service.test.ts @@ -0,0 +1,83 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { opendir } from './opendir.service'; +import { FilesByFolderPathSearcher } from '../../../../../context/virtual-drive/files/application/search/FilesByFolderPathSearcher'; +import { FoldersByParentPathLister } from '../../../../../context/virtual-drive/folders/application/FoldersByParentPathLister'; +import { TemporalFileByFolderFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByFolderFinder'; +import { FolderNotFoundError } from '../../../../../context/virtual-drive/folders/domain/errors/FolderNotFoundError'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FILE_MODE, FOLDER_MODE } from '../../constants'; +import type { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; + +describe('opendir', () => { + let container: ReturnType>; + const fileSearcher = mockDeep(); + const folderLister = mockDeep(); + const temporalFinder = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(FilesByFolderPathSearcher).mockReturnValue(fileSearcher); + container.get.calledWith(FoldersByParentPathLister).mockReturnValue(folderLister); + container.get.calledWith(TemporalFileByFolderFinder).mockReturnValue(temporalFinder); + fileSearcher.run.mockResolvedValue([]); + folderLister.run.mockResolvedValue([]); + temporalFinder.run.mockResolvedValue([]); + }); + + describe('when directory has files and subfolders', () => { + it('should return entries with correct modes', async () => { + fileSearcher.run.mockResolvedValue(['file.txt', 'photo.jpg']); + folderLister.run.mockResolvedValue(['subdir']); + + const { data, error } = await opendir('/some/folder', container); + + expect(error).toBeUndefined(); + expect(data?.entries).toStrictEqual([ + { name: 'file.txt', mode: FILE_MODE }, + { name: 'photo.jpg', mode: FILE_MODE }, + { name: 'subdir', mode: FOLDER_MODE }, + ]); + }); + }); + + describe('when directory has auxiliary temporal files', () => { + it('should include only auxiliary temporal files in entries', async () => { + const auxiliaryFile = mockDeep(); + auxiliaryFile.isAuxiliary.mockReturnValue(true); + (auxiliaryFile as { name: string }).name = 'aux.tmp'; + + const nonAuxiliaryFile = mockDeep(); + nonAuxiliaryFile.isAuxiliary.mockReturnValue(false); + + temporalFinder.run.mockResolvedValue([auxiliaryFile, nonAuxiliaryFile]); + + const { data, error } = await opendir('/some/folder', container); + + expect(error).toBeUndefined(); + expect(data?.entries).toStrictEqual([{ name: 'aux.tmp', mode: FILE_MODE }]); + }); + }); + + describe('when folder is not yet synced', () => { + it('should return empty entries', async () => { + folderLister.run.mockRejectedValue(new FolderNotFoundError('not synced')); + + const { data, error } = await opendir('/unsynced/folder', container); + + expect(error).toBeUndefined(); + expect(data?.entries).toStrictEqual([]); + }); + }); + + describe('when an unexpected error is thrown', () => { + it('should return EIO', async () => { + fileSearcher.run.mockRejectedValue(new Error('unexpected')); + + const { data, error } = await opendir('/some/folder', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/opendir.service.ts b/src/backend/features/virtual-drive/services/operations/opendir.service.ts new file mode 100644 index 0000000000..0731053d6c --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/opendir.service.ts @@ -0,0 +1,44 @@ +import { Container } from 'diod'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { FILE_MODE, FOLDER_MODE } from '../../constants'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FilesByFolderPathSearcher } from '../../../../../context/virtual-drive/files/application/search/FilesByFolderPathSearcher'; +import { FoldersByParentPathLister } from '../../../../../context/virtual-drive/folders/application/FoldersByParentPathLister'; +import { TemporalFileByFolderFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByFolderFinder'; +import { FolderNotFoundError } from '../../../../../context/virtual-drive/folders/domain/errors/FolderNotFoundError'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; + +export type DirEntry = { + name: string; + mode: number; +}; + +export type OpenDirData = { + entries: DirEntry[]; +}; + +export async function opendir(path: string, container: Container): Promise> { + try { + const [fileNames, folderNames, temporalFiles] = await Promise.all([ + container.get(FilesByFolderPathSearcher).run(path), + container.get(FoldersByParentPathLister).run(path), + container.get(TemporalFileByFolderFinder).run(path), + ]); + + const entries: DirEntry[] = [ + ...fileNames.map((name) => ({ name, mode: FILE_MODE })), + ...folderNames.map((name) => ({ name, mode: FOLDER_MODE })), + ...temporalFiles.filter((f) => f.isAuxiliary()).map((f) => ({ name: f.name, mode: FILE_MODE })), + ]; + + return { data: { entries } }; + } catch (err) { + if (err instanceof FolderNotFoundError) { + logger.debug({ msg: '[FUSE - OpenDir] Folder not yet synced, returning empty', path }); + return { data: { entries: [] } }; + } + logger.error({ msg: '[FUSE - OpenDir] Error reading directory', error: err, path }); + return { error: new FuseError(FuseCodes.EIO, `[FUSE - OpenDir] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/read.service.test.ts b/src/backend/features/virtual-drive/services/operations/read.service.test.ts new file mode 100644 index 0000000000..e22aa5f13e --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/read.service.test.ts @@ -0,0 +1,96 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { read } from './read.service'; +import * as handleReadCallbackModule from '../../../../features/fuse/on-read/handle-read-callback'; +import { partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { StorageFilesRepository } from '../../../../../context/storage/StorageFiles/domain/StorageFilesRepository'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { DownloadProgressTracker } from '../../../../../context/shared/domain/DownloadProgressTracker'; +import * as getCredentialsModule from '../../../../../apps/main/auth/get-credentials'; +import { DependencyInjectionUserProvider } from '../../../../../apps/shared/dependency-injection/DependencyInjectionUserProvider'; +import * as buildNetworkClientModule from '../../../../../infra/environment/download-file/build-network-client'; + +const handleReadCallbackMock = partialSpyOn(handleReadCallbackModule, 'handleReadCallback'); +const getCredentialsMock = partialSpyOn(getCredentialsModule, 'getCredentials'); +const userProviderGetMock = partialSpyOn(DependencyInjectionUserProvider, 'get'); +const buildNetworkClientMock = partialSpyOn(buildNetworkClientModule, 'buildNetworkClient'); + +describe('read', () => { + let container: ReturnType>; + const fileSearcher = mockDeep(); + const temporalFinder = mockDeep(); + const repo = mockDeep(); + const tracker = mockDeep(); + const network = {}; + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(fileSearcher); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFinder); + container.get.calledWith(StorageFilesRepository).mockReturnValue(repo); + container.get.calledWith(DownloadProgressTracker).mockReturnValue(tracker); + getCredentialsMock.mockReturnValue({ mnemonic: 'mnemonic' } as never); + userProviderGetMock.mockReturnValue({ + bucket: 'bucket-id', + bridgeUser: 'bridge-user', + userId: 'user-id', + } as never); + buildNetworkClientMock.mockReturnValue(network as never); + }); + + describe('when handleReadCallback succeeds', () => { + it('should return the buffer from handleReadCallback', async () => { + const chunk = Buffer.from('file data'); + handleReadCallbackMock.mockResolvedValue({ data: chunk }); + + const { data, error } = await read('/file.mp4', 10, 0, 'vlc', container); + + expect(error).toBeUndefined(); + expect(data).toBe(chunk); + }); + + it('should forward path, length, position and processName to handleReadCallback', async () => { + handleReadCallbackMock.mockResolvedValue({ data: Buffer.alloc(0) }); + + await read('/file.mp4', 32768, 4096, 'vlc', container); + + expect(handleReadCallbackMock).toHaveBeenCalledWith( + expect.objectContaining({ + bucketId: 'bucket-id', + mnemonic: 'mnemonic', + network, + path: '/file.mp4', + range: { + length: 32768, + position: 4096, + }, + processName: 'vlc', + }), + ); + }); + }); + + describe('when handleReadCallback returns an error', () => { + it('should propagate the error', async () => { + handleReadCallbackMock.mockResolvedValue({ error: { code: FuseCodes.ENOENT } }); + + const { data, error } = await read('/missing.mp4', 10, 0, 'vlc', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.ENOENT); + }); + }); + + describe('when an unexpected error is thrown', () => { + it('should return EIO', async () => { + handleReadCallbackMock.mockRejectedValue(new Error('unexpected')); + + const { data, error } = await read('/file.mp4', 10, 0, 'vlc', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/read.service.ts b/src/backend/features/virtual-drive/services/operations/read.service.ts new file mode 100644 index 0000000000..e875de543a --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/read.service.ts @@ -0,0 +1,62 @@ +import { Container } from 'diod'; +import { type Result } from '../../../../../context/shared/domain/Result'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { StorageFilesRepository } from '../../../../../context/storage/StorageFiles/domain/StorageFilesRepository'; +import { StorageFile } from '../../../../../context/storage/StorageFiles/domain/StorageFile'; +import { DownloadProgressTracker } from '../../../../../context/shared/domain/DownloadProgressTracker'; +import { handleReadCallback } from '../../../../features/fuse/on-read/handle-read-callback'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { getCredentials } from '../../../../../apps/main/auth/get-credentials'; +import { DependencyInjectionUserProvider } from '../../../../../apps/shared/dependency-injection/DependencyInjectionUserProvider'; +import { buildNetworkClient } from '../../../../../infra/environment/download-file/build-network-client'; + +export async function read( + path: string, + length: number, + position: number, + processName: string, + container: Container, +): Promise> { + try { + const { mnemonic } = getCredentials(); + const user = DependencyInjectionUserProvider.get(); + const network = buildNetworkClient({ bridgeUser: user.bridgeUser, userId: user.userId }); + const repo = container.get(StorageFilesRepository); + const tracker = container.get(DownloadProgressTracker); + + return await handleReadCallback({ + findVirtualFile: (p) => container.get(FirstsFileSearcher).run({ path: p }), + findTemporalFile: (p) => container.get(TemporalFileByPathFinder).run(p), + onDownloadProgress: (name, extension, bytesDownloaded, fileSize, elapsedTime) => { + tracker.downloadUpdate(name, extension, { + percentage: Math.min(bytesDownloaded / fileSize, 1), + elapsedTime, + }); + }, + saveToRepository: async (contentsId, size, uuid, name, extension) => { + const storage = StorageFile.from({ + id: contentsId, + virtualId: uuid, + size, + }); + await repo.register(storage); + tracker.downloadFinished(name, extension); + }, + bucketId: user.bucket, + mnemonic, + network, + path, + range: { + length, + position, + }, + processName, + }); + } catch (err) { + logger.error({ msg: '[FUSE - Read] Unexpected error', error: err, path }); + return { error: new FuseError(FuseCodes.EIO, `[FUSE - Read] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/release.service.test.ts b/src/backend/features/virtual-drive/services/operations/release.service.test.ts new file mode 100644 index 0000000000..1d024bf2be --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/release.service.test.ts @@ -0,0 +1,131 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { release } from './release.service'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFileUploader } from '../../../../../context/storage/TemporalFiles/application/upload/TemporalFileUploader'; +import { TemporalFileDeleter } from '../../../../../context/storage/TemporalFiles/application/deletion/TemporalFileDeleter'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { File, FileAttributes } from '../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../context/virtual-drive/files/domain/FileStatus'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { call, calls } from '../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/Documents/report.pdf', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +function createTemporalFile(path: string): TemporalFile { + return TemporalFile.from({ path, size: 100, createdAt: new Date(), modifiedAt: new Date() }); +} + +function createAuxiliaryFile(path: string): TemporalFile { + return TemporalFile.from({ path, size: 0, createdAt: new Date(), modifiedAt: new Date() }); +} + +describe('release', () => { + let container: ReturnType>; + const finder = mockDeep(); + const uploader = mockDeep(); + const deleter = mockDeep(); + const fileSearcher = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(finder); + container.get.calledWith(TemporalFileUploader).mockReturnValue(uploader); + container.get.calledWith(TemporalFileDeleter).mockReturnValue(deleter); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(fileSearcher); + fileSearcher.run.mockResolvedValue(undefined); + }); + + describe('when no temporal file is found', () => { + it('should return success without uploading', async () => { + finder.run.mockResolvedValue(undefined); + + const { data, error } = await release({ path: '/Documents/file.pdf', processName: 'cat', container }); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + calls(uploader.run).toHaveLength(0); + }); + }); + + describe('when an auxiliary file is found', () => { + it('should return success, skip upload and delete it', async () => { + finder.run.mockResolvedValue(createAuxiliaryFile('/Documents/.~lock.file.odt#')); + + const { data, error } = await release({ path: '/Documents/.~lock.file.odt#', processName: 'cat', container }); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + calls(uploader.run).toHaveLength(0); + call(deleter.run).toStrictEqual('/Documents/.~lock.file.odt#'); + }); + }); + + describe('when a temporal file is found', () => { + it('should upload without replaces when no virtual file exists', async () => { + const temporalFile = createTemporalFile('/Documents/report.pdf'); + finder.run.mockResolvedValue(temporalFile); + fileSearcher.run.mockResolvedValue(undefined); + uploader.run.mockResolvedValue('contents-id-123'); + + const { data, error } = await release({ path: '/Documents/report.pdf', processName: 'cat', container }); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + call(uploader.run).toStrictEqual([temporalFile, undefined]); + }); + + it('should upload with replaces when a virtual file exists at the same path', async () => { + const temporalFile = createTemporalFile('/Documents/report.pdf'); + const existingFile = File.from(fileAttrs); + finder.run.mockResolvedValue(temporalFile); + fileSearcher.run.mockResolvedValue(existingFile); + uploader.run.mockResolvedValue('new-contents-id'); + + const { data, error } = await release({ path: '/Documents/report.pdf', processName: 'cat', container }); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + call(uploader.run).toStrictEqual([ + temporalFile, + { contentsId: existingFile.contentsId, name: existingFile.name, extension: existingFile.type }, + ]); + }); + + it('should delete the file and return EIO when upload fails', async () => { + finder.run.mockResolvedValue(createTemporalFile('/Documents/report.pdf')); + uploader.run.mockRejectedValue(new Error('Network error')); + + const { data, error } = await release({ path: '/Documents/report.pdf', processName: 'cat', container }); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + call(deleter.run).toStrictEqual('/Documents/report.pdf'); + }); + }); + + describe('when finder throws an unexpected error', () => { + it('should return EIO without uploading or deleting', async () => { + finder.run.mockRejectedValue(new Error('DB error')); + + const { data, error } = await release({ path: '/Documents/report.pdf', processName: 'cat', container }); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + calls(uploader.run).toHaveLength(0); + calls(deleter.run).toHaveLength(0); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/release.service.ts b/src/backend/features/virtual-drive/services/operations/release.service.ts new file mode 100644 index 0000000000..017863a7e4 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/release.service.ts @@ -0,0 +1,48 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { type Result } from '../../../../../context/shared/domain/Result'; +import { FuseError, FuseIOError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFileUploader } from '../../../../../context/storage/TemporalFiles/application/upload/TemporalFileUploader'; +import { TemporalFileDeleter } from '../../../../../context/storage/TemporalFiles/application/deletion/TemporalFileDeleter'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { FileStatuses } from '../../../../../context/virtual-drive/files/domain/FileStatus'; +type Props = { + path: string; + processName: string; + container: Container; +}; +export async function release({ path, processName, container }: Props): Promise> { + try { + const temporalFile = await container.get(TemporalFileByPathFinder).run(path); + + if (!temporalFile) { + logger.debug({ msg: '[Release] No temporal file found, nothing to upload', path, processName }); + return { data: undefined }; + } + + if (temporalFile.isAuxiliary()) { + logger.debug({ msg: '[Release] Auxiliary file detected, deleting without upload', path, processName }); + await container.get(TemporalFileDeleter).run(path); + return { data: undefined }; + } + + const existingFile = await container.get(FirstsFileSearcher).run({ path, status: FileStatuses.EXISTS }); + const replaces = existingFile + ? { contentsId: existingFile.contentsId, name: existingFile.name, extension: existingFile.type } + : undefined; + + try { + await container.get(TemporalFileUploader).run(temporalFile, replaces); + logger.debug({ msg: '[Release] Temporal file uploaded', path, processName }); + return { data: undefined }; + } catch (uploadError) { + logger.error({ msg: '[Release] Upload failed, deleting temporal file', error: uploadError, path, processName }); + await container.get(TemporalFileDeleter).run(path); + return { error: new FuseIOError('Upload failed due to insufficient storage or network issues.') }; + } + } catch (err: unknown) { + logger.error({ msg: '[Release] Unexpected error', error: err, path, processName }); + return { error: new FuseIOError('An unexpected error occurred during file release.') }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/rename.service.test.ts b/src/backend/features/virtual-drive/services/operations/rename.service.test.ts new file mode 100644 index 0000000000..8326bb3ebb --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename.service.test.ts @@ -0,0 +1,96 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { rename } from './rename.service'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError, FuseNoSuchFileOrDirectoryError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import * as handleFileRenameIntentModule from './rename/handle-file-rename-intent'; +import * as handleFolderRenameIntentModule from './rename/handle-folder-rename-intent'; +import * as handleOfflineUploadOnRenameModule from './rename/handle-temporal-file-upload-on-rename'; +import { call, calls, partialSpyOn } from '../../../../../../tests/vitest/utils.helper'; + +describe('rename', () => { + const fileHandlerMock = partialSpyOn(handleFileRenameIntentModule, 'handleFileRenameIntent'); + const folderHandlerMock = partialSpyOn(handleFolderRenameIntentModule, 'handleFolderRenameIntent'); + const uploadHandlerMock = partialSpyOn(handleOfflineUploadOnRenameModule, 'handleTemporalFileUploadOnRename'); + + let container: ReturnType>; + + beforeEach(() => { + container = mockDeep(); + fileHandlerMock.mockResolvedValue({ data: undefined }); + folderHandlerMock.mockResolvedValue({ data: undefined }); + uploadHandlerMock.mockResolvedValue({ data: undefined }); + }); + + it('should return success when file rename succeeds', async () => { + fileHandlerMock.mockResolvedValue({ data: undefined }); + + const result = await rename({ src: '/old/file.txt', dest: '/new/file.txt', container }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeUndefined(); + call(fileHandlerMock).toStrictEqual({ src: '/old/file.txt', dest: '/new/file.txt', container }); + calls(folderHandlerMock).toHaveLength(0); + calls(uploadHandlerMock).toHaveLength(0); + }); + + it('should return success when folder rename succeeds', async () => { + fileHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/old/folder') }); + folderHandlerMock.mockResolvedValue({ data: undefined }); + + const result = await rename({ src: '/old/folder', dest: '/new/folder', container }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeUndefined(); + call(fileHandlerMock).toStrictEqual({ src: '/old/folder', dest: '/new/folder', container }); + call(folderHandlerMock).toStrictEqual({ src: '/old/folder', dest: '/new/folder', container }); + calls(uploadHandlerMock).toHaveLength(0); + }); + + it('should return success when upload on rename succeeds after ENOENT file/folder', async () => { + fileHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/offline/file.txt') }); + folderHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/offline/file.txt') }); + uploadHandlerMock.mockResolvedValue({ data: undefined }); + + const result = await rename({ src: '/offline/file.txt', dest: '/existing/file.txt', container }); + + expect(result.error).toBeUndefined(); + expect(result.data).toBeUndefined(); + call(fileHandlerMock).toStrictEqual({ src: '/offline/file.txt', dest: '/existing/file.txt', container }); + call(folderHandlerMock).toStrictEqual({ src: '/offline/file.txt', dest: '/existing/file.txt', container }); + call(uploadHandlerMock).toStrictEqual({ src: '/offline/file.txt', dest: '/existing/file.txt', container }); + }); + + it('should return error when file rename fails', async () => { + fileHandlerMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'file rename failed') }); + + const result = await rename({ src: '/old/file.txt', dest: '/new/file.txt', container }); + + expect(result.data).toBeUndefined(); + expect(result.error?.code).toBe(FuseCodes.EIO); + calls(folderHandlerMock).toHaveLength(0); + calls(uploadHandlerMock).toHaveLength(0); + }); + + it('should return non-ENOENT error from folder handler', async () => { + fileHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/old/folder') }); + folderHandlerMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'folder rename failed') }); + + const result = await rename({ src: '/old/folder', dest: '/new/folder', container }); + + expect(result.data).toBeUndefined(); + expect(result.error?.code).toBe(FuseCodes.EIO); + calls(uploadHandlerMock).toHaveLength(0); + }); + + it('should return ENOENT when upload on rename fails with no such file', async () => { + fileHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/missing/path') }); + folderHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/missing/path') }); + uploadHandlerMock.mockResolvedValue({ error: new FuseNoSuchFileOrDirectoryError('/missing/path') }); + + const result = await rename({ src: '/missing/path', dest: '/new/path', container }); + + expect(result.data).toBeUndefined(); + expect(result.error?.code).toBe(FuseCodes.ENOENT); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename.service.ts b/src/backend/features/virtual-drive/services/operations/rename.service.ts new file mode 100644 index 0000000000..c881cb1b95 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename.service.ts @@ -0,0 +1,27 @@ +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { handleFileRenameIntent } from './rename/handle-file-rename-intent'; +import { handleFolderRenameIntent } from './rename/handle-folder-rename-intent'; +import { handleTemporalFileUploadOnRename } from './rename/handle-temporal-file-upload-on-rename'; + +type Props = { + src: string; + dest: string; + container: Container; +}; + +export async function rename({ src, dest, container }: Props): Promise> { + const { error: fileError } = await handleFileRenameIntent({ src, dest, container }); + if (!fileError) return { data: undefined }; + if (fileError.code !== FuseCodes.ENOENT) return { error: fileError }; + + const { error: folderError } = await handleFolderRenameIntent({ src, dest, container }); + if (!folderError) return { data: undefined }; + if (folderError.code !== FuseCodes.ENOENT) return { error: folderError }; + + const { error: uploadError } = await handleTemporalFileUploadOnRename({ src, dest, container }); + if (uploadError) return { error: uploadError }; + return { data: undefined }; +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/compare-temporal-file.test.ts b/src/backend/features/virtual-drive/services/operations/rename/compare-temporal-file.test.ts new file mode 100644 index 0000000000..54fee9bb21 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/compare-temporal-file.test.ts @@ -0,0 +1,101 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { + TemporalFile, + TemporalFileAttributes, +} from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFilePath } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFilePath'; +import { RelativePathToAbsoluteConverter } from '../../../../../../context/virtual-drive/shared/application/RelativePathToAbsoluteConverter'; +import { TemporalFileByteByByteComparator } from '../../../../../../context/storage/TemporalFiles/application/comparation/TemporalFileByteByByteComparator'; +import { compareTemporalFile } from './compare-temporal-file'; +import { call, calls } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/folder/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +const temporalAttrs: TemporalFileAttributes = { + createdAt: new Date(), + modifiedAt: new Date(), + path: '/tmp/internxt/file.txt', + size: 100, +}; + +describe('compare-temporal-file', () => { + const virtual = File.from(fileAttrs); + const document = TemporalFile.from(temporalAttrs); + let container: ReturnType>; + let converterMock: ReturnType>; + let comparatorMock: ReturnType>; + + const props = { virtual, document, container: undefined as unknown as Container }; + + beforeEach(() => { + converterMock = mockDeep(); + comparatorMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(RelativePathToAbsoluteConverter).mockReturnValue(converterMock); + container.get.calledWith(TemporalFileByteByByteComparator).mockReturnValue(comparatorMock); + props.container = container; + }); + + it('should return true immediately when sizes differ', async () => { + // Given + const differentSizeDoc = TemporalFile.from({ ...temporalAttrs, size: 200 }); + + // When + const result = await compareTemporalFile({ virtual, document: differentSizeDoc, container }); + + // Then + expect(result).toBe(true); + calls(comparatorMock.run).toHaveLength(0); + }); + + it('should return false when byte comparator says files are equal', async () => { + // Given + converterMock.run.mockReturnValue('/abs/aabbccddeeff001122334455'); + comparatorMock.run.mockResolvedValue(true); + + // When + const result = await compareTemporalFile(props); + + // Then + expect(result).toBe(false); + call(comparatorMock.run).toStrictEqual([expect.any(TemporalFilePath), document.path]); + }); + + it('should return true when byte comparator says files differ', async () => { + // Given + converterMock.run.mockReturnValue('/abs/aabbccddeeff001122334455'); + comparatorMock.run.mockResolvedValue(false); + + // When + const result = await compareTemporalFile(props); + + // Then + expect(result).toBe(true); + }); + + it('should return false when byte comparator throws', async () => { + // Given + converterMock.run.mockReturnValue('/abs/aabbccddeeff001122334455'); + comparatorMock.run.mockRejectedValue(new Error('disk error')); + + // When + const result = await compareTemporalFile(props); + + // Then + expect(result).toBe(false); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/compare-temporal-file.ts b/src/backend/features/virtual-drive/services/operations/rename/compare-temporal-file.ts new file mode 100644 index 0000000000..e6b64a8a5f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/compare-temporal-file.ts @@ -0,0 +1,35 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { File } from '../../../../../../context/virtual-drive/files/domain/File'; +import { RelativePathToAbsoluteConverter } from '../../../../../../context/virtual-drive/shared/application/RelativePathToAbsoluteConverter'; +import { TemporalFile } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFileByteByByteComparator } from '../../../../../../context/storage/TemporalFiles/application/comparation/TemporalFileByteByByteComparator'; +import { TemporalFilePath } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFilePath'; + +type Props = { + virtual: File; + document: TemporalFile; + container: Container; +}; + +export async function compareTemporalFile({ virtual, document, container }: Props): Promise { + if (virtual.size !== document.size.value) { + return true; + } + + try { + const filePath = container.get(RelativePathToAbsoluteConverter).run(virtual.contentsId); + + const areEqual = await container + .get(TemporalFileByteByByteComparator) + .run(new TemporalFilePath(filePath), document.path); + + logger.debug({ msg: `Contents of <${virtual.path}> did not change` }); + + return !areEqual; + } catch (error) { + logger.error({ msg: 'Error comparing file contents', error }); + } + + return false; +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-file-rename-intent.test.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-file-rename-intent.test.ts new file mode 100644 index 0000000000..8722ada605 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-file-rename-intent.test.ts @@ -0,0 +1,139 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FirstsFileSearcher } from '../../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { handleFileRenameIntent } from './handle-file-rename-intent'; +import * as trashFileModule from './trash-file'; +import * as moveFileModule from './move-file'; +import { call, calls, partialSpyOn } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/old/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +describe('handle-file-rename-intent', () => { + const trashFileMock = partialSpyOn(trashFileModule, 'trashFile'); + const moveFileMock = partialSpyOn(moveFileModule, 'moveFile'); + let container: ReturnType>; + let searcherMock: ReturnType>; + + const props: Parameters[0] = { + src: '/old/file.txt', + dest: '/new/file.txt', + container: undefined as unknown as Container, + }; + + beforeEach(() => { + searcherMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(searcherMock); + props.container = container; + trashFileMock.mockResolvedValue({ data: undefined }); + moveFileMock.mockResolvedValue({ data: undefined }); + }); + + it('should return ENOENT when file is not found', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + const result = await handleFileRenameIntent(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.ENOENT); + calls(trashFileMock).toHaveLength(0); + calls(moveFileMock).toHaveLength(0); + }); + + it('should delegate to trashFile when dest starts with /.Trash', async () => { + // Given + const file = File.from(fileAttrs); + searcherMock.run.mockResolvedValue(file); + + // When + const result = await handleFileRenameIntent({ ...props, dest: '/.Trash/file.txt' }); + + // Then + expect(result.data).toBeUndefined(); + call(trashFileMock).toStrictEqual({ file, container }); + calls(moveFileMock).toHaveLength(0); + }); + + it('should delegate to moveFile when dest is a regular path', async () => { + // Given + const file = File.from(fileAttrs); + searcherMock.run.mockResolvedValue(file); + + // When + const result = await handleFileRenameIntent(props); + + // Then + expect(result.data).toBeUndefined(); + call(moveFileMock).toStrictEqual({ file, src: props.src, dest: props.dest, container }); + calls(trashFileMock).toHaveLength(0); + }); + + it('should return success without moving when dest is an auxiliary path', async () => { + // Given + const file = File.from(fileAttrs); + searcherMock.run.mockResolvedValue(file); + + // When + const result = await handleFileRenameIntent({ ...props, dest: '/old/file.txt~' }); + + // Then + expect(result.error).toBeUndefined(); + expect(result.data).toBeUndefined(); + calls(moveFileMock).toHaveLength(0); + calls(trashFileMock).toHaveLength(0); + }); + + it('should propagate error from trashFile', async () => { + // Given + const file = File.from(fileAttrs); + searcherMock.run.mockResolvedValue(file); + trashFileMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'trash failed') }); + + // When + const result = await handleFileRenameIntent({ ...props, dest: '/.Trash/file.txt' }); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + }); + + it('should propagate error from moveFile', async () => { + // Given + const file = File.from(fileAttrs); + searcherMock.run.mockResolvedValue(file); + moveFileMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'move failed') }); + + // When + const result = await handleFileRenameIntent(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + }); + + it('should search for file with EXISTS status and src path', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + await handleFileRenameIntent(props); + + // Then + call(searcherMock.run).toStrictEqual({ path: props.src, status: FileStatuses.EXISTS }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-file-rename-intent.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-file-rename-intent.ts new file mode 100644 index 0000000000..3b914e8e2f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-file-rename-intent.ts @@ -0,0 +1,27 @@ +import { Container } from 'diod'; +import { FirstsFileSearcher } from '../../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { TemporalFile } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { FuseError, FuseNoSuchFileOrDirectoryError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { trashFile } from './trash-file'; +import { moveFile } from './move-file'; + +type Props = { + src: string; + dest: string; + container: Container; +}; + +export async function handleFileRenameIntent({ src, dest, container }: Props): Promise> { + const file = await container.get(FirstsFileSearcher).run({ + path: src, + status: FileStatuses.EXISTS, + }); + + if (!file) return { error: new FuseNoSuchFileOrDirectoryError(src) }; + if (dest.startsWith('/.Trash')) return trashFile({ file, container }); + if (TemporalFile.isTemporaryPath(dest)) return { data: undefined }; + + return moveFile({ file, src, dest, container }); +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-folder-rename-intent.test.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-folder-rename-intent.test.ts new file mode 100644 index 0000000000..e5cb1c682b --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-folder-rename-intent.test.ts @@ -0,0 +1,121 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { SingleFolderMatchingSearcher } from '../../../../../../context/virtual-drive/folders/application/SingleFolderMatchingSearcher'; +import { Folder, FolderAttributes } from '../../../../../../context/virtual-drive/folders/domain/Folder'; +import { FolderStatuses } from '../../../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { handleFolderRenameIntent } from './handle-folder-rename-intent'; +import * as trashFolderModule from './trash-folder'; +import * as moveFolderModule from './move-folder'; +import { call, calls, partialSpyOn } from '../../../../../../../tests/vitest/utils.helper'; + +const folderAttrs: FolderAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + parentId: 0, + path: '/old/folder', + updatedAt: new Date().toISOString(), + createdAt: new Date().toISOString(), + status: FolderStatuses.EXISTS, +}; + +describe('handle-folder-rename-intent', () => { + const trashFolderMock = partialSpyOn(trashFolderModule, 'trashFolder'); + const moveFolderMock = partialSpyOn(moveFolderModule, 'moveFolder'); + let container: ReturnType>; + let searcherMock: ReturnType>; + + const props: Parameters[0] = { + src: '/old/folder', + dest: '/new/folder', + container: undefined as unknown as Container, + }; + + beforeEach(() => { + searcherMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(SingleFolderMatchingSearcher).mockReturnValue(searcherMock); + props.container = container; + trashFolderMock.mockResolvedValue({ data: undefined }); + moveFolderMock.mockResolvedValue({ data: undefined }); + }); + + it('should return ENOENT when folder is not found', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + const result = await handleFolderRenameIntent(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.ENOENT); + calls(trashFolderMock).toHaveLength(0); + calls(moveFolderMock).toHaveLength(0); + }); + + it('should delegate to trashFolder when dest starts with /.Trash', async () => { + // Given + const folder = Folder.from(folderAttrs); + searcherMock.run.mockResolvedValue(folder); + + // When + const result = await handleFolderRenameIntent({ ...props, dest: '/.Trash/folder' }); + + // Then + expect(result.data).toBeUndefined(); + call(trashFolderMock).toStrictEqual({ folder, container }); + calls(moveFolderMock).toHaveLength(0); + }); + + it('should delegate to moveFolder when dest is a regular path', async () => { + // Given + const folder = Folder.from(folderAttrs); + searcherMock.run.mockResolvedValue(folder); + + // When + const result = await handleFolderRenameIntent(props); + + // Then + expect(result.data).toBeUndefined(); + call(moveFolderMock).toStrictEqual({ folder, src: props.src, dest: props.dest, container }); + calls(trashFolderMock).toHaveLength(0); + }); + + it('should propagate error from trashFolder', async () => { + // Given + const folder = Folder.from(folderAttrs); + searcherMock.run.mockResolvedValue(folder); + trashFolderMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'trash failed') }); + + // When + const result = await handleFolderRenameIntent({ ...props, dest: '/.Trash/folder' }); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + }); + + it('should propagate error from moveFolder', async () => { + // Given + const folder = Folder.from(folderAttrs); + searcherMock.run.mockResolvedValue(folder); + moveFolderMock.mockResolvedValue({ error: new FuseError(FuseCodes.EIO, 'move failed') }); + + // When + const result = await handleFolderRenameIntent(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + }); + + it('should search for folder with EXISTS status and src path', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + await handleFolderRenameIntent(props); + + // Then + call(searcherMock.run).toStrictEqual({ path: props.src, status: FolderStatuses.EXISTS }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-folder-rename-intent.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-folder-rename-intent.ts new file mode 100644 index 0000000000..bae18f27f8 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-folder-rename-intent.ts @@ -0,0 +1,25 @@ +import { Container } from 'diod'; +import { SingleFolderMatchingSearcher } from '../../../../../../context/virtual-drive/folders/application/SingleFolderMatchingSearcher'; +import { FolderStatuses } from '../../../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { FuseError, FuseNoSuchFileOrDirectoryError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { trashFolder } from './trash-folder'; +import { moveFolder } from './move-folder'; + +type Props = { + src: string; + dest: string; + container: Container; +}; + +export async function handleFolderRenameIntent({ src, dest, container }: Props): Promise> { + const folder = await container.get(SingleFolderMatchingSearcher).run({ + path: src, + status: FolderStatuses.EXISTS, + }); + + if (!folder) return { error: new FuseNoSuchFileOrDirectoryError(src) }; + if (dest.startsWith('/.Trash')) return trashFolder({ folder, container }); + + return moveFolder({ folder, src, dest, container }); +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-offline-upload-on-rename.test.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-offline-upload-on-rename.test.ts new file mode 100644 index 0000000000..0e3cf3aeb0 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-offline-upload-on-rename.test.ts @@ -0,0 +1,120 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FirstsFileSearcher } from '../../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { + TemporalFile, + TemporalFileAttributes, +} from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFileByPathFinder } from '../../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { handleOfflineUploadOnRename } from './handle-offline-upload-on-rename'; +import * as uploadTemporalFileOnRenameModule from './upload-temporal-file-on-rename'; +import { call, calls, partialSpyOn } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/folder/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +const temporalAttrs: TemporalFileAttributes = { + createdAt: new Date(), + modifiedAt: new Date(), + path: '/tmp/internxt/file.txt', + size: 100, +}; + +describe('handle-offline-upload-on-rename', () => { + const uploadMock = partialSpyOn(uploadTemporalFileOnRenameModule, 'uploadTemporalFileOnRename'); + let container: ReturnType>; + let searcherMock: ReturnType>; + let finderMock: ReturnType>; + + const props: Parameters[0] = { + src: '/tmp/internxt/file.txt', + dest: '/folder/file.txt', + container: undefined as unknown as Container, + }; + + beforeEach(() => { + searcherMock = mockDeep(); + finderMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(searcherMock); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(finderMock); + props.container = container; + uploadMock.mockResolvedValue({ data: undefined }); + }); + + it('should return ENOENT when file to override is not found', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + const result = await handleOfflineUploadOnRename(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.ENOENT); + calls(uploadMock).toHaveLength(0); + }); + + it('should return ENOENT when temporal document is not found', async () => { + // Given + searcherMock.run.mockResolvedValue(File.from(fileAttrs)); + finderMock.run.mockResolvedValue(undefined); + + // When + const result = await handleOfflineUploadOnRename(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.ENOENT); + calls(uploadMock).toHaveLength(0); + }); + + it('should delegate to uploadTemporalFileOnRename when both files are found', async () => { + // Given + const virtual = File.from(fileAttrs); + const document = TemporalFile.from(temporalAttrs); + searcherMock.run.mockResolvedValue(virtual); + finderMock.run.mockResolvedValue(document); + + // When + const result = await handleOfflineUploadOnRename(props); + + // Then + expect(result.data).toBeUndefined(); + call(uploadMock).toStrictEqual({ virtual, document, src: props.src, container }); + }); + + it('should search for override file at dest with EXISTS status', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + await handleOfflineUploadOnRename(props); + + // Then + call(searcherMock.run).toStrictEqual({ path: props.dest, status: FileStatuses.EXISTS }); + }); + + it('should search for temporal document at src', async () => { + // Given + searcherMock.run.mockResolvedValue(File.from(fileAttrs)); + finderMock.run.mockResolvedValue(undefined); + + // When + await handleOfflineUploadOnRename(props); + + // Then + call(finderMock.run).toBe(props.src); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-offline-upload-on-rename.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-offline-upload-on-rename.ts new file mode 100644 index 0000000000..7f5b99e6f6 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-offline-upload-on-rename.ts @@ -0,0 +1,35 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { FirstsFileSearcher } from '../../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { TemporalFileByPathFinder } from '../../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { FuseError, FuseNoSuchFileOrDirectoryError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { uploadTemporalFileOnRename } from './upload-temporal-file-on-rename'; + +type Props = { + src: string; + dest: string; + container: Container; +}; + +export async function handleOfflineUploadOnRename({ src, dest, container }: Props): Promise> { + const fileToOverride = await container.get(FirstsFileSearcher).run({ + path: dest, + status: FileStatuses.EXISTS, + }); + + if (!fileToOverride) { + logger.debug({ msg: '[UPLOAD ON RENAME] file to override not found', dest }); + return { error: new FuseNoSuchFileOrDirectoryError(dest) }; + } + + const document = await container.get(TemporalFileByPathFinder).run(src); + + if (!document) { + logger.debug({ msg: '[UPLOAD ON RENAME] offline file not found', src }); + return { error: new FuseNoSuchFileOrDirectoryError(src) }; + } + + return uploadTemporalFileOnRename({ virtual: fileToOverride, document, src, container }); +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-temporal-file-upload-on-rename.test.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-temporal-file-upload-on-rename.test.ts new file mode 100644 index 0000000000..17d667287a --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-temporal-file-upload-on-rename.test.ts @@ -0,0 +1,120 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FirstsFileSearcher } from '../../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { + TemporalFile, + TemporalFileAttributes, +} from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFileByPathFinder } from '../../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { handleTemporalFileUploadOnRename } from './handle-temporal-file-upload-on-rename'; +import * as uploadTemporalFileOnRenameModule from './upload-temporal-file-on-rename'; +import { call, calls, partialSpyOn } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/folder/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +const temporalAttrs: TemporalFileAttributes = { + createdAt: new Date(), + modifiedAt: new Date(), + path: '/tmp/internxt/file.txt', + size: 100, +}; + +describe('handle-temporal-file-upload-on-rename', () => { + const uploadMock = partialSpyOn(uploadTemporalFileOnRenameModule, 'uploadTemporalFileOnRename'); + let container: ReturnType>; + let searcherMock: ReturnType>; + let finderMock: ReturnType>; + + const props: Parameters[0] = { + src: '/tmp/internxt/file.txt', + dest: '/folder/file.txt', + container: undefined as unknown as Container, + }; + + beforeEach(() => { + searcherMock = mockDeep(); + finderMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(searcherMock); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(finderMock); + props.container = container; + uploadMock.mockResolvedValue({ data: undefined }); + }); + + it('should return ENOENT when file to override is not found', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + const result = await handleTemporalFileUploadOnRename(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.ENOENT); + calls(uploadMock).toHaveLength(0); + }); + + it('should return ENOENT when temporal document is not found', async () => { + // Given + searcherMock.run.mockResolvedValue(File.from(fileAttrs)); + finderMock.run.mockResolvedValue(undefined); + + // When + const result = await handleTemporalFileUploadOnRename(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.ENOENT); + calls(uploadMock).toHaveLength(0); + }); + + it('should delegate to uploadTemporalFileOnRename when both files are found', async () => { + // Given + const virtual = File.from(fileAttrs); + const document = TemporalFile.from(temporalAttrs); + searcherMock.run.mockResolvedValue(virtual); + finderMock.run.mockResolvedValue(document); + + // When + const result = await handleTemporalFileUploadOnRename(props); + + // Then + expect(result.data).toBeUndefined(); + call(uploadMock).toStrictEqual({ virtual, document, src: props.src, container }); + }); + + it('should search for override file at dest with EXISTS status', async () => { + // Given + searcherMock.run.mockResolvedValue(undefined); + + // When + await handleTemporalFileUploadOnRename(props); + + // Then + call(searcherMock.run).toStrictEqual({ path: props.dest, status: FileStatuses.EXISTS }); + }); + + it('should search for temporal document at src', async () => { + // Given + searcherMock.run.mockResolvedValue(File.from(fileAttrs)); + finderMock.run.mockResolvedValue(undefined); + + // When + await handleTemporalFileUploadOnRename(props); + + // Then + call(finderMock.run).toBe(props.src); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/handle-temporal-file-upload-on-rename.ts b/src/backend/features/virtual-drive/services/operations/rename/handle-temporal-file-upload-on-rename.ts new file mode 100644 index 0000000000..c609d6e2a1 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/handle-temporal-file-upload-on-rename.ts @@ -0,0 +1,39 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { FirstsFileSearcher } from '../../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { TemporalFileByPathFinder } from '../../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { FuseError, FuseNoSuchFileOrDirectoryError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { uploadTemporalFileOnRename } from './upload-temporal-file-on-rename'; + +type Props = { + src: string; + dest: string; + container: Container; +}; + +export async function handleTemporalFileUploadOnRename({ + src, + dest, + container, +}: Props): Promise> { + const fileToOverride = await container.get(FirstsFileSearcher).run({ + path: dest, + status: FileStatuses.EXISTS, + }); + + if (!fileToOverride) { + logger.debug({ msg: '[UPLOAD ON RENAME] file to override not found', dest }); + return { error: new FuseNoSuchFileOrDirectoryError(dest) }; + } + + const document = await container.get(TemporalFileByPathFinder).run(src); + + if (!document) { + logger.debug({ msg: '[UPLOAD ON RENAME] offline file not found', src }); + return { error: new FuseNoSuchFileOrDirectoryError(src) }; + } + + return uploadTemporalFileOnRename({ virtual: fileToOverride, document, src, container }); +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/has-temporal-file-changed.test.ts b/src/backend/features/virtual-drive/services/operations/rename/has-temporal-file-changed.test.ts new file mode 100644 index 0000000000..4220edb693 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/has-temporal-file-changed.test.ts @@ -0,0 +1,101 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { + TemporalFile, + TemporalFileAttributes, +} from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFilePath } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFilePath'; +import { RelativePathToAbsoluteConverter } from '../../../../../../context/virtual-drive/shared/application/RelativePathToAbsoluteConverter'; +import { TemporalFileByteByByteComparator } from '../../../../../../context/storage/TemporalFiles/application/comparation/TemporalFileByteByByteComparator'; +import { hasTemporalFileChanged } from './has-temporal-file-changed'; +import { call, calls } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/folder/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +const temporalAttrs: TemporalFileAttributes = { + createdAt: new Date(), + modifiedAt: new Date(), + path: '/tmp/internxt/file.txt', + size: 100, +}; + +describe('has-temporal-file-changed', () => { + const virtual = File.from(fileAttrs); + const document = TemporalFile.from(temporalAttrs); + let container: ReturnType>; + let converterMock: ReturnType>; + let comparatorMock: ReturnType>; + + const props = { virtual, document, container: undefined as unknown as Container }; + + beforeEach(() => { + converterMock = mockDeep(); + comparatorMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(RelativePathToAbsoluteConverter).mockReturnValue(converterMock); + container.get.calledWith(TemporalFileByteByByteComparator).mockReturnValue(comparatorMock); + props.container = container; + }); + + it('should return true immediately when sizes differ', async () => { + // Given + const differentSizeDoc = TemporalFile.from({ ...temporalAttrs, size: 200 }); + + // When + const result = await hasTemporalFileChanged({ virtual, document: differentSizeDoc, container }); + + // Then + expect(result).toBe(true); + calls(comparatorMock.run).toHaveLength(0); + }); + + it('should return false when byte comparator says files are equal', async () => { + // Given + converterMock.run.mockReturnValue('/abs/aabbccddeeff001122334455'); + comparatorMock.run.mockResolvedValue(true); + + // When + const result = await hasTemporalFileChanged(props); + + // Then + expect(result).toBe(false); + call(comparatorMock.run).toStrictEqual([expect.any(TemporalFilePath), document.path]); + }); + + it('should return true when byte comparator says files differ', async () => { + // Given + converterMock.run.mockReturnValue('/abs/aabbccddeeff001122334455'); + comparatorMock.run.mockResolvedValue(false); + + // When + const result = await hasTemporalFileChanged(props); + + // Then + expect(result).toBe(true); + }); + + it('should return false when byte comparator throws', async () => { + // Given + converterMock.run.mockReturnValue('/abs/aabbccddeeff001122334455'); + comparatorMock.run.mockRejectedValue(new Error('disk error')); + + // When + const result = await hasTemporalFileChanged(props); + + // Then + expect(result).toBe(false); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/has-temporal-file-changed.ts b/src/backend/features/virtual-drive/services/operations/rename/has-temporal-file-changed.ts new file mode 100644 index 0000000000..28558c546e --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/has-temporal-file-changed.ts @@ -0,0 +1,35 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { File } from '../../../../../../context/virtual-drive/files/domain/File'; +import { RelativePathToAbsoluteConverter } from '../../../../../../context/virtual-drive/shared/application/RelativePathToAbsoluteConverter'; +import { TemporalFile } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFileByteByByteComparator } from '../../../../../../context/storage/TemporalFiles/application/comparation/TemporalFileByteByByteComparator'; +import { TemporalFilePath } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFilePath'; + +type Props = { + virtual: File; + document: TemporalFile; + container: Container; +}; + +export async function hasTemporalFileChanged({ virtual, document, container }: Props): Promise { + if (virtual.size !== document.size.value) { + return true; + } + + try { + const filePath = container.get(RelativePathToAbsoluteConverter).run(virtual.contentsId); + + const areEqual = await container + .get(TemporalFileByteByByteComparator) + .run(new TemporalFilePath(filePath), document.path); + + logger.debug({ msg: `Contents of <${virtual.path}> did not change` }); + + return !areEqual; + } catch (error) { + logger.error({ msg: 'Error comparing file contents', error }); + } + + return false; +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/move-file.test.ts b/src/backend/features/virtual-drive/services/operations/rename/move-file.test.ts new file mode 100644 index 0000000000..3602e64975 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/move-file.test.ts @@ -0,0 +1,113 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FilePathUpdater } from '../../../../../../context/virtual-drive/files/application/move/FilePathUpdater'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { SyncFileMessenger } from '../../../../../../context/virtual-drive/files/domain/SyncFileMessenger'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { DriveDesktopError } from '../../../../../../context/shared/domain/errors/DriveDesktopError'; +import { moveFile } from './move-file'; +import { call } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/old/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +describe('move-file', () => { + const file = File.from(fileAttrs); + let container: ReturnType>; + let updaterMock: ReturnType>; + let messengerMock: ReturnType>; + + const props = { file, src: '/old/file.txt', dest: '/new/file.txt', container: undefined as unknown as Container }; + + beforeEach(() => { + updaterMock = mockDeep(); + messengerMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FilePathUpdater).mockReturnValue(updaterMock); + container.get.calledWith(SyncFileMessenger).mockReturnValue(messengerMock); + props.container = container; + }); + + it('should notify renaming and renamed and return success', async () => { + // Given + updaterMock.run.mockResolvedValue(undefined); + + // When + const result = await moveFile(props); + + // Then + expect(result.data).toBeUndefined(); + expect(result.error).toBeUndefined(); + call(updaterMock.run).toStrictEqual([file.contentsId, '/new/file.txt']); + }); + + it('should notify renaming before and renamed after update', async () => { + // Given + const order: string[] = []; + messengerMock.renaming.mockImplementation(async () => { + order.push('renaming'); + }); + updaterMock.run.mockImplementation(async () => { + order.push('run'); + }); + messengerMock.renamed.mockImplementation(async () => { + order.push('renamed'); + }); + + // When + await moveFile(props); + + // Then + expect(order).toStrictEqual(['renaming', 'run', 'renamed']); + }); + + it('should report RENAME_ERROR with src basename and return FuseError when updater throws FuseError', async () => { + // Given + const fuseError = new FuseError(FuseCodes.EIO, 'io error'); + updaterMock.run.mockRejectedValue(fuseError); + + // When + const result = await moveFile(props); + + // Then + expect(result.error).toBe(fuseError); + call(messengerMock.issues).toMatchObject({ error: 'RENAME_ERROR', name: 'file.txt' }); + }); + + it('should report RENAME_ERROR with cause from DriveDesktopError', async () => { + // Given + const domainError = new DriveDesktopError('NOT_EXISTS', 'not found'); + updaterMock.run.mockRejectedValue(domainError); + + // When + const result = await moveFile(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issues).toMatchObject({ error: 'RENAME_ERROR', cause: 'NOT_EXISTS', name: 'file.txt' }); + }); + + it('should report RENAME_ERROR with UNKNOWN cause for non-DriveDesktopError throws', async () => { + // Given + updaterMock.run.mockRejectedValue(new Error('unexpected')); + + // When + const result = await moveFile(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issues).toMatchObject({ error: 'RENAME_ERROR', cause: 'UNKNOWN', name: 'file.txt' }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/move-file.ts b/src/backend/features/virtual-drive/services/operations/rename/move-file.ts new file mode 100644 index 0000000000..c76a8a3838 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/move-file.ts @@ -0,0 +1,39 @@ +import { Container } from 'diod'; +import { basename } from 'node:path'; +import { DriveDesktopError } from '../../../../../../context/shared/domain/errors/DriveDesktopError'; +import { FilePathUpdater } from '../../../../../../context/virtual-drive/files/application/move/FilePathUpdater'; +import { File } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FilePath } from '../../../../../../context/virtual-drive/files/domain/FilePath'; +import { SyncFileMessenger } from '../../../../../../context/virtual-drive/files/domain/SyncFileMessenger'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { SyncError } from '../../../../../../shared/issues/SyncErrorCause'; +import { FuseError, FuseUnknownError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; + +type Props = { + file: File; + src: string; + dest: string; + container: Container; +}; + +export async function moveFile({ file, src, dest, container }: Props): Promise> { + try { + const desiredPath = new FilePath(dest); + + await container.get(SyncFileMessenger).renaming(file.nameWithExtension, desiredPath.nameWithExtension()); + await container.get(FilePathUpdater).run(file.contentsId, dest); + await container.get(SyncFileMessenger).renamed(file.nameWithExtension, desiredPath.nameWithExtension()); + + return { data: undefined }; + } catch (error) { + const cause: SyncError = error instanceof DriveDesktopError ? error.cause : 'UNKNOWN'; + + await container.get(SyncFileMessenger).issues({ + error: 'RENAME_ERROR', + cause, + name: basename(src), + }); + + return { error: error instanceof FuseError ? error : new FuseUnknownError() }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/move-folder.test.ts b/src/backend/features/virtual-drive/services/operations/rename/move-folder.test.ts new file mode 100644 index 0000000000..2592d290dc --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/move-folder.test.ts @@ -0,0 +1,96 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FolderPathUpdater } from '../../../../../../context/virtual-drive/folders/application/FolderPathUpdater'; +import { Folder, FolderAttributes } from '../../../../../../context/virtual-drive/folders/domain/Folder'; +import { FolderStatuses } from '../../../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { SyncFolderMessenger } from '../../../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { moveFolder } from './move-folder'; +import { call } from '../../../../../../../tests/vitest/utils.helper'; + +const folderAttrs: FolderAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + parentId: 0, + path: '/old/folder', + updatedAt: new Date().toISOString(), + createdAt: new Date().toISOString(), + status: FolderStatuses.EXISTS, +}; + +describe('move-folder', () => { + const folder = Folder.from(folderAttrs); + let container: ReturnType>; + let updaterMock: ReturnType>; + let messengerMock: ReturnType>; + + const props = { folder, src: '/old/folder', dest: '/new/folder', container: undefined as unknown as Container }; + + beforeEach(() => { + updaterMock = mockDeep(); + messengerMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FolderPathUpdater).mockReturnValue(updaterMock); + container.get.calledWith(SyncFolderMessenger).mockReturnValue(messengerMock); + props.container = container; + }); + + it('should notify rename and renamed and return success', async () => { + // Given + updaterMock.run.mockResolvedValue(undefined); + + // When + const result = await moveFolder(props); + + // Then + expect(result.data).toBeUndefined(); + expect(result.error).toBeUndefined(); + call(updaterMock.run).toStrictEqual([folder.uuid, '/new/folder']); + }); + + it('should notify rename before and renamed after update', async () => { + // Given + const order: string[] = []; + messengerMock.rename.mockImplementation(async () => { + order.push('rename'); + }); + updaterMock.run.mockImplementation(async () => { + order.push('run'); + }); + messengerMock.renamed.mockImplementation(async () => { + order.push('renamed'); + }); + + // When + await moveFolder(props); + + // Then + expect(order).toStrictEqual(['rename', 'run', 'renamed']); + }); + + it('should report FOLDER_RENAME_ERROR with src basename and return FuseError when updater throws FuseError', async () => { + // Given + const fuseError = new FuseError(FuseCodes.EIO, 'io error'); + updaterMock.run.mockRejectedValue(fuseError); + + // When + const result = await moveFolder(props); + + // Then + expect(result.error).toBe(fuseError); + call(messengerMock.issue).toMatchObject({ error: 'FOLDER_RENAME_ERROR', cause: 'UNKNOWN', name: 'folder' }); + }); + + it('should report FOLDER_RENAME_ERROR and return FuseUnknownError for non-FuseError throws', async () => { + // Given + updaterMock.run.mockRejectedValue(new Error('unexpected')); + + // When + const result = await moveFolder(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issue).toMatchObject({ error: 'FOLDER_RENAME_ERROR', cause: 'UNKNOWN', name: 'folder' }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/move-folder.ts b/src/backend/features/virtual-drive/services/operations/rename/move-folder.ts new file mode 100644 index 0000000000..b92a6e28fc --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/move-folder.ts @@ -0,0 +1,35 @@ +import { Container } from 'diod'; +import { basename } from 'node:path'; +import { FolderPathUpdater } from '../../../../../../context/virtual-drive/folders/application/FolderPathUpdater'; +import { Folder } from '../../../../../../context/virtual-drive/folders/domain/Folder'; +import { FolderPath } from '../../../../../../context/virtual-drive/folders/domain/FolderPath'; +import { SyncFolderMessenger } from '../../../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { FuseError, FuseUnknownError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; + +type Props = { + folder: Folder; + src: string; + dest: string; + container: Container; +}; + +export async function moveFolder({ folder, src, dest, container }: Props): Promise> { + try { + const desiredPath = new FolderPath(dest); + + await container.get(SyncFolderMessenger).rename(folder.name, desiredPath.name()); + await container.get(FolderPathUpdater).run(folder.uuid, dest); + await container.get(SyncFolderMessenger).renamed(folder.name, desiredPath.name()); + + return { data: undefined }; + } catch (error) { + await container.get(SyncFolderMessenger).issue({ + error: 'FOLDER_RENAME_ERROR', + cause: 'UNKNOWN', + name: basename(src), + }); + + return { error: error instanceof FuseError ? error : new FuseUnknownError() }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/trash-file.test.ts b/src/backend/features/virtual-drive/services/operations/rename/trash-file.test.ts new file mode 100644 index 0000000000..5708b793de --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/trash-file.test.ts @@ -0,0 +1,93 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FileTrasher } from '../../../../../../context/virtual-drive/files/application/trash/FileTrasher'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { SyncFileMessenger } from '../../../../../../context/virtual-drive/files/domain/SyncFileMessenger'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { DriveDesktopError } from '../../../../../../context/shared/domain/errors/DriveDesktopError'; +import { trashFile } from './trash-file'; +import { call } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/folder/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +describe('trash-file', () => { + const file = File.from(fileAttrs); + let container: ReturnType>; + let trasherMock: ReturnType>; + let messengerMock: ReturnType>; + + const props = { file, container: undefined as unknown as Container }; + + beforeEach(() => { + trasherMock = mockDeep(); + messengerMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FileTrasher).mockReturnValue(trasherMock); + container.get.calledWith(SyncFileMessenger).mockReturnValue(messengerMock); + props.container = container; + }); + + it('should return success when file is trashed', async () => { + // Given + trasherMock.run.mockResolvedValue(undefined); + + // When + const result = await trashFile(props); + + // Then + expect(result.data).toBeUndefined(); + expect(result.error).toBeUndefined(); + call(trasherMock.run).toBe(file.contentsId); + }); + + it('should report DELETE_ERROR and return FuseError when trasher throws FuseError', async () => { + // Given + const fuseError = new FuseError(FuseCodes.EIO, 'io error'); + trasherMock.run.mockRejectedValue(fuseError); + + // When + const result = await trashFile(props); + + // Then + expect(result.error).toBe(fuseError); + call(messengerMock.issues).toMatchObject({ error: 'DELETE_ERROR', name: file.name }); + }); + + it('should report DELETE_ERROR with cause from DriveDesktopError', async () => { + // Given + const domainError = new DriveDesktopError('NOT_EXISTS', 'not found'); + trasherMock.run.mockRejectedValue(domainError); + + // When + const result = await trashFile(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issues).toMatchObject({ error: 'DELETE_ERROR', cause: 'NOT_EXISTS', name: file.name }); + }); + + it('should report DELETE_ERROR with UNKNOWN cause for non-DriveDesktopError throws', async () => { + // Given + trasherMock.run.mockRejectedValue(new Error('unexpected')); + + // When + const result = await trashFile(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issues).toMatchObject({ error: 'DELETE_ERROR', cause: 'UNKNOWN', name: file.name }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/trash-file.ts b/src/backend/features/virtual-drive/services/operations/rename/trash-file.ts new file mode 100644 index 0000000000..e4125da20f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/trash-file.ts @@ -0,0 +1,30 @@ +import { Container } from 'diod'; +import { DriveDesktopError } from '../../../../../../context/shared/domain/errors/DriveDesktopError'; +import { FileTrasher } from '../../../../../../context/virtual-drive/files/application/trash/FileTrasher'; +import { File } from '../../../../../../context/virtual-drive/files/domain/File'; +import { SyncFileMessenger } from '../../../../../../context/virtual-drive/files/domain/SyncFileMessenger'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { SyncError } from '../../../../../../shared/issues/SyncErrorCause'; +import { FuseError, FuseUnknownError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; + +type Props = { + file: File; + container: Container; +}; + +export async function trashFile({ file, container }: Props): Promise> { + try { + await container.get(FileTrasher).run(file.contentsId); + return { data: undefined }; + } catch (error) { + const cause: SyncError = error instanceof DriveDesktopError ? error.cause : 'UNKNOWN'; + + await container.get(SyncFileMessenger).issues({ + error: 'DELETE_ERROR', + cause, + name: file.name, + }); + + return { error: error instanceof FuseError ? error : new FuseUnknownError() }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/trash-folder.test.ts b/src/backend/features/virtual-drive/services/operations/rename/trash-folder.test.ts new file mode 100644 index 0000000000..be8470a7d3 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/trash-folder.test.ts @@ -0,0 +1,90 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FolderDeleter } from '../../../../../../context/virtual-drive/folders/application/FolderDeleter'; +import { Folder, FolderAttributes } from '../../../../../../context/virtual-drive/folders/domain/Folder'; +import { FolderStatuses } from '../../../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { SyncFileMessenger } from '../../../../../../context/virtual-drive/files/domain/SyncFileMessenger'; +import { FuseCodes } from '../../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { DriveDesktopError } from '../../../../../../context/shared/domain/errors/DriveDesktopError'; +import { trashFolder } from './trash-folder'; +import { call } from '../../../../../../../tests/vitest/utils.helper'; + +const folderAttrs: FolderAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + parentId: 0, + path: '/folder', + updatedAt: new Date().toISOString(), + createdAt: new Date().toISOString(), + status: FolderStatuses.EXISTS, +}; + +describe('trash-folder', () => { + const folder = Folder.from(folderAttrs); + let container: ReturnType>; + let deleterMock: ReturnType>; + let messengerMock: ReturnType>; + + const props = { folder, container: undefined as unknown as Container }; + + beforeEach(() => { + deleterMock = mockDeep(); + messengerMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(FolderDeleter).mockReturnValue(deleterMock); + container.get.calledWith(SyncFileMessenger).mockReturnValue(messengerMock); + props.container = container; + }); + + it('should return success when folder is trashed', async () => { + // Given + deleterMock.run.mockResolvedValue(undefined); + + // When + const result = await trashFolder(props); + + // Then + expect(result.data).toBeUndefined(); + expect(result.error).toBeUndefined(); + call(deleterMock.run).toBe(folder.uuid); + }); + + it('should report DELETE_ERROR and return FuseError when deleter throws FuseError', async () => { + // Given + const fuseError = new FuseError(FuseCodes.EIO, 'io error'); + deleterMock.run.mockRejectedValue(fuseError); + + // When + const result = await trashFolder(props); + + // Then + expect(result.error).toBe(fuseError); + call(messengerMock.issues).toMatchObject({ error: 'DELETE_ERROR', name: folder.name }); + }); + + it('should report DELETE_ERROR with cause from DriveDesktopError', async () => { + // Given + const domainError = new DriveDesktopError('NOT_EXISTS', 'not found'); + deleterMock.run.mockRejectedValue(domainError); + + // When + const result = await trashFolder(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issues).toMatchObject({ error: 'DELETE_ERROR', cause: 'NOT_EXISTS', name: folder.name }); + }); + + it('should report DELETE_ERROR with UNKNOWN cause for non-DriveDesktopError throws', async () => { + // Given + deleterMock.run.mockRejectedValue(new Error('unexpected')); + + // When + const result = await trashFolder(props); + + // Then + expect(result.error?.code).toBe(FuseCodes.EIO); + call(messengerMock.issues).toMatchObject({ error: 'DELETE_ERROR', cause: 'UNKNOWN', name: folder.name }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/trash-folder.ts b/src/backend/features/virtual-drive/services/operations/rename/trash-folder.ts new file mode 100644 index 0000000000..89a00d0b9f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/trash-folder.ts @@ -0,0 +1,30 @@ +import { Container } from 'diod'; +import { DriveDesktopError } from '../../../../../../context/shared/domain/errors/DriveDesktopError'; +import { FolderDeleter } from '../../../../../../context/virtual-drive/folders/application/FolderDeleter'; +import { Folder } from '../../../../../../context/virtual-drive/folders/domain/Folder'; +import { SyncFileMessenger } from '../../../../../../context/virtual-drive/files/domain/SyncFileMessenger'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { SyncError } from '../../../../../../shared/issues/SyncErrorCause'; +import { FuseError, FuseUnknownError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; + +type Props = { + folder: Folder; + container: Container; +}; + +export async function trashFolder({ folder, container }: Props): Promise> { + try { + await container.get(FolderDeleter).run(folder.uuid); + return { data: undefined }; + } catch (error) { + const cause: SyncError = error instanceof DriveDesktopError ? error.cause : 'UNKNOWN'; + + await container.get(SyncFileMessenger).issues({ + error: 'DELETE_ERROR', + cause, + name: folder.name, + }); + + return { error: error instanceof FuseError ? error : new FuseUnknownError() }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/rename/upload-temporal-file-on-rename.test.ts b/src/backend/features/virtual-drive/services/operations/rename/upload-temporal-file-on-rename.test.ts new file mode 100644 index 0000000000..a9eeb7b799 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/upload-temporal-file-on-rename.test.ts @@ -0,0 +1,106 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { File, FileAttributes } from '../../../../../../context/virtual-drive/files/domain/File'; +import { FileStatuses } from '../../../../../../context/virtual-drive/files/domain/FileStatus'; +import { + TemporalFile, + TemporalFileAttributes, +} from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFileUploader } from '../../../../../../context/storage/TemporalFiles/application/upload/TemporalFileUploader'; +import { TemporalFileDeleter } from '../../../../../../context/storage/TemporalFiles/application/deletion/TemporalFileDeleter'; +import { uploadTemporalFileOnRename } from './upload-temporal-file-on-rename'; +import * as compareTemporalFileModule from './has-temporal-file-changed'; +import { call, calls, partialSpyOn } from '../../../../../../../tests/vitest/utils.helper'; + +const fileAttrs: FileAttributes = { + id: 1, + uuid: 'a1b2c3d4-e5f6-4a7b-8c9d-0e1f2a3b4c5d', + contentsId: 'aabbccddeeff001122334455', + folderId: 0, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/folder/file.txt', + size: 100, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, +}; + +const temporalAttrs: TemporalFileAttributes = { + createdAt: new Date(), + modifiedAt: new Date(), + path: '/tmp/internxt/file.txt', + size: 100, +}; + +describe('upload-temporal-file-on-rename', () => { + const compareTemporalFileMock = partialSpyOn(compareTemporalFileModule, 'hasTemporalFileChanged'); + const virtual = File.from(fileAttrs); + const document = TemporalFile.from(temporalAttrs); + let container: ReturnType>; + let uploaderMock: ReturnType>; + let deleterMock: ReturnType>; + + const props = { virtual, document, src: '/tmp/internxt/file.txt', container: undefined as unknown as Container }; + + beforeEach(() => { + uploaderMock = mockDeep(); + deleterMock = mockDeep(); + container = mockDeep(); + container.get.calledWith(TemporalFileUploader).mockReturnValue(uploaderMock); + container.get.calledWith(TemporalFileDeleter).mockReturnValue(deleterMock); + props.container = container; + }); + + it('should delete temporal file and return success when files are equal', async () => { + // Given + compareTemporalFileMock.mockResolvedValue(false); + + // When + const result = await uploadTemporalFileOnRename(props); + + // Then + expect(result.data).toBeUndefined(); + calls(uploaderMock.run).toHaveLength(0); + call(deleterMock.run).toBe(props.src); + }); + + it('should upload then delete and return success when files differ', async () => { + // Given + compareTemporalFileMock.mockResolvedValue(true); + uploaderMock.run.mockResolvedValue('uploaded-file-id'); + + // When + const result = await uploadTemporalFileOnRename(props); + + // Then + expect(result.data).toBeUndefined(); + call(uploaderMock.run).toStrictEqual([ + document, + { + contentsId: virtual.contentsId, + name: virtual.name, + extension: virtual.type, + }, + ]); + call(deleterMock.run).toBe(props.src); + }); + + it('should upload before deleting when files differ', async () => { + // Given + const order: string[] = []; + compareTemporalFileMock.mockResolvedValue(true); + uploaderMock.run.mockImplementation(async () => { + order.push('upload'); + return 'uploaded-file-id'; + }); + deleterMock.run.mockImplementation(async () => { + order.push('delete'); + }); + + // When + await uploadTemporalFileOnRename(props); + + // Then + expect(order).toStrictEqual(['upload', 'delete']); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rename/upload-temporal-file-on-rename.ts b/src/backend/features/virtual-drive/services/operations/rename/upload-temporal-file-on-rename.ts new file mode 100644 index 0000000000..59ddb49406 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rename/upload-temporal-file-on-rename.ts @@ -0,0 +1,38 @@ +import { Container } from 'diod'; +import { File } from '../../../../../../context/virtual-drive/files/domain/File'; +import { TemporalFile } from '../../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { TemporalFileUploader } from '../../../../../../context/storage/TemporalFiles/application/upload/TemporalFileUploader'; +import { TemporalFileDeleter } from '../../../../../../context/storage/TemporalFiles/application/deletion/TemporalFileDeleter'; +import { FuseError } from '../../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../../context/shared/domain/Result'; +import { hasTemporalFileChanged } from './has-temporal-file-changed'; + +type Props = { + virtual: File; + document: TemporalFile; + src: string; + container: Container; +}; + +export async function uploadTemporalFileOnRename({ + virtual, + document, + src, + container, +}: Props): Promise> { + const hasChanged = await hasTemporalFileChanged({ virtual, document, container }); + + if (!hasChanged) { + await container.get(TemporalFileDeleter).run(src); + return { data: undefined }; + } + + await container.get(TemporalFileUploader).run(document, { + contentsId: virtual.contentsId, + name: virtual.name, + extension: virtual.type, + }); + + await container.get(TemporalFileDeleter).run(src); + return { data: undefined }; +} diff --git a/src/backend/features/virtual-drive/services/operations/rmdir.service.test.ts b/src/backend/features/virtual-drive/services/operations/rmdir.service.test.ts new file mode 100644 index 0000000000..c2cd0df12b --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rmdir.service.test.ts @@ -0,0 +1,87 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FolderDeleter } from '../../../../../context/virtual-drive/folders/application/FolderDeleter'; +import { SingleFolderMatchingFinder } from '../../../../../context/virtual-drive/folders/application/SingleFolderMatchingFinder'; +import { Folder } from '../../../../../context/virtual-drive/folders/domain/Folder'; +import { FolderStatuses } from '../../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { SyncFolderMessenger } from '../../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { FolderNotFoundError } from '../../../../../context/virtual-drive/folders/domain/errors/FolderNotFoundError'; +import { rmdir } from './rmdir.service'; + +vi.mock('@internxt/drive-desktop-core/build/backend'); + +describe('rmdir', () => { + let container: ReturnType>; + const folderFinder = mockDeep(); + const folderDeleter = mockDeep(); + const syncFolderMessenger = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + + container.get.calledWith(SingleFolderMatchingFinder).mockReturnValue(folderFinder); + container.get.calledWith(FolderDeleter).mockReturnValue(folderDeleter); + container.get.calledWith(SyncFolderMessenger).mockReturnValue(syncFolderMessenger); + }); + + it('should trash folder when folder exists', async () => { + folderFinder.run.mockResolvedValue( + Folder.from({ + id: 1, + uuid: '550e8400-e29b-41d4-a716-446655440010', + parentId: 2, + path: '/some/folder', + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + status: FolderStatuses.EXISTS, + }), + ); + + const { data, error } = await rmdir('/some/folder', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(folderDeleter.run).toHaveBeenCalledWith('550e8400-e29b-41d4-a716-446655440010'); + expect(syncFolderMessenger.issue).not.toHaveBeenCalled(); + }); + + it('should return ENOENT when folder is not found', async () => { + folderFinder.run.mockRejectedValue(new FolderNotFoundError('/missing/folder')); + + const { data, error } = await rmdir('/missing/folder', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.ENOENT); + expect(syncFolderMessenger.issue).toHaveBeenCalledWith({ + error: 'FOLDER_TRASH_ERROR', + cause: 'UNKNOWN', + name: 'folder', + }); + }); + + it('should return EIO when trash fails', async () => { + folderFinder.run.mockResolvedValue( + Folder.from({ + id: 1, + uuid: '550e8400-e29b-41d4-a716-446655440011', + parentId: 2, + path: '/some/folder', + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + status: FolderStatuses.EXISTS, + }), + ); + folderDeleter.run.mockRejectedValue(new Error('boom')); + + const { data, error } = await rmdir('/some/folder', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + expect(syncFolderMessenger.issue).toHaveBeenCalledWith({ + error: 'FOLDER_TRASH_ERROR', + cause: 'UNKNOWN', + name: 'folder', + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/rmdir.service.ts b/src/backend/features/virtual-drive/services/operations/rmdir.service.ts new file mode 100644 index 0000000000..55e689ac7a --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/rmdir.service.ts @@ -0,0 +1,40 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { basename } from 'node:path'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { FolderDeleter } from '../../../../../context/virtual-drive/folders/application/FolderDeleter'; +import { SingleFolderMatchingFinder } from '../../../../../context/virtual-drive/folders/application/SingleFolderMatchingFinder'; +import { FolderStatuses } from '../../../../../context/virtual-drive/folders/domain/FolderStatus'; +import { SyncFolderMessenger } from '../../../../../context/virtual-drive/folders/domain/SyncFolderMessenger'; +import { FolderNotFoundError } from '../../../../../context/virtual-drive/folders/domain/errors/FolderNotFoundError'; + +export async function rmdir(path: string, container: Container): Promise> { + try { + const folder = await container.get(SingleFolderMatchingFinder).run({ + path, + status: FolderStatuses.EXISTS, + }); + + await container.get(FolderDeleter).run(folder.uuid); + + return { data: undefined }; + } catch (error: unknown) { + await container.get(SyncFolderMessenger).issue({ + error: 'FOLDER_TRASH_ERROR', + cause: 'UNKNOWN', + name: basename(path), + }); + + if (error instanceof FolderNotFoundError) { + const msg = `[FUSE - Rmdir] Folder not found: ${path}`; + logger.error({ msg }); + return { error: new FuseError(FuseCodes.ENOENT, msg) }; + } + + const msg = `[FUSE - Rmdir] Unable to trash folder: ${path}`; + logger.error({ msg }); + return { error: new FuseError(FuseCodes.EIO, msg) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/statfs.service.test.ts b/src/backend/features/virtual-drive/services/operations/statfs.service.test.ts new file mode 100644 index 0000000000..d11ddc71e7 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/statfs.service.test.ts @@ -0,0 +1,43 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { statfs } from './statfs.service'; +import { TemporalFileRepository } from '../../../../../context/storage/TemporalFiles/domain/TemporalFileRepository'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; + +const diskStats = { + blocks: 2000000, + bfree: 1000000, + bavail: 990000, + files: 500000, + ffree: 400000, + bsize: 4096, +}; + +describe('statfs', () => { + let container: ReturnType>; + let repository: ReturnType>; + + beforeEach(() => { + repository = mockDeep(); + container = mockDeep(); + container.get.calledWith(TemporalFileRepository).mockReturnValue(repository); + }); + + it('should return disk stats on success', async () => { + repository.statFs.mockResolvedValue(diskStats); + + const result = await statfs({ container }); + + expect(result.data).toStrictEqual({ ...diskStats, nameLen: 255 }); + expect(result.error).toBeUndefined(); + }); + + it('should return EIO when repository throws', async () => { + repository.statFs.mockRejectedValue(new Error('disk read error')); + + const result = await statfs({ container }); + + expect(result.data).toBeUndefined(); + expect(result.error?.code).toBe(FuseCodes.EIO); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/statfs.service.ts b/src/backend/features/virtual-drive/services/operations/statfs.service.ts new file mode 100644 index 0000000000..69d8dc848f --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/statfs.service.ts @@ -0,0 +1,38 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { type Result } from '../../../../../context/shared/domain/Result'; +import { FuseError, FuseIOError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { TemporalFileRepository } from '../../../../../context/storage/TemporalFiles/domain/TemporalFileRepository'; + +/** + * v.2.6.0 + * Esteban Galvis Triana + * Standard Linux NAME_MAX: the maximum number of bytes in a filename component. + * Without this, f_namelen in statfs would be 0 and file managers (e.g. Nautilus) + * would reject every rename attempt as "File name is too long". + */ +const NAME_MAX = 255; + +export type StatFsResult = { + blocks: number; + bfree: number; + bavail: number; + files: number; + ffree: number; + bsize: number; + nameLen: number; +}; + +type Props = { + container: Container; +}; + +export async function statfs({ container }: Props): Promise> { + try { + const stats = await container.get(TemporalFileRepository).statFs(); + return { data: { ...stats, nameLen: NAME_MAX } }; + } catch (err) { + logger.error({ msg: '[StatFs] Failed to read filesystem stats', error: err }); + return { error: new FuseIOError('Failed to read filesystem stats') }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/truncate.service.test.ts b/src/backend/features/virtual-drive/services/operations/truncate.service.test.ts new file mode 100644 index 0000000000..3cb819e729 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/truncate.service.test.ts @@ -0,0 +1,71 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFileTruncater } from '../../../../../context/storage/TemporalFiles/application/truncate/TemporalFileTruncater'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { truncate } from './truncate.service'; + +describe('truncate', () => { + let container: ReturnType>; + const firstsFileSearcher = mockDeep(); + const temporalFileByPathFinder = mockDeep(); + const temporalFileCreator = mockDeep(); + const temporalFileTruncater = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + container.get.calledWith(FirstsFileSearcher).mockReturnValue(firstsFileSearcher); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFileByPathFinder); + container.get.calledWith(TemporalFileCreator).mockReturnValue(temporalFileCreator); + container.get.calledWith(TemporalFileTruncater).mockReturnValue(temporalFileTruncater); + + firstsFileSearcher.run.mockResolvedValue( + {} as unknown as NonNullable>>, + ); + temporalFileByPathFinder.run.mockResolvedValue(undefined); + }); + + it('should create temporal file and truncate it when not present', async () => { + const { data, error } = await truncate({ + path: '/some/file.txt', + size: 0, + container, + }); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(temporalFileCreator.run).toBeCalledWith('/some/file.txt'); + expect(temporalFileTruncater.run).toBeCalledWith('/some/file.txt', 0); + }); + + it('should return ENOENT when virtual and temporal file do not exist', async () => { + firstsFileSearcher.run.mockResolvedValue(undefined); + temporalFileByPathFinder.run.mockResolvedValue(undefined); + + const { data, error } = await truncate({ + path: '/some/file.txt', + size: 0, + container, + }); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.ENOENT); + expect(temporalFileCreator.run).not.toHaveBeenCalled(); + expect(temporalFileTruncater.run).not.toHaveBeenCalled(); + }); + + it('should return EIO when temporal truncation fails', async () => { + temporalFileTruncater.run.mockRejectedValue(new Error('boom')); + + const { data, error } = await truncate({ + path: '/some/file.txt', + size: 0, + container, + }); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/truncate.service.ts b/src/backend/features/virtual-drive/services/operations/truncate.service.ts new file mode 100644 index 0000000000..6ceb43939a --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/truncate.service.ts @@ -0,0 +1,37 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFileTruncater } from '../../../../../context/storage/TemporalFiles/application/truncate/TemporalFileTruncater'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; + +type TruncateProps = { + path: string; + size: number; + container: Container; +}; + +export async function truncate({ path, size, container }: TruncateProps): Promise> { + try { + const virtualFile = await container.get(FirstsFileSearcher).run({ path }); + const temporalFile = await container.get(TemporalFileByPathFinder).run(path); + + if (!virtualFile && !temporalFile) { + const msg = `[FUSE - Truncate] File not found: ${path}`; + return { error: new FuseError(FuseCodes.ENOENT, msg) }; + } + + if (!temporalFile) { + await container.get(TemporalFileCreator).run(path); + } + + await container.get(TemporalFileTruncater).run(path, size); + return { data: undefined }; + } catch (error: unknown) { + logger.error({ msg: '[FUSE - Truncate] Unable to truncate temporal file', error, path, size }); + return { error: new FuseError(FuseCodes.EIO, `[FUSE - Truncate] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/unlink.service.test.ts b/src/backend/features/virtual-drive/services/operations/unlink.service.test.ts new file mode 100644 index 0000000000..0665ab4a08 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/unlink.service.test.ts @@ -0,0 +1,111 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { TemporalFileDeleter } from '../../../../../context/storage/TemporalFiles/application/deletion/TemporalFileDeleter'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { FileTrasher } from '../../../../../context/virtual-drive/files/application/trash/FileTrasher'; +import { FileStatuses } from '../../../../../context/virtual-drive/files/domain/FileStatus'; +import { File } from '../../../../../context/virtual-drive/files/domain/File'; +import { unlink } from './unlink.service'; + +vi.mock('@internxt/drive-desktop-core/build/backend'); + +describe('unlink', () => { + let container: ReturnType>; + const fileSearcher = mockDeep(); + const temporalFinder = mockDeep(); + const temporalDeleter = mockDeep(); + const fileTrasher = mockDeep(); + + beforeEach(() => { + container = mockDeep(); + + container.get.calledWith(FirstsFileSearcher).mockReturnValue(fileSearcher); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFinder); + container.get.calledWith(TemporalFileDeleter).mockReturnValue(temporalDeleter); + container.get.calledWith(FileTrasher).mockReturnValue(fileTrasher); + + fileSearcher.run.mockResolvedValue(undefined); + temporalFinder.run.mockResolvedValue(undefined); + }); + + it('should trash file when file exists', async () => { + fileSearcher.run.mockResolvedValue( + File.from({ + id: 1, + uuid: '550e8400-e29b-41d4-a716-446655440001', + contentsId: 'aabbccddeeff001122334455', + folderId: 1, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/some/file.txt', + size: 1, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, + }), + ); + + const { data, error } = await unlink('/some/file.txt', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(fileTrasher.run).toHaveBeenCalledWith('aabbccddeeff001122334455'); + }); + + it('should delete temporal file when regular file does not exist', async () => { + temporalFinder.run.mockResolvedValue( + TemporalFile.from({ + createdAt: new Date(), + modifiedAt: new Date(), + path: '/some/temp.txt', + size: 10, + }), + ); + + const { data, error } = await unlink('/some/temp.txt', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(temporalDeleter.run).toHaveBeenCalledWith('/some/temp.txt'); + }); + + it('should return ENOENT when no file is found', async () => { + const { data, error } = await unlink('/missing.txt', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.ENOENT); + }); + + it('should return success when a missing auxiliary file is unlinked', async () => { + const { data, error } = await unlink('/some/file.txt~', container); + + expect(error).toBeUndefined(); + expect(data).toBeUndefined(); + expect(temporalDeleter.run).not.toHaveBeenCalled(); + }); + + it('should return EIO when file trash fails', async () => { + fileSearcher.run.mockResolvedValue( + File.from({ + id: 1, + uuid: '550e8400-e29b-41d4-a716-446655440002', + contentsId: 'ffeeddccbbaa001122334455', + folderId: 1, + createdAt: new Date().toISOString(), + modificationTime: new Date().toISOString(), + path: '/some/file.txt', + size: 1, + updatedAt: new Date().toISOString(), + status: FileStatuses.EXISTS, + }), + ); + fileTrasher.run.mockRejectedValue(new Error('boom')); + + const { data, error } = await unlink('/some/file.txt', container); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/unlink.service.ts b/src/backend/features/virtual-drive/services/operations/unlink.service.ts new file mode 100644 index 0000000000..88bf347a60 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/unlink.service.ts @@ -0,0 +1,43 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { TemporalFileDeleter } from '../../../../../context/storage/TemporalFiles/application/deletion/TemporalFileDeleter'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { FirstsFileSearcher } from '../../../../../context/virtual-drive/files/application/search/FirstsFileSearcher'; +import { FileTrasher } from '../../../../../context/virtual-drive/files/application/trash/FileTrasher'; +import { FileStatuses } from '../../../../../context/virtual-drive/files/domain/FileStatus'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; + +export async function unlink(path: string, container: Container): Promise> { + const file = await container.get(FirstsFileSearcher).run({ + path, + status: FileStatuses.EXISTS, + }); + + if (!file) { + const temporalFile = await container.get(TemporalFileByPathFinder).run(path); + + if (!temporalFile && !TemporalFile.isTemporaryPath(path)) { + const msg = `[FUSE - Unlink] File not found: ${path}`; + logger.error({ msg }); + return { error: new FuseError(FuseCodes.ENOENT, msg) }; + } + + if (temporalFile) { + await container.get(TemporalFileDeleter).run(path); + } + + return { data: undefined }; + } + + try { + await container.get(FileTrasher).run(file.contentsId); + return { data: undefined }; + } catch { + const msg = `[FUSE - Unlink] Unable to trash file: ${path}`; + logger.error({ msg }); + return { error: new FuseError(FuseCodes.EIO, msg) }; + } +} diff --git a/src/backend/features/virtual-drive/services/operations/write.service.test.ts b/src/backend/features/virtual-drive/services/operations/write.service.test.ts new file mode 100644 index 0000000000..0a299f2fce --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/write.service.test.ts @@ -0,0 +1,73 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { TemporalFileCreator } from '../../../../../context/storage/TemporalFiles/application/creation/TemporalFileCreator'; +import { TemporalFileByPathFinder } from '../../../../../context/storage/TemporalFiles/application/find/TemporalFileByPathFinder'; +import { TemporalFileWriter } from '../../../../../context/storage/TemporalFiles/application/write/TemporalFileWriter'; +import { TemporalFile } from '../../../../../context/storage/TemporalFiles/domain/TemporalFile'; +import { write } from './write.service'; + +describe('write', () => { + let container: ReturnType>; + const temporalFileWriter = mockDeep(); + const temporalFileByPathFinder = mockDeep(); + const temporalFileCreator = mockDeep(); + + beforeEach(() => { + vi.restoreAllMocks(); + container = mockDeep(); + container.get.calledWith(TemporalFileWriter).mockReturnValue(temporalFileWriter); + container.get.calledWith(TemporalFileByPathFinder).mockReturnValue(temporalFileByPathFinder); + container.get.calledWith(TemporalFileCreator).mockReturnValue(temporalFileCreator); + temporalFileByPathFinder.run.mockResolvedValue(undefined); + }); + + it('should write bytes into temporal file and return written length', async () => { + const content = Buffer.from('hello'); + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(false); + + const { data, error } = await write({ + path: '/some/file.txt', + content, + offset: 7, + container, + }); + + expect(error).toBeUndefined(); + expect(data).toBe(content.length); + expect(temporalFileCreator.run).not.toHaveBeenCalled(); + expect(temporalFileWriter.run).toHaveBeenCalledWith('/some/file.txt', content, content.length, 7); + }); + + it('should create auxiliary temporal file on first write when missing', async () => { + const content = Buffer.from('hello'); + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(true); + + const { data, error } = await write({ + path: '/.test-test-file.txt.swp', + content, + offset: 4096, + container, + }); + + expect(error).toBeUndefined(); + expect(data).toBe(content.length); + expect(temporalFileCreator.run).toHaveBeenCalledWith('/.test-test-file.txt.swp'); + expect(temporalFileWriter.run).toHaveBeenCalledWith('/.test-test-file.txt.swp', content, content.length, 4096); + }); + + it('should return EIO when temporal write fails', async () => { + vi.spyOn(TemporalFile, 'isTemporaryPath').mockReturnValue(false); + temporalFileWriter.run.mockRejectedValue(new Error('boom')); + + const { data, error } = await write({ + path: '/some/file.txt', + content: Buffer.from('hello'), + offset: 0, + container, + }); + + expect(data).toBeUndefined(); + expect(error?.code).toBe(FuseCodes.EIO); + }); +}); diff --git a/src/backend/features/virtual-drive/services/operations/write.service.ts b/src/backend/features/virtual-drive/services/operations/write.service.ts new file mode 100644 index 0000000000..bead71ddc4 --- /dev/null +++ b/src/backend/features/virtual-drive/services/operations/write.service.ts @@ -0,0 +1,25 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { Container } from 'diod'; +import { FuseCodes } from '../../../../../apps/drive/fuse/callbacks/FuseCodes'; +import { FuseError } from '../../../../../apps/drive/fuse/callbacks/FuseErrors'; +import { Result } from '../../../../../context/shared/domain/Result'; +import { TemporalFileWriter } from '../../../../../context/storage/TemporalFiles/application/write/TemporalFileWriter'; +import { ensureTemporalFileExistsForAuxiliaryPath } from './ensure-temporal-file-exists-for-auxiliary-path'; + +type WritePops = { + path: string; + content: Buffer; + offset: number; + container: Container; +}; + +export async function write({ path, content, offset, container }: WritePops): Promise> { + try { + await ensureTemporalFileExistsForAuxiliaryPath({ path, container }); + await container.get(TemporalFileWriter).run(path, content, content.length, offset); + return { data: content.length }; + } catch (error: unknown) { + logger.error({ msg: '[FUSE - Write] Unable to write temporal file', error, path, offset }); + return { error: new FuseError(FuseCodes.EIO, `[FUSE - Write] IO error: ${path}`) }; + } +} diff --git a/src/backend/features/virtual-drive/services/server.service.test.ts b/src/backend/features/virtual-drive/services/server.service.test.ts new file mode 100644 index 0000000000..b435afc59c --- /dev/null +++ b/src/backend/features/virtual-drive/services/server.service.test.ts @@ -0,0 +1,95 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { Container } from 'diod'; +import { rmSync } from 'node:fs'; +import { startFuseDaemonServer, stopFuseDaemonServer } from './server.service'; +import { DAEMON_ROUTE, OPERATIONS_ROUTE } from '../constants'; +import { PATHS } from '../../../../core/electron/paths'; + +vi.mock('../routes/daemon.routes', () => ({ + buildDaemonRouter: vi.fn().mockReturnValue('daemon-router'), +})); + +vi.mock('../routes/operations.routes', () => ({ + buildOperationsRouter: vi.fn().mockReturnValue('operations-router'), +})); + +vi.mock('node:fs', () => ({ + rmSync: vi.fn(), +})); + +const mockClose = vi.hoisted(() => vi.fn()); +const mockListen = vi.hoisted(() => vi.fn()); +const mockUse = vi.hoisted(() => vi.fn()); + +vi.mock('express', () => { + const mockApp = { use: mockUse, listen: mockListen }; + const express = vi.fn().mockReturnValue(mockApp); + (express as unknown as { json: () => string }).json = vi.fn().mockReturnValue('json-middleware'); + return { default: express }; +}); + +describe('server.service', () => { + let container: ReturnType>; + + beforeEach(() => { + container = mockDeep(); + mockListen.mockImplementation((_path: string, callback: () => void) => { + callback(); + return { close: mockClose }; + }); + mockClose.mockImplementation((callback: (err?: Error) => void) => callback()); + }); + + describe('startFuseDaemonServer', () => { + it('should register the daemon router on DAEMON_ROUTE', async () => { + await startFuseDaemonServer(container); + + expect(mockUse).toHaveBeenCalledWith(DAEMON_ROUTE, 'daemon-router'); + }); + + it('should register the operations router on OPERATIONS_ROUTE', async () => { + await startFuseDaemonServer(container); + + expect(mockUse).toHaveBeenCalledWith(OPERATIONS_ROUTE, 'operations-router'); + }); + + it('should listen on the FUSE daemon socket path', async () => { + await startFuseDaemonServer(container); + + expect(mockListen).toHaveBeenCalledWith(PATHS.FUSE_DAEMON_SOCKET, expect.any(Function)); + }); + + it('should remove the socket file before listening', async () => { + await startFuseDaemonServer(container); + + expect(rmSync).toHaveBeenCalledWith(PATHS.FUSE_DAEMON_SOCKET, { force: true }); + }); + }); + + describe('stopFuseDaemonServer', () => { + it('should resolve immediately when no server is running', async () => { + await expect(stopFuseDaemonServer()).resolves.toBeUndefined(); + }); + + it('should resolve after stopping a running server', async () => { + await startFuseDaemonServer(container); + + await expect(stopFuseDaemonServer()).resolves.toBeUndefined(); + }); + + it('should remove the socket file on stop', async () => { + await startFuseDaemonServer(container); + await stopFuseDaemonServer(); + + expect(rmSync).toHaveBeenLastCalledWith(PATHS.FUSE_DAEMON_SOCKET, { force: true }); + }); + + it('should reject when server.close returns an error', async () => { + const closeError = new Error('close failed'); + mockClose.mockImplementation((callback: (err?: Error) => void) => callback(closeError)); + + await startFuseDaemonServer(container); + await expect(stopFuseDaemonServer()).rejects.toThrow('close failed'); + }); + }); +}); diff --git a/src/backend/features/virtual-drive/services/server.service.ts b/src/backend/features/virtual-drive/services/server.service.ts new file mode 100644 index 0000000000..f24d8eb71b --- /dev/null +++ b/src/backend/features/virtual-drive/services/server.service.ts @@ -0,0 +1,47 @@ +import { rmSync } from 'node:fs'; +import express from 'express'; +import { Server } from 'node:http'; +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { PATHS } from '../../../../core/electron/paths'; +import { DAEMON_ROUTE, OPERATIONS_ROUTE } from '../constants'; +import { buildDaemonRouter } from '../routes/daemon.routes'; +import { buildOperationsRouter } from '../routes/operations.routes'; + +let server: Server | null = null; + +export function startFuseDaemonServer(container: Container): Promise { + return new Promise((resolve) => { + const app = express(); + app.use(express.json()); + + app.use(DAEMON_ROUTE, buildDaemonRouter()); + app.use(OPERATIONS_ROUTE, buildOperationsRouter(container)); + + rmSync(PATHS.FUSE_DAEMON_SOCKET, { force: true }); + + server = app.listen(PATHS.FUSE_DAEMON_SOCKET, () => { + logger.debug({ msg: '[FUSE DAEMON] server listening', socket: PATHS.FUSE_DAEMON_SOCKET }); + resolve(); + }); + }); +} + +export function stopFuseDaemonServer(): Promise { + return new Promise((resolve, reject) => { + if (!server) { + resolve(); + return; + } + + server.close((err) => { + if (err) { + reject(err); + return; + } + rmSync(PATHS.FUSE_DAEMON_SOCKET, { force: true }); + server = null; + resolve(); + }); + }); +} diff --git a/src/backend/features/virtual-drive/services/update-virtual-drive-container.service.ts b/src/backend/features/virtual-drive/services/update-virtual-drive-container.service.ts new file mode 100644 index 0000000000..465d733b35 --- /dev/null +++ b/src/backend/features/virtual-drive/services/update-virtual-drive-container.service.ts @@ -0,0 +1,24 @@ +import { RemoteTreeBuilder } from '../../../../context/virtual-drive/remoteTree/application/RemoteTreeBuilder'; +import { FolderRepositorySynchronizer } from '../../../../context/virtual-drive/folders/application/FolderRepositorySynchronizer/FolderRepositorySynchronizer'; +import { FileRepositorySynchronizer } from '../../../../context/virtual-drive/files/application/FileRepositorySynchronizer'; +import { StorageRemoteChangesSyncher } from '../../../../context/storage/StorageFiles/application/sync/StorageRemoteChangesSyncher'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { User } from '../../../../apps/main/types'; +import { Container } from 'diod'; + +// This is the old src/apps/drive/fuse/FuseApp.update +export async function updateVirtualDriveContainer({ container, user }: { container: Container; user: User }) { + try { + const tree = await container.get(RemoteTreeBuilder).run(user.root_folder_id, user.rootFolderId); + await Promise.all([ + container.get(FileRepositorySynchronizer).run(tree.files), + container.get(FolderRepositorySynchronizer).run(tree.folders), + container.get(StorageRemoteChangesSyncher).run(), + ]); + logger.debug({ msg: '[VIRTUAL DRIVE] Tree updated successfully' }); + return { data: true }; + } catch (err) { + logger.error({ msg: '[VIRTUAL DRIVE] Error updating tree', error: err }); + return { data: false }; + } +} diff --git a/src/backend/features/virtual-drive/services/virtual-drive.service.test.ts b/src/backend/features/virtual-drive/services/virtual-drive.service.test.ts new file mode 100644 index 0000000000..7fed05b2f1 --- /dev/null +++ b/src/backend/features/virtual-drive/services/virtual-drive.service.test.ts @@ -0,0 +1,60 @@ +import { stopDaemon } from './daemon.service'; +import { stopFuseDaemonServer } from './server.service'; +import { abortAllHydrations, clearHydrationState } from '../../fuse/on-read/download-cache/hydration-state'; +import { stopVirtualDrive } from './virtual-drive.service'; +vi.mock('./daemon.service', () => ({ + startDaemon: vi.fn(), + stopDaemon: vi.fn(), +})); + +vi.mock('./server.service', () => ({ + startFuseDaemonServer: vi.fn(), + stopFuseDaemonServer: vi.fn(), +})); + +vi.mock('../../fuse/on-read/download-cache/hydration-state', () => ({ + abortAllHydrations: vi.fn(), + clearHydrationState: vi.fn(), +})); + +const stopDaemonMock = vi.mocked(stopDaemon); +const stopFuseDaemonServerMock = vi.mocked(stopFuseDaemonServer); +const abortAllHydrationsMock = vi.mocked(abortAllHydrations); +const clearHydrationStateMock = vi.mocked(clearHydrationState); + +describe('stopVirtualDrive', () => { + beforeEach(() => { + vi.clearAllMocks(); + stopDaemonMock.mockResolvedValue(undefined); + stopFuseDaemonServerMock.mockResolvedValue(undefined); + }); + + it('aborts active hydrations before clearing hydration state', async () => { + await stopVirtualDrive(); + + expect(abortAllHydrationsMock).toHaveBeenCalledOnce(); + expect(clearHydrationStateMock).toHaveBeenCalledOnce(); + expect(abortAllHydrationsMock.mock.invocationCallOrder[0]).toBeLessThan( + clearHydrationStateMock.mock.invocationCallOrder[0], + ); + }); + + it('shares an in-flight stop when stop is requested twice', async () => { + let resolveStopDaemon: () => void; + stopDaemonMock.mockReturnValueOnce( + new Promise((resolve) => { + resolveStopDaemon = resolve; + }), + ); + + const firstStop = stopVirtualDrive(); + const secondStop = stopVirtualDrive(); + + expect(stopDaemonMock).toHaveBeenCalledOnce(); + + resolveStopDaemon!(); + await Promise.all([firstStop, secondStop]); + + expect(stopFuseDaemonServerMock).toHaveBeenCalledOnce(); + }); +}); diff --git a/src/backend/features/virtual-drive/services/virtual-drive.service.ts b/src/backend/features/virtual-drive/services/virtual-drive.service.ts new file mode 100644 index 0000000000..d888226253 --- /dev/null +++ b/src/backend/features/virtual-drive/services/virtual-drive.service.ts @@ -0,0 +1,60 @@ +import { Container } from 'diod'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { DriveDependencyContainerFactory } from '../../../../apps/drive/dependency-injection/DriveDependencyContainerFactory'; +import { getRootVirtualDrive } from '../../../../apps/main/virtual-root-folder/service'; +import { startDaemon, stopDaemon } from './daemon.service'; +import { startFuseDaemonServer, stopFuseDaemonServer } from './server.service'; +import { updateVirtualDriveContainer } from './update-virtual-drive-container.service'; +import { DependencyInjectionUserProvider } from '../../../../apps/shared/dependency-injection/DependencyInjectionUserProvider'; +import { abortAllHydrations, clearHydrationState } from '../../fuse/on-read/download-cache/hydration-state'; +import { StorageFilesRepository } from '../../../../context/storage/StorageFiles/domain/StorageFilesRepository'; + +let container: Container | undefined; +let stopInFlight: Promise | undefined; + +export function getVirtualDriveContainer(): Container | undefined { + return container; +} + +export async function startVirtualDrive() { + const localRoot = getRootVirtualDrive(); + container = await DriveDependencyContainerFactory.build(); + await updateVirtualDriveContainer({ container, user: DependencyInjectionUserProvider.get() }); + /** + * Clear stale block-cache state and orphaned hydrated files before mounting. + * Future virtual-drive reads recreate cache files and hydrate only requested blocks. + */ + clearHydrationState(); + await container.get(StorageFilesRepository).deleteAll(); + await startFuseDaemonServer(container); + await startDaemon(localRoot); +} + +export async function stopVirtualDrive() { + if (stopInFlight) { + return stopInFlight; + } + + stopInFlight = stopVirtualDriveOnce(); + + try { + await stopInFlight; + } finally { + stopInFlight = undefined; + } +} + +async function stopVirtualDriveOnce() { + logger.debug({ msg: '[VIRTUAL DRIVE] stopping daemon...' }); + abortAllHydrations(); + await stopDaemon(); + logger.debug({ msg: '[VIRTUAL DRIVE] clearing storage cache...' }); + clearHydrationState(); + if (container) { + await container.get(StorageFilesRepository).deleteAll(); + } + logger.debug({ msg: '[VIRTUAL DRIVE] stopping server...' }); + await stopFuseDaemonServer(); + container = undefined; + logger.debug({ msg: '[VIRTUAL DRIVE] stopped' }); +} diff --git a/src/backend/features/virtual-drive/utils/process-blocklist.test.ts b/src/backend/features/virtual-drive/utils/process-blocklist.test.ts new file mode 100644 index 0000000000..6876d961e6 --- /dev/null +++ b/src/backend/features/virtual-drive/utils/process-blocklist.test.ts @@ -0,0 +1,32 @@ +import { describe, it, expect } from 'vitest'; +import { isBlocklistedProcess } from './process-blocklist'; + +describe('isBlocklistedProcess', () => { + it('should block pool-org.gnome (Nautilus thumbnail generation)', () => { + expect(isBlocklistedProcess('pool-org.gnome')).toBe(true); + }); + + it('should block pool-org.gnome. with trailing dot (kernel 16-char truncation variant)', () => { + expect(isBlocklistedProcess('pool-org.gnome.')).toBe(true); + }); + + it('should not block pool-gnome-text (GNOME Text Editor user open)', () => { + expect(isBlocklistedProcess('pool-gnome-text')).toBe(false); + }); + + it('should not block vlc (user-initiated open)', () => { + expect(isBlocklistedProcess('vlc')).toBe(false); + }); + + it('should not block evince (user-initiated open)', () => { + expect(isBlocklistedProcess('evince')).toBe(false); + }); + + it('should not block empty string (unknown process defaults to allow)', () => { + expect(isBlocklistedProcess('')).toBe(false); + }); + + it('should not block nautilus (file manager process itself is not the thumbnail daemon)', () => { + expect(isBlocklistedProcess('nautilus')).toBe(false); + }); +}); diff --git a/src/backend/features/virtual-drive/utils/process-blocklist.ts b/src/backend/features/virtual-drive/utils/process-blocklist.ts new file mode 100644 index 0000000000..2e77d5878c --- /dev/null +++ b/src/backend/features/virtual-drive/utils/process-blocklist.ts @@ -0,0 +1,25 @@ +/** + * Processes known to trigger file reads for system purposes (thumbnail generation, + * directory browsing) rather than user-initiated file opens. + * + * Matched with startsWith to handle kernel's 16-char /proc//comm truncation + * and version-suffixed variants. + * + * To expand compatibility for a new file manager, add its thumbnail daemon here. + * + * WARNING: Never block the broad `pool-` prefix — GNOME user apps (e.g. Text Editor + * as `pool-gnome-text`, VLC as `vlc`) use different pool names and must be allowed through. + * Only add specific known thumbnail/system daemon prefixes. + */ +const BLOCKLISTED_PROCESS_PREFIXES = [ + 'pool-org.gnome', // GNOME thread pool — Nautilus thumbnail generation + 'gdk-pixbuf-thum', // GDK pixbuf thumbnailer (truncated at 15 chars by kernel) + //'EogJobScheduler', // Eye of GNOME (image viewer) background job scheduler + // 'tumblerd', // Thunar, Caja, PCManFM thumbnail daemon (freedesktop spec) + // 'kio_thumbnail', // Dolphin KIO thumbnail worker + // 'thumbnail.so', // Dolphin KIO thumbnail worker (alternative name) +]; + +export function isBlocklistedProcess(processName: string): boolean { + return BLOCKLISTED_PROCESS_PREFIXES.some((prefix) => processName.startsWith(prefix)); +} diff --git a/src/context/local/localFile/infrastructure/AbsolutePath.test.ts b/src/context/local/localFile/infrastructure/AbsolutePath.test.ts new file mode 100644 index 0000000000..19c0eefd76 --- /dev/null +++ b/src/context/local/localFile/infrastructure/AbsolutePath.test.ts @@ -0,0 +1,21 @@ +import { createAbsolutePath } from './AbsolutePath'; + +describe('AbsolutePath', () => { + it('should join path parts', () => { + const result = createAbsolutePath('/home', 'dev', 'Documents'); + + expect(result).toBe('/home/dev/Documents'); + }); + + it('should normalize dot segments and duplicated separators', () => { + const result = createAbsolutePath('/home//dev', './Documents', '../Pictures'); + + expect(result).toBe('/home/dev/Pictures'); + }); + + it('should remove trailing slash', () => { + const result = createAbsolutePath('/home/dev/Documents/'); + + expect(result).toBe('/home/dev/Documents'); + }); +}); diff --git a/src/context/local/localFile/infrastructure/AbsolutePath.ts b/src/context/local/localFile/infrastructure/AbsolutePath.ts index 1bb8a6d965..6608c09953 100644 --- a/src/context/local/localFile/infrastructure/AbsolutePath.ts +++ b/src/context/local/localFile/infrastructure/AbsolutePath.ts @@ -1,3 +1,11 @@ import { Brand } from '../../../shared/domain/Brand'; +import { posix } from 'node:path'; export type AbsolutePath = Brand; + +export function createAbsolutePath(...parts: string[]): AbsolutePath { + let path = posix.join(...parts); + path = posix.normalize(path); + if (path.endsWith(posix.sep)) path = path.slice(0, -1); + return path as AbsolutePath; +} diff --git a/src/context/shared/domain/DomainEvent.ts b/src/context/shared/domain/DomainEvent.ts index db7baa9475..1b14600f76 100644 --- a/src/context/shared/domain/DomainEvent.ts +++ b/src/context/shared/domain/DomainEvent.ts @@ -1,6 +1,6 @@ import * as uuid from 'uuid'; -type DomainEventAttributes = any; +type DomainEventAttributes = Record; export abstract class DomainEvent { static EVENT_NAME: string; diff --git a/src/context/shared/domain/system-path/PathInfo.ts b/src/context/shared/domain/system-path/PathInfo.ts new file mode 100644 index 0000000000..0f360266b7 --- /dev/null +++ b/src/context/shared/domain/system-path/PathInfo.ts @@ -0,0 +1,7 @@ +import { AbsolutePath } from '../../../local/localFile/infrastructure/AbsolutePath'; + +export type PathInfo = { + path: AbsolutePath; + itemName: string; + isDirectory?: boolean; +}; diff --git a/src/context/shared/infrastructure/MainProcess/MainProcessDownloadProgressTracker.ts b/src/context/shared/infrastructure/MainProcess/MainProcessDownloadProgressTracker.ts index c686ed3a66..24e1d8ddb0 100644 --- a/src/context/shared/infrastructure/MainProcess/MainProcessDownloadProgressTracker.ts +++ b/src/context/shared/infrastructure/MainProcess/MainProcessDownloadProgressTracker.ts @@ -1,4 +1,4 @@ -import { setTrayStatus } from '../../../../apps/main/tray/tray'; +import { setTrayStatus } from '../../../../apps/main/tray/tray-setup'; import { broadcastToWindows } from '../../../../apps/main/windows'; import { DownloadProgressTracker } from '../../domain/DownloadProgressTracker'; import { SyncMessenger } from '../../domain/SyncMessenger'; diff --git a/src/context/shared/infrastructure/MainProcess/MainProcessUploadProgressTracker.ts b/src/context/shared/infrastructure/MainProcess/MainProcessUploadProgressTracker.ts index cfeb6f920a..03e1b318a6 100644 --- a/src/context/shared/infrastructure/MainProcess/MainProcessUploadProgressTracker.ts +++ b/src/context/shared/infrastructure/MainProcess/MainProcessUploadProgressTracker.ts @@ -1,5 +1,5 @@ import { Service } from 'diod'; -import { setTrayStatus } from '../../../../apps/main/tray/tray'; +import { setTrayStatus } from '../../../../apps/main/tray/tray-setup'; import { broadcastToWindows } from '../../../../apps/main/windows'; import { SyncMessenger } from '../../domain/SyncMessenger'; import { UploadProgressTracker } from '../../domain/UploadProgressTracker'; @@ -7,6 +7,8 @@ import { UploadProgressTracker } from '../../domain/UploadProgressTracker'; @Service() export class MainProcessUploadProgressTracker extends SyncMessenger implements UploadProgressTracker { uploadStarted(name: string, extension: string, size: number): void { + setTrayStatus('SYNCING'); + const nameWithExtension = this.nameWithExtension(name, extension); broadcastToWindows('sync-info-update', { diff --git a/src/context/storage/StorageFiles/__mocks__/StorageFilesRepositoryMock.ts b/src/context/storage/StorageFiles/__mocks__/StorageFilesRepositoryMock.ts index 7f5d15ba94..9940148e8a 100644 --- a/src/context/storage/StorageFiles/__mocks__/StorageFilesRepositoryMock.ts +++ b/src/context/storage/StorageFiles/__mocks__/StorageFilesRepositoryMock.ts @@ -11,6 +11,7 @@ export class StorageFilesRepositoryMock implements StorageFilesRepository { private deleteMock = vi.fn(); private deleteAllMock = vi.fn(); private allMock = vi.fn(); + private registerMock = vi.fn(); async exists(id: StorageFileId): Promise { return this.existsMock(id); @@ -35,8 +36,8 @@ export class StorageFilesRepositoryMock implements StorageFilesRepository { this.retrieveMock.mockReturnValueOnce(file); } - async store(file: StorageFile, readable: Readable): Promise { - return this.storeMock(file, readable); + async store(file: StorageFile, readable: Readable, onProgress: (bytesWritten: number) => void): Promise { + return this.storeMock(file, readable, onProgress); } async read(id: StorageFileId): Promise { @@ -57,7 +58,7 @@ export class StorageFilesRepositoryMock implements StorageFilesRepository { expect(this.deleteMock).not.toBeCalled(); } - async deleteAll(): Promise { + async deleteAll(): Promise>> { return this.deleteAllMock(); } @@ -65,6 +66,10 @@ export class StorageFilesRepositoryMock implements StorageFilesRepository { return this.allMock(); } + async register(file: StorageFile): Promise { + return this.registerMock(file); + } + returnAll(files: Awaited>) { this.allMock.mockReturnValueOnce(files); } diff --git a/src/context/storage/StorageFiles/application/delete/StorageClearer.ts b/src/context/storage/StorageFiles/application/delete/StorageClearer.ts deleted file mode 100644 index 7b3ec94db9..0000000000 --- a/src/context/storage/StorageFiles/application/delete/StorageClearer.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Service } from 'diod'; -import { StorageFilesRepository } from '../../domain/StorageFilesRepository'; - -@Service() -export class StorageClearer { - constructor(private readonly repo: StorageFilesRepository) {} - - async run(): Promise { - await this.repo.deleteAll(); - } -} diff --git a/src/context/storage/StorageFiles/application/download/__test-helpers__/StorageFileDownloaderTestClass.ts b/src/context/storage/StorageFiles/application/download/__test-helpers__/StorageFileDownloaderTestClass.ts index daf5047dc1..28b06ea48e 100644 --- a/src/context/storage/StorageFiles/application/download/__test-helpers__/StorageFileDownloaderTestClass.ts +++ b/src/context/storage/StorageFiles/application/download/__test-helpers__/StorageFileDownloaderTestClass.ts @@ -4,6 +4,7 @@ import { StorageFile } from '../../../domain/StorageFile'; import { DownloadProgressTrackerMock } from '../../../__mocks__/DownloadProgressTrackerMock'; import { DownloaderHandlerFactoryMock } from '../../../domain/download/__mocks__/DownloaderHandlerFactoryMock'; import { DownloaderHandler } from '../../../domain/download/DownloaderHandler'; +import { partialSpyOn } from 'tests/vitest/utils.helper'; export class StorageFileDownloaderTestClass extends StorageFileDownloader { private mock = vi.fn(); @@ -24,7 +25,7 @@ export class StorageFileDownloaderTestClass extends StorageFileDownloader { returnsAReadable() { const factory = new DownloaderHandlerFactoryMock(); const handler = factory.downloader(); - (handler.elapsedTime as any).mockReturnValue(1000); + partialSpyOn(handler, 'elapsedTime').mockReturnValue(1000); this.mock.mockResolvedValue({ stream: Readable.from('Hello world!'), metadata: { name: 'test', type: 'txt', size: 12 }, @@ -39,6 +40,6 @@ export class StorageFileDownloaderTestClass extends StorageFileDownloader { } assertHasNotBeenCalled() { - expect(this.mock).not.toHaveBeenCalled(); + expect(this.mock).not.toBeCalled(); } } diff --git a/src/context/storage/StorageFiles/application/download/download-with-progress-tracking.test.ts b/src/context/storage/StorageFiles/application/download/download-with-progress-tracking.test.ts index fc95118b65..f66e89f6c1 100644 --- a/src/context/storage/StorageFiles/application/download/download-with-progress-tracking.test.ts +++ b/src/context/storage/StorageFiles/application/download/download-with-progress-tracking.test.ts @@ -4,7 +4,8 @@ import { DownloadProgressTrackerMock } from '../../__mocks__/DownloadProgressTra import { FileMother } from '../../../../virtual-drive/files/domain/__test-helpers__/FileMother'; import { StorageFilesRepositoryMock } from '../../__mocks__/StorageFilesRepositoryMock'; import { StorageFile } from '../../domain/StorageFile'; -import { call, calls } from 'tests/vitest/utils.helper'; +import { StorageFileDownloader } from './StorageFileDownloader/StorageFileDownloader'; +import { call, calls, partialSpyOn } from 'tests/vitest/utils.helper'; describe('downloadWithProgressTracking', () => { const elapsedTime = 123; @@ -22,6 +23,8 @@ describe('downloadWithProgressTracking', () => { }); it('tracks progress, stores the file, and returns the storage file', async () => { + const storeMock = partialSpyOn(repository, 'store'); + const virtualFile = FileMother.fromPartial({ size: 100, path: 'folder/test-file.txt', @@ -32,15 +35,14 @@ describe('downloadWithProgressTracking', () => { const metadata = { name: virtualFile.name, type: virtualFile.type, size: virtualFile.size }; downloader.run.mockResolvedValue({ stream, metadata, handler }); - const storeSpy = vi.fn(async (_file: StorageFile, _readable: Readable, onProgress: (bytes: number) => void) => { + storeMock.mockImplementation(async (_file, _readable, onProgress) => { [20, 200].forEach((bytes) => onProgress(bytes)); }); - (repository as any).store = storeSpy; const result = await downloadWithProgressTracking({ virtualFile, tracker, - downloader: downloader as any, + downloader: downloader as unknown as StorageFileDownloader, repository, }); @@ -53,9 +55,9 @@ describe('downloadWithProgressTracking', () => { call(tracker.downloadFinished).toMatchObject([metadata.name, metadata.type]); call(downloader.run).toMatchObject([expect.any(StorageFile), virtualFile]); - call(storeSpy).toMatchObject([expect.any(StorageFile), stream, expect.any(Function)]); + call(storeMock).toMatchObject([expect.any(StorageFile), stream, expect.any(Function)]); - expect(result.attributes()).toEqual({ + expect(result.attributes()).toStrictEqual({ id: virtualFile.contentsId, virtualId: virtualFile.uuid, size: virtualFile.size, diff --git a/src/context/storage/StorageFiles/domain/StorageFilesRepository.ts b/src/context/storage/StorageFiles/domain/StorageFilesRepository.ts index cd63e7b82f..77b6a9fc68 100644 --- a/src/context/storage/StorageFiles/domain/StorageFilesRepository.ts +++ b/src/context/storage/StorageFiles/domain/StorageFilesRepository.ts @@ -1,6 +1,7 @@ import { Readable } from 'stream'; import { StorageFileId } from './StorageFileId'; import { StorageFile } from './StorageFile'; +import { Result } from '../../../shared/domain/Result'; export abstract class StorageFilesRepository { abstract exists(id: StorageFileId): Promise; @@ -15,7 +16,7 @@ export abstract class StorageFilesRepository { abstract delete(id: StorageFileId): Promise; - abstract deleteAll(): Promise; + abstract deleteAll(): Promise>; abstract all(): Promise>; } diff --git a/src/context/storage/StorageFiles/infrastructure/download/EnvironmentContentFileDownloader.ts b/src/context/storage/StorageFiles/infrastructure/download/EnvironmentContentFileDownloader.ts index 548a352d9f..abd2b5c47e 100644 --- a/src/context/storage/StorageFiles/infrastructure/download/EnvironmentContentFileDownloader.ts +++ b/src/context/storage/StorageFiles/infrastructure/download/EnvironmentContentFileDownloader.ts @@ -23,12 +23,7 @@ export class EnvironmentContentFileDownloader implements DownloaderHandler { } forceStop(): void { - //@ts-ignore - // Logger.debug('Finish emitter type', this.state?.type); - // Logger.debug('Finish emitter stop method', this.state?.stop); this.state?.stop(); - // this.eventEmitter.emit('error'); - // this.eventEmitter.emit('finish'); } download(file: StorageFile): Promise { diff --git a/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.test.ts b/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.test.ts index ef1759b405..74bcfdefdf 100644 --- a/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.test.ts +++ b/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.test.ts @@ -1,71 +1,60 @@ -// import 'reflect-metadata'; -// import path from 'node:path'; -// import { DataSource } from 'typeorm'; -// import { TypeOrmAndNodeFsStorageFilesRepository } from './TypeOrmAndNodeFsStorageFilesRepository'; -// import { obtainSqliteDataSource } from './__test-helpers__/sqlDataSource'; -// import { StorageFileMother } from '../../../../../__test-helpers__/StorageFileMother'; -// import { createReadable } from './__test-helpers__/createReadable'; -// import { createFile } from './__test-helpers__/createFile'; +import 'reflect-metadata'; +import { mkdtemp, mkdir, readdir, rm, writeFile } from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import { DataSource } from 'typeorm'; +import { TypeOrmAndNodeFsStorageFilesRepository } from './TypeOrmAndNodeFsStorageFilesRepository'; -/** - * SKIPPED: This test requires better-sqlite3 native module which must be compiled - * for the specific Node.js version being used. - * - * The production app runs on Node.js v16 (NODE_MODULE_VERSION 106), but if you're - * running tests with a different Node version, better-sqlite3 will fail to load. - * - * ADDITIONAL ISSUE: typeorm and better-sqlite3 are installed in release/app/package.json - * (separate from the main package.json), which creates module resolution issues in the - * test environment. The proper solution is to consolidate into a single package.json. - * - * To fix: - * 1. Use Node.js v16 to match production: `nvm use 16` - * 2. Or rebuild the native module: `npm rebuild better-sqlite3` - * 3. Or run tests in a container with the correct Node version - * - * Once you're on Node v16, remove the .skip to enable this test. - */ -describe.skip('TypeOrmAndNodeFsStorageFilesRepository', () => { - // const directory = 'sqlite'; - // // let dataSource: DataSource; - // let repository: TypeOrmAndNodeFsStorageFilesRepository; - // beforeAll(async () => { - // const on = path.join(__dirname, directory); - // dataSource = await obtainSqliteDataSource(on); - // repository = new TypeOrmAndNodeFsStorageFilesRepository(on, dataSource); - // }); - // afterAll(async () => { - // await dataSource?.dropDatabase(); - // }); - // afterEach(async () => { - // await repository.deleteAll(); - // }); - // it('stores and retrieve a file from database and file system', async () => { - // const file = StorageFileMother.random(); - // const content = 'Hello Wold!!'; - // await repository.store(file, createReadable(content)); - // const retrievedBuffer = await repository.read(file.id); - // expect(retrievedBuffer.toString()).toBe(content); - // }); - // it('deletes a stored file', async () => { - // const file = await createFile(repository); - // await repository.delete(file.id); - // const result = await repository.exists(file.id); - // expect(result).toBe(false); - // }); - // it('finds a file after being stored', async () => { - // const stored = await createFile(repository); - // const exists = await repository.exists(stored.id); - // expect(exists).toBe(true); - // }); - // it('retrieves a stored Storage File', async () => { - // const stored = await createFile(repository); - // const retrieved = await repository.retrieve(stored.id); - // expect(stored).toEqual(retrieved); - // }); - // it('returns all files', async () => { - // const files = await Promise.all([createFile(repository), createFile(repository), createFile(repository)]); - // const allFilesRetrieved = await repository.all(); - // expect(files).toEqual(expect.arrayContaining(allFilesRetrieved)); - // }); +describe('TypeOrmAndNodeFsStorageFilesRepository', () => { + let baseFolder: string; + let db: { + find: ReturnType; + delete: ReturnType; + }; + let repository: TypeOrmAndNodeFsStorageFilesRepository; + + beforeEach(async () => { + baseFolder = await mkdtemp(path.join(os.tmpdir(), 'storage-files-repository-')); + db = { + find: vi.fn().mockResolvedValue([]), + delete: vi.fn().mockResolvedValue(undefined), + }; + + const dataSource = { + getRepository: vi.fn().mockReturnValue(db), + } as unknown as DataSource; + + repository = new TypeOrmAndNodeFsStorageFilesRepository(baseFolder, dataSource); + }); + + afterEach(async () => { + await rm(baseFolder, { recursive: true, force: true }); + }); + + it('should delete orphaned files from the storage folder when deleting all', async () => { + await writeFile(path.join(baseFolder, 'orphaned-contents-id'), 'partial hydration'); + + await repository.deleteAll(); + + await expect(readdir(baseFolder)).resolves.toEqual([]); + }); + + it('should delete registered files and any remaining orphaned files from the storage folder', async () => { + db.find.mockResolvedValue([{ id: 'registeredcontentsid0000' }]); + await writeFile(path.join(baseFolder, 'registeredcontentsid0000'), 'hydrated file'); + await writeFile(path.join(baseFolder, 'orphaned-contents-id'), 'partial hydration'); + await mkdir(path.join(baseFolder, 'nested-directory')); + + await repository.deleteAll(); + + expect(db.delete).toHaveBeenCalledWith({ id: 'registeredcontentsid0000' }); + await expect(readdir(baseFolder)).resolves.toEqual(['nested-directory']); + }); + + it('should return an error if deleting all files throws', async () => { + db.find.mockRejectedValue(new Error('The database connection is not open')); + const result = await repository.deleteAll(); + expect(result).toEqual({ error: new Error('The database connection is not open') }); + expect(db.delete).not.toHaveBeenCalled(); + }); }); diff --git a/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.ts b/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.ts index 377fac1d87..ab3c325b49 100644 --- a/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.ts +++ b/src/context/storage/StorageFiles/infrastructure/persistance/repository/typeorm/TypeOrmAndNodeFsStorageFilesRepository.ts @@ -1,6 +1,6 @@ import { Service } from 'diod'; import { Readable } from 'form-data'; -import { readFile, unlink } from 'fs/promises'; +import { readFile, readdir, unlink } from 'fs/promises'; import path from 'path'; import { DataSource, Repository } from 'typeorm'; import { tryCatch } from '../../../../../../../shared/try-catch'; @@ -10,6 +10,7 @@ import { StorageFile } from '../../../../domain/StorageFile'; import { StorageFileId } from '../../../../domain/StorageFileId'; import { StorageFilesRepository } from '../../../../domain/StorageFilesRepository'; import { TypeOrmStorageFile } from './entities/TypeOrmStorageFile'; +import { Result } from '../../../../../../shared/domain/Result'; @Service() export class TypeOrmAndNodeFsStorageFilesRepository implements StorageFilesRepository { @@ -78,14 +79,20 @@ export class TypeOrmAndNodeFsStorageFilesRepository implements StorageFilesRepos await this.db.delete({ id: id.value }); } - async deleteAll(): Promise { - const all = await this.db.find(); + async deleteAll(): Promise> { + try { + const all = await this.db.find(); - const deleted = all - .map((att: { id: string }) => new StorageFileId(att.id)) - .map((id: StorageFileId) => this.delete(id)); + const deleted = all + .map((att: { id: string }) => new StorageFileId(att.id)) + .map((id: StorageFileId) => this.delete(id)); - await Promise.all(deleted); + await Promise.all(deleted); + await this.deleteOrphanFilesFromBaseFolder(); + return { data: undefined }; + } catch (error) { + return { error: error instanceof Error ? error : new Error('Unknown error during deleteAll') }; + } } async all(): Promise { @@ -93,4 +100,14 @@ export class TypeOrmAndNodeFsStorageFilesRepository implements StorageFilesRepos return all.map(StorageFile.from); } + + private async deleteOrphanFilesFromBaseFolder(): Promise { + const entries = await readdir(this.baseFolder, { withFileTypes: true }); + const deleted = entries + .filter((entry) => entry.isFile() || entry.isSymbolicLink()) + .map((entry) => path.join(this.baseFolder, entry.name)) + .map((pathToUnlink) => tryCatch(() => unlink(pathToUnlink))); + + await Promise.all(deleted); + } } diff --git a/src/context/storage/TemporalFiles/application/truncate/TemporalFileTruncater.ts b/src/context/storage/TemporalFiles/application/truncate/TemporalFileTruncater.ts new file mode 100644 index 0000000000..5d72b01caa --- /dev/null +++ b/src/context/storage/TemporalFiles/application/truncate/TemporalFileTruncater.ts @@ -0,0 +1,19 @@ +import { Service } from 'diod'; +import { TemporalFilePath } from '../../domain/TemporalFilePath'; +import { TemporalFileRepository } from '../../domain/TemporalFileRepository'; +import { TemporalFileIOError } from '../../domain/errors/TemporalFileIOError'; + +@Service() +export class TemporalFileTruncater { + constructor(private readonly repository: TemporalFileRepository) {} + + async run(path: string, size: number): Promise { + const documentPath = new TemporalFilePath(path); + + try { + await this.repository.truncate(documentPath, size); + } catch { + throw new TemporalFileIOError(); + } + } +} diff --git a/src/context/storage/TemporalFiles/application/upload/TemporalFileUploader.ts b/src/context/storage/TemporalFiles/application/upload/TemporalFileUploader.ts index b8b83bbba9..758e0ccf3c 100644 --- a/src/context/storage/TemporalFiles/application/upload/TemporalFileUploader.ts +++ b/src/context/storage/TemporalFiles/application/upload/TemporalFileUploader.ts @@ -3,7 +3,6 @@ import { extname } from 'node:path'; import { logger } from '@internxt/drive-desktop-core/build/backend'; import { canGenerateThumbnail } from '../../../../../backend/features/thumbnails/thumbnail.extensions'; import { TemporalFileRepository } from '../../domain/TemporalFileRepository'; -import { TemporalFilePath } from '../../domain/TemporalFilePath'; import { TemporalFileUploaderFactory } from '../../domain/upload/TemporalFileUploaderFactory'; import { TemporalFileUploadedDomainEvent } from '../../domain/upload/TemporalFileUploadedDomainEvent'; import { EventBus } from '../../../../virtual-drive/shared/domain/EventBus'; diff --git a/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.test.ts b/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.test.ts new file mode 100644 index 0000000000..66d37cd491 --- /dev/null +++ b/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.test.ts @@ -0,0 +1,29 @@ +import { mockDeep } from 'vitest-mock-extended'; +import { TemporalFileWriter } from './TemporalFileWriter'; +import { TemporalFileRepository } from '../../domain/TemporalFileRepository'; +import { TemporalFileIOError } from '../../domain/errors/TemporalFileIOError'; + +describe('TemporalFileWriter', () => { + const repository = mockDeep(); + let temporalFileWriter: TemporalFileWriter; + + beforeEach(() => { + temporalFileWriter = new TemporalFileWriter(repository); + }); + + it('should await repository writes', async () => { + const writePromise = Promise.resolve(); + repository.write.mockReturnValue(writePromise); + + await expect(temporalFileWriter.run('/some/file.txt', Buffer.from('hello'), 5, 0)).resolves.toBeUndefined(); + expect(repository.write).toHaveBeenCalledTimes(1); + }); + + it('should throw TemporalFileIOError when repository write rejects', async () => { + repository.write.mockRejectedValue(new Error('boom')); + + await expect(temporalFileWriter.run('/some/file.txt', Buffer.from('hello'), 5, 0)).rejects.toBeInstanceOf( + TemporalFileIOError, + ); + }); +}); diff --git a/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.ts b/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.ts index 4410525172..e8f26cb8bd 100644 --- a/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.ts +++ b/src/context/storage/TemporalFiles/application/write/TemporalFileWriter.ts @@ -11,7 +11,7 @@ export class TemporalFileWriter { const documentPath = new TemporalFilePath(path); try { - this.repository.write(documentPath, buffer, length, position); + await this.repository.write(documentPath, buffer, length, position); } catch (error: unknown) { throw new TemporalFileIOError(); } diff --git a/src/context/storage/TemporalFiles/domain/TemporalFile.test.ts b/src/context/storage/TemporalFiles/domain/TemporalFile.test.ts new file mode 100644 index 0000000000..41f87f5ae6 --- /dev/null +++ b/src/context/storage/TemporalFiles/domain/TemporalFile.test.ts @@ -0,0 +1,22 @@ +import { TemporalFile } from './TemporalFile'; + +describe('TemporalFile', () => { + describe('isTemporaryPath', () => { + it('should detect vim swap files', () => { + expect(TemporalFile.isTemporaryPath('/Documents/.test-file.txt.swp')).toBe(true); + expect(TemporalFile.isTemporaryPath('/Documents/.test-file.txt.swx')).toBe(true); + }); + + it('should detect vim backup files', () => { + expect(TemporalFile.isTemporaryPath('/Documents/test-file.txt~')).toBe(true); + }); + + it('should detect vim probe files', () => { + expect(TemporalFile.isTemporaryPath('/Documents/4913')).toBe(true); + }); + + it('should not classify regular files as auxiliary', () => { + expect(TemporalFile.isTemporaryPath('/Documents/test-file.txt')).toBe(false); + }); + }); +}); diff --git a/src/context/storage/TemporalFiles/domain/TemporalFile.ts b/src/context/storage/TemporalFiles/domain/TemporalFile.ts index 851aeae7ad..c2784d84dd 100644 --- a/src/context/storage/TemporalFiles/domain/TemporalFile.ts +++ b/src/context/storage/TemporalFiles/domain/TemporalFile.ts @@ -17,13 +17,15 @@ export type TemporalFileAttributes = { * Once the file descriptor is closed (FUSE release), the temporal file is uploaded to the cloud * (see {@link TemporalFileUploader}) and then deleted from disk (see {@link DeleteTemporalFileOnFileCreated}). * - * Auxiliary files (lock files, .tmp, vim swap, .goutputstream-*) are ignored. + * Auxiliary files (lock files, .tmp, vim swap, vim probe/backup files, .goutputstream-*) are ignored. */ export class TemporalFile extends AggregateRoot { private static readonly TEMPORAL_EXTENSION = 'tmp'; private static readonly LOCK_FILE_NAME_PREFIX = '.~lock.'; private static readonly OUTPUT_STREAM_NAME_PREFIX = '.goutputstream-'; - private static readonly VIM_SWAP_EXTENSIONS = ['.swp', '.swo', '.swn', '.swm']; + private static readonly VIM_SWAP_FILE_PATTERN = /\.sw[a-z]$/i; + private static readonly VIM_BACKUP_FILE_SUFFIX = '~'; + private static readonly VIM_PROBE_FILE_NAME = '4913'; private constructor( private _createdAt: Date, @@ -49,6 +51,10 @@ export class TemporalFile extends AggregateRoot { return this._path.name(); } + public get nameWithExtension() { + return this._path.nameWithExtension(); + } + public get extension() { return this._path.extension(); } @@ -98,12 +104,14 @@ export class TemporalFile extends AggregateRoot { const isTemporal = this.isTemporal(); const isOutputStream = this.isOutputStream(); const isVimSwap = this.isVimSwapFile(); + const isVimBackup = this.isVimBackupFile(); + const isVimProbe = this.isVimProbeFile(); - return isLockFile || isTemporal || isOutputStream || isVimSwap; + return isLockFile || isTemporal || isOutputStream || isVimSwap || isVimBackup || isVimProbe; } isLockFile(): boolean { - return this.name.startsWith(TemporalFile.LOCK_FILE_NAME_PREFIX); + return this.nameWithExtension.startsWith(TemporalFile.LOCK_FILE_NAME_PREFIX); } isTemporal(): boolean { @@ -111,11 +119,19 @@ export class TemporalFile extends AggregateRoot { } isOutputStream(): boolean { - return this.name.startsWith(TemporalFile.OUTPUT_STREAM_NAME_PREFIX); + return this.nameWithExtension.startsWith(TemporalFile.OUTPUT_STREAM_NAME_PREFIX); } isVimSwapFile(): boolean { - return TemporalFile.VIM_SWAP_EXTENSIONS.some((ext) => this.name.endsWith(ext)); + return TemporalFile.VIM_SWAP_FILE_PATTERN.test(this.nameWithExtension); + } + + isVimBackupFile(): boolean { + return this.nameWithExtension.endsWith(TemporalFile.VIM_BACKUP_FILE_SUFFIX); + } + + isVimProbeFile(): boolean { + return this.nameWithExtension === TemporalFile.VIM_PROBE_FILE_NAME; } attributes(): TemporalFileAttributes { diff --git a/src/context/storage/TemporalFiles/domain/TemporalFileRepository.ts b/src/context/storage/TemporalFiles/domain/TemporalFileRepository.ts index f1c4055923..319cf5f5d9 100644 --- a/src/context/storage/TemporalFiles/domain/TemporalFileRepository.ts +++ b/src/context/storage/TemporalFiles/domain/TemporalFileRepository.ts @@ -12,6 +12,8 @@ export abstract class TemporalFileRepository { abstract write(path: TemporalFilePath, buffer: Buffer, length: number, position: number): Promise; + abstract truncate(path: TemporalFilePath, size: number): Promise; + abstract read(path: TemporalFilePath): Promise; abstract stream(path: TemporalFilePath): Promise; @@ -21,4 +23,13 @@ export abstract class TemporalFileRepository { abstract watchFile(documentPath: TemporalFilePath, callback: () => void): () => void; abstract areEqual(doc1: TemporalFilePath, doc2: TemporalFilePath): Promise; + + abstract statFs(): Promise<{ + blocks: number; + bfree: number; + bavail: number; + files: number; + ffree: number; + bsize: number; + }>; } diff --git a/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.test.ts b/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.test.ts new file mode 100644 index 0000000000..fb4926fe64 --- /dev/null +++ b/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.test.ts @@ -0,0 +1,46 @@ +import { mkdtemp, rm } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; +import { NodeTemporalFileRepository } from './NodeTemporalFileRepository'; +import { TemporalFilePath } from '../domain/TemporalFilePath'; + +describe('NodeTemporalFileRepository', () => { + let folder: string; + let repository: NodeTemporalFileRepository; + + beforeEach(async () => { + folder = await mkdtemp(join(tmpdir(), 'internxt-temporal-files-')); + repository = new NodeTemporalFileRepository(folder); + repository.init(); + }); + + afterEach(async () => { + await rm(folder, { recursive: true, force: true }); + }); + + it('should return empty when mapped file no longer exists on disk', async () => { + const documentPath = new TemporalFilePath('/Documents/.test-file.txt.swp'); + + await repository.create(documentPath); + const temporalFile = await repository.find(documentPath); + const contentFilePath = temporalFile.get().contentFilePath; + + await rm(contentFilePath, { force: true }); + + const result = await repository.find(documentPath); + + expect(result.isPresent()).toBe(false); + }); + + it('should ignore ENOENT when deleting a stale mapped file', async () => { + const documentPath = new TemporalFilePath('/Documents/.test-file.txt.swp'); + + await repository.create(documentPath); + const temporalFile = await repository.find(documentPath); + const contentFilePath = temporalFile.get().contentFilePath; + + await rm(contentFilePath, { force: true }); + + await expect(repository.delete(documentPath)).resolves.toBeUndefined(); + }); +}); diff --git a/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.ts b/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.ts index 03e7f29636..1a4de651b1 100644 --- a/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.ts +++ b/src/context/storage/TemporalFiles/infrastructure/NodeTemporalFileRepository.ts @@ -95,9 +95,9 @@ export class NodeTemporalFileRepository implements TemporalFileRepository { const fsDeletion = new Promise((resolve, reject) => { fs.unlink(pathToDelete, (err: NodeJS.ErrnoException | null) => { if (err) { - if (err.code !== 'ENOENT') { + if (err.code === 'ENOENT') { logger.debug({ - msg: `Could not delete ${pathToDelete}, it already does not exists`, + msg: `Could not delete ${pathToDelete}, it already does not exist`, }); resolve(); return; @@ -140,14 +140,25 @@ export class NodeTemporalFileRepository implements TemporalFileRepository { } const fd = fs.openSync(pathToWrite, 'r+'); + const bytes = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); try { - fs.writeSync(fd, buffer, 0, length, position); + fs.writeSync(fd, bytes, 0, length, position); } finally { fs.closeSync(fd); } } + async truncate(documentPath: TemporalFilePath, size: number): Promise { + const pathToWrite = this.map.get(documentPath.value); + + if (!pathToWrite) { + throw new Error(`Document with path ${documentPath.value} not found`); + } + + fs.truncateSync(pathToWrite, size); + } + async stream(documentPath: TemporalFilePath): Promise { const pathToRead = this.map.get(documentPath.value); @@ -166,7 +177,21 @@ export class NodeTemporalFileRepository implements TemporalFileRepository { return Optional.empty(); } - const stat = fs.statSync(pathToSearch); + let stat: fs.Stats; + + try { + stat = fs.statSync(pathToSearch); + } catch (error) { + if (error instanceof Error && 'code' in error && error.code === 'ENOENT') { + logger.debug({ + msg: 'Temporal file was removed from disk before reading attributes', + documentPath: documentPath.value, + pathToSearch, + }); + } + + return Optional.empty(); + } const doc = TemporalFile.from({ createdAt: stat.ctime, @@ -202,4 +227,24 @@ export class NodeTemporalFileRepository implements TemporalFileRepository { watcher.close(); }; } + + statFs(): Promise<{ blocks: number; bfree: number; bavail: number; files: number; ffree: number; bsize: number }> { + return new Promise((resolve, reject) => { + fs.statfs(this.folder, (err, stats) => { + if (err) { + reject(err); + return; + } + + resolve({ + blocks: stats.blocks, + bfree: stats.bfree, + bavail: stats.bavail, + files: stats.files, + ffree: stats.ffree, + bsize: stats.bsize, + }); + }); + }); + } } diff --git a/src/context/virtual-drive/files/application/create/CreateFileOnTemporalFileUploaded.ts b/src/context/virtual-drive/files/application/create/CreateFileOnTemporalFileUploaded.ts index f409c270c7..8e4070eff7 100644 --- a/src/context/virtual-drive/files/application/create/CreateFileOnTemporalFileUploaded.ts +++ b/src/context/virtual-drive/files/application/create/CreateFileOnTemporalFileUploaded.ts @@ -50,7 +50,7 @@ export class CreateFileOnTemporalFileUploaded implements DomainEventSubscriber { try { - this.create(event); + await this.create(event); } catch (err) { logger.error({ msg: '[CreateFileOnOfflineFileUploaded] Error creating file:', diff --git a/src/context/virtual-drive/files/application/create/FileCreator.test.ts b/src/context/virtual-drive/files/application/create/FileCreator.test.ts index 103847c136..14de8f1c53 100644 --- a/src/context/virtual-drive/files/application/create/FileCreator.test.ts +++ b/src/context/virtual-drive/files/application/create/FileCreator.test.ts @@ -10,6 +10,7 @@ import { FileMother } from '../../domain/__test-helpers__/FileMother'; import { FileSizeMother } from '../../domain/__test-helpers__/FileSizeMother'; import { right } from '../../../../shared/domain/Either'; import { EventBusMock } from '../../../../../context/virtual-drive/shared/__mocks__/EventBusMock'; +import { clearPendingCreations } from '../../../folders/application/create/PendingFolderCreationTracker'; describe('File Creator', () => { let remoteFileSystemMock: RemoteFileSystemMock; @@ -25,6 +26,7 @@ describe('File Creator', () => { const parentFolderFinder = FolderFinderFactory.existingFolder(); eventBus = new EventBusMock(); notifier = new FileSyncNotifierMock(); + clearPendingCreations(); SUT = new FileCreator(remoteFileSystemMock, fileRepository, parentFolderFinder, eventBus, notifier); }); diff --git a/src/context/virtual-drive/files/application/create/FileCreator.ts b/src/context/virtual-drive/files/application/create/FileCreator.ts index 30030c2a85..7f01b0f6b7 100644 --- a/src/context/virtual-drive/files/application/create/FileCreator.ts +++ b/src/context/virtual-drive/files/application/create/FileCreator.ts @@ -12,6 +12,7 @@ import { SyncFileMessenger } from '../../domain/SyncFileMessenger'; import { RemoteFileSystem } from '../../domain/file-systems/RemoteFileSystem'; import { FileContentsId } from '../../domain/FileContentsId'; import { FileFolderId } from '../../domain/FileFolderId'; +import { runAfterParentCreations } from '../../../folders/application/create/PendingFolderCreationTracker'; @Service() export class FileCreator { @@ -25,37 +26,42 @@ export class FileCreator { async run(path: string, contentsId: string, size: number): Promise { try { - const fileSize = new FileSize(size); - const fileContentsId = new FileContentsId(contentsId); - const filePath = new FilePath(path); + const file = await runAfterParentCreations({ + path, + action: async () => { + const fileSize = new FileSize(size); + const fileContentsId = new FileContentsId(contentsId); + const filePath = new FilePath(path); - const folder = await this.parentFolderFinder.run(filePath); - const fileFolderId = new FileFolderId(folder.id); + const folder = await this.parentFolderFinder.run(filePath); + const fileFolderId = new FileFolderId(folder.id); - const either = await this.remote.persist({ - contentsId: fileContentsId, - path: filePath, - size: fileSize, - folderId: fileFolderId, - folderUuid: folder.uuid, - }); + const either = await this.remote.persist({ + contentsId: fileContentsId, + path: filePath, + size: fileSize, + folderId: fileFolderId, + folderUuid: folder.uuid, + }); - if (either.isLeft()) { - throw either.getLeft(); - } + if (either.isLeft()) { + throw either.getLeft(); + } - const { modificationTime, id, uuid, createdAt } = either.getRight(); + const { modificationTime, id, uuid, createdAt } = either.getRight(); - const file = File.create({ - id, - uuid, - contentsId: fileContentsId.value, - folderId: fileFolderId.value, - createdAt, - modificationTime, - path: filePath.value, - size: fileSize.value, - updatedAt: modificationTime, + return File.create({ + id, + uuid, + contentsId: fileContentsId.value, + folderId: fileFolderId.value, + createdAt, + modificationTime, + path: filePath.value, + size: fileSize.value, + updatedAt: modificationTime, + }); + }, }); await this.repository.upsert(file); diff --git a/src/context/virtual-drive/files/application/trash/FileTrasher.test.ts b/src/context/virtual-drive/files/application/trash/FileTrasher.test.ts index b82a883d58..f8ebdd2d73 100644 --- a/src/context/virtual-drive/files/application/trash/FileTrasher.test.ts +++ b/src/context/virtual-drive/files/application/trash/FileTrasher.test.ts @@ -94,7 +94,7 @@ describe('FileTrasher', () => { expect(syncFileMessengerMock.trashed).toBeCalledWith(file.name, file.type, file.size); }); - it('should NOT call remote.trash for files with size 0', async () => { + it('should call remote.trash for files with size 0', async () => { const file = FileMother.fromPartial({ size: 0 }); fileRepositoryMock.matchingPartial.mockReturnValue([file]); allParentFoldersStatusIsExistsMock.run.mockResolvedValue(true); @@ -102,7 +102,7 @@ describe('FileTrasher', () => { await sut.run(file.contentsId); expect(syncFileMessengerMock.trashing).toBeCalledWith(file.name, file.type, file.size); - expect(addFileToTrashMock).not.toBeCalled(); + call(addFileToTrashMock).toBe(file.uuid); expect(fileRepositoryMock.update).toBeCalledWith(expect.objectContaining({ status: FileStatus.Trashed })); expect(syncFileMessengerMock.trashed).toBeCalledWith(file.name, file.type, file.size); }); diff --git a/src/context/virtual-drive/files/application/trash/FileTrasher.ts b/src/context/virtual-drive/files/application/trash/FileTrasher.ts index 9bc2a17ce3..c0054520ce 100644 --- a/src/context/virtual-drive/files/application/trash/FileTrasher.ts +++ b/src/context/virtual-drive/files/application/trash/FileTrasher.ts @@ -46,12 +46,11 @@ export class FileTrasher { try { file.trash(); - if (file.size > 0) { - const { error } = await addFileToTrash(file.uuid); - if (error) { - throw new Error('Error when deleting file'); - } + const { error } = await addFileToTrash(file.uuid); + if (error) { + throw new Error('Error when deleting file'); } + await this.repository.update(file); await this.notifier.trashed(file.name, file.type, file.size); } catch (error: unknown) { diff --git a/src/context/virtual-drive/files/infrastructure/SyncFileMessengers/MainProcessSyncFileMessenger.ts b/src/context/virtual-drive/files/infrastructure/SyncFileMessengers/MainProcessSyncFileMessenger.ts index eb9c09e691..108f4ce841 100644 --- a/src/context/virtual-drive/files/infrastructure/SyncFileMessengers/MainProcessSyncFileMessenger.ts +++ b/src/context/virtual-drive/files/infrastructure/SyncFileMessengers/MainProcessSyncFileMessenger.ts @@ -1,6 +1,6 @@ import { Service } from 'diod'; import { addVirtualDriveIssue } from '../../../../../apps/main/issues/virtual-drive'; -import { setTrayStatus } from '../../../../../apps/main/tray/tray'; +import { setTrayStatus } from '../../../../../apps/main/tray/tray-setup'; import { broadcastToWindows } from '../../../../../apps/main/windows'; import { VirtualDriveFileIssue } from '../../../../../shared/issues/VirtualDriveIssue'; import { SyncMessenger } from '../../../../shared/domain/SyncMessenger'; @@ -17,10 +17,12 @@ export class MainProcessSyncFileMessenger extends SyncMessenger implements SyncF }); } + // eslint-disable-next-line @typescript-eslint/no-unused-vars async trashing(_name: string, _type: string, _size: number): Promise { setTrayStatus('SYNCING'); } + // eslint-disable-next-line @typescript-eslint/no-unused-vars async trashed(name: string, type: string, size: number): Promise { const nameWithExtension = this.nameWithExtension(name, type); diff --git a/src/context/virtual-drive/folders/__mocks__/FolderRemoteFileSystemMock.ts b/src/context/virtual-drive/folders/__mocks__/FolderRemoteFileSystemMock.ts index 6a3d76e2ab..450be67619 100644 --- a/src/context/virtual-drive/folders/__mocks__/FolderRemoteFileSystemMock.ts +++ b/src/context/virtual-drive/folders/__mocks__/FolderRemoteFileSystemMock.ts @@ -1,4 +1,4 @@ -import { Either, right } from '../../../shared/domain/Either'; +import { Either, left, right } from '../../../shared/domain/Either'; import { Folder } from '../domain/Folder'; import { FolderId } from '../domain/FolderId'; import { FolderPath } from '../domain/FolderPath'; @@ -39,6 +39,15 @@ export class FolderRemoteFileSystemMock implements RemoteFileSystem { ); } + shouldFailPersistWith(plainName: string, parentFolderUuid: string, error: RemoteFileSystemErrors) { + this.persistMock(plainName, parentFolderUuid); + this.persistMock.mockResolvedValueOnce(left(error)); + } + + shouldFindFolder(folder?: Folder) { + this.searchWithMock.mockResolvedValueOnce(folder); + } + shouldTrash(folder: Folder, error?: Error) { this.trashMock(folder.id); diff --git a/src/context/virtual-drive/folders/application/FolderDeleter.test.ts b/src/context/virtual-drive/folders/application/FolderDeleter.test.ts index 0d2894faac..d8c5e37cd0 100644 --- a/src/context/virtual-drive/folders/application/FolderDeleter.test.ts +++ b/src/context/virtual-drive/folders/application/FolderDeleter.test.ts @@ -6,6 +6,7 @@ import { FolderRepositoryMock } from '../__mocks__/FolderRepositoryMock'; import { FolderMother } from '../domain/__test-helpers__/FolderMother'; import * as addFolderToTrashModule from '../../../../infra/drive-server/services/folder/services/add-folder-to-trash'; import { call, partialSpyOn } from 'tests/vitest/utils.helper'; +import { DriveServerError } from 'src/infra/drive-server/drive-server.error'; describe('Folder deleter', () => { let repository: FolderRepositoryMock; @@ -69,7 +70,7 @@ describe('Folder deleter', () => { repository.searchByUuidMock.mockResolvedValueOnce(folder); vi.spyOn(allParentFoldersStatusIsExists, 'run').mockResolvedValueOnce(true); - addFolderToTrashMock.mockResolvedValue({ error: new Error('Error during the deletion') } as any); + addFolderToTrashMock.mockResolvedValue({ error: new DriveServerError('UNKNOWN') }); await SUT.run(folder.uuid); diff --git a/src/context/virtual-drive/folders/application/FolderMover.test.ts b/src/context/virtual-drive/folders/application/FolderMover.test.ts index 84d72a2319..fc1b7806a8 100644 --- a/src/context/virtual-drive/folders/application/FolderMover.test.ts +++ b/src/context/virtual-drive/folders/application/FolderMover.test.ts @@ -7,6 +7,7 @@ import { FolderMother } from '../domain/__test-helpers__/FolderMother'; import { FolderDescendantsPathUpdater } from './FolderDescendantsPathUpdater'; import * as moveFolderModule from '../../../../infra/drive-server/services/folder/services/move-folder'; import { call, partialSpyOn } from 'tests/vitest/utils.helper'; +import { DriveServerError } from 'src/infra/drive-server/drive-server.error'; describe('Folder Mover', () => { let repository: FolderRepositoryMock; @@ -59,7 +60,7 @@ describe('Folder Mover', () => { const destinationPath = new FolderPath(path.join(parentDestination.path, original.name)); - moveFolderMock.mockResolvedValue({ data: {} as any }); + moveFolderMock.mockResolvedValue({ data: {} }); repository.matchingPartialMock.mockReturnValueOnce([]).mockReturnValueOnce([parentDestination]); @@ -76,7 +77,7 @@ describe('Folder Mover', () => { const destinationPath = new FolderPath(path.join(parentDestination.path, original.name)); - moveFolderMock.mockResolvedValue({ data: {} as any }); + moveFolderMock.mockResolvedValue({ data: {} }); repository.matchingPartialMock.mockReturnValueOnce([]).mockReturnValueOnce([parentDestination]); @@ -96,8 +97,8 @@ describe('Folder Mover', () => { const destinationPath = new FolderPath(path.join(parentDestination.path, original.name)); - const error = new Error('move failed'); - moveFolderMock.mockResolvedValue({ error } as any); + const error = new DriveServerError('UNKNOWN'); + moveFolderMock.mockResolvedValue({ error }); repository.matchingPartialMock.mockReturnValueOnce([]).mockReturnValueOnce([parentDestination]); diff --git a/src/context/virtual-drive/folders/application/FolderRenamer.test.ts b/src/context/virtual-drive/folders/application/FolderRenamer.test.ts index 7a6d02838c..16d20890ad 100644 --- a/src/context/virtual-drive/folders/application/FolderRenamer.test.ts +++ b/src/context/virtual-drive/folders/application/FolderRenamer.test.ts @@ -8,6 +8,7 @@ import { FolderMother } from '../domain/__test-helpers__/FolderMother'; import { EventBusMock } from '../../shared/__mocks__/EventBusMock'; import { FolderDescendantsPathUpdater } from './FolderDescendantsPathUpdater'; import * as renameFolderModule from '../../../../infra/drive-server/services/folder/services/rename-folder'; +import { DriveServerError } from '../../../../infra/drive-server/drive-server.error'; import { call, partialSpyOn } from 'tests/vitest/utils.helper'; describe('Folder Renamer', () => { @@ -44,7 +45,7 @@ describe('Folder Renamer', () => { path: destination.value, }); - renameFolderMock.mockResolvedValue({ data: {} as any }); + renameFolderMock.mockResolvedValue({ data: {} }); return { folder, @@ -130,8 +131,8 @@ describe('Folder Renamer', () => { const folder = FolderMother.any(); const destination = FolderPathMother.onFolder(folder.dirname); - const error = new Error('rename failed'); - renameFolderMock.mockResolvedValue({ error } as any); + const error = new DriveServerError('UNKNOWN'); + renameFolderMock.mockResolvedValue({ error }); await expect(renamer.run(folder, destination)).rejects.toBe(error); diff --git a/src/context/virtual-drive/folders/application/create/FolderCreator.test.ts b/src/context/virtual-drive/folders/application/create/FolderCreator.test.ts index 5579a14235..494569c094 100644 --- a/src/context/virtual-drive/folders/application/create/FolderCreator.test.ts +++ b/src/context/virtual-drive/folders/application/create/FolderCreator.test.ts @@ -9,6 +9,7 @@ import { FolderRemoteFileSystemMock } from '../../__mocks__/FolderRemoteFileSyst import { FolderRepositoryMock } from '../../__mocks__/FolderRepositoryMock'; import { FolderPathMother } from '../../domain/__test-helpers__/FolderPathMother'; import { FolderMother } from '../../domain/__test-helpers__/FolderMother'; +import { clearPendingCreations } from './PendingFolderCreationTracker'; describe('Folder Creator', () => { let repository: FolderRepositoryMock; @@ -21,6 +22,7 @@ describe('Folder Creator', () => { repository = new FolderRepositoryMock(); remote = new FolderRemoteFileSystemMock(); eventBus = new EventBusMock(); + clearPendingCreations(); const parentFolderFinder = new ParentFolderFinder(repository); @@ -106,4 +108,33 @@ describe('Folder Creator', () => { expect.arrayContaining([expect.objectContaining({ aggregateId: createdFolder.uuid })]), ); }); + + it('throws when remote folder creation fails with non-recoverable error', async () => { + const path = FolderPathMother.any(); + const parent = FolderMother.fromPartial({ path: path.dirname() }); + + remote.shouldFailPersistWith(path.name(), parent.uuid, 'UNHANDLED'); + repository.matchingPartialMock.mockReturnValueOnce([]).mockReturnValueOnce([parent]).mockReturnValueOnce([parent]); + + await expect(SUT.run(path.value)).rejects.toThrow(`Could not create folder ${path.value}: UNHANDLED`); + }); + + it('recovers from ALREADY_EXISTS by finding the folder remotely', async () => { + const path = FolderPathMother.any(); + const parent = FolderMother.fromPartial({ path: path.dirname() }); + const existingFolder = FolderMother.fromPartial({ + path: path.value, + parentId: parent.id, + }); + + remote.shouldFailPersistWith(path.name(), parent.uuid, 'ALREADY_EXISTS'); + remote.shouldFindFolder(existingFolder); + + repository.matchingPartialMock.mockReturnValueOnce([]).mockReturnValueOnce([parent]).mockReturnValueOnce([parent]); + + await SUT.run(path.value); + + expect(repository.addMock).toBeCalledWith(expect.objectContaining({ uuid: existingFolder.uuid })); + expect(eventBus.publishMock).not.toBeCalled(); + }); }); diff --git a/src/context/virtual-drive/folders/application/create/FolderCreator.ts b/src/context/virtual-drive/folders/application/create/FolderCreator.ts index d884069072..7a18d7bed4 100644 --- a/src/context/virtual-drive/folders/application/create/FolderCreator.ts +++ b/src/context/virtual-drive/folders/application/create/FolderCreator.ts @@ -12,6 +12,7 @@ import { FolderUuid } from '../../domain/FolderUuid'; import { FolderInPathAlreadyExistsError } from '../../domain/errors/FolderInPathAlreadyExistsError'; import { RemoteFileSystem } from '../../domain/file-systems/RemoteFileSystem'; import { ParentFolderFinder } from '../ParentFolderFinder'; +import { runTrackingCreation } from './PendingFolderCreationTracker'; @Service() export class FolderCreator { @@ -39,36 +40,53 @@ export class FolderCreator { } async run(path: string): Promise { - const folderPath = new FolderPath(path); + await runTrackingCreation({ + path, + action: async () => { + const folderPath = new FolderPath(path); - await this.ensureItDoesNotExists(folderPath); - const parent = await this.parentFolderFinder.run(folderPath); - const parentId = await this.findParentId(folderPath); + await this.ensureItDoesNotExists(folderPath); + const parent = await this.parentFolderFinder.run(folderPath); + const parentId = await this.findParentId(folderPath); - const response = await this.remote.persist(folderPath.name(), parent.uuid); + const response = await this.remote.persist(folderPath.name(), parent.uuid); - if (response.isLeft()) { - logger.error({ - msg: 'Error creating folder:', - error: response.getLeft(), - }); - return; - } + if (response.isLeft()) { + const error = response.getLeft(); + + logger.error({ + msg: 'Error creating folder:', + error, + }); + + if (error === 'ALREADY_EXISTS') { + const existingFolder = await this.remote.searchWith(parentId, folderPath); - const dto = response.getRight(); + if (existingFolder) { + await this.repository.add(existingFolder); + return; + } + } - const folder = Folder.create( - new FolderId(dto.id), - new FolderUuid(dto.uuid), - folderPath, - parentId, - FolderCreatedAt.fromString(dto.createdAt), - FolderUpdatedAt.fromString(dto.updatedAt), - ); + throw new Error(`Could not create folder ${folderPath.value}: ${error}`); + } - await this.repository.add(folder); + const dto = response.getRight(); - const events = folder.pullDomainEvents(); - this.eventBus.publish(events); + const folder = Folder.create( + new FolderId(dto.id), + new FolderUuid(dto.uuid), + folderPath, + parentId, + FolderCreatedAt.fromString(dto.createdAt), + FolderUpdatedAt.fromString(dto.updatedAt), + ); + + await this.repository.add(folder); + + const events = folder.pullDomainEvents(); + this.eventBus.publish(events); + }, + }); } } diff --git a/src/context/virtual-drive/folders/application/create/FolderCreatorFromServerFolder.ts b/src/context/virtual-drive/folders/application/create/FolderCreatorFromServerFolder.ts index 999e2d161e..e903df7c6f 100644 --- a/src/context/virtual-drive/folders/application/create/FolderCreatorFromServerFolder.ts +++ b/src/context/virtual-drive/folders/application/create/FolderCreatorFromServerFolder.ts @@ -10,7 +10,7 @@ export function createFolderFromServerFolder(server: ServerFolder, relativePath: parentId: server.parentId as number, updatedAt: server.updatedAt, createdAt: server.createdAt, - path: path, + path, status: server.status, }); } diff --git a/src/context/virtual-drive/folders/application/create/PendingFolderCreationTracker.test.ts b/src/context/virtual-drive/folders/application/create/PendingFolderCreationTracker.test.ts new file mode 100644 index 0000000000..2b5761309c --- /dev/null +++ b/src/context/virtual-drive/folders/application/create/PendingFolderCreationTracker.test.ts @@ -0,0 +1,43 @@ +import { clearPendingCreations, runAfterParentCreations, runTrackingCreation } from './PendingFolderCreationTracker'; + +describe('PendingFolderCreationTracker', () => { + beforeEach(() => { + clearPendingCreations(); + }); + + it('waits for a parent folder creation before running child action', async () => { + let resolveParentCreation: (() => void) | undefined; + const events: string[] = []; + + const parentPromise = runTrackingCreation({ + path: '/Documents', + action: async () => { + events.push('parent-started'); + + await new Promise((resolve) => { + resolveParentCreation = resolve; + }); + + events.push('parent-finished'); + }, + }); + + const childPromise = runAfterParentCreations({ + path: '/Documents/Taxes/file.txt', + action: async () => { + events.push('child-started'); + }, + }); + + await Promise.resolve(); + + expect(events).toStrictEqual(['parent-started']); + + resolveParentCreation?.(); + + await parentPromise; + await childPromise; + + expect(events).toStrictEqual(['parent-started', 'parent-finished', 'child-started']); + }); +}); diff --git a/src/context/virtual-drive/folders/application/create/PendingFolderCreationTracker.ts b/src/context/virtual-drive/folders/application/create/PendingFolderCreationTracker.ts new file mode 100644 index 0000000000..c3dff3e10d --- /dev/null +++ b/src/context/virtual-drive/folders/application/create/PendingFolderCreationTracker.ts @@ -0,0 +1,81 @@ +import { posix } from 'node:path'; + +type ActionProps = { + path: string; + action: () => Promise; +}; + +const pendingFolderCreationByPath = new Map>(); + +function normalizePath(path: string): string { + const normalizedPath = posix.normalize(path); + + if (normalizedPath.length > 1 && normalizedPath.endsWith('/')) { + return normalizedPath.slice(0, -1); + } + + return normalizedPath; +} + +function getParentPaths(path: string): string[] { + const normalizedPath = normalizePath(path); + const parentPaths: string[] = []; + + let currentPath = posix.dirname(normalizedPath); + + while (currentPath !== '.' && currentPath !== '/') { + parentPaths.unshift(currentPath); + currentPath = posix.dirname(currentPath); + } + + return parentPaths; +} + +function getPendingParentCreations(path: string): Promise[] { + const parentPaths = getParentPaths(path); + + return parentPaths + .map((parentPath) => pendingFolderCreationByPath.get(parentPath)) + .filter((pending): pending is Promise => Boolean(pending)); +} + +function track(path: string, creationPromise: Promise): void { + const normalizedPath = normalizePath(path); + + const pendingPromise = creationPromise.then(() => undefined).catch(() => undefined); + + pendingFolderCreationByPath.set(normalizedPath, pendingPromise); + + void pendingPromise.finally(() => { + if (pendingFolderCreationByPath.get(normalizedPath) === pendingPromise) { + pendingFolderCreationByPath.delete(normalizedPath); + } + }); +} + +export async function runAfterParentCreations({ path, action }: ActionProps): Promise { + const pendingParentCreations = getPendingParentCreations(path); + + if (pendingParentCreations.length > 0) { + await Promise.all(pendingParentCreations); + } + + return action(); +} + +export async function runTrackingCreation({ path, action }: ActionProps): Promise { + const pendingParentCreations = getPendingParentCreations(path); + + if (pendingParentCreations.length > 0) { + await Promise.all(pendingParentCreations); + } + + const creationPromise = action(); + track(path, creationPromise); + + return creationPromise; +} + +export function clearPendingCreations(): void { + pendingFolderCreationByPath.clear(); +} diff --git a/src/context/virtual-drive/folders/infrastructure/SyncMessengers/MainProcessSyncFolderMessenger.ts b/src/context/virtual-drive/folders/infrastructure/SyncMessengers/MainProcessSyncFolderMessenger.ts index b4c6dddc26..9541807203 100644 --- a/src/context/virtual-drive/folders/infrastructure/SyncMessengers/MainProcessSyncFolderMessenger.ts +++ b/src/context/virtual-drive/folders/infrastructure/SyncMessengers/MainProcessSyncFolderMessenger.ts @@ -1,5 +1,5 @@ import { addVirtualDriveIssue } from '../../../../../apps/main/issues/virtual-drive'; -import { setTrayStatus } from '../../../../../apps/main/tray/tray'; +import { setTrayStatus } from '../../../../../apps/main/tray/tray-setup'; import { virtualDriveUpdate } from '../../../../../apps/main/windows'; import { VirtualDriveFolderIssue } from '../../../../../shared/issues/VirtualDriveIssue'; import { SyncFolderMessenger } from '../../domain/SyncFolderMessenger'; @@ -35,7 +35,7 @@ export class MainProcessSyncFolderMessenger implements SyncFolderMessenger { virtualDriveUpdate({ action: 'CREATING_FOLDER', oldName: undefined, - name: name, + name, progress: undefined, }); } @@ -46,7 +46,7 @@ export class MainProcessSyncFolderMessenger implements SyncFolderMessenger { virtualDriveUpdate({ action: 'FOLDER_CREATED', oldName: undefined, - name: name, + name, progress: undefined, }); } diff --git a/src/context/virtual-drive/shared/application/PlatformPathConverter.test.ts b/src/context/virtual-drive/shared/application/PlatformPathConverter.test.ts deleted file mode 100644 index 22301b8179..0000000000 --- a/src/context/virtual-drive/shared/application/PlatformPathConverter.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { PlatformPathConverter } from './PlatformPathConverter'; - -describe('PlatformPathConverter', () => { - describe('winToPosix', () => { - it('works with a single level', () => { - const win = '\\New Folder (4)'; - const posix = PlatformPathConverter.winToPosix(win); - - expect(posix).toBe('/New Folder (4)'); - }); - it('works with two levels', () => { - const win = '\\New Folder (4)\\Subfolder'; - const posix = PlatformPathConverter.winToPosix(win); - - expect(posix).toBe('/New Folder (4)/Subfolder'); - }); - }); -}); diff --git a/src/context/virtual-drive/shared/application/PlatformPathConverter.ts b/src/context/virtual-drive/shared/application/PlatformPathConverter.ts deleted file mode 100644 index 58bebe8eaf..0000000000 --- a/src/context/virtual-drive/shared/application/PlatformPathConverter.ts +++ /dev/null @@ -1,26 +0,0 @@ -import path from 'path'; - -export class PlatformPathConverter { - static convertAnyToCurrent(anyPlatformPath: string): string { - const fromPlatform = anyPlatformPath.includes(path.posix.sep) ? path.posix : path.win32; - - const toPlatform = path.sep === path.posix.sep ? path.posix : path.win32; - - return anyPlatformPath.split(fromPlatform.sep).join(toPlatform.sep); - } - - static winToPosix(win: string): string { - return win.split(path.win32.sep).join(path.posix.sep); - } - - static posixToWin(posix: string): string { - return posix.split(path.posix.sep).join(path.win32.sep); - } - - static getFatherPathPosix(posixPath: string): string { - const pathArray = posixPath.split('/'); - pathArray.pop(); - const parentPath = pathArray.join('/'); - return this.winToPosix(parentPath); - } -} diff --git a/src/core/auto-launch/handlers.ts b/src/core/auto-launch/handlers.ts new file mode 100644 index 0000000000..e29720a92d --- /dev/null +++ b/src/core/auto-launch/handlers.ts @@ -0,0 +1,7 @@ +import { ipcMain } from 'electron'; + +import { desktopEntryIsPresent, toggleDesktopEntry } from './service'; + +ipcMain.handle('is-auto-launch-enabled', desktopEntryIsPresent); + +ipcMain.handle('toggle-auto-launch', toggleDesktopEntry); diff --git a/src/core/auto-launch/service.test.ts b/src/core/auto-launch/service.test.ts new file mode 100644 index 0000000000..b7fae21bb8 --- /dev/null +++ b/src/core/auto-launch/service.test.ts @@ -0,0 +1,99 @@ +import * as fsPromises from 'node:fs/promises'; +import * as fsSync from 'node:fs'; +import * as serviceModule from './service'; +import { calls, deepMocked } from 'tests/vitest/utils.helper'; +import { loggerMock } from 'tests/vitest/mocks.helper'; + +vi.mock(import('node:fs/promises')); +vi.mock('node:fs', () => ({ existsSync: vi.fn() })); +vi.mock('node:os', () => ({ homedir: vi.fn().mockReturnValue('/home/user') })); +vi.mock('../../../../package.json', () => ({ default: { name: 'internxt', version: '1.0.0' } })); + +const desktopFilePath = '/home/user/.config/autostart'; +const desktopFile = `${desktopFilePath}/internxt.desktop`; + +describe('auto-launch service', () => { + let mkdirMock: ReturnType>; + let writeFileMock: ReturnType>; + let existsSyncMock: ReturnType>; + let unlinkMock: ReturnType>; + + beforeEach(() => { + mkdirMock = deepMocked(fsPromises.mkdir); + writeFileMock = deepMocked(fsPromises.writeFile); + existsSyncMock = deepMocked(fsSync.existsSync); + unlinkMock = deepMocked(fsPromises.unlink); + }); + + describe('createDesktopEntry', () => { + it('should create the desktop entry', async () => { + mkdirMock.mockResolvedValue(undefined); + writeFileMock.mockResolvedValue(undefined); + + await serviceModule.createDesktopEntry(); + + calls(mkdirMock).toContainEqual([desktopFilePath, { recursive: true }]); + expect(writeFileMock).toBeCalled(); + expect(loggerMock.error).not.toBeCalled(); + }); + + it('should log error if createDesktopEntry fails', async () => { + mkdirMock.mockRejectedValue(new Error('fail')); + await serviceModule.createDesktopEntry(); + expect(loggerMock.error).toBeCalled(); + }); + }); + + describe('deleteDesktopEntry', () => { + it('should delete the desktop entry if it exists', async () => { + existsSyncMock.mockReturnValue(true); + unlinkMock.mockResolvedValue(undefined); + await serviceModule.deleteDesktopEntry(); + calls(unlinkMock).toContainEqual(desktopFile); + expect(loggerMock.error).not.toBeCalled(); + }); + + it('should not try to delete if desktop entry does not exist', async () => { + existsSyncMock.mockReturnValue(false); + await serviceModule.deleteDesktopEntry(); + expect(unlinkMock).not.toBeCalled(); + }); + + it('should log error if deleteDesktopEntry fails', async () => { + existsSyncMock.mockReturnValue(true); + unlinkMock.mockRejectedValue(new Error('fail')); + await serviceModule.deleteDesktopEntry(); + expect(loggerMock.error).toBeCalled(); + }); + }); + + describe('desktopEntryIsPresent', () => { + it('should return true if desktop entry exists', () => { + existsSyncMock.mockReturnValue(true); + expect(serviceModule.desktopEntryIsPresent()).toBe(true); + }); + + it('should return false if desktop entry does not exist', () => { + existsSyncMock.mockReturnValue(false); + expect(serviceModule.desktopEntryIsPresent()).toBe(false); + }); + }); + + describe('toggleDesktopEntry', () => { + it('should call unlink when entry exists', async () => { + existsSyncMock.mockReturnValue(true); + unlinkMock.mockResolvedValue(undefined); + await serviceModule.toggleDesktopEntry(); + expect(unlinkMock).toBeCalled(); + }); + + it('should create entry when it does not exist', async () => { + existsSyncMock.mockReturnValue(false); + mkdirMock.mockResolvedValue(undefined); + writeFileMock.mockResolvedValue(undefined); + await serviceModule.toggleDesktopEntry(); + expect(mkdirMock).toBeCalled(); + expect(writeFileMock).toBeCalled(); + }); + }); +}); diff --git a/src/core/auto-launch/service.ts b/src/core/auto-launch/service.ts new file mode 100644 index 0000000000..119f71f376 --- /dev/null +++ b/src/core/auto-launch/service.ts @@ -0,0 +1,52 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { mkdir, writeFile, unlink } from 'node:fs/promises'; +import { existsSync } from 'node:fs'; +import { homedir } from 'node:os'; + +import packageJson from '../../../package.json'; + +const fileName = `${packageJson.name}.desktop`; +const desktopFilePath = `${homedir()}/.config/autostart`; +const desktopFile = `${desktopFilePath}/${fileName}`; +const execPath = process.execPath; + +export async function createDesktopEntry() { + const fileContent = `[Desktop Entry] + Type=Application + Version=${packageJson.version} + Name=${packageJson.name} + Comment=${packageJson.name} startup script + Exec="${execPath}" --process-start-args --hidden + StartupNotify=false + Terminal=false + `; + + try { + await mkdir(desktopFilePath, { recursive: true }); + await writeFile(desktopFile, fileContent, { mode: 0o755 }); + } catch (err) { + logger.error({ msg: 'Error creating desktop entry for auto-launch:', err }); + } +} + +export async function deleteDesktopEntry() { + if (existsSync(desktopFile)) { + try { + await unlink(desktopFile); + } catch (err) { + logger.error({ msg: 'Error deleting desktop entry for auto-launch:', err }); + } + } +} + +export function desktopEntryIsPresent(): boolean { + return existsSync(desktopFile); +} + +export async function toggleDesktopEntry() { + if (desktopEntryIsPresent()) { + await deleteDesktopEntry(); + return; + } + await createDesktopEntry(); +} diff --git a/src/core/bootstrap/bootstrap-runtime-state.ts b/src/core/bootstrap/bootstrap-runtime-state.ts new file mode 100644 index 0000000000..80c0d4d645 --- /dev/null +++ b/src/core/bootstrap/bootstrap-runtime-state.ts @@ -0,0 +1,11 @@ +export type PendingUpdateInfo = { version: string } | null; + +let pendingUpdateInfo: PendingUpdateInfo = null; + +export function getPendingUpdateInfo() { + return pendingUpdateInfo; +} + +export function setPendingUpdateInfo(updateInfo: Exclude) { + pendingUpdateInfo = updateInfo; +} diff --git a/src/core/bootstrap/main-process-bootstrap.test.ts b/src/core/bootstrap/main-process-bootstrap.test.ts new file mode 100644 index 0000000000..e3f69909ff --- /dev/null +++ b/src/core/bootstrap/main-process-bootstrap.test.ts @@ -0,0 +1,40 @@ +import { bootstrapMainProcess } from './main-process-bootstrap'; +import * as registerAppReadyFlowModule from './register-app-ready-flow'; +import * as registerMainIpcHandlersModule from './register-main-ipc-handlers'; +import * as registerProcessHandlersModule from './register-process-handlers'; +import * as registerSecondInstanceFlowModule from './register-second-instance-flow'; +import * as registerSessionEventHandlersModule from './register-session-event-handlers'; +import * as setupEnvironmentDebugToolsModule from './setup-environment-debug-tools'; +import { partialSpyOn } from 'tests/vitest/utils.helper'; + +describe('main-process-bootstrap', () => { + const setupEnvironmentDebugToolsMock = partialSpyOn(setupEnvironmentDebugToolsModule, 'setupEnvironmentDebugTools'); + const registerMainIpcHandlersMock = partialSpyOn(registerMainIpcHandlersModule, 'registerMainIpcHandlers'); + const registerAppReadyFlowMock = partialSpyOn(registerAppReadyFlowModule, 'registerAppReadyFlow'); + const registerSecondInstanceFlowMock = partialSpyOn(registerSecondInstanceFlowModule, 'registerSecondInstanceFlow'); + const registerSessionEventHandlersMock = partialSpyOn( + registerSessionEventHandlersModule, + 'registerSessionEventHandlers', + ); + const registerProcessHandlersMock = partialSpyOn(registerProcessHandlersModule, 'registerProcessHandlers'); + + beforeEach(() => { + setupEnvironmentDebugToolsMock.mockImplementation(() => undefined); + registerMainIpcHandlersMock.mockImplementation(() => undefined); + registerAppReadyFlowMock.mockImplementation(() => undefined); + registerSecondInstanceFlowMock.mockImplementation(() => undefined); + registerSessionEventHandlersMock.mockImplementation(() => undefined); + registerProcessHandlersMock.mockImplementation(() => undefined); + }); + + it('should register all main process bootstrap flows', () => { + bootstrapMainProcess(); + + expect(setupEnvironmentDebugToolsMock).toBeCalled(); + expect(registerMainIpcHandlersMock).toBeCalled(); + expect(registerAppReadyFlowMock).toBeCalled(); + expect(registerSecondInstanceFlowMock).toBeCalled(); + expect(registerSessionEventHandlersMock).toBeCalled(); + expect(registerProcessHandlersMock).toBeCalled(); + }); +}); diff --git a/src/core/bootstrap/main-process-bootstrap.ts b/src/core/bootstrap/main-process-bootstrap.ts new file mode 100644 index 0000000000..edcd5e9119 --- /dev/null +++ b/src/core/bootstrap/main-process-bootstrap.ts @@ -0,0 +1,15 @@ +import { registerAppReadyFlow } from './register-app-ready-flow'; +import { setupEnvironmentDebugTools } from './setup-environment-debug-tools'; +import { registerMainIpcHandlers } from './register-main-ipc-handlers'; +import { registerProcessHandlers } from './register-process-handlers'; +import { registerSecondInstanceFlow } from './register-second-instance-flow'; +import { registerSessionEventHandlers } from './register-session-event-handlers'; + +export function bootstrapMainProcess() { + setupEnvironmentDebugTools(); + registerMainIpcHandlers(); + registerAppReadyFlow(); + registerSecondInstanceFlow(); + registerSessionEventHandlers(); + registerProcessHandlers(); +} diff --git a/src/core/bootstrap/register-app-ready-flow.ts b/src/core/bootstrap/register-app-ready-flow.ts new file mode 100644 index 0000000000..3340783411 --- /dev/null +++ b/src/core/bootstrap/register-app-ready-flow.ts @@ -0,0 +1,56 @@ +import { app } from 'electron'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import eventBus from '../../apps/main/event-bus'; +import { getIsLoggedIn } from '../../apps/main/auth/handlers'; +import { createAuthWindow } from '../../apps/main/windows/auth'; +import { setupTrayIcon, setTrayStatus } from '../../apps/main/tray/tray-setup'; +import { broadcastToWindows } from '../../apps/main/windows'; +import { setupThemeListener } from '../theme'; +import { registerAvailableUserProductsHandlers } from '../../backend/features/payments/ipc/register-available-user-products-handlers'; +import { setupAppImageDeeplink } from '../../apps/main/auth/deeplink/setup-appimage-deeplink'; +import { INTERNXT_VERSION } from '../utils/utils'; +import { checkForUpdates } from '../../apps/main/auto-update/check-for-updates'; +import { setPendingUpdateInfo } from './bootstrap-runtime-state'; + +export function registerAppReadyFlow() { + app + .whenReady() + .then(async () => { + /** + * v.2.5.1 + * Esteban Galvis Triana + * .AppImage users may experience login issues because the deeplink protocol + * is not registered automatically, unlike with .deb packages. + * This function manually registers the protocol handler for .AppImage installations. + */ + await setupAppImageDeeplink(); + /** + * TODO: Nautilus extension disabled temporarily + * v.2.5.4 + * Esteban Galvis Triana + * The Nautilus extension will be temporarily disabled + * while the exact behavior of the context menu options is being determined. + */ + // await installNautilusExtension(); + setupThemeListener(); + setupTrayIcon(); + + eventBus.emit('APP_IS_READY'); + const isLoggedIn = getIsLoggedIn(); + + if (!isLoggedIn) { + await createAuthWindow(); + setTrayStatus('IDLE'); + } + + await checkForUpdates({ + currentVersion: INTERNXT_VERSION, + onUpdateAvailable: (updateInfo) => { + setPendingUpdateInfo(updateInfo); + broadcastToWindows('update-available', updateInfo); + }, + }); + registerAvailableUserProductsHandlers(); + }) + .catch((exc) => logger.error({ msg: 'Error starting app', exc })); +} diff --git a/src/core/bootstrap/register-main-ipc-handlers.ts b/src/core/bootstrap/register-main-ipc-handlers.ts new file mode 100644 index 0000000000..ae8c8b06d1 --- /dev/null +++ b/src/core/bootstrap/register-main-ipc-handlers.ts @@ -0,0 +1,17 @@ +import dns from 'node:dns'; +import { ipcMain } from 'electron'; +import { getPendingUpdateInfo } from './bootstrap-runtime-state'; + +export function registerMainIpcHandlers() { + ipcMain.handle('get-update-status', () => getPendingUpdateInfo()); + + ipcMain.handle('check-internet-connection', async () => { + return new Promise((resolve) => { + dns.lookup('google.com', (err) => { + resolve(!err); + }); + + setTimeout(() => resolve(false), 3000); + }); + }); +} diff --git a/src/core/bootstrap/register-process-handlers.ts b/src/core/bootstrap/register-process-handlers.ts new file mode 100644 index 0000000000..be2c1c73c8 --- /dev/null +++ b/src/core/bootstrap/register-process-handlers.ts @@ -0,0 +1,25 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; + +export function registerProcessHandlers() { + process.on('uncaughtException', (error) => { + /** + * v.2.5.1 + * Esteban Galvis Triana + * EPIPE errors close stdout, so they must be handled specially to avoid infinite logging loops. + */ + if ('code' in error && error.code === 'EPIPE') { + return; + } + + if (error.name === 'AbortError') { + logger.debug({ msg: 'Fetch request was aborted' }); + return; + } + + try { + logger.error({ msg: 'Uncaught exception in main process: ', error }); + } catch { + return; + } + }); +} diff --git a/src/core/bootstrap/register-second-instance-flow.ts b/src/core/bootstrap/register-second-instance-flow.ts new file mode 100644 index 0000000000..4f951f1b91 --- /dev/null +++ b/src/core/bootstrap/register-second-instance-flow.ts @@ -0,0 +1,19 @@ +import { app } from 'electron'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { handleDeeplink } from '../../apps/main/auth/deeplink/handle-deeplink'; + +export function registerSecondInstanceFlow() { + app.on('second-instance', async (_, argv) => { + logger.debug({ tag: 'AUTH', msg: 'Deeplink received on second instance, processing...' }); + const deeplinkArg = argv.find((arg) => arg.startsWith('internxt://')); + if (!deeplinkArg) { + return; + } + + try { + await handleDeeplink({ url: deeplinkArg }); + } catch (error) { + logger.error({ tag: 'AUTH', msg: 'Error handling deeplink', error }); + } + }); +} diff --git a/src/core/bootstrap/register-session-event-handlers.ts b/src/core/bootstrap/register-session-event-handlers.ts new file mode 100644 index 0000000000..edd48a8eda --- /dev/null +++ b/src/core/bootstrap/register-session-event-handlers.ts @@ -0,0 +1,89 @@ +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import eventBus from '../../apps/main/event-bus'; +import { AppDataSource, resetAppDataSourceOnLogout } from '../../apps/main/database/data-source'; +import { getOrCreateWidged, getWidget, setBoundsOfWidgetByPath } from '../../apps/main/windows/widget'; +import { createAuthWindow, getAuthWindow } from '../../apps/main/windows/auth'; +import configStore from '../../apps/main/config'; +import { getTray, setTrayStatus } from '../../apps/main/tray/tray-setup'; +import { openOnboardingWindow } from '../../apps/main/windows/onboarding'; +import { getTheme } from '../theme'; +import { getAntivirusManager } from '../../apps/main/antivirus/antivirusManager'; +import { trySetupAntivirusIpcAndInitialize } from '../../apps/main/background-processes/antivirus/try-setup-antivirus-ipc-and-initialize'; +import { getUserAvailableProductsAndStore } from '../../backend/features/payments/services/get-user-available-products-and-store'; +import { registerBackupHandlers } from '../../backend/features/backup/register-backup-handlers'; +import { startBackupsIfAvailable } from '../../backend/features/backup/start-backups-if-available'; +import { stopVirtualDrive } from '../../backend/features/virtual-drive/services/virtual-drive.service'; + +function onWidgetIsReady() { + registerBackupHandlers(); + startBackupsIfAvailable(); +} + +async function onUserLoggedIn() { + try { + if (!AppDataSource.isInitialized) { + await AppDataSource.initialize(); + eventBus.emit('APP_DATA_SOURCE_INITIALIZED'); + } + + getAuthWindow()?.hide(); + + getTheme(); + + setTrayStatus('IDLE'); + const widget = await getOrCreateWidged(); + const tray = getTray(); + if (widget && tray) { + setBoundsOfWidgetByPath(widget, tray); + } + + setTimeout(() => { + const authWin = getAuthWindow(); + if (authWin && !authWin.isDestroyed()) { + authWin.destroy(); + } + }, 300); + + const lastOnboardingShown = configStore.get('lastOnboardingShown'); + + if (!lastOnboardingShown) { + openOnboardingWindow(); + } else if (widget) { + widget.show(); + } + await getUserAvailableProductsAndStore(); + await trySetupAntivirusIpcAndInitialize(); + } catch (error) { + logger.error({ + msg: 'Error on main process while handling USER_LOGGED_IN event:', + error, + }); + } +} + +async function onUserLoggedOut() { + setTrayStatus('IDLE'); + const widget = getWidget(); + + if (widget) { + widget.hide(); + + void getAntivirusManager().shutdown(); + } + + await createAuthWindow(); + + if (widget) { + widget.destroy(); + } + await stopVirtualDrive(); + await resetAppDataSourceOnLogout(); + + // await uninstallNautilusExtension(); +} + +export function registerSessionEventHandlers() { + eventBus.on('WIDGET_IS_READY', onWidgetIsReady); + eventBus.on('USER_LOGGED_IN', onUserLoggedIn); + eventBus.on('USER_LOGGED_OUT', onUserLoggedOut); +} diff --git a/src/core/bootstrap/setup-environment-debug-tools.ts b/src/core/bootstrap/setup-environment-debug-tools.ts new file mode 100644 index 0000000000..03f53b8aae --- /dev/null +++ b/src/core/bootstrap/setup-environment-debug-tools.ts @@ -0,0 +1,12 @@ +export function setupEnvironmentDebugTools() { + if (process.env.NODE_ENV === 'production') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const sourceMapSupport = require('source-map-support'); + sourceMapSupport.install(); + } + + if (process.env.NODE_ENV === 'development') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + require('electron-debug')({ showDevTools: false }); + } +} diff --git a/src/core/electron/paths.ts b/src/core/electron/paths.ts index 1d53f5218b..491834c6b7 100644 --- a/src/core/electron/paths.ts +++ b/src/core/electron/paths.ts @@ -13,6 +13,14 @@ const THUMBNAILS_FOLDER = path.join(os.homedir(), '.cache', 'thumbnails'); const TEMPORAL_FOLDER = app.getPath('temp'); const INTERNXT_DRIVE_TMP = path.join(TEMPORAL_FOLDER, 'internxt-drive-tmp'); const DOWNLOADED = join(INTERNXT, 'downloaded'); +const FUSE_DAEMON_LOG = join(LOGS, 'fuse-daemon.log'); +const FUSE_DAEMON_SOCKET = join(process.env.XDG_RUNTIME_DIR ?? '/tmp', 'internxt-fuse.sock'); +const FUSE_DAEMON_BINARY = app.isPackaged + ? join(process.resourcesPath, 'dist', 'fuse-daemon') + : join(__dirname, '../../../dist/fuse-daemon'); +const RESOURCES_PATH = app.isPackaged + ? path.join(process.resourcesPath, 'assets') + : path.join(__dirname, '../../../assets'); export const PATHS = { HOME_FOLDER_PATH, @@ -24,4 +32,8 @@ export const PATHS = { INTERNXT_DRIVE_TMP, ROOT_DRIVE_FOLDER, DOWNLOADED, + FUSE_DAEMON_LOG, + FUSE_DAEMON_SOCKET, + FUSE_DAEMON_BINARY, + RESOURCES_PATH, }; diff --git a/src/core/quit/quit.handler.test.ts b/src/core/quit/quit.handler.test.ts index bba3a44a07..14aaffcd7e 100644 --- a/src/core/quit/quit.handler.test.ts +++ b/src/core/quit/quit.handler.test.ts @@ -1,11 +1,11 @@ import { app, ipcMain } from 'electron'; import { call } from 'tests/vitest/utils.helper'; -import * as driveModule from '../../apps/drive'; +import * as virtualDriveServiceModule from '../../backend/features/virtual-drive/services/virtual-drive.service'; import { partialSpyOn } from 'tests/vitest/utils.helper'; import * as registerQuitHandlerModule from './quit.handler'; describe('quit', () => { - const stopAndClearFuseAppMock = partialSpyOn(driveModule, 'stopAndClearFuseApp'); + const stopVirtualDriveMock = partialSpyOn(virtualDriveServiceModule, 'stopVirtualDrive'); const appQuitMock = partialSpyOn(app, 'quit'); const appOnMock = partialSpyOn(app, 'on', false); const ipcMainOnMock = partialSpyOn(ipcMain, 'on', false); @@ -13,7 +13,7 @@ describe('quit', () => { beforeEach(() => { registerQuitHandlerMock.mockRestore(); - stopAndClearFuseAppMock.mockResolvedValue(undefined); + stopVirtualDriveMock.mockResolvedValue(undefined); }); it('should register user-quit handler', () => { @@ -32,7 +32,7 @@ describe('quit', () => { registerQuitHandlerModule.registerQuitHandler(); await (ipcMainOnMock.mock.calls[0][1] as () => Promise)(); - expect(stopAndClearFuseAppMock).toBeCalled(); + expect(stopVirtualDriveMock).toBeCalled(); }); it('should call app.quit on user-quit event', async () => { @@ -54,7 +54,7 @@ describe('quit', () => { await Promise.resolve(); expect(preventDefault).toBeCalled(); - expect(stopAndClearFuseAppMock).toBeCalled(); + expect(stopVirtualDriveMock).toBeCalled(); expect(appQuitMock).toBeCalled(); }); @@ -71,7 +71,7 @@ describe('quit', () => { beforeQuitHandler({ preventDefault } as unknown as Electron.Event); await Promise.resolve(); - expect(stopAndClearFuseAppMock).toBeCalledTimes(1); + expect(stopVirtualDriveMock).toBeCalledTimes(1); expect(preventDefault).not.toBeCalled(); }); }); diff --git a/src/core/quit/quit.handler.ts b/src/core/quit/quit.handler.ts index cf2931fe48..c3318b732e 100644 --- a/src/core/quit/quit.handler.ts +++ b/src/core/quit/quit.handler.ts @@ -1,5 +1,6 @@ import { app, ipcMain } from 'electron'; -import { stopAndClearFuseApp } from '../../apps/drive'; +import { logger } from '@internxt/drive-desktop-core/build/backend'; +import { stopVirtualDrive } from '../../backend/features/virtual-drive/services/virtual-drive.service'; export function registerQuitHandler() { let isQuitting = false; @@ -10,7 +11,9 @@ export function registerQuitHandler() { } isQuitting = true; - await stopAndClearFuseApp(); + logger.debug({ msg: '[APP] quitting, stopping virtual drive...' }); + await stopVirtualDrive(); + logger.debug({ msg: '[APP] virtual drive stopped, quitting' }); app.quit(); }; diff --git a/src/core/utils/get-multiple-paths-from-dialog.test.ts b/src/core/utils/get-multiple-paths-from-dialog.test.ts new file mode 100644 index 0000000000..92a962da90 --- /dev/null +++ b/src/core/utils/get-multiple-paths-from-dialog.test.ts @@ -0,0 +1,45 @@ +import { dialog } from 'electron'; +import * as pathTypeCheckerModule from '../../apps/shared/fs/PathTypeChecker '; +import { call, partialSpyOn } from '../../../tests/vitest/utils.helper'; +import { getMultiplePathsFromDialog } from './get-multiple-paths-from-dialog'; + +describe('get-multiple-paths-from-dialog', () => { + const showOpenDialogMock = partialSpyOn(dialog, 'showOpenDialog'); + const isFolderMock = partialSpyOn(pathTypeCheckerModule.PathTypeChecker, 'isFolder'); + + it('should return null when dialog is canceled', async () => { + showOpenDialogMock.mockResolvedValue({ canceled: true, filePaths: [] }); + + const result = await getMultiplePathsFromDialog(); + + expect(result).toBe(null); + }); + + it('should open dialog for directories by default', async () => { + showOpenDialogMock.mockResolvedValue({ canceled: true, filePaths: [] }); + + await getMultiplePathsFromDialog(); + + call(showOpenDialogMock).toStrictEqual({ properties: ['multiSelections', 'openDirectory'] }); + }); + + it('should open dialog for files when allowFiles is true', async () => { + showOpenDialogMock.mockResolvedValue({ canceled: true, filePaths: [] }); + + await getMultiplePathsFromDialog({ allowFiles: true }); + + call(showOpenDialogMock).toStrictEqual({ properties: ['multiSelections', 'openFile'] }); + }); + + it('should map selected paths into PathInfo entries', async () => { + showOpenDialogMock.mockResolvedValue({ canceled: false, filePaths: ['/home/dev/file.txt', '/home/dev/Documents'] }); + isFolderMock.mockResolvedValueOnce(false).mockResolvedValueOnce(true); + + const result = await getMultiplePathsFromDialog(); + + expect(result).toStrictEqual([ + { path: '/home/dev/file.txt', itemName: 'file.txt', isDirectory: false }, + { path: '/home/dev/Documents', itemName: 'Documents', isDirectory: true }, + ]); + }); +}); diff --git a/src/core/utils/get-multiple-paths-from-dialog.ts b/src/core/utils/get-multiple-paths-from-dialog.ts new file mode 100644 index 0000000000..bebf03aa48 --- /dev/null +++ b/src/core/utils/get-multiple-paths-from-dialog.ts @@ -0,0 +1,37 @@ +import { dialog } from 'electron'; +import type { OpenDialogOptions } from 'electron'; +import path from 'node:path'; +import { PathTypeChecker } from '../../apps/shared/fs/PathTypeChecker '; +import type { PathInfo } from '../../context/shared/domain/system-path/PathInfo'; + +type Props = { + allowFiles?: boolean; +}; + +export async function getMultiplePathsFromDialog({ allowFiles = false }: Props = {}): Promise { + const properties: NonNullable = [ + 'multiSelections', + allowFiles ? 'openFile' : 'openDirectory', + ]; + + const result = await dialog.showOpenDialog({ properties }); + + if (result.canceled || result.filePaths.length === 0) { + return null; + } + + const paths = await Promise.all( + result.filePaths.map(async (filePath) => { + const isFolder = await PathTypeChecker.isFolder(filePath); + const itemName = path.basename(filePath); + + return { + path: filePath, + itemName, + isDirectory: isFolder, + }; + }), + ); + + return paths; +} diff --git a/src/backend/features/backup/get-path-from-dialog.test.ts b/src/core/utils/get-path-from-dialog.test.ts similarity index 89% rename from src/backend/features/backup/get-path-from-dialog.test.ts rename to src/core/utils/get-path-from-dialog.test.ts index 47855c542f..aa512cf81b 100644 --- a/src/backend/features/backup/get-path-from-dialog.test.ts +++ b/src/core/utils/get-path-from-dialog.test.ts @@ -1,7 +1,6 @@ import { BrowserWindow, dialog } from 'electron'; import { call, partialSpyOn } from 'tests/vitest/utils.helper'; import { getPathFromDialog } from './get-path-from-dialog'; -import path from 'node:path'; import { mockDeep } from 'vitest-mock-extended'; describe('getPathFromDialog', () => { const mockWindow = mockDeep(); @@ -59,24 +58,24 @@ describe('getPathFromDialog', () => { expect(result).toBe(null); }); - it('should return the path with a trailing separator and the item name', async () => { + it('should return the normalized path and item name', async () => { mockedDialog.mockResolvedValue({ canceled: false, filePaths: ['/home/user/Documents'] }); const result = await getPathFromDialog(); expect(result).toStrictEqual({ - path: `/home/user/Documents${path.sep}`, + path: '/home/user/Documents', itemName: 'Documents', }); }); - it('should not duplicate the separator if the path already ends with one', async () => { - mockedDialog.mockResolvedValue({ canceled: false, filePaths: [`/home/user/Documents${path.sep}`] }); + it('should normalize when the selected path ends with a separator', async () => { + mockedDialog.mockResolvedValue({ canceled: false, filePaths: ['/home/user/Documents/'] }); const result = await getPathFromDialog(); expect(result).toStrictEqual({ - path: `/home/user/Documents${path.sep}`, + path: '/home/user/Documents', itemName: 'Documents', }); }); @@ -100,7 +99,7 @@ describe('getPathFromDialog', () => { expect(mockWindow.hide).not.toHaveBeenCalled(); expect(mockWindow.show).not.toHaveBeenCalled(); expect(result).toStrictEqual({ - path: `/home/user/folder${path.sep}`, + path: '/home/user/folder', itemName: 'folder', }); }); diff --git a/src/backend/features/backup/get-path-from-dialog.ts b/src/core/utils/get-path-from-dialog.ts similarity index 70% rename from src/backend/features/backup/get-path-from-dialog.ts rename to src/core/utils/get-path-from-dialog.ts index bfa2cdfd3f..e85cbb9220 100644 --- a/src/backend/features/backup/get-path-from-dialog.ts +++ b/src/core/utils/get-path-from-dialog.ts @@ -1,6 +1,7 @@ -import { BrowserWindow, dialog } from 'electron'; -import { PathInfo } from '../../../apps/main/device/service'; import path from 'node:path'; +import { BrowserWindow, dialog } from 'electron'; +import { PathInfo } from '../../context/shared/domain/system-path/PathInfo'; +import { createAbsolutePath } from '../../context/local/localFile/infrastructure/AbsolutePath'; export async function getPathFromDialog(): Promise | null> { const parentWindow = BrowserWindow.getFocusedWindow() ?? BrowserWindow.getAllWindows().find((w) => w.isVisible()); @@ -22,13 +23,10 @@ export async function getPathFromDialog(): Promise } const chosenPath = result.filePaths[0]; - - const itemPath = `${chosenPath}${chosenPath.endsWith(path.sep) ? '' : path.sep}`; - - const itemName = path.basename(itemPath); + const itemName = path.basename(chosenPath); return { - path: itemPath, + path: createAbsolutePath(chosenPath), itemName, }; } diff --git a/src/infra/device/getMachineId.ts b/src/infra/device/getMachineId.ts index fa5bc9da09..165a9b9c7b 100644 --- a/src/infra/device/getMachineId.ts +++ b/src/infra/device/getMachineId.ts @@ -18,11 +18,12 @@ export function getMachineId(): Result { try { const id = readFileSync('/etc/machine-id', 'utf-8').trim(); return id ? { data: id } : { error: new MachineIdError('NON_EXISTS') }; - } catch (err: any) { - if (err.code === 'ENOENT') { + } catch (err) { + const code = err instanceof Error ? (err as NodeJS.ErrnoException).code : undefined; + if (code === 'ENOENT') { return { error: new MachineIdError('NON_EXISTS', err) }; } - if (err.code === 'EACCES') { + if (code === 'EACCES') { return { error: new MachineIdError('NO_ACCESS', err) }; } return { error: new MachineIdError('UNKNOWN', err) }; diff --git a/src/infra/drive-server/client/drive-server.client.instance.test.ts b/src/infra/drive-server/client/drive-server.client.instance.test.ts index 8faf9147ae..321be5faf1 100644 --- a/src/infra/drive-server/client/drive-server.client.instance.test.ts +++ b/src/infra/drive-server/client/drive-server.client.instance.test.ts @@ -1,22 +1,19 @@ -import { closeUserSession } from '../../../apps/main/auth/handlers'; -import { getNewApiHeaders } from '../../../apps/main/auth/service'; -import { createClient } from '../drive-server.client'; -import { call } from 'tests/vitest/utils.helper'; +import { partialSpyOn } from 'tests/vitest/utils.helper'; -vi.mock('../drive-server.client', () => ({ - createClient: vi.fn(() => ({})), -})); +describe('driveServerClient instance', () => { + let originalEnv: string | undefined; -vi.mock('../../../apps/main/auth/service', () => ({ - getNewApiHeaders: vi.fn(() => ({ Authorization: 'Bearer token' })), -})); + async function importAndSpy() { + const driveServerClientModule = await import('../drive-server.client'); + const createClientMock = partialSpyOn(driveServerClientModule, 'createClient'); -vi.mock('../../../apps/main/auth/handlers', () => ({ - closeUserSession: vi.fn(), -})); + await import('./drive-server.client.instance'); -describe('driveServerClient instance', () => { - let originalEnv: string | undefined; + const authServiceModule = await import('../../../apps/main/auth/service'); + const authHandlersModule = await import('../../../apps/main/auth/handlers'); + + return { createClientMock, authServiceModule, authHandlersModule }; + } beforeEach(() => { originalEnv = process.env.NEW_DRIVE_URL; @@ -27,44 +24,41 @@ describe('driveServerClient instance', () => { if (originalEnv !== undefined) { process.env.NEW_DRIVE_URL = originalEnv; } else { - delete (process.env as any).NEW_DRIVE_URL; + Reflect.deleteProperty(process.env, 'NEW_DRIVE_URL'); } }); it('should call createClient with expected options', async () => { - await import('./drive-server.client.instance'); - - call(createClient).toMatchObject({ - baseUrl: expect.any(String), - authHeadersProvider: expect.any(Function), - onUnauthorized: expect.any(Function), - }); + const { createClientMock } = await importAndSpy(); + + expect(createClientMock).toBeCalledWith( + expect.objectContaining({ + baseUrl: expect.any(String), + authHeadersProvider: expect.any(Function), + onUnauthorized: expect.any(Function), + }), + ); }); - it('should call getNewApiHeaders when authHeadersProvider is triggered', async () => { - await import('./drive-server.client.instance'); - const createClientCalls = vi.mocked(createClient).mock.calls; - const [{ authHeadersProvider }] = createClientCalls[0]!; - - authHeadersProvider!(); + it('should use getNewApiHeaders as authHeadersProvider', async () => { + const { createClientMock, authServiceModule } = await importAndSpy(); + const clientOptions = createClientMock.mock.lastCall![0]!; - expect(getNewApiHeaders).toHaveBeenCalled(); + expect(clientOptions.authHeadersProvider).toBe(authServiceModule.getNewApiHeaders); }); - it('should call closeUserSession when onUnauthorized is triggered', async () => { - await import('./drive-server.client.instance'); - const [{ onUnauthorized }] = vi.mocked(createClient).mock.calls[0]!; - - onUnauthorized!(); + it('should use closeUserSession as onUnauthorized', async () => { + const { createClientMock, authHandlersModule } = await importAndSpy(); + const clientOptions = createClientMock.mock.lastCall![0]!; - expect(closeUserSession).toHaveBeenCalled(); + expect(clientOptions.onUnauthorized).toBe(authHandlersModule.closeUserSession); }); it('should use process.env.NEW_DRIVE_URL as baseUrl', async () => { process.env.NEW_DRIVE_URL = 'https://mock.api'; - await import('./drive-server.client.instance'); + const { createClientMock } = await importAndSpy(); - call(createClient).toMatchObject({ baseUrl: 'https://mock.api' }); + expect(createClientMock).toBeCalledWith(expect.objectContaining({ baseUrl: 'https://mock.api' })); }); }); diff --git a/src/infra/drive-server/client/interceptors/auth/attach-auth-interceptors.test.ts b/src/infra/drive-server/client/interceptors/auth/attach-auth-interceptors.test.ts index 638ac5f3ca..bd17771144 100644 --- a/src/infra/drive-server/client/interceptors/auth/attach-auth-interceptors.test.ts +++ b/src/infra/drive-server/client/interceptors/auth/attach-auth-interceptors.test.ts @@ -3,6 +3,7 @@ import { call } from 'tests/vitest/utils.helper'; import { attachAuthInterceptors } from './attach-auth-interceptors'; import { createRequestInterceptor } from './create-request-interceptor'; import { createResponseInterceptor } from './create-response-interceptor'; +import { AxiosInstance } from 'axios'; vi.mock('./create-request-interceptor'); vi.mock('./create-response-interceptor'); @@ -20,7 +21,7 @@ describe('attachAuthInterceptors', () => { request: { use: mockRequestUse }, response: { use: mockResponseUse }, }, - } as any; + } as unknown as AxiosInstance; beforeEach(() => { vi.clearAllMocks(); @@ -52,8 +53,8 @@ describe('attachAuthInterceptors', () => { call(createRequestInterceptor).toMatchObject(authHeadersProvider); call(mockRequestUse).toMatchObject(mockRequestInterceptor); - expect(createResponseInterceptor).not.toHaveBeenCalled(); - expect(mockResponseUse).not.toHaveBeenCalled(); + expect(createResponseInterceptor).not.toBeCalled(); + expect(mockResponseUse).not.toBeCalled(); }); it('should only register response interceptor when only onUnauthorized is provided', () => { @@ -61,8 +62,8 @@ describe('attachAuthInterceptors', () => { attachAuthInterceptors(instance, { onUnauthorized }); - expect(createRequestInterceptor).not.toHaveBeenCalled(); - expect(mockRequestUse).not.toHaveBeenCalled(); + expect(createRequestInterceptor).not.toBeCalled(); + expect(mockRequestUse).not.toBeCalled(); call(createResponseInterceptor).toMatchObject(onUnauthorized); call(mockResponseUse).toMatchObject([mockOnFulfilled, mockOnRejected]); }); diff --git a/src/infra/drive-server/client/interceptors/auth/create-response-interceptor.test.ts b/src/infra/drive-server/client/interceptors/auth/create-response-interceptor.test.ts index 57a2400580..7d5ca2f010 100644 --- a/src/infra/drive-server/client/interceptors/auth/create-response-interceptor.test.ts +++ b/src/infra/drive-server/client/interceptors/auth/create-response-interceptor.test.ts @@ -1,4 +1,4 @@ -import { AxiosError } from 'axios'; +import { AxiosError, AxiosHeaders, AxiosResponse, InternalAxiosRequestConfig } from 'axios'; import { createResponseInterceptor } from './create-response-interceptor'; vi.unmock('axios'); @@ -6,13 +6,13 @@ vi.unmock('axios'); describe('createResponseInterceptor', () => { it('should return response unchanged on fulfilled', () => { const { onFulfilled } = createResponseInterceptor(vi.fn()); - const response = { + const response: AxiosResponse = { data: { ok: true }, status: 200, statusText: 'OK', - headers: {}, - config: {}, - } as any; + headers: new AxiosHeaders(), + config: { headers: new AxiosHeaders() } as InternalAxiosRequestConfig, + }; const result = onFulfilled(response); @@ -27,9 +27,9 @@ describe('createResponseInterceptor', () => { error.response = { status: 401, statusText: 'Unauthorized', - headers: {}, + headers: new AxiosHeaders(), data: {}, - config: {} as any, + config: { headers: new AxiosHeaders() } as InternalAxiosRequestConfig, }; await expect(onRejected(error)).rejects.toBe(error); @@ -44,9 +44,9 @@ describe('createResponseInterceptor', () => { error.response = { status: 500, statusText: 'Server Error', - headers: {}, + headers: new AxiosHeaders(), data: {}, - config: {} as any, + config: { headers: new AxiosHeaders() } as InternalAxiosRequestConfig, }; await expect(onRejected(error)).rejects.toBe(error); diff --git a/src/infra/drive-server/client/interceptors/rate-limiter/attach-rate-limiter-interceptors.test.ts b/src/infra/drive-server/client/interceptors/rate-limiter/attach-rate-limiter-interceptors.test.ts index 61532eade3..5b350ae7d7 100644 --- a/src/infra/drive-server/client/interceptors/rate-limiter/attach-rate-limiter-interceptors.test.ts +++ b/src/infra/drive-server/client/interceptors/rate-limiter/attach-rate-limiter-interceptors.test.ts @@ -3,6 +3,7 @@ import { call } from 'tests/vitest/utils.helper'; import { attachRateLimiterInterceptors } from './attach-rate-limiter-interceptors'; import { createRequestInterceptor } from './create-request-interceptor'; import { createResponseInterceptor } from './create-response-interceptor'; +import { AxiosInstance } from 'axios'; vi.mock('./create-request-interceptor'); vi.mock('./create-response-interceptor'); @@ -20,7 +21,7 @@ describe('attachRateLimiterInterceptors', () => { request: { use: mockRequestUse }, response: { use: mockResponseUse }, }, - } as any; + } as unknown as AxiosInstance; beforeEach(() => { (createRequestInterceptor as Mock).mockReturnValue(mockRequestInterceptor); diff --git a/src/infra/drive-server/drive-server.client.ts b/src/infra/drive-server/drive-server.client.ts index 05b9d9044d..27e04a2cc0 100644 --- a/src/infra/drive-server/drive-server.client.ts +++ b/src/infra/drive-server/drive-server.client.ts @@ -56,6 +56,38 @@ type OperationResponse = ? Res : never; +/** + * Infers the query parameters for an endpoint, if any. + */ +type OperationQuery = + MethodShape extends { + parameters: { query: infer Q }; + } + ? Q + : never; + +/** + * Infers the path parameters for an endpoint, if any. + */ +type OperationPath = + MethodShape extends { + parameters: { path: infer PP }; + } + ? PP extends Record + ? PP + : never + : never; + +/** + * Options for a typed HTTP request. + */ +type RequestOptions = { + path?: OperationPath; + headers?: Record; + query?: OperationQuery; + body?: OperationRequestBody; +}; + /** * Creates a client bound to a specific OpenAPI `paths` record. * @@ -86,12 +118,7 @@ export function createClient(opts: ClientOptions) { async function request>( method: M, path: P, - o?: { - path?: Record; - headers?: Record; - query?: Record; - body?: OperationRequestBody; - }, + o?: RequestOptions, ): Promise, DriveServerError>> { let url = path as string; @@ -128,12 +155,12 @@ export function createClient(opts: ClientOptions) { }; } } - // TODO: type `o` properly instead of `any` — currently callers get no type checking on body, path, headers, or query return { - GET:

>(p: P, o?: any) => request('get', p, o), - POST:

>(p: P, o?: any) => request('post', p, o), - PUT:

>(p: P, o?: any) => request('put', p, o), - PATCH:

>(p: P, o?: any) => request('patch', p, o), - DELETE:

>(p: P, o?: any) => request('delete', p, o), + GET:

>(p: P, o?: RequestOptions) => request('get', p, o), + POST:

>(p: P, o?: RequestOptions) => request('post', p, o), + PUT:

>(p: P, o?: RequestOptions) => request('put', p, o), + PATCH:

>(p: P, o?: RequestOptions) => request('patch', p, o), + DELETE:

>(p: P, o?: RequestOptions) => + request('delete', p, o), }; } diff --git a/src/infra/drive-server/services/auth/auth.service.test.ts b/src/infra/drive-server/services/auth/auth.service.test.ts index 91c3080810..1a38efedc1 100644 --- a/src/infra/drive-server/services/auth/auth.service.test.ts +++ b/src/infra/drive-server/services/auth/auth.service.test.ts @@ -1,9 +1,11 @@ import { AuthService } from './auth.service'; -import { authClient } from './auth.client'; -import { getBaseApiHeaders, getNewApiHeaders } from '../../../../apps/main/auth/service'; +import * as authClientModule from './auth.client'; +import * as authServiceModule from '../../../../apps/main/auth/service'; import { LoginAccessRequest, LoginAccessResponse, LoginResponse } from './auth.types'; import { logger } from '@internxt/drive-desktop-core/build/backend'; -import { Mock } from 'vitest'; +import { partialSpyOn } from 'tests/vitest/utils.helper'; +import { MockInstance } from 'vitest'; + vi.mock('axios', async (importOriginal) => { const actual = await importOriginal(); return { @@ -12,39 +14,30 @@ vi.mock('axios', async (importOriginal) => { }; }); -vi.mock('@internxt/drive-desktop-core/build/backend', () => ({ - logger: { - error: vi.fn(), - debug: vi.fn(), - info: vi.fn(), - warn: vi.fn(), - }, -})); - -vi.mock('../../../../apps/main/auth/service', () => ({ - getNewApiHeaders: vi.fn(), - getBaseApiHeaders: vi.fn(), -})); - -vi.mock('./auth.client', () => ({ - authClient: { - GET: vi.fn(), - POST: vi.fn(), +vi.mock('../../drive-server.module', () => ({ + driveServerModule: { + auth: {}, + backup: {}, + user: {}, }, + DriveServerModule: vi.fn(), })); describe('AuthService', () => { let sut: AuthService; + const getNewApiHeadersMock = partialSpyOn(authServiceModule, 'getNewApiHeaders'); + const getBaseApiHeadersMock = partialSpyOn(authServiceModule, 'getBaseApiHeaders'); + const authGetMock = partialSpyOn(authClientModule.authClient, 'GET') as unknown as MockInstance; + const authPostMock = partialSpyOn(authClientModule.authClient, 'POST') as unknown as MockInstance; beforeEach(() => { sut = new AuthService(); - vi.clearAllMocks(); }); describe('refresh', () => { it('should return token and newToken when response is succesful', async () => { const data = { token: 'token', newToken: 'newToken' }; - (authClient.GET as Mock).mockResolvedValue({ data }); + authGetMock.mockResolvedValue({ data }); const mockedHeaders: Record = { Authorization: 'Bearer newToken', 'content-type': 'application/json; charset=utf-8', @@ -52,18 +45,18 @@ describe('AuthService', () => { 'internxt-version': '2.4.8', 'x-internxt-desktop-header': 'test-header', }; - (getNewApiHeaders as Mock).mockReturnValue(mockedHeaders); + getNewApiHeadersMock.mockReturnValue(mockedHeaders); const result = await sut.refresh(); expect(result.isRight()).toEqual(true); expect(result.getRight()).toEqual(data); - expect(authClient.GET).toHaveBeenCalledWith('/users/refresh', { + expect(authGetMock).toHaveBeenCalledWith('/users/refresh', { headers: mockedHeaders, }); }); it('should return error when response is not successful', async () => { - (authClient.GET as Mock).mockResolvedValue({ data: undefined }); + authGetMock.mockResolvedValue({ data: undefined }); const result = await sut.refresh(); @@ -86,7 +79,7 @@ describe('AuthService', () => { it('should return error when request throws an exception', async () => { const error = new Error('Request failed'); - (authClient.GET as Mock).mockRejectedValue(error); + authGetMock.mockRejectedValue(error); const result = await sut.refresh(); @@ -116,7 +109,7 @@ describe('AuthService', () => { hasKyberKeys: false, hasEccKeys: false, }; - (authClient.POST as Mock).mockResolvedValue({ data }); + authPostMock.mockResolvedValue({ data }); const mockedHeaders: Record = { Authorization: 'Bearer token', 'content-type': 'application/json; charset=utf-8', @@ -124,13 +117,13 @@ describe('AuthService', () => { 'internxt-version': '2.4.8', 'x-internxt-desktop-header': 'test-header', }; - (getBaseApiHeaders as Mock).mockReturnValue(mockedHeaders); + getBaseApiHeadersMock.mockReturnValue(mockedHeaders); const result = await sut.login(email); expect(result.isRight()).toBe(true); expect(result.getRight()).toEqual(data); - expect(authClient.POST).toHaveBeenCalledWith('/auth/login', { + expect(authPostMock).toHaveBeenCalledWith('/auth/login', { body: { email }, headers: mockedHeaders, }); @@ -138,9 +131,8 @@ describe('AuthService', () => { it('should return error when request is not successful', async () => { const email = 'test@example.com'; - (authClient.POST as Mock).mockResolvedValue({ data: undefined }); - (getBaseApiHeaders as Mock).mockReturnValue({}); - + authPostMock.mockResolvedValue({ data: undefined }); + getBaseApiHeadersMock.mockReturnValue({}); const result = await sut.login(email); expect(result.isLeft()).toBe(true); @@ -162,8 +154,8 @@ describe('AuthService', () => { it('should return error when request throws an exception', async () => { const email = 'test@example.com'; const error = new Error('Network error'); - (authClient.POST as Mock).mockRejectedValue(error); - (getBaseApiHeaders as Mock).mockReturnValue({}); + authPostMock.mockRejectedValue(error); + getBaseApiHeadersMock.mockReturnValue({}); const result = await sut.login(email); @@ -191,18 +183,17 @@ describe('AuthService', () => { tfa: '123456', }; - const data: LoginAccessResponse = { + const data = { user: { - id: 'user-1', email: 'test@example.com', name: 'Test User', - } as any, + }, token: 'jwt-token', userTeam: {}, newToken: 'refresh-jwt', - }; + } as unknown as LoginAccessResponse; - (authClient.POST as Mock).mockResolvedValue({ data }); + authPostMock.mockResolvedValue({ data }); const mockedHeaders: Record = { Authorization: 'Bearer token', @@ -211,13 +202,13 @@ describe('AuthService', () => { 'internxt-version': '2.4.8', 'x-internxt-desktop-header': 'test-header', }; - (getBaseApiHeaders as Mock).mockReturnValue(mockedHeaders); + getBaseApiHeadersMock.mockReturnValue(mockedHeaders); const result = await sut.access(credentials); expect(result.isRight()).toBe(true); expect(result.getRight()).toEqual(data); - expect(authClient.POST).toHaveBeenCalledWith('/auth/login/access', { + expect(authPostMock).toHaveBeenCalledWith('/auth/login/access', { body: credentials, headers: mockedHeaders, }); @@ -230,8 +221,8 @@ describe('AuthService', () => { tfa: '123456', }; - (authClient.POST as Mock).mockResolvedValue({ data: undefined }); - (getBaseApiHeaders as Mock).mockReturnValue({}); + authPostMock.mockResolvedValue({ data: undefined }); + getBaseApiHeadersMock.mockReturnValue({}); const result = await sut.access(credentials); @@ -259,8 +250,8 @@ describe('AuthService', () => { }; const error = new Error('Network error'); - (authClient.POST as Mock).mockRejectedValue(error); - (getBaseApiHeaders as Mock).mockReturnValue({}); + authPostMock.mockRejectedValue(error); + getBaseApiHeadersMock.mockReturnValue({}); const result = await sut.access(credentials); diff --git a/src/infra/drive-server/services/backup/backup.service.ts b/src/infra/drive-server/services/backup/backup.service.ts index 252e383fc0..62010f42da 100644 --- a/src/infra/drive-server/services/backup/backup.service.ts +++ b/src/infra/drive-server/services/backup/backup.service.ts @@ -5,7 +5,7 @@ import { mapError } from '../utils/mapError'; import { AxiosError } from 'axios'; import { BackupError } from './backup.error'; import { mapDeviceAsFolderToDevice } from '../../../../backend/features/device/utils/deviceMapper'; -import { Device } from '../../../../apps/main/device/service'; +import { Device } from '../../../../backend/features/backup/types/Device'; import { logger } from '@internxt/drive-desktop-core/build/backend'; type getDevicesByIdentifierQuery = operations['BackupController_getDevicesAndFolders']['parameters']['query']; diff --git a/src/infra/drive-server/services/utils/mapError.test.ts b/src/infra/drive-server/services/utils/mapError.test.ts index 6c7d475330..4303d098e1 100644 --- a/src/infra/drive-server/services/utils/mapError.test.ts +++ b/src/infra/drive-server/services/utils/mapError.test.ts @@ -22,7 +22,7 @@ describe('mapError', () => { const err = mapError(axiosError); expect(err).toBeInstanceOf(Error); expect(err.message).toBe('Invalid token'); - expect((err as any).cause).toBe(axiosError); + expect(err.cause).toBe(axiosError); }); it('should fall back to Axios error message if response.data.message is missing', () => { diff --git a/src/infra/environment/download-file/build-crypto-lib.test.ts b/src/infra/environment/download-file/build-crypto-lib.test.ts new file mode 100644 index 0000000000..05b1003f1f --- /dev/null +++ b/src/infra/environment/download-file/build-crypto-lib.test.ts @@ -0,0 +1,61 @@ +import { Network } from '@internxt/sdk'; +import { Environment } from '@internxt/inxt-js'; +import { validateMnemonic } from 'bip39'; +import { buildCryptoLib } from './build-crypto-lib'; + +vi.mock('@internxt/sdk', () => ({ + Network: { + ALGORITHMS: { + AES256CTR: 'aes-256-ctr', + }, + }, +})); + +vi.mock('@internxt/inxt-js', () => ({ + Environment: { + utils: { + generateFileKey: vi.fn(() => Buffer.from('file-key')), + }, + }, +})); + +vi.mock('bip39', () => ({ + validateMnemonic: vi.fn(), +})); + +const validateMnemonicMock = vi.mocked(validateMnemonic); +const generateFileKeyMock = vi.mocked(Environment.utils.generateFileKey); + +describe('buildCryptoLib', () => { + it('uses AES-256-CTR as the crypto algorithm', () => { + const cryptoLib = buildCryptoLib(); + + expect(cryptoLib.algorithm).toBe(Network.ALGORITHMS.AES256CTR); + }); + + it('delegates mnemonic validation to bip39', () => { + validateMnemonicMock.mockReturnValue(true); + const cryptoLib = buildCryptoLib(); + + const result = cryptoLib.validateMnemonic('seed phrase'); + + expect(result).toBe(true); + expect(validateMnemonicMock).toHaveBeenCalledWith('seed phrase'); + }); + + it('delegates file-key generation to Environment utils', () => { + const index = Buffer.from('index'); + const cryptoLib = buildCryptoLib(); + + const result = cryptoLib.generateFileKey('mnemonic', 'bucket-id', index); + + expect(result).toStrictEqual(Buffer.from('file-key')); + expect(generateFileKeyMock).toHaveBeenCalledWith('mnemonic', 'bucket-id', index); + }); + + it('exposes randomBytes from node crypto', () => { + const cryptoLib = buildCryptoLib(); + + expect(cryptoLib.randomBytes(8)).toHaveLength(8); + }); +}); diff --git a/src/infra/environment/download-file/build-crypto-lib.ts b/src/infra/environment/download-file/build-crypto-lib.ts new file mode 100644 index 0000000000..2c7c262134 --- /dev/null +++ b/src/infra/environment/download-file/build-crypto-lib.ts @@ -0,0 +1,14 @@ +import { Network } from '@internxt/sdk'; +import { validateMnemonic } from 'bip39'; +import { Environment } from '@internxt/inxt-js'; +import { randomBytes } from 'node:crypto'; + +export function buildCryptoLib(): Network.Crypto { + return { + algorithm: Network.ALGORITHMS.AES256CTR, + validateMnemonic: (mnemonic: string) => validateMnemonic(mnemonic), + generateFileKey: (mnemonic, bucketId, index) => + Environment.utils.generateFileKey(mnemonic, bucketId, index as Buffer), + randomBytes, + }; +} diff --git a/src/infra/environment/download-file/build-network-client.test.ts b/src/infra/environment/download-file/build-network-client.test.ts new file mode 100644 index 0000000000..dbc48e9df6 --- /dev/null +++ b/src/infra/environment/download-file/build-network-client.test.ts @@ -0,0 +1,42 @@ +import { Network } from '@internxt/sdk'; +import { createHash } from 'node:crypto'; +import { INTERNXT_CLIENT, INTERNXT_VERSION } from '../../../core/utils/utils'; +import { buildNetworkClient } from './build-network-client'; + +vi.mock('@internxt/sdk', () => ({ + Network: { + Network: { + client: vi.fn(() => ({ network: true })), + }, + }, +})); + +const networkClientMock = vi.mocked(Network.Network.client); + +describe('buildNetworkClient', () => { + beforeEach(() => { + process.env.BRIDGE_URL = 'https://bridge.test'; + process.env.INTERNXT_DESKTOP_HEADER_KEY = 'desktop-header'; + }); + + it('builds an SDK network client with app metadata and hashed user id', () => { + const client = buildNetworkClient({ + bridgeUser: 'bridge-user', + userId: 'user-id', + }); + + expect(client).toStrictEqual({ network: true }); + expect(networkClientMock).toHaveBeenCalledWith( + 'https://bridge.test', + { + clientName: INTERNXT_CLIENT, + clientVersion: INTERNXT_VERSION, + desktopHeader: 'desktop-header', + }, + { + bridgeUser: 'bridge-user', + userId: createHash('sha256').update('user-id').digest('hex'), + }, + ); + }); +}); diff --git a/src/infra/environment/download-file/build-network-client.ts b/src/infra/environment/download-file/build-network-client.ts new file mode 100644 index 0000000000..4a45688e36 --- /dev/null +++ b/src/infra/environment/download-file/build-network-client.ts @@ -0,0 +1,23 @@ +import { Network } from '@internxt/sdk'; +import { createHash } from 'node:crypto'; +import { INTERNXT_CLIENT, INTERNXT_VERSION } from '../../../core/utils/utils'; + +export type NetworkClientCredentials = { + bridgeUser: string; + userId: string; +}; + +export function buildNetworkClient(credentials: NetworkClientCredentials): Network.Network { + return Network.Network.client( + process.env.BRIDGE_URL, + { + clientName: INTERNXT_CLIENT, + clientVersion: INTERNXT_VERSION, + desktopHeader: process.env.INTERNXT_DESKTOP_HEADER_KEY, + }, + { + bridgeUser: credentials.bridgeUser, + userId: createHash('sha256').update(credentials.userId).digest('hex'), + }, + ); +} diff --git a/src/infra/environment/download-file/decrypt-at-offset.test.ts b/src/infra/environment/download-file/decrypt-at-offset.test.ts new file mode 100644 index 0000000000..0647fdf3bb --- /dev/null +++ b/src/infra/environment/download-file/decrypt-at-offset.test.ts @@ -0,0 +1,40 @@ +import { createCipheriv } from 'node:crypto'; +import { decryptAtOffset } from './decrypt-at-offset'; + +function encrypt(plainText: Buffer, key: Buffer, iv: Buffer): Buffer { + const cipher = createCipheriv('aes-256-ctr', new Uint8Array(key), new Uint8Array(iv)); + return cipher.update(new Uint8Array(plainText)); +} + +describe('decryptAtOffset', () => { + const key = Buffer.from('00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff', 'hex'); + const iv = Buffer.from('0102030405060708090a0b0c0d0e0f10', 'hex'); + const plainText = Buffer.from('abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'); + const encrypted = encrypt(plainText, key, iv); + + it('decrypts a range starting at a block boundary', () => { + const position = 16; + const encryptedRange = encrypted.subarray(position, position + 12); + + const decrypted = decryptAtOffset(encryptedRange, key, iv, position); + + expect(decrypted).toStrictEqual(plainText.subarray(position, position + 12)); + }); + + it('decrypts a range starting in the middle of a block', () => { + const position = 19; + const encryptedRange = encrypted.subarray(position, position + 17); + + const decrypted = decryptAtOffset(encryptedRange, key, iv, position); + + expect(decrypted).toStrictEqual(plainText.subarray(position, position + 17)); + }); + + it('decrypts a range from the beginning of the file', () => { + const encryptedRange = encrypted.subarray(0, 20); + + const decrypted = decryptAtOffset(encryptedRange, key, iv, 0); + + expect(decrypted).toStrictEqual(plainText.subarray(0, 20)); + }); +}); diff --git a/src/infra/environment/download-file/decrypt-at-offset.ts b/src/infra/environment/download-file/decrypt-at-offset.ts new file mode 100644 index 0000000000..22028ea459 --- /dev/null +++ b/src/infra/environment/download-file/decrypt-at-offset.ts @@ -0,0 +1,38 @@ +import { createDecipheriv } from 'node:crypto'; + +/** + * Decrypts a byte range of an AES-256-CTR encrypted file starting at a given position. + * + * AES-CTR is a stream cipher that works by encrypting sequential counter blocks and XORing + * the result with the plaintext. This makes it seekable: to decrypt bytes starting at position N, + * you only need to know which counter block N falls in, rather than decrypting all preceding bytes. + * + * The counter block for position N is: originalIV + floor(N / 16) + * If N is mid-block (N % 16 !== 0), we advance the decipher by the partial block remainder + * before decrypting the actual bytes. + * + * @param encryptedBytes - The raw encrypted bytes for the requested range (fetched via HTTP Range header) + * @param key - The AES-256 file key + * @param iv - Initialization Vector: a random 16-byte value generated when the file was encrypted, + * stored in the file's network metadata index. Ensures that two files with the same key + * produce different ciphertext. Retrieved by the SDK as the first 16 bytes of the file index. + * @param position - The byte offset in the full file where this range starts + */ +export function decryptAtOffset(encryptedBytes: Buffer, key: Buffer, iv: Buffer, position: number): Buffer { + const AES_BLOCK_SIZE = 16; + const partialBlock = position % AES_BLOCK_SIZE; + const startBlockNumber = (position - partialBlock) / AES_BLOCK_SIZE; + + // Compute the IV for the starting block by adding the block number to the original IV + const ivForRange = (BigInt('0x' + iv.toString('hex')) + BigInt(startBlockNumber)).toString(16).padStart(32, '0'); + const offsetIv = Buffer.from(ivForRange, 'hex'); + + const decipher = createDecipheriv('aes-256-ctr', new Uint8Array(key), new Uint8Array(offsetIv)); + + // If position is mid-block, skip the leading partial block bytes + if (partialBlock > 0) { + decipher.update(new Uint8Array(partialBlock)); + } + + return decipher.update(new Uint8Array(encryptedBytes)); +} diff --git a/src/infra/environment/download-file/download-file.test.ts b/src/infra/environment/download-file/download-file.test.ts new file mode 100644 index 0000000000..e805d8c900 --- /dev/null +++ b/src/infra/environment/download-file/download-file.test.ts @@ -0,0 +1,70 @@ +import { Readable } from 'node:stream'; +import axios from 'axios'; +import { downloadFile as sdkDownloadFile } from '@internxt/sdk/dist/network/download'; +import { decryptAtOffset } from './decrypt-at-offset'; +import { downloadFileRange } from './download-file'; + +vi.mock('axios', () => ({ + default: { + get: vi.fn(), + }, +})); + +vi.mock('@internxt/sdk/dist/network/download', () => ({ + downloadFile: vi.fn(), +})); + +vi.mock('./build-crypto-lib', () => ({ + buildCryptoLib: vi.fn(() => ({})), +})); + +vi.mock('./decrypt-at-offset', () => ({ + decryptAtOffset: vi.fn(), +})); + +const axiosGetMock = vi.mocked(axios.get); +const sdkDownloadFileMock = vi.mocked(sdkDownloadFile); +const decryptAtOffsetMock = vi.mocked(decryptAtOffset); + +describe('downloadFileRange', () => { + beforeEach(() => { + axiosGetMock.mockResolvedValue({ + data: Readable.from([Buffer.from('encrypted')]), + }); + decryptAtOffsetMock.mockReturnValue(Buffer.from('decrypted')); + sdkDownloadFileMock.mockImplementation(async (...args) => { + const downloadFileCb = args[6]; + const decryptFileCb = args[7]; + + await downloadFileCb([{ url: 'https://download.test/file' }] as never, 9); + await decryptFileCb( + undefined as never, + Buffer.from('keykeykeykeykeykeykeykeykeykey12'), + Buffer.from('iviviviviviviviv'), + 9, + ); + }); + }); + + it('passes the abort signal to the HTTP range request', async () => { + const abortController = new AbortController(); + + const result = await downloadFileRange({ + fileId: 'file-id', + bucketId: 'bucket-id', + mnemonic: 'mnemonic', + network: {} as never, + range: { position: 10, length: 20 }, + signal: abortController.signal, + }); + + expect(result.data).toStrictEqual(Buffer.from('decrypted')); + expect(axiosGetMock).toHaveBeenCalledWith('https://download.test/file', { + responseType: 'stream', + signal: abortController.signal, + headers: { + range: 'bytes=10-29', + }, + }); + }); +}); diff --git a/src/infra/environment/download-file/download-file.ts b/src/infra/environment/download-file/download-file.ts new file mode 100644 index 0000000000..9f250d1589 --- /dev/null +++ b/src/infra/environment/download-file/download-file.ts @@ -0,0 +1,97 @@ +import { DecryptFileFunction, DownloadFileFunction } from '@internxt/sdk/dist/network'; +import { downloadFile as sdkDownloadFile } from '@internxt/sdk/dist/network/download'; +import axios from 'axios'; +import { buildCryptoLib } from './build-crypto-lib'; +import { DownloadFileProps } from './types'; +import { decryptAtOffset } from './decrypt-at-offset'; +import { type Result } from '../../../context/shared/domain/Result'; + +export async function downloadFileRange({ + signal, + fileId, + bucketId, + mnemonic, + network, + range, +}: DownloadFileProps): Promise> { + let encryptedBytes: Buffer | undefined; + let decryptedBuffer: Buffer | undefined; + let operationError: Error | undefined; + + const downloadFileCb: DownloadFileFunction = async (downloadables) => { + if (range && downloadables.length > 1) { + operationError = new Error('Multi-Part Download with Range-Requests is not implemented'); + return; + } + for (const downloadable of downloadables) { + if (signal.aborted) { + return; + } + // eslint-disable-next-line no-await-in-loop + encryptedBytes = await fetchEncryptedRange(downloadable.url, range.position, range.length, signal); + } + }; + + const decryptFileCb: DecryptFileFunction = async (_, key, iv) => { + if (signal.aborted) { + return; + } + if (!encryptedBytes) { + operationError = new Error('No encrypted bytes to decrypt'); + return; + } + decryptedBuffer = decryptAtOffset( + encryptedBytes, + Buffer.from(key.toString('hex'), 'hex'), + Buffer.from(iv.toString('hex'), 'hex'), + range.position, + ); + }; + + try { + await sdkDownloadFile( + fileId, + bucketId, + mnemonic, + network, + buildCryptoLib(), + Buffer.from, + downloadFileCb, + decryptFileCb, + ); + } catch (error) { + if (signal.aborted) return abortedDownloadResult(); + return { error: error instanceof Error ? error : new Error('Unknown error occurred') }; + } + + if (signal.aborted) return abortedDownloadResult(); + if (operationError) return { error: operationError }; + if (!decryptedBuffer) return { error: new Error('Decryption did not produce a buffer') }; + return { data: decryptedBuffer }; +} + +function abortedDownloadResult(): Result { + return { data: Buffer.alloc(0) }; +} + +async function fetchEncryptedRange( + url: string, + position: number, + length: number, + signal: AbortSignal, +): Promise { + const response = await axios.get(url, { + responseType: 'stream', + signal, + headers: { + range: `bytes=${position}-${position + length - 1}`, + }, + }); + + return new Promise((resolve, reject) => { + const chunks: Uint8Array[] = []; + response.data.on('data', (chunk: Uint8Array) => chunks.push(chunk)); + response.data.on('end', () => resolve(Buffer.concat(chunks))); + response.data.on('error', reject); + }); +} diff --git a/src/infra/environment/download-file/types.ts b/src/infra/environment/download-file/types.ts new file mode 100644 index 0000000000..8568a189b1 --- /dev/null +++ b/src/infra/environment/download-file/types.ts @@ -0,0 +1,13 @@ +import { Network } from '@internxt/sdk'; + +export type DownloadFileProps = { + signal: AbortSignal; + fileId: string; + bucketId: string; + mnemonic: string; + network: Network.Network; + range: { + position: number; + length: number; + }; +}; diff --git a/src/infra/ipc/auth-ipc-handlers.test.ts b/src/infra/ipc/auth-ipc-handlers.test.ts index 35a6a038eb..13a7bea575 100644 --- a/src/infra/ipc/auth-ipc-handlers.test.ts +++ b/src/infra/ipc/auth-ipc-handlers.test.ts @@ -1,39 +1,30 @@ +import { IpcMainEvent } from 'electron'; import { registerAuthIPCHandlers } from './auth-ipc-handlers'; import { AuthIPCMain } from './auth-ipc-main'; import { driveServerModule } from '../drive-server/drive-server.module'; -import { LoginResponse } from '../drive-server/services/auth/auth.types'; -import { Mock } from 'vitest'; - -vi.mock('../drive-server/drive-server.module', () => ({ - driveServerModule: { - auth: { - login: vi.fn(), - access: vi.fn(), - }, - }, -})); - -vi.mock('./auth-ipc-main', () => ({ - AuthIPCMain: { - handle: vi.fn(), - }, -})); +import { LoginAccessResponse, LoginResponse } from '../drive-server/services/auth/auth.types'; +import { partialSpyOn } from 'tests/vitest/utils.helper'; describe('registerAuthIPCHandlers', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); + const loginMock = partialSpyOn(driveServerModule.auth, 'login'); + const accessMock = partialSpyOn(driveServerModule.auth, 'access'); + const authIPCMainHandleMock = partialSpyOn(AuthIPCMain, 'handle'); + + function getHandler(eventName: string) { + registerAuthIPCHandlers(); + const call = authIPCMainHandleMock.mock.calls.find(([name]) => name === eventName); + if (!call) throw new Error(`Handler for '${eventName}' not registered`); + return call[1]; + } describe('auth:login', () => { it('should register the auth:login handler', () => { registerAuthIPCHandlers(); - expect(AuthIPCMain.handle).toHaveBeenCalledWith('auth:login', expect.any(Function)); + expect(authIPCMainHandleMock).toBeCalledWith('auth:login', expect.any(Function)); }); it('should return a successful response for auth:login', async () => { - registerAuthIPCHandlers(); - const loginMock = driveServerModule.auth.login as Mock; const response: LoginResponse = { hasKeys: true, sKey: 'test-sKey', @@ -42,31 +33,28 @@ describe('registerAuthIPCHandlers', () => { hasEccKeys: false, }; loginMock.mockResolvedValueOnce({ - fold: (_onLeft: any, onRight: any) => onRight(response), + fold: (_onLeft: (err: Error) => T, onRight: (data: LoginResponse) => T): T => onRight(response), }); - const handler = (AuthIPCMain.handle as Mock).mock.calls.find(([eventName]) => eventName === 'auth:login')![1]; + const handler = getHandler('auth:login'); + const result = await handler({} as IpcMainEvent, 'test@example.com'); - const result = await handler({}, 'test@example.com'); - - expect(result).toEqual({ + expect(result).toStrictEqual({ success: true, data: response, }); }); it('should return an error response for auth:login', async () => { - registerAuthIPCHandlers(); - const loginMock = driveServerModule.auth.login as Mock; loginMock.mockResolvedValueOnce({ - fold: (onLeft: any, _onRight: any) => onLeft(new Error('Login failed')), + fold: (onLeft: (err: Error) => T, _onRight: (data: LoginResponse) => T): T => + onLeft(new Error('Login failed')), }); - const handler = (AuthIPCMain.handle as Mock).mock.calls.find(([eventName]) => eventName === 'auth:login')![1]; + const handler = getHandler('auth:login'); + const result = await handler({} as IpcMainEvent, 'test@example.com'); - const result = await handler({}, 'test@example.com'); - - expect(result).toEqual({ + expect(result).toStrictEqual({ success: false, error: 'Login failed', }); @@ -76,39 +64,34 @@ describe('registerAuthIPCHandlers', () => { describe('auth:access', () => { it('should register the auth:access handler', () => { registerAuthIPCHandlers(); - expect(AuthIPCMain.handle).toHaveBeenCalledWith('auth:access', expect.any(Function)); + expect(authIPCMainHandleMock).toBeCalledWith('auth:access', expect.any(Function)); }); it('should return a successful response for auth:access', async () => { - registerAuthIPCHandlers(); - const accessMock = driveServerModule.auth.access as Mock; - const mockAccessData = { sessionId: 'abc123' }; + const mockAccessData = { sessionId: 'abc123' } as unknown as LoginAccessResponse; accessMock.mockResolvedValueOnce({ - fold: (_onLeft: any, onRight: any) => onRight(mockAccessData), + fold: (_onLeft: (err: Error) => T, onRight: (data: LoginAccessResponse) => T): T => onRight(mockAccessData), }); - const handler = (AuthIPCMain.handle as Mock).mock.calls.find(([eventName]) => eventName === 'auth:access')![1]; - - const result = await handler({}, { email: 'test@example.com', code: '123456' }); + const handler = getHandler('auth:access'); + const result = await handler({} as IpcMainEvent, { email: 'test@example.com', password: '123456' }); - expect(result).toEqual({ + expect(result).toStrictEqual({ success: true, data: mockAccessData, }); }); it('should return an error response for auth:access', async () => { - registerAuthIPCHandlers(); - const accessMock = driveServerModule.auth.access as Mock; accessMock.mockResolvedValueOnce({ - fold: (onLeft: any, _onRight: any) => onLeft(new Error('Access denied')), + fold: (onLeft: (err: Error) => T, _onRight: (data: LoginAccessResponse) => T): T => + onLeft(new Error('Access denied')), }); - const handler = (AuthIPCMain.handle as Mock).mock.calls.find(([eventName]) => eventName === 'auth:access')![1]; - - const result = await handler({}, { email: 'test@example.com', code: '123456' }); + const handler = getHandler('auth:access'); + const result = await handler({} as IpcMainEvent, { email: 'test@example.com', password: '123456' }); - expect(result).toEqual({ + expect(result).toStrictEqual({ success: false, error: 'Access denied', }); diff --git a/src/types/NodeClamError.d.ts b/src/types/NodeClamError.d.ts index 4a45a137ca..b9e795dc8a 100644 --- a/src/types/NodeClamError.d.ts +++ b/src/types/NodeClamError.d.ts @@ -3,7 +3,6 @@ declare module '@internxt/scan/lib/NodeClamError' { constructor(message: string); data?: { err?: Error; - [key: string]: any; }; } } diff --git a/vitest.setup.main.ts b/vitest.setup.main.ts index 12d60c6c66..6e485c15bd 100644 --- a/vitest.setup.main.ts +++ b/vitest.setup.main.ts @@ -74,16 +74,6 @@ vi.mock('@internxt/drive-desktop-core/src/backend', () => ({ }, })); -// Mock native modules that require system libraries -vi.mock('@gcas/fuse', () => ({ - default: vi.fn(), - Fuse: vi.fn().mockImplementation(() => ({ - mount: vi.fn(), - unmount: vi.fn(), - ops: {}, - })), -})); - // Mock electron-store vi.mock('electron-store', () => { return {