diff --git a/examples/graphql/package.json b/examples/graphql/package.json
index e41fb22..02fa937 100644
--- a/examples/graphql/package.json
+++ b/examples/graphql/package.json
@@ -14,6 +14,7 @@
},
"dependencies": {
"@neuledge/engine": "^0.2.1",
+ "@neuledge/postgresql-store": "*",
"@neuledge/mongodb-store": "^0.2.0",
"dotenv": "^16.0.3",
"fastify": "^4.14.1",
diff --git a/packages/engine/README.md b/packages/engine/README.md
index 343efcd..fb2eec8 100644
--- a/packages/engine/README.md
+++ b/packages/engine/README.md
@@ -12,8 +12,9 @@
MongoDB ⇄
- MySQL (soon) ⇄
- PostgreSQL (soon)
+ MySQL ⇄
+ PostgreSQL ⇄
+ Your DB (request)
diff --git a/packages/engine/src/engine/exec/alter.test.ts b/packages/engine/src/engine/exec/alter.test.ts
index 1d7d152..5bf70c7 100644
--- a/packages/engine/src/engine/exec/alter.test.ts
+++ b/packages/engine/src/engine/exec/alter.test.ts
@@ -63,13 +63,11 @@ describe('engine/exec/alter', () => {
collection: metadata['collections']['categories'],
where: { id: { $eq: 1 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
expect(update).toHaveBeenNthCalledWith(2, {
collection: metadata['collections']['categories'],
where: { id: { $eq: 2 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
});
@@ -129,13 +127,11 @@ describe('engine/exec/alter', () => {
collection: metadata['collections']['categories'],
where: { id: { $eq: 1 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
expect(update).toHaveBeenNthCalledWith(2, {
collection: metadata['collections']['categories'],
where: { id: { $eq: 2 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
});
@@ -182,13 +178,11 @@ describe('engine/exec/alter', () => {
collection: metadata['collections']['categories'],
where: { id: { $eq: 1 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
expect(update).toHaveBeenNthCalledWith(2, {
collection: metadata['collections']['categories'],
where: { id: { $eq: 2 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
});
@@ -264,13 +258,11 @@ describe('engine/exec/alter', () => {
collection: metadata['collections']['posts'],
where: { id: { $eq: 1 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { title: 'foo', content: 'bar', category_id: 1, __v: 1 },
- limit: 1,
});
expect(update).toHaveBeenNthCalledWith(2, {
collection: metadata['collections']['posts'],
where: { id: { $eq: 2 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { title: 'foo', content: 'bar', category_id: 1, __v: 1 },
- limit: 1,
});
});
});
@@ -376,7 +368,6 @@ describe('engine/exec/alter', () => {
collection: metadata['collections']['categories'],
where: { id: { $eq: 1 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
});
@@ -454,7 +445,6 @@ describe('engine/exec/alter', () => {
collection: metadata['collections']['categories'],
where: { id: { $eq: 1 }, __h: { $eq: hash }, __v: { $eq: 0 } },
set: { name: 'foo', description: 'bar', __v: 1 },
- limit: 1,
});
});
diff --git a/packages/engine/src/engine/metadata/collections.ts b/packages/engine/src/engine/metadata/collections.ts
index 8b53ff2..ea496c4 100644
--- a/packages/engine/src/engine/metadata/collections.ts
+++ b/packages/engine/src/engine/metadata/collections.ts
@@ -19,6 +19,8 @@ const ensureStoreCollection = async (
store: Store,
collection: MetadataCollection,
): Promise => {
+  // FIXME: how do we handle field or index changes? (e.g. a field changed to be nullable)
+
await store.ensureCollection({
collection,
indexes: Object.values(collection.indexes),
diff --git a/packages/engine/src/engine/metadata/load.ts b/packages/engine/src/engine/metadata/load.ts
index 274c7f4..d440653 100644
--- a/packages/engine/src/engine/metadata/load.ts
+++ b/packages/engine/src/engine/metadata/load.ts
@@ -4,10 +4,10 @@ import { Store } from '@neuledge/store';
import { ensureStoreCollections } from './collections';
import {
ensureMetadataCollection,
- getMetadataCollection,
getStoreMetadataSnapshot,
syncStoreMetadata,
} from './store';
+import { getMetadataCollection } from './state';
const DEFAULT_METADATA_COLLECTION_NAME = '__neuledge_metadata';
diff --git a/packages/engine/src/engine/metadata/state.ts b/packages/engine/src/engine/metadata/state.ts
index 7458771..8e9e963 100644
--- a/packages/engine/src/engine/metadata/state.ts
+++ b/packages/engine/src/engine/metadata/state.ts
@@ -1,12 +1,14 @@
+import { StoreCollection, StoreField, StorePrimaryKey } from '@neuledge/store';
import { NeuledgeError } from '@/error';
import {
StateSnapshot,
StateFieldSnapshot,
StateRelationSnapshot,
+ METADATA_HASH_BYTES,
} from '@/metadata';
export interface StoreMetadataState {
- collectionName: string;
+ collection_name: string;
name: string;
hash: Buffer;
fields: StoreMetadataStateField[];
@@ -32,6 +34,38 @@ interface StoreMetadataStateRelation {
index: number;
}
+// FIXME we can't save buffers on json fields. We need to encode them somehow or use relations for `fields` and `relations` fields.
+
+export const getMetadataCollection = (
+ metadataCollectionName: string,
+): StoreCollection => {
+ const hash: StoreField = {
+ name: 'hash',
+ type: 'binary',
+ size: METADATA_HASH_BYTES,
+ };
+
+ const primaryKey: StorePrimaryKey = {
+ name: 'hash',
+ fields: { [hash.name]: { sort: 'asc' } },
+ unique: 'primary',
+ };
+
+ return {
+ name: metadataCollectionName,
+ primaryKey,
+ indexes: { [primaryKey.name]: primaryKey },
+ fields: {
+ [hash.name]: hash,
+ collection_name: { name: 'collection_name', type: 'string' },
+ name: { name: 'name', type: 'string' },
+ fields: { name: 'fields', type: 'json', list: true },
+ relations: { name: 'relations', type: 'json', list: true },
+ v: { name: 'v', type: 'number', unsigned: true, scale: 0, precision: 4 },
+ },
+ };
+};
+
export const fromStoreMetadataState = (
getState: (hash: Buffer) => StateSnapshot,
getType: (key: string) => StateFieldSnapshot['type'],
@@ -45,7 +79,7 @@ export const fromStoreMetadataState = (
}
return getState(doc.hash).assign({
- collectionName: doc.collectionName,
+ collectionName: doc.collection_name,
name: doc.name,
hash: doc.hash,
fields: doc.fields.map((field) =>
@@ -60,7 +94,7 @@ export const fromStoreMetadataState = (
export const toStoreMetadataState = (
state: StateSnapshot,
): StoreMetadataState => ({
- collectionName: state.collectionName,
+ collection_name: state.collectionName,
name: state.name,
hash: state.hash,
fields: state.fields.map((field) => toStoreMetadataStateField(field)),
diff --git a/packages/engine/src/engine/metadata/store.ts b/packages/engine/src/engine/metadata/store.ts
index ab9dcea..23a9a1b 100644
--- a/packages/engine/src/engine/metadata/store.ts
+++ b/packages/engine/src/engine/metadata/store.ts
@@ -1,13 +1,7 @@
import { NeuledgeError } from '@/error';
-import { MetadataChange, StateSnapshot, METADATA_HASH_BYTES } from '@/metadata';
+import { MetadataChange, StateSnapshot } from '@/metadata';
import { MetadataSnapshot } from '@/metadata/snapshot';
-import {
- Store,
- StoreCollection,
- StoreField,
- StoreList,
- StorePrimaryKey,
-} from '@neuledge/store';
+import { Store, StoreCollection, StoreList } from '@neuledge/store';
import pLimit from 'p-limit';
import {
fromStoreMetadataState,
@@ -16,34 +10,7 @@ import {
} from './state';
const HASH_ENCODING = 'base64url';
-const COLLECTION_FIND_LIMIT = 1000;
-
-export const getMetadataCollection = (
- metadataCollectionName: string,
-): StoreCollection => {
- const hash: StoreField = {
- name: 'hash',
- type: 'binary',
- size: METADATA_HASH_BYTES,
- };
-
- const primaryKey: StorePrimaryKey = {
- name: 'hash',
- fields: { [hash.name]: { sort: 'asc' } },
- unique: 'primary',
- };
-
- return {
- name: metadataCollectionName,
- primaryKey,
- indexes: { [primaryKey.name]: primaryKey },
- fields: {
- [hash.name]: hash,
- key: { name: 'key', type: 'string' },
- payload: { name: 'payload', type: 'json' },
- },
- };
-};
+const COLLECTION_FIND_LIMIT = 100;
export const ensureMetadataCollection = async (
store: Store,
@@ -140,7 +107,6 @@ export const syncStoreMetadata = async (
collection: metadataCollection,
where: { hash: { $eq: hash } },
set: set as never,
- limit: 1,
}),
),
),
diff --git a/packages/engine/src/engine/mutations/store.ts b/packages/engine/src/engine/mutations/store.ts
index 1faf2e2..91e202b 100644
--- a/packages/engine/src/engine/mutations/store.ts
+++ b/packages/engine/src/engine/mutations/store.ts
@@ -58,7 +58,6 @@ const updateStoreDocument = async (
collection,
where: getWhereRecordByPrimaryKeys(collection, document),
set: Object.fromEntries(setEntries),
- limit: 1,
});
return !!res.affectedCount;
@@ -74,7 +73,6 @@ const deleteStoreDocuments = async (
await store.delete({
collection,
where: getWhereByPrimaryKeys(collection, documents),
- limit: documents.length,
});
};
@@ -86,7 +84,6 @@ const deleteStoreDocuments = async (
// await store.delete({
// collectionName: collection.name,
// where: getWhereRecord(collection.primaryKeys, document),
-// limit: 1,
// });
// };
diff --git a/packages/mongodb-store/README.md b/packages/mongodb-store/README.md
index 1dc51c2..5e55ee3 100644
--- a/packages/mongodb-store/README.md
+++ b/packages/mongodb-store/README.md
@@ -14,7 +14,7 @@ npm install @neuledge/mongodb-store
import { Engine } from '@neuledge/engine';
import { MongoDBStore } from '@neuledge/mongodb-store';
-const store = store: new MongoDBStore({
+const store = new MongoDBStore({
url: process.env.MONGODB_URL ?? 'mongodb://localhost:27017',
name: process.env.MONGODB_DATABASE ?? 'my-database',
});
diff --git a/packages/mongodb-store/src/indexes.ts b/packages/mongodb-store/src/indexes.ts
index 4c3d31d..ae48a60 100644
--- a/packages/mongodb-store/src/indexes.ts
+++ b/packages/mongodb-store/src/indexes.ts
@@ -1,4 +1,4 @@
-import { StoreIndex, StorePrimaryKey } from '@neuledge/store';
+import { StoreIndex, StorePrimaryKey, throwStoreError } from '@neuledge/store';
import { Collection } from 'mongodb';
import { escapeFieldName } from './fields';
@@ -6,7 +6,9 @@ export const dropIndexes = async (
collection: Collection,
indexes: string[],
): Promise => {
- await Promise.all(indexes.map((index) => collection.dropIndex(index)));
+ await Promise.all(indexes.map((index) => collection.dropIndex(index))).catch(
+ throwStoreError,
+ );
};
export const ensureIndexes = async (
@@ -14,7 +16,11 @@ export const ensureIndexes = async (
collection: Collection,
indexes: StoreIndex[],
): Promise => {
- const exists = await collection.listIndexes().toArray();
+ const exists = await collection
+ .listIndexes()
+ .toArray()
+ .catch(throwStoreError);
+
const existMap = new Map(exists.map((item) => [item.name, item]));
for (const index of indexes) {
@@ -39,11 +45,13 @@ export const ensureIndexes = async (
// documents that don't have the indexed fields. This maintains the same
// behavior with relational databases where NULL values are not indexed.
- await collection.createIndex(indexSpec, {
- name: index.name,
- unique: !!index.unique,
- sparse: true,
- background: true,
- });
+ await collection
+ .createIndex(indexSpec, {
+ name: index.name,
+ unique: !!index.unique,
+ sparse: true,
+ background: true,
+ })
+ .catch(throwStoreError);
}
};
diff --git a/packages/mongodb-store/src/inserted-ids.ts b/packages/mongodb-store/src/inserted-ids.ts
index 7d4ae86..70d2f66 100644
--- a/packages/mongodb-store/src/inserted-ids.ts
+++ b/packages/mongodb-store/src/inserted-ids.ts
@@ -1,4 +1,4 @@
-import { StoreCollection, StoreError } from '@neuledge/store';
+import { StoreCollection, StoreError, throwStoreError } from '@neuledge/store';
import { Collection } from 'mongodb';
export interface AutoIncrementDocument {
@@ -56,17 +56,19 @@ const autoIncrementPrimaryKey = async (
autoIncrement: Collection,
collectionName: string,
): Promise => {
- const { value: doc } = await autoIncrement.findOneAndUpdate(
- {
- _id: collectionName,
- },
- {
- $inc: { value: 1 },
- },
- {
- upsert: true,
- },
- );
+ const { value: doc } = await autoIncrement
+ .findOneAndUpdate(
+ {
+ _id: collectionName,
+ },
+ {
+ $inc: { value: 1 },
+ },
+ {
+ upsert: true,
+ },
+ )
+ .catch(throwStoreError);
return (doc?.value ?? 0) + 1;
};
diff --git a/packages/mongodb-store/src/store.ts b/packages/mongodb-store/src/store.ts
index 4693a06..3bf1046 100644
--- a/packages/mongodb-store/src/store.ts
+++ b/packages/mongodb-store/src/store.ts
@@ -17,6 +17,7 @@ import {
StoreIndex,
StoreIndexField,
StorePrimaryKey,
+ throwStoreError,
} from '@neuledge/store';
import {
Db,
@@ -95,7 +96,10 @@ export class MongoDBStore implements Store {
typeof name === 'string'
? this.client
.connect()
- .then((client) => client.db(name, db as DbOptions | undefined))
+ .then(
+ (client) => client.db(name, db as DbOptions | undefined),
+ throwStoreError,
+ )
: Promise.resolve(db as Db);
this.collections = {};
@@ -112,12 +116,15 @@ export class MongoDBStore implements Store {
}
async close(): Promise {
- await this.client.close();
+ await this.client.close().catch(throwStoreError);
}
async listCollections(): Promise {
const db = await this.db;
- const res = await db.listCollections({}, { nameOnly: true }).toArray();
+ const res = await db
+ .listCollections({}, { nameOnly: true })
+ .toArray()
+ .catch(throwStoreError);
return res.map((item): StoreCollection_Slim => ({ name: item.name }));
}
@@ -126,7 +133,10 @@ export class MongoDBStore implements Store {
options: StoreDescribeCollectionOptions,
): Promise {
const collection = await this.collection(options.collection.name);
- const indexes = await collection.listIndexes().toArray();
+ const indexes = await collection
+ .listIndexes()
+ .toArray()
+ .catch(throwStoreError);
const storeIndexes = indexes.map(
(index): StoreIndex => ({
@@ -189,7 +199,7 @@ export class MongoDBStore implements Store {
async dropCollection(options: StoreDropCollectionOptions): Promise {
const db = await this.db;
- await db.dropCollection(options.collection.name);
+ await db.dropCollection(options.collection.name).catch(throwStoreError);
}
async find(options: StoreFindOptions): Promise {
@@ -221,7 +231,7 @@ export class MongoDBStore implements Store {
);
}
- const rawDocs = await query.toArray();
+ const rawDocs = await query.toArray().catch(throwStoreError);
let docs = rawDocs.map((doc) => unescapeDocument(options.collection, doc));
const asyncLimit = pLimit(this.readConcurrency);
@@ -250,14 +260,16 @@ export class MongoDBStore implements Store {
),
);
- const res = await collection.insertMany(
- insertedIds.map((insertedId, i) =>
- escapeDocument(options.collection, {
- ...options.documents[i],
- ...insertedId,
- }),
- ),
- );
+ const res = await collection
+ .insertMany(
+ insertedIds.map((insertedId, i) =>
+ escapeDocument(options.collection, {
+ ...options.documents[i],
+ ...insertedId,
+ }),
+ ),
+ )
+ .catch(throwStoreError);
return {
insertedIds: insertedIds,
@@ -271,33 +283,15 @@ export class MongoDBStore implements Store {
const filter = options.where
? findFilter(options.collection.primaryKey, options.where)
: {};
+
const update = updateFilter(
options.collection.primaryKey,
options.set as Document,
);
- let res;
-
- if (options.limit === 1) {
- res = await collection.updateOne(filter, update);
- } else {
- const ids = await collection
- // unicon issue: https://github.com/sindresorhus/eslint-plugin-unicorn/issues/1947
- // eslint-disable-next-line unicorn/no-array-callback-reference, unicorn/no-array-method-this-argument
- .find(filter, {
- limit: options.limit,
- projection: { _id: 1 },
- })
- .toArray();
-
- if (!ids.length) {
- return { affectedCount: 0 };
- }
-
- res = await collection.updateMany(
- { ...filter, _id: { $in: ids.map((id) => id._id) } },
- update,
- );
- }
+
+ const res = await collection
+ .updateMany(filter, update)
+ .catch(throwStoreError);
return {
affectedCount: res.modifiedCount,
@@ -310,29 +304,8 @@ export class MongoDBStore implements Store {
const filter = options.where
? findFilter(options.collection.primaryKey, options.where)
: {};
- let res;
-
- if (options.limit === 1) {
- res = await collection.deleteOne(filter);
- } else {
- const ids = await collection
- // unicon issue: https://github.com/sindresorhus/eslint-plugin-unicorn/issues/1947
- // eslint-disable-next-line unicorn/no-array-callback-reference, unicorn/no-array-method-this-argument
- .find(filter, {
- limit: options.limit,
- projection: { _id: 1 },
- })
- .toArray();
-
- if (!ids.length) {
- return { affectedCount: 0 };
- }
-
- res = await collection.deleteMany({
- ...filter,
- _id: { $in: ids.map((id) => id._id) },
- });
- }
+
+ const res = await collection.deleteMany(filter).catch(throwStoreError);
return { affectedCount: res.deletedCount };
}
@@ -347,7 +320,8 @@ export class MongoDBStore implements Store {
async (db) => {
const [exists] = await db
.listCollections({ name: collectionName }, { nameOnly: true })
- .toArray();
+ .toArray()
+ .catch(throwStoreError);
if (!exists) {
// allow retry on next call
@@ -389,7 +363,7 @@ export class MongoDBStore implements Store {
query = query.project(join.project ?? { _id: 1 });
}
- const rawDocs = await query.toArray();
+ const rawDocs = await query.toArray().catch(throwStoreError);
return rawDocs.map((doc) => unescapeDocument(join.collection, doc));
}
diff --git a/packages/mysql-store/.npmignore b/packages/mysql-store/.npmignore
new file mode 100644
index 0000000..d2ee3b5
--- /dev/null
+++ b/packages/mysql-store/.npmignore
@@ -0,0 +1,6 @@
+/*
+!/dist/*.js
+!/dist/*.js.map
+!/dist/*.mjs
+!/dist/*.mjs.map
+!/dist/**/*.d.ts
\ No newline at end of file
diff --git a/packages/mysql-store/README.md b/packages/mysql-store/README.md
new file mode 100644
index 0000000..ad5cf63
--- /dev/null
+++ b/packages/mysql-store/README.md
@@ -0,0 +1,30 @@
+# Neuledge MySQL Store
+
+A store for [Neuledge Engine](https://neuledge.com) that uses [MySQL](https://www.mysql.com) database as a backend.
+
+## 📦 Installation
+
+```bash
+npm install @neuledge/mysql-store
+```
+
+## 🚀 Getting started
+
+```ts
+import { MySQLStore } from '@neuledge/mysql-store';
+
+const store = new MySQLStore({
+ uri: process.env.MYSQL_URI ?? 'mysql://localhost:3306',
+ database: process.env.MYSQL_DATABASE ?? 'my-database',
+});
+
+const engine = new Engine({
+ store,
+});
+```
+
+For more information, please refer to the [main repository](https://github.com/neuledge/engine-js).
+
+## 📄 License
+
+Neuledge is [Apache 2.0 licensed](https://github.com/neuledge/engine-js/blob/main/LICENSE).
diff --git a/packages/mysql-store/package.json b/packages/mysql-store/package.json
new file mode 100644
index 0000000..7a44483
--- /dev/null
+++ b/packages/mysql-store/package.json
@@ -0,0 +1,44 @@
+{
+ "name": "@neuledge/mysql-store",
+ "version": "0.0.0",
+ "description": "MySQL store implementation for Neuledge Engine",
+ "keywords": [
+ "neuledge",
+ "mysql",
+ "store",
+ "database"
+ ],
+ "main": "./dist/index.js",
+ "module": "./dist/index.mjs",
+ "types": "./dist/index.d.ts",
+ "exports": {
+ ".": {
+ "require": "./dist/index.js",
+ "import": "./dist/index.mjs",
+ "types": "./dist/index.d.ts"
+ }
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/neuledge/engine-js.git"
+ },
+ "license": "Apache-2.0",
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">= 16"
+ },
+ "scripts": {
+ "types": "rimraf --glob dist/*.{d.ts,d.ts.map} dist/**/*.{d.ts,d.ts.map} && tsc --emitDeclarationOnly && tsc-alias",
+ "build": "rimraf --glob dist/*.{js,js.map,mjs,mjs.map} && tsup",
+ "lint": "eslint . --ext \"js,jsx,ts,tsx,mjs,cjs\"",
+ "lint:strict": "yarn lint --max-warnings 0"
+ },
+ "dependencies": {
+ "@neuledge/store": "^0.2.0",
+ "@neuledge/sql-store": "^0.0.0",
+ "@types/mysql": "^2.15.21",
+ "mysql": "^2.18.1"
+ }
+}
diff --git a/packages/mysql-store/src/index.ts b/packages/mysql-store/src/index.ts
new file mode 100644
index 0000000..d406816
--- /dev/null
+++ b/packages/mysql-store/src/index.ts
@@ -0,0 +1 @@
+export * from './store';
diff --git a/packages/mysql-store/src/queries/add-column.ts b/packages/mysql-store/src/queries/add-column.ts
new file mode 100644
index 0000000..166da74
--- /dev/null
+++ b/packages/mysql-store/src/queries/add-column.ts
@@ -0,0 +1,8 @@
+import { StoreCollection, StoreField } from '@neuledge/store';
+import { MySQLConnection } from './connection';
+
+export const addColumn = async (
+ connection: MySQLConnection,
+ collection: StoreCollection,
+ field: StoreField,
+): Promise => {};
diff --git a/packages/mysql-store/src/queries/add-index.ts b/packages/mysql-store/src/queries/add-index.ts
new file mode 100644
index 0000000..4cb7f21
--- /dev/null
+++ b/packages/mysql-store/src/queries/add-index.ts
@@ -0,0 +1,8 @@
+import { StoreCollection, StoreIndex } from '@neuledge/store';
+import { MySQLConnection } from './connection';
+
+export const addIndex = async (
+ connection: MySQLConnection,
+ collection: StoreCollection,
+ index: StoreIndex,
+): Promise => {};
diff --git a/packages/mysql-store/src/queries/connection.ts b/packages/mysql-store/src/queries/connection.ts
new file mode 100644
index 0000000..d65c071
--- /dev/null
+++ b/packages/mysql-store/src/queries/connection.ts
@@ -0,0 +1,3 @@
+import { Connection, Pool } from 'mysql';
+
+export type MySQLConnection = Pick;
diff --git a/packages/mysql-store/src/queries/create-table.ts b/packages/mysql-store/src/queries/create-table.ts
new file mode 100644
index 0000000..1deeaf3
--- /dev/null
+++ b/packages/mysql-store/src/queries/create-table.ts
@@ -0,0 +1,7 @@
+import { StoreCollection } from '@neuledge/store';
+import { MySQLConnection } from './connection';
+
+export const createTableIfNotExists = async (
+ connection: MySQLConnection,
+ collection: StoreCollection,
+): Promise => {};
diff --git a/packages/mysql-store/src/queries/drop-column.ts b/packages/mysql-store/src/queries/drop-column.ts
new file mode 100644
index 0000000..594fdad
--- /dev/null
+++ b/packages/mysql-store/src/queries/drop-column.ts
@@ -0,0 +1,8 @@
+import { StoreCollection } from '@neuledge/store';
+import { MySQLConnection } from './connection';
+
+export const dropColumn = async (
+ connection: MySQLConnection,
+ collection: StoreCollection,
+ field: string,
+): Promise => {};
diff --git a/packages/mysql-store/src/queries/drop-index.ts b/packages/mysql-store/src/queries/drop-index.ts
new file mode 100644
index 0000000..0d05fb1
--- /dev/null
+++ b/packages/mysql-store/src/queries/drop-index.ts
@@ -0,0 +1,8 @@
+import { StoreCollection } from '@neuledge/store';
+import { MySQLConnection } from './connection';
+
+export const dropIndex = async (
+ connection: MySQLConnection,
+ collection: StoreCollection,
+ index: string,
+): Promise => {};
diff --git a/packages/mysql-store/src/queries/drop-table.ts b/packages/mysql-store/src/queries/drop-table.ts
new file mode 100644
index 0000000..14716fb
--- /dev/null
+++ b/packages/mysql-store/src/queries/drop-table.ts
@@ -0,0 +1,6 @@
+import { MySQLConnection } from './connection';
+
+export const dropTableIfExists = async (
+ connection: MySQLConnection,
+ tableName: string,
+): Promise => {};
diff --git a/packages/mysql-store/src/queries/index.ts b/packages/mysql-store/src/queries/index.ts
new file mode 100644
index 0000000..fe9dba7
--- /dev/null
+++ b/packages/mysql-store/src/queries/index.ts
@@ -0,0 +1,9 @@
+export * from './add-column';
+export * from './add-index';
+export * from './create-table';
+export * from './drop-column';
+export * from './drop-index';
+export * from './drop-table';
+export * from './list-table-columns';
+export * from './list-table-statistics';
+export * from './list-tables';
diff --git a/packages/mysql-store/src/queries/list-table-columns.ts b/packages/mysql-store/src/queries/list-table-columns.ts
new file mode 100644
index 0000000..bfb3018
--- /dev/null
+++ b/packages/mysql-store/src/queries/list-table-columns.ts
@@ -0,0 +1,47 @@
+import { StoreShapeType } from '@neuledge/store';
+import { MySQLConnection } from './connection';
+
+/**
+ * A table column from the information_schema.columns table.
+ */
+export interface MySQLColumn {
+ column_name: string;
+ data_type: string;
+ character_maximum_length: number | null;
+ numeric_precision: number | null;
+ numeric_scale: number | null;
+ is_nullable: 1 | 0;
+ is_auto_increment: 1 | 0;
+}
+
+export const listTableColumns = async (
+ connection: MySQLConnection,
+ tableName: string,
+): Promise =>
+ new Promise((resolve, reject) =>
+ connection.query(
+ `SELECT column_name, data_type, character_maximum_length, numeric_precision, numeric_scale, (is_nullable = 'YES') AS is_nullable, extra LIKE '%auto_increment%' AS is_auto_increment
+FROM information_schema.columns
+WHERE table_schema = DATABASE() AND table_name = ?`,
+ [tableName],
+ (error, results) => (error ? reject(error) : resolve(results)),
+ ),
+ );
+
+export const dataTypeMap: Record = {
+  varchar: 'string',
+  char: 'string',
+  text: 'string',
+  decimal: 'number',
+  float: 'number',
+  double: 'number',
+  int: 'number',
+  tinyint: 'number',
+  smallint: 'number',
+  bigint: 'number',
+  blob: 'binary',
+  varbinary: 'binary',
+  datetime: 'date-time',
+  timestamp: 'date-time',
+  json: 'json',
+};
diff --git a/packages/mysql-store/src/queries/list-table-statistics.ts b/packages/mysql-store/src/queries/list-table-statistics.ts
new file mode 100644
index 0000000..f6816f0
--- /dev/null
+++ b/packages/mysql-store/src/queries/list-table-statistics.ts
@@ -0,0 +1,28 @@
+import { MySQLConnection } from './connection';
+
+/**
+ * A table statistic row from the information_schema.statistics table.
+ */
+export interface MySQLIndexAttribute {
+ index_name: string;
+ column_name: string;
+ seq_in_index: number;
+ direction: 'ASC' | 'DESC';
+ is_unique: 1 | 0;
+ is_primary: 1 | 0;
+}
+
+export const listIndexAttributes = async (
+ connection: MySQLConnection,
+ tableName: string,
+): Promise =>
+ new Promise((resolve, reject) =>
+ connection.query(
+ `SELECT index_name, column_name, seq_in_index, CASE collation WHEN 'A' THEN 'ASC' ELSE 'DESC' END AS direction, (non_unique = 0) AS is_unique, (index_name = 'PRIMARY') AS is_primary
+ FROM information_schema.statistics
+ WHERE table_schema = DATABASE() AND table_name = ?
+ ORDER BY index_name, seq_in_index`,
+ [tableName],
+ (error, results) => (error ? reject(error) : resolve(results)),
+ ),
+ );
diff --git a/packages/mysql-store/src/queries/list-tables.ts b/packages/mysql-store/src/queries/list-tables.ts
new file mode 100644
index 0000000..ee6a9ee
--- /dev/null
+++ b/packages/mysql-store/src/queries/list-tables.ts
@@ -0,0 +1,18 @@
+import { MySQLConnection } from './connection';
+
+/**
+ * The tables in the database. This is a view of the `information_schema.tables` table.
+ */
+export interface MySQLTable {
+ table_name: string;
+}
+
+export const listTables = async (
+ connection: MySQLConnection,
+): Promise =>
+ new Promise((resolve, reject) =>
+ connection.query(
+ `SELECT table_name FROM information_schema.tables WHERE table_schema = DATABASE()`,
+ (error, results) => (error ? reject(error) : resolve(results)),
+ ),
+ );
diff --git a/packages/mysql-store/src/store.ts b/packages/mysql-store/src/store.ts
new file mode 100644
index 0000000..cb8372f
--- /dev/null
+++ b/packages/mysql-store/src/store.ts
@@ -0,0 +1,115 @@
+import { Connection, Pool, PoolConfig, createPool } from 'mysql';
+import {
+ Store,
+ StoreCollection,
+ StoreCollection_Slim,
+ StoreDeleteOptions,
+ StoreDescribeCollectionOptions,
+ StoreDropCollectionOptions,
+ StoreEnsureCollectionOptions,
+ StoreError,
+ StoreFindOptions,
+ StoreInsertOptions,
+ StoreInsertionResponse,
+ StoreList,
+ StoreMutationResponse,
+ StoreUpdateOptions,
+} from '@neuledge/store';
+import {
+ dataTypeMap,
+ listTableColumns,
+ listIndexAttributes,
+ listTables,
+ createTableIfNotExists,
+ addIndex,
+ addColumn,
+ dropColumn,
+ dropIndex,
+ dropTableIfExists,
+} from './queries';
+import {
+ describeCollection,
+ dropCollection,
+ ensureCollection,
+ listCollections,
+} from '@neuledge/sql-store';
+
+export type MySQLStoreClient = Pick;
+
+export type MySQLStoreOptions = PoolConfig | { client: MySQLStoreClient };
+
+export class MySQLStore implements Store {
+ private connection: MySQLStoreClient;
+
+ constructor(options: MySQLStoreOptions) {
+ this.connection =
+ 'client' in options ? options.client : createPool(options);
+ }
+
+ // connection methods
+
+ async close(): Promise {
+ await new Promise((resolve, reject) =>
+ this.connection.end((error) =>
+ error
+ ? reject(
+ new StoreError(
+ StoreError.Code.INTERNAL_ERROR,
+ error.message,
+ error,
+ ),
+ )
+ : resolve(),
+ ),
+ );
+ }
+
+ // store methods
+
+ async listCollections(): Promise {
+ return listCollections(this.connection, { listTables });
+ }
+
+ async describeCollection(
+ options: StoreDescribeCollectionOptions,
+ ): Promise {
+ return describeCollection(options, this.connection, {
+ listTableColumns,
+ listIndexAttributes,
+ dataTypeMap,
+ });
+ }
+
+ async ensureCollection(options: StoreEnsureCollectionOptions): Promise {
+ return ensureCollection(options, this.connection, {
+ createTableIfNotExists,
+ addIndex,
+ addColumn,
+ dropColumn,
+ dropIndex,
+ listTableColumns,
+ listIndexAttributes,
+ dataTypeMap,
+ });
+ }
+
+ async dropCollection(options: StoreDropCollectionOptions): Promise {
+ return dropCollection(options, this.connection, { dropTableIfExists });
+ }
+
+ async find(options: StoreFindOptions): Promise {
+ throw new Error('Method not implemented.');
+ }
+
+ async insert(options: StoreInsertOptions): Promise {
+ throw new Error('Method not implemented.');
+ }
+
+ async update(options: StoreUpdateOptions): Promise {
+ throw new Error('Method not implemented.');
+ }
+
+ async delete(options: StoreDeleteOptions): Promise {
+ throw new Error('Method not implemented.');
+ }
+}
diff --git a/packages/mysql-store/tsconfig.json b/packages/mysql-store/tsconfig.json
new file mode 100644
index 0000000..c67724d
--- /dev/null
+++ b/packages/mysql-store/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "extends": "@neuledge/tsconfig/base.json",
+ "compilerOptions": {
+ "baseUrl": "src",
+ "rootDir": "src",
+ "outDir": "dist"
+ },
+ "include": ["src"],
+ "exclude": ["node_modules", "**/__ignore__/**"]
+}
diff --git a/packages/mysql-store/tsup.config.json b/packages/mysql-store/tsup.config.json
new file mode 100644
index 0000000..2f3a43d
--- /dev/null
+++ b/packages/mysql-store/tsup.config.json
@@ -0,0 +1,6 @@
+{
+ "entry": ["src/index.ts"],
+ "format": ["esm", "cjs"],
+ "sourcemap": true,
+ "shims": true
+}
diff --git a/packages/postgresql-store/.npmignore b/packages/postgresql-store/.npmignore
new file mode 100644
index 0000000..d2ee3b5
--- /dev/null
+++ b/packages/postgresql-store/.npmignore
@@ -0,0 +1,6 @@
+/*
+!/dist/*.js
+!/dist/*.js.map
+!/dist/*.mjs
+!/dist/*.mjs.map
+!/dist/**/*.d.ts
\ No newline at end of file
diff --git a/packages/postgresql-store/README.md b/packages/postgresql-store/README.md
new file mode 100644
index 0000000..977c75b
--- /dev/null
+++ b/packages/postgresql-store/README.md
@@ -0,0 +1,34 @@
+# Neuledge PostgreSQL Store
+
+A store for [Neuledge Engine](https://neuledge.com) that uses [PostgreSQL](https://www.postgresql.org) database as a backend.
+
+## 📦 Installation
+
+```bash
+npm install @neuledge/postgresql-store
+```
+
+## 🚀 Getting started
+
+```ts
+import { PostgreSQLStore } from '@neuledge/postgresql-store';
+
+const store = new PostgreSQLStore({
+ host: process.env.POSTGRESQL_HOST ?? 'localhost',
+  port: Number(process.env.POSTGRESQL_PORT) || 5432,
+ user: process.env.POSTGRESQL_USER ?? 'postgres',
+ password: process.env.POSTGRESQL_PASSWORD,
+ ssl: process.env.POSTGRESQL_SSL === 'true',
+ database: process.env.POSTGRESQL_DATABASE ?? 'my-database',
+});
+
+const engine = new Engine({
+ store,
+});
+```
+
+For more information, please refer to the [main repository](https://github.com/neuledge/engine-js).
+
+## 📄 License
+
+Neuledge is [Apache 2.0 licensed](https://github.com/neuledge/engine-js/blob/main/LICENSE).
diff --git a/packages/postgresql-store/jest.config.json b/packages/postgresql-store/jest.config.json
new file mode 100644
index 0000000..5901941
--- /dev/null
+++ b/packages/postgresql-store/jest.config.json
@@ -0,0 +1,3 @@
+{
+ "preset": "@neuledge/jest-ts-preset"
+}
diff --git a/packages/postgresql-store/package.json b/packages/postgresql-store/package.json
new file mode 100644
index 0000000..cf2d864
--- /dev/null
+++ b/packages/postgresql-store/package.json
@@ -0,0 +1,50 @@
+{
+ "name": "@neuledge/postgresql-store",
+ "version": "0.0.0",
+  "description": "PostgreSQL store implementation for Neuledge Engine",
+ "keywords": [
+ "neuledge",
+ "postgres",
+ "postgresql",
+ "store",
+ "database"
+ ],
+ "main": "./dist/index.js",
+ "module": "./dist/index.mjs",
+  "types": "./dist/index.d.ts",
+ "exports": {
+ ".": {
+ "require": "./dist/index.js",
+ "import": "./dist/index.mjs",
+ "types": "./dist/index.d.ts"
+ }
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/neuledge/engine-js.git"
+ },
+ "license": "Apache-2.0",
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">= 16"
+ },
+ "scripts": {
+ "types": "rimraf --glob dist/*.{d.ts,d.ts.map} dist/**/*.{d.ts,d.ts.map} && tsc --emitDeclarationOnly && tsc-alias",
+ "build": "rimraf --glob dist/*.{js,js.map,mjs,mjs.map} && tsup",
+ "test": "jest",
+ "lint": "eslint . --ext \"js,jsx,ts,tsx,mjs,cjs\"",
+ "lint:strict": "yarn lint --max-warnings 0"
+ },
+  "dependencies": {
+    "@neuledge/sql-store": "^0.0.0",
+    "@neuledge/store": "^0.2.0",
+    "pg": "^8.10.0",
+    "pg-format": "^1.0.4"
+  },
+  "devDependencies": {
+    "@types/pg": "^8.6.6",
+    "@types/pg-format": "^1.0.2"
+  }
+}
diff --git a/packages/postgresql-store/src/index.ts b/packages/postgresql-store/src/index.ts
new file mode 100644
index 0000000..d406816
--- /dev/null
+++ b/packages/postgresql-store/src/index.ts
@@ -0,0 +1 @@
+export * from './store';
diff --git a/packages/postgresql-store/src/queries/__fixtures__/posts-table.ts b/packages/postgresql-store/src/queries/__fixtures__/posts-table.ts
new file mode 100644
index 0000000..0f05ad5
--- /dev/null
+++ b/packages/postgresql-store/src/queries/__fixtures__/posts-table.ts
@@ -0,0 +1,91 @@
+import { StoreCollection, StoreDocument } from '@neuledge/store';
+
+export const postsTableName = 'posts';
+
+export const postsCollection: StoreCollection = {
+ name: postsTableName,
+ fields: {
+ id: {
+ name: 'id',
+ type: 'number',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: 32,
+ scale: 0,
+ },
+ author_id: {
+ name: 'author_id',
+ type: 'number',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: 32,
+ scale: 0,
+ },
+ title: {
+ name: 'title',
+ type: 'string',
+ list: false,
+ nullable: false,
+ size: 100,
+ precision: null,
+ scale: null,
+ },
+ body: {
+ name: 'body',
+ type: 'string',
+ list: false,
+ nullable: false,
+ size: 1000,
+ precision: null,
+ scale: null,
+ },
+ created_at: {
+ name: 'created_at',
+ type: 'date-time',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: null,
+ scale: null,
+ },
+ updated_at: {
+ name: 'updated_at',
+ type: 'date-time',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: null,
+ scale: null,
+ },
+ },
+ primaryKey: {
+ name: 'id',
+ fields: { id: { sort: 'asc' } },
+ unique: 'primary',
+ auto: 'increment',
+ },
+ indexes: {
+ id: {
+ name: 'id',
+ fields: { id: { sort: 'asc' } },
+ unique: 'primary',
+ auto: 'increment',
+ },
+ posts_author_id_index: {
+ name: 'posts_author_id_index',
+ fields: { author_id: { sort: 'asc' } },
+ unique: false,
+ },
+ },
+};
+
+export const postsTableRow1: StoreDocument = {
+ id: 1,
+ author_id: 1,
+ title: 'Post 1',
+ body: 'Post 1 body',
+ created_at: new Date('2020-01-01T00:00:00.000Z'),
+ updated_at: new Date('2020-01-01T00:00:00.000Z'),
+};
diff --git a/packages/postgresql-store/src/queries/__fixtures__/users-table.ts b/packages/postgresql-store/src/queries/__fixtures__/users-table.ts
new file mode 100644
index 0000000..ba8f4f5
--- /dev/null
+++ b/packages/postgresql-store/src/queries/__fixtures__/users-table.ts
@@ -0,0 +1,225 @@
+import {
+ StoreCollection,
+ StoreCollection_Slim,
+ StoreDocument,
+} from '@neuledge/store';
+import { PostgreSQLTable } from '../list-tables';
+import { PostgreSQLColumn } from '../list-table-columns';
+import { PostgreSQLIndexAttribute } from '../list-table-statistics';
+
+export const usersTableName = 'users';
+
+export const usersTable_dropSql = `DROP TABLE IF EXISTS users`;
+
+export const usersTable_createSql = `CREATE TABLE IF NOT EXISTS users (id BIGSERIAL NOT NULL, name VARCHAR(50), email VARCHAR(100) NOT NULL, phone VARCHAR(20), created_at TIMESTAMP NOT NULL, updated_at TIMESTAMP NOT NULL, CONSTRAINT users_pkey PRIMARY KEY (id))`;
+
+export const usersTable_phoneAddSql = `ALTER TABLE users ADD COLUMN phone VARCHAR(20)`;
+
+export const usersTable_emailIndexCreateSql = `CREATE UNIQUE INDEX IF NOT EXISTS users_email_idx ON users (email ASC)`;
+
+export const usersTable_phoneEmailIndexCreateSql = `CREATE INDEX IF NOT EXISTS users_phone_email_idx ON users (phone DESC, email ASC)`;
+
+export const usersTable: PostgreSQLTable = { table_name: usersTableName };
+
+export const usersCollection_slim: StoreCollection_Slim = {
+ name: usersTableName,
+};
+
+export const usersTableColumns: PostgreSQLColumn[] = [
+ {
+ column_name: 'id',
+ data_type: 'integer',
+ list: false,
+ character_maximum_length: null,
+ numeric_precision: 32,
+ numeric_scale: 0,
+ is_nullable: false,
+ is_auto_increment: true,
+ },
+ {
+ column_name: 'name',
+ data_type: 'character varying',
+ list: false,
+ character_maximum_length: 50,
+ numeric_precision: null,
+ numeric_scale: null,
+ is_nullable: true,
+ is_auto_increment: null,
+ },
+ {
+ column_name: 'email',
+ data_type: 'character varying',
+ list: false,
+ character_maximum_length: 100,
+ numeric_precision: null,
+ numeric_scale: null,
+ is_nullable: false,
+ is_auto_increment: null,
+ },
+ {
+ column_name: 'phone',
+ data_type: 'character varying',
+ list: false,
+ character_maximum_length: 20,
+ numeric_precision: null,
+ numeric_scale: null,
+ is_nullable: true,
+ is_auto_increment: null,
+ },
+ {
+ column_name: 'created_at',
+ data_type: 'timestamp without time zone',
+ list: false,
+ character_maximum_length: null,
+ numeric_precision: null,
+ numeric_scale: null,
+ is_nullable: false,
+ is_auto_increment: null,
+ },
+ {
+ column_name: 'updated_at',
+ data_type: 'timestamp without time zone',
+ list: false,
+ character_maximum_length: null,
+ numeric_precision: null,
+ numeric_scale: null,
+ is_nullable: false,
+ is_auto_increment: null,
+ },
+];
+
+export const usersTablePrimaryIndexes: PostgreSQLIndexAttribute[] = [
+ {
+ index_name: `users_id_idx`,
+ column_name: 'id',
+ seq_in_index: 1,
+ direction: 'ASC',
+ nulls: 'LAST',
+ is_unique: true,
+ is_primary: true,
+ },
+];
+
+export const usersTableIndexes: PostgreSQLIndexAttribute[] = [
+ {
+ index_name: `users_email_idx`,
+ column_name: 'email',
+ seq_in_index: 1,
+ direction: 'ASC',
+ nulls: 'LAST',
+ is_unique: true,
+ is_primary: false,
+ },
+ {
+ index_name: `users_phone_email_idx`,
+ column_name: 'phone',
+ seq_in_index: 1,
+ direction: 'DESC',
+ nulls: 'FIRST',
+ is_unique: false,
+ is_primary: false,
+ },
+ {
+ index_name: `users_phone_email_idx`,
+ column_name: 'email',
+ seq_in_index: 2,
+ direction: 'ASC',
+ nulls: 'LAST',
+ is_unique: false,
+ is_primary: false,
+ },
+ ...usersTablePrimaryIndexes,
+];
+
+export const usersCollection: StoreCollection = {
+ name: usersTableName,
+ fields: {
+ id: {
+ name: 'id',
+ type: 'number',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: 32,
+ scale: 0,
+ },
+ name: {
+ name: 'name',
+ type: 'string',
+ list: false,
+ nullable: true,
+ size: 50,
+ precision: null,
+ scale: null,
+ },
+ email: {
+ name: 'email',
+ type: 'string',
+ list: false,
+ nullable: false,
+ size: 100,
+ precision: null,
+ scale: null,
+ },
+ phone: {
+ name: 'phone',
+ type: 'string',
+ list: false,
+ nullable: true,
+ size: 20,
+ precision: null,
+ scale: null,
+ },
+ created_at: {
+ name: 'created_at',
+ type: 'date-time',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: null,
+ scale: null,
+ },
+ updated_at: {
+ name: 'updated_at',
+ type: 'date-time',
+ list: false,
+ nullable: false,
+ size: null,
+ precision: null,
+ scale: null,
+ },
+ },
+ primaryKey: {
+ name: 'id',
+ fields: { id: { sort: 'asc' } },
+ unique: 'primary',
+ auto: 'increment',
+ },
+ indexes: {
+ id: {
+ name: 'id',
+ fields: { id: { sort: 'asc' } },
+ unique: 'primary',
+ auto: 'increment',
+ },
+ email: {
+ name: 'email',
+ fields: { email: { sort: 'asc' } },
+ unique: true,
+ },
+ phone_email: {
+ name: 'phone_email',
+ fields: { phone: { sort: 'desc' }, email: { sort: 'asc' } },
+ unique: false,
+ },
+ },
+};
+
+export const usersTableRow1: StoreDocument = {
+ id: 1,
+ name: 'John Doe',
+ email: 'john@example.com',
+ phone: '+1 555 555 5555',
+ created_at: new Date('2020-01-01T00:00:00.000Z'),
+ updated_at: new Date('2020-01-01T00:02:00.000Z'),
+};
diff --git a/packages/postgresql-store/src/queries/add-column.ts b/packages/postgresql-store/src/queries/add-column.ts
new file mode 100644
index 0000000..8d34c92
--- /dev/null
+++ b/packages/postgresql-store/src/queries/add-column.ts
@@ -0,0 +1,100 @@
+import { StoreCollection, StoreField } from '@neuledge/store';
+import { PostgreSQLConnection, encodeIdentifier } from './connection';
+
+export const addColumn = async (
+ connection: PostgreSQLConnection,
+ collection: StoreCollection,
+ field: StoreField,
+): Promise<void> => {
+ await connection.query(
+ `ALTER TABLE ${encodeIdentifier(
+ collection.name,
+ )} ADD COLUMN ${getColumnDefinition(field, collection)}`,
+ );
+};
+
+export const getColumnDefinition = (
+ field: StoreField,
+ collection: StoreCollection,
+): string =>
+ `${encodeIdentifier(field.name)} ${getColumnDataType(field, collection)}${
+ field.list ? '[]' : ''
+ }${field.nullable ? '' : ' NOT NULL'}`;
+
+export const getColumnDataType = (
+ field: StoreField,
+ collection?: StoreCollection,
+): string => {
+ switch (field.type) {
+ case 'string': {
+ if (field.size) {
+ return `VARCHAR(${field.size})`;
+ }
+
+ return 'TEXT';
+ }
+
+ case 'number': {
+ return getNumberDateType(field, collection);
+ }
+
+ case 'date-time': {
+ return 'TIMESTAMP';
+ }
+
+ case 'boolean': {
+ return 'BOOLEAN';
+ }
+
+ case 'json': {
+ return 'JSONB';
+ }
+
+ case 'enum': {
+ return `ENUM(${field.values?.map((value) => `'${value}'`).join(', ')})`;
+ }
+
+ case 'binary': {
+ return 'BYTEA';
+ }
+
+ default: {
+ throw new Error(`Unsupported field type: ${field.type}`);
+ }
+ }
+};
+
+// https://www.postgresql.org/docs/current/datatype-numeric.html
+const getNumberDateType = (
+ field: StoreField,
+ collection?: StoreCollection,
+): string => {
+ if (field.scale === 0) {
+ if (
+ collection?.primaryKey.auto === 'increment' &&
+ collection.primaryKey.fields[field.name]
+ ) {
+ if (!field.precision || field.precision >= 10) {
+ return 'BIGSERIAL';
+ }
+
+ if (field.precision < 5) {
+ return 'SMALLSERIAL';
+ }
+
+ return 'SERIAL';
+ }
+
+ if (field.precision) {
+ return `NUMERIC(${field.precision})`;
+ }
+
+ return 'BIGINT';
+ }
+
+ if (field.precision && field.scale) {
+ return `NUMERIC(${field.precision}, ${field.scale})`;
+ }
+
+ return 'DOUBLE PRECISION';
+};
diff --git a/packages/postgresql-store/src/queries/add-index.ts b/packages/postgresql-store/src/queries/add-index.ts
new file mode 100644
index 0000000..a4c8dd8
--- /dev/null
+++ b/packages/postgresql-store/src/queries/add-index.ts
@@ -0,0 +1,21 @@
+import { StoreCollection, StoreIndex } from '@neuledge/store';
+import { PostgreSQLConnection, encodeIdentifier } from './connection';
+
+export const addIndex = async (
+ connection: PostgreSQLConnection,
+ collection: StoreCollection,
+ index: StoreIndex,
+): Promise<void> => {
+ await connection.query(
+ `CREATE ${
+ index.unique ? 'UNIQUE INDEX' : 'INDEX'
+ } IF NOT EXISTS ${encodeIdentifier(
+ `${collection.name}_${index.name}_idx`,
+ )} ON ${encodeIdentifier(collection.name)} (${Object.entries(index.fields)
+ .map(
+ ([field, { sort }]) =>
+ `${encodeIdentifier(field)} ${sort === 'desc' ? 'DESC' : 'ASC'}`,
+ )
+ .join(', ')})`,
+ );
+};
diff --git a/packages/postgresql-store/src/queries/connection.ts b/packages/postgresql-store/src/queries/connection.ts
new file mode 100644
index 0000000..c46434b
--- /dev/null
+++ b/packages/postgresql-store/src/queries/connection.ts
@@ -0,0 +1,47 @@
+import { QueryHelpers } from '@neuledge/sql-store';
+import {
+ StoreError,
+ StoreField,
+ StoreScalarValue,
+ StoreShape,
+} from '@neuledge/store';
+import { Client, Pool } from 'pg';
+import format from 'pg-format';
+import { getColumnDataType } from './add-column';
+
+export type PostgreSQLConnection = Pick<Client | Pool, 'query'>;
+
+export const encodeIdentifier = format.ident;
+
+export const encodeLiteral = (val: StoreScalarValue, field: StoreField) => {
+ if (field.list) {
+ if (!Array.isArray(val)) {
+ throw new StoreError(
+ StoreError.Code.INVALID_INPUT,
+ `Expected array value for field ${field.name} but got ${val}`,
+ );
+ }
+
+ if (!val.length) {
+ return `ARRAY[]::${getColumnDataType(field)}[]`;
+ }
+
+ return `ARRAY[${val.map((v) => encodeScalar(v, field)).join(', ')}]`;
+ }
+
+ return encodeScalar(val, field);
+};
+
+const encodeScalar = (val: StoreScalarValue, shape: StoreShape) => {
+ if (shape.type === 'json') {
+ return `${format.literal(JSON.stringify(val))}::JSONB`;
+ }
+
+ // format.literal will convert everything else to string which will work fine for bigint
+ return format.literal(val as Exclude);
+};
+
+export const queryHelpers: QueryHelpers = {
+ encodeIdentifier,
+ encodeLiteral,
+};
diff --git a/packages/postgresql-store/src/queries/create-table.ts b/packages/postgresql-store/src/queries/create-table.ts
new file mode 100644
index 0000000..e04523e
--- /dev/null
+++ b/packages/postgresql-store/src/queries/create-table.ts
@@ -0,0 +1,31 @@
+import { StoreCollection } from '@neuledge/store';
+import { getColumnDefinition } from './add-column';
+import { addIndex } from './add-index';
+import { PostgreSQLConnection, encodeIdentifier } from './connection';
+
+export const createTableIfNotExists = async (
+ connection: PostgreSQLConnection,
+ collection: StoreCollection,
+): Promise<void> => {
+ await connection.query(
+ `CREATE TABLE IF NOT EXISTS ${encodeIdentifier(
+ collection.name,
+ )} (${Object.values(collection.fields)
+ .map((field) => getColumnDefinition(field, collection))
+ .join(', ')}, CONSTRAINT ${encodeIdentifier(
+ `${collection.name}_pkey`,
+ )} PRIMARY KEY (${Object.keys(collection.primaryKey.fields)
+ .map((val) => encodeIdentifier(val))
+ .join(', ')}))`,
+ );
+
+ // add unique constraints if primary key has descending fields
+ // https://stackoverflow.com/a/45604459/518153
+ if (
+ Object.values(collection.primaryKey.fields).some(
+ (field) => field.sort === 'desc',
+ )
+ ) {
+ await addIndex(connection, collection, collection.primaryKey);
+ }
+};
diff --git a/packages/postgresql-store/src/queries/delete-from.ts b/packages/postgresql-store/src/queries/delete-from.ts
new file mode 100644
index 0000000..eb47fa6
--- /dev/null
+++ b/packages/postgresql-store/src/queries/delete-from.ts
@@ -0,0 +1,15 @@
+import { PostgreSQLConnection, encodeIdentifier } from './connection';
+
+export const deleteFrom = async (
+ connection: PostgreSQLConnection,
+ name: string,
+ where: string | null,
+): Promise<number> => {
+ const res = await connection.query(
+ where
+ ? `DELETE FROM ${encodeIdentifier(name)} WHERE ${where}`
+ : `TRUNCATE TABLE ${encodeIdentifier(name)}`,
+ );
+
+ return res.rowCount;
+};
diff --git a/packages/postgresql-store/src/queries/drop-column.ts b/packages/postgresql-store/src/queries/drop-column.ts
new file mode 100644
index 0000000..810fe3c
--- /dev/null
+++ b/packages/postgresql-store/src/queries/drop-column.ts
@@ -0,0 +1,14 @@
+import { StoreCollection } from '@neuledge/store';
+import { PostgreSQLConnection, encodeIdentifier } from './connection';
+
+export const dropColumn = async (
+ connection: PostgreSQLConnection,
+ collection: StoreCollection,
+ field: string,
+): Promise<void> => {
+ await connection.query(
+ `ALTER TABLE ${encodeIdentifier(
+ collection.name,
+ )} DROP COLUMN ${encodeIdentifier(field)}`,
+ );
+};
diff --git a/packages/postgresql-store/src/queries/drop-index.ts b/packages/postgresql-store/src/queries/drop-index.ts
new file mode 100644
index 0000000..f942c2c
--- /dev/null
+++ b/packages/postgresql-store/src/queries/drop-index.ts
@@ -0,0 +1,14 @@
+import { StoreCollection } from '@neuledge/store';
+import { PostgreSQLConnection, encodeIdentifier } from './connection';
+
+export const dropIndex = async (
+ connection: PostgreSQLConnection,
+ collection: StoreCollection,
+ index: string,
+): Promise<void> => {
+ await connection.query(
+ `DROP INDEX IF EXISTS ${encodeIdentifier(
+ `${collection.name}_${index}_idx`,
+ )}`,
+ );
+};
diff --git a/packages/postgresql-store/src/queries/drop-table.ts b/packages/postgresql-store/src/queries/drop-table.ts
new file mode 100644
index 0000000..b462ba0
--- /dev/null
+++ b/packages/postgresql-store/src/queries/drop-table.ts
@@ -0,0 +1,9 @@
+import format from 'pg-format';
+import { PostgreSQLConnection } from './connection';
+
+export const dropTableIfExists = async (
+ connection: PostgreSQLConnection,
+ tableName: string,
+): Promise<void> => {
+ await connection.query(`DROP TABLE IF EXISTS ${format.ident(tableName)}`);
+};
diff --git a/packages/postgresql-store/src/queries/index.ts b/packages/postgresql-store/src/queries/index.ts
new file mode 100644
index 0000000..7c03111
--- /dev/null
+++ b/packages/postgresql-store/src/queries/index.ts
@@ -0,0 +1,13 @@
+export * from './add-column';
+export * from './add-index';
+export * from './create-table';
+export * from './delete-from';
+export * from './drop-column';
+export * from './drop-index';
+export * from './drop-table';
+export * from './insert-into';
+export * from './list-tables';
+export * from './list-table-columns';
+export * from './list-table-statistics';
+export * from './update-set';
+export * from './select-from';
diff --git a/packages/postgresql-store/src/queries/insert-into.ts b/packages/postgresql-store/src/queries/insert-into.ts
new file mode 100644
index 0000000..4eefd77
--- /dev/null
+++ b/packages/postgresql-store/src/queries/insert-into.ts
@@ -0,0 +1,31 @@
+import { StoreDocument, StoreField, StoreScalarValue } from '@neuledge/store';
+import {
+ PostgreSQLConnection,
+ encodeIdentifier,
+ encodeLiteral,
+} from './connection';
+
+export const insertInto = async (
+ connection: PostgreSQLConnection,
+ name: string,
+ columns: StoreField[],
+ values: (StoreScalarValue | undefined)[][],
+ returns: string[],
+): Promise<StoreDocument[]> =>
+ connection
+ .query(
+ `INSERT INTO ${encodeIdentifier(name)} (${columns
+ .map((column) => encodeIdentifier(column.name))
+ .join(', ')}) VALUES (${values
+ .map((arr) =>
+ arr
+ .map((v, i) =>
+ v === undefined ? 'DEFAULT' : encodeLiteral(v, columns[i]),
+ )
+ .join(', '),
+ )
+ .join('), (')}) RETURNING ${returns
+ .map((column) => encodeIdentifier(column))
+ .join(', ')}`,
+ )
+ .then((res) => res.rows);
diff --git a/packages/postgresql-store/src/queries/list-table-columns.ts b/packages/postgresql-store/src/queries/list-table-columns.ts
new file mode 100644
index 0000000..7a7b6d6
--- /dev/null
+++ b/packages/postgresql-store/src/queries/list-table-columns.ts
@@ -0,0 +1,75 @@
+import { StoreShapeType } from '@neuledge/store';
+import { PostgreSQLConnection } from './connection';
+
+/**
+ * A table column from the information_schema.columns table.
+ */
+export interface PostgreSQLColumn {
+ column_name: string;
+ data_type: string;
+ list: boolean;
+ character_maximum_length: number | null;
+ numeric_precision: number | null;
+ numeric_scale: number | null;
+ is_nullable: boolean;
+ is_auto_increment: boolean | null;
+}
+
+export const listTableColumns = async (
+ connection: PostgreSQLConnection,
+ tableName: string,
+): Promise<PostgreSQLColumn[]> =>
+ connection
+ .query(listTableColumns_sql, [tableName])
+ .then((result) => result.rows);
+
+export const listTableColumns_sql = `SELECT c.column_name, COALESCE(o.data_type, c.data_type) as data_type,(c.data_type = 'ARRAY') as list, c.character_maximum_length, c.numeric_precision, c.numeric_scale, (c.is_nullable = 'YES') as is_nullable, c.column_default LIKE 'nextval(%)' AS is_auto_increment FROM information_schema.columns c LEFT JOIN information_schema.element_types o ON o.object_catalog = c.table_catalog AND o.object_schema = c.table_schema AND o.object_name = c.table_name AND o.object_type = 'TABLE' AND o.collection_type_identifier = c.dtd_identifier WHERE c.table_catalog = current_database() AND c.table_schema = current_schema() AND c.table_name = $1`;
+
+// will produce typnames instead of data_type:
+// export const listTableColumns_sql = `SELECT s.column_name, COALESCE(e.typname, t.typname) as data_type, a.attndims as dimensions,s.character_maximum_length, s.numeric_precision, s.numeric_scale, (s.is_nullable = 'YES') as is_nullable, s.column_default LIKE 'nextval(%)' AS is_auto_increment FROM information_schema.columns AS s JOIN pg_namespace AS n ON n.nspname = s.table_schema JOIN pg_class AS c ON c.relnamespace = n.oid AND c.relname = s.table_name JOIN pg_attribute AS a ON a.attrelid = c.oid AND a.attname = s.column_name JOIN pg_type t ON t.oid = a.atttypid LEFT JOIN pg_type e ON e.oid = t.typelem WHERE table_catalog = current_database() AND table_schema = current_schema() AND table_name = $1`;
+
+export const dataTypeMap: Record<string, StoreShapeType> = {
+ bigint: 'number',
+ bigserial: 'number',
+ bit: 'string',
+ 'bit varying': 'string',
+ boolean: 'boolean',
+ box: 'string',
+ bytea: 'string',
+ character: 'string',
+ 'character varying': 'string',
+ cidr: 'string',
+ circle: 'string',
+ date: 'string',
+ 'double precision': 'number',
+ inet: 'string',
+ integer: 'number',
+ interval: 'string',
+ json: 'json',
+ jsonb: 'json',
+ line: 'string',
+ lseg: 'string',
+ macaddr: 'string',
+ money: 'string',
+ numeric: 'number',
+ path: 'string',
+ pg_lsn: 'string',
+ point: 'string',
+ polygon: 'string',
+ real: 'number',
+ smallint: 'number',
+ smallserial: 'number',
+ serial: 'number',
+ text: 'string',
+ time: 'date-time',
+ 'time with time zone': 'date-time',
+ 'time without time zone': 'date-time',
+ timestamp: 'date-time',
+ 'timestamp with time zone': 'date-time',
+ 'timestamp without time zone': 'date-time',
+ tsquery: 'string',
+ tsvector: 'string',
+ txid_snapshot: 'string',
+ uuid: 'string',
+ xml: 'string',
+};
diff --git a/packages/postgresql-store/src/queries/list-table-statistics.ts b/packages/postgresql-store/src/queries/list-table-statistics.ts
new file mode 100644
index 0000000..4f231cc
--- /dev/null
+++ b/packages/postgresql-store/src/queries/list-table-statistics.ts
@@ -0,0 +1,53 @@
+import { PostgreSQLConnection } from './connection';
+
+/**
+ * A table statistic row from the information_schema.statistics table.
+ */
+export interface PostgreSQLIndexAttribute {
+ index_name: string;
+ column_name: string;
+ seq_in_index: number;
+ direction: 'ASC' | 'DESC';
+ nulls: 'FIRST' | 'LAST';
+ is_unique: boolean;
+ is_primary: boolean;
+}
+
+export const listIndexAttributes = async (
+ connection: PostgreSQLConnection,
+ tableName: string,
+): Promise<PostgreSQLIndexAttribute[]> => {
+ const { rows } = await connection.query(
+ listIndexAttributes_sql,
+ [tableName],
+ );
+
+ for (const row of rows) {
+ if (!row.index_name.startsWith(`${tableName}_`)) continue;
+
+ row.index_name = row.index_name
+ .slice(tableName.length + 1)
+ .replace(/_idx$/, '');
+ }
+
+ return rows;
+};
+
+export const listIndexAttributes_sql =
+ `SELECT` +
+ ` irel.relname AS index_name,` +
+ ` a.attname AS column_name,` +
+ ` c.ordinality as seq_in_index,` +
+ ` CASE o.option & 1 WHEN 1 THEN 'DESC' ELSE 'ASC' END AS direction,` +
+ ` CASE o.option & 2 WHEN 2 THEN 'FIRST' ELSE 'LAST' END AS nulls,` +
+ ` i.indisunique AS is_unique,` +
+ ` i.indisprimary AS is_primary` +
+ ` FROM pg_index AS i` +
+ ` JOIN pg_class AS trel ON trel.oid = i.indrelid` +
+ ` JOIN pg_namespace AS tnsp ON trel.relnamespace = tnsp.oid` +
+ ` JOIN pg_class AS irel ON irel.oid = i.indexrelid` +
+ ` CROSS JOIN LATERAL unnest (i.indkey) WITH ORDINALITY AS c (colnum, ordinality)` +
+ ` LEFT JOIN LATERAL unnest (i.indoption) WITH ORDINALITY AS o (option, ordinality) ON c.ordinality = o.ordinality` +
+ ` JOIN pg_attribute AS a ON trel.oid = a.attrelid AND a.attnum = c.colnum` +
+ ` WHERE tnsp.nspname = current_schema() AND trel.relname = $1` +
+ ` ORDER BY index_name, seq_in_index`;
diff --git a/packages/postgresql-store/src/queries/list-tables.ts b/packages/postgresql-store/src/queries/list-tables.ts
new file mode 100644
index 0000000..8e0e7f8
--- /dev/null
+++ b/packages/postgresql-store/src/queries/list-tables.ts
@@ -0,0 +1,17 @@
+import { PostgreSQLConnection } from './connection';
+
+/**
+ * The tables in the database. This is a view of the `information_schema.tables` table.
+ */
+export interface PostgreSQLTable {
+ table_name: string;
+}
+
+export const listTables = async (
+ connection: PostgreSQLConnection,
+): Promise<PostgreSQLTable[]> =>
+ connection
+ .query(listTables_sql)
+ .then((result) => result.rows);
+
+export const listTables_sql = `SELECT table_name FROM information_schema.tables WHERE table_catalog = current_database() AND table_schema = current_schema() AND table_type = 'BASE TABLE'`;
diff --git a/packages/postgresql-store/src/queries/select-from.ts b/packages/postgresql-store/src/queries/select-from.ts
new file mode 100644
index 0000000..ff1f0ae
--- /dev/null
+++ b/packages/postgresql-store/src/queries/select-from.ts
@@ -0,0 +1,20 @@
+import { StoreDocument } from '@neuledge/store';
+import { PostgreSQLConnection } from './connection';
+
+export const selectFrom = async (
+ connection: PostgreSQLConnection,
+ select: string,
+ from: string,
+ where: string | null,
+ orderBy: string | null,
+ limit: number,
+ offset: number,
+): Promise<StoreDocument[]> => {
+ const { rows } = await connection.query(
+ `SELECT ${select} FROM ${from}${where ? ` WHERE ${where}` : ''}${
+ orderBy ? ` ORDER BY ${orderBy}` : ''
+ } LIMIT ${Number(limit)} OFFSET ${Number(offset)}`,
+ );
+
+ return rows;
+};
diff --git a/packages/postgresql-store/src/queries/update-set.ts b/packages/postgresql-store/src/queries/update-set.ts
new file mode 100644
index 0000000..165d3a2
--- /dev/null
+++ b/packages/postgresql-store/src/queries/update-set.ts
@@ -0,0 +1,26 @@
+import { StoreField, StoreScalarValue } from '@neuledge/store';
+import {
+ PostgreSQLConnection,
+ encodeIdentifier,
+ encodeLiteral,
+} from './connection';
+
+export const updateSet = async (
+ connection: PostgreSQLConnection,
+ name: string,
+ setValues: [field: StoreField, value: StoreScalarValue][],
+ where: string | null,
+): Promise<number> => {
+ const setClauses = setValues.map(
+ ([field, value]) =>
+ `${encodeIdentifier(field.name)} = ${encodeLiteral(value, field)}`,
+ );
+
+ const res = await connection.query(
+ `UPDATE ${encodeIdentifier(name)} SET ${setClauses.join(', ')}${
+ where ? ` WHERE ${where}` : ''
+ }`,
+ );
+
+ return res.rowCount;
+};
diff --git a/packages/postgresql-store/src/store.test.ts b/packages/postgresql-store/src/store.test.ts
new file mode 100644
index 0000000..aee1df4
--- /dev/null
+++ b/packages/postgresql-store/src/store.test.ts
@@ -0,0 +1,459 @@
+import {
+ listIndexAttributes_sql,
+ listTableColumns_sql,
+ listTables_sql,
+} from './queries';
+import {
+ postsCollection,
+ postsTableName,
+ postsTableRow1,
+} from './queries/__fixtures__/posts-table';
+import {
+ usersCollection,
+ usersCollection_slim,
+ usersTable,
+ usersTableColumns,
+ usersTableIndexes,
+ usersTableName,
+ usersTablePrimaryIndexes,
+ usersTableRow1,
+ usersTable_createSql,
+ usersTable_dropSql,
+ usersTable_emailIndexCreateSql,
+ usersTable_phoneAddSql,
+ usersTable_phoneEmailIndexCreateSql,
+} from './queries/__fixtures__/users-table';
+import { PostgreSQLStore } from './store';
+
+/* eslint-disable max-lines-per-function */
+
+describe('store', () => {
+ describe('PostgreSQLStore()', () => {
+ describe('.constructor()', () => {
+ it('should be able to create a new store', () => {
+ const store = new PostgreSQLStore({
+ client: {} as never,
+ });
+
+ expect(store).toBeInstanceOf(PostgreSQLStore);
+ });
+ });
+
+ describe('.close()', () => {
+ it('should be able to close the store', async () => {
+ const end = jest.fn().mockResolvedValue(void 0);
+
+ const store = new PostgreSQLStore({
+ client: { end } as never,
+ });
+
+ expect(store).toBeInstanceOf(PostgreSQLStore);
+ expect(end).toHaveBeenCalledTimes(0);
+
+ await store.close();
+ expect(end).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ let store: PostgreSQLStore;
+ let query: jest.Mock;
+
+ beforeEach(() => {
+ query = jest.fn().mockRejectedValue(new Error('unexpected query call'));
+
+ store = new PostgreSQLStore({
+ client: { query } as never,
+ });
+ });
+
+ describe('.listCollections()', () => {
+ it('should be able to list collections', async () => {
+ query.mockResolvedValueOnce({ rows: [usersTable] });
+
+ const collections = await store.listCollections();
+
+ expect(query).toHaveBeenCalledTimes(1);
+ expect(query).toHaveBeenCalledWith(listTables_sql);
+
+ expect(collections).toEqual([usersCollection_slim]);
+ });
+ });
+
+ describe('.describeCollection()', () => {
+ it('should be able to describe a collection', async () => {
+ query.mockResolvedValueOnce({ rows: usersTableColumns });
+ query.mockResolvedValueOnce({ rows: usersTableIndexes });
+
+ const collection = await store.describeCollection({
+ collection: usersCollection_slim,
+ });
+
+ expect(query).toHaveBeenCalledTimes(2);
+ expect(query).toHaveBeenNthCalledWith(1, listTableColumns_sql, [
+ usersTableName,
+ ]);
+ expect(query).toHaveBeenNthCalledWith(2, listIndexAttributes_sql, [
+ usersTableName,
+ ]);
+
+ expect(collection).toEqual(usersCollection);
+ });
+ });
+
+ describe('.ensureCollection()', () => {
+ it('should skip create an existing table', async () => {
+ query.mockResolvedValueOnce({ rows: [] });
+ query.mockResolvedValueOnce({ rows: usersTableColumns });
+ query.mockResolvedValueOnce({ rows: usersTableIndexes });
+
+ await store.ensureCollection({
+ collection: usersCollection,
+ fields: Object.values(usersCollection.fields),
+ indexes: Object.values(usersCollection.indexes),
+ });
+
+ expect(query).toHaveBeenCalledTimes(3);
+ expect(query).toHaveBeenNthCalledWith(1, usersTable_createSql);
+ expect(query).toHaveBeenNthCalledWith(2, listTableColumns_sql, [
+ usersTableName,
+ ]);
+ expect(query).toHaveBeenNthCalledWith(3, listIndexAttributes_sql, [
+ usersTableName,
+ ]);
+ });
+
+ it('should create a new table', async () => {
+ query.mockResolvedValueOnce({ rows: [] });
+ query.mockResolvedValueOnce({ rows: usersTableColumns });
+ query.mockResolvedValueOnce({ rows: usersTablePrimaryIndexes });
+ query.mockResolvedValueOnce({ rows: [] });
+ query.mockResolvedValueOnce({ rows: [] });
+
+ await store.ensureCollection({
+ collection: usersCollection,
+ fields: Object.values(usersCollection.fields),
+ indexes: Object.values(usersCollection.indexes),
+ });
+
+ expect(query).toHaveBeenCalledTimes(5);
+ expect(query).toHaveBeenNthCalledWith(1, usersTable_createSql);
+ expect(query).toHaveBeenNthCalledWith(2, listTableColumns_sql, [
+ usersTableName,
+ ]);
+ expect(query).toHaveBeenNthCalledWith(3, listIndexAttributes_sql, [
+ usersTableName,
+ ]);
+ expect(query).toHaveBeenNthCalledWith(
+ 4,
+ usersTable_emailIndexCreateSql,
+ );
+ expect(query).toHaveBeenNthCalledWith(
+ 5,
+ usersTable_phoneEmailIndexCreateSql,
+ );
+ });
+
+ it('should create fill missing fields and indexes', async () => {
+ query.mockResolvedValueOnce({ rows: [] });
+ query.mockResolvedValueOnce({
+ rows: usersTableColumns.filter((c) => c.column_name !== 'phone'),
+ });
+ query.mockResolvedValueOnce({
+ rows: usersTableIndexes.filter(
+ (i) => !i.index_name.includes('phone'),
+ ),
+ });
+ query.mockResolvedValueOnce({ rows: [] });
+ query.mockResolvedValueOnce({ rows: [] });
+
+ await store.ensureCollection({
+ collection: usersCollection,
+ fields: Object.values(usersCollection.fields),
+ indexes: Object.values(usersCollection.indexes),
+ });
+
+ expect(query).toHaveBeenCalledTimes(5);
+ expect(query).toHaveBeenNthCalledWith(1, usersTable_createSql);
+ expect(query).toHaveBeenNthCalledWith(2, listTableColumns_sql, [
+ usersTableName,
+ ]);
+ expect(query).toHaveBeenNthCalledWith(3, listIndexAttributes_sql, [
+ usersTableName,
+ ]);
+ expect(query).toHaveBeenNthCalledWith(4, usersTable_phoneAddSql);
+ expect(query).toHaveBeenNthCalledWith(
+ 5,
+ usersTable_phoneEmailIndexCreateSql,
+ );
+ });
+ });
+
+ describe('.dropCollection()', () => {
+ it('should be able to drop a collection', async () => {
+ query.mockResolvedValueOnce({ rows: [] });
+
+ await store.dropCollection({ collection: usersCollection });
+
+ expect(query).toHaveBeenCalledTimes(1);
+ expect(query).toHaveBeenCalledWith(usersTable_dropSql);
+ });
+ });
+
+ describe('.find()', () => {
+ it('should be able to find documents', async () => {
+ query.mockResolvedValueOnce({ rows: [usersTableRow1] });
+
+ const res = await store.find({
+ collection: usersCollection,
+ where: {
+ email: { $eq: 'john@example.com' },
+ },
+ limit: 1,
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `SELECT * FROM ${usersTableName} WHERE email = 'john@example.com' LIMIT 1 OFFSET 0`,
+ );
+
+ expect(res).toEqual(Object.assign([usersTableRow1], { nextOffset: 1 }));
+ });
+
+ it('should be able to find documents with offset', async () => {
+ query.mockResolvedValueOnce({ rows: [] });
+
+ const res = await store.find({
+ collection: usersCollection,
+ where: {
+ email: { $eq: 'john@example.com' },
+ },
+ limit: 1,
+ offset: 1,
+ });
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `SELECT * FROM ${usersTableName} WHERE email = 'john@example.com' LIMIT 1 OFFSET 1`,
+ );
+
+ expect(res).toEqual(Object.assign([], { nextOffset: null }));
+ });
+
+ it('should be able to select columns', async () => {
+ query.mockResolvedValueOnce({ rows: [usersTableRow1] });
+
+ const res = await store.find({
+ collection: usersCollection,
+ select: {
+ id: true,
+ name: true,
+ phone: false,
+ },
+ limit: 1,
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `SELECT id, name FROM ${usersTableName} LIMIT 1 OFFSET 0`,
+ );
+
+ expect(res).toEqual(Object.assign([usersTableRow1], { nextOffset: 1 }));
+ });
+
+ it('should be able to join tables', async () => {
+ query.mockResolvedValueOnce({
+ rows: [{ ...postsTableRow1, author$0: usersTableRow1 }],
+ });
+
+ const res = await store.find({
+ collection: postsCollection,
+ innerJoin: {
+ author: [
+ {
+ collection: usersCollection,
+ select: true,
+ by: { author_id: { field: 'id' } },
+ },
+ ],
+ },
+ limit: 1,
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `SELECT "$".*, author$0.id AS "author$0.id", author$0.name AS "author$0.name", author$0.email AS "author$0.email", author$0.phone AS "author$0.phone", author$0.created_at AS "author$0.created_at", author$0.updated_at AS "author$0.updated_at" FROM ${postsTableName} "$" INNER JOIN ${usersTableName} author$0 ON (author$0.author_id = "$".id) LIMIT 1 OFFSET 0`,
+ );
+
+ expect(res).toEqual(
+ Object.assign([{ ...postsTableRow1, author: usersTableRow1 }], {
+ nextOffset: 1,
+ }),
+ );
+ });
+
+ it('should be able to sort documents', async () => {
+ query.mockResolvedValueOnce({ rows: [usersTableRow1] });
+
+ const res = await store.find({
+ collection: usersCollection,
+ sort: {
+ name: 'desc',
+ email: 'asc',
+ },
+ limit: 1,
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `SELECT * FROM ${usersTableName} ORDER BY name DESC, email ASC LIMIT 1 OFFSET 0`,
+ );
+
+ expect(res).toEqual(Object.assign([usersTableRow1], { nextOffset: 1 }));
+ });
+ });
+
+ describe('.insert()', () => {
+ it('should be able to insert a document with auto increment', async () => {
+ query.mockResolvedValueOnce({ rows: [{ id: 1234 }] });
+
+ const res = await store.insert({
+ collection: usersCollection,
+ documents: [
+ {
+ name: 'John Doe',
+ email: 'john@example.com',
+ created_at: new Date('2020-01-01T00:00:00.000Z'),
+ updated_at: new Date('2020-01-01T00:00:00.000Z'),
+ },
+ ],
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `INSERT INTO ${usersTableName} (id, name, email, phone, created_at, updated_at) VALUES (DEFAULT, 'John Doe', 'john@example.com', NULL, '2020-01-01 00:00:00.000+00', '2020-01-01 00:00:00.000+00') RETURNING id`,
+ );
+
+ expect(res).toEqual({
+ affectedCount: 1,
+ insertedIds: [{ id: 1234 }],
+ });
+ });
+
+ it('should be able to insert a document with custom id', async () => {
+ query.mockResolvedValueOnce({ rows: [{ id: 789 }] });
+
+ const res = await store.insert({
+ collection: usersCollection,
+ documents: [
+ {
+ id: 789,
+ name: 'John Doe',
+ email: 'john@example.com',
+ created_at: new Date('2020-01-01T00:00:00.000Z'),
+ updated_at: new Date('2020-01-01T00:00:00.000Z'),
+ },
+ ],
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+
+ expect(query).toHaveBeenCalledWith(
+ `INSERT INTO ${usersTableName} (id, name, email, phone, created_at, updated_at) VALUES ('789', 'John Doe', 'john@example.com', NULL, '2020-01-01 00:00:00.000+00', '2020-01-01 00:00:00.000+00') RETURNING id`,
+ );
+
+ expect(res).toEqual({
+ affectedCount: 1,
+ insertedIds: [{ id: 789 }],
+ });
+ });
+ });
+
+ describe('.update()', () => {
+ it('should be able to update a document', async () => {
+ query.mockResolvedValueOnce({ rowCount: 1 });
+
+ const res = await store.update({
+ collection: usersCollection,
+ where: { id: { $eq: 123 } },
+ set: {
+ name: 'John Doe',
+ email: 'john@example.com',
+ phone: undefined,
+ updated_at: new Date('2020-01-01T00:00:00.000Z'),
+ },
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+ expect(query).toHaveBeenCalledWith(
+ `UPDATE ${usersTableName} SET name = 'John Doe', email = 'john@example.com', phone = NULL, updated_at = '2020-01-01 00:00:00.000+00' WHERE id = '123'`,
+ );
+
+ expect(res).toEqual({
+ affectedCount: 1,
+ });
+ });
+
+ it('should be able to update multiple arbitrary documents', async () => {
+ query.mockResolvedValueOnce({ rowCount: 2 });
+
+ const res = await store.update({
+ collection: usersCollection,
+ set: {
+ name: 'John Doe',
+ email: 'john@example.com',
+ updated_at: new Date('2020-01-01T00:00:00.000Z'),
+ },
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+ expect(query).toHaveBeenCalledWith(
+ `UPDATE ${usersTableName} SET name = 'John Doe', email = 'john@example.com', updated_at = '2020-01-01 00:00:00.000+00'`,
+ );
+
+ expect(res).toEqual({
+ affectedCount: 2,
+ });
+ });
+ });
+
+ describe('.delete()', () => {
+ it('should be able to delete a document', async () => {
+ query.mockResolvedValueOnce({ rowCount: 1 });
+
+ const res = await store.delete({
+ collection: usersCollection,
+ where: { id: { $eq: 123 } },
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+ expect(query).toHaveBeenCalledWith(
+ `DELETE FROM ${usersTableName} WHERE id = '123'`,
+ );
+
+ expect(res).toEqual({
+ affectedCount: 1,
+ });
+ });
+
+ it('should be able to delete multiple arbitrary documents', async () => {
+ query.mockResolvedValueOnce({ rowCount: 2 });
+
+ const res = await store.delete({
+ collection: usersCollection,
+ });
+
+ expect(query).toHaveBeenCalledTimes(1);
+ expect(query).toHaveBeenCalledWith(`TRUNCATE TABLE ${usersTableName}`);
+
+ expect(res).toEqual({
+ affectedCount: 2,
+ });
+ });
+ });
+ });
+});
diff --git a/packages/postgresql-store/src/store.ts b/packages/postgresql-store/src/store.ts
new file mode 100644
index 0000000..5631ba8
--- /dev/null
+++ b/packages/postgresql-store/src/store.ts
@@ -0,0 +1,121 @@
+import { Client, Pool, PoolConfig } from 'pg';
+import {
+ dataTypeMap,
+ listTableColumns,
+ listIndexAttributes,
+ listTables,
+ dropIndex,
+ addIndex,
+ createTableIfNotExists,
+ addColumn,
+ dropColumn,
+ dropTableIfExists,
+ insertInto,
+ deleteFrom,
+ updateSet,
+ selectFrom,
+} from './queries';
+import {
+ Store,
+ StoreCollection,
+ StoreCollection_Slim,
+ StoreDeleteOptions,
+ StoreDescribeCollectionOptions,
+ StoreDropCollectionOptions,
+ StoreEnsureCollectionOptions,
+ StoreFindOptions,
+ StoreInsertOptions,
+ StoreInsertionResponse,
+ StoreList,
+ StoreMutationResponse,
+ StoreUpdateOptions,
+ throwStoreError,
+} from '@neuledge/store';
+import {
+ dropCollection,
+ describeCollection,
+ listCollections,
+ ensureCollection,
+ insert,
+ update,
+} from '@neuledge/sql-store';
+import { deletes } from '@neuledge/sql-store';
+import { queryHelpers } from './queries/connection';
+import { find } from '@neuledge/sql-store';
+
+export type PostgreSQLStoreClient = Pick<Client | Pool, 'query' | 'end'>;
+
+export type PostgreSQLStoreOptions =
+ | PoolConfig
+ | {
+ client: PostgreSQLStoreClient;
+ };
+
+export class PostgreSQLStore implements Store {
+ private connection: PostgreSQLStoreClient;
+
+ constructor(options: PostgreSQLStoreOptions) {
+ this.connection =
+ 'client' in options
+ ? options.client
+ : new Pool({
+ connectionTimeoutMillis: 5000,
+ ...options,
+ });
+ }
+
+ // connection methods
+
+ async close(): Promise<void> {
+ await this.connection.end().catch(throwStoreError);
+ }
+
+ // store methods
+
+ async listCollections(): Promise<StoreCollection_Slim[]> {
+ return listCollections(this.connection, { listTables });
+ }
+
+ async describeCollection(
+ options: StoreDescribeCollectionOptions,
+ ): Promise<StoreCollection> {
+ return describeCollection(options, this.connection, {
+ listTableColumns,
+ listIndexAttributes,
+ dataTypeMap,
+ });
+ }
+
+ async ensureCollection(options: StoreEnsureCollectionOptions): Promise<void> {
+ return ensureCollection(options, this.connection, {
+ createTableIfNotExists,
+ addIndex,
+ addColumn,
+ dropIndex,
+ dropColumn,
+ listTableColumns,
+ listIndexAttributes,
+ dataTypeMap,
+ });
+ }
+
+ async dropCollection(options: StoreDropCollectionOptions): Promise<void> {
+ return dropCollection(options, this.connection, { dropTableIfExists });
+ }
+
+ async find(options: StoreFindOptions): Promise<StoreList> {
+ return find(options, this.connection, { selectFrom, queryHelpers });
+ }
+
+ async insert(options: StoreInsertOptions): Promise<StoreInsertionResponse> {
+ return insert(options, this.connection, { insertInto });
+ }
+
+ async update(options: StoreUpdateOptions): Promise<StoreMutationResponse> {
+ return update(options, this.connection, { updateSet, queryHelpers });
+ }
+
+ async delete(options: StoreDeleteOptions): Promise<StoreMutationResponse> {
+ return deletes(options, this.connection, { deleteFrom, queryHelpers });
+ }
+}
diff --git a/packages/postgresql-store/tsconfig.json b/packages/postgresql-store/tsconfig.json
new file mode 100644
index 0000000..c67724d
--- /dev/null
+++ b/packages/postgresql-store/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "extends": "@neuledge/tsconfig/base.json",
+ "compilerOptions": {
+ "baseUrl": "src",
+ "rootDir": "src",
+ "outDir": "dist"
+ },
+ "include": ["src"],
+ "exclude": ["node_modules", "**/__ignore__/**"]
+}
diff --git a/packages/postgresql-store/tsup.config.json b/packages/postgresql-store/tsup.config.json
new file mode 100644
index 0000000..2f3a43d
--- /dev/null
+++ b/packages/postgresql-store/tsup.config.json
@@ -0,0 +1,6 @@
+{
+ "entry": ["src/index.ts"],
+ "format": ["esm", "cjs"],
+ "sourcemap": true,
+ "shims": true
+}
diff --git a/packages/sql-store/.npmignore b/packages/sql-store/.npmignore
new file mode 100644
index 0000000..d2ee3b5
--- /dev/null
+++ b/packages/sql-store/.npmignore
@@ -0,0 +1,6 @@
+/*
+!/dist/*.js
+!/dist/*.js.map
+!/dist/*.mjs
+!/dist/*.mjs.map
+!/dist/**/*.d.ts
\ No newline at end of file
diff --git a/packages/sql-store/README.md b/packages/sql-store/README.md
new file mode 100644
index 0000000..932c199
--- /dev/null
+++ b/packages/sql-store/README.md
@@ -0,0 +1,28 @@
+# Neuledge SQL Store
+
+A store for [Neuledge Engine](https://neuledge.com) that uses a [SQL](https://en.wikipedia.org/wiki/SQL) connection as a backend.
+
+This library is not intended to be used directly. It is a dependency of the SQL-based stores such as **MySQL** and **PostgreSQL**. For more information, please refer to the [main repository](https://github.com/neuledge/engine-js).
+
+## 📦 Installation
+
+```bash
+npm install @neuledge/sql-store
+```
+
+## 🚀 Getting started
+
+Import the util functions you need and use them to create your own store:
+
+```ts
+import { Store } from '@neuledge/engine';
+import { ... } from '@neuledge/sql-store';
+
+export class MyStore implements Store {
+ // ...
+}
+```
+
+## 📄 License
+
+Neuledge is [Apache 2.0 licensed](https://github.com/neuledge/engine-js/blob/main/LICENSE).
diff --git a/packages/sql-store/jest.config.json b/packages/sql-store/jest.config.json
new file mode 100644
index 0000000..5901941
--- /dev/null
+++ b/packages/sql-store/jest.config.json
@@ -0,0 +1,3 @@
+{
+ "preset": "@neuledge/jest-ts-preset"
+}
diff --git a/packages/sql-store/package.json b/packages/sql-store/package.json
new file mode 100644
index 0000000..8fd4baa
--- /dev/null
+++ b/packages/sql-store/package.json
@@ -0,0 +1,44 @@
+{
+ "name": "@neuledge/sql-store",
+ "version": "0.0.0",
+ "description": "Abstract SQL store implementation for Neuledge Engine",
+ "keywords": [
+ "neuledge",
+ "abstract",
+ "sql",
+ "store",
+ "database"
+ ],
+ "main": "./dist/index.js",
+ "module": "./dist/index.mjs",
+ "types": "./dist/index.d.ts",
+ "exports": {
+ ".": {
+ "require": "./dist/index.js",
+ "import": "./dist/index.mjs",
+ "types": "./dist/index.d.ts"
+ }
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/neuledge/engine-js.git"
+ },
+ "license": "Apache-2.0",
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">= 16"
+ },
+ "scripts": {
+ "types": "rimraf --glob dist/*.{d.ts,d.ts.map} dist/**/*.{d.ts,d.ts.map} && tsc --emitDeclarationOnly && tsc-alias",
+ "build": "rimraf --glob dist/*.{js,js.map,mjs,mjs.map} && tsup",
+ "test": "jest",
+ "lint": "eslint . --ext \"js,jsx,ts,tsx,mjs,cjs\"",
+ "lint:strict": "yarn lint --max-warnings 0"
+ },
+ "dependencies": {
+ "@neuledge/store": "^0.2.0",
+ "p-limit": "^3.1.0"
+ }
+}
diff --git a/packages/sql-store/src/helpers/documents.test.ts b/packages/sql-store/src/helpers/documents.test.ts
new file mode 100644
index 0000000..f8cbb46
--- /dev/null
+++ b/packages/sql-store/src/helpers/documents.test.ts
@@ -0,0 +1,97 @@
+import { convertRawDocument } from './documents';
+
+/* eslint-disable max-lines-per-function */
+
+describe('helpers/documents', () => {
+ describe('convertRawDocument()', () => {
+ it('should convert raw document to nested document', () => {
+ expect(
+ convertRawDocument({
+ id: 123,
+ url: null,
+ 'foo$0.bar': 'baz',
+ 'foo$0.qux': 'quux',
+ 'foo$0.quux': 'corge',
+ 'foo$0.baz.qux': 'grault',
+ 'foo$0.baz.quux.corge.grault': 'fred',
+ }),
+ ).toEqual({
+ id: 123,
+ url: null,
+ foo: {
+ bar: 'baz',
+ qux: 'quux',
+ quux: 'corge',
+ baz: {
+ qux: 'grault',
+ quux: {
+ corge: {
+ grault: 'fred',
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('should prefer lower choices over higher choices (props asc)', () => {
+ expect(
+ convertRawDocument({
+ id: 123,
+ url: null,
+ 'foo$0.bar': 'baz',
+ 'foo$1.bar': 'qux',
+ 'foo$1.baz': 123,
+ }),
+ ).toEqual({
+ id: 123,
+ url: null,
+ foo: {
+ bar: 'baz',
+ },
+ });
+ });
+
+ it('should prefer lower choices over higher choices (props desc)', () => {
+ expect(
+ convertRawDocument({
+ id: 123,
+ url: null,
+ 'foo$1.bar': 'qux',
+ 'foo$1.baz': 123,
+ 'foo$0.bar': 'baz',
+ }),
+ ).toEqual({
+ id: 123,
+ url: null,
+ foo: {
+ bar: 'baz',
+ },
+ });
+ });
+
+ it('should prefer lower choices over higher choices (nested)', () => {
+ expect(
+ convertRawDocument({
+ id: 123,
+ url: null,
+ 'foo$0.id': 123,
+ 'foo$0.bar$0.id': 1,
+ 'foo$0.bar$1.id': 2,
+ 'foo$1.id': 456,
+ 'foo$1.bar$0.id': 3,
+ 'foo$1.bar$1.id': 4,
+ }),
+ ).toEqual({
+ id: 123,
+ url: null,
+ foo: {
+ id: 123,
+ bar: {
+ id: 1,
+ },
+ },
+ });
+ });
+ });
+});
diff --git a/packages/sql-store/src/helpers/documents.ts b/packages/sql-store/src/helpers/documents.ts
new file mode 100644
index 0000000..2c1bc32
--- /dev/null
+++ b/packages/sql-store/src/helpers/documents.ts
@@ -0,0 +1,64 @@
+import { StoreCollection, StoreDocument, StoreField } from '@neuledge/store';
+
+export const convertRawDocument = (rawDoc: StoreDocument): StoreDocument => {
+ const doc: StoreDocument = {};
+
+ // split by dot notation
+ for (const [key, value] of Object.entries(rawDoc)) {
+ const path = key.split('.');
+
+ let current = doc;
+ for (let i = 0; i < path.length; i++) {
+ const name = path[i];
+
+ if (i === path.length - 1) {
+ current[name] = value;
+ } else {
+ current = current[name] = (current[name] || {}) as StoreDocument;
+ }
+ }
+ }
+
+ preferLowerChoices(doc);
+
+ return doc;
+};
+
+const preferLowerChoices = (doc: StoreDocument): void => {
+ for (const [key, value] of Object.entries(doc).sort()) {
+ const choice = key.match(/^(.+)\$(\d+)$/);
+ if (!choice) continue;
+
+ delete doc[key];
+
+ const name = choice[1];
+ if (name in doc) continue;
+
+ doc[name] = value;
+
+ preferLowerChoices(value as StoreDocument);
+ }
+};
+
+// not sure if we need this method, probably best to pass the responsibility
+// to the underlying database driver and make sure it's consistent with
+// javascript's values.
+export const parseRawDocument = (
+ fields: Record<string, StoreField>,
+ rawDoc: StoreDocument,
+): StoreDocument => {
+ for (const key in rawDoc) {
+ const field = fields[key];
+ if (field?.type !== 'number') continue;
+
+ const value = rawDoc[key];
+ if (typeof value !== 'string') continue;
+
+ rawDoc[key] =
+ field.scale === 0 && (!field.precision || field.precision > 15)
+ ? BigInt(value)
+ : Number(value);
+ }
+
+ return rawDoc;
+};
diff --git a/packages/sql-store/src/helpers/index.ts b/packages/sql-store/src/helpers/index.ts
new file mode 100644
index 0000000..76316bd
--- /dev/null
+++ b/packages/sql-store/src/helpers/index.ts
@@ -0,0 +1,6 @@
+export * from './documents';
+export * from './join';
+export * from './order';
+export * from './query';
+export * from './select';
+export * from './where';
diff --git a/packages/sql-store/src/helpers/join.test.ts b/packages/sql-store/src/helpers/join.test.ts
new file mode 100644
index 0000000..c08879f
--- /dev/null
+++ b/packages/sql-store/src/helpers/join.test.ts
@@ -0,0 +1,366 @@
+import { StoreCollection } from '@neuledge/store';
+import { QueryHelpers } from './query';
+import { getFromJoins } from './join';
+
+/* eslint-disable max-lines-per-function */
+
+describe('helpers/join', () => {
+ describe('getFromJoins()', () => {
+ const helpers: QueryHelpers = {
+ encodeIdentifier: (name) => `\`${name.replace(/([\\`])/g, '\\$1')}\``,
+ encodeLiteral: (value) => JSON.stringify(value),
+ };
+
+ const collection: StoreCollection = {
+ name: 'collection',
+ primaryKey: { fields: { id: true } },
+ fields: { id: true, name: true, foo: true, bar: true },
+ } as never;
+
+ const otherCollection: StoreCollection = {
+ name: 'otherCollection',
+ primaryKey: { fields: { id: true, subId: true } },
+ fields: { id: true, subId: true, title: true, description: true },
+ } as never;
+
+ const otherCollection2: StoreCollection = {
+ name: 'otherCollection2',
+ primaryKey: { fields: { id: true } },
+ fields: { id: true, image: true, url: true },
+ } as never;
+
+ it('should return null if no joins', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ }),
+ ).toBeNull();
+ });
+
+ it('should handle simple single inner join', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ innerJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: true,
+ by: { id: { field: 'foo' } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`foo$0`.`id` AS `foo$0.id`',
+ '`foo$0`.`subId` AS `foo$0.subId`',
+ '`foo$0`.`title` AS `foo$0.title`',
+ '`foo$0`.`description` AS `foo$0.description`',
+ ],
+ joinFields: {
+ 'foo$0.id': otherCollection.fields.id,
+ 'foo$0.subId': otherCollection.fields.subId,
+ 'foo$0.title': otherCollection.fields.title,
+ 'foo$0.description': otherCollection.fields.description,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'INNER JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo`)',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle simple single left join', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ leftJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' }, subId: { field: 'bar' } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: ['`foo$0`.`title` AS `foo$0.title`'],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'LEFT JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo` AND `foo$0`.`subId` = `$`.`bar`)',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle multiple joins', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ innerJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' } },
+ },
+ ],
+ },
+ leftJoin: {
+ bar: [
+ {
+ collection: otherCollection,
+ select: { description: true },
+ by: { subId: { field: 'bar' } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`foo$0`.`title` AS `foo$0.title`',
+ '`bar$0`.`description` AS `bar$0.description`',
+ ],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ 'bar$0.description': otherCollection.fields.description,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'INNER JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo`)',
+ 'LEFT JOIN `otherCollection` `bar$0` ON (`bar$0`.`subId` = `$`.`bar`)',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle multiple inner join choices on same collection', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ innerJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' } },
+ },
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { subId: { field: 'bar' } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`foo$0`.`title` AS `foo$0.title`',
+ '`foo$0`.`title` AS `foo$1.title`',
+ ],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ 'foo$1.title': otherCollection.fields.title,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'INNER JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo`) OR (`foo$0`.`subId` = `$`.`bar`)',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle multiple inner join choices on different collections', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ innerJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { subId: { field: 'foo' } },
+ },
+ {
+ collection: otherCollection2,
+ select: { url: true },
+ by: { id: { field: 'bar' } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`foo$0`.`title` AS `foo$0.title`',
+ '`foo$1`.`url` AS `foo$1.url`',
+ ],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ 'foo$1.url': otherCollection2.fields.url,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'LEFT JOIN `otherCollection` `foo$0` ON (`foo$0`.`subId` = `$`.`foo`)',
+ 'LEFT JOIN `otherCollection2` `foo$1` ON (`foo$1`.`id` = `$`.`bar`)',
+ ],
+ whereClauses: [
+ '(`foo$0`.`id` IS NOT NULL) OR (`foo$1`.`id` IS NOT NULL)',
+ ],
+ });
+ });
+
+ it('should handle inner join with where', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ innerJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' } },
+ where: { subId: { $eq: 123 } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: ['`foo$0`.`title` AS `foo$0.title`'],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'INNER JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo` AND `foo$0`.`subId` = 123)',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle left joins with where', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ leftJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' } },
+ where: { subId: { $lt: 123 } },
+ },
+ {
+ collection: otherCollection,
+ select: { description: true },
+ by: { subId: { field: 'bar' } },
+ where: { title: { $eq: 'hello' } },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`foo$0`.`title` AS `foo$0.title`',
+ '`foo$0`.`description` AS `foo$1.description`',
+ ],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ 'foo$1.description': otherCollection.fields.description,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'LEFT JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo` AND `foo$0`.`subId` < 123) OR (`foo$0`.`subId` = `$`.`bar` AND `foo$0`.`title` = "hello")',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle inner join within left join', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ leftJoin: {
+ foo: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' } },
+ innerJoin: {
+ bar: [
+ {
+ collection: otherCollection2,
+ select: { url: true },
+ by: { id: { field: 'bar' } },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`foo$0`.`title` AS `foo$0.title`',
+ '`foo$0.bar$0`.`url` AS `foo$0.bar$0.url`',
+ ],
+ joinFields: {
+ 'foo$0.title': otherCollection.fields.title,
+ 'foo$0.bar$0.url': otherCollection2.fields.url,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'LEFT JOIN `otherCollection` `foo$0` ON (`foo$0`.`id` = `$`.`foo`)',
+ 'INNER JOIN `otherCollection2` `foo$0.bar$0` ON (`foo$0.bar$0`.`id` = `foo$0`.`bar`)',
+ ],
+ whereClauses: [],
+ });
+ });
+
+ it('should handle overlapping inner join within left join', () => {
+ expect(
+ getFromJoins(helpers, {
+ collection,
+ leftJoin: {
+ test: [
+ {
+ collection: otherCollection,
+ select: { title: true },
+ by: { id: { field: 'foo' } },
+ innerJoin: {
+ test: [
+ {
+ collection: otherCollection2,
+ select: { url: true },
+ by: { id: { field: 'bar' } },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ }),
+ ).toEqual({
+ selectColumns: [
+ '`test$0`.`title` AS `test$0.title`',
+ '`test$0.test$0`.`url` AS `test$0.test$0.url`',
+ ],
+ joinFields: {
+ 'test$0.title': otherCollection.fields.title,
+ 'test$0.test$0.url': otherCollection2.fields.url,
+ },
+ fromAlias: '$',
+ fromJoins: [
+ 'LEFT JOIN `otherCollection` `test$0` ON (`test$0`.`id` = `$`.`foo`)',
+ 'INNER JOIN `otherCollection2` `test$0.test$0` ON (`test$0.test$0`.`id` = `test$0`.`bar`)',
+ ],
+ whereClauses: [],
+ });
+ });
+ });
+});
diff --git a/packages/sql-store/src/helpers/join.ts b/packages/sql-store/src/helpers/join.ts
new file mode 100644
index 0000000..06b05b5
--- /dev/null
+++ b/packages/sql-store/src/helpers/join.ts
@@ -0,0 +1,249 @@
+import {
+ StoreCollection,
+ StoreError,
+ StoreField,
+ StoreJoin,
+ StoreJoinChoice,
+} from '@neuledge/store';
+import { QueryHelpers } from './query';
+import { getSelectColumn } from './select';
+import { getWhere } from './where';
+
+export const getFromJoins = (
+  helpers: QueryHelpers,
+  options: Pick<StoreJoinChoice, 'collection' | 'innerJoin' | 'leftJoin'>,
+): {
+  selectColumns: string[];
+  joinFields: Record<string, StoreField>;
+  fromAlias: string;
+  fromJoins: string[];
+  whereClauses: string[];
+} | null => {
+ const fromAlias = '$';
+
+ const joins = handleStoreOptions(fromAlias, '', options);
+ if (!joins.length) return null;
+
+ const selectColumns: string[] = [];
+  const joinFields: Record<string, StoreField> = {};
+ const fromJoins: string[] = [];
+ let whereClauses: string[] = [];
+
+ for (const join of joins) {
+ const { select, fields, fromJoin, where } = getFromJoin(helpers, join);
+
+ selectColumns.push(...select);
+ fromJoins.push(fromJoin);
+ whereClauses.push(...where);
+ Object.assign(joinFields, fields);
+ }
+
+ // remove where duplicates
+ whereClauses = [...new Set(whereClauses)];
+
+ return { selectColumns, joinFields, fromAlias, fromJoins, whereClauses };
+};
+
+// local helpers
+
+const handleStoreOptions = (
+  fromAlias: string,
+  path: string,
+  {
+    innerJoin,
+    leftJoin,
+  }: Pick<StoreJoinChoice, 'innerJoin' | 'leftJoin'>,
+): Join[] => {
+ const joins: Join[] = [];
+
+ if (innerJoin) {
+ joins.push(...handleStoreJoin(fromAlias, path, innerJoin, true));
+ }
+
+ if (leftJoin) {
+ joins.push(...handleStoreJoin(fromAlias, path, leftJoin));
+ }
+
+ return joins;
+};
+
+const handleStoreJoin = (
+ fromAlias: string,
+ path: string,
+ join: StoreJoin,
+ required?: boolean,
+): Join[] =>
+ Object.entries(join).flatMap(([key, choices]) =>
+ handleStoreJoinChoices(
+ fromAlias,
+ path ? `${path}.${key}` : key,
+ choices,
+ required,
+ ),
+ );
+
+const handleStoreJoinChoices = (
+ fromAlias: string,
+ key: string,
+ choices: StoreJoinChoice[],
+ required?: boolean,
+): Join[] => {
+  const joinsFrom: Record<string, Join> = {};
+ const childJoins: Join[] = [];
+
+ for (const [i, choice] of choices.entries()) {
+ const { collection, by, select, where } = choice;
+
+ let join = joinsFrom[collection.name];
+ if (!join) {
+ join = {
+ collection,
+ alias: `${key}$${i}`,
+ select: {},
+ ons: [],
+ };
+ joinsFrom[collection.name] = join;
+ }
+
+ join.ons.push({ fromAlias: fromAlias, by, where });
+
+ if (select) {
+ for (const name of Object.keys(
+ select === true ? collection.fields : select,
+ )) {
+ join.select[`${key}$${i}.${name}`] = name;
+ }
+ }
+
+ childJoins.push(...handleStoreOptions(join.alias, join.alias, choice));
+ }
+
+ const joins = Object.values(joinsFrom);
+ if (required) {
+ for (const join of joins) {
+ join.required = joins;
+ }
+ }
+
+ return [...joins, ...childJoins];
+};
+
+interface Join {
+  collection: StoreCollection;
+  alias: string;
+  select: Record<string, string>;
+  ons: {
+    fromAlias: string;
+    by: StoreJoinChoice['by'];
+    where: StoreJoinChoice['where'];
+  }[];
+  required?: Join[];
+}
+
+const getFromJoin = (
+  helpers: QueryHelpers,
+  join: Join,
+): {
+  select: string[];
+  fields: Record<string, StoreField>;
+  fromJoin: string;
+  where: string[];
+} => {
+  const { collection, alias: joinAlias, select, ons, required } = join;
+
+  const joinFields: Record<string, StoreField> = {};
+
+ const selectColumns = Object.entries(select).map(([alias, name]) => {
+ joinFields[alias] = collection.fields[name];
+ return getSelectColumn(helpers, joinAlias, name, alias);
+ });
+
+ let joinType: 'INNER' | 'LEFT';
+ const where: string[] = [];
+
+ if (required?.length === 1) {
+ joinType = 'INNER';
+ } else {
+ joinType = 'LEFT';
+
+ if (required) {
+ where.push(getJoinRequiredWhere(helpers, required));
+ }
+ }
+
+ const fromJoin = `${joinType} JOIN ${helpers.encodeIdentifier(
+ collection.name,
+ )} ${helpers.encodeIdentifier(joinAlias)} ON (${ons
+ .map(({ fromAlias, by, where }) =>
+ getJoinOn(helpers, collection, fromAlias, joinAlias, by, where),
+ )
+ .join(') OR (')})`;
+
+ return { select: selectColumns, fields: joinFields, fromJoin, where };
+};
+
+const getJoinOn = (
+ helpers: QueryHelpers,
+ collection: StoreCollection,
+ fromAlias: string,
+ joinAlias: string,
+ by: StoreJoinChoice['by'],
+ where: StoreJoinChoice['where'],
+): string =>
+ [
+ ...Object.entries(by).map(([key, term]) => {
+ const field = `${helpers.encodeIdentifier(
+ joinAlias,
+ )}.${helpers.encodeIdentifier(key)}`;
+
+ if (term.field) {
+ return `${field} = ${helpers.encodeIdentifier(
+ fromAlias,
+ )}.${helpers.encodeIdentifier(term.field)}`;
+ }
+
+ if (term.value != null) {
+ return `${field} = ${helpers.encodeLiteral(
+ term.value,
+ collection.fields[key],
+ )}`;
+ }
+
+ return `${field} IS NULL`;
+ }),
+ ...(where ? [getWhere(helpers, collection, where, joinAlias)] : []),
+ ].join(' AND ');
+
+/**
+ * Check that at least one of the required joins is not null.
+ */
+const getJoinRequiredWhere = (
+ helpers: QueryHelpers,
+ required: Join[],
+): string =>
+ `(${required
+ .map((join) => {
+ const { collection, alias } = join;
+
+ let field = Object.keys(collection.primaryKey.fields).find(
+ (name) => !collection.fields[name].nullable,
+ );
+
+ if (!field) {
+ field = Object.keys(collection.fields).find(
+ (name) => !collection.fields[name].nullable,
+ );
+
+ if (!field) {
+ throw new StoreError(
+ StoreError.Code.INVALID_DATA,
+ `Cannot find a non-nullable field in collection ${collection.name}`,
+ );
+ }
+ }
+
+ return `${helpers.encodeIdentifier(alias)}.${helpers.encodeIdentifier(
+ field,
+ )} IS NOT NULL`;
+ })
+ .join(') OR (')})`;
diff --git a/packages/sql-store/src/helpers/order.ts b/packages/sql-store/src/helpers/order.ts
new file mode 100644
index 0000000..ef523fd
--- /dev/null
+++ b/packages/sql-store/src/helpers/order.ts
@@ -0,0 +1,15 @@
+import { StoreSort } from '@neuledge/store';
+import { QueryHelpers } from './query';
+
+export const getOrderBy = (
+ helpers: QueryHelpers,
+ sort: StoreSort,
+): string | null =>
+ Object.entries(sort)
+ .map(
+ ([fieldName, direction]) =>
+ `${helpers.encodeIdentifier(fieldName)} ${
+ direction === 'asc' ? 'ASC' : 'DESC'
+ }`,
+ )
+ .join(', ') || null;
diff --git a/packages/sql-store/src/helpers/query.ts b/packages/sql-store/src/helpers/query.ts
new file mode 100644
index 0000000..0e8d379
--- /dev/null
+++ b/packages/sql-store/src/helpers/query.ts
@@ -0,0 +1,6 @@
+import { StoreField, StoreScalarValue } from '@neuledge/store';
+
+export interface QueryHelpers {
+ encodeIdentifier(identifier: string): string;
+ encodeLiteral(literal: StoreScalarValue, field: StoreField): string;
+}
diff --git a/packages/sql-store/src/helpers/select.ts b/packages/sql-store/src/helpers/select.ts
new file mode 100644
index 0000000..7d1cdda
--- /dev/null
+++ b/packages/sql-store/src/helpers/select.ts
@@ -0,0 +1,28 @@
+import { StoreCollection, StoreSelect } from '@neuledge/store';
+import { QueryHelpers } from './query';
+
+export const getSelectColumns = (
+ helpers: QueryHelpers,
+ from: string | null,
+ select: StoreSelect | StoreCollection['fields'],
+): string[] =>
+ Object.keys(select)
+ .filter((key) => select[key])
+ .map((name) => getSelectColumn(helpers, from, name));
+
+export const getSelectColumn = (
+ helpers: QueryHelpers,
+ from: string | null,
+ name: string,
+ alias?: string | null,
+): string =>
+ `${
+ from ? `${helpers.encodeIdentifier(from)}.` : ''
+ }${helpers.encodeIdentifier(name)}${
+ alias ? ` AS ${helpers.encodeIdentifier(alias)}` : ''
+ }`;
+
+export const getSelectAny = (
+ helpers: QueryHelpers,
+ from: string | null,
+): string => `${from ? `${helpers.encodeIdentifier(from)}.` : ''}*`;
diff --git a/packages/sql-store/src/helpers/where.ts b/packages/sql-store/src/helpers/where.ts
new file mode 100644
index 0000000..9f6a46c
--- /dev/null
+++ b/packages/sql-store/src/helpers/where.ts
@@ -0,0 +1,159 @@
+import {
+ StoreCollection,
+ StoreField,
+ StoreWhere,
+ StoreWhereRecord,
+ StoreWhereTerm,
+} from '@neuledge/store';
+import { QueryHelpers } from './query';
+
+export const getWhere = (
+ helpers: QueryHelpers,
+ collection: StoreCollection,
+ where: StoreWhere,
+ from?: string | null,
+): string | null => {
+ const { $or } = where;
+
+ if (!Array.isArray($or)) {
+ return (
+ whereRecord(helpers, collection, where as StoreWhereRecord, from) || null
+ );
+ }
+
+ const sql = $or
+ .map((record) => whereRecord(helpers, collection, record, from))
+ .filter(Boolean);
+
+ if (sql.length === 0) {
+ return null;
+ }
+
+ return `(${sql.join(') OR (')})`;
+};
+
+const whereRecord = (
+ helpers: QueryHelpers,
+ collection: StoreCollection,
+ record: StoreWhereRecord,
+ from?: string | null,
+): string => {
+ const fromEntry = from ? `${helpers.encodeIdentifier(from)}.` : '';
+
+ return Object.entries(record)
+ .map(([columnName, term]) =>
+ whereTerm(
+ helpers,
+ `${fromEntry}${helpers.encodeIdentifier(columnName)}`,
+ collection.fields[columnName],
+ term,
+ ),
+ )
+ .filter(Boolean)
+ .join(' AND ');
+};
+
+const whereTerm = (
+ helpers: QueryHelpers,
+ entry: string,
+ field: StoreField,
+ term: StoreWhereTerm,
+): string =>
+ [
+ ...whereComparisonTerm(helpers, entry, field, term),
+ ...whereLikeTerm(helpers, entry, field, term),
+ ...whereInTerm(helpers, entry, field, term),
+ ].join(' AND ');
+
+const whereComparisonTerm = (
+ { encodeLiteral }: QueryHelpers,
+ entry: string,
+ field: StoreField,
+ term: StoreWhereTerm,
+): string[] => {
+ const sql: string[] = [];
+
+ if ('$eq' in term) {
+ sql.push(`${entry} = ${encodeLiteral(term.$eq, field)}`);
+ }
+
+ if ('$ne' in term) {
+ sql.push(`${entry} != ${encodeLiteral(term.$ne, field)}`);
+ }
+
+ if ('$gt' in term) {
+ sql.push(`${entry} > ${encodeLiteral(term.$gt, field)}`);
+ }
+
+ if ('$gte' in term) {
+ sql.push(`${entry} >= ${encodeLiteral(term.$gte, field)}`);
+ }
+
+ if ('$lt' in term) {
+ sql.push(`${entry} < ${encodeLiteral(term.$lt, field)}`);
+ }
+
+ if ('$lte' in term) {
+ sql.push(`${entry} <= ${encodeLiteral(term.$lte, field)}`);
+ }
+
+ return sql;
+};
+
+const whereLikeTerm = (
+  { encodeLiteral }: QueryHelpers,
+  entry: string,
+  field: StoreField,
+  term: StoreWhereTerm,
+): string[] => {
+  const sql: string[] = [];
+
+  // Translate the substring operators (`$contains`, `$startsWith` and
+  // `$endsWith`) into SQL `LIKE` patterns.
+  //
+  // NOTE: `$in` is intentionally NOT handled here. It belongs to
+  // `whereInTerm` below; handling it in both helpers would emit the very
+  // same `IN (...)` clause twice, joined by `AND`, in the final query.
+  //
+  // NOTE(review): the `LIKE` wildcard characters (`%` and `_`) inside the
+  // user-supplied value are not escaped here — a value containing them is
+  // interpreted as a pattern. TODO: confirm whether `encodeLiteral`
+  // implementations are expected to escape them.
+
+  if ('$contains' in term) {
+    sql.push(`${entry} LIKE ${encodeLiteral(`%${term.$contains}%`, field)}`);
+  }
+
+  if ('$startsWith' in term) {
+    sql.push(`${entry} LIKE ${encodeLiteral(`${term.$startsWith}%`, field)}`);
+  }
+
+  if ('$endsWith' in term) {
+    sql.push(`${entry} LIKE ${encodeLiteral(`%${term.$endsWith}`, field)}`);
+  }
+
+  return sql;
+};
+
+const whereInTerm = (
+ { encodeLiteral }: QueryHelpers,
+ entry: string,
+ field: StoreField,
+ term: StoreWhereTerm,
+): string[] => {
+ const sql: string[] = [];
+
+ if ('$in' in term) {
+ if (term.$in.length === 0) {
+ sql.push('FALSE');
+ } else {
+ sql.push(
+ `${entry} IN (${term.$in
+ .map((v) => encodeLiteral(v, field))
+ .join(', ')})`,
+ );
+ }
+ }
+
+ return sql;
+};
diff --git a/packages/sql-store/src/index.ts b/packages/sql-store/src/index.ts
new file mode 100644
index 0000000..b4f856d
--- /dev/null
+++ b/packages/sql-store/src/index.ts
@@ -0,0 +1,3 @@
+export type { QueryHelpers } from './helpers';
+export * from './logic';
+export * from './mappers';
diff --git a/packages/sql-store/src/logic/collections/describe.ts b/packages/sql-store/src/logic/collections/describe.ts
new file mode 100644
index 0000000..f7fd454
--- /dev/null
+++ b/packages/sql-store/src/logic/collections/describe.ts
@@ -0,0 +1,135 @@
+import {
+ SQLColumn,
+ SQLIndexAttribute,
+ SQLIndexColumn,
+ toStoreField,
+ toStoreIndex,
+} from '@/mappers';
+import {
+ StoreCollection,
+ StoreDescribeCollectionOptions,
+ StoreError,
+ StoreShapeType,
+ throwStoreError,
+} from '@neuledge/store';
+
+export interface DescribeCollectionQueries<
+  Connection,
+  Column extends SQLColumn,
+  IndexAttribute extends SQLIndexAttribute & Omit<SQLIndexColumn, keyof SQLIndexAttribute>,
+> {
+  listTableColumns(connection: Connection, name: string): Promise<Column[]>;
+  listIndexAttributes(
+    connection: Connection,
+    name: string,
+  ): Promise<IndexAttribute[]>;
+  dataTypeMap: Record<string, StoreShapeType>;
+}
+
+export const describeCollection = async <
+  Connection,
+  Column extends SQLColumn,
+  IndexAttribute extends SQLIndexAttribute & Omit<SQLIndexColumn, keyof SQLIndexAttribute>,
+>(
+  options: StoreDescribeCollectionOptions,
+  connection: Connection,
+  queries: DescribeCollectionQueries<Connection, Column, IndexAttribute>,
+): Promise<StoreCollection> => {
+ const { name, fields, indexColumns } = await getCollectionDetails(
+ options,
+ connection,
+ queries,
+ );
+
+ let primaryKey: string | undefined;
+ const indexes = Object.fromEntries(
+ indexColumns.map((columns) => {
+ const index = toStoreIndex(columns);
+ if (index.unique === 'primary') {
+ primaryKey = index.name;
+ }
+
+ return [index.name, index];
+ }),
+ );
+
+ if (!primaryKey) {
+ throw new StoreError(
+ StoreError.Code.INVALID_DATA,
+ `Primary key not found for collection "${name}"`,
+ );
+ }
+
+ return {
+ name,
+ primaryKey: indexes[primaryKey] as StoreCollection['primaryKey'],
+ indexes,
+ fields,
+ };
+};
+
+const getCollectionDetails = async <
+  Connection,
+  Column extends SQLColumn,
+  IndexAttribute extends SQLIndexAttribute & Omit<SQLIndexColumn, keyof SQLIndexAttribute>,
+>(
+  options: StoreDescribeCollectionOptions,
+  connection: Connection,
+  {
+    listTableColumns,
+    listIndexAttributes,
+    dataTypeMap,
+  }: DescribeCollectionQueries<Connection, Column, IndexAttribute>,
+) => {
+ const { name } = options.collection;
+
+ const [columns, indexAttributes] = await Promise.all([
+ listTableColumns(connection, name),
+ listIndexAttributes(connection, name),
+ ]).catch(throwStoreError);
+
+ const columnMap = Object.fromEntries(
+ columns.map((column) => [column.column_name, column]),
+ );
+
+ const fields = Object.fromEntries(
+ columns.map((column) => [
+ column.column_name,
+ toStoreField(dataTypeMap, column),
+ ]),
+ );
+
+ const indexColumns = groupIndexColumns(columnMap, indexAttributes);
+
+ return { name, fields, indexColumns };
+};
+
+const groupIndexColumns = <
+  Column extends SQLColumn,
+  IndexAttribute extends SQLIndexAttribute & Omit<SQLIndexColumn, keyof SQLIndexAttribute>,
+>(
+  columnMap: Record<string, Column>,
+  indexAttributes: IndexAttribute[],
+): SQLIndexColumn[][] => {
+  const groupMap: Record<string, SQLIndexColumn[]> = {};
+
+ for (const statistic of indexAttributes) {
+ let group = groupMap[statistic.index_name];
+ if (!group) {
+ group = [];
+ groupMap[statistic.index_name] = group;
+ }
+
+ const column = columnMap[statistic.column_name];
+ if (!column) {
+ throw new StoreError(
+ StoreError.Code.INVALID_DATA,
+ `Column "${statistic.column_name}" not found for index "${statistic.index_name}"`,
+ );
+ }
+
+ group.push({ ...column, ...statistic } as never);
+ }
+
+ return Object.values(groupMap);
+};
diff --git a/packages/sql-store/src/logic/collections/drop.ts b/packages/sql-store/src/logic/collections/drop.ts
new file mode 100644
index 0000000..4eb1621
--- /dev/null
+++ b/packages/sql-store/src/logic/collections/drop.ts
@@ -0,0 +1,15 @@
+import { StoreDropCollectionOptions, throwStoreError } from '@neuledge/store';
+
+export interface DropCollectionQueries<Connection> {
+  dropTableIfExists(connection: Connection, name: string): Promise<void>;
+}
+
+export const dropCollection = async <Connection>(
+  options: StoreDropCollectionOptions,
+  connection: Connection,
+  { dropTableIfExists }: DropCollectionQueries<Connection>,
+): Promise<void> => {
+ await dropTableIfExists(connection, options.collection.name).catch(
+ throwStoreError,
+ );
+};
diff --git a/packages/sql-store/src/logic/collections/ensure.ts b/packages/sql-store/src/logic/collections/ensure.ts
new file mode 100644
index 0000000..469b39c
--- /dev/null
+++ b/packages/sql-store/src/logic/collections/ensure.ts
@@ -0,0 +1,121 @@
+import pLimit from 'p-limit';
+import {
+ StoreCollection,
+ StoreEnsureCollectionOptions,
+ StoreField,
+ StoreIndex,
+ throwStoreError,
+} from '@neuledge/store';
+import { SQLColumn, SQLIndexAttribute, SQLIndexColumn } from '@/mappers';
+import { DescribeCollectionQueries, describeCollection } from './describe';
+
+export interface EnsureCollectionQueries<
+  Connection,
+  Column extends SQLColumn,
+  IndexAttribute extends SQLIndexAttribute & Omit<SQLIndexColumn, keyof SQLIndexAttribute>,
+> extends EnsureCollectionQueriesOnly<Connection>,
+    DescribeCollectionQueries<Connection, Column, IndexAttribute> {}
+
+export interface EnsureCollectionQueriesOnly<Connection> {
+  createTableIfNotExists(
+    connection: Connection,
+    collection: StoreCollection,
+  ): Promise<void>;
+  addIndex(
+    connection: Connection,
+    collection: StoreCollection,
+    index: StoreIndex,
+  ): Promise<void>;
+  addColumn(
+    connection: Connection,
+    collection: StoreCollection,
+    field: StoreField,
+  ): Promise<void>;
+  dropIndex(
+    connection: Connection,
+    collection: StoreCollection,
+    index: string,
+  ): Promise<void>;
+  dropColumn(
+    connection: Connection,
+    collection: StoreCollection,
+    field: string,
+  ): Promise<void>;
+}
+
+export const ensureCollection = async <
+  Connection,
+  Column extends SQLColumn,
+  IndexAttribute extends SQLIndexAttribute & Omit<SQLIndexColumn, keyof SQLIndexAttribute>,
+>(
+  options: StoreEnsureCollectionOptions,
+  connection: Connection,
+  queries: EnsureCollectionQueries<Connection, Column, IndexAttribute>,
+): Promise<void> => {
+ await queries
+ .createTableIfNotExists(connection, options.collection)
+ .catch(throwStoreError);
+
+ await dropProperties(options, connection, queries);
+
+ const existsCollection = await describeCollection(
+ options,
+ connection,
+ queries,
+ );
+
+ await addProperties(options, connection, existsCollection, queries);
+};
+
+const dropProperties = async <Connection>(
+  options: StoreEnsureCollectionOptions,
+  connection: Connection,
+  { dropIndex, dropColumn }: EnsureCollectionQueriesOnly<Connection>,
+) => {
+ const asyncLimit = pLimit(4);
+
+ await Promise.all(
+ options.dropIndexes?.map((index) =>
+ asyncLimit(() => dropIndex(connection, options.collection, index)),
+ ) || [],
+ ).catch(throwStoreError);
+
+ await Promise.all(
+ options.dropFields?.map((field) =>
+ asyncLimit(() => dropColumn(connection, options.collection, field)),
+ ) || [],
+ ).catch(throwStoreError);
+};
+
+const addProperties = async <Connection>(
+  options: StoreEnsureCollectionOptions,
+  connection: Connection,
+  existsCollection: StoreCollection,
+  { addIndex, addColumn }: EnsureCollectionQueriesOnly<Connection>,
+) => {
+ const asyncLimit = pLimit(4);
+
+ // although we support adding columns with non-nullables types, it will be
+ // rejected by the database and for a good reason. It's the responsibility of
+ // the engine to ensure that new columns are nullable if inserted after the
+ // collection has been created and this is the current implementation.
+
+ await Promise.all(
+ options.fields
+ ?.filter((field) => !existsCollection.fields[field.name])
+ .map((field) =>
+ asyncLimit(() => addColumn(connection, existsCollection, field)),
+ ) || [],
+ ).catch(throwStoreError);
+
+ await Promise.all(
+ options.indexes
+ ?.filter(
+ (index) =>
+ !existsCollection.indexes[index.name] && index.unique !== 'primary',
+ )
+ .map((index) =>
+ asyncLimit(() => addIndex(connection, existsCollection, index)),
+ ) || [],
+ ).catch(throwStoreError);
+};
diff --git a/packages/sql-store/src/logic/collections/index.ts b/packages/sql-store/src/logic/collections/index.ts
new file mode 100644
index 0000000..cef6614
--- /dev/null
+++ b/packages/sql-store/src/logic/collections/index.ts
@@ -0,0 +1,4 @@
+export * from './describe';
+export * from './drop';
+export * from './ensure';
+export * from './list';
diff --git a/packages/sql-store/src/logic/collections/list.ts b/packages/sql-store/src/logic/collections/list.ts
new file mode 100644
index 0000000..f4eeb7c
--- /dev/null
+++ b/packages/sql-store/src/logic/collections/list.ts
@@ -0,0 +1,14 @@
+import { SQLTable, toStoreCollection_Slim } from '@/mappers';
+import { StoreCollection_Slim, throwStoreError } from '@neuledge/store';
+
+export interface ListCollectionsQueries<Connection> {
+  listTables(connection: Connection): Promise<SQLTable[]>;
+}
+
+export const listCollections = async <Connection>(
+  connection: Connection,
+  { listTables }: ListCollectionsQueries<Connection>,
+): Promise<StoreCollection_Slim[]> => {
+ const tables = await listTables(connection).catch(throwStoreError);
+ return tables.map((table) => toStoreCollection_Slim(table));
+};
diff --git a/packages/sql-store/src/logic/delete.ts b/packages/sql-store/src/logic/delete.ts
new file mode 100644
index 0000000..a28bd63
--- /dev/null
+++ b/packages/sql-store/src/logic/delete.ts
@@ -0,0 +1,34 @@
+import { QueryHelpers, getWhere } from '@/helpers';
+import {
+ StoreDeleteOptions,
+ StoreMutationResponse,
+ throwStoreError,
+} from '@neuledge/store';
+
+export interface DeleteQueries<Connection> {
+  deleteFrom(
+    connection: Connection,
+    name: string,
+    where: string | null,
+  ): Promise<number>;
+  queryHelpers: QueryHelpers;
+}
+
+export const deletes = async <Connection>(
+  options: StoreDeleteOptions,
+  connection: Connection,
+  { deleteFrom, queryHelpers }: DeleteQueries<Connection>,
+): Promise<StoreMutationResponse> => {
+ const { collection, where } = options;
+ const { name } = collection;
+
+ const affectedCount = await deleteFrom(
+ connection,
+ name,
+ where ? getWhere(queryHelpers, collection, where) : null,
+ ).catch(throwStoreError);
+
+ return {
+ affectedCount,
+ };
+};
diff --git a/packages/sql-store/src/logic/find.ts b/packages/sql-store/src/logic/find.ts
new file mode 100644
index 0000000..a868bb2
--- /dev/null
+++ b/packages/sql-store/src/logic/find.ts
@@ -0,0 +1,81 @@
+import {
+ QueryHelpers,
+ convertRawDocument,
+ getFromJoins,
+ getOrderBy,
+ getSelectAny,
+ getSelectColumns,
+ getWhere,
+ parseRawDocument,
+} from '@/helpers';
+import {
+ StoreDocument,
+ StoreFindOptions,
+ StoreList,
+ throwStoreError,
+} from '@neuledge/store';
+
+export interface FindQueries<Connection> {
+  selectFrom(
+    connection: Connection,
+    select: string,
+    from: string,
+    where: string | null,
+    orderBy: string | null,
+    limit: number,
+    offset: number,
+  ): Promise<StoreDocument[]>;
+  queryHelpers: QueryHelpers;
+}
+
+export const find = async <Connection>(
+  options: StoreFindOptions,
+  connection: Connection,
+  { selectFrom, queryHelpers }: FindQueries<Connection>,
+): Promise<StoreList> => {
+ const { collection, select, where, limit, offset, sort } = options;
+
+ let selectColumns;
+ let from = queryHelpers.encodeIdentifier(collection.name);
+ let { fields } = collection;
+
+ const join = getFromJoins(queryHelpers, options);
+ const whereClauses = where ? [getWhere(queryHelpers, collection, where)] : [];
+
+ if (join) {
+ from += ` ${queryHelpers.encodeIdentifier(
+ join.fromAlias,
+ )} ${join.fromJoins.join(' ')}`;
+
+ selectColumns = select
+ ? getSelectColumns(queryHelpers, join.fromAlias, select)
+ : [getSelectAny(queryHelpers, join.fromAlias)];
+
+ selectColumns.push(...join.selectColumns);
+ whereClauses.push(...join.whereClauses);
+ fields = { ...fields, ...join.joinFields };
+ } else {
+ selectColumns = select
+ ? getSelectColumns(queryHelpers, null, select)
+ : ['*'];
+ }
+
+ const offsetNumber = offset ? Number(offset) : 0;
+
+ const rawDocs = await selectFrom(
+ connection,
+ selectColumns.join(', '),
+ from,
+ whereClauses.join(' AND ') || null,
+ sort ? getOrderBy(queryHelpers, sort) : null,
+ limit,
+ offsetNumber,
+ ).catch(throwStoreError);
+
+ const docs = rawDocs.map((rawDoc) =>
+ convertRawDocument(parseRawDocument(fields, rawDoc)),
+ );
+ const nextOffset = rawDocs.length < limit ? null : offsetNumber + limit;
+
+ return Object.assign(docs, { nextOffset });
+};
diff --git a/packages/sql-store/src/logic/index.ts b/packages/sql-store/src/logic/index.ts
new file mode 100644
index 0000000..4750cb4
--- /dev/null
+++ b/packages/sql-store/src/logic/index.ts
@@ -0,0 +1,5 @@
+export * from './collections';
+export * from './delete';
+export * from './insert';
+export * from './find';
+export * from './update';
diff --git a/packages/sql-store/src/logic/insert.ts b/packages/sql-store/src/logic/insert.ts
new file mode 100644
index 0000000..660dc21
--- /dev/null
+++ b/packages/sql-store/src/logic/insert.ts
@@ -0,0 +1,53 @@
+import { parseRawDocument } from '@/helpers';
+import {
+ StoreDocument,
+ StoreField,
+ StoreInsertOptions,
+ StoreInsertionResponse,
+ StoreScalarValue,
+ throwStoreError,
+} from '@neuledge/store';
+
+export interface InsertQueries<Connection> {
+  insertInto(
+    connection: Connection,
+    name: string,
+    columns: StoreField[],
+    values: (StoreScalarValue | undefined)[][],
+    returns: string[],
+  ): Promise<StoreDocument[]>;
+}
+
+export const insert = async <Connection>(
+  options: StoreInsertOptions,
+  connection: Connection,
+  { insertInto }: InsertQueries<Connection>,
+): Promise<StoreInsertionResponse> => {
+ const { collection, documents } = options;
+ const { name, fields, primaryKey } = collection;
+
+ const columns = Object.values(fields);
+
+ const values = documents.map((document) =>
+ columns.map(
+ (column) =>
+ document[column.name] ??
+ (primaryKey.fields[column.name] ? undefined : null),
+ ),
+ );
+
+ const returns = Object.keys(primaryKey.fields);
+
+ const res = await insertInto(
+ connection,
+ name,
+ columns,
+ values,
+ returns,
+ ).catch(throwStoreError);
+
+ return {
+ affectedCount: res.length,
+ insertedIds: res.map((rawDoc) => parseRawDocument(fields, rawDoc)),
+ };
+};
diff --git a/packages/sql-store/src/logic/update.ts b/packages/sql-store/src/logic/update.ts
new file mode 100644
index 0000000..c30e2eb
--- /dev/null
+++ b/packages/sql-store/src/logic/update.ts
@@ -0,0 +1,45 @@
+import { QueryHelpers, getWhere } from '@/helpers';
+import {
+ StoreField,
+ StoreMutationResponse,
+ StoreScalarValue,
+ StoreUpdateOptions,
+ throwStoreError,
+} from '@neuledge/store';
+
+export interface UpdateQueries<Connection> {
+  updateSet(
+    connection: Connection,
+    name: string,
+    setValues: [field: StoreField, value: StoreScalarValue][],
+    where: string | null,
+  ): Promise<number>;
+  queryHelpers: QueryHelpers;
+}
+
+export const update = async <Connection>(
+  options: StoreUpdateOptions,
+  connection: Connection,
+  { updateSet, queryHelpers }: UpdateQueries<Connection>,
+): Promise<StoreMutationResponse> => {
+ const { collection, set, where } = options;
+ const { name, fields } = collection;
+
+ const setValues = Object.entries(set).map(
+ ([key, value]): [field: StoreField, value: StoreScalarValue] => [
+ fields[key],
+ value ?? null,
+ ],
+ );
+
+ const affectedCount = await updateSet(
+ connection,
+ name,
+ setValues,
+ where ? getWhere(queryHelpers, collection, where) : null,
+ ).catch(throwStoreError);
+
+ return {
+ affectedCount,
+ };
+};
diff --git a/packages/sql-store/src/mappers/collection.ts b/packages/sql-store/src/mappers/collection.ts
new file mode 100644
index 0000000..1eae8f4
--- /dev/null
+++ b/packages/sql-store/src/mappers/collection.ts
@@ -0,0 +1,11 @@
+import { StoreCollection_Slim } from '@neuledge/store';
+
+export interface SQLTable {
+ table_name: string;
+}
+
+export const toStoreCollection_Slim = (
+ table: SQLTable,
+): StoreCollection_Slim => ({
+ name: table.table_name,
+});
diff --git a/packages/sql-store/src/mappers/field.ts b/packages/sql-store/src/mappers/field.ts
new file mode 100644
index 0000000..e77c966
--- /dev/null
+++ b/packages/sql-store/src/mappers/field.ts
@@ -0,0 +1,34 @@
+import { StoreError, StoreField, StoreShapeType } from '@neuledge/store';
+
+export interface SQLColumn {
+ column_name: string;
+ data_type: string;
+ list?: boolean | 1 | 0 | null;
+ character_maximum_length: number | null;
+ numeric_precision: number | null;
+ numeric_scale: number | null;
+ is_nullable: boolean | 1 | 0;
+}
+
+export const toStoreField = (
+  dataTypeMap: Record<string, StoreShapeType>,
+ column: SQLColumn,
+): StoreField => {
+ const type = dataTypeMap[column.data_type];
+ if (!type) {
+ throw new StoreError(
+ StoreError.Code.NOT_SUPPORTED,
+ `Unsupported data type "${column.data_type}" for column "${column.column_name}"`,
+ );
+ }
+
+ return {
+ name: column.column_name,
+ type,
+ list: !!column.list,
+ nullable: !!column.is_nullable,
+ size: column.character_maximum_length,
+ precision: column.numeric_precision,
+ scale: column.numeric_scale,
+ };
+};
diff --git a/packages/sql-store/src/mappers/index.ts b/packages/sql-store/src/mappers/index.ts
new file mode 100644
index 0000000..93e2374
--- /dev/null
+++ b/packages/sql-store/src/mappers/index.ts
@@ -0,0 +1,3 @@
+export * from './collection';
+export * from './field';
+export * from './store-index';
diff --git a/packages/sql-store/src/mappers/store-index.test.ts b/packages/sql-store/src/mappers/store-index.test.ts
new file mode 100644
index 0000000..0129e5d
--- /dev/null
+++ b/packages/sql-store/src/mappers/store-index.test.ts
@@ -0,0 +1,28 @@
+import { toStoreIndex } from './store-index';
+
+describe('mappers/store-index', () => {
+ describe('toStoreIndex()', () => {
+ it('should convert a single primary index', () => {
+ expect(
+ toStoreIndex([
+ {
+ index_name: 'id_index',
+ column_name: 'field_name',
+ seq_in_index: 1,
+ direction: 'ASC',
+ is_unique: true,
+ is_primary: true,
+ is_auto_increment: true,
+ },
+ ]),
+ ).toEqual({
+ name: 'id_index',
+ unique: 'primary',
+ auto: 'increment',
+ fields: {
+ field_name: { sort: 'asc' },
+ },
+ });
+ });
+ });
+});
diff --git a/packages/sql-store/src/mappers/store-index.ts b/packages/sql-store/src/mappers/store-index.ts
new file mode 100644
index 0000000..d69b25f
--- /dev/null
+++ b/packages/sql-store/src/mappers/store-index.ts
@@ -0,0 +1,45 @@
+import { StoreIndex, StorePrimaryKey } from '@neuledge/store';
+
+export interface SQLIndexAttribute {
+ index_name: string;
+ column_name: string;
+ seq_in_index: number;
+ direction: 'ASC' | 'DESC';
+ is_unique: boolean | 1 | 0;
+}
+
+export interface SQLIndexColumn extends SQLIndexAttribute {
+ is_primary: boolean | 1 | 0;
+ is_auto_increment: boolean | 1 | 0;
+}
+
+export const toStoreIndex = (
+ indexColumns: SQLIndexColumn[],
+): StoreIndex | StorePrimaryKey => {
+ const { index_name, is_unique, is_primary, is_auto_increment } =
+ indexColumns[0];
+
+ const index: StoreIndex | StorePrimaryKey = {
+ name: index_name,
+ unique: !!is_unique,
+ fields: {},
+ };
+
+ if (index.unique && is_primary) {
+ index.unique = 'primary';
+
+ if (is_auto_increment) {
+ (index as StorePrimaryKey).auto = 'increment';
+ }
+ }
+
+ indexColumns.sort((a, b) => a.seq_in_index - b.seq_in_index);
+
+ for (const statistic of indexColumns) {
+ index.fields[statistic.column_name] = {
+ sort: statistic.direction === 'ASC' ? 'asc' : 'desc',
+ };
+ }
+
+ return index;
+};
diff --git a/packages/sql-store/tsconfig.json b/packages/sql-store/tsconfig.json
new file mode 100644
index 0000000..c67724d
--- /dev/null
+++ b/packages/sql-store/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "extends": "@neuledge/tsconfig/base.json",
+ "compilerOptions": {
+ "baseUrl": "src",
+ "rootDir": "src",
+ "outDir": "dist"
+ },
+ "include": ["src"],
+ "exclude": ["node_modules", "**/__ignore__/**"]
+}
diff --git a/packages/sql-store/tsup.config.json b/packages/sql-store/tsup.config.json
new file mode 100644
index 0000000..2f3a43d
--- /dev/null
+++ b/packages/sql-store/tsup.config.json
@@ -0,0 +1,6 @@
+{
+ "entry": ["src/index.ts"],
+ "format": ["esm", "cjs"],
+ "sourcemap": true,
+ "shims": true
+}
diff --git a/packages/store/src/collection.ts b/packages/store/src/collection.ts
index 87b717a..e36407f 100644
--- a/packages/store/src/collection.ts
+++ b/packages/store/src/collection.ts
@@ -4,7 +4,7 @@ import { StoreSortDirection } from './sort';
export interface StoreCollection {
name: string;
primaryKey: StorePrimaryKey;
- indexes: Record;
+ indexes: Record;
fields: Record;
}
diff --git a/packages/store/src/error.ts b/packages/store/src/error.ts
index bce2e44..307acc7 100644
--- a/packages/store/src/error.ts
+++ b/packages/store/src/error.ts
@@ -24,9 +24,33 @@ export namespace StoreError {
export class StoreError extends Error {
static Code = StoreErrorCode;
+ public readonly originalError?: Error;
- constructor(public readonly code: StoreErrorCode, message: string) {
+ constructor(
+ public readonly code: StoreErrorCode,
+ message: string,
+ originalError?: Error | unknown,
+ ) {
super(message);
this.name = 'StoreError';
+
+ if (originalError) {
+ this.originalError =
+ originalError instanceof Error
+ ? originalError
+ : new Error(String(originalError));
+ }
}
}
+
+export const throwStoreError = (error: unknown): never => {
+ if (error instanceof StoreError) {
+ throw error;
+ }
+
+ throw new StoreError(
+ StoreError.Code.INTERNAL_ERROR,
+ String((error as Error)?.message || error),
+ error,
+ );
+};
diff --git a/packages/store/src/store.ts b/packages/store/src/store.ts
index 564bbc2..54e1bc8 100644
--- a/packages/store/src/store.ts
+++ b/packages/store/src/store.ts
@@ -52,7 +52,13 @@ export interface StoreDropCollectionOptions {
export interface StoreFindOptions {
collection: StoreCollection;
+
+ /**
+ * Select only the specified fields to be returned.
+ * If not specified, all fields will be returned.
+ */
select?: StoreSelect | null;
+
where?: StoreWhere | null;
innerJoin?: StoreJoin | null;
leftJoin?: StoreLeftJoin | null;
@@ -69,14 +75,18 @@ export interface StoreInsertOptions {
export interface StoreUpdateOptions {
collection: StoreCollection;
where?: StoreWhere | null;
+
+ /**
+ * Set is a document that contains the fields to be updated and their new values.
+ * The fields that are not present in the set document will not be updated.
+ * `undefined` values will be converted to `null` values.
+ */
set: StoreDocument;
- limit: number;
}
export interface StoreDeleteOptions {
collection: StoreCollection;
where?: StoreWhere | null;
- limit: number;
}
export interface StoreInsertionResponse extends StoreMutationResponse {
diff --git a/yarn.lock b/yarn.lock
index 89bf15c..b094968 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2003,6 +2003,13 @@
resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c"
integrity sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==
+"@types/mysql@^2.15.21":
+ version "2.15.21"
+ resolved "https://registry.yarnpkg.com/@types/mysql/-/mysql-2.15.21.tgz#7516cba7f9d077f980100c85fd500c8210bd5e45"
+ integrity sha512-NPotx5CVful7yB+qZbWtXL2fA4e7aEHkihHLjklc6ID8aq7bhguHgeIoC1EmSNTAuCgI6ZXrjt2ZSaXnYX0EUg==
+ dependencies:
+ "@types/node" "*"
+
"@types/node@*", "@types/node@^18.15.11":
version "18.15.11"
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.11.tgz#b3b790f09cb1696cffcec605de025b088fa4225f"
@@ -2023,6 +2030,20 @@
resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==
+"@types/pg-format@^1.0.2":
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/@types/pg-format/-/pg-format-1.0.2.tgz#3c63fcb3723a6888c8fad740866b1061634d037e"
+ integrity sha512-D3MEO6u3BObw3G4Xewjdx05MF5v/fiog78CedtrXe8BhONM8GvUz2dPfLWtI0BPRBoRd6anPHXe+sbrPReZouQ==
+
+"@types/pg@^8.6.6":
+ version "8.6.6"
+ resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.6.6.tgz#21cdf873a3e345a6e78f394677e3b3b1b543cb80"
+ integrity sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==
+ dependencies:
+ "@types/node" "*"
+ pg-protocol "*"
+ pg-types "^2.2.0"
+
"@types/pluralize@^0.0.29":
version "0.0.29"
resolved "https://registry.yarnpkg.com/@types/pluralize/-/pluralize-0.0.29.tgz#6ffa33ed1fc8813c469b859681d09707eb40d03c"
@@ -2579,6 +2600,11 @@ better-path-resolve@1.0.0:
dependencies:
is-windows "^1.0.0"
+bignumber.js@9.0.0:
+ version "9.0.0"
+ resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.0.tgz#805880f84a329b5eac6e7cb6f8274b6d82bdf075"
+ integrity sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==
+
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@@ -2666,6 +2692,11 @@ buffer-from@^1.0.0:
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
+buffer-writer@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04"
+ integrity sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==
+
buffer@^5.5.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0"
@@ -3052,6 +3083,11 @@ cookie@^0.5.0:
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
+core-util-is@~1.0.0:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
+ integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
+
cosmiconfig@8.0.0:
version "8.0.0"
resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.0.0.tgz#e9feae014eab580f858f8a0288f38997a7bebe97"
@@ -4471,7 +4507,7 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"
-inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4:
+inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@@ -4761,6 +4797,11 @@ is-windows@^1.0.0, is-windows@^1.0.1:
resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
+isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+ integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
+
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
@@ -5778,6 +5819,16 @@ mylas@^2.1.9:
resolved "https://registry.yarnpkg.com/mylas/-/mylas-2.1.13.tgz#1e23b37d58fdcc76e15d8a5ed23f9ae9fc0cbdf4"
integrity sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg==
+mysql@^2.18.1:
+ version "2.18.1"
+ resolved "https://registry.yarnpkg.com/mysql/-/mysql-2.18.1.tgz#2254143855c5a8c73825e4522baf2ea021766717"
+ integrity sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==
+ dependencies:
+ bignumber.js "9.0.0"
+ readable-stream "2.3.7"
+ safe-buffer "5.1.2"
+ sqlstring "2.3.1"
+
mz@^2.7.0:
version "2.7.0"
resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32"
@@ -6039,6 +6090,11 @@ p-try@^2.0.0:
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
+packet-reader@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74"
+ integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==
+
param-case@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5"
@@ -6161,6 +6217,62 @@ pend@~1.2.0:
resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==
+pg-connection-string@^2.5.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
+ integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==
+
+pg-format@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/pg-format/-/pg-format-1.0.4.tgz#27734236c2ad3f4e5064915a59334e20040a828e"
+ integrity sha512-YyKEF78pEA6wwTAqOUaHIN/rWpfzzIuMh9KdAhc3rSLQ/7zkRFcCgYBAEGatDstLyZw4g0s9SNICmaTGnBVeyw==
+
+pg-int8@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
+ integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==
+
+pg-pool@^3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.0.tgz#3190df3e4747a0d23e5e9e8045bcd99bda0a712e"
+ integrity sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ==
+
+pg-protocol@*, pg-protocol@^1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833"
+ integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==
+
+pg-types@^2.1.0, pg-types@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3"
+ integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==
+ dependencies:
+ pg-int8 "1.0.1"
+ postgres-array "~2.0.0"
+ postgres-bytea "~1.0.0"
+ postgres-date "~1.0.4"
+ postgres-interval "^1.1.0"
+
+pg@^8.10.0:
+ version "8.10.0"
+ resolved "https://registry.yarnpkg.com/pg/-/pg-8.10.0.tgz#5b8379c9b4a36451d110fc8cd98fc325fe62ad24"
+ integrity sha512-ke7o7qSTMb47iwzOSaZMfeR7xToFdkE71ifIipOAAaLIM0DYzfOAXlgFFmYUIE2BcJtvnVlGCID84ZzCegE8CQ==
+ dependencies:
+ buffer-writer "2.0.0"
+ packet-reader "1.0.0"
+ pg-connection-string "^2.5.0"
+ pg-pool "^3.6.0"
+ pg-protocol "^1.6.0"
+ pg-types "^2.1.0"
+ pgpass "1.x"
+
+pgpass@1.x:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d"
+ integrity sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==
+ dependencies:
+ split2 "^4.1.0"
+
picocolors@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
@@ -6238,6 +6350,28 @@ postcss-load-config@^3.0.1:
lilconfig "^2.0.5"
yaml "^1.10.2"
+postgres-array@~2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e"
+ integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==
+
+postgres-bytea@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35"
+ integrity sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==
+
+postgres-date@~1.0.4:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8"
+ integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==
+
+postgres-interval@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695"
+ integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==
+ dependencies:
+ xtend "^4.0.0"
+
prebuild-install@^7.0.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.1.tgz#de97d5b34a70a0c81334fd24641f2a1702352e45"
@@ -6292,6 +6426,11 @@ pretty-format@^29.0.0, pretty-format@^29.5.0:
ansi-styles "^5.0.0"
react-is "^18.0.0"
+process-nextick-args@~2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
+ integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
+
process-warning@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-2.1.0.tgz#1e60e3bfe8183033bbc1e702c2da74f099422d1a"
@@ -6448,6 +6587,19 @@ read@^1.0.7:
dependencies:
mute-stream "~0.0.4"
+readable-stream@2.3.7:
+ version "2.3.7"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
+ integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.1.1"
+ util-deprecate "~1.0.1"
+
readable-stream@^3.1.1, readable-stream@^3.4.0:
version "3.6.2"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
@@ -6646,6 +6798,11 @@ rxjs@^7.5.5:
dependencies:
tslib "^2.1.0"
+safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+ integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
@@ -6960,6 +7117,11 @@ split2@^4.0.0:
resolved "https://registry.yarnpkg.com/split2/-/split2-4.1.0.tgz#101907a24370f85bb782f08adaabe4e281ecf809"
integrity sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==
+split2@^4.1.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4"
+ integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==
+
sponge-case@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/sponge-case/-/sponge-case-1.0.1.tgz#260833b86453883d974f84854cdb63aecc5aef4c"
@@ -6972,6 +7134,11 @@ sprintf-js@~1.0.2:
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
+sqlstring@2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.1.tgz#475393ff9e91479aea62dcaf0ca3d14983a7fb40"
+ integrity sha512-ooAzh/7dxIG5+uDik1z/Rd1vli0+38izZhGzSa34FwR7IbelPWCCKSNIl8jlL/F7ERvy8CB2jNeM1E9i9mXMAQ==
+
stack-utils@^2.0.3:
version "2.0.6"
resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f"
@@ -7057,6 +7224,13 @@ string_decoder@^1.1.1:
dependencies:
safe-buffer "~5.2.0"
+string_decoder@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+ integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
+ dependencies:
+ safe-buffer "~5.1.0"
+
strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
@@ -7603,7 +7777,7 @@ urlpattern-polyfill@^6.0.2:
dependencies:
braces "^3.0.2"
-util-deprecate@^1.0.1:
+util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
@@ -7819,7 +7993,7 @@ xmlbuilder@~11.0.0:
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==
-xtend@^4.0.2:
+xtend@^4.0.0, xtend@^4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==