diff --git a/.changeset/blue-impalas-cheer.md b/.changeset/blue-impalas-cheer.md
new file mode 100644
index 000000000..105fcdabc
--- /dev/null
+++ b/.changeset/blue-impalas-cheer.md
@@ -0,0 +1,5 @@
+---
+'@electric-sql/pglite': patch
+---
+
+Added dict_int, dict_xsyn, file_fdw, intarray, pageinspect, pg_buffercache, pg_freespacemap, pg_surgery, pg_visibility, pg_walinspect, unaccent contrib extensions
diff --git a/docs/docs/videos.md b/docs/docs/videos.md
index b2177c171..8fb39213b 100644
--- a/docs/docs/videos.md
+++ b/docs/docs/videos.md
@@ -3,3 +3,11 @@
Compiling Postgres to WASM with PGlite (Sam Willis at [pgconf.dev](https://pgconf.dev)).
+
+Compiling Postgres to WASM with PGlite (Sam Willis at [San Francisco Bay Area PostgreSQL Users Group](https://www.youtube.com/@sanfranciscobayareapostgre4592))
+
+
+
+Vibe coding with a database in the sandbox - [Bolt](https://bolt.new) + PGlite - Demo video
+
+
diff --git a/docs/extensions/extensions.data.ts b/docs/extensions/extensions.data.ts
index 6535e668e..f7e0107d1 100644
--- a/docs/extensions/extensions.data.ts
+++ b/docs/extensions/extensions.data.ts
@@ -179,6 +179,183 @@ const baseExtensions: Extension[] = [
core: true,
size: 21380,
},
+ {
+ name: 'intarray',
+ description: `
+ The intarray module provides a number of useful functions and operators for
+ manipulating null-free arrays of integers. There is also support for indexed
+ searches using some of the operators.
+ `,
+ shortDescription: 'Operators for manipulating null-free arrays of integers',
+      docs: 'https://www.postgresql.org/docs/18/intarray.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/intarray',
+ importName: 'intarray',
+ core: true,
+ size: 14712,
+ },
+ {
+ name: 'dict_xsyn',
+ description: `
+ dict_xsyn (Extended Synonym Dictionary) is an example of an add-on dictionary
+ template for full-text search. This dictionary type replaces words with groups
+ of their synonyms, and so makes it possible to search for a word using any of
+ its synonyms.
+ `,
+ shortDescription: 'Example synonym full-text search dictionary',
+ docs: 'https://www.postgresql.org/docs/18/dict-xsyn.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/dict_xsyn',
+ importName: 'dict_xsyn',
+ core: true,
+ size: 1948,
+ },
+ {
+ name: 'pageinspect',
+ description: `
+ The pageinspect module provides functions that allow you to inspect the contents
+ of database pages at a low level, which is useful for debugging purposes. All of
+ these functions may be used only by superusers.
+ `,
+      shortDescription: 'Low-level inspection of database pages',
+ docs: 'https://www.postgresql.org/docs/18/pageinspect.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/pageinspect',
+ importName: 'pageinspect',
+ core: true,
+ size: 15923,
+ },
+ {
+ name: 'dict_int',
+ description: `
+ dict_int is an example of an add-on dictionary template for full-text search.
+ The motivation for this example dictionary is to control the indexing of integers
+ (signed and unsigned), allowing such numbers to be indexed while preventing
+ excessive growth in the number of unique words, which greatly affects the
+ performance of searching.
+ `,
+ shortDescription: 'Example full-text search dictionary for integers',
+ docs: 'https://www.postgresql.org/docs/18/dict-int.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/dict_int',
+ importName: 'dict_int',
+ core: true,
+ size: 1361,
+ },
+ {
+ name: 'unaccent',
+ description: `
+ unaccent is a text search dictionary that removes accents (diacritic signs)
+ from lexemes. It's a filtering dictionary, which means its output is always
+ passed to the next dictionary (if any), unlike the normal behavior of
+ dictionaries. This allows accent-insensitive processing for full text search.
+ `,
+ shortDescription: 'A text search dictionary which removes diacritics',
+ docs: 'https://www.postgresql.org/docs/current/unaccent.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/unaccent',
+ importName: 'unaccent',
+ core: true,
+ size: 9323,
+ },
+ {
+ name: 'pg_surgery',
+ description: `
+ The pg_surgery module provides various functions to perform surgery on a damaged
+ relation. These functions are unsafe by design and using them may corrupt
+ (or further corrupt) your database. For example, these functions can easily be
+ used to make a table inconsistent with its own indexes, to cause UNIQUE or
+ FOREIGN KEY constraint violations, or even to make tuples visible which, when read,
+ will cause a database server crash. They should be used with great caution and
+ only as a last resort.
+ `,
+ shortDescription: 'Perform low-level surgery on relation data',
+ docs: 'https://www.postgresql.org/docs/current/pgsurgery.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/pg_surgery',
+ importName: 'pg_surgery',
+ core: true,
+ size: 2635,
+ },
+ {
+ name: 'pg_walinspect',
+ description: `
+ The pg_walinspect module provides SQL functions that allow you to inspect the
+      contents of the write-ahead log of a running PostgreSQL database cluster at a low level,
+ which is useful for debugging, analytical, reporting or educational purposes.
+ It is similar to pg_waldump, but accessible through SQL rather than a separate utility.
+ `,
+ shortDescription: 'Low-level WAL inspection',
+ docs: 'https://www.postgresql.org/docs/current/pgwalinspect.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/pg_walinspect',
+ importName: 'pg_walinspect',
+ core: true,
+ size: 4689,
+ },
+ {
+ name: 'pg_visibility',
+ description: `
+ The pg_visibility module provides a means for examining the visibility map (VM)
+ and page-level visibility information of a table. It also provides functions to
+ check the integrity of a visibility map and to force it to be rebuilt.
+ `,
+ shortDescription: 'Visibility map information and utilities',
+ docs: 'https://www.postgresql.org/docs/current/pgvisibility.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/pg_visibility',
+ importName: 'pg_visibility',
+ core: true,
+ size: 4159,
+ },
+ {
+ name: 'pg_freespacemap',
+ description: `
+ The pg_freespacemap module provides a means for examining the free space map (FSM).
+ It provides a function called pg_freespace, or two overloaded functions, to be precise.
+ The functions show the value recorded in the free space map for a given page, or
+ for all pages in the relation.
+ `,
+ shortDescription: 'Examine the free space map',
+ docs: 'https://www.postgresql.org/docs/current/pgfreespacemap.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/pg_freespacemap',
+ importName: 'pg_freespacemap',
+ core: true,
+ size: 1485,
+ },
+ {
+ name: 'pg_buffercache',
+ description: `
+ The pg_buffercache module provides a means for examining what's happening in the
+ shared buffer cache in real time. It also offers a low-level way to evict data
+ from it, for testing purposes.
+ `,
+ shortDescription: 'Inspect PostgreSQL buffer cache state',
+ docs: 'https://www.postgresql.org/docs/current/pgbuffercache.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/pg_buffercache',
+ importName: 'pg_buffercache',
+ core: true,
+ size: 3133,
+ },
+ {
+ name: 'file_fdw',
+ description: `
+ The file_fdw module provides the foreign-data wrapper file_fdw, which can be
+ used to access data files in the server's file system, or to execute programs
+ on the server and read their output. The data file or program output must be
+ in a format that can be read by COPY FROM. Access to data files is currently
+ read-only.
+ `,
+      shortDescription: "Access data files in the server's file system",
+ docs: 'https://www.postgresql.org/docs/18/file-fdw.html',
+ tags: ['postgres extension', 'postgres/contrib'],
+ importPath: '@electric-sql/pglite/contrib/file_fdw',
+ importName: 'file_fdw',
+ core: true,
+ size: 4467,
+ },
{
name: 'isn',
description: `
diff --git a/docs/repl/allExtensions.ts b/docs/repl/allExtensions.ts
index e57951e76..79590906f 100644
--- a/docs/repl/allExtensions.ts
+++ b/docs/repl/allExtensions.ts
@@ -1,4 +1,3 @@
-export { vector } from '@electric-sql/pglite/vector'
export { amcheck } from '@electric-sql/pglite/contrib/amcheck'
export { auto_explain } from '@electric-sql/pglite/contrib/auto_explain'
export { bloom } from '@electric-sql/pglite/contrib/bloom'
@@ -6,16 +5,28 @@ export { btree_gin } from '@electric-sql/pglite/contrib/btree_gin'
export { btree_gist } from '@electric-sql/pglite/contrib/btree_gist'
export { citext } from '@electric-sql/pglite/contrib/citext'
export { cube } from '@electric-sql/pglite/contrib/cube'
+export { dict_int } from '@electric-sql/pglite/contrib/dict_int'
+export { dict_xsyn } from '@electric-sql/pglite/contrib/dict_xsyn'
export { earthdistance } from '@electric-sql/pglite/contrib/earthdistance'
+export { file_fdw } from '@electric-sql/pglite/contrib/file_fdw'
export { fuzzystrmatch } from '@electric-sql/pglite/contrib/fuzzystrmatch'
export { hstore } from '@electric-sql/pglite/contrib/hstore'
+export { intarray } from '@electric-sql/pglite/contrib/intarray'
export { isn } from '@electric-sql/pglite/contrib/isn'
export { lo } from '@electric-sql/pglite/contrib/lo'
export { ltree } from '@electric-sql/pglite/contrib/ltree'
+export { pageinspect } from '@electric-sql/pglite/contrib/pageinspect'
+export { pg_buffercache } from '@electric-sql/pglite/contrib/pg_buffercache'
+export { pg_freespacemap } from '@electric-sql/pglite/contrib/pg_freespacemap'
+export { pg_surgery } from '@electric-sql/pglite/contrib/pg_surgery'
export { pg_trgm } from '@electric-sql/pglite/contrib/pg_trgm'
+export { pg_visibility } from '@electric-sql/pglite/contrib/pg_visibility'
+export { pg_walinspect } from '@electric-sql/pglite/contrib/pg_walinspect'
export { seg } from '@electric-sql/pglite/contrib/seg'
export { tablefunc } from '@electric-sql/pglite/contrib/tablefunc'
export { tcn } from '@electric-sql/pglite/contrib/tcn'
export { tsm_system_rows } from '@electric-sql/pglite/contrib/tsm_system_rows'
export { tsm_system_time } from '@electric-sql/pglite/contrib/tsm_system_time'
+export { unaccent } from '@electric-sql/pglite/contrib/unaccent'
export { uuid_ossp } from '@electric-sql/pglite/contrib/uuid_ossp'
+export { vector } from '@electric-sql/pglite/vector'
diff --git a/package.json b/package.json
index 96a557ed8..fa32c00fd 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,7 @@
"ci:publish": "pnpm changeset publish",
"ts:build": "pnpm -r --filter \"./packages/**\" build",
"ts:build:debug": "DEBUG=true pnpm ts:build",
- "wasm:build": "cd postgres-pglite && ./build-with-docker.sh; cd .. && mkdir -p ./packages/pglite/release/ && cp ./postgres-pglite/dist/bin/pglite.* ./packages/pglite/release/ && cp ./postgres-pglite/dist/extensions/*.tar.gz ./packages/pglite/release/",
+ "wasm:build": "cd postgres-pglite && ./build-with-docker.sh && cd .. && mkdir -p ./packages/pglite/release/ && cp ./postgres-pglite/dist/bin/pglite.* ./packages/pglite/release/ && cp ./postgres-pglite/dist/extensions/*.tar.gz ./packages/pglite/release/",
"wasm:build:debug": "DEBUG=true pnpm wasm:build",
"build:all": "pnpm wasm:build && pnpm ts:build",
"build:all:debug": "DEBUG=true pnpm build:all"
diff --git a/packages/benchmark/src/rtt.js b/packages/benchmark/src/rtt.js
index c9cc362b4..421113647 100644
--- a/packages/benchmark/src/rtt.js
+++ b/packages/benchmark/src/rtt.js
@@ -9,13 +9,11 @@ const CONFIGURATIONS = new Map(
label: 'PGlite Memory
(CMA Transport default)',
db: 'pglite',
dataDir: '',
- options: { defaultDataTransferContainer: 'cma' },
},
{
label: 'PGlite Memory
(File Transport)',
db: 'pglite',
dataDir: '',
- options: { defaultDataTransferContainer: 'file' },
},
{
label: 'PGlite IDB',
diff --git a/packages/pglite-tools/src/pg_dump.ts b/packages/pglite-tools/src/pg_dump.ts
index 217608e69..dcbdaf61b 100644
--- a/packages/pglite-tools/src/pg_dump.ts
+++ b/packages/pglite-tools/src/pg_dump.ts
@@ -148,6 +148,7 @@ async function execPgDump({
await pg.runExclusive(async () => {
exitCode = wasi.start(app.instance.exports)
})
+
return [exitCode!, acc, errorMessage]
}
diff --git a/packages/pglite/src/base.ts b/packages/pglite/src/base.ts
index 0ed588f30..be420d407 100644
--- a/packages/pglite/src/base.ts
+++ b/packages/pglite/src/base.ts
@@ -24,6 +24,7 @@ import {
RowDescriptionMessage,
ParameterDescriptionMessage,
DatabaseError,
+ BackendMessage,
} from '@electric-sql/pg-protocol/messages'
import { makePGliteError } from './errors.js'
@@ -52,6 +53,16 @@ export abstract class BasePGlite
{ syncToFs, onNotice }: ExecProtocolOptions,
): Promise
+ /**
+ * Execute a postgres wire protocol message
+ * @param message The postgres wire protocol message to execute
+ * @returns The parsed results of the query
+ */
+ abstract execProtocolStream(
+ message: Uint8Array,
+ { syncToFs, onNotice }: ExecProtocolOptions,
+ ): Promise
+
/**
* Execute a postgres wire protocol message directly without wrapping the response.
* Only use if `execProtocol()` doesn't suite your needs.
@@ -65,7 +76,7 @@ export abstract class BasePGlite
*/
abstract execProtocolRaw(
message: Uint8Array,
- { syncToFs, dataTransferContainer }: ExecProtocolOptions,
+ { syncToFs }: ExecProtocolOptions,
): Promise
/**
@@ -137,8 +148,11 @@ export abstract class BasePGlite
async #execProtocolNoSync(
message: Uint8Array,
options: ExecProtocolOptions = {},
- ): Promise {
- return await this.execProtocol(message, { ...options, syncToFs: false })
+ ): Promise {
+ return await this.execProtocolStream(message, {
+ ...options,
+ syncToFs: false,
+ })
}
/**
@@ -228,21 +242,19 @@ export abstract class BasePGlite
this.#log('runQuery', query, params, options)
await this._handleBlob(options?.blob)
- let results
+ let results = []
try {
- const { messages: parseResults } = await this.#execProtocolNoSync(
+ const parseResults = await this.#execProtocolNoSync(
serializeProtocol.parse({ text: query, types: options?.paramTypes }),
options,
)
const dataTypeIDs = parseDescribeStatementResults(
- (
- await this.#execProtocolNoSync(
- serializeProtocol.describe({ type: 'S' }),
- options,
- )
- ).messages,
+ await this.#execProtocolNoSync(
+ serializeProtocol.describe({ type: 'S' }),
+ options,
+ ),
)
const values = params.map((param, i) => {
@@ -260,26 +272,20 @@ export abstract class BasePGlite
results = [
...parseResults,
- ...(
- await this.#execProtocolNoSync(
- serializeProtocol.bind({
- values,
- }),
- options,
- )
- ).messages,
- ...(
- await this.#execProtocolNoSync(
- serializeProtocol.describe({ type: 'P' }),
- options,
- )
- ).messages,
- ...(
- await this.#execProtocolNoSync(
- serializeProtocol.execute({}),
- options,
- )
- ).messages,
+ ...(await this.#execProtocolNoSync(
+ serializeProtocol.bind({
+ values,
+ }),
+ options,
+ )),
+ ...(await this.#execProtocolNoSync(
+ serializeProtocol.describe({ type: 'P' }),
+ options,
+ )),
+ ...(await this.#execProtocolNoSync(
+ serializeProtocol.execute({}),
+ options,
+ )),
]
} catch (e) {
if (e instanceof DatabaseError) {
@@ -288,7 +294,12 @@ export abstract class BasePGlite
}
throw e
} finally {
- await this.#execProtocolNoSync(serializeProtocol.sync(), options)
+ results.push(
+ ...(await this.#execProtocolNoSync(
+ serializeProtocol.sync(),
+ options,
+ )),
+ )
}
await this._cleanupBlob()
@@ -315,14 +326,12 @@ export abstract class BasePGlite
// No params so we can just send the query
this.#log('runExec', query, options)
await this._handleBlob(options?.blob)
- let results
+ let results = []
try {
- results = (
- await this.#execProtocolNoSync(
- serializeProtocol.query(query),
- options,
- )
- ).messages
+ results = await this.#execProtocolNoSync(
+ serializeProtocol.query(query),
+ options,
+ )
} catch (e) {
if (e instanceof DatabaseError) {
const pgError = makePGliteError({
@@ -335,7 +344,12 @@ export abstract class BasePGlite
}
throw e
} finally {
- await this.#execProtocolNoSync(serializeProtocol.sync(), options)
+ results.push(
+ ...(await this.#execProtocolNoSync(
+ serializeProtocol.sync(),
+ options,
+ )),
+ )
}
this._cleanupBlob()
if (!this.#inTransaction) {
@@ -360,38 +374,17 @@ export abstract class BasePGlite
query: string,
options?: QueryOptions,
): Promise {
+ let messages = []
try {
await this.#execProtocolNoSync(
serializeProtocol.parse({ text: query, types: options?.paramTypes }),
options,
)
- const describeResults = await this.#execProtocolNoSync(
+ messages = await this.#execProtocolNoSync(
serializeProtocol.describe({ type: 'S' }),
options,
)
- const paramDescription = describeResults.messages.find(
- (msg): msg is ParameterDescriptionMessage =>
- msg.name === 'parameterDescription',
- )
- const resultDescription = describeResults.messages.find(
- (msg): msg is RowDescriptionMessage => msg.name === 'rowDescription',
- )
-
- const queryParams =
- paramDescription?.dataTypeIDs.map((dataTypeID) => ({
- dataTypeID,
- serializer: this.serializers[dataTypeID],
- })) ?? []
-
- const resultFields =
- resultDescription?.fields.map((field) => ({
- name: field.name,
- dataTypeID: field.dataTypeID,
- parser: this.parsers[field.dataTypeID],
- })) ?? []
-
- return { queryParams, resultFields }
} catch (e) {
if (e instanceof DatabaseError) {
const pgError = makePGliteError({
@@ -404,8 +397,33 @@ export abstract class BasePGlite
}
throw e
} finally {
- await this.#execProtocolNoSync(serializeProtocol.sync(), options)
+ messages.push(
+ ...(await this.#execProtocolNoSync(serializeProtocol.sync(), options)),
+ )
}
+
+ const paramDescription = messages.find(
+ (msg): msg is ParameterDescriptionMessage =>
+ msg.name === 'parameterDescription',
+ )
+ const resultDescription = messages.find(
+ (msg): msg is RowDescriptionMessage => msg.name === 'rowDescription',
+ )
+
+ const queryParams =
+ paramDescription?.dataTypeIDs.map((dataTypeID) => ({
+ dataTypeID,
+ serializer: this.serializers[dataTypeID],
+ })) ?? []
+
+ const resultFields =
+ resultDescription?.fields.map((field) => ({
+ name: field.name,
+ dataTypeID: field.dataTypeID,
+ parser: this.parsers[field.dataTypeID],
+ })) ?? []
+
+ return { queryParams, resultFields }
}
/**
diff --git a/packages/pglite/src/contrib/dict_int.ts b/packages/pglite/src/contrib/dict_int.ts
new file mode 100644
index 000000000..fe1e69b30
--- /dev/null
+++ b/packages/pglite/src/contrib/dict_int.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/dict_int.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const dict_int = {
+ name: 'dict_int',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/dict_xsyn.ts b/packages/pglite/src/contrib/dict_xsyn.ts
new file mode 100644
index 000000000..d907576ad
--- /dev/null
+++ b/packages/pglite/src/contrib/dict_xsyn.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/dict_xsyn.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const dict_xsyn = {
+ name: 'dict_xsyn',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/file_fdw.ts b/packages/pglite/src/contrib/file_fdw.ts
new file mode 100644
index 000000000..c2eb46c20
--- /dev/null
+++ b/packages/pglite/src/contrib/file_fdw.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/file_fdw.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const file_fdw = {
+ name: 'file_fdw',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/intarray.ts b/packages/pglite/src/contrib/intarray.ts
new file mode 100644
index 000000000..295b22554
--- /dev/null
+++ b/packages/pglite/src/contrib/intarray.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/intarray.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const intarray = {
+ name: 'intarray',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/pageinspect.ts b/packages/pglite/src/contrib/pageinspect.ts
new file mode 100644
index 000000000..8b07d61e9
--- /dev/null
+++ b/packages/pglite/src/contrib/pageinspect.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/pageinspect.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const pageinspect = {
+ name: 'pageinspect',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/pg_buffercache.ts b/packages/pglite/src/contrib/pg_buffercache.ts
new file mode 100644
index 000000000..de6b22cd3
--- /dev/null
+++ b/packages/pglite/src/contrib/pg_buffercache.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/pg_buffercache.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const pg_buffercache = {
+ name: 'pg_buffercache',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/pg_freespacemap.ts b/packages/pglite/src/contrib/pg_freespacemap.ts
new file mode 100644
index 000000000..2a5079a53
--- /dev/null
+++ b/packages/pglite/src/contrib/pg_freespacemap.ts
@@ -0,0 +1,19 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL(
+ '../../release/pg_freespacemap.tar.gz',
+ import.meta.url,
+ ),
+ } satisfies ExtensionSetupResult
+}
+
+export const pg_freespacemap = {
+ name: 'pg_freespacemap',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/pg_surgery.ts b/packages/pglite/src/contrib/pg_surgery.ts
new file mode 100644
index 000000000..c20a8a770
--- /dev/null
+++ b/packages/pglite/src/contrib/pg_surgery.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/pg_surgery.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const pg_surgery = {
+ name: 'pg_surgery',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/pg_visibility.ts b/packages/pglite/src/contrib/pg_visibility.ts
new file mode 100644
index 000000000..c91bed05a
--- /dev/null
+++ b/packages/pglite/src/contrib/pg_visibility.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/pg_visibility.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const pg_visibility = {
+ name: 'pg_visibility',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/pg_walinspect.ts b/packages/pglite/src/contrib/pg_walinspect.ts
new file mode 100644
index 000000000..e80de7b24
--- /dev/null
+++ b/packages/pglite/src/contrib/pg_walinspect.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/pg_walinspect.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const pg_walinspect = {
+ name: 'pg_walinspect',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/contrib/unaccent.ts b/packages/pglite/src/contrib/unaccent.ts
new file mode 100644
index 000000000..47fa97934
--- /dev/null
+++ b/packages/pglite/src/contrib/unaccent.ts
@@ -0,0 +1,16 @@
+import type {
+ Extension,
+ ExtensionSetupResult,
+ PGliteInterface,
+} from '../interface'
+
+const setup = async (_pg: PGliteInterface, _emscriptenOpts: any) => {
+ return {
+ bundlePath: new URL('../../release/unaccent.tar.gz', import.meta.url),
+ } satisfies ExtensionSetupResult
+}
+
+export const unaccent = {
+ name: 'unaccent',
+ setup,
+} satisfies Extension
diff --git a/packages/pglite/src/interface.ts b/packages/pglite/src/interface.ts
index 4a8fefa5f..d75110976 100644
--- a/packages/pglite/src/interface.ts
+++ b/packages/pglite/src/interface.ts
@@ -33,7 +33,6 @@ export interface ExecProtocolOptions {
syncToFs?: boolean
throwOnError?: boolean
onNotice?: (notice: NoticeMessage) => void
- dataTransferContainer?: DataTransferContainer
}
export interface ExtensionSetupResult {
@@ -78,8 +77,6 @@ export interface DumpDataDirResult {
filename: string
}
-export type DataTransferContainer = 'cma' | 'file'
-
export interface PGliteOptions {
dataDir?: string
username?: string
@@ -94,7 +91,6 @@ export interface PGliteOptions {
fsBundle?: Blob | File
parsers?: ParserOptions
serializers?: SerializerOptions
- defaultDataTransferContainer?: DataTransferContainer
}
export type PGliteInterface =
diff --git a/packages/pglite/src/pglite.ts b/packages/pglite/src/pglite.ts
index e9ef1ffea..af32bd27a 100644
--- a/packages/pglite/src/pglite.ts
+++ b/packages/pglite/src/pglite.ts
@@ -17,7 +17,6 @@ import type {
PGliteInterface,
PGliteInterfaceExtensions,
PGliteOptions,
- DataTransferContainer,
Transaction,
} from './interface.js'
import PostgresModFactory, { type PostgresMod } from './postgresMod.js'
@@ -61,8 +60,6 @@ export class PGlite
#fsSyncMutex = new Mutex()
#fsSyncScheduled = false
- #dataTransferContainer: DataTransferContainer = 'cma'
-
readonly debug: DebugLevel = 0
#extensions: Extensions
@@ -78,6 +75,30 @@ export class PGlite
#notifyListeners = new Map void>>()
#globalNotifyListeners = new Set<(channel: string, payload: string) => void>()
+ // receive data from wasm
+ #pglite_write: number = -1
+
+ #currentResults: BackendMessage[] = []
+ #currentThrowOnError: boolean = false
+ #currentOnNotice: ((notice: NoticeMessage) => void) | undefined
+
+ // send data to wasm
+ #pglite_read: number = -1
+ // buffer that holds the data to be sent to wasm
+ #outputData: any = []
+ // read index in the buffer
+ #readOffset: number = 0
+ #currentDatabaseError: DatabaseError | null = null
+
+ #keepRawResponse: boolean = true
+ // these are needed for point 2 above
+ static readonly DEFAULT_RECV_BUF_SIZE: number = 1 * 1024 * 1024 // 1MB default
+ static readonly MAX_BUFFER_SIZE: number = Math.pow(2, 30)
+ // buffer that holds data received from wasm
+ #inputData = new Uint8Array(0)
+ // write index in the buffer
+ #writeOffset: number = 0
+
/**
* Create a new PGlite instance
* @param dataDir The directory to store the database files
@@ -126,11 +147,6 @@ export class PGlite
this.#relaxedDurability = options.relaxedDurability
}
- // Set the default data transfer container
- if (options?.defaultDataTransferContainer !== undefined) {
- this.#dataTransferContainer = options.defaultDataTransferContainer
- }
-
// Save the extensions for later use
this.#extensions = options.extensions ?? {}
@@ -370,6 +386,71 @@ export class PGlite
// Load the database engine
this.mod = await PostgresModFactory(emscriptenOpts)
+ // set the write callback
+ this.#pglite_write = this.mod.addFunction((ptr: any, length: number) => {
+ let bytes
+ try {
+ bytes = this.mod!.HEAPU8.subarray(ptr, ptr + length)
+ } catch (e: any) {
+ console.error('error', e)
+ throw e
+ }
+ this.#protocolParser.parse(bytes, (msg) => {
+ this.#parse(msg)
+ })
+ if (this.#keepRawResponse) {
+ const copied = bytes.slice()
+
+ let requiredSize = this.#writeOffset + copied.length
+
+ if (requiredSize > this.#inputData.length) {
+ const newSize =
+ this.#inputData.length +
+ (this.#inputData.length >> 1) +
+ requiredSize
+ if (requiredSize > PGlite.MAX_BUFFER_SIZE) {
+ requiredSize = PGlite.MAX_BUFFER_SIZE
+ }
+ const newBuffer = new Uint8Array(newSize)
+ newBuffer.set(this.#inputData.subarray(0, this.#writeOffset))
+ this.#inputData = newBuffer
+ }
+
+ this.#inputData.set(copied, this.#writeOffset)
+ this.#writeOffset += copied.length
+
+ return this.#inputData.length
+ }
+ return length
+ }, 'iii')
+
+ // set the read callback
+ this.#pglite_read = (this.mod as any).addFunction(
+ (ptr: any, max_length: number) => {
+ // copy current data to wasm buffer
+ let length = this.#outputData.length - this.#readOffset
+ if (length > max_length) {
+ length = max_length
+ }
+ try {
+ this.mod!.HEAP8.set(
+ (this.#outputData as Uint8Array).subarray(
+ this.#readOffset,
+ this.#readOffset + length,
+ ),
+ ptr,
+ )
+ this.#readOffset += length
+ } catch (e) {
+ console.log(e)
+ }
+ return length
+ },
+ 'iii',
+ )
+
+ this.mod._set_read_write_cbs(this.#pglite_read, this.#pglite_write)
+
// Sync the filesystem from any previous store
await this.fs!.initialSyncFs()
@@ -498,6 +579,8 @@ export class PGlite
try {
await this.execProtocol(serialize.end())
this.mod!._pgl_shutdown()
+ this.mod!.removeFunction(this.#pglite_read)
+ this.mod!.removeFunction(this.#pglite_write)
} catch (e) {
const err = e as { name: string; status: number }
if (err.name === 'ExitStatus' && err.status === 0) {
@@ -575,88 +658,29 @@ export class PGlite
* @param message The postgres wire protocol message to execute
* @returns The direct message data response produced by Postgres
*/
- execProtocolRawSync(
- message: Uint8Array,
- options: { dataTransferContainer?: DataTransferContainer } = {},
- ) {
- let data
+ execProtocolRawSync(message: Uint8Array) {
const mod = this.mod!
- // >0 set buffer content type to wire protocol
- mod._use_wire(1)
- const msg_len = message.length
-
- // TODO: if (message.length>CMA_B) force file
-
- let currDataTransferContainer =
- options.dataTransferContainer ?? this.#dataTransferContainer
+ this.#readOffset = 0
+ this.#writeOffset = 0
+ this.#outputData = message
- // do we overflow allocated shared memory segment
- if (message.length >= mod.FD_BUFFER_MAX) currDataTransferContainer = 'file'
-
- switch (currDataTransferContainer) {
- case 'cma': {
- // set buffer size so answer will be at size+0x2 pointer addr
- mod._interactive_write(message.length)
- // TODO: make it seg num * seg maxsize if multiple channels.
- mod.HEAPU8.set(message, 1)
- break
- }
- case 'file': {
- // Use socketfiles to emulate a socket connection
- const pg_lck = '/tmp/pglite/base/.s.PGSQL.5432.lck.in'
- const pg_in = '/tmp/pglite/base/.s.PGSQL.5432.in'
- mod._interactive_write(0)
- mod.FS.writeFile(pg_lck, message)
- mod.FS.rename(pg_lck, pg_in)
- break
- }
- default:
- throw new Error(
- `Unknown data transfer container: ${currDataTransferContainer}`,
- )
+ if (
+ this.#keepRawResponse &&
+ this.#inputData.length !== PGlite.DEFAULT_RECV_BUF_SIZE
+ ) {
+ // the previous call might have increased the size of the buffer so reset it to its default
+ this.#inputData = new Uint8Array(PGlite.DEFAULT_RECV_BUF_SIZE)
}
// execute the message
- mod._interactive_one()
+ mod._interactive_one(message.length, message[0])
- const channel = mod._get_channel()
- if (channel < 0) currDataTransferContainer = 'file'
+ this.#outputData = []
- // TODO: use channel value for msg_start
- if (channel > 0) currDataTransferContainer = 'cma'
-
- switch (currDataTransferContainer) {
- case 'cma': {
- // Read responses from the buffer
-
- const msg_start = msg_len + 2
- const msg_end = msg_start + mod._interactive_read()
- data = mod.HEAPU8.subarray(msg_start, msg_end)
- break
- }
- case 'file': {
- // Use socketfiles to emulate a socket connection
- const pg_out = '/tmp/pglite/base/.s.PGSQL.5432.out'
- try {
- const fstat = mod.FS.stat(pg_out)
- const stream = mod.FS.open(pg_out, 'r')
- data = new Uint8Array(fstat.size)
- mod.FS.read(stream, data, 0, fstat.size, 0)
- mod.FS.unlink(pg_out)
- } catch (x) {
- // case of single X message.
- data = new Uint8Array(0)
- }
- break
- }
- default:
- throw new Error(
- `Unknown data transfer container: ${currDataTransferContainer}`,
- )
- }
-
- return data
+ if (this.#keepRawResponse && this.#writeOffset)
+ return this.#inputData.subarray(0, this.#writeOffset)
+ return new Uint8Array(0)
}
/**
@@ -672,9 +696,9 @@ export class PGlite
*/
async execProtocolRaw(
message: Uint8Array,
- { syncToFs = true, dataTransferContainer }: ExecProtocolOptions = {},
+ { syncToFs = true }: ExecProtocolOptions = {},
) {
- const data = this.execProtocolRawSync(message, { dataTransferContainer })
+ const data = this.execProtocolRawSync(message)
if (syncToFs) {
await this.syncToFs()
}
@@ -694,14 +718,72 @@ export class PGlite
onNotice,
}: ExecProtocolOptions = {},
): Promise {
+ this.#currentThrowOnError = throwOnError
+ this.#currentOnNotice = onNotice
+ this.#currentResults = []
+ this.#currentDatabaseError = null
+
const data = await this.execProtocolRaw(message, { syncToFs })
- const results: BackendMessage[] = []
- this.#protocolParser.parse(data, (msg) => {
+ const databaseError = this.#currentDatabaseError
+ this.#currentThrowOnError = false
+ this.#currentOnNotice = undefined
+ this.#currentDatabaseError = null
+ const result = { messages: this.#currentResults, data }
+ this.#currentResults = []
+
+ if (throwOnError && databaseError) {
+ this.#protocolParser = new ProtocolParser() // Reset the parser
+ throw databaseError
+ }
+
+ return result
+ }
+
+ /**
+ * Execute a postgres wire protocol message
+ * @param message The postgres wire protocol message to execute
+ * @returns The parsed results of the query
+ */
+ async execProtocolStream(
+ message: Uint8Array,
+ { syncToFs, throwOnError = true, onNotice }: ExecProtocolOptions = {},
+ ): Promise {
+ this.#currentThrowOnError = throwOnError
+ this.#currentOnNotice = onNotice
+ this.#currentResults = []
+ this.#currentDatabaseError = null
+
+ this.#keepRawResponse = false
+
+ await this.execProtocolRaw(message, { syncToFs })
+
+ this.#keepRawResponse = true
+
+ const databaseError = this.#currentDatabaseError
+ this.#currentThrowOnError = false
+ this.#currentOnNotice = undefined
+ this.#currentDatabaseError = null
+ const result = this.#currentResults
+ this.#currentResults = []
+
+ if (throwOnError && databaseError) {
+ this.#protocolParser = new ProtocolParser() // Reset the parser
+ throw databaseError
+ }
+
+ return result
+ }
+
+ #parse(msg: BackendMessage) {
+ // keep the existing logic of throwing the first db exception
+ // as soon as there is a db error, we're not interested in the remaining data
+ // but since the parser is plugged into the pglite_write callback, we can't just throw
+ // and need to ack the messages received from the db
+ if (!this.#currentDatabaseError) {
if (msg instanceof DatabaseError) {
- this.#protocolParser = new ProtocolParser() // Reset the parser
- if (throwOnError) {
- throw msg
+ if (this.#currentThrowOnError) {
+ this.#currentDatabaseError = msg
}
// TODO: Do we want to wrap the error in a custom error?
} else if (msg instanceof NoticeMessage) {
@@ -709,8 +791,8 @@ export class PGlite
// Notice messages are warnings, we should log them
console.warn(msg)
}
- if (onNotice) {
- onNotice(msg)
+ if (this.#currentOnNotice) {
+ this.#currentOnNotice(msg)
}
} else if (msg instanceof CommandCompleteMessage) {
// Keep track of the transaction state
@@ -737,10 +819,8 @@ export class PGlite
queueMicrotask(() => cb(msg.channel, msg.payload))
})
}
- results.push(msg)
- })
-
- return { messages: results, data }
+ this.#currentResults.push(msg)
+ }
}
/**
diff --git a/packages/pglite/src/postgresMod.ts b/packages/pglite/src/postgresMod.ts
index abb4a0155..2a3e8ee1e 100644
--- a/packages/pglite/src/postgresMod.ts
+++ b/packages/pglite/src/postgresMod.ts
@@ -24,16 +24,17 @@ export interface PostgresMod
WASM_PREFIX: string
INITIAL_MEMORY: number
pg_extensions: Record>
- _use_wire: (state: number) => void
_pgl_initdb: () => number
_pgl_backend: () => void
_pgl_shutdown: () => void
- _get_buffer_size: (fd: number) => number
- _get_buffer_addr: (fd: number) => number
- _get_channel: () => number
_interactive_write: (msgLength: number) => void
- _interactive_one: () => void
- _interactive_read: () => number
+ _interactive_one: (length: number, peek: number) => void
+ _set_read_write_cbs: (read_cb: number, write_cb: number) => void
+ addFunction: (
+ cb: (ptr: any, length: number) => void,
+ signature: string,
+ ) => number
+ removeFunction: (f: number) => void
}
type PostgresFactory = (
diff --git a/packages/pglite/src/utils.ts b/packages/pglite/src/utils.ts
index 5f544f1cd..0cbde2f34 100644
--- a/packages/pglite/src/utils.ts
+++ b/packages/pglite/src/utils.ts
@@ -142,23 +142,28 @@ export async function formatQuery(
tx = tx ?? pg
// Get the types of the parameters
- let dataTypeIDs: number[]
+ const messages = []
try {
await pg.execProtocol(serializeProtocol.parse({ text: query }), {
syncToFs: false,
})
- dataTypeIDs = parseDescribeStatementResults(
- (
+ messages.push(
+ ...(
await pg.execProtocol(serializeProtocol.describe({ type: 'S' }), {
syncToFs: false,
})
).messages,
)
} finally {
- await pg.execProtocol(serializeProtocol.sync(), { syncToFs: false })
+ messages.push(
+ ...(await pg.execProtocol(serializeProtocol.sync(), { syncToFs: false }))
+ .messages,
+ )
}
+ const dataTypeIDs = parseDescribeStatementResults(messages)
+
// replace $1, $2, etc with %1L, %2L, etc
const subbedQuery = query.replace(/\$([0-9]+)/g, (_, num) => {
return '%' + num + 'L'
diff --git a/packages/pglite/src/worker/index.ts b/packages/pglite/src/worker/index.ts
index a21b04bda..93a552173 100644
--- a/packages/pglite/src/worker/index.ts
+++ b/packages/pglite/src/worker/index.ts
@@ -1,5 +1,4 @@
import type {
- DataTransferContainer,
DebugLevel,
ExecProtocolResult,
Extensions,
@@ -12,6 +11,7 @@ import type { PGlite } from '../pglite.js'
import { BasePGlite } from '../base.js'
import { toPostgresName, uuid } from '../utils.js'
import { DumpTarCompressionOptions } from '../fs/tarUtils.js'
+import { BackendMessage } from '@electric-sql/pg-protocol/messages'
export type PGliteWorkerOptions =
PGliteOptions & {
@@ -345,6 +345,15 @@ export class PGliteWorker
return await this.#rpc('execProtocol', message)
}
+ /**
+ * Execute a postgres wire protocol message
+ * @param message The postgres wire protocol message to execute
+ * @returns The parsed protocol messages produced by the query
+ */
+ async execProtocolStream(message: Uint8Array): Promise {
+ return await this.#rpc('execProtocolStream', message)
+ }
+
/**
* Sync the database to the filesystem
* @returns Promise that resolves when the database is synced to the filesystem
@@ -639,11 +648,12 @@ function makeWorkerApi(tabId: string, db: PGlite) {
return { messages, data }
}
},
- async execProtocolRaw(
- message: Uint8Array,
- options: { dataTransferContainer?: DataTransferContainer } = {},
- ) {
- const result = await db.execProtocolRaw(message, options)
+ async execProtocolStream(message: Uint8Array) {
+ const messages = await db.execProtocolStream(message)
+ return messages
+ },
+ async execProtocolRaw(message: Uint8Array) {
+ const result = await db.execProtocolRaw(message)
if (result.byteLength !== result.buffer.byteLength) {
// The data is a slice of a larger buffer, this is potentially the whole
// memory of the WASM module. We copy it to a new Uint8Array and return that.
diff --git a/packages/pglite/tests/basic.test.ts b/packages/pglite/tests/basic.test.ts
index 1b75118df..b603b4b9c 100644
--- a/packages/pglite/tests/basic.test.ts
+++ b/packages/pglite/tests/basic.test.ts
@@ -2,7 +2,7 @@ import { describe, it, expect } from 'vitest'
import { expectToThrowAsync, testEsmCjsAndDTC } from './test-utils.ts'
import { identifier } from '../dist/templating.js'
-await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
+await testEsmCjsAndDTC(async (importType) => {
const { PGlite } =
importType === 'esm'
? await import('../dist/index.js')
@@ -12,9 +12,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
describe(`basic`, () => {
it('exec', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = await PGlite.create()
await db.exec(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -51,9 +49,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('query', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.query(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -94,9 +90,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('query templated', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
const tableName = identifier`test`
await db.sql`
CREATE TABLE IF NOT EXISTS ${tableName} (
@@ -137,9 +131,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('types', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.query(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -301,7 +293,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('custom parser and serializer', async () => {
const db = new PGlite({
- defaultDataTransferContainer,
serializers: { 1700: (x) => x.toString() },
parsers: { 1700: (x) => BigInt(x) },
})
@@ -338,9 +329,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('params', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.query(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -374,9 +363,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('array params', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.query(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -429,9 +416,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('error', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await expectToThrowAsync(async () => {
await db.query('SELECT * FROM test;')
}, 'relation "test" does not exist')
@@ -533,9 +518,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('copy to/from blob', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.exec(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -603,9 +586,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('close', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.query(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -620,9 +601,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
it('use same param multiple times', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
await db.exec(`
CREATE TABLE IF NOT EXISTS test (
@@ -649,9 +628,7 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
})
})
it('timezone', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ const db = new PGlite()
const res = await db.query(
`SELECT now(),* FROM pg_timezone_names WHERE name = current_setting('TIMEZONE')`,
diff --git a/packages/pglite/tests/clone.test.js b/packages/pglite/tests/clone.test.js
index 81283f660..d33824ef2 100644
--- a/packages/pglite/tests/clone.test.js
+++ b/packages/pglite/tests/clone.test.js
@@ -3,7 +3,7 @@ import { PGlite } from '../dist/index.js'
describe('clone', () => {
it('clone pglite instance', async () => {
- const pg1 = new PGlite()
+ const pg1 = await PGlite.create()
await pg1.exec(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
@@ -21,7 +21,7 @@ describe('clone', () => {
})
it('clone pglite instance - insert into pg2', async () => {
- const pg1 = new PGlite()
+ const pg1 = await PGlite.create()
await pg1.exec(`
CREATE TABLE IF NOT EXISTS test (
id SERIAL PRIMARY KEY,
diff --git a/packages/pglite/tests/contrib/dict_int.test.js b/packages/pglite/tests/contrib/dict_int.test.js
new file mode 100644
index 000000000..8e44ebd96
--- /dev/null
+++ b/packages/pglite/tests/contrib/dict_int.test.js
@@ -0,0 +1,38 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { dict_int } from '../../dist/contrib/dict_int.js'
+
+it('dict_int', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ dict_int,
+ },
+ })
+
+ // from dict_int.sql
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS dict_int;')
+
+ const lexizeResult1 = await pg.query(`
+select ts_lexize('intdict', '511673');
+`)
+
+ expect(lexizeResult1.rows[0]).toEqual({
+ ts_lexize: ['511673'],
+ })
+
+ const lexizeResult2 = await pg.query(`
+select ts_lexize('intdict', '129');
+`)
+
+ expect(lexizeResult2.rows[0]).toEqual({
+ ts_lexize: ['129'],
+ })
+
+ const lexizeResult3 = await pg.query(`
+select ts_lexize('intdict', '40865854');
+`)
+
+ expect(lexizeResult3.rows[0]).toEqual({
+ ts_lexize: ['408658'],
+ })
+})
diff --git a/packages/pglite/tests/contrib/dict_xsyn.test.ts b/packages/pglite/tests/contrib/dict_xsyn.test.ts
new file mode 100644
index 000000000..ba40bbc48
--- /dev/null
+++ b/packages/pglite/tests/contrib/dict_xsyn.test.ts
@@ -0,0 +1,42 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { dict_xsyn } from '../../dist/contrib/dict_xsyn.js'
+
+it('dict_xsyn', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ dict_xsyn,
+ },
+ })
+
+ // from dict_xsyn.sql
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS dict_xsyn;')
+ await pg.exec(`
+-- default configuration - match first word and return it among with all synonyms
+ALTER TEXT SEARCH DICTIONARY xsyn (RULES='xsyn_sample', KEEPORIG=true, MATCHORIG=true, KEEPSYNONYMS=true, MATCHSYNONYMS=false);
+`)
+
+ const lexizeResult1 = await pg.query(`
+SELECT ts_lexize('xsyn', 'supernova');
+`)
+
+ expect(lexizeResult1.rows[0]).toEqual({
+ ts_lexize: ['supernova', 'sn', 'sne', '1987a'],
+ })
+
+ const lexizeResult2 = await pg.query(`
+SELECT ts_lexize('xsyn', 'sn');
+`)
+
+ expect(lexizeResult2.rows[0]).toEqual({
+ ts_lexize: null,
+ })
+
+ const lexizeResult3 = await pg.query(`
+SELECT ts_lexize('xsyn', 'grb');
+`)
+
+ expect(lexizeResult3.rows[0]).toEqual({
+ ts_lexize: null,
+ })
+})
diff --git a/packages/pglite/tests/contrib/file_fdw.test.js b/packages/pglite/tests/contrib/file_fdw.test.js
new file mode 100644
index 000000000..57603c1db
--- /dev/null
+++ b/packages/pglite/tests/contrib/file_fdw.test.js
@@ -0,0 +1,27 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { file_fdw } from '../../dist/contrib/file_fdw.js'
+
+it('file_fdw', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ file_fdw,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS file_fdw;')
+ await pg.exec('CREATE SERVER file_server FOREIGN DATA WRAPPER file_fdw;')
+ await pg.exec(`CREATE FOREIGN TABLE file_contents (line text)
+ SERVER file_server
+ OPTIONS (
+ filename '/tmp/pglite/bin/postgres',
+ format 'text'
+ );`)
+
+ const contents = await pg.query(`SELECT * FROM file_contents;`)
+ expect(contents.rows).toEqual([
+ {
+ line: 'PGlite is the best!',
+ },
+ ])
+})
diff --git a/packages/pglite/tests/contrib/intarray.test.js b/packages/pglite/tests/contrib/intarray.test.js
new file mode 100644
index 000000000..7da749692
--- /dev/null
+++ b/packages/pglite/tests/contrib/intarray.test.js
@@ -0,0 +1,90 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { intarray } from '../../dist/contrib/intarray.js'
+
+it('intarray', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ intarray,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS intarray;')
+
+ await pg.exec(`
+ CREATE TABLE articles (
+ id SERIAL PRIMARY KEY,
+ title TEXT NOT NULL,
+ tag_ids INTEGER[]);`)
+
+ await pg.exec(`
+ INSERT INTO articles (title, tag_ids) VALUES
+ ('Postgres Performance Tips', '{1,2,3}'),
+ ('Introduction to SQL', '{2,4}'),
+ ('Advanced intarray Usage', '{1,3,5}'),
+ ('Database Normalization', '{4,6}');`)
+
+ const titleTags25 = await pg.query(`
+ SELECT title, tag_ids
+ FROM articles
+ WHERE tag_ids && '{2,5}'::integer[];`)
+
+ expect(titleTags25.rows).toEqual([
+ {
+ title: 'Postgres Performance Tips',
+ tag_ids: [1, 2, 3],
+ },
+ {
+ title: 'Introduction to SQL',
+ tag_ids: [2, 4],
+ },
+ {
+ title: 'Advanced intarray Usage',
+ tag_ids: [1, 3, 5],
+ },
+ ])
+
+ const titleTags12 = await pg.query(`
+ SELECT title, tag_ids
+ FROM articles
+ WHERE tag_ids @> '{1,2}'::integer[];`)
+
+ expect(titleTags12.rows).toEqual([
+ {
+ title: 'Postgres Performance Tips',
+ tag_ids: [1, 2, 3],
+ },
+ ])
+
+ const titleTags1235 = await pg.query(`
+ SELECT title, tag_ids
+ FROM articles
+ WHERE tag_ids <@ '{1,2,3,5}'::integer[];`)
+
+ expect(titleTags1235.rows).toEqual([
+ {
+ title: 'Postgres Performance Tips',
+ tag_ids: [1, 2, 3],
+ },
+ {
+ title: 'Advanced intarray Usage',
+ tag_ids: [1, 3, 5],
+ },
+ ])
+
+ const queryInt = await pg.query(`
+ SELECT title, tag_ids
+ FROM articles
+ WHERE tag_ids @@ '1 & (3|4)'::query_int;`)
+
+ expect(queryInt.rows).toEqual([
+ {
+ title: 'Postgres Performance Tips',
+ tag_ids: [1, 2, 3],
+ },
+ {
+ title: 'Advanced intarray Usage',
+ tag_ids: [1, 3, 5],
+ },
+ ])
+})
diff --git a/packages/pglite/tests/contrib/pageinspect.test.js b/packages/pglite/tests/contrib/pageinspect.test.js
new file mode 100644
index 000000000..204624b60
--- /dev/null
+++ b/packages/pglite/tests/contrib/pageinspect.test.js
@@ -0,0 +1,47 @@
+import { it } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { pageinspect } from '../../dist/contrib/pageinspect.js'
+
+it('pageinspect', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ pageinspect,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS pageinspect;')
+
+ await pg.exec(`
+CREATE TABLE pageinspect_test (
+id serial PRIMARY KEY,
+name text,
+value integer);
+`)
+
+ await pg.exec(`INSERT INTO pageinspect_test (name, value)
+SELECT
+'row_' || g,
+(random() * 100)::int
+FROM generate_series(1, 5) AS g;`)
+
+ await pg.exec('CHECKPOINT;')
+
+ await pg.query(`
+SELECT relfilenode, relname
+FROM pg_class
+WHERE relname = 'pageinspect_test';
+`)
+
+ await pg.query(`
+SELECT *
+FROM heap_page_items(get_raw_page('pageinspect_test', 0));
+`)
+
+ await pg.query(`
+SELECT * FROM page_header(get_raw_page('pageinspect_test', 0));
+`)
+
+ await pg.query(`
+SELECT * FROM pageinspect_test ORDER BY id;
+`)
+})
diff --git a/packages/pglite/tests/contrib/pg_buffercache.test.js b/packages/pglite/tests/contrib/pg_buffercache.test.js
new file mode 100644
index 000000000..669910474
--- /dev/null
+++ b/packages/pglite/tests/contrib/pg_buffercache.test.js
@@ -0,0 +1,35 @@
+import { expect, it } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { pg_buffercache } from '../../dist/contrib/pg_buffercache.js'
+
+it('pg_buffercache', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ pg_buffercache,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_buffercache;')
+
+ const buffers = await pg.query(`
+SELECT n.nspname, c.relname, count(*) AS buffers
+FROM pg_buffercache b JOIN pg_class c
+ON b.relfilenode = pg_relation_filenode(c.oid) AND
+ b.reldatabase IN (0, (SELECT oid FROM pg_database
+ WHERE datname = current_database()))
+JOIN pg_namespace n ON n.oid = c.relnamespace
+GROUP BY n.nspname, c.relname
+ORDER BY 3 DESC
+LIMIT 10;
+`)
+
+ expect(buffers.rows.length).toEqual(10)
+
+ const bufferCacheSummary = await pg.query(
+ `SELECT * FROM pg_buffercache_summary();`,
+ )
+
+ expect(bufferCacheSummary.rows.length).toEqual(1)
+
+ await pg.query(`SELECT * FROM pg_buffercache_usage_counts();`)
+})
diff --git a/packages/pglite/tests/contrib/pg_freespacemap.test.ts b/packages/pglite/tests/contrib/pg_freespacemap.test.ts
new file mode 100644
index 000000000..e9435d238
--- /dev/null
+++ b/packages/pglite/tests/contrib/pg_freespacemap.test.ts
@@ -0,0 +1,31 @@
+import { expect, it } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { pg_freespacemap } from '../../dist/contrib/pg_freespacemap.js'
+
+it('pg_freespacemap', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ pg_freespacemap,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_freespacemap;')
+
+ await pg.exec(`
+CREATE TABLE test_fsm(id serial PRIMARY KEY, data text);
+INSERT INTO test_fsm (data) SELECT repeat('x', 100) FROM generate_series(1, 1000);
+DELETE FROM test_fsm WHERE id <= 500;
+`)
+
+ const freeSpace = await pg.query(`
+SELECT * FROM pg_freespace('test_fsm');
+`)
+
+ expect(freeSpace.rows.length).toBeGreaterThan(0)
+
+ const freeSpace0 = await pg.query(`
+SELECT pg_freespace('test_fsm', 0);
+ `)
+
+ expect(freeSpace0.rows.length).toBeGreaterThan(0)
+})
diff --git a/packages/pglite/tests/contrib/pg_surgery.test.js b/packages/pglite/tests/contrib/pg_surgery.test.js
new file mode 100644
index 000000000..1d3403034
--- /dev/null
+++ b/packages/pglite/tests/contrib/pg_surgery.test.js
@@ -0,0 +1,15 @@
+import { it } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { pg_surgery } from '../../dist/contrib/pg_surgery.js'
+
+it('pg_surgery', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ pg_surgery,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_surgery;')
+
+ // unsure how to test this extension
+})
diff --git a/packages/pglite/tests/contrib/pg_visibility.test.js b/packages/pglite/tests/contrib/pg_visibility.test.js
new file mode 100644
index 000000000..7138d1eb8
--- /dev/null
+++ b/packages/pglite/tests/contrib/pg_visibility.test.js
@@ -0,0 +1,65 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { pg_visibility } from '../../dist/contrib/pg_visibility.js'
+
+it('pg_visibility', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ pg_visibility,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_visibility;')
+
+ await pg.exec(`
+CREATE TABLE IF NOT EXISTS test (
+id SERIAL PRIMARY KEY,
+name TEXT
+);
+`)
+
+ await pg.exec(`
+INSERT INTO test (name) VALUES ('test');
+UPDATE test SET name = 'test2';
+SELECT * FROM test;
+`)
+
+ const visible = await pg.query(`
+-- Show all invisible tuples in a specific table using pg_visibility
+SELECT *
+FROM pg_visibility('test')
+WHERE all_visible = false;
+`)
+
+ expect(visible.rows).toEqual([
+ {
+ blkno: 0,
+ all_visible: false,
+ all_frozen: false,
+ pd_all_visible: false,
+ },
+ ])
+
+ const visibilityMap = await pg.query(`
+-- Check visibility map status for a table
+SELECT *
+FROM pg_visibility_map('test');
+`)
+
+ expect(visibilityMap.rows).toEqual([
+ {
+ blkno: 0,
+ all_visible: false,
+ all_frozen: false,
+ },
+ ])
+
+ const frozen = await pg.query(`
+-- Find pages with all-frozen tuples
+SELECT *
+FROM pg_visibility('test')
+WHERE all_frozen = true;
+`)
+
+ expect(frozen.rows).toEqual([])
+})
diff --git a/packages/pglite/tests/contrib/pg_walinspect.test.js b/packages/pglite/tests/contrib/pg_walinspect.test.js
new file mode 100644
index 000000000..1d2b81734
--- /dev/null
+++ b/packages/pglite/tests/contrib/pg_walinspect.test.js
@@ -0,0 +1,46 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { pg_walinspect } from '../../dist/contrib/pg_walinspect.js'
+
+it('pg_walinspect', async () => {
+ const pg = await PGlite.create({
+ extensions: {
+ pg_walinspect,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_walinspect;')
+
+ await pg.exec(`
+CREATE TABLE test_wal (
+id SERIAL PRIMARY KEY,
+data TEXT
+);
+`)
+
+ const blsn = await pg.query(`
+SELECT pg_current_wal_lsn() AS before_lsn;
+`)
+
+ await pg.exec(`
+INSERT INTO test_wal(data)
+SELECT 'row ' || generate_series::text
+FROM generate_series(1,5);
+`)
+
+ const alsn = await pg.query(`
+SELECT pg_current_wal_lsn() AS after_lsn;
+`)
+
+ const _blsn = blsn.rows[0].before_lsn
+ const _alsn = alsn.rows[0].after_lsn
+ const infos = await pg.query(
+ `
+SELECT * FROM pg_get_wal_block_info($1, $2)
+ORDER BY start_lsn, block_id
+LIMIT 200;`,
+ [_blsn, _alsn],
+ )
+
+ expect(infos.rows.length).toBeGreaterThan(0)
+})
diff --git a/packages/pglite/tests/contrib/unaccent.test.js b/packages/pglite/tests/contrib/unaccent.test.js
new file mode 100644
index 000000000..0bf24a1bc
--- /dev/null
+++ b/packages/pglite/tests/contrib/unaccent.test.js
@@ -0,0 +1,21 @@
+import { it, expect } from 'vitest'
+import { PGlite } from '../../dist/index.js'
+import { unaccent } from '../../dist/contrib/unaccent.js'
+
+it('unaccent', async () => {
+ const pg = new PGlite({
+ extensions: {
+ unaccent,
+ },
+ })
+
+ await pg.exec('CREATE EXTENSION IF NOT EXISTS unaccent;')
+
+ const result = await pg.query(`select ts_lexize('unaccent','Hôtel');`)
+
+ expect(result.rows).toEqual([
+ {
+ ts_lexize: ['Hotel'],
+ },
+ ])
+})
diff --git a/packages/pglite/tests/exec-protocol.test.ts b/packages/pglite/tests/exec-protocol.test.ts
index 32e2d2e36..5e9987a08 100644
--- a/packages/pglite/tests/exec-protocol.test.ts
+++ b/packages/pglite/tests/exec-protocol.test.ts
@@ -1,71 +1,67 @@
import { describe, it, expect, beforeAll, afterAll } from 'vitest'
import { PGlite } from '../dist/index.js'
import { serialize } from '@electric-sql/pg-protocol'
-import { testDTC } from './test-utils.js'
-testDTC(async (defaultDataTransferContainer) => {
- describe('exec protocol', () => {
- let db: PGlite
+describe('exec protocol', () => {
+ let db: PGlite
- beforeAll(async () => {
- db = await PGlite.create({
- defaultDataTransferContainer,
- })
- })
+ beforeAll(async () => {
+ db = await PGlite.create()
+ })
- afterAll(async () => {
- await db.close()
- })
+ afterAll(async () => {
+ await db.close()
+ })
- it('should perform a simple query', async () => {
- const result = await db.execProtocol(serialize.query('SELECT 1'))
- const messageNames = result.messages.map((msg) => msg.name)
- expect(messageNames).toEqual([
- 'rowDescription',
- 'dataRow',
- 'commandComplete',
- 'readyForQuery',
- ])
- })
+ it('should perform a simple query', async () => {
+ const result = await db.execProtocol(serialize.query('SELECT 1'))
+ const messageNames = result.messages.map((msg) => msg.name)
+ expect(messageNames).toEqual([
+ 'rowDescription',
+ 'dataRow',
+ 'commandComplete',
+ 'readyForQuery',
+ ])
+ expect(result.data.length).toEqual(66)
+ })
- it('should perform an extended query', async () => {
- const r1 = await db.execProtocol(serialize.parse({ text: 'SELECT $1' }))
- const messageNames1 = r1.messages.map((msg) => msg.name)
- expect(messageNames1).toEqual([
- 'notice',
- 'parseComplete',
- /* 'readyForQuery',*/
- ])
+ it('should perform an extended query', async () => {
+ const r1 = await db.execProtocol(serialize.parse({ text: 'SELECT $1' }))
+ const messageNames1 = r1.messages.map((msg) => msg.name)
+ expect(messageNames1).toEqual([
+ 'notice',
+ 'parseComplete',
+ /* 'readyForQuery',*/
+ ])
- const r2 = await db.execProtocol(serialize.bind({ values: ['1'] }))
- const messageNames2 = r2.messages.map((msg) => msg.name)
- expect(messageNames2).toEqual(['notice', 'bindComplete'])
+ const r2 = await db.execProtocol(serialize.bind({ values: ['1'] }))
+ const messageNames2 = r2.messages.map((msg) => msg.name)
+ expect(messageNames2).toEqual(['notice', 'bindComplete'])
- const r3 = await db.execProtocol(serialize.describe({ type: 'P' }))
- const messageNames3 = r3.messages.map((msg) => msg.name)
- expect(messageNames3).toEqual(['rowDescription'])
+ const r3 = await db.execProtocol(serialize.describe({ type: 'P' }))
+ const messageNames3 = r3.messages.map((msg) => msg.name)
+ expect(messageNames3).toEqual(['rowDescription'])
- const r4 = await db.execProtocol(serialize.execute({}))
- const messageNames4 = r4.messages.map((msg) => msg.name)
- expect(messageNames4).toEqual(['dataRow', 'commandComplete'])
+ const r4 = await db.execProtocol(serialize.execute({}))
+ const messageNames4 = r4.messages.map((msg) => msg.name)
+ expect(messageNames4).toEqual(['dataRow', 'commandComplete'])
- const r5 = await db.execProtocol(serialize.sync())
- const messageNames5 = r5.messages.map((msg) => msg.name)
- expect(messageNames5).toEqual(['readyForQuery'])
- })
+ const r5 = await db.execProtocol(serialize.sync())
+ const messageNames5 = r5.messages.map((msg) => msg.name)
+ expect(messageNames5).toEqual(['readyForQuery'])
+ })
- it('should handle error', async () => {
- const result = await db.execProtocol(serialize.query('invalid sql'), {
- throwOnError: false,
- })
- const messageNames = result.messages.map((msg) => msg.name)
- expect(messageNames).toEqual(['error', 'readyForQuery'])
+ it('should handle error', async () => {
+ const result = await db.execProtocol(serialize.query('invalid sql'), {
+ throwOnError: false,
})
+ const messageNames = result.messages.map((msg) => msg.name)
+ expect(messageNames).toEqual(['error', 'readyForQuery'])
+ })
- it('should throw error', async () => {
- await expect(
- db.execProtocol(serialize.query('invalid sql')),
- ).rejects.toThrow()
- })
+ it('should throw error', async () => {
+ await expect(
+ db.execProtocol(serialize.query('invalid sql')),
+ ).rejects.toThrow()
})
})
diff --git a/packages/pglite/tests/live.test.ts b/packages/pglite/tests/live.test.ts
index 92f3c194e..f955ee9d7 100644
--- a/packages/pglite/tests/live.test.ts
+++ b/packages/pglite/tests/live.test.ts
@@ -1,7 +1,7 @@
import { describe, it, expect } from 'vitest'
import { testEsmCjsAndDTC } from './test-utils.ts'
-await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
+await testEsmCjsAndDTC(async (importType) => {
const { PGlite } = (
importType === 'esm'
? await import('../dist/index.js')
@@ -17,7 +17,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('basic live query', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -113,7 +112,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('live query on view', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -224,7 +222,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('live query with params', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -310,7 +307,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('incremental query unordered', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -360,7 +356,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('incremental query with non-integer key', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -410,7 +405,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('basic live incremental query', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -507,7 +501,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('basic live incremental query with limit 1', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -550,7 +543,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('live incremental query on view', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -662,7 +654,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('live incremental query with params', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -749,7 +740,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('basic live changes', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -925,7 +915,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('subscribe to live query after creation', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -982,7 +971,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('live changes limit 1', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -1049,7 +1037,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('subscribe to live changes after creation', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -1109,7 +1096,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('live query with windowing', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -1201,7 +1187,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('throws error when only one of offset/limit is provided', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await expect(
@@ -1244,7 +1229,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it("doesn't have a race condition when unsubscribing from a live query", async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -1292,7 +1276,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('works with pattern matching', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
@@ -1348,7 +1331,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
it('basic live query - case sensitive table name', async () => {
const db = await PGlite.create({
extensions: { live },
- defaultDataTransferContainer,
})
await db.exec(`
diff --git a/packages/pglite/tests/message-context-leak.test.ts b/packages/pglite/tests/message-context-leak.test.ts
index 8c44f67e2..92639a0aa 100644
--- a/packages/pglite/tests/message-context-leak.test.ts
+++ b/packages/pglite/tests/message-context-leak.test.ts
@@ -1,5 +1,5 @@
import { describe, it, expect, beforeEach } from 'vitest'
-import { testDTC } from './test-utils.js'
+import { testEsmCjsAndDTC } from './test-utils.js'
import { PGlite } from '../dist/index.js'
// This test isolates the MessageContext leak reported in
@@ -16,12 +16,12 @@ function makeJsonBlob(size: number): string {
return JSON.stringify({ padding: 'x'.repeat(size) })
}
-testDTC(async (defaultDataTransferContainer) => {
+testEsmCjsAndDTC(async () => {
describe('MessageContext reset between queries', () => {
let db: PGlite
beforeEach(async () => {
- db = new PGlite({ defaultDataTransferContainer })
+ db = new PGlite()
await db.exec(`
CREATE TABLE IF NOT EXISTS leak_test (
id SERIAL PRIMARY KEY,
diff --git a/packages/pglite/tests/notify.test.ts b/packages/pglite/tests/notify.test.ts
index 7164255cb..001b947d7 100644
--- a/packages/pglite/tests/notify.test.ts
+++ b/packages/pglite/tests/notify.test.ts
@@ -1,185 +1,173 @@
import { describe, it, expect, vi } from 'vitest'
import { PGlite } from '../dist/index.js'
-import { expectToThrowAsync, testDTC } from './test-utils.js'
+import { expectToThrowAsync } from './test-utils.js'
-testDTC(async (defaultDataTransferContainer) => {
- describe('notify API', () => {
- it('notify', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+describe('notify API', () => {
+ it('notify', async () => {
+ const db = new PGlite()
- await db.listen('test', (payload) => {
- expect(payload).toBe('321')
- })
+ await db.listen('test', (payload) => {
+ expect(payload).toBe('321')
+ })
+
+ await db.exec("NOTIFY test, '321'")
+
+ await new Promise((resolve) => setTimeout(resolve, 1000))
+ })
- await db.exec("NOTIFY test, '321'")
+ it('unlisten', async () => {
+ const db = new PGlite()
- await new Promise((resolve) => setTimeout(resolve, 1000))
+ const unsub = await db.listen('test', () => {
+ throw new Error('Notification received after unsubscribed')
})
- it('unlisten', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ await unsub()
- const unsub = await db.listen('test', () => {
- throw new Error('Notification received after unsubscribed')
- })
+ await db.exec('NOTIFY test')
- await unsub()
+ await new Promise((resolve) => setTimeout(resolve, 1000))
+ })
- await db.exec('NOTIFY test')
+ it('onNotification', async () => {
+ const db = new PGlite()
- await new Promise((resolve) => setTimeout(resolve, 1000))
+ db.onNotification((chan, payload) => {
+ expect(chan).toBe('test')
+ expect(payload).toBe('123')
})
- it('onNotification', async () => {
- const db = new PGlite({
- defaultDataTransferContainer,
- })
+ await db.exec('LISTEN test')
+ await db.exec("NOTIFY test, '123'")
+
+ await new Promise((resolve) => setTimeout(resolve, 1000))
+ })
+
+ it('check notify case sensitivity + special chars as Postgresql', async () => {
+ const pg = new PGlite()
- db.onNotification((chan, payload) => {
- expect(chan).toBe('test')
- expect(payload).toBe('123')
- })
+ const allLower1 = vi.fn()
+ await pg.listen('postgresdefaultlower', allLower1)
+ await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
- await db.exec('LISTEN test')
- await db.exec("NOTIFY test, '123'")
+ const autoLowerTest1 = vi.fn()
+ await pg.listen('PostgresDefaultLower', autoLowerTest1)
+ await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
- await new Promise((resolve) => setTimeout(resolve, 1000))
+ const autoLowerTest2 = vi.fn()
+ await pg.listen('PostgresDefaultLower', autoLowerTest2)
+ await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
+
+ const autoLowerTest3 = vi.fn()
+ await pg.listen('postgresdefaultlower', autoLowerTest3)
+ await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
+
+ const caseSensitive1 = vi.fn()
+ await pg.listen('"tesT2"', caseSensitive1)
+ await pg.exec(`NOTIFY "tesT2", 'paYloAd2'`)
+
+ const caseSensitive2 = vi.fn()
+ await pg.listen('"tesT3"', caseSensitive2)
+ await pg.exec(`NOTIFY tesT3, 'paYloAd2'`)
+
+ const caseSensitive3 = vi.fn()
+ await pg.listen('testNotCalled2', caseSensitive3)
+ await pg.exec(`NOTIFY "testNotCalled2", 'paYloAd2'`)
+
+ const quotedWithSpaces = vi.fn()
+ await pg.listen('"Quoted Channel With Spaces"', quotedWithSpaces)
+ await pg.exec(`NOTIFY "Quoted Channel With Spaces", 'payload1'`)
+
+ const unquotedWithSpaces = vi.fn()
+ await expectToThrowAsync(async () => {
+ await pg.listen('Unquoted Channel With Spaces', unquotedWithSpaces)
+ })
+ await expectToThrowAsync(async () => {
+ await pg.exec(`NOTIFY Unquoted Channel With Spaces, 'payload1'`)
})
- it('check notify case sensitivity + special chars as Postgresql', async () => {
- const pg = new PGlite({
- defaultDataTransferContainer,
- })
+ const otherCharsWithQuotes = vi.fn()
+ await pg.listen('"test&me"', otherCharsWithQuotes)
+ await pg.exec(`NOTIFY "test&me", 'paYloAd2'`)
- const allLower1 = vi.fn()
- await pg.listen('postgresdefaultlower', allLower1)
- await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
+ const otherChars = vi.fn()
+ await expectToThrowAsync(async () => {
+ await pg.listen('test&me', otherChars)
+ })
+ await expectToThrowAsync(async () => {
+ await pg.exec(`NOTIFY test&me, 'payload1'`)
+ })
- const autoLowerTest1 = vi.fn()
- await pg.listen('PostgresDefaultLower', autoLowerTest1)
- await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
+ expect(allLower1).toHaveBeenCalledTimes(4)
+ expect(autoLowerTest1).toHaveBeenCalledTimes(3)
+ expect(autoLowerTest2).toHaveBeenCalledTimes(2)
+ expect(autoLowerTest3).toHaveBeenCalledTimes(1)
+ expect(caseSensitive1).toHaveBeenCalledOnce()
+ expect(caseSensitive2).not.toHaveBeenCalled()
+ expect(caseSensitive3).not.toHaveBeenCalled()
+ expect(otherCharsWithQuotes).toHaveBeenCalledOnce()
+ expect(quotedWithSpaces).toHaveBeenCalledOnce()
+ expect(unquotedWithSpaces).not.toHaveBeenCalled()
+ })
+
+ it('check unlisten case sensitivity + special chars as Postgresql', async () => {
+ const pg = new PGlite()
- const autoLowerTest2 = vi.fn()
- await pg.listen('PostgresDefaultLower', autoLowerTest2)
+ const allLower1 = vi.fn()
+ {
+ const unsub1 = await pg.listen('postgresdefaultlower', allLower1)
await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
+ await unsub1()
+ }
- const autoLowerTest3 = vi.fn()
- await pg.listen('postgresdefaultlower', autoLowerTest3)
+ const autoLowerTest1 = vi.fn()
+ {
+ const unsub2 = await pg.listen('PostgresDefaultLower', autoLowerTest1)
await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
+ await unsub2()
+ }
- const caseSensitive1 = vi.fn()
- await pg.listen('"tesT2"', caseSensitive1)
- await pg.exec(`NOTIFY "tesT2", 'paYloAd2'`)
-
- const caseSensitive2 = vi.fn()
- await pg.listen('"tesT3"', caseSensitive2)
- await pg.exec(`NOTIFY tesT3, 'paYloAd2'`)
-
- const caseSensitive3 = vi.fn()
- await pg.listen('testNotCalled2', caseSensitive3)
- await pg.exec(`NOTIFY "testNotCalled2", 'paYloAd2'`)
+ const autoLowerTest2 = vi.fn()
+ {
+ const unsub3 = await pg.listen('PostgresDefaultLower', autoLowerTest2)
+ await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
+ await unsub3()
+ }
- const quotedWithSpaces = vi.fn()
+ const autoLowerTest3 = vi.fn()
+ {
+ const unsub4 = await pg.listen('postgresdefaultlower', autoLowerTest3)
+ await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
+ await unsub4()
+ }
+
+ const caseSensitive1 = vi.fn()
+ {
+ await pg.listen('"CaSESEnsiTIvE"', caseSensitive1)
+ await pg.exec(`NOTIFY "CaSESEnsiTIvE", 'payload1'`)
+ await pg.unlisten('"CaSESEnsiTIvE"')
+ await pg.exec(`NOTIFY "CaSESEnsiTIvE", 'payload1'`)
+ }
+
+ const quotedWithSpaces = vi.fn()
+ {
await pg.listen('"Quoted Channel With Spaces"', quotedWithSpaces)
await pg.exec(`NOTIFY "Quoted Channel With Spaces", 'payload1'`)
+ await pg.unlisten('"Quoted Channel With Spaces"')
+ }
- const unquotedWithSpaces = vi.fn()
- await expectToThrowAsync(async () => {
- await pg.listen('Unquoted Channel With Spaces', unquotedWithSpaces)
- })
- await expectToThrowAsync(async () => {
- await pg.exec(`NOTIFY Unquoted Channel With Spaces, 'payload1'`)
- })
-
- const otherCharsWithQuotes = vi.fn()
+ const otherCharsWithQuotes = vi.fn()
+ {
await pg.listen('"test&me"', otherCharsWithQuotes)
- await pg.exec(`NOTIFY "test&me", 'paYloAd2'`)
-
- const otherChars = vi.fn()
- await expectToThrowAsync(async () => {
- await pg.listen('test&me', otherChars)
- })
- await expectToThrowAsync(async () => {
- await pg.exec(`NOTIFY test&me, 'payload1'`)
- })
-
- expect(allLower1).toHaveBeenCalledTimes(4)
- expect(autoLowerTest1).toHaveBeenCalledTimes(3)
- expect(autoLowerTest2).toHaveBeenCalledTimes(2)
- expect(autoLowerTest3).toHaveBeenCalledTimes(1)
- expect(caseSensitive1).toHaveBeenCalledOnce()
- expect(caseSensitive2).not.toHaveBeenCalled()
- expect(caseSensitive3).not.toHaveBeenCalled()
- expect(otherCharsWithQuotes).toHaveBeenCalledOnce()
- expect(quotedWithSpaces).toHaveBeenCalledOnce()
- expect(unquotedWithSpaces).not.toHaveBeenCalled()
- })
-
- it('check unlisten case sensitivity + special chars as Postgresql', async () => {
- const pg = new PGlite({
- defaultDataTransferContainer,
- })
-
- const allLower1 = vi.fn()
- {
- const unsub1 = await pg.listen('postgresdefaultlower', allLower1)
- await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
- await unsub1()
- }
-
- const autoLowerTest1 = vi.fn()
- {
- const unsub2 = await pg.listen('PostgresDefaultLower', autoLowerTest1)
- await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
- await unsub2()
- }
-
- const autoLowerTest2 = vi.fn()
- {
- const unsub3 = await pg.listen('PostgresDefaultLower', autoLowerTest2)
- await pg.exec(`NOTIFY postgresdefaultlower, 'payload1'`)
- await unsub3()
- }
-
- const autoLowerTest3 = vi.fn()
- {
- const unsub4 = await pg.listen('postgresdefaultlower', autoLowerTest3)
- await pg.exec(`NOTIFY PostgresDefaultLower, 'payload1'`)
- await unsub4()
- }
-
- const caseSensitive1 = vi.fn()
- {
- await pg.listen('"CaSESEnsiTIvE"', caseSensitive1)
- await pg.exec(`NOTIFY "CaSESEnsiTIvE", 'payload1'`)
- await pg.unlisten('"CaSESEnsiTIvE"')
- await pg.exec(`NOTIFY "CaSESEnsiTIvE", 'payload1'`)
- }
-
- const quotedWithSpaces = vi.fn()
- {
- await pg.listen('"Quoted Channel With Spaces"', quotedWithSpaces)
- await pg.exec(`NOTIFY "Quoted Channel With Spaces", 'payload1'`)
- await pg.unlisten('"Quoted Channel With Spaces"')
- }
-
- const otherCharsWithQuotes = vi.fn()
- {
- await pg.listen('"test&me"', otherCharsWithQuotes)
- await pg.exec(`NOTIFY "test&me", 'payload'`)
- await pg.unlisten('"test&me"')
- }
-
- expect(allLower1).toHaveBeenCalledOnce()
- expect(autoLowerTest1).toHaveBeenCalledOnce()
- expect(autoLowerTest2).toHaveBeenCalledOnce()
- expect(autoLowerTest3).toHaveBeenCalledOnce()
- expect(caseSensitive1).toHaveBeenCalledOnce()
- expect(otherCharsWithQuotes).toHaveBeenCalledOnce()
- })
+ await pg.exec(`NOTIFY "test&me", 'payload'`)
+ await pg.unlisten('"test&me"')
+ }
+
+ expect(allLower1).toHaveBeenCalledOnce()
+ expect(autoLowerTest1).toHaveBeenCalledOnce()
+ expect(autoLowerTest2).toHaveBeenCalledOnce()
+ expect(autoLowerTest3).toHaveBeenCalledOnce()
+ expect(caseSensitive1).toHaveBeenCalledOnce()
+ expect(otherCharsWithQuotes).toHaveBeenCalledOnce()
})
})
diff --git a/packages/pglite/tests/pg_ivm.test.ts b/packages/pglite/tests/pg_ivm.test.ts
index 842bc81a0..60c7aad57 100644
--- a/packages/pglite/tests/pg_ivm.test.ts
+++ b/packages/pglite/tests/pg_ivm.test.ts
@@ -1,7 +1,7 @@
import { describe, it, expect } from 'vitest'
import { testEsmCjsAndDTC } from './test-utils.ts'
-await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
+await testEsmCjsAndDTC(async (importType) => {
const { PGlite } =
importType === 'esm'
? await import('../dist/index.js')
@@ -22,7 +22,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
pg_ivm,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_ivm;')
@@ -43,7 +42,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
pg_ivm,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_ivm;')
@@ -89,7 +87,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
pg_ivm,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_ivm;')
@@ -215,7 +212,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
pg_ivm,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_ivm;')
@@ -309,7 +305,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
pg_ivm,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_ivm;')
@@ -386,7 +381,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
pg_ivm,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS pg_ivm;')
diff --git a/packages/pglite/tests/pgvector.test.ts b/packages/pglite/tests/pgvector.test.ts
index e27bf5d62..28f2dd9e7 100644
--- a/packages/pglite/tests/pgvector.test.ts
+++ b/packages/pglite/tests/pgvector.test.ts
@@ -1,7 +1,7 @@
import { describe, it, expect } from 'vitest'
import { testEsmCjsAndDTC } from './test-utils.ts'
-await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
+await testEsmCjsAndDTC(async (importType) => {
const { PGlite } =
importType === 'esm'
? await import('../dist/index.js')
@@ -22,7 +22,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
vector,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS vector;')
@@ -88,7 +87,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
vector,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS vector;')
@@ -114,7 +112,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
vector,
},
- defaultDataTransferContainer,
})
await pg.close()
@@ -124,7 +121,6 @@ await testEsmCjsAndDTC(async (importType, defaultDataTransferContainer) => {
extensions: {
vector,
},
- defaultDataTransferContainer,
})
await pg.exec('CREATE EXTENSION IF NOT EXISTS vector;')
diff --git a/packages/pglite/tests/query-sizes.test.ts b/packages/pglite/tests/query-sizes.test.ts
index cf1734824..988080b95 100644
--- a/packages/pglite/tests/query-sizes.test.ts
+++ b/packages/pglite/tests/query-sizes.test.ts
@@ -1,5 +1,4 @@
import { describe, it, expect, beforeEach } from 'vitest'
-import { testDTC } from './test-utils.js'
import { PGlite } from '../dist/index.js'
function createStringOfSize(sizeInBytes: number): string {
@@ -70,115 +69,113 @@ function testRowCountAndSize(
}
}
-testDTC(async (defaultDataTransferContainer) => {
- describe('query and exec with different data sizes', () => {
- let db: PGlite
+describe('query and exec with different data sizes', () => {
+ let db: PGlite
- beforeEach(async () => {
- db = new PGlite({ defaultDataTransferContainer, debug: 0 })
+ beforeEach(async () => {
+ db = new PGlite({ debug: 0 })
- await db.exec(`
+ await db.exec(`
CREATE TABLE IF NOT EXISTS size_test (
id SERIAL PRIMARY KEY,
data TEXT
);
`)
- })
+ })
- describe('exec method', () => {
- testEachSize(async (_, sizeInBytes) => {
- const testData = createStringOfSize(sizeInBytes)
+ describe('exec method', () => {
+ testEachSize(async (_, sizeInBytes) => {
+ const testData = createStringOfSize(sizeInBytes)
- const results = await db.exec(`
+ const results = await db.exec(`
INSERT INTO size_test (data) VALUES ('${testData}');
SELECT * FROM size_test;
`)
- expect(results).toHaveLength(2)
- expect(results[1].rows).toHaveLength(1)
- expect(results[1].rows[0].data).toBe(testData)
- expect(results[1].rows[0].data.length).toBe(sizeInBytes)
- })
+ expect(results).toHaveLength(2)
+ expect(results[1].rows).toHaveLength(1)
+ expect(results[1].rows[0].data).toBe(testData)
+ expect(results[1].rows[0].data.length).toBe(sizeInBytes)
})
+ })
- describe('query method without params', () => {
- testEachSize(async (_, sizeInBytes) => {
- const testData = createStringOfSize(sizeInBytes)
+ describe('query method without params', () => {
+ testEachSize(async (_, sizeInBytes) => {
+ const testData = createStringOfSize(sizeInBytes)
- await db.query(`INSERT INTO size_test (data) VALUES ('${testData}');`)
+ await db.query(`INSERT INTO size_test (data) VALUES ('${testData}');`)
- const result = await db.query<{ id: number; data: string }>(
- 'SELECT * FROM size_test;',
- )
+ const result = await db.query<{ id: number; data: string }>(
+ 'SELECT * FROM size_test;',
+ )
- expect(result.rows).toHaveLength(1)
- expect(result.rows[0].data).toBe(testData)
- expect(result.rows[0].data.length).toBe(sizeInBytes)
- })
+ expect(result.rows).toHaveLength(1)
+ expect(result.rows[0].data).toBe(testData)
+ expect(result.rows[0].data.length).toBe(sizeInBytes)
})
+ })
- describe('query method with params', () => {
- testEachSize(async (_, sizeInBytes) => {
- const testData = createStringOfSize(sizeInBytes)
+ describe('query method with params', () => {
+ testEachSize(async (_, sizeInBytes) => {
+ const testData = createStringOfSize(sizeInBytes)
- await db.query('INSERT INTO size_test (data) VALUES ($1);', [testData])
+ await db.query('INSERT INTO size_test (data) VALUES ($1);', [testData])
- const result = await db.query<{ id: number; data: string }>(
- 'SELECT * FROM size_test WHERE data = $1;',
- [testData],
- )
+ const result = await db.query<{ id: number; data: string }>(
+ 'SELECT * FROM size_test WHERE data = $1;',
+ [testData],
+ )
- expect(result.rows).toHaveLength(1)
- expect(result.rows[0].data).toBe(testData)
- expect(result.rows[0].data.length).toBe(sizeInBytes)
- })
+ expect(result.rows).toHaveLength(1)
+ expect(result.rows[0].data).toBe(testData)
+ expect(result.rows[0].data.length).toBe(sizeInBytes)
})
})
+})
- describe('query with combinations of row counts and data sizes', () => {
- let db: PGlite
+describe('query with combinations of row counts and data sizes', () => {
+ let db: PGlite
- beforeEach(async () => {
- db = new PGlite({ defaultDataTransferContainer })
- })
+ beforeEach(async () => {
+ db = new PGlite()
+ })
- testRowCountAndSize(async (_, rowCount, __, dataSize) => {
- const testData = createStringOfSize(dataSize)
+ testRowCountAndSize(async (_, rowCount, __, dataSize) => {
+ const testData = createStringOfSize(dataSize)
- const result = await db.query<{ id: number; data: string }>(`
+ const result = await db.query<{ id: number; data: string }>(`
SELECT generate_series(1, ${rowCount}) as id, '${testData}' as data;
`)
- expect(result.rows).toHaveLength(rowCount)
+ expect(result.rows).toHaveLength(rowCount)
- expect(result.rows[0].data).toBe(testData)
- expect(result.rows[0].data.length).toBe(dataSize)
- expect(result.rows[rowCount - 1].data).toBe(testData)
- expect(result.rows[rowCount - 1].data.length).toBe(dataSize)
-
- if (rowCount > 5) {
- const middleIndex = Math.floor(rowCount / 2)
- expect(result.rows[middleIndex].data).toBe(testData)
- expect(result.rows[middleIndex].data.length).toBe(dataSize)
- }
- })
+ expect(result.rows[0].data).toBe(testData)
+ expect(result.rows[0].data.length).toBe(dataSize)
+ expect(result.rows[rowCount - 1].data).toBe(testData)
+ expect(result.rows[rowCount - 1].data.length).toBe(dataSize)
+
+ if (rowCount > 5) {
+ const middleIndex = Math.floor(rowCount / 2)
+ expect(result.rows[middleIndex].data).toBe(testData)
+ expect(result.rows[middleIndex].data.length).toBe(dataSize)
+ }
})
+})
- describe('query with postgres-generated data of different sizes', () => {
- let db: PGlite
+describe('query with postgres-generated data of different sizes', () => {
+ let db: PGlite
- beforeEach(async () => {
- db = new PGlite({ defaultDataTransferContainer })
- })
+ beforeEach(async () => {
+ db = new PGlite()
+ })
- testEachSize(async (_, sizeInBytes) => {
- const result = await db.query<{ id: number; data: string }>(`
+ testEachSize(async (_, sizeInBytes) => {
+ const result = await db.query<{ id: number; data: string }>(`
SELECT 1 as id, repeat('a', ${sizeInBytes}) as data;
`)
- expect(result.rows).toHaveLength(1)
- expect(result.rows[0].data.length).toBe(sizeInBytes)
- expect(result.rows[0].data).toBe('a'.repeat(sizeInBytes))
- })
+ expect(result.rows).toHaveLength(1)
+ expect(result.rows[0].data.length).toBe(sizeInBytes)
+ expect(result.rows[0].data).toBe('a'.repeat(sizeInBytes))
})
})
diff --git a/packages/pglite/tests/test-utils.ts b/packages/pglite/tests/test-utils.ts
index 2318e2615..772e935cf 100644
--- a/packages/pglite/tests/test-utils.ts
+++ b/packages/pglite/tests/test-utils.ts
@@ -1,5 +1,4 @@
import { describe, expect } from 'vitest'
-import type { DataTransferContainer } from '../dist/index.js'
declare global {
let Bun: any
@@ -28,40 +27,16 @@ export async function expectToThrowAsync(
}
export async function testEsmCjsAndDTC(
- fn: (
- importType: 'esm' | 'cjs',
- defaultDataTransferContainer: DataTransferContainer,
- ) => Promise,
+ fn: (importType: 'esm' | 'cjs') => Promise,
) {
describe('esm import', async () => {
- describe('cma data transfer container', async () => {
- await fn('esm', 'cma')
- })
- describe('file data transfer container', async () => {
- await fn('esm', 'file')
- })
+ await fn('esm')
})
// don't run cjs tests for Bun
if (typeof Bun !== 'undefined') return
describe('cjs import', async () => {
- describe('cma data transfer container', async () => {
- await fn('cjs', 'cma')
- })
- describe('file data transfer container', async () => {
- await fn('cjs', 'file')
- })
- })
-}
-
-export async function testDTC(
- fn: (defaultDataTransferContainer: DataTransferContainer) => Promise,
-) {
- describe('cma data transfer container', async () => {
- await fn('cma')
- })
- describe('file data transfer container', async () => {
- await fn('file')
+ await fn('cjs')
})
}
diff --git a/packages/pglite/tests/user.test.ts b/packages/pglite/tests/user.test.ts
index e9a0f9fad..6d27f10a3 100644
--- a/packages/pglite/tests/user.test.ts
+++ b/packages/pglite/tests/user.test.ts
@@ -2,21 +2,17 @@ import { describe, it, expect } from 'vitest'
import { expectToThrowAsync } from './test-utils.js'
import * as fs from 'fs/promises'
import { PGlite } from '../dist/index.js'
-import { testDTC } from './test-utils.js'
-testDTC(async (defaultDataTransferContainer) => {
- describe('user', () => {
- it('user switching', async () => {
- await fs.rm('./pgdata-test-user', { force: true, recursive: true })
+describe('user', () => {
+ it('user switching', async () => {
+ await fs.rm('./pgdata-test-user', { force: true, recursive: true })
- const db = new PGlite('./pgdata-test-user', {
- defaultDataTransferContainer,
- })
- await db.exec(
- "CREATE USER test_user WITH PASSWORD 'md5abdbecd56d5fbd2cdaee3d0fa9e4f434';",
- )
+ const db = new PGlite('./pgdata-test-user')
+ await db.exec(
+ "CREATE USER test_user WITH PASSWORD 'md5abdbecd56d5fbd2cdaee3d0fa9e4f434';",
+ )
- await db.exec(`
+ await db.exec(`
CREATE TABLE test (
id SERIAL PRIMARY KEY,
number INT
@@ -24,7 +20,7 @@ testDTC(async (defaultDataTransferContainer) => {
INSERT INTO test (number) VALUES (42);
`)
- await db.exec(`
+ await db.exec(`
CREATE TABLE test2 (
id SERIAL PRIMARY KEY,
number INT
@@ -32,48 +28,46 @@ testDTC(async (defaultDataTransferContainer) => {
INSERT INTO test2 (number) VALUES (42);
`)
- await db.exec('ALTER TABLE test2 OWNER TO test_user;')
+ const test = await db.query('SELECT * FROM test2;')
+ expect(test.rows).toEqual([{ id: 1, number: 42 }])
- await db.close()
+ await db.exec('ALTER TABLE test2 OWNER TO test_user;')
- const db2 = new PGlite({
- dataDir: './pgdata-test-user',
- username: 'test_user',
- defaultDataTransferContainer,
- })
+ await db.close()
- const currentUsername = await db2.query('SELECT current_user;')
- expect(currentUsername.rows).toEqual([{ current_user: 'test_user' }])
+ const db2 = new PGlite({
+ dataDir: './pgdata-test-user',
+ username: 'test_user',
+ })
- await expectToThrowAsync(async () => {
- await db2.query('SELECT * FROM test;')
- }, 'permission denied for table test')
+ const currentUsername = await db2.query('SELECT current_user;')
+ expect(currentUsername.rows).toEqual([{ current_user: 'test_user' }])
- const test2 = await db2.query('SELECT * FROM test2;')
- expect(test2.rows).toEqual([{ id: 1, number: 42 }])
+ await expectToThrowAsync(async () => {
+ await db2.query('SELECT * FROM test;')
+ }, 'permission denied for table test')
- await expectToThrowAsync(async () => {
- await db2.query('SET ROLE postgres;')
- }, 'permission denied to set role "postgres"')
- })
+ const test2 = await db2.query('SELECT * FROM test2;')
+ expect(test2.rows).toEqual([{ id: 1, number: 42 }])
+
+ await expectToThrowAsync(async () => {
+ await db2.query('SET ROLE postgres;')
+ }, 'permission denied to set role "postgres"')
+ })
- it('switch to user created after initial run', async () => {
- await fs.rm('./pgdata-test-user', { force: true, recursive: true })
+ it('switch to user created after initial run', async () => {
+ await fs.rm('./pgdata-test-user', { force: true, recursive: true })
- const db0 = new PGlite('./pgdata-test-user', {
- defaultDataTransferContainer,
- })
- await db0.waitReady
- await db0.close()
+ const db0 = new PGlite('./pgdata-test-user')
+ await db0.waitReady
+ await db0.close()
- const db = new PGlite('./pgdata-test-user', {
- defaultDataTransferContainer,
- })
- await db.exec(
- "CREATE USER test_user WITH PASSWORD 'md5abdbecd56d5fbd2cdaee3d0fa9e4f434';",
- )
+ const db = new PGlite('./pgdata-test-user')
+ await db.exec(
+ "CREATE USER test_user WITH PASSWORD 'md5abdbecd56d5fbd2cdaee3d0fa9e4f434';",
+ )
- await db.exec(`
+ await db.exec(`
CREATE TABLE test (
id SERIAL PRIMARY KEY,
number INT
@@ -81,7 +75,7 @@ testDTC(async (defaultDataTransferContainer) => {
INSERT INTO test (number) VALUES (42);
`)
- await db.exec(`
+ await db.exec(`
CREATE TABLE test2 (
id SERIAL PRIMARY KEY,
number INT
@@ -89,53 +83,50 @@ testDTC(async (defaultDataTransferContainer) => {
INSERT INTO test2 (number) VALUES (42);
`)
- await db.exec('ALTER TABLE test2 OWNER TO test_user;')
+ await db.exec('ALTER TABLE test2 OWNER TO test_user;')
- await db.close()
+ await db.close()
- const db2 = new PGlite({
- dataDir: './pgdata-test-user',
- username: 'test_user',
- })
+ const db2 = new PGlite({
+ dataDir: './pgdata-test-user',
+ username: 'test_user',
+ })
- const currentUsername = await db2.query('SELECT current_user;')
- expect(currentUsername.rows).toEqual([{ current_user: 'test_user' }])
+ const currentUsername = await db2.query('SELECT current_user;')
+ expect(currentUsername.rows).toEqual([{ current_user: 'test_user' }])
- await expectToThrowAsync(async () => {
- await db2.query('SELECT * FROM test;')
- }, 'permission denied for table test')
+ await expectToThrowAsync(async () => {
+ await db2.query('SELECT * FROM test;')
+ }, 'permission denied for table test')
- const test2 = await db2.query('SELECT * FROM test2;')
- expect(test2.rows).toEqual([{ id: 1, number: 42 }])
+ const test2 = await db2.query('SELECT * FROM test2;')
+ expect(test2.rows).toEqual([{ id: 1, number: 42 }])
- await expectToThrowAsync(async () => {
- await db2.query('SET ROLE postgres;')
- }, 'permission denied to set role "postgres"')
- })
+ await expectToThrowAsync(async () => {
+ await db2.query('SET ROLE postgres;')
+ }, 'permission denied to set role "postgres"')
+ })
+
+ it('create database and switch to it', async () => {
+ await fs.rm('./pgdata-test-user', { force: true, recursive: true })
+
+ const db = new PGlite('./pgdata-test-user')
+ await db.exec(
+ "CREATE USER test_user WITH PASSWORD 'md5abdbecd56d5fbd2cdaee3d0fa9e4f434';",
+ )
- it('create database and switch to it', async () => {
- await fs.rm('./pgdata-test-user', { force: true, recursive: true })
-
- const db = new PGlite('./pgdata-test-user', {
- defaultDataTransferContainer,
- })
- await db.exec(
- "CREATE USER test_user WITH PASSWORD 'md5abdbecd56d5fbd2cdaee3d0fa9e4f434';",
- )
-
- await db.exec('CREATE DATABASE test_db OWNER test_user;')
- await db.close()
-
- const db2 = new PGlite({
- dataDir: './pgdata-test-user',
- username: 'test_user',
- database: 'test_db',
- })
-
- const currentUsername = await db2.query('SELECT current_user;')
- expect(currentUsername.rows).toEqual([{ current_user: 'test_user' }])
- const currentDatabase = await db2.query('SELECT current_database();')
- expect(currentDatabase.rows).toEqual([{ current_database: 'test_db' }])
+ await db.exec('CREATE DATABASE test_db OWNER test_user;')
+ await db.close()
+
+ const db2 = new PGlite({
+ dataDir: './pgdata-test-user',
+ username: 'test_user',
+ database: 'test_db',
})
+
+ const currentUsername = await db2.query('SELECT current_user;')
+ expect(currentUsername.rows).toEqual([{ current_user: 'test_user' }])
+ const currentDatabase = await db2.query('SELECT current_database();')
+ expect(currentDatabase.rows).toEqual([{ current_database: 'test_db' }])
})
})
diff --git a/postgres-pglite b/postgres-pglite
index 9bea834b4..1a0bdab38 160000
--- a/postgres-pglite
+++ b/postgres-pglite
@@ -1 +1 @@
-Subproject commit 9bea834b44d63ae7925a966e4ebbd1aeebd8cac1
+Subproject commit 1a0bdab383a0572804a6577432da6865a5bec314