diff --git a/.eslintrc.json b/.eslintrc.json
index 9fc6ad36..4a50f178 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -5,7 +5,7 @@
"node": true
},
"parserOptions": {
- "ecmaVersion": 9,
+ "ecmaVersion": 2020,
"sourceType": "module"
},
"rules": {
@@ -93,6 +93,7 @@
"Property": true,
"VariableDeclarator": true,
"ImportDeclaration": true,
+ "TernaryExpressions": true,
"Comments": true
}
}
@@ -221,7 +222,7 @@
],
"max-params": [
2,
- 4
+ 5
],
"max-statements-per-line": 0,
"new-cap": [
diff --git a/README.md b/README.md
index e2827cb0..6dd9463e 100644
--- a/README.md
+++ b/README.md
@@ -1,136 +1,117 @@
-
+
-- [🚀 Fastest full featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results)
-- 🚯 1250 LOC - 0 dependencies
+- [🚀 Fastest full-featured node & deno client](https://github.com/porsager/postgres-benchmarks#results)
- 🏷 ES6 Tagged Template Strings at the core
- 🏄‍♀️ Simple surface API
-- 💬 Chat on [Gitter](https://gitter.im/porsager/postgres)
+- 🖊️ Dynamic query support
+- 💬 Chat and help on [Gitter](https://gitter.im/porsager/postgres)
## Getting started
-
+
-**Install**
+### Installation
```bash
$ npm install postgres
```
-**Use**
+### Usage
+Create your `sql` database instance
```js
// db.js
-const postgres = require('postgres')
+import postgres from 'postgres'
-const sql = postgres({ ...options }) // will default to the same as psql
+const sql = postgres({ /* options */ }) // will use psql environment variables
-module.exports = sql
+export default sql
```
+Simply import for use elsewhere
```js
-// other.js
-const sql = require('./db.js')
+// users.js
+import sql from './db.js'
+
+async function getUsersOver(age) {
+ const users = await sql`
+ select
+ name,
+ age
+ from users
+ where age > ${ age }
+ `
+ // users = Result [{ name: "Walter", age: 80 }, { name: 'Murray', age: 68 }, ...]
+ return users
+}
-const users = await sql`
- select name, age from users
-`
-// users: [{ name: 'Murray', age: 68 }, { name: 'Walter', age: 78 }]
+
+async function insertUser({ name, age }) {
+  const users = await sql`
+ insert into users
+ (name, age)
+ values
+ (${ name }, ${ age })
+ returning name, age
+ `
+ // users = Result [{ name: "Murray", age: 68 }]
+ return users
+}
```
-## Connection options `postgres([url], [options])`
+## Table of Contents
+
+* [Connection](#connection)
+* [Queries](#queries)
+* [Building queries](#building-queries)
+* [Advanced query methods](#advanced-query-methods)
+* [Transactions](#transactions)
+* [Listen & notify](#listen--notify)
+* [Realtime subscribe](#realtime-subscribe)
+* [Numbers, bigint, numeric](#numbers-bigint-numeric)
+* [Connection details](#connection-details)
+* [Custom Types](#custom-types)
+* [Teardown / Cleanup](#teardown--cleanup)
+* [Error handling](#error-handling)
+* [TypeScript support](#typescript-support)
+
-You can use either a `postgres://` url connection string or the options to define your database connection properties. Options in the object will override any present in the url.
+## Connection
+
+### `postgres([url], [options])`
+
+You can use either a `postgres://` url connection string or the options to define your database connection properties. Options in the object will override any present in the url. Options will fall back to the same environment variables as psql.
```js
const sql = postgres('postgres://username:password@host:port/database', {
host : '', // Postgres ip address[es] or domain name[s]
port : 5432, // Postgres server port[s]
- path : '', // unix socket path (usually '/tmp')
database : '', // Name of database to connect to
username : '', // Username of database user
password : '', // Password of database user
- ssl : false, // true, prefer, require, tls.connect options
- max : 10, // Max number of connections
- idle_timeout : 0, // Idle connection timeout in seconds
- connect_timeout : 30, // Connect timeout in seconds
- no_prepare : false, // No automatic creation of prepared statements
- types : [], // Array of custom types, see more below
- onnotice : fn, // Defaults to console.log
- onparameter : fn, // (key, value) when server param change
- debug : fn, // Is called with (connection, query, params)
- transform : {
- column : fn, // Transforms incoming column names
- value : fn, // Transforms incoming row values
- row : fn // Transforms entire rows
- },
- connection : {
- application_name : 'postgres.js', // Default application_name
- ... // Other connection parameters
- },
- target_session_attrs : null, // Use 'read-write' with multiple hosts to
- // ensure only connecting to primary
- fetch_array_types : true, // Disable automatically fetching array types
- // on initial connection.
+ ...and more
})
```
-### SSL
-More info for the `ssl` option can be found in the [Node.js docs for tls connect options](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_new_tls_tlssocket_socket_options).
-
-Although it is [vulnerable to MITM attacks](https://security.stackexchange.com/a/229297/174913), a common configuration for the `ssl` option for some cloud providers like Heroku is to set `rejectUnauthorized` to `false` (if `NODE_ENV` is `production`):
-
-```js
-const sql =
- process.env.NODE_ENV === 'production'
- ? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates"
- // https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl
- postgres({ ssl: { rejectUnauthorized: false } })
- : postgres();
-```
-
-### Multi host connections - High Availability (HA)
-
-Connection uri strings with multiple hosts works like in [`psql multiple host uris`](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS)
-
-Connecting to the specified hosts/ports will be tried in order, and on a successfull connection retries will be reset. This ensures that hosts can come up and down seamless to your application.
-
-If you specify `target_session_attrs: 'read-write'` or `PGTARGETSESSIONATTRS=read-write` Postgres.js will only connect to a writeable host allowing for zero down time failovers.
-
-### Auto fetching of array types
-
-When Postgres.js first connects to the database it automatically fetches array type information.
-
-If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled.
-
-You can disable fetching array types by setting `fetch_array_types` to `false` when creating an instance.
+More options can be found in the [Connection details section](#connection-details).
-### Environment Variables for Options
+## Queries
-It is also possible to connect to the database without a connection string or any options. Postgres.js will fall back to the common environment variables used by `psql` as in the table below:
+### ```await sql`...` -> Result[]```
-```js
-const sql = postgres()
-```
+Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using tagged template literals benefits developers by:
-| Option | Environment Variables |
-| ----------------- | ------------------------ |
-| `host` | `PGHOST` |
-| `port` | `PGPORT` |
-| `database` | `PGDATABASE` |
-| `username` | `PGUSERNAME` or `PGUSER` |
-| `password` | `PGPASSWORD` |
-| `idle_timeout` | `PGIDLE_TIMEOUT` |
-| `connect_timeout` | `PGCONNECT_TIMEOUT` |
+1. **Enforcing** safe query generation
+2. Giving the ` sql`` ` function powerful [utility](#dynamic-inserts) and [query building](#building-queries) features.
-## Query ```sql` ` -> Promise```
+Any generic value will be serialized according to an inferred type, and replaced with PostgreSQL protocol placeholders `$1, $2, ...`. The parameters are then sent separately to the database, which handles escaping & casting.
-A query will always return a `Promise` which resolves to a results array `[...]{ count, command, columns }`. Destructuring is great to immediately access the first element.
+All queries will return a `Result` array, with each row as an object mapping column names to values.
```js
-
-const [new_user] = await sql`
+const xs = await sql`
insert into users (
name, age
) values (
@@ -140,16 +121,18 @@ const [new_user] = await sql`
returning *
`
-// new_user = { user_id: 1, name: 'Murray', age: 68 }
+// xs = [{ user_id: 1, name: 'Murray', age: 68 }]
```
-#### Query parameters
+> Please note that queries are first executed when `awaited` – or manually by using `.execute()`.
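+
+A minimal sketch of manual execution, assuming the same `users` table as above:
+```js
+// .execute() sends the query right away instead of waiting for await
+const query = sql`select name, age from users`.execute()
+// the query is still awaitable and resolves to the same Result
+const users = await query
+```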
-Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use JS tagged template literals as usual.
+### Query parameters
-```js
+Parameters are automatically extracted and handled by the database so that SQL injection isn't possible. No special handling is necessary, simply use tagged template literals as usual. **Dynamic queries and query building are covered in [Building queries](#building-queries).**
-let search = 'Mur'
+```js
+const name = 'Mur'
+ , age = 60
const users = await sql`
select
@@ -157,373 +140,321 @@ const users = await sql`
age
from users
where
- name like ${ search + '%' }
+ name like ${ name + '%' }
+ and age > ${ age }
`
-
// users = [{ name: 'Murray', age: 68 }]
-
```
-> Be careful with quotation marks here. Because Postgres infers the types, you don't need to wrap your interpolated parameters in quotes like `'${name}'`. In fact, this will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter.
+> Be careful with quotation marks here. Because Postgres infers column types, you do not need to wrap your interpolated parameters in quotes like `'${name}'`. This will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter.
-#### Arrays
-Arrays will be handled by replacement parameters too, so `where in` queries are also simple.
+### Dynamic column selection
```js
+const columns = ['name', 'age']
-const users = await sql`
+sql`
select
- *
+ ${ sql(columns) }
from users
- where age in (${ [68, 75, 23] })
`
+// Which results in:
+select "name", "age" from users
```
-### TypeScript support
-
-`postgres` has TypeScript support. You can pass a row list type for your queries in this way:
-```ts
-interface User {
- id: number
- name: string
-}
-
-const users = await sql`SELECT * FROM users`
-users[0].id // ok => number
-users[1].name // ok => string
-users[0].invalid // fails: `invalid` does not exists on `User`
-```
-
-However, be sure to check the array length to avoid accessing properties of `undefined` rows:
-```ts
-const users = await sql`SELECT * FROM users WHERE id = ${id}`
-if (!users.length)
- throw new Error('Not found')
-return users[0]
-```
-
-You can also prefer destructuring when you only care about a fixed number of rows.
-In this case, we recommand you to prefer using tuples to handle `undefined` properly:
-```ts
-const [user]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}`
-if (!user) // => User | undefined
- throw new Error('Not found')
-return user // => User
-
-// NOTE:
-const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]`
-// vs
-const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // ok but should fail
-```
-
-All the public API is typed. Also, TypeScript support is still in beta. Feel free to open an issue if you have trouble with types.
-
-## Stream ```sql` `.stream(fn) -> Promise```
-
-If you want to handle rows returned by a query one by one, you can use `.stream` which returns a promise that resolves once there are no more rows.
-```js
-
-await sql`
- select created_at, name from events
-`.stream(row => {
- // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' }
-})
-
-// No more rows
-
-```
-
-## Cursor ```sql` `.cursor([rows = 1], fn) -> Promise```
-
-Use cursors if you need to throttle the amount of rows being returned from a query. New results won't be requested until the promise / async callback function has resolved.
-
-```js
-
-await sql`
- select * from generate_series(1,4) as x
-`.cursor(async row => {
- // row = { x: 1 }
- await http.request('https://example.com/wat', { row })
-})
-
-// No more rows
-
-```
-
-A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument. That is usefull if you can do work with the rows in parallel like in this example:
-
-```js
-
-await sql`
- select * from generate_series(1,1000) as x
-`.cursor(10, async rows => {
- // rows = [{ x: 1 }, { x: 2 }, ... ]
- await Promise.all(rows.map(row =>
- http.request('https://example.com/wat', { row })
- ))
-})
-
-```
-
-If an error is thrown inside the callback function no more rows will be requested and the promise will reject with the thrown error.
-
-You can also stop receiving any more rows early by returning an end token `sql.END` from the callback function.
+### Dynamic inserts
```js
+const user = {
+ name: 'Murray',
+ age: 68
+}
-await sql`
- select * from generate_series(1,1000) as x
-`.cursor(row => {
- return Math.random() > 0.9 && sql.END
-})
+sql`
+ insert into users ${
+ sql(user, 'name', 'age')
+ }
+`
+// Which results in:
+insert into users ("name", "age") values ($1, $2)
```
-## Raw ```sql``.raw()```
-
-Using `.raw()` will return rows as an array with `Buffer` values for each column, instead of objects.
+**You can omit column names and simply execute `sql(user)` to get all the fields from the object as columns**. Be careful not to allow users to supply columns that you do not want to be inserted.
-This can be useful to receive identical named columns, or for specific performance / transformation reasons. The column definitions are still included on the result array with access to parsers for each column.
-
-## Listen and notify
-
-When you call listen, a dedicated connection will automatically be made to ensure that you receive notifications in real time. This connection will be used for any further calls to listen. Listen returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active.
+#### Multiple inserts in one query
+If you need to insert multiple rows at the same time it's also much faster to do it with a single `insert`. Simply pass an array of objects to `sql()`.
```js
+const users = [{
+ name: 'Murray',
+ age: 68,
+ garbage: 'ignore'
+},
+{
+ name: 'Walter',
+ age: 80
+}]
-await sql.listen('news', payload => {
- const json = JSON.parse(payload)
- console.log(json.this) // logs 'is'
-})
-
-```
+sql`insert into users ${ sql(users, 'name', 'age') }`
-Notify can be done as usual in sql, or by using the `sql.notify` method.
-```js
+// Is translated to:
+insert into users ("name", "age") values ($1, $2), ($3, $4)
-sql.notify('news', JSON.stringify({ no: 'this', is: 'news' }))
+// Here you can also omit column names which will use object keys as columns
+sql`insert into users ${ sql(users) }`
+// Which results in:
+insert into users ("name", "age") values ($1, $2), ($3, $4)
```
-## Tagged template function ``` sql`` ```
-[Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) are not just ordinary template literal strings. They allow the function to handle any parameters within before interpolation. This means that they can be used to enforce a safe way of writing queries, which is what Postgres.js does. Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholders `$1, $2, ...` and then sent to the database as a parameter to let it handle any need for escaping / casting.
-
-This also means you cannot write dynamic queries or concat queries together by simple string manipulation. To enable dynamic queries in a safe way, the `sql` function doubles as a regular function which escapes any value properly. It also includes overloads for common cases of inserting, selecting, updating and querying.
-
-## Dynamic query helpers - `sql()` inside tagged template
-
-Postgres.js has a safe, ergonomic way to aid you in writing queries. This makes it easier to write dynamic `insert`, `select` and `update` queries, and pass `where` parameters.
-
-#### Insert
-
+### Dynamic columns in updates
+This is also useful for update queries
```js
-
const user = {
+ id: 1,
name: 'Murray',
age: 68
}
sql`
- insert into users ${
+ update users set ${
sql(user, 'name', 'age')
}
+ where user_id = ${ user.id }
`
-// Is translated into this query:
-insert into users (name, age) values ($1, $2)
+// Which results in:
+update users set "name" = $1, "age" = $2 where user_id = $3
+```
+### Dynamic values and `where in`
+Value lists can also be created dynamically, making `where in` queries simple too.
+```js
+const users = await sql`
+ select
+ *
+ from users
+ where age in ${ sql([68, 75, 23]) }
+`
```
-You can leave out the column names and simply do `sql(user)` if you want to get all fields from the object as columns, but be careful not to allow users to supply columns you don't want.
+or
+```js
+const [{ a, b, c }] = await sql`
+ select
+ *
+ from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c)
+`
+```
-#### Multiple inserts in one query
-If you need to insert multiple rows at the same time it's also much faster to do it with a single `insert`. Simply pass an array of objects to `sql()`.
+## Building queries
+
+Postgres.js features a simple dynamic query builder by conditionally appending/omitting query fragments.
+It works by nesting ` sql`` ` fragments within other ` sql`` ` calls or fragments. This allows you to build dynamic queries safely without risking SQL injection through the usual string concatenation.
+### Partial queries
```js
+const olderThan = x => sql`and age > ${ x }`
-const users = [{
- name: 'Murray',
- age: 68,
- garbage: 'ignore'
-}, {
- name: 'Walter',
- age: 78
-}]
+const filterAge = true
sql`
- insert into users ${
- sql(users, 'name', 'age')
+ select
+ *
+ from users
+ where name is not null ${
+ filterAge
+ ? olderThan(50)
+ : sql``
}
`
+// Which results in:
+select * from users where name is not null
+// Or
+select * from users where name is not null and age > 50
```
-#### Update
-
-This is also useful for update queries
+### Dynamic filters
```js
-
-const user = {
- id: 1,
- name: 'Muray'
-}
-
sql`
- update users set ${
- sql(user, 'name')
- } where
- id = ${ user.id }
+ select
+ *
+ from users ${
+ id
+ ? sql`where user_id = ${ id }`
+ : sql``
+ }
`
-// Is translated into this query:
-update users set name = $1 where id = $2
+// Which results in:
+select * from users
+// Or
+select * from users where user_id = $1
```
-#### Select
-
+### SQL functions
+Using keywords or calling functions dynamically is also possible by using ``` sql`` ``` fragments.
```js
-
-const columns = ['name', 'age']
+const date = null
sql`
- select ${
- sql(columns)
- } from users
+ update users set updated_at = ${ date || sql`now()` }
`
-// Is translated into this query:
-select name, age from users
+// Which results in:
+update users set updated_at = now()
```
-#### Dynamic table name
-
+### Table names
+Dynamic identifiers like table names and column names are also supported like so:
```js
-
const table = 'users'
+ , column = 'id'
sql`
- select id from ${sql(table)}
+ select ${ sql(column) } from ${ sql(table) }
`
-// Is translated into this query:
-select id from users
+// Which results in:
+select "id" from "users"
```
-#### Arrays `sql.array(Array)`
+## Advanced query methods
+
+### .cursor()
+
+#### ```await sql``.cursor([rows = 1], [fn])```
-PostgreSQL has a native array type which is similar to js arrays, but only allows the same type and shape for nested items. This method automatically infers the item type and serializes js arrays into PostgreSQL arrays.
+Use cursors if you need to throttle the amount of rows being returned from a query. You can use a cursor either as an [async iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) or with a callback function. With a callback function, new results won't be requested until the promise returned by the callback has resolved.
+##### callback function
```js
+await sql`
+ select
+ *
+ from generate_series(1,4) as x
+`.cursor(async([row]) => {
+ // row = { x: 1 }
+ await http.request('https://example.com/wat', { row })
+})
+```
-const types = sql`
- insert into types (
- integers,
- strings,
- dates,
- buffers,
- multi
- ) values (
- ${ sql.array([1,2,3,4,5]) },
- ${ sql.array(['Hello', 'Postgres']) },
- ${ sql.array([new Date(), new Date(), new Date()]) },
- ${ sql.array([Buffer.from('Hello'), Buffer.from('Postgres')]) },
- ${ sql.array([[[1,2],[3,4]][[5,6],[7,8]]]) },
- )
-`
+##### for await...of
+```js
+// for await...of
+const cursor = sql`select * from generate_series(1,4) as x`.cursor()
+for await (const [row] of cursor) {
+ // row = { x: 1 }
+ await http.request('https://example.com/wat', { row })
+}
```
-#### JSON `sql.json(object)`
+A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument to `.cursor`:
+```js
+await sql`
+ select
+ *
+ from generate_series(1,1000) as x
+`.cursor(10, async rows => {
+ // rows = [{ x: 1 }, { x: 2 }, ... ]
+ await Promise.all(rows.map(row =>
+ http.request('https://example.com/wat', { row })
+ ))
+})
+```
+
+If an error is thrown inside the callback function no more rows will be requested and the outer promise will reject with the thrown error.
+
+You can close the cursor early either by calling `break` in the `for await...of` loop, or by returning the token `sql.CLOSE` from the callback function.
```js
+await sql`
+ select * from generate_series(1,1000) as x
+`.cursor(row => {
+ return Math.random() > 0.9 && sql.CLOSE // or sql.END
+})
+```
-const body = { hello: 'postgres' }
+### .forEach()
-const [{ json }] = await sql`
- insert into json (
- body
- ) values (
- ${ sql.json(body) }
- )
- returning body
-`
+#### ```await sql``.forEach(fn)```
+
+If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows.
+```js
+await sql`
+ select created_at, name from events
+`.forEach(row => {
+ // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' }
+})
-// json = { hello: 'postgres' }
+// No more rows
```
-## File query `sql.file(path, [args], [options]) -> Promise`
-
-Using an `.sql` file for a query. The contents will be cached in memory so that the file is only read once.
-
-```js
+### describe
+#### ```await sql``.describe() -> Result[]```
-sql.file(path.join(__dirname, 'query.sql'), [], {
- cache: true // Default true - disable for single shot queries or memory reasons
-})
+Rather than executing a given query, `.describe` will return information utilized in the query process. This information can include the query identifier, column types, etc.
-```
+This is useful for debugging and analyzing your Postgres queries. Furthermore, **`.describe` will give you access to the final generated query string that would be executed.**
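+
+A short sketch; the exact shape of the returned description is an assumption here, beyond the generated query string mentioned above:
+```js
+const description = await sql`
+  select * from users where age > ${ 50 }
+`.describe()
+// description.string (assumed field) holds the final generated query string
+```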
-## Subscribe / Realtime
+### Raw
+#### ```sql``.raw()```
-Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to realtime updates of `insert`, `update` and `delete` operations.
+Using `.raw()` will return rows as an array with `Buffer` values for each column, instead of objects.
-> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser.
+This can be useful to receive identically named columns, or for specific performance/transformation reasons. The column definitions are still included on the result array, plus access to parsers for each column.
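+
+A minimal sketch of the difference, assuming a simple two-column select:
+```js
+const rows = await sql`select 1 as x, 2 as y`.raw()
+// instead of [{ x: 1, y: 2 }], each row is an array of Buffers,
+// one Buffer per column, in column order
+```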
-### Quick start
+### File
+#### `await sql.file(path, [args], [options]) -> Result[]`
-#### Create a publication (eg. in migration)
-```sql
-CREATE PUBLICATION alltables FOR ALL TABLES
-```
+Using a `.sql` file for a query is also supported with optional parameters to use if the file includes `$1, $2, etc`
-#### Subscribe to updates
```js
-const sql = postgres({ publications: 'alltables' })
-
-const { unsubscribe } = await sql.subscribe('insert:events', row =>
- // tell about new event row over eg. websockets or do something else
-)
+const result = await sql.file('query.sql', ['Murray', 68])
```
-### Subscribe pattern
-
-You can subscribe to specific operations, tables or even rows with primary keys.
+### Canceling Queries in Progress
-### `operation` `:` `schema` `.` `table` `=` `primary_key`
+Postgres.js supports [canceling queries in progress](https://www.postgresql.org/docs/7.1/protocol-protocol.html#AEN39000). It works by opening a new connection with a protocol level startup message to cancel the current query running on a specific connection. That means there is no guarantee that the query will be canceled, and due to the possible race conditions it might even result in canceling another query. This is fine for long-running queries, but in the case of high load and fast queries it might be better to simply ignore results instead of canceling.
-**`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*`
+```js
+const query = sql`select pg_sleep(100)`.execute()
+setTimeout(() => query.cancel(), 100)
+const result = await query
+```
-**`schema`** defaults to `public.`
+### Unsafe raw string queries
-**`table`** is a specific table name and defaults to `*`
+
-**`primary_key`** can be used to only subscribe to specific rows
+#### `await sql.unsafe(query, [args], [options]) -> Result[]`
-#### Examples
+If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to SQL injection if you're not careful.
```js
-sql.subscribe('*', () => /* everything */ )
-sql.subscribe('insert', () => /* all inserts */ )
-sql.subscribe('*:users', () => /* all operations on the public.users table */ )
-sql.subscribe('delete:users', () => /* all deletes on the public.users table */ )
-sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ )
+sql.unsafe('select ' + danger + ' from users where id = ' + dragons)
```
+
## Transactions
+#### BEGIN / COMMIT `await sql.begin([options = ''], fn) -> fn()`
-#### BEGIN / COMMIT `sql.begin(fn) -> Promise`
+Use `sql.begin` to start a new transaction. Postgres.js will reserve a connection for the transaction and supply a scoped `sql` instance for all transaction uses in the callback function. `sql.begin` will resolve with the returned value from the callback function.
-Calling begin with a function will return a Promise which resolves with the returned value from the function. The function provides a single argument which is `sql` with a context of the newly created transaction. `BEGIN` is automatically called, and if the Promise fails `ROLLBACK` will be called. If it succeeds `COMMIT` will be called.
+`BEGIN` is automatically sent with the optional options, and if anything fails `ROLLBACK` will be called so the connection can be released and execution can continue.
```js
-
const [user, account] = await sql.begin(async sql => {
const [user] = await sql`
insert into users (
name
) values (
- 'Alice'
+ 'Murray'
)
`
@@ -537,24 +468,31 @@ const [user, account] = await sql.begin(async sql => {
return [user, account]
})
-
```
-
-#### SAVEPOINT `sql.savepoint([name], fn) -> Promise`
+It's also possible to pipeline the requests in a transaction if needed by returning an array with queries from the callback function like this:
```js
+const result = await sql.begin(sql => [
+ sql`update ...`,
+ sql`update ...`,
+ sql`insert ...`
+])
+```
-sql.begin(async sql => {
+#### SAVEPOINT `await sql.savepoint([name], fn) -> fn()`
+
+```js
+sql.begin('read write', async sql => {
const [user] = await sql`
insert into users (
name
) values (
- 'Alice'
+ 'Murray'
)
`
- const [account] = (await sql.savepoint(sql =>
+ const [account] = (await sql.savepoint(sql =>
sql`
insert into accounts (
user_id
@@ -574,78 +512,79 @@ sql.begin(async sql => {
.catch(() => {
// not so good - ROLLBACK was called
})
-
```
Do note that you can often achieve the same result using [`WITH` queries (Common Table Expressions)](https://www.postgresql.org/docs/current/queries-with.html) instead of using transactions.
+## Listen & notify
-## Custom Types
+When you call `.listen`, a dedicated connection will be created to ensure that you receive notifications in real-time. This connection will be used for any further calls to `.listen`.
-You can add ergonomic support for custom types, or simply pass an object with a `{ type, value }` signature that contains the Postgres `oid` for the type and the correctly serialized value. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_
+`.listen` returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active.
-Adding Query helpers is the recommended approach which can be done like this:
+```js
+await sql.listen('news', payload => {
+ const json = JSON.parse(payload)
+ console.log(json.this) // logs 'is'
+})
+```
+Notify can be done as usual in SQL, or by using the `sql.notify` method.
```js
+sql.notify('news', JSON.stringify({ no: 'this', is: 'news' }))
+```
-const sql = postgres({
- types: {
- rect: {
- // The pg_types oid to pass to the db along with the serialized value.
- to : 1337,
+## Realtime subscribe
- // An array of pg_types oids to handle when parsing values coming from the db.
- from : [1337],
+Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to real-time updates of `insert`, `update` and `delete` operations.
- //Function that transform values before sending them to the db.
- serialize : ({ x, y, width, height }) => [x, y, width, height],
+> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser.
- // Function that transforms values coming from the db.
- parse : ([x, y, width, height]) => { x, y, width, height }
- }
- }
-})
+### Quick start
-// Now you can use sql.types.rect() as specified above
-const [custom] = sql`
- insert into rectangles (
- name,
- rect
- ) values (
- 'wat',
- ${ sql.types.rect({ x: 13, y: 37, width: 42, height: 80 }) }
- )
- returning *
-`
+#### Create a publication (eg. in migration)
+```sql
+CREATE PUBLICATION alltables FOR ALL TABLES
+```
-// custom = { name: 'wat', rect: { x: 13, y: 37, width: 42, height: 80 } }
+#### Subscribe to updates
+```js
+const sql = postgres({ publications: 'alltables' })
+const { unsubscribe } = await sql.subscribe('insert:events', (row, { command, relation, key, old }) =>
+ // tell about new event row over eg. websockets or do something else
+)
```
-## Teardown / Cleanup
+### Subscribe pattern
-To ensure proper teardown and cleanup on server restarts use `sql.end({ timeout: 0 })` before `process.exit()`.
+You can subscribe to specific operations, tables, or even rows with primary keys.
-Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a timeout is provided any pending queries will be rejected once the timeout is reached and the connections will be destroyed.
+#### `operation` `:` `schema` `.` `table` `=` `primary_key`
-#### Sample shutdown using [Prexit](http://npmjs.com/prexit)
+**`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*`
-```js
+**`schema`** defaults to `public`
-import prexit from 'prexit'
+**`table`** is a specific table name and defaults to `*`
-prexit(async () => {
- await sql.end({ timeout: 5 })
- await new Promise(r => server.close(r))
-})
+**`primary_key`** can be used to only subscribe to specific rows
+### Examples
+
+```js
+sql.subscribe('*', () => /* everything */ )
+sql.subscribe('insert', () => /* all inserts */ )
+sql.subscribe('*:users', () => /* all operations on the public.users table */ )
+sql.subscribe('delete:users', () => /* all deletes on the public.users table */ )
+sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ )
```
## Numbers, bigint, numeric
`Number` in javascript is only able to represent 2<sup>53</sup>-1 safely, which means that types in PostgreSQL like `bigint` and `numeric` won't fit into `Number`.
-Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string.
+Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately, it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string.
If you want to use `BigInt` you can add this custom type:
@@ -657,13 +596,78 @@ const sql = postgres({
})
```
-There is currently no way to handle `numeric / decimal` in a native way in Javascript, so these and similar will be returned as `string`. You can also handle types like these using [custom types](#custom-types) if you want to.
+There is currently no guaranteed way to handle `numeric` / `decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way in this case is to use [custom types](#custom-types).
+
+
+## Connection details
+
+### All Postgres options
+
+```js
+const sql = postgres('postgres://username:password@host:port/database', {
+ host : '', // Postgres ip address[es] or domain name[s]
+ port : 5432, // Postgres server port[s]
+ path : '', // unix socket path (usually '/tmp')
+ database : '', // Name of database to connect to
+ username : '', // Username of database user
+ password : '', // Password of database user
+ ssl : false, // true, prefer, require, tls.connect options
+ max : 10, // Max number of connections
+ max_lifetime : null, // Max lifetime in seconds (more info below)
+ idle_timeout : 0, // Idle connection timeout in seconds
+ connect_timeout : 30, // Connect timeout in seconds
+ no_prepare : false, // No automatic creation of prepared statements
+ types : [], // Array of custom types, see more below
+ onnotice : fn, // Defaults to console.log
+ onparameter : fn, // (key, value) when server param change
+ debug : fn, // Is called with (connection, query, params)
+ transform : {
+ column : fn, // Transforms incoming column names
+ value : fn, // Transforms incoming row values
+ row : fn // Transforms entire rows
+ },
+ connection : {
+ application_name : 'postgres.js', // Default application_name
+ ... // Other connection parameters
+ },
+ target_session_attrs : null, // Use 'read-write' with multiple hosts to
+ // ensure only connecting to primary
+  fetch_types : true, // Automatically fetches types on initial connection
+})
+```
+
+Note that `max_lifetime = 60 * (30 + Math.random() * 30)` by default. This resolves to an interval between 45 and 90 minutes to optimize for the benefits of prepared statements **and** working nicely with Linux's OOM killer.
+
+### SSL
+
+Although [vulnerable to MITM attacks](https://security.stackexchange.com/a/229297/174913), a common configuration for the `ssl` option for some cloud providers is to set `rejectUnauthorized` to `false` (if `NODE_ENV` is `production`):
+
+```js
+const sql =
+ process.env.NODE_ENV === 'production'
+ ? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates"
+ // https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl
+ postgres({ ssl: { rejectUnauthorized: false } })
+ : postgres()
+```
+
+For more information regarding `ssl` with `postgres`, check out the [Node.js documentation for tls](https://nodejs.org/dist/latest-v16.x/docs/api/tls.html#new-tlstlssocketsocket-options).
+
+
+### Multi-host connections - High Availability (HA)
+
+Multiple connection strings can be passed to `postgres()` in the form of `postgres('postgres://localhost:5432,localhost:5433', ...)`. This works the same as the native `psql` command. Read more at [multiple host URIs](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS).
+
+Connections will be attempted in order of the specified hosts/ports. On a successful connection, all retries will be reset. This ensures that hosts can come up and down seamlessly.
+
+If you specify `target_session_attrs: 'primary'` or `PGTARGETSESSIONATTRS=primary` Postgres.js will only connect to the primary host, allowing for zero downtime failovers.
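+
+For example, a sketch combining the multi-host string with the option above:
+```js
+const sql = postgres('postgres://localhost:5432,localhost:5433', {
+  target_session_attrs: 'primary' // only connect to the writable primary
+})
+```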
-## The Connection Pool
+### The Connection Pool
-Connections are created lazily once a query is created. This means that simply doing const `sql = postgres(...)` won't have any effect other than instantiating a new `sql` instance.
+Connections are created lazily once a query is created. This means that simply doing `const sql = postgres(...)` won't have any effect other than instantiating a new `sql` instance.
-> No connection will be made until a query is made.
+> No connection will be made until a query is made.
This means that we get a much simpler story for error handling and reconnections. Queries will be sent over the wire immediately on the next available connection in the pool. Connections are automatically taken out of the pool if you start a transaction using `sql.begin()`, and automatically returned to the pool once your transaction is done.
@@ -671,51 +675,130 @@ Any query which was already sent over the wire will be rejected if the connectio
There are no guarantees about queries executing in order unless using a transaction with `sql.begin()` or setting `max: 1`. Of course doing a series of queries, one awaiting the other will work as expected, but that's just due to the nature of js async/promise handling, so it's not necessary for this library to be concerned with ordering.
-### Idle timeout
+Since this library automatically creates prepared statements, it also has a default max lifetime for connections to prevent memory bloat on the database itself. This is a random interval for each connection between 45 and 90 minutes. This allows multiple connections to come up and down seamlessly without user interference.
+
+### Connection timeout
By default, connections will not close until `.end()` is called. However, it may be useful to have them close automatically when:
-- there is no activity for some period of time
-- if using Postgres.js in Lamdas / Serverless environments
-- if using Postgres.js with a database service that automatically closes the connection after some time (see [`ECONNRESET` issue](https://github.com/porsager/postgres/issues/179))
+- re-instantiating multiple ` sql`` ` instances
+- using Postgres.js in a Serverless environment (Lambda, etc.)
+- using Postgres.js with a database service that automatically closes connections after some time (see [`ECONNRESET` issue](https://github.com/porsager/postgres/issues/179))
-This can be done using the `idle_timeout` option to specify the amount of seconds to wait before automatically closing an idle connection.
+This can be done using the `idle_timeout` or `max_lifetime` options. These configuration options specify the number of seconds to wait before automatically closing an idle connection and the maximum time a connection can exist, respectively.
-For example, to close idle connections after 2 seconds:
+For example, to close a connection that has either been idle for 20 seconds or existed for more than 30 minutes:
```js
const sql = postgres({
- idle_timeout: 2
+ idle_timeout: 20,
+ max_lifetime: 60 * 30
})
```
-## Prepared statements
+### Auto fetching of array types
+
+Postgres.js will automatically fetch table/array-type information when it first connects to a database.
+
+If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled.
+
+You can disable this feature by setting `fetch_types` to `false`.
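+
+For example, if access to `pg_catalog` has been revoked:
+```js
+const sql = postgres({
+  fetch_types: false // skip the automatic type fetch on connect
+})
+```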
+
+### Environment variables
+
+It is also possible to connect to the database without a connection string or any options. Postgres.js will fall back to the common environment variables used by `psql` as in the table below:
+
+```js
+const sql = postgres()
+```
+
+| Option | Environment Variables |
+| ----------------- | ------------------------ |
+| `host` | `PGHOST` |
+| `port` | `PGPORT` |
+| `database` | `PGDATABASE` |
+| `username` | `PGUSERNAME` or `PGUSER` |
+| `password` | `PGPASSWORD` |
+| `idle_timeout` | `PGIDLE_TIMEOUT` |
+| `connect_timeout` | `PGCONNECT_TIMEOUT` |
+
+### Prepared statements
Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `no_prepare` option. For instance, this is useful when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93).
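+
+A minimal sketch using the `no_prepare` option listed in the connection options above:
+```js
+const sql = postgres({
+  no_prepare: true // e.g. for PGBouncer in transaction mode
+})
+```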
-sql.unsafe
- Advanced unsafe use cases
+## Custom Types
-### Unsafe queries `sql.unsafe(query, [args], [options]) -> promise`
+You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where `type` is the PostgreSQL `oid` for the type and `value` is the correctly serialized value. _(`oid` values for types can be found in the `pg_catalog.pg_type` table.)_
-If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful.
+Adding Query helpers is the cleanest approach which can be done like this:
```js
+const sql = postgres({
+ types: {
+ rect: {
+ // The pg_types oid to pass to the db along with the serialized value.
+ to : 1337,
-sql.unsafe('select ' + danger + ' from users where id = ' + dragons)
+ // An array of pg_types oids to handle when parsing values coming from the db.
+ from : [1337],
+
+      // Function that transforms values before sending them to the db.
+ serialize : ({ x, y, width, height }) => [x, y, width, height],
+
+ // Function that transforms values coming from the db.
+      parse : ([x, y, width, height]) => ({ x, y, width, height })
+ }
+ }
+})
+
+// Now you can use sql.typed.rect() as specified above
+const [custom] = sql`
+ insert into rectangles (
+ name,
+ rect
+ ) values (
+ 'wat',
+ ${ sql.typed.rect({ x: 13, y: 37, width: 42, height: 80 }) }
+ )
+ returning *
+`
+
+// custom = { name: 'wat', rect: { x: 13, y: 37, width: 42, height: 80 } }
```
-
-## Errors
+## Teardown / Cleanup
+
+To ensure proper teardown and cleanup on server restarts use `await sql.end()` before `process.exit()`.
-Errors are all thrown to related queries and never globally. Errors coming from PostgreSQL itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection.
+Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a `{ timeout }` option is provided any pending queries will be rejected once the timeout (in seconds) is reached and the connections will be destroyed.
+
+#### Sample shutdown using [Prexit](https://github.com/porsager/prexit)
+
+```js
+import prexit from 'prexit'
+
+prexit(async () => {
+ await sql.end({ timeout: 5 })
+ await new Promise(r => server.close(r))
+})
+```
+
+## Error handling
+
+Errors are all thrown to related queries and never globally. Errors coming from the database itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection.
Query errors will contain a stored error with the origin of the query to aid in tracing errors.
-Query errors will also contain the `query` string and the `parameters` which are not enumerable to avoid accidentally leaking confidential information in logs. To log these it is required to specifically access `error.query` and `error.parameters`.
+Query errors will also contain the `query` string and the `parameters`. These are not enumerable to avoid accidentally leaking confidential information in logs. To log these it is required to specifically access `error.query` and `error.parameters`, or set `debug: true` in options.
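+
+A minimal sketch of logging these fields, assuming any failing query:
+```js
+try {
+  await sql`select * from missing_table`
+} catch (error) {
+  // query and parameters are non-enumerable, so access them explicitly
+  console.error(error.query, error.parameters)
+}
+```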
There are also the following errors specifically for this library.
+##### UNSAFE_TRANSACTION
+> Only use sql.begin or max: 1
+
+To ensure statements in a transaction run on the same connection (which is required for them to run inside the transaction), you must use [`sql.begin(...)`](#transactions) or only allow a single connection in options (`max: 1`).
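+
+For example, a single-connection instance (a sketch):
+```js
+const sql = postgres({ max: 1 }) // every query now shares one connection
+```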
+
##### UNDEFINED_VALUE
> Undefined values are not allowed
@@ -734,7 +817,7 @@ The postgres protocol doesn't allow more than 65534 (16bit) parameters. If you r
##### SASL_SIGNATURE_MISMATCH
> Message type X not supported
-When using SASL authentication the server responds with a signature at the end of the authentication flow which needs to match the one on the client. This is to avoid [man in the middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack). If you receive this error the connection was cancelled because the server did not reply with the expected signature.
+When using SASL authentication the server responds with a signature at the end of the authentication flow which needs to match the one on the client. This is to avoid [man-in-the-middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack). If you receive this error the connection was canceled because the server did not reply with the expected signature.
##### NOT_TAGGED_CALL
> Query not called as a tagged template literal
@@ -749,27 +832,66 @@ Postgres supports many different authentication types. This one is not supported
##### CONNECTION_CLOSED
> write CONNECTION_CLOSED host:port
-This error is thrown if the connection was closed without an error. This should not happen during normal operation, so please create an issue if this was unexpected.
+This error is thrown if the connection was closed without an error. This should not happen during normal operations, so please create an issue if this was unexpected.
##### CONNECTION_ENDED
> write CONNECTION_ENDED host:port
-This error is thrown if the user has called [`sql.end()`](#sql_end) and performed a query afterwards.
+This error is thrown if the user has called [`sql.end()`](#teardown--cleanup) and performed a query afterward.
##### CONNECTION_DESTROYED
> write CONNECTION_DESTROYED host:port
-This error is thrown for any queries that were pending when the timeout to [`sql.end({ timeout: X })`](#sql_destroy) was reached.
+This error is thrown for any queries that were pending when the timeout to [`sql.end({ timeout: X })`](#teardown--cleanup) was reached.
##### CONNECTION_CONNECT_TIMEOUT
> write CONNECTION_CONNECT_TIMEOUT host:port
-This error is thrown if the startup phase of the connection (tcp, protocol negotiation and auth) took more than the default 30 seconds or what was specified using `connect_timeout` or `PGCONNECT_TIMEOUT`.
+This error is thrown if the startup phase of the connection (tcp, protocol negotiation, and auth) took more than the default 30 seconds or what was specified using `connect_timeout` or `PGCONNECT_TIMEOUT`.
+
+## TypeScript support
+
+`postgres` has TypeScript support. You can pass a row list type for your queries in this way:
+```ts
+interface User {
+ id: number
+ name: string
+}
+
+const users = await sql`SELECT * FROM users`
+users[0].id // ok => number
+users[1].name // ok => string
+users[0].invalid // fails: `invalid` does not exist on `User`
+```
+
+However, be sure to check the array length to avoid accessing properties of `undefined` rows:
+```ts
+const users = await sql`SELECT * FROM users WHERE id = ${id}`
+if (!users.length)
+ throw new Error('Not found')
+return users[0]
+```
+
+You can also prefer destructuring when you only care about a fixed number of rows.
+In this case, we recommend using tuples to handle `undefined` properly:
+```ts
+const [user]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}`
+if (!user) // => User | undefined
+ throw new Error('Not found')
+return user // => User
+
+// NOTE:
+const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]`
+const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // doesn't fail: `second` is `User | undefined`
+```
+
+We do our best to type all the public API, however types are not always updated when features are added or changed. Feel free to open an issue if you have trouble with types.
## Migration tools
-Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that supports Postgres.js for migrations:
+Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that support Postgres.js for migrations:
+- https://github.com/porsager/postgres-shift
- https://github.com/lukeed/ley
## Thank you
diff --git a/LICENSE b/UNLICENSE
similarity index 94%
rename from LICENSE
rename to UNLICENSE
index 68a49daa..efb98088 100644
--- a/LICENSE
+++ b/UNLICENSE
@@ -21,4 +21,4 @@ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
-For more information, please refer to
+For more information, please refer to
diff --git a/cjs/package.json b/cjs/package.json
new file mode 100644
index 00000000..0292b995
--- /dev/null
+++ b/cjs/package.json
@@ -0,0 +1 @@
+{"type":"commonjs"}
\ No newline at end of file
diff --git a/lib/bytes.js b/cjs/src/bytes.js
similarity index 86%
rename from lib/bytes.js
rename to cjs/src/bytes.js
index c4ec3152..38fe13b7 100644
--- a/lib/bytes.js
+++ b/cjs/src/bytes.js
@@ -1,7 +1,7 @@
const size = 256
let buffer = Buffer.allocUnsafe(size)
-const messages = ['B', 'C', 'Q', 'P', 'F', 'p', 'D', 'E', 'H', 'S', 'd', 'c', 'f'].reduce((acc, x) => {
+const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
const v = x.charCodeAt(0)
acc[x] = () => {
buffer[0] = v
@@ -11,7 +11,8 @@ const messages = ['B', 'C', 'Q', 'P', 'F', 'p', 'D', 'E', 'H', 'S', 'd', 'c', 'f
return acc
}, {})
-const b = Object.assign(messages, {
+const b = Object.assign(reset, messages, {
+ N: String.fromCharCode(0),
i: 0,
inc(x) {
b.i += x
@@ -70,3 +71,8 @@ function fit(x) {
prev.copy(buffer)
}
}
+
+function reset() {
+ b.i = 0
+ return b
+}
diff --git a/cjs/src/connection.js b/cjs/src/connection.js
new file mode 100644
index 00000000..14760caf
--- /dev/null
+++ b/cjs/src/connection.js
@@ -0,0 +1,1000 @@
+const net = require('net')
+const tls = require('tls')
+const crypto = require('crypto')
+const Stream = require('stream')
+
+const { Identifier, Builder, handleValue, arrayParser, arraySerializer } = require('./types.js')
+const { Errors } = require('./errors.js')
+const Result = require('./result.js')
+const Queue = require('./queue.js')
+const { Query, CLOSE } = require('./query.js')
+const b = require('./bytes.js')
+
+module.exports = Connection
+
+let uid = 1
+
+const Sync = b().S().end()
+ , Flush = b().H().end()
+ , SSLRequest = b().i32(8).i32(80877103).end(8)
+ , ExecuteUnnamed = Buffer.concat([b().E().str(b.N).i32(0).end(), Sync])
+ , DescribeUnnamed = b().D().str('S').str(b.N).end()
+ , noop = () => { /* noop */ }
+
+const retryRoutines = new Set([
+ 'FetchPreparedStatement',
+ 'RevalidateCachedQuery',
+ 'transformAssignedExpr'
+])
+
+const errorFields = {
+ 83 : 'severity_local', // S
+ 86 : 'severity', // V
+ 67 : 'code', // C
+ 77 : 'message', // M
+ 68 : 'detail', // D
+ 72 : 'hint', // H
+ 80 : 'position', // P
+ 112 : 'internal_position', // p
+ 113 : 'internal_query', // q
+ 87 : 'where', // W
+ 115 : 'schema_name', // s
+ 116 : 'table_name', // t
+ 99 : 'column_name', // c
+ 100 : 'data type_name', // d
+ 110 : 'constraint_name', // n
+ 70 : 'file', // F
+ 76 : 'line', // L
+ 82 : 'routine' // R
+}
+
+function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) {
+ const {
+ ssl,
+ max,
+ user,
+ host,
+ port,
+ database,
+ parsers,
+ transform,
+ onnotice,
+ onnotify,
+ onparameter,
+ max_pipeline,
+ keep_alive,
+ backoff,
+ target_session_attrs
+ } = options
+
+ const sent = Queue()
+ , id = uid++
+ , backend = { pid: null, secret: null }
+ , idleTimer = timer(end, options.idle_timeout)
+ , lifeTimer = timer(end, options.max_lifetime)
+ , connectTimer = timer(connectTimedOut, options.connect_timeout)
+
+ let socket = createSocket()
+ , result = new Result()
+ , incoming = Buffer.alloc(0)
+ , needsTypes = options.fetch_types
+ , backendParameters = {}
+ , statements = {}
+ , state = 'closed'
+ , statementId = Math.random().toString(36).slice(2)
+ , statementCount = 1
+ , closedDate = 0
+ , remaining = 0
+ , hostIndex = 0
+ , retries = 0
+ , length = 0
+ , delay = 0
+ , rows = 0
+ , serverSignature = null
+ , nextWriteTimer = null
+ , terminated = false
+ , incomings = null
+ , results = null
+ , initial = null
+ , ending = null
+ , stream = null
+ , chunk = null
+ , ended = null
+ , nonce = null
+ , query = null
+ , final = null
+
+ const connection = {
+ get state() { return state },
+ set state(x) {
+ state = x
+ state === 'open'
+ ? idleTimer.start()
+ : idleTimer.cancel()
+ },
+ connect(query) {
+ initial = query
+ reconnect()
+ },
+ terminate,
+ execute,
+ cancel,
+ end,
+ count: 0,
+ id
+ }
+
+ return connection
+
+ function createSocket() {
+ const x = net.Socket()
+ x.on('error', error)
+ x.on('close', closed)
+ x.on('drain', drain)
+ return x
+ }
+
+ function cancel({ pid, secret }, resolve, reject) {
+ socket.removeAllListeners()
+ socket = net.Socket()
+ socket.on('connect', () => socket.write(b().i32(16).i32(80877102).i32(pid).i32(secret).end(16)))
+ socket.once('error', reject)
+ socket.once('close', resolve)
+ connect()
+ }
+
+ function execute(q) {
+ if (terminated)
+ return q.reject(Errors.connection('CONNECTION_DESTROYED', options))
+
+ if (q.cancelled)
+ return
+
+ try {
+ q.state = backend
+ query
+ ? sent.push(q)
+ : (query = q, query.active = true)
+
+ build(q)
+ return write(toBuffer(q))
+ && !q.describeFirst
+ && sent.length < max_pipeline
+ && (!q.options.onexecute || q.options.onexecute(connection))
+ } catch (error) {
+ sent.length === 0 && write(Sync)
+ errored(error)
+ return true
+ }
+ }
+
+ function toBuffer(q) {
+ if (q.parameters.length >= 65534)
+ throw Errors.generic('MAX_PARAMETERS_EXCEEDED', 'Max number of parameters (65534) exceeded')
+
+ return q.options.simple
+ ? b().Q().str(q.strings[0] + b.N).end()
+ : q.describeFirst
+ ? Buffer.concat([describe(q), Flush])
+ : q.prepare
+ ? q.prepared
+ ? prepared(q)
+ : Buffer.concat([describe(q), prepared(q)])
+ : unnamed(q)
+ }
+
+ function describe(q) {
+ return Buffer.concat([
+ Parse(q.statement.string, q.parameters, q.statement.types, q.statement.name),
+ Describe('S', q.statement.name)
+ ])
+ }
+
+ function prepared(q) {
+ return Buffer.concat([
+ Bind(q.parameters, q.statement.types, q.statement.name, q.cursorName),
+ q.cursorFn
+ ? Execute('', q.cursorRows)
+ : ExecuteUnnamed
+ ])
+ }
+
+ function unnamed(q) {
+ return Buffer.concat([
+ Parse(q.statement.string, q.parameters, q.statement.types),
+ DescribeUnnamed,
+ prepared(q)
+ ])
+ }
+
+ function build(q) {
+ const parameters = []
+ , types = []
+
+ const string = stringify(q, q.strings[0], q.args[0], parameters, types)
+
+ !q.tagged && q.args.forEach(x => handleValue(x, parameters, types))
+
+ q.prepare = options.prepare && ('prepare' in q.options ? q.options.prepare : true)
+ q.string = string
+ q.signature = q.prepare && types + string
+ q.onlyDescribe && (delete statements[q.signature])
+ q.parameters = q.parameters || parameters
+ q.prepared = q.prepare && q.signature in statements
+ q.describeFirst = q.onlyDescribe || (parameters.length && !q.prepared)
+ q.statement = q.prepared
+ ? statements[q.signature]
+ : { string, types, name: q.prepare ? statementId + statementCount++ : '' }
+
+ typeof options.debug === 'function' && options.debug(id, string, parameters, types)
+ }
+
+ function stringify(q, string, value, parameters, types) {
+ for (let i = 1; i < q.strings.length; i++) {
+ string += (
+ value instanceof Query ? fragment(string, value, parameters, types) :
+ value instanceof Identifier ? value.value :
+ value instanceof Builder ? value.build(string, parameters, types, options.transform) :
+ handleValue(value, parameters, types)
+ ) + q.strings[i]
+ value = q.args[i]
+ }
+
+ return string
+ }
+
+ function fragment(string, q, parameters, types) {
+ q.fragment = true
+ return stringify(q, q.strings[0], q.args[0], parameters, types)
+ }
+
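+  // Outgoing messages are coalesced into a single buffer and flushed
+  // either right away (when a callback is passed or 1024+ bytes are
+  // pending) or on the next setImmediate tick, keeping socket writes large.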
+ function write(x, fn) {
+ chunk = chunk ? Buffer.concat([chunk, x]) : Buffer.from(x)
+ if (fn || chunk.length >= 1024)
+ return nextWrite(fn)
+ nextWriteTimer === null && (nextWriteTimer = setImmediate(nextWrite))
+ return true
+ }
+
+ function nextWrite(fn) {
+ const x = socket.write(chunk, fn)
+ nextWriteTimer !== null && clearImmediate(nextWriteTimer)
+ chunk = nextWriteTimer = null
+ return x
+ }
+
+ function connectTimedOut() {
+ errored(Errors.connection('CONNECT_TIMEOUT', options, socket))
+ socket.destroy()
+ }
+
+ async function secure() {
+ write(SSLRequest)
+ const canSSL = await new Promise(r => socket.once('data', x => r(x[0] === 83))) // S
+
+ if (!canSSL && ssl === 'prefer')
+ return connected()
+
+ socket.removeAllListeners()
+ socket = tls.connect({
+ socket,
+ ...(ssl === 'require' || ssl === 'allow' || ssl === 'prefer'
+ ? { rejectUnauthorized: false }
+ : ssl
+ )
+ })
+ socket.on('secureConnect', connected)
+ socket.on('error', error)
+ socket.on('close', closed)
+ socket.on('drain', drain)
+ }
+
+ /* c8 ignore next 3 */
+ function drain() {
+ ondrain(connection)
+ }
+
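+  // Reassembles protocol messages from raw socket chunks. Each message is
+  // a 1-byte type tag plus a 4-byte big-endian length that counts itself
+  // but not the tag, so a complete message spans length + 1 bytes.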
+ function data(x) {
+ if (incomings) {
+ incomings.push(x)
+ remaining -= x.length
+ if (remaining >= 0)
+ return
+ }
+
+ incoming = incomings
+ ? Buffer.concat(incomings, length - remaining)
+ : incoming.length === 0
+ ? x
+ : Buffer.concat([incoming, x], incoming.length + x.length)
+
+ while (incoming.length > 4) {
+ length = incoming.readUInt32BE(1)
+ if (length >= incoming.length) {
+ remaining = length - incoming.length
+ incomings = [incoming]
+ break
+ }
+
+ try {
+ handle(incoming.slice(0, length + 1))
+ } catch (e) {
+ query && (query.cursorFn || query.describeFirst) && write(Sync)
+ errored(e)
+ }
+ incoming = incoming.slice(length + 1)
+ remaining = 0
+ incomings = null
+ }
+ }
+
+ function connect() {
+ terminated = false
+ backendParameters = {}
+ connectTimer.start()
+ socket.on('connect', ssl ? secure : connected)
+
+ if (options.path)
+ return socket.connect(options.path)
+
+ socket.connect(port[hostIndex], host[hostIndex])
+ hostIndex = (hostIndex + 1) % port.length
+ }
+
+ function reconnect() {
+ setTimeout(connect, closedDate ? closedDate + delay - Date.now() : 0)
+ }
+
+ function connected() {
+ try {
+ statements = {}
+ needsTypes = options.fetch_types
+ statementId = Math.random().toString(36).slice(2)
+ statementCount = 1
+ lifeTimer.start()
+ socket.on('data', data)
+ socket.setKeepAlive(true, 1000 * keep_alive)
+ const s = StartupMessage()
+ write(s)
+ } catch (err) {
+ error(err)
+ }
+ }
+
+ function error(err) {
+ if (connection.state === 'connecting' && options.host[retries + 1])
+ return
+
+ errored(err)
+ while (sent.length)
+ queryError(sent.shift(), err)
+ }
+
+ function errored(err) {
+ stream && (stream.destroy(err), stream = null)
+ query && queryError(query, err)
+ initial && (queryError(initial, err), initial = null)
+ }
+
+ function queryError(query, err) {
+ query.reject(Object.create(err, {
+ stack: { value: err.stack + query.origin.replace(/.*\n/, '\n'), enumerable: options.debug },
+ query: { value: query.string, enumerable: options.debug },
+ parameters: { value: query.parameters, enumerable: options.debug },
+ args: { value: query.args, enumerable: options.debug },
+ types: { value: query.statement && query.statement.types, enumerable: options.debug }
+ }))
+ }
+
+ function end() {
+ return ending || (
+ !connection.reserved && onend(connection),
+ !connection.reserved && !initial && !query && sent.length === 0
+ ? Promise.resolve(terminate())
+ : ending = new Promise(r => ended = r)
+ )
+ }
+
+ function terminate() {
+ terminated = true
+ if (stream || query || initial || sent.length)
+ error(Errors.connection('CONNECTION_DESTROYED', options))
+
+ clearImmediate(nextWriteTimer)
+ socket.removeListener('data', data)
+ socket.removeListener('connect', connected)
+ socket.readyState !== 'closed' && socket.end(b().X().end())
+ ended && (ended(), ending = ended = null)
+ }
+
+ function closed(hadError) {
+ incoming = Buffer.alloc(0)
+ remaining = 0
+ incomings = null
+ clearImmediate(nextWriteTimer)
+ socket.removeListener('data', data)
+ socket.removeListener('connect', connected)
+ idleTimer.cancel()
+ lifeTimer.cancel()
+ connectTimer.cancel()
+
+ if (socket.encrypted) {
+ socket.removeAllListeners()
+ socket = createSocket()
+ }
+
+ if (initial)
+ return reconnect()
+
+ !hadError && (query || sent.length) && error(Errors.connection('CONNECTION_CLOSED', options, socket))
+ closedDate = Date.now()
+ hadError && options.shared.retries++
+ delay = (typeof backoff === 'function' ? backoff(options.shared.retries) : backoff) * 1000
+ onclose(connection)
+ }
+
+ /* Handlers */
+ function handle(xs, x = xs[0]) {
+ (
+ x === 68 ? DataRow : // D
+ x === 100 ? CopyData : // d
+ x === 65 ? NotificationResponse : // A
+ x === 83 ? ParameterStatus : // S
+ x === 90 ? ReadyForQuery : // Z
+ x === 67 ? CommandComplete : // C
+ x === 50 ? BindComplete : // 2
+ x === 49 ? ParseComplete : // 1
+ x === 116 ? ParameterDescription : // t
+ x === 84 ? RowDescription : // T
+ x === 82 ? Authentication : // R
+ x === 110 ? NoData : // n
+ x === 75 ? BackendKeyData : // K
+ x === 69 ? ErrorResponse : // E
+ x === 115 ? PortalSuspended : // s
+ x === 51 ? CloseComplete : // 3
+ x === 71 ? CopyInResponse : // G
+ x === 78 ? NoticeResponse : // N
+ x === 72 ? CopyOutResponse : // H
+ x === 99 ? CopyDone : // c
+ x === 73 ? EmptyQueryResponse : // I
+ x === 86 ? FunctionCallResponse : // V
+ x === 118 ? NegotiateProtocolVersion : // v
+ x === 87 ? CopyBothResponse : // W
+ /* c8 ignore next */
+ UnknownMessage
+ )(xs)
+ }
+
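+  // DataRow: index 7 skips the tag, message length and 2-byte column
+  // count; every column is then a 4-byte length (-1 means null) followed
+  // by the value, decoded with the column's parser when one is registered.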
+ function DataRow(x) {
+ let index = 7
+ let length
+ let column
+ let value
+
+ const row = query.isRaw ? new Array(query.statement.columns.length) : {}
+ for (let i = 0; i < query.statement.columns.length; i++) {
+ column = query.statement.columns[i]
+ length = x.readInt32BE(index)
+ index += 4
+
+ value = length === -1
+ ? null
+ : query.isRaw
+ ? x.slice(index, index += length)
+ : column.parser === undefined
+ ? x.toString('utf8', index, index += length)
+ : column.parser.array === true
+ ? column.parser(x.toString('utf8', index + 1, index += length))
+ : column.parser(x.toString('utf8', index, index += length))
+
+ query.isRaw
+ ? (row[i] = value)
+ : (row[column.name] = transform.value.from ? transform.value.from(value) : value)
+ }
+
+ query.forEachFn
+ ? query.forEachFn(transform.row.from ? transform.row.from(row) : row, result)
+ : (result[rows++] = transform.row.from ? transform.row.from(row) : row)
+ }
+
+ function ParameterStatus(x) {
+ const [k, v] = x.toString('utf8', 5, x.length - 1).split(b.N)
+ backendParameters[k] = v
+ if (options.parameters[k] !== v) {
+ options.parameters[k] = v
+ onparameter && onparameter(k, v)
+ }
+ }
+
+ function ReadyForQuery(x) {
+ query && query.options.simple && query.resolve(results || result)
+ query = results = null
+ result = new Result()
+ connectTimer.cancel()
+
+ if (initial) {
+ if (target_session_attrs) {
+ if (!backendParameters.in_hot_standby || !backendParameters.default_transaction_read_only)
+ return fetchState()
+ else if (tryNext(target_session_attrs, backendParameters))
+ return terminate()
+ }
+
+ if (needsTypes)
+ return fetchArrayTypes()
+
+ execute(initial)
+ options.shared.retries = retries = initial = 0
+ return
+ }
+
+ while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled)
+ Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject)
+
+ if (query)
+ return // Consider opening if able and sent.length < 50
+
+ connection.reserved
+ ? x[5] === 73 // I
+ ? ending
+ ? terminate()
+ : (connection.reserved = null, onopen(connection))
+ : connection.reserved()
+ : ending
+ ? terminate()
+ : onopen(connection)
+ }
+
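+  // CommandComplete carries a tag such as 'INSERT 0 42' or 'SELECT 5'.
+  // Scanning backwards, the trailing digits become result.count and the
+  // leading word(s) become result.command.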
+ function CommandComplete(x) {
+ rows = 0
+
+ for (let i = x.length - 1; i > 0; i--) {
+ if (x[i] === 32 && x[i + 1] < 58 && result.count === null)
+ result.count = +x.toString('utf8', i + 1, x.length - 1)
+ if (x[i - 1] >= 65) {
+ result.command = x.toString('utf8', 5, i)
+ result.state = backend
+ break
+ }
+ }
+
+ final && (final(), final = null)
+
+ if (result.command === 'BEGIN' && max !== 1 && !connection.reserved)
+ return errored(Errors.generic('UNSAFE_TRANSACTION', 'Only use sql.begin or max: 1'))
+
+ if (query.options.simple)
+ return
+
+ if (query.cursorFn) {
+ result.count && query.cursorFn(result)
+ write(Sync)
+ }
+
+ query.resolve(result)
+ }
+
+ function ParseComplete() {
+ query.parsing = false
+ }
+
+ function BindComplete() {
+ !result.statement && (result.statement = query.statement)
+ result.columns = query.statement.columns
+ }
+
+ function ParameterDescription(x) {
+ const length = x.readUInt16BE(5)
+
+ for (let i = 0; i < length; ++i)
+ !query.statement.types[i] && (query.statement.types[i] = x.readUInt32BE(7 + i * 4))
+
+ query.prepare && (statements[query.signature] = query.statement)
+ query.describeFirst && !query.onlyDescribe && (write(prepared(query)), query.describeFirst = false)
+ }
+
+ function RowDescription(x) {
+ if (result.command) {
+ results = results || [result]
+ results.push(result = new Result())
+ result.count = null
+ query.statement.columns = null
+ }
+
+ const length = x.readUInt16BE(5)
+ let index = 7
+ let start
+
+ query.statement.columns = Array(length)
+
+ for (let i = 0; i < length; ++i) {
+ start = index
+ while (x[index++] !== 0);
+ const type = x.readUInt32BE(index + 6)
+ query.statement.columns[i] = {
+ name: transform.column.from
+ ? transform.column.from(x.toString('utf8', start, index - 1))
+ : x.toString('utf8', start, index - 1),
+ parser: parsers[type],
+ type
+ }
+ index += 18
+ }
+
+ result.statement = query.statement
+ if (query.onlyDescribe)
+ return (query.resolve(query.statement), write(Sync))
+ }
+
+ async function Authentication(x, type = x.readUInt32BE(5)) {
+ (
+ type === 3 ? AuthenticationCleartextPassword :
+ type === 5 ? AuthenticationMD5Password :
+ type === 10 ? SASL :
+ type === 11 ? SASLContinue :
+ type === 12 ? SASLFinal :
+ type !== 0 ? UnknownAuth :
+ noop
+ )(x, type)
+ }
+
+ /* c8 ignore next 5 */
+ async function AuthenticationCleartextPassword() {
+ write(
+ b().p().str(await Pass()).z(1).end()
+ )
+ }
+
+ async function AuthenticationMD5Password(x) {
+ write(
+ b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end()
+ )
+ }
+
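+  // SCRAM-SHA-256 auth (RFC 5802): SASL sends the client-first-message
+  // with a random nonce, SASLContinue derives the salted password with
+  // PBKDF2 from the server's salt and iteration count and sends the client
+  // proof, and SASLFinal verifies the server signature before proceeding.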
+ function SASL() {
+ b().p().str('SCRAM-SHA-256' + b.N)
+ const i = b.i
+ nonce = crypto.randomBytes(18).toString('base64')
+ write(b.inc(4).str('n,,n=*,r=' + nonce).i32(b.i - i - 4, i).end())
+ }
+
+ async function SASLContinue(x) {
+ const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {})
+
+ const saltedPassword = crypto.pbkdf2Sync(
+ await Pass(),
+ Buffer.from(res.s, 'base64'),
+ parseInt(res.i), 32,
+ 'sha256'
+ )
+
+ const clientKey = hmac(saltedPassword, 'Client Key')
+
+ const auth = 'n=*,r=' + nonce + ','
+ + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i
+ + ',c=biws,r=' + res.r
+
+ serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64')
+
+ write(
+ b().p().str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')).end()
+ )
+ }
+
+ function SASLFinal(x) {
+ if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature)
+ return
+ /* c8 ignore next 5 */
+ errored(Errors.generic('SASL_SIGNATURE_MISMATCH', 'The server did not return the correct signature'))
+ socket.destroy()
+ }
+
+ function Pass() {
+ return Promise.resolve(typeof options.pass === 'function'
+ ? options.pass()
+ : options.pass
+ )
+ }
+
+ function NoData() {
+ result.statement = query.statement
+ result.statement.columns = []
+ if (query.onlyDescribe)
+ return (query.resolve(query.statement), write(Sync))
+ }
+
+ function BackendKeyData(x) {
+ backend.pid = x.readUInt32BE(5)
+ backend.secret = x.readUInt32BE(9)
+ }
+
+ async function fetchArrayTypes() {
+ needsTypes = false
+ const types = await new Query([`
+ select b.oid, b.typarray
+ from pg_catalog.pg_type a
+ left join pg_catalog.pg_type b on b.oid = a.typelem
+ where a.typcategory = 'A'
+ group by b.oid, b.typarray
+ order by b.oid
+ `], [], execute)
+ types.forEach(({ oid, typarray }) => addArrayType(oid, typarray))
+ }
+
+ function addArrayType(oid, typarray) {
+ const parser = options.parsers[oid]
+ options.shared.typeArrayMap[oid] = typarray
+ options.parsers[typarray] = (xs) => arrayParser(xs, parser)
+ options.parsers[typarray].array = true
+ options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid])
+ }
+
+ function tryNext(x, xs) {
+ return (
+ (x === 'read-write' && xs.default_transaction_read_only === 'on') ||
+ (x === 'read-only' && xs.default_transaction_read_only === 'off') ||
+ (x === 'primary' && xs.in_hot_standby === 'off') ||
+ (x === 'standby' && xs.in_hot_standby === 'on') ||
+ (x === 'prefer-standby' && xs.in_hot_standby === 'off' && options.host[retries])
+ )
+ }
+
+ function fetchState() {
+ const query = new Query([`
+ show transaction_read_only;
+ select pg_catalog.pg_is_in_recovery()
+ `], [], execute, null, { simple: true })
+ query.resolve = ([[a], [b]]) => {
+ backendParameters.default_transaction_read_only = a.transaction_read_only
+ backendParameters.in_hot_standby = b.pg_is_in_recovery ? 'on' : 'off'
+ }
+ query.execute()
+ }
+
+ function ErrorResponse(x) {
+ query && (query.cursorFn || query.describeFirst) && write(Sync)
+ const error = Errors.postgres(parseError(x))
+ query && query.retried
+ ? errored(query.retried)
+ : query && retryRoutines.has(error.routine)
+ ? retry(query, error)
+ : errored(error)
+ }
+
+ function retry(q, error) {
+ delete statements[q.signature]
+ q.retried = error
+ execute(q)
+ }
+
+ function NotificationResponse(x) {
+ if (!onnotify)
+ return
+
+ let index = 9
+ while (x[index++] !== 0);
+ onnotify(
+ x.toString('utf8', 9, index - 1),
+ x.toString('utf8', index, x.length - 1)
+ )
+ }
+
+ async function PortalSuspended() {
+ try {
+ const x = await Promise.resolve(query.cursorFn(result))
+ rows = 0
+ x === CLOSE
+ ? write(Close(query.portal))
+ : (result = new Result(), write(Execute('', query.cursorRows)))
+ } catch (err) {
+ write(Sync)
+ query.reject(err)
+ }
+ }
+
+ function CloseComplete() {
+ result.count && query.cursorFn(result)
+ query.resolve(result)
+ }
+
+ function CopyInResponse() {
+ stream = new Stream.Writable({
+ write(chunk, encoding, callback) {
+ socket.write(b().d().raw(chunk).end(), callback)
+ },
+ destroy(error, callback) {
+ callback(error)
+ socket.write(b().f().str(error + b.N).end())
+ },
+ final(callback) {
+ socket.write(b().c().end())
+ final = callback
+ }
+ })
+ query.resolve(stream)
+ }
+
+ function CopyOutResponse() {
+ stream = new Stream.Readable({
+ read() { socket.resume() }
+ })
+ query.resolve(stream)
+ }
+
+ /* c8 ignore next 3 */
+ function CopyBothResponse() {
+ stream = new Stream.Duplex({
+ read() { socket.resume() },
+ /* c8 ignore next 11 */
+ write(chunk, encoding, callback) {
+ socket.write(b().d().raw(chunk).end(), callback)
+ },
+ destroy(error, callback) {
+ callback(error)
+ socket.write(b().f().str(error + b.N).end())
+ },
+ final(callback) {
+ socket.write(b().c().end())
+ final = callback
+ }
+ })
+ query.resolve(stream)
+ }
+
+ function CopyData(x) {
+ stream.push(x.slice(5)) || socket.pause()
+ }
+
+ function CopyDone() {
+ stream.push(null)
+ stream = null
+ }
+
+ function NoticeResponse(x) {
+ onnotice
+ ? onnotice(parseError(x))
+ : console.log(parseError(x)) // eslint-disable-line
+  }
+
+ /* c8 ignore next 3 */
+ function EmptyQueryResponse() {
+ /* noop */
+ }
+
+ /* c8 ignore next 3 */
+ function FunctionCallResponse() {
+ errored(Errors.notSupported('FunctionCallResponse'))
+ }
+
+ /* c8 ignore next 3 */
+ function NegotiateProtocolVersion() {
+ errored(Errors.notSupported('NegotiateProtocolVersion'))
+ }
+
+ /* c8 ignore next 3 */
+ function UnknownMessage(x) {
+ console.error('Postgres.js : Unknown Message:', x[0]) // eslint-disable-line
+ }
+
+ /* c8 ignore next 3 */
+ function UnknownAuth(x, type) {
+ console.error('Postgres.js : Unknown Auth:', type) // eslint-disable-line
+ }
+
+ /* Messages */
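+  // Bind: portal and statement names, zero parameter format codes (all
+  // text), then each parameter as a 4-byte length (-1 for null) plus its
+  // serialized value, and finally zero result format codes.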
+ function Bind(parameters, types, statement = '', portal = '') {
+ let prev
+ , type
+
+ b().B().str(portal + b.N).str(statement + b.N).i16(0).i16(parameters.length)
+
+ parameters.forEach((x, i) => {
+ if (x === null)
+ return b.i32(0xFFFFFFFF)
+
+ type = types[i]
+ parameters[i] = x = type in options.serializers
+ ? options.serializers[type](x)
+ : '' + x
+
+ prev = b.i
+ b.inc(4).str(x).i32(b.i - prev - 4, prev)
+ })
+
+ b.i16(0)
+
+ return b.end()
+ }
+
+ function Parse(str, parameters, types, name = '') {
+ b().P().str(name + b.N).str(str + b.N).i16(parameters.length)
+ parameters.forEach((x, i) => b.i32(types[i] || 0))
+ return b.end()
+ }
+
+ function Describe(x, name = '') {
+ return b().D().str(x).str(name + b.N).end()
+ }
+
+ function Execute(portal = '', rows = 0) {
+ return Buffer.concat([
+ b().E().str(portal + b.N).i32(rows).end(),
+ Flush
+ ])
+ }
+
+ function Close(portal = '') {
+ return Buffer.concat([
+ b().C().str('P').str(portal + b.N).end(),
+ b().S().end()
+ ])
+ }
+
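+  // StartupMessage: a 4-byte length placeholder, protocol version 3.0
+  // (int16 major, int16 minor), then null-separated key/value pairs
+  // (user, database, application_name, ...) and a final terminating zero.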
+ function StartupMessage() {
+ return b().inc(4).i16(3).z(2).str(
+ Object.entries(Object.assign({
+ user,
+ database,
+ client_encoding: '\'utf-8\''
+ },
+ options.connection
+ )).filter(([, v]) => v).map(([k, v]) => k + b.N + v).join(b.N)
+ ).z(2).end(0)
+ }
+
+}
+
+function parseError(x) {
+ const error = {}
+ let start = 5
+ for (let i = 5; i < x.length - 1; i++) {
+ if (x[i] === 0) {
+ error[errorFields[x[start]]] = x.toString('utf8', start + 1, i)
+ start = i + 1
+ }
+ }
+ return error
+}
+
+function md5(x) {
+ return crypto.createHash('md5').update(x).digest('hex')
+}
+
+function hmac(key, x) {
+ return crypto.createHmac('sha256', key).update(x).digest()
+}
+
+function sha256(x) {
+ return crypto.createHash('sha256').update(x).digest()
+}
+
+function xor(a, b) {
+ const length = Math.max(a.length, b.length)
+ const buffer = Buffer.allocUnsafe(length)
+ for (let i = 0; i < length; i++)
+ buffer[i] = a[i] ^ b[i]
+ return buffer
+}
+
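+// Shared helper for the idle, lifetime and connect timers: a falsy seconds
+// value disables the timer, and the timeout is unref'ed so a pending timer
+// never keeps the process alive on its own.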
+function timer(fn, seconds) {
+ seconds = typeof seconds === 'function' ? seconds() : seconds
+ if (!seconds)
+ return { cancel: noop, start: noop }
+
+ let timer
+ return {
+ cancel() {
+ timer && (clearTimeout(timer), timer = null)
+ },
+ start() {
+ timer && clearTimeout(timer)
+ timer = setTimeout(done, seconds * 1000, arguments).unref()
+ }
+ }
+
+ function done(args) {
+ fn.apply(null, args)
+ timer = null
+ }
+}
diff --git a/cjs/src/errors.js b/cjs/src/errors.js
new file mode 100644
index 00000000..ef66149a
--- /dev/null
+++ b/cjs/src/errors.js
@@ -0,0 +1,53 @@
+const PostgresError = module.exports.PostgresError = class PostgresError extends Error {
+ constructor(x) {
+ super(x.message)
+ this.name = this.constructor.name
+ Object.assign(this, x)
+ }
+}
+
+const Errors = module.exports.Errors = {
+ connection,
+ postgres,
+ generic,
+ notSupported
+}
+
+function connection(x, options, socket) {
+ const { host, port } = socket || options
+ const error = Object.assign(
+ new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
+ {
+ code: x,
+ errno: x,
+ address: options.path || host
+ }, options.path ? {} : { port: port }
+ )
+ Error.captureStackTrace(error, connection)
+ return error
+}
+
+function postgres(x) {
+ const error = new PostgresError(x)
+ Error.captureStackTrace(error, postgres)
+ return error
+}
+
+function generic(code, message) {
+ const error = Object.assign(new Error(code + ': ' + message), { code })
+ Error.captureStackTrace(error, generic)
+ return error
+}
+
+/* c8 ignore next 10 */
+function notSupported(x) {
+ const error = Object.assign(
+ new Error(x + ' (B) is not supported'),
+ {
+ code: 'MESSAGE_NOT_SUPPORTED',
+ name: x
+ }
+ )
+ Error.captureStackTrace(error, notSupported)
+ return error
+}
diff --git a/cjs/src/index.js b/cjs/src/index.js
new file mode 100644
index 00000000..816b2678
--- /dev/null
+++ b/cjs/src/index.js
@@ -0,0 +1,537 @@
+const os = require('os')
+const fs = require('fs')
+const Stream = require('stream')
+
+const {
+ mergeUserTypes,
+ inferType,
+ Parameter,
+ Identifier,
+ Builder,
+ toPascal,
+ toCamel,
+ toKebab,
+ fromPascal,
+ fromCamel,
+ fromKebab
+} = require('./types.js')
+
+const Connection = require('./connection.js')
+const { Query, CLOSE } = require('./query.js')
+const Queue = require('./queue.js')
+const { Errors, PostgresError } = require('./errors.js')
+const Subscribe = require('./subscribe.js')
+
+Object.assign(Postgres, {
+ PostgresError,
+ toPascal,
+ toCamel,
+ toKebab,
+ fromPascal,
+ fromCamel,
+ fromKebab,
+ BigInt
+})
+
+module.exports = Postgres
+
+function Postgres(a, b) {
+ const options = parseOptions(a, b)
+ , subscribe = Subscribe(Postgres, { ...options })
+
+ let ending = false
+
+ const queries = Queue()
+ , connections = [...Array(options.max)].map(() => Connection(options, { onopen, onend, ondrain, onclose }))
+ , closed = Queue(connections)
+ , reserved = Queue()
+ , open = Queue()
+ , busy = Queue()
+ , full = Queue()
+ , ended = Queue()
+ , connecting = Queue()
+ , queues = { closed, ended, connecting, reserved, open, busy, full }
+
+ const sql = Sql(handler)
+
+ Object.assign(sql, {
+ get parameters() { return options.parameters },
+ largeObject,
+ subscribe,
+ CLOSE,
+ END: CLOSE,
+ PostgresError,
+ options,
+ listen,
+ notify,
+ begin,
+ end
+ })
+
+ return sql
+
+ function Sql(handler, instant) {
+ handler.debug = options.debug
+
+ Object.entries(options.types).reduce((acc, [name, type]) => {
+ acc[name] = (x) => new Parameter(x, type.to)
+ return acc
+ }, typed)
+
+ Object.assign(sql, {
+ types: typed,
+ typed,
+ unsafe,
+ array,
+ json,
+ file
+ })
+
+ return sql
+
+ function typed(value, type) {
+ return new Parameter(value, type)
+ }
+
+ function sql(strings, ...args) {
+ const query = strings && Array.isArray(strings.raw)
+ ? new Query(strings, args, handler, cancel)
+ : typeof strings === 'string' && !args.length
+ ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
+ : new Builder(strings, args)
+ instant && query instanceof Query && query.execute()
+ return query
+ }
+
+ function unsafe(string, args = [], options = {}) {
+ arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+ const query = new Query([string], args, handler, cancel, {
+ prepare: false,
+ ...options,
+ simple: 'simple' in options ? options.simple : args.length === 0
+ })
+ instant && query.execute()
+ return query
+ }
+
+ function file(path, args = [], options = {}) {
+ arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+ const query = new Query([], args, (query) => {
+ fs.readFile(path, 'utf8', (err, string) => {
+ if (err)
+ return query.reject(err)
+
+ query.strings = [string]
+ handler(query)
+ })
+ }, cancel, {
+ ...options,
+ simple: 'simple' in options ? options.simple : args.length === 0
+ })
+ instant && query.execute()
+ return query
+ }
+ }
+
+ async function listen(name, fn) {
+ const sql = listen.sql || (listen.sql = Postgres({
+ ...options,
+ max: 1,
+ idle_timeout: null,
+ max_lifetime: null,
+ fetch_types: false,
+ onclose() {
+ Object.entries(listen.channels).forEach(([channel, { listeners }]) => {
+ delete listen.channels[channel]
+ Promise.all(listeners.map(fn => listen(channel, fn).catch(() => { /* noop */ })))
+ })
+ },
+ onnotify(c, x) {
+ c in listen.channels && listen.channels[c].listeners.forEach(fn => fn(x))
+ }
+ }))
+
+ const channels = listen.channels || (listen.channels = {})
+ , exists = name in channels
+ , channel = exists ? channels[name] : (channels[name] = { listeners: [fn] })
+
+ if (exists) {
+ channel.listeners.push(fn)
+ return Promise.resolve({ ...channel.result, unlisten })
+ }
+
+ channel.result = await sql`listen ${ sql(name) }`
+ channel.result.unlisten = unlisten
+
+ return channel.result
+
+ async function unlisten() {
+ if (name in channels === false)
+ return
+
+ channel.listeners = channel.listeners.filter(x => x !== fn)
+ if (channels[name].listeners.length)
+ return
+
+ delete channels[name]
+ return sql`unlisten ${ sql(name) }`
+ }
+ }
+
+ async function notify(channel, payload) {
+ return await sql`select pg_notify(${ channel }, ${ '' + payload })`
+ }
+
+ async function begin(options, fn) {
+ !fn && (fn = options, options = '')
+ const queries = Queue()
+ let savepoints = 0
+ , connection
+
+    await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute })
+    return await scope(connection, fn)
+
+ async function scope(c, fn, name) {
+ const sql = Sql(handler, true)
+ sql.savepoint = savepoint
+ let errored
+ name && await sql`savepoint ${ sql(name) }`
+ try {
+ const result = await new Promise((resolve, reject) => {
+ errored = reject
+ const x = fn(sql)
+ Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
+ })
+ !name && await sql`commit`
+ return result
+ } catch (e) {
+ await (name
+ ? sql`rollback to ${ sql(name) }`
+ : sql`rollback`
+ )
+ throw e
+ }
+
+ function savepoint(name, fn) {
+ if (name && Array.isArray(name.raw))
+ return savepoint(sql => sql.apply(sql, arguments))
+
+ arguments.length === 1 && (fn = name, name = null)
+ return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
+ }
+
+ function handler(q) {
+ errored && q.catch(errored)
+ c.state === 'full'
+ ? queries.push(q)
+ : c.execute(q) || (c.state = 'full', full.push(c))
+ }
+ }
+
+ function onexecute(c) {
+ queues[c.state].remove(c)
+ c.state = 'reserved'
+ c.reserved = () => queries.length
+ ? c.execute(queries.shift())
+ : c.state = 'reserved'
+ reserved.push(c)
+ connection = c
+ }
+ }
+
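+  // The default mode is INV_WRITE (0x00020000) | INV_READ (0x00040000) as
+  // defined by libpq's lo_open. Large object descriptors are only valid
+  // inside a transaction, hence the wrapping sql.begin below.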
+ function largeObject(oid, mode = 0x00020000 | 0x00040000) {
+ return new Promise(async(resolve, reject) => {
+ await sql.begin(async sql => {
+ let finish
+ !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
+ const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
+
+ const lo = {
+ writable,
+ readable,
+ close : () => sql`select lo_close(${ fd })`.then(finish),
+ tell : () => sql`select lo_tell64(${ fd })`,
+ read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
+ write : (x) => sql`select lowrite(${ fd }, ${ x })`,
+ truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
+ seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
+ size : () => sql`
+ select
+ lo_lseek64(${ fd }, location, 0) as position,
+ seek.size
+ from (
+ select
+ lo_lseek64($1, 0, 2) as size,
+ tell.location
+ from (select lo_tell64($1) as location) tell
+ ) seek
+ `
+ }
+
+ resolve(lo)
+
+ return new Promise(async r => finish = r)
+
+ async function readable({
+ highWaterMark = 2048 * 8,
+ start = 0,
+ end = Infinity
+ } = {}) {
+ let max = end - start
+ start && await lo.seek(start)
+ return new Stream.Readable({
+ highWaterMark,
+ async read(size) {
+ const l = size > max ? size - max : size
+ max -= size
+ const [{ data }] = await lo.read(l)
+ this.push(data)
+ if (data.length < size)
+ this.push(null)
+ }
+ })
+ }
+
+ async function writable({
+ highWaterMark = 2048 * 8,
+ start = 0
+ } = {}) {
+ start && await lo.seek(start)
+ return new Stream.Writable({
+ highWaterMark,
+ write(chunk, encoding, callback) {
+ lo.write(chunk).then(() => callback(), callback)
+ }
+ })
+ }
+ }).catch(reject)
+ })
+ }
+
+ function json(x) {
+ return new Parameter(x, 3802)
+ }
+
+ function array(x, type) {
+ if (!Array.isArray(x))
+ return array(Array.from(arguments))
+
+ return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
+ }
+
+ function handler(query) {
+ if (ending)
+ return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
+
+ if (open.length)
+ return go(open, query)
+
+ if (closed.length)
+ return connect(closed.shift(), query)
+
+ busy.length
+ ? go(busy, query)
+ : queries.push(query)
+ }
+
+ function go(xs, query) {
+ const c = xs.shift()
+ return c.execute(query)
+ ? (c.state = 'busy', busy.push(c))
+ : (c.state = 'full', full.push(c))
+ }
+
+ function cancel(query) {
+ return new Promise((resolve, reject) => {
+ query.state
+ ? query.active
+ ? Connection(options, {}).cancel(query.state, resolve, reject)
+ : query.cancelled = { resolve, reject }
+ : (
+ queries.remove(query),
+ query.cancelled = true,
+ query.reject(Errors.generic('57014', 'canceling statement due to user request')),
+ resolve()
+ )
+ })
+ }
+
+ async function end({ timeout = null } = {}) {
+ if (ending)
+ return ending
+
+ await 1
+ let timer
+ return ending = Promise.race([
+ new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
+ Promise.all(connections.map(c => c.end()).concat(
+ listen.sql ? listen.sql.end({ timeout: 0 }) : [],
+ subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
+ ))
+ ]).then(() => clearTimeout(timer))
+ }
+
+ async function destroy(resolve) {
+ await Promise.all(connections.map(c => c.terminate()))
+ while (queries.length)
+ queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
+ resolve()
+ }
+
+ function connect(c, query) {
+ c.state = 'connecting'
+ connecting.push(c)
+ c.connect(query)
+ }
+
+ function onend(c) {
+ queues[c.state].remove(c)
+ c.state = 'ended'
+ ended.push(c)
+ }
+
+ function onopen(c) {
+ queues[c.state].remove(c)
+ if (queries.length === 0)
+ return (c.state = 'open', open.push(c))
+
+ let max = Math.ceil(queries.length / (connecting.length + 1))
+ , ready = true
+
+ while (ready && queries.length && max-- > 0)
+ ready = c.execute(queries.shift())
+
+ ready
+ ? (c.state = 'busy', busy.push(c))
+ : (c.state = 'full', full.push(c))
+ }
+
+ function ondrain(c) {
+ full.remove(c)
+ onopen(c)
+ }
+
+ function onclose(c) {
+ queues[c.state].remove(c)
+ c.state = 'closed'
+ c.reserved = null
+ options.onclose && options.onclose(c.id)
+ queries.length
+ ? connect(c, queries.shift())
+ : queues.closed.push(c)
+ }
+}
+
+function parseOptions(a, b) {
+ if (a && a.shared)
+ return a
+
+ const env = process.env // eslint-disable-line
+ , o = (typeof a === 'string' ? b : a) || {}
+    , { url, multihost } = parseUrl(a)
+ , query = url.searchParams
+ , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
+ , port = o.port || url.port || env.PGPORT || 5432
+ , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
+
+ return Object.assign({
+ host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
+ port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
+ path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
+ database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
+ user : user,
+ pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
+ max : o.max || query.get('max') || 10,
+ types : o.types || {},
+ ssl : o.ssl || parseSSL(query.get('sslmode') || query.get('ssl')) || false,
+ idle_timeout : o.idle_timeout || query.get('idle_timeout') || env.PGIDLE_TIMEOUT || warn(o.timeout),
+ connect_timeout : o.connect_timeout || query.get('connect_timeout') || env.PGCONNECT_TIMEOUT || 30,
+ max_lifetime : o.max_lifetime || url.max_lifetime || max_lifetime,
+ max_pipeline : o.max_pipeline || url.max_pipeline || 100,
+ backoff : o.backoff || url.backoff || backoff,
+ keep_alive : o.keep_alive || url.keep_alive || 60,
+ prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true,
+ onnotice : o.onnotice,
+ onnotify : o.onnotify,
+ onclose : o.onclose,
+ onparameter : o.onparameter,
+ transform : parseTransform(o.transform || {}),
+ connection : Object.assign({ application_name: 'postgres.js' }, o.connection),
+ target_session_attrs: tsa(o, url, env),
+ debug : o.debug,
+ fetch_types : 'fetch_types' in o ? o.fetch_types : true,
+ parameters : {},
+ shared : { retries: 0, typeArrayMap: {} }
+ },
+ mergeUserTypes(o.types)
+ )
+}
+
+function tsa(o, url, env) {
+ const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
+ if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
+ return x
+
+ throw new Error('target_session_attrs ' + x + ' is not supported')
+}
+
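+// Default reconnect backoff: exponential in the retry count
+// (3^retries / 100 seconds, capped at 20s) with a random 0.5x-1x jitter.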
+function backoff(retries) {
+ return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
+}
+
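+// Default max_lifetime: a random 30-60 minutes, so pooled connections
+// don't all expire and reconnect at the same moment.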
+function max_lifetime() {
+ return 60 * (30 + Math.random() * 30)
+}
+
+function parseTransform(x) {
+ return {
+ column: {
+ from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
+ to: x.column && x.column.to
+ },
+ value: {
+ from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
+ to: x.value && x.value.to
+ },
+ row: {
+ from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
+ to: x.row && x.row.to
+ }
+ }
+}
+
+function parseSSL(x) {
+ return x !== 'disable' && x !== 'false' && x
+}
+
+function parseUrl(url) {
+ if (typeof url !== 'string')
+ return { url: { searchParams: new Map() } }
+
+ let host = url
+ host = host.slice(host.indexOf('://') + 3)
+ host = host.split(/[?/]/)[0]
+ host = host.slice(host.indexOf('@') + 1)
+
+ return {
+ url: new URL(url.replace(host, host.split(',')[0])),
+ multihost: host.indexOf(',') > -1 && host
+ }
+}
+
+function warn(x) {
+ typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line
+ return x
+}
+
+function osUsername() {
+ try {
+ return os.userInfo().username // eslint-disable-line
+ } catch (_) {
+ return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
+ }
+}
diff --git a/cjs/src/query.js b/cjs/src/query.js
new file mode 100644
index 00000000..56643a40
--- /dev/null
+++ b/cjs/src/query.js
@@ -0,0 +1,161 @@
+const originCache = new Map()
+ , originStackCache = new Map()
+ , originError = Symbol('OriginError')
+
+const CLOSE = module.exports.CLOSE = {}
+const Query = module.exports.Query = class Query extends Promise {
+ constructor(strings, args, handler, canceller, options = {}) {
+ let resolve
+ , reject
+
+ super((a, b) => {
+ resolve = a
+ reject = b
+ })
+
+ this.tagged = Array.isArray(strings.raw)
+ this.strings = strings
+ this.args = args
+ this.handler = handler
+ this.canceller = canceller
+ this.options = options
+
+ this.state = null
+ this.statement = null
+
+ this.resolve = x => (this.active = false, resolve(x))
+ this.reject = x => (this.active = false, reject(x))
+
+ this.active = false
+ this.cancelled = null
+ this.executed = false
+ this.signature = ''
+
+ this[originError] = handler.debug || !this.tagged
+ ? new Error()
+ : cachedError(this.strings)
+ }
+
+ get origin() {
+ return this.handler.debug || !this.tagged
+ ? this[originError].stack
+ : originStackCache.has(this.strings)
+ ? originStackCache.get(this.strings)
+ : originStackCache.set(this.strings, this[originError].stack).get(this.strings)
+ }
+
+ static get [Symbol.species]() {
+ return Promise
+ }
+
+ cancel() {
+ return this.canceller && (this.canceller(this), this.canceller = null)
+ }
+
+ async readable() {
+ this.options.simple = true
+ this.options.prepare = false
+ this.streaming = true
+ return this
+ }
+
+ async writable() {
+ this.options.simple = true
+ this.options.prepare = false
+ this.streaming = true
+ return this
+ }
+
+ cursor(rows = 1, fn) {
+ this.options.simple = false
+ if (typeof rows === 'function') {
+ fn = rows
+ rows = 1
+ }
+
+ this.cursorRows = rows
+
+ if (typeof fn === 'function')
+ return (this.cursorFn = fn, this)
+
+ let prev
+ return {
+ [Symbol.asyncIterator]: () => ({
+ next: () => {
+ if (this.executed && !this.active)
+ return { done: true }
+
+ prev && prev()
+ const promise = new Promise((resolve, reject) => {
+ this.cursorFn = value => {
+ resolve({ value, done: false })
+ return new Promise(r => prev = r)
+ }
+ this.resolve = () => (this.active = false, resolve({ done: true }))
+ this.reject = x => (this.active = false, reject(x))
+ })
+ this.execute()
+ return promise
+ },
+ return() {
+ prev && prev(CLOSE)
+ return { done: true }
+ }
+ })
+ }
+ }
+
+ describe() {
+ this.onlyDescribe = true
+ return this
+ }
+
+ stream() {
+ throw new Error('.stream has been renamed to .forEach')
+ }
+
+ forEach(fn) {
+ this.forEachFn = fn
+ return this
+ }
+
+ raw() {
+ this.isRaw = true
+ return this
+ }
+
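+  // Queries are lazy: nothing is sent to the server until the query is
+  // awaited (then/catch/finally) or .execute() is called, and the executed
+  // flag ensures the handler only ever runs once.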
+ async handle() {
+ !this.executed && (this.executed = true) && await 1 && this.handler(this)
+ }
+
+ execute() {
+ this.handle()
+ return this
+ }
+
+ then() {
+ this.handle()
+ return super.then.apply(this, arguments)
+ }
+
+ catch() {
+ this.handle()
+ return super.catch.apply(this, arguments)
+ }
+
+ finally() {
+ this.handle()
+ return super.finally.apply(this, arguments)
+ }
+}
+
+function cachedError(xs) {
+ if (originCache.has(xs))
+ return originCache.get(xs)
+
+ const x = Error.stackTraceLimit
+ Error.stackTraceLimit = 4
+ originCache.set(xs, new Error())
+ Error.stackTraceLimit = x
+ return originCache.get(xs)
+}
diff --git a/lib/queue.js b/cjs/src/queue.js
similarity index 57%
rename from lib/queue.js
rename to cjs/src/queue.js
index 7a6f2b46..8438f5da 100644
--- a/lib/queue.js
+++ b/cjs/src/queue.js
@@ -1,15 +1,20 @@
module.exports = Queue
-function Queue() {
- let xs = []
+function Queue(initial = []) {
+ let xs = initial.slice()
let index = 0
return {
get length() {
return xs.length - index
},
- push: (x) => xs.push(x),
- peek: () => xs[index],
+ remove: (x) => {
+ const index = xs.indexOf(x)
+ return index === -1
+ ? null
+ : (xs.splice(index, 1), x)
+ },
+ push: (x) => (xs.push(x), x),
shift: () => {
const out = xs[index++]
diff --git a/cjs/src/result.js b/cjs/src/result.js
new file mode 100644
index 00000000..6146daa2
--- /dev/null
+++ b/cjs/src/result.js
@@ -0,0 +1,16 @@
+module.exports = class Result extends Array {
+ constructor() {
+ super()
+ Object.defineProperties(this, {
+ count: { value: null, writable: true },
+ state: { value: null, writable: true },
+ command: { value: null, writable: true },
+ columns: { value: null, writable: true },
+ statement: { value: null, writable: true }
+ })
+ }
+
+ static get [Symbol.species]() {
+ return Array
+ }
+}
diff --git a/cjs/src/subscribe.js b/cjs/src/subscribe.js
new file mode 100644
index 00000000..a0f9dba7
--- /dev/null
+++ b/cjs/src/subscribe.js
@@ -0,0 +1,231 @@
+module.exports = Subscribe
+
+function Subscribe(postgres, options) {
+ const listeners = new Map()
+
+ let connection
+
+ return async function subscribe(event, fn) {
+ event = parseEvent(event)
+
+ options.max = 1
+ options.onclose = onclose
+ options.connection = {
+ ...options.connection,
+ replication: 'database'
+ }
+
+ let stream
+ , ended = false
+
+ const sql = postgres(options)
+ , slot = 'postgresjs_' + Math.random().toString(36).slice(2)
+ , end = sql.end
+
+ sql.end = async() => {
+ ended = true
+ stream && (await new Promise(r => (stream.once('end', r), stream.end())))
+ return end()
+ }
+
+ !connection && (subscribe.sql = sql, connection = init(sql, slot, options.publications))
+
+    const fns = listeners.has(event)
+      ? listeners.get(event).add(fn)
+      : listeners.set(event, new Set([fn])).get(event)
+
+ const unsubscribe = () => {
+ fns.delete(fn)
+ fns.size === 0 && listeners.delete(event)
+ }
+
+ return connection.then(x => (stream = x, { unsubscribe }))
+
+ async function onclose() {
+ stream = null
+ !ended && (stream = await init(sql, slot, options.publications))
+ }
+ }
+
+ async function init(sql, slot, publications = 'alltables') {
+ if (!publications)
+ throw new Error('Missing publication names')
+
+ const [x] = await sql.unsafe(
+ `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
+ )
+
+ const stream = await sql.unsafe(
+ `START_REPLICATION SLOT ${ slot } LOGICAL ${
+ x.consistent_point
+ } (proto_version '1', publication_names '${ publications }')`
+ ).writable()
+
+ const state = {
+ lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex')))
+ }
+
+ stream.on('data', data)
+ stream.on('error', (error) => {
+ console.error('Logical Replication Error - Reconnecting', error)
+ sql.end()
+ })
+
+ return stream
+
+ function data(x) {
+ if (x[0] === 0x77)
+ parse(x.slice(25), state, sql.options.parsers, handle)
+ else if (x[0] === 0x6b && x[17])
+ pong()
+ }
+
+ function handle(a, b) {
+ const path = b.relation.schema + '.' + b.relation.table
+ call('*', a, b)
+ call('*:' + path, a, b)
+ b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
+ call(b.command, a, b)
+ call(b.command + ':' + path, a, b)
+ b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
+ }
+
+ function pong() {
+ const x = Buffer.alloc(34)
+ x[0] = 'r'.charCodeAt(0)
+ x.fill(state.lsn, 1)
+ x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25)
+ stream.write(x)
+ }
+ }
+
+ function call(x, a, b) {
+ listeners.has(x) && listeners.get(x).forEach(fn => fn(a, b, x))
+ }
+}
+
+function Time(x) {
+ return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000)))
+}
+
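+// Decodes pgoutput logical replication messages, dispatching on the 1-byte
+// message type (R relation, B begin, I insert, U update, D delete,
+// T truncate, C commit).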
+function parse(x, state, parsers, handle) {
+ const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
+
+ Object.entries({
+ R: x => { // Relation
+ let i = 1
+ const r = state[x.readUInt32BE(i)] = {
+ schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog',
+ table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))),
+ columns: Array(x.readUInt16BE(i += 2)),
+ keys: []
+ }
+ i += 2
+
+ let columnIndex = 0
+ , column
+
+ while (i < x.length) {
+ column = r.columns[columnIndex++] = {
+ key: x[i++],
+ name: String(x.slice(i, i = x.indexOf(0, i))),
+ type: x.readUInt32BE(i += 1),
+ parser: parsers[x.readUInt32BE(i)],
+ atttypmod: x.readUInt32BE(i += 4)
+ }
+
+ column.key && r.keys.push(column)
+ i += 4
+ }
+ },
+ Y: () => { /* noop */ }, // Type
+ O: () => { /* noop */ }, // Origin
+ B: x => { // Begin
+ state.date = Time(x.readBigInt64BE(9))
+ state.lsn = x.slice(1, 9)
+ },
+ I: x => { // Insert
+ let i = 1
+ const relation = state[x.readUInt32BE(i)]
+ const row = {}
+ tuples(x, row, relation.columns, i += 7)
+
+ handle(row, {
+ command: 'insert',
+ relation
+ })
+ },
+ D: x => { // Delete
+ let i = 1
+ const relation = state[x.readUInt32BE(i)]
+ i += 4
+ const key = x[i] === 75
+ const row = key || x[i] === 79
+ ? {}
+ : null
+
+ tuples(x, row, key ? relation.keys : relation.columns, i += 3)
+
+ handle(row, {
+ command: 'delete',
+ relation,
+ key
+ })
+ },
+ U: x => { // Update
+ let i = 1
+ const relation = state[x.readUInt32BE(i)]
+ i += 4
+ const key = x[i] === 75
+ const old = key || x[i] === 79
+ ? {}
+ : null
+
+ old && (i = tuples(x, old, key ? relation.keys : relation.columns, ++i))
+
+ const row = {}
+ i = tuples(x, row, relation.columns, i += 3)
+
+ handle(row, {
+ command: 'update',
+ relation,
+ key,
+ old
+ })
+ },
+ T: () => { /* noop */ }, // Truncate,
+ C: () => { /* noop */ } // Commit
+ }).reduce(char, {})[x[0]](x)
+}
+
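+// Reads pgoutput TupleData: each column starts with a 1-byte kind,
+// 'n' (110) for null, 'u' (117) for an unchanged TOAST value; otherwise a
+// 4-byte length precedes the text value, run through the column parser.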
+function tuples(x, row, columns, xi) {
+ let type
+ , column
+
+ for (let i = 0; i < columns.length; i++) {
+ type = x[xi++]
+ column = columns[i]
+ row[column.name] = type === 110 // n
+ ? null
+ : type === 117 // u
+ ? undefined
+ : column.parser === undefined
+ ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
+ : column.parser.array === true
+ ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
+ : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
+ }
+
+ return xi
+}
+
+function parseEvent(x) {
+  const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i)
+
+ if (!xs)
+ throw new Error('Malformed subscribe pattern: ' + x)
+
+ const [, command, path, key] = xs
+
+ return (command || '*')
+ + (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '')
+ + (key ? '=' + key : '')
+}
diff --git a/cjs/src/types.js b/cjs/src/types.js
new file mode 100644
index 00000000..42657874
--- /dev/null
+++ b/cjs/src/types.js
@@ -0,0 +1,297 @@
+const { Query } = require('./query.js')
+const { Errors } = require('./errors.js')
+
+const types = module.exports.types = {
+ string: {
+ to: 25,
+ from: null, // defaults to string
+ serialize: x => '' + x
+ },
+ number: {
+ to: 0,
+ from: [21, 23, 26, 700, 701],
+ serialize: x => '' + x,
+ parse: x => +x
+ },
+ json: {
+ to: 114,
+ from: [114, 3802],
+ serialize: x => JSON.stringify(x),
+ parse: x => JSON.parse(x)
+ },
+ boolean: {
+ to: 16,
+ from: 16,
+ serialize: x => x === true ? 't' : 'f',
+ parse: x => x === 't'
+ },
+ date: {
+ to: 1184,
+ from: [1082, 1114, 1184],
+ serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
+ parse: x => new Date(x)
+ },
+ bytea: {
+ to: 17,
+ from: 17,
+ serialize: x => '\\x' + Buffer.from(x).toString('hex'),
+ parse: x => Buffer.from(x.slice(2), 'hex')
+ }
+}
+
+const BigInt = module.exports.BigInt = {
+ to: 1700,
+ from: [20, 701, 1700],
+ parse: x => BigInt(x), // eslint-disable-line
+ serialize: x => x.toString()
+}
+
+class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() } }
+
+const Identifier = module.exports.Identifier = class Identifier extends NotTagged {
+ constructor(value) {
+ super()
+ this.value = escapeIdentifier(value)
+ }
+}
+
+const Parameter = module.exports.Parameter = class Parameter extends NotTagged {
+ constructor(value, type, array) {
+ super()
+ this.value = value
+ this.type = type
+ this.array = array
+ }
+}
+
+const Builder = module.exports.Builder = class Builder extends NotTagged {
+ constructor(first, rest) {
+ super()
+ this.first = first
+ this.rest = rest
+ }
+
+ build(before, parameters, types, transform) {
+ const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
+ if (keyword.i === -1)
+ throw new Error('Could not infer helper mode')
+
+ return keyword.fn(this.first, this.rest, parameters, types, transform)
+ }
+}
+
+module.exports.handleValue = handleValue
+
+function handleValue(x, parameters, types) {
+ const value = x instanceof Parameter ? x.value : x
+ if (value === undefined)
+ throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
+
+ return '$' + (types.push(
+ x instanceof Parameter
+ ? (parameters.push(x.value), x.array
+ ? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
+ : x.type
+ )
+ : (parameters.push(x), inferType(x))
+ ))
+}
+
+const defaultHandlers = typeHandlers(types)
+
+function valuesBuilder(first, parameters, types, transform, columns) {
+ let value
+ return first.map(row =>
+ '(' + columns.map(column => {
+ value = row[column]
+ return (
+ value instanceof Query ? value.strings[0] :
+ value instanceof Identifier ? value.value :
+ handleValue(value, parameters, types)
+ )
+ }).join(',') + ')'
+ ).join(',')
+}
+
+function values(first, rest, parameters, types, transform) {
+ const multi = Array.isArray(first[0])
+ const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
+ return valuesBuilder(multi ? first : [first], parameters, types, transform, columns)
+}
+
+const builders = Object.entries({
+ values,
+ in: values,
+
+ update(first, rest, parameters, types, transform) {
+ return (rest.length ? rest.flat() : Object.keys(first)).map(x =>
+ escapeIdentifier(transform.column.to ? transform.column.to(x) : x) +
+ '=' + handleValue(first[x], parameters, types)
+ )
+ },
+
+ select(first, rest, parameters, types, transform) {
+ typeof first === 'string' && (first = [first].concat(rest))
+ if (Array.isArray(first))
+ return first.map(x => escapeIdentifier(transform.column.to ? transform.column.to(x) : x)).join(',')
+
+ let value
+ const columns = rest.length ? rest.flat() : Object.keys(first)
+ return columns.map(x => {
+ value = first[x]
+ return (
+ value instanceof Query ? value.strings[0] :
+ value instanceof Identifier ? value.value :
+ handleValue(value, parameters, types)
+ ) + ' as ' + escapeIdentifier(transform.column.to ? transform.column.to(x) : x)
+ }).join(',')
+ },
+
+ insert(first, rest, parameters, types, transform) {
+ const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first)
+ return '(' + columns.map(x =>
+ escapeIdentifier(transform.column.to ? transform.column.to(x) : x)
+ ).join(',') + ')values' +
+ valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns)
+ }
+}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn]))
+
+function notTagged() {
+ throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
+}
+
+const serializers = module.exports.serializers = defaultHandlers.serializers
+const parsers = module.exports.parsers = defaultHandlers.parsers
+
+const END = module.exports.END = {}
+
+function firstIsString(x) {
+ if (Array.isArray(x))
+ return firstIsString(x[0])
+ return typeof x === 'string' ? 1009 : 0
+}
+
+const mergeUserTypes = module.exports.mergeUserTypes = function(types) {
+ const user = typeHandlers(types || {})
+ return {
+ serializers: Object.assign({}, serializers, user.serializers),
+ parsers: Object.assign({}, parsers, user.parsers)
+ }
+}
+
+function typeHandlers(types) {
+ return Object.keys(types).reduce((acc, k) => {
+ types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
+ acc.serializers[types[k].to] = types[k].serialize
+ types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize)
+ return acc
+ }, { parsers: {}, serializers: {} })
+}
+
+const escapeIdentifier = module.exports.escapeIdentifier = function escape(str) {
+ return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
+}
+
+const inferType = module.exports.inferType = function inferType(x) {
+ return (
+ x instanceof Parameter ? x.type :
+ x instanceof Date ? 1184 :
+ x instanceof Uint8Array ? 17 :
+ (x === true || x === false) ? 16 :
+ typeof x === 'bigint' ? 1700 :
+ Array.isArray(x) ? inferType(x[0]) :
+ 0
+ )
+}
+
+const escapeBackslash = /\\/g
+const escapeQuote = /"/g
+
+function arrayEscape(x) {
+ return x
+ .replace(escapeBackslash, '\\\\')
+ .replace(escapeQuote, '\\"')
+}
+
+const arraySerializer = module.exports.arraySerializer = function arraySerializer(xs, serializer) {
+ if (Array.isArray(xs) === false)
+ return xs
+
+ if (!xs.length)
+ return '{}'
+
+ const first = xs[0]
+
+ if (Array.isArray(first) && !first.type)
+ return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}'
+
+ return '{' + xs.map(x =>
+ '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
+ ).join(',') + '}'
+}
+
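+// A single mutable state object is reused across arrayParser calls;
+// parsing is synchronous and single-threaded, so this saves an allocation
+// per parsed array at the cost of not being re-entrant.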
+const arrayParserState = {
+ i: 0,
+ char: null,
+ str: '',
+ quoted: false,
+ last: 0
+}
+
+const arrayParser = module.exports.arrayParser = function arrayParser(x, parser) {
+ arrayParserState.i = arrayParserState.last = 0
+ return arrayParserLoop(arrayParserState, x, parser)
+}
+
+function arrayParserLoop(s, x, parser) {
+ const xs = []
+ for (; s.i < x.length; s.i++) {
+ s.char = x[s.i]
+ if (s.quoted) {
+ if (s.char === '\\') {
+ s.str += x[++s.i]
+ } else if (s.char === '"') {
+ xs.push(parser ? parser(s.str) : s.str)
+ s.str = ''
+ s.quoted = x[s.i + 1] === '"'
+ s.last = s.i + 2
+ } else {
+ s.str += s.char
+ }
+ } else if (s.char === '"') {
+ s.quoted = true
+ } else if (s.char === '{') {
+ s.last = ++s.i
+ xs.push(arrayParserLoop(s, x, parser))
+ } else if (s.char === '}') {
+ s.quoted = false
+ s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
+ s.last = s.i + 1
+ break
+ } else if (s.char === ',' && s.p !== '}' && s.p !== '"') {
+ xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
+ s.last = s.i + 1
+ }
+ s.p = s.char
+ }
+ s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
+ return xs
+}
+
+const toCamel = module.exports.toCamel = x => {
+ let str = x[0]
+ for (let i = 1; i < x.length; i++)
+ str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
+ return str
+}
+
+const toPascal = module.exports.toPascal = x => {
+ let str = x[0].toUpperCase()
+ for (let i = 1; i < x.length; i++)
+ str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
+ return str
+}
+
+const toKebab = module.exports.toKebab = x => x.replace(/_/g, '-')
+
+const fromCamel = module.exports.fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
+const fromPascal = module.exports.fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
+const fromKebab = module.exports.fromKebab = x => x.replace(/-/g, '_')
diff --git a/cjs/tests/bootstrap.js b/cjs/tests/bootstrap.js
new file mode 100644
index 00000000..15295975
--- /dev/null
+++ b/cjs/tests/bootstrap.js
@@ -0,0 +1,29 @@
+const { spawn, spawnSync } = require('child_process')
+
+exec('psql', ['-c', 'alter system set ssl=on'])
+exec('psql', ['-c', 'create user postgres_js_test'])
+exec('psql', ['-c', 'alter system set password_encryption=md5'])
+exec('psql', ['-c', 'select pg_reload_conf()'])
+exec('psql', ['-c', 'create user postgres_js_test_md5 with password \'postgres_js_test_md5\''])
+exec('psql', ['-c', 'alter system set password_encryption=\'scram-sha-256\''])
+exec('psql', ['-c', 'select pg_reload_conf()'])
+exec('psql', ['-c', 'create user postgres_js_test_scram with password \'postgres_js_test_scram\''])
+
+exec('dropdb', ['postgres_js_test'])
+exec('createdb', ['postgres_js_test'])
+exec('psql', ['-c', 'grant all on database postgres_js_test to postgres_js_test'])
+
+module.exports.exec = exec
+
+function exec(cmd, args) {
+ const { stderr } = spawnSync(cmd, args, { stdio: 'pipe', encoding: 'utf8' })
+ if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist'))
+ throw stderr
+}
+
+async function execAsync(cmd, args) { // eslint-disable-line
+ let stderr = ''
+ const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) // eslint-disable-line
+ cp.stderr.on('data', x => stderr += x)
+ await new Promise(x => cp.on('exit', x))
+ if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist'))
+ throw new Error(stderr)
+}
diff --git a/cjs/tests/copy.csv b/cjs/tests/copy.csv
new file mode 100644
index 00000000..6622044e
--- /dev/null
+++ b/cjs/tests/copy.csv
@@ -0,0 +1,2 @@
+1 2 3
+4 5 6
diff --git a/cjs/tests/index.js b/cjs/tests/index.js
new file mode 100644
index 00000000..85508809
--- /dev/null
+++ b/cjs/tests/index.js
@@ -0,0 +1,1936 @@
+/* eslint no-console: 0 */
+
+const { exec } = require('./bootstrap.js')
+
+const { t, nt, ot } = require('./test.js') // eslint-disable-line
+const net = require('net')
+const fs = require('fs')
+const crypto = require('crypto')
+
+const postgres = require('../src/index.js')
+const delay = ms => new Promise(r => setTimeout(r, ms))
+
+const rel = x => require('path').join(__dirname, x)
+const idle_timeout = 1
+
+const login = {
+ user: 'postgres_js_test'
+}
+
+const login_md5 = {
+ user: 'postgres_js_test_md5',
+ pass: 'postgres_js_test_md5'
+}
+
+const login_scram = {
+ user: 'postgres_js_test_scram',
+ pass: 'postgres_js_test_scram'
+}
+
+const options = {
+ db: 'postgres_js_test',
+ user: login.user,
+ pass: login.pass,
+ idle_timeout,
+ connect_timeout: 1,
+ max: 1
+}
+
+const sql = postgres(options)
+
+t('Connects with no options', async() => {
+ const sql = postgres({ max: 1 })
+
+ const result = (await sql`select 1 as x`)[0].x
+ await sql.end()
+
+ return [1, result]
+})
+
+t('Uses default database without slash', async() => {
+ const sql = postgres('postgres://localhost')
+ return [sql.options.user, sql.options.database]
+})
+
+t('Uses default database with slash', async() => {
+ const sql = postgres('postgres://localhost/')
+ return [sql.options.user, sql.options.database]
+})
+
+t('Result is array', async() =>
+ [true, Array.isArray(await sql`select 1`)]
+)
+
+t('Result has count', async() =>
+ [1, (await sql`select 1`).count]
+)
+
+t('Result has command', async() =>
+ ['SELECT', (await sql`select 1`).command]
+)
+
+t('Create table', async() =>
+ ['CREATE TABLE', (await sql`create table test(int int)`).command, await sql`drop table test`]
+)
+
+t('Drop table', { timeout: 2 }, async() => {
+ await sql`create table test(int int)`
+ return ['DROP TABLE', (await sql`drop table test`).command]
+})
+
+t('null', async() =>
+ [null, (await sql`select ${ null } as x`)[0].x]
+)
+
+t('Integer', async() =>
+ ['1', (await sql`select ${ 1 } as x`)[0].x]
+)
+
+t('String', async() =>
+ ['hello', (await sql`select ${ 'hello' } as x`)[0].x]
+)
+
+t('Boolean false', async() =>
+ [false, (await sql`select ${ false } as x`)[0].x]
+)
+
+t('Boolean true', async() =>
+ [true, (await sql`select ${ true } as x`)[0].x]
+)
+
+t('Date', async() => {
+ const now = new Date()
+ return [0, now - (await sql`select ${ now } as x`)[0].x]
+})
+
+t('Json', async() => {
+ const x = (await sql`select ${ sql.json({ a: 'hello', b: 42 }) } as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('implicit json', async() => {
+ const x = (await sql`select ${ { a: 'hello', b: 42 } }::json as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('implicit jsonb', async() => {
+ const x = (await sql`select ${ { a: 'hello', b: 42 } }::jsonb as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('Empty array', async() =>
+ [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)]
+)
+
+t('String array', async() =>
+ ['123', (await sql`select ${ '{1,2,3}' }::int[] as x`)[0].x.join('')]
+)
+
+t('Array of Integer', async() =>
+ ['3', (await sql`select ${ sql.array([1, 2, 3]) } as x`)[0].x[2]]
+)
+
+t('Array of String', async() =>
+ ['c', (await sql`select ${ sql.array(['a', 'b', 'c']) } as x`)[0].x[2]]
+)
+
+t('Array of Date', async() => {
+ const now = new Date()
+ return [now.getTime(), (await sql`select ${ sql.array([now, now, now]) } as x`)[0].x[2].getTime()]
+})
+
+t('Nested array n2', async() =>
+ ['4', (await sql`select ${ sql.array([[1, 2], [3, 4]]) } as x`)[0].x[1][1]]
+)
+
+t('Nested array n3', async() =>
+ ['6', (await sql`select ${ sql.array([[[1, 2]], [[3, 4]], [[5, 6]]]) } as x`)[0].x[2][0][1]]
+)
+
+t('Escape in arrays', async() =>
+ ['Hello "you",c:\\windows', (await sql`select ${ sql.array(['Hello "you"', 'c:\\windows']) } as x`)[0].x.join(',')]
+)
+
+t('Escapes', async() => {
+ return ['hej"hej', Object.keys((await sql`select 1 as ${ sql('hej"hej') }`)[0])[0]]
+})
+
+t('null for int', async() => {
+ await sql`create table test (x int)`
+ return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`]
+})
+
+t('Throws on illegal transactions', async() => {
+ const sql = postgres({ ...options, max: 2, fetch_types: false })
+ const error = await sql`begin`.catch(e => e)
+ return [
+ error.code,
+ 'UNSAFE_TRANSACTION'
+ ]
+})
+
+t('Transaction throws', async() => {
+ await sql`create table test (a int)`
+ return ['22P02', await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql`insert into test values('hej')`
+ }).catch(x => x.code), await sql`drop table test`]
+})
+
+t('Transaction rolls back', async() => {
+ await sql`create table test (a int)`
+ await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql`insert into test values('hej')`
+ }).catch(() => { /* ignore */ })
+ return [0, (await sql`select a from test`).count, await sql`drop table test`]
+})
+
+t('Transaction throws on uncaught savepoint', async() => {
+ await sql`create table test (a int)`
+
+ return ['fail', (await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql.savepoint(async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('fail')
+ })
+ }).catch((err) => err.message)), await sql`drop table test`]
+})
+
+t('Transaction throws on uncaught named savepoint', async() => {
+ await sql`create table test (a int)`
+
+ return ['fail', (await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+    await sql.savepoint('watpoint', async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('fail')
+ })
+ }).catch(() => 'fail')), await sql`drop table test`]
+})
+
+t('Transaction succeeds on caught savepoint', async() => {
+ await sql`create table test (a int)`
+ await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql.savepoint(async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('please rollback')
+ }).catch(() => { /* ignore */ })
+ await sql`insert into test values(3)`
+ })
+
+ return ['2', (await sql`select count(1) from test`)[0].count, await sql`drop table test`]
+})
+
+t('Savepoint returns Result', async() => {
+ let result
+ await sql.begin(async sql => {
+ result = await sql.savepoint(sql =>
+ sql`select 1 as x`
+ )
+ })
+
+ return [1, result[0].x]
+})
+
+t('Transaction requests are executed implicitly', async() => {
+ const sql = postgres({ debug: true, idle_timeout: 1, fetch_types: false })
+ return [
+ 'testing',
+ (await sql.begin(async sql => {
+ sql`select set_config('postgres_js.test', 'testing', true)`
+ return await sql`select current_setting('postgres_js.test') as x`
+ }))[0].x
+ ]
+})
+
+t('Uncaught transaction request errors bubble to transaction', async() => [
+ '42703',
+ (await sql.begin(sql => (
+ sql`select wat`,
+ sql`select current_setting('postgres_js.test') as x, ${ 1 } as a`
+ )).catch(e => e.code))
+])
+
+t('Parallel transactions', async() => {
+ await sql`create table test (a int)`
+ return ['11', (await Promise.all([
+ sql.begin(sql => sql`select 1`),
+ sql.begin(sql => sql`select 1`)
+ ])).map(x => x.count).join(''), await sql`drop table test`]
+})
+
+t('Many transactions at beginning of connection', async() => {
+ const sql = postgres(options)
+ const xs = await Promise.all(Array.from({ length: 100 }, () => sql.begin(sql => sql`select 1`)))
+ return [100, xs.length]
+})
+
+t('Transactions array', async() => {
+ await sql`create table test (a int)`
+
+ return ['11', (await sql.begin(sql => [
+ sql`select 1`.then(x => x),
+ sql`select 1`
+ ])).map(x => x.count).join(''), await sql`drop table test`]
+})
+
+t('Transaction waits', async() => {
+ await sql`create table test (a int)`
+ await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql.savepoint(async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('please rollback')
+ }).catch(() => { /* ignore */ })
+ await sql`insert into test values(3)`
+ })
+
+ return ['11', (await Promise.all([
+ sql.begin(sql => sql`select 1`),
+ sql.begin(sql => sql`select 1`)
+ ])).map(x => x.count).join(''), await sql`drop table test`]
+})
+
+t('Helpers in Transaction', async() => {
+ return ['1', (await sql.begin(async sql =>
+ await sql`select ${ sql({ x: 1 }) }`
+ ))[0].x]
+})
+
+t('Undefined values throws', async() => {
+ let error
+
+ await sql`
+ select ${ undefined } as x
+ `.catch(x => error = x.code)
+
+ return ['UNDEFINED_VALUE', error]
+})
+
+t('Null sets to null', async() =>
+ [null, (await sql`select ${ null } as x`)[0].x]
+)
+
+t('Throw syntax error', async() =>
+ ['42601', (await sql`wat 1`.catch(x => x)).code]
+)
+
+t('Connect using uri', async() =>
+ [true, await new Promise((resolve, reject) => {
+ const sql = postgres('postgres://' + login.user + ':' + (login.pass || '') + '@localhost:5432/' + options.db, {
+ idle_timeout
+ })
+ sql`select 1`.then(() => resolve(true), reject)
+ })]
+)
+
+t('Fail with proper error on no host', async() =>
+ ['ECONNREFUSED', (await new Promise((resolve, reject) => {
+ const sql = postgres('postgres://localhost:33333/' + options.db, {
+ idle_timeout
+ })
+ sql`select 1`.then(reject, resolve)
+ })).code]
+)
+
+t('Connect using SSL', async() =>
+ [true, (await new Promise((resolve, reject) => {
+ postgres({
+ ssl: { rejectUnauthorized: false },
+ idle_timeout
+ })`select 1`.then(() => resolve(true), reject)
+ }))]
+)
+
+t('Connect using SSL require', async() =>
+ [true, (await new Promise((resolve, reject) => {
+ postgres({
+ ssl: 'require',
+ idle_timeout
+ })`select 1`.then(() => resolve(true), reject)
+ }))]
+)
+
+t('Connect using SSL prefer', async() => {
+ await exec('psql', ['-c', 'alter system set ssl=off'])
+ await exec('psql', ['-c', 'select pg_reload_conf()'])
+
+ const sql = postgres({
+ ssl: 'prefer',
+ idle_timeout
+ })
+
+ return [
+ 1, (await sql`select 1 as x`)[0].x,
+ await exec('psql', ['-c', 'alter system set ssl=on']),
+ await exec('psql', ['-c', 'select pg_reload_conf()'])
+ ]
+})
+
+t('Reconnect using SSL', { timeout: 2 }, async() => {
+ const sql = postgres({
+ ssl: 'require',
+ idle_timeout: 0.1
+ })
+
+ await sql`select 1`
+ await delay(200)
+
+ return [1, (await sql`select 1 as x`)[0].x]
+})
+
+t('Login without password', async() => {
+ return [true, (await postgres({ ...options, ...login })`select true as x`)[0].x]
+})
+
+t('Login using MD5', async() => {
+ return [true, (await postgres({ ...options, ...login_md5 })`select true as x`)[0].x]
+})
+
+t('Login using scram-sha-256', async() => {
+ return [true, (await postgres({ ...options, ...login_scram })`select true as x`)[0].x]
+})
+
+t('Parallel connections using scram-sha-256', {
+ timeout: 2
+}, async() => {
+ const sql = postgres({ ...options, ...login_scram })
+ return [true, (await Promise.all([
+ sql`select true as x, pg_sleep(0.2)`,
+ sql`select true as x, pg_sleep(0.2)`,
+ sql`select true as x, pg_sleep(0.2)`
+ ]))[0][0].x]
+})
+
+t('Support dynamic password function', async() => {
+ return [true, (await postgres({
+ ...options,
+ ...login_scram,
+ pass: () => 'postgres_js_test_scram'
+ })`select true as x`)[0].x]
+})
+
+t('Support dynamic async password function', async() => {
+ return [true, (await postgres({
+ ...options,
+ ...login_scram,
+ pass: () => Promise.resolve('postgres_js_test_scram')
+ })`select true as x`)[0].x]
+})
+
+t('Point type', async() => {
+ const sql = postgres({
+ ...options,
+ types: {
+ point: {
+ to: 600,
+ from: [600],
+ serialize: ([x, y]) => '(' + x + ',' + y + ')',
+ parse: (x) => x.slice(1, -1).split(',').map(x => +x)
+ }
+ }
+ })
+
+ await sql`create table test (x point)`
+ await sql`insert into test (x) values (${ sql.types.point([10, 20]) })`
+ return [20, (await sql`select x from test`)[0].x[1], await sql`drop table test`]
+})
+
+t('Point type array', async() => {
+ const sql = postgres({
+ ...options,
+ types: {
+ point: {
+ to: 600,
+ from: [600],
+ serialize: ([x, y]) => '(' + x + ',' + y + ')',
+ parse: (x) => x.slice(1, -1).split(',').map(x => +x)
+ }
+ }
+ })
+
+ await sql`create table test (x point[])`
+ await sql`insert into test (x) values (${ sql.array([sql.types.point([10, 20]), sql.types.point([20, 30])]) })`
+ return [30, (await sql`select x from test`)[0].x[1][1], await sql`drop table test`]
+})
+
+t('sql file', async() =>
+ [1, (await sql.file(rel('select.sql')))[0].x]
+)
+
+t('sql file has forEach', async() => {
+ let result
+ await sql
+ .file(rel('select.sql'), { cache: false })
+ .forEach(({ x }) => result = x)
+
+ return [1, result]
+})
+
+t('sql file throws', async() =>
+ ['ENOENT', (await sql.file(rel('selectomondo.sql')).catch(x => x.code))]
+)
+
+t('sql file cached', async() => {
+ await sql.file(rel('select.sql'))
+ await delay(20)
+
+ return [1, (await sql.file(rel('select.sql')))[0].x]
+})
+
+t('Parameters in file', async() => {
+ const result = await sql.file(
+ rel('select-param.sql'),
+ ['hello']
+ )
+ return ['hello', result[0].x]
+})
+
+t('Connection ended promise', async() => {
+ const sql = postgres(options)
+
+ await sql.end()
+
+ return [undefined, await sql.end()]
+})
+
+t('Connection ended timeout', async() => {
+ const sql = postgres(options)
+
+ await sql.end({ timeout: 10 })
+
+ return [undefined, await sql.end()]
+})
+
+t('Connection ended error', async() => {
+ const sql = postgres(options)
+ sql.end()
+ return ['CONNECTION_ENDED', (await sql``.catch(x => x.code))]
+})
+
+t('Connection end does not cancel query', async() => {
+ const sql = postgres(options)
+
+ const promise = sql`select 1 as x`.execute()
+
+ sql.end()
+
+ return [1, (await promise)[0].x]
+})
+
+t('Connection destroyed', async() => {
+ const sql = postgres(options)
+ setTimeout(() => sql.end({ timeout: 0 }), 0)
+ return ['CONNECTION_DESTROYED', await sql``.catch(x => x.code)]
+})
+
+t('Connection destroyed with query before', async() => {
+ const sql = postgres(options)
+ , error = sql`select pg_sleep(0.2)`.catch(err => err.code)
+
+ sql.end({ timeout: 0 })
+ return ['CONNECTION_DESTROYED', await error]
+})
+
+t('transform column', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: x => x.split('').reverse().join('') }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['dlrow_olleh', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('column toPascal', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: postgres.toPascal }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['HelloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('column toCamel', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: postgres.toCamel }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['helloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('column toKebab', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: postgres.toKebab }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['hello-world', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('unsafe', async() => {
+ await sql`create table test (x int)`
+ return [1, (await sql.unsafe('insert into test values ($1) returning *', [1]))[0].x, await sql`drop table test`]
+})
+
+t('unsafe simple', async() => {
+ return [1, (await sql.unsafe('select 1 as x'))[0].x]
+})
+
+t('listen and notify', async() => {
+ const sql = postgres(options)
+ , channel = 'hello'
+
+ return ['world', await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .then(() => delay(20))
+ .catch(reject)
+ .then(sql.end)
+ )]
+})
+
+t('double listen', async() => {
+ const sql = postgres(options)
+ , channel = 'hello'
+
+ let count = 0
+
+ await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .catch(reject)
+ ).then(() => count++)
+
+ await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .catch(reject)
+ ).then(() => count++)
+
+ // for coverage
+ sql.listen('weee', () => { /* noop */ }).then(sql.end)
+
+ return [2, count]
+})
+
+t('listen and notify with weird name', async() => {
+ const sql = postgres(options)
+ , channel = 'wat-;ø§'
+
+ return ['world', await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .catch(reject)
+ .then(() => delay(20))
+ .then(sql.end)
+ )]
+})
+
+t('listen and notify with upper case', async() => {
+ const sql = postgres(options)
+ let result
+
+ await sql.listen('withUpperChar', x => result = x)
+ sql.notify('withUpperChar', 'works')
+ await delay(50)
+
+ return [
+ 'works',
+ result,
+ sql.end()
+ ]
+})
+
+t('listen reconnects', { timeout: 2 }, async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const { state: { pid } } = await sql.listen('test', x => xs.push(x))
+ await delay(200)
+ await sql.notify('test', 'a')
+ await sql`select pg_terminate_backend(${ pid }::int)`
+ await delay(200)
+ await sql.notify('test', 'b')
+ await delay(200)
+ sql.end()
+
+ return ['ab', xs.join('')]
+})
+
+
+t('listen reconnects after connection error', { timeout: 3 }, async() => {
+ const sql = postgres()
+ , xs = []
+
+ const { state: { pid } } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
+ await sql`select pg_terminate_backend(${ pid }::int)`
+ await delay(1000)
+
+ await sql.notify('test', 'b')
+ await delay(50)
+ sql.end()
+
+ return ['ab', xs.join('')]
+})
+
+t('listen result reports correct connection state after reconnection', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const result = await sql.listen('test', x => xs.push(x))
+ const initialPid = result.state.pid
+ await sql.notify('test', 'a')
+ await sql`select pg_terminate_backend(${ initialPid }::int)`
+ await delay(50)
+ sql.end()
+
+ return [result.state.pid !== initialPid, true]
+})
+
+t('unlisten removes subscription', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const { unlisten } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
+ await delay(50)
+ await unlisten()
+ await sql.notify('test', 'b')
+ await delay(50)
+ sql.end()
+
+ return ['a', xs.join('')]
+})
+
+t('listen after unlisten', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const { unlisten } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
+ await delay(50)
+ await unlisten()
+ await sql.notify('test', 'b')
+ await delay(50)
+ await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'c')
+ await delay(50)
+ sql.end()
+
+ return ['ac', xs.join('')]
+})
+
+t('multiple listeners and unlisten one', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ await sql.listen('test', x => xs.push('1', x))
+ const s2 = await sql.listen('test', x => xs.push('2', x))
+ await sql.notify('test', 'a')
+ await delay(50)
+ await s2.unlisten()
+ await sql.notify('test', 'b')
+ await delay(50)
+ sql.end()
+
+ return ['1a2a1b', xs.join('')]
+})
+
+t('responds with server parameters (application_name)', async() =>
+ ['postgres.js', await new Promise((resolve, reject) => postgres({
+ ...options,
+ onparameter: (k, v) => k === 'application_name' && resolve(v)
+ })`select 1`.catch(reject))]
+)
+
+t('has server parameters', async() => {
+ return ['postgres.js', (await sql`select 1`.then(() => sql.parameters.application_name))]
+})
+
+t('big query body', async() => {
+ await sql`create table test (x int)`
+ return [1000, (await sql`insert into test ${
+ sql([...Array(1000).keys()].map(x => ({ x })))
+ }`).count, await sql`drop table test`]
+})
+
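+// The extended query protocol encodes the parameter count as a 16-bit integer,
+// so a single query can carry at most 65534 parameters.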
+t('Throws if more than 65534 parameters', async() => {
+ await sql`create table test (x int)`
+ return ['MAX_PARAMETERS_EXCEEDED', (await sql`insert into test ${
+ sql([...Array(65535).keys()].map(x => ({ x })))
+ }`.catch(e => e.code)), await sql`drop table test`]
+})
+
+t('let postgres do implicit cast of unknown types', async() => {
+ await sql`create table test (x timestamp with time zone)`
+ const [{ x }] = await sql`insert into test values (${ new Date().toISOString() }) returning *`
+ return [true, x instanceof Date, await sql`drop table test`]
+})
+
+t('only allows one statement', async() =>
+ ['42601', await sql`select 1; select 2`.catch(e => e.code)]
+)
+
+t('await sql() throws not tagged error', async() => {
+ let error
+ try {
+ await sql('select 1')
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('sql().then throws not tagged error', async() => {
+ let error
+ try {
+ sql('select 1').then(() => { /* noop */ })
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('sql().catch throws not tagged error', async() => {
+ let error
+ try {
+ await sql('select 1')
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('sql().finally throws not tagged error', async() => {
+ let error
+ try {
+ sql('select 1').finally(() => { /* noop */ })
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('little bobby tables', async() => {
+ const name = 'Robert\'); DROP TABLE students;--'
+
+ await sql`create table students (name text, age int)`
+ await sql`insert into students (name) values (${ name })`
+
+ return [
+ name, (await sql`select name from students`)[0].name,
+ await sql`drop table students`
+ ]
+})
+
+t('Connection errors are caught using begin()', {
+ timeout: 2
+}, async() => {
+ let error
+ try {
+ const sql = postgres({ host: 'wat', port: 1337 })
+
+ await sql.begin(async(sql) => {
+ await sql`insert into test (label, value) values (${1}, ${2})`
+ })
+ } catch (err) {
+ error = err
+ }
+
+ return [
+ true,
+ error.code === 'ENOTFOUND' ||
+ error.message === 'failed to lookup address information: nodename nor servname provided, or not known'
+ ]
+})
+
+t('dynamic column name', async() => {
+ return ['!not_valid', Object.keys((await sql`select 1 as ${ sql('!not_valid') }`)[0])[0]]
+})
+
+t('dynamic select as', async() => {
+ return ['2', (await sql`select ${ sql({ a: 1, b: 2 }) }`)[0].b]
+})
+
+t('dynamic select as pluck', async() => {
+ return [undefined, (await sql`select ${ sql({ a: 1, b: 2 }, 'a') }`)[0].b]
+})
+
+t('dynamic insert', async() => {
+ await sql`create table test (a int, b text)`
+ const x = { a: 42, b: 'the answer' }
+
+ return ['the answer', (await sql`insert into test ${ sql(x) } returning *`)[0].b, await sql`drop table test`]
+})
+
+t('dynamic insert pluck', async() => {
+ await sql`create table test (a int, b text)`
+ const x = { a: 42, b: 'the answer' }
+
+ return [null, (await sql`insert into test ${ sql(x, 'a') } returning *`)[0].b, await sql`drop table test`]
+})
+
+t('array insert', async() => {
+ await sql`create table test (a int, b int)`
+ return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`]
+})
+
+t('where parameters in()', async() => {
+ await sql`create table test (x text)`
+ await sql`insert into test values ('a')`
+ return [
+ (await sql`select * from test where x in ${ sql(['a', 'b', 'c']) }`)[0].x,
+ 'a',
+ await sql`drop table test`
+ ]
+})
+
+t('where parameters in() values before', async() => {
+ return [2, (await sql`
+ with rows as (
+ select * from (values (1), (2), (3), (4)) as x(a)
+ )
+ select * from rows where a in ${ sql([3, 4]) }
+ `).count]
+})
+
+t('dynamic multi row insert', async() => {
+ await sql`create table test (a int, b text)`
+ const x = { a: 42, b: 'the answer' }
+
+ return [
+ 'the answer',
+ (await sql`insert into test ${ sql([x, x]) } returning *`)[1].b, await sql`drop table test`
+ ]
+})
+
+t('dynamic update', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (17, 'wrong')`
+
+ return [
+ 'the answer',
+ (await sql`update test set ${ sql({ a: 42, b: 'the answer' }) } returning *`)[0].b, await sql`drop table test`
+ ]
+})
+
+t('dynamic update pluck', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (17, 'wrong')`
+
+ return [
+ 'wrong',
+ (await sql`update test set ${ sql({ a: 42, b: 'the answer' }, 'a') } returning *`)[0].b, await sql`drop table test`
+ ]
+})
+
+t('dynamic select array', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (42, 'yay')`
+ return ['yay', (await sql`select ${ sql(['a', 'b']) } from test`)[0].b, await sql`drop table test`]
+})
+
+t('dynamic select args', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (42, 'yay')`
+ return ['yay', (await sql`select ${ sql('a', 'b') } from test`)[0].b, await sql`drop table test`]
+})
+
+t('dynamic values single row', async() => {
+ const [{ b }] = await sql`
+ select * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c)
+ `
+
+ return ['b', b]
+})
+
+t('dynamic values multi row', async() => {
+ const [, { b }] = await sql`
+ select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) as x(a, b, c)
+ `
+
+ return ['b', b]
+})
+
+t('connection parameters', async() => {
+ const sql = postgres({
+ ...options,
+ connection: {
+ 'some.var': 'yay'
+ }
+ })
+
+ return ['yay', (await sql`select current_setting('some.var') as x`)[0].x]
+})
+
+t('Multiple queries', async() => {
+ const sql = postgres(options)
+
+ return [4, (await Promise.all([
+ sql`select 1`,
+ sql`select 2`,
+ sql`select 3`,
+ sql`select 4`
+ ])).length]
+})
+
+t('Multiple statements', async() =>
+ [2, await sql.unsafe(`
+ select 1 as x;
+ select 2 as a;
+ `).then(([, [x]]) => x.a)]
+)
+
+t('throws correct error when authentication fails', async() => {
+ const sql = postgres({
+ ...options,
+ ...login_md5,
+ pass: 'wrong'
+ })
+ return ['28P01', await sql`select 1`.catch(e => e.code)]
+})
+
+t('notice works', async() => {
+ let notice
+ const log = console.log
+ console.log = function(x) {
+ notice = x
+ }
+
+ const sql = postgres(options)
+
+ await sql`create table if not exists users()`
+ await sql`create table if not exists users()`
+
+ console.log = log
+
+ return ['NOTICE', notice.severity]
+})
+
+t('notice hook works', async() => {
+ let notice
+ const sql = postgres({
+ ...options,
+ onnotice: x => notice = x
+ })
+
+ await sql`create table if not exists users()`
+ await sql`create table if not exists users()`
+
+ return ['NOTICE', notice.severity]
+})
+
+t('bytea serializes and parses', async() => {
+ const buf = Buffer.from('wat')
+
+ await sql`create table test (x bytea)`
+ await sql`insert into test values (${ buf })`
+
+ return [
+ buf.toString(),
+ (await sql`select x from test`)[0].x.toString(),
+ await sql`drop table test`
+ ]
+})
+
+t('forEach works', async() => {
+ let result
+ await sql`select 1 as x`.forEach(({ x }) => result = x)
+ return [1, result]
+})
+
+t('forEach returns empty array', async() => {
+ return [0, (await sql`select 1 as x`.forEach(() => { /* noop */ })).length]
+})
+
+t('Cursor works', async() => {
+ const order = []
+ await sql`select 1 as x union select 2 as x`.cursor(async([x]) => {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ })
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Unsafe cursor works', async() => {
+ const order = []
+ await sql.unsafe('select 1 as x union select 2 as x').cursor(async([x]) => {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ })
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Cursor custom n works', async() => {
+ const order = []
+ await sql`select * from generate_series(1,20)`.cursor(10, async(x) => {
+ order.push(x.length)
+ })
+ return ['10,10', order.join(',')]
+})
+
+t('Cursor custom with rest n works', async() => {
+ const order = []
+ await sql`select * from generate_series(1,20)`.cursor(11, async(x) => {
+ order.push(x.length)
+ })
+ return ['11,9', order.join(',')]
+})
+
+t('Cursor custom with less results than batch size works', async() => {
+ const order = []
+ await sql`select * from generate_series(1,20)`.cursor(21, async(x) => {
+ order.push(x.length)
+ })
+ return ['20', order.join(',')]
+})
+
+t('Cursor cancel works', async() => {
+ let result
+ await sql`select * from generate_series(1,10) as x`.cursor(async([{ x }]) => {
+ result = x
+ return sql.CLOSE
+ })
+ return [1, result]
+})
+
+t('Cursor throw works', async() => {
+ const order = []
+ await sql`select 1 as x union select 2 as x`.cursor(async([x]) => {
+ order.push(x.x + 'a')
+ await delay(100)
+ throw new Error('watty')
+ }).catch(() => order.push('err'))
+ return ['1aerr', order.join('')]
+})
+
+t('Cursor error works', async() => [
+ '42601',
+ await sql`wat`.cursor(() => { /* noop */ }).catch((err) => err.code)
+])
+
+t('Multiple Cursors', { timeout: 2 }, async() => {
+ const result = []
+ await sql.begin(async sql => [
+ await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async([row]) => {
+ result.push(row.x)
+ await new Promise(r => setTimeout(r, 200))
+ }),
+ await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async([row]) => {
+ result.push(row.x)
+ await new Promise(r => setTimeout(r, 100))
+ })
+ ])
+
+ return ['1,2,3,4,101,102,103,104', result.join(',')]
+})
+
+t('Cursor as async iterator', async() => {
+ const order = []
+ for await (const [x] of sql`select generate_series(1,2) as x;`.cursor()) {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ }
+
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Cursor as async iterator with break', async() => {
+ const order = []
+ for await (const xs of sql`select generate_series(1,2) as x;`.cursor()) {
+ order.push(xs[0].x + 'a')
+ await delay(10)
+ order.push(xs[0].x + 'b')
+ break
+ }
+
+ return ['1a1b', order.join('')]
+})
+
+t('Async Iterator Unsafe cursor works', async() => {
+ const order = []
+ for await (const [x] of sql.unsafe('select 1 as x union select 2 as x').cursor()) {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ }
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Async Iterator Cursor custom n works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(10))
+ order.push(x.length)
+
+ return ['10,10', order.join(',')]
+})
+
+t('Async Iterator Cursor custom with rest n works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(11))
+ order.push(x.length)
+
+ return ['11,9', order.join(',')]
+})
+
+t('Async Iterator Cursor custom with less results than batch size works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(21))
+ order.push(x.length)
+ return ['20', order.join(',')]
+})
+
+t('Transform row', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { row: () => 1 }
+ })
+
+ return [1, (await sql`select 'wat'`)[0]]
+})
+
+t('Transform row forEach', async() => {
+ let result
+ const sql = postgres({
+ ...options,
+ transform: { row: () => 1 }
+ })
+
+ await sql`select 1`.forEach(x => result = x)
+
+ return [1, result]
+})
+
+t('Transform value', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { value: () => 1 }
+ })
+
+ return [1, (await sql`select 'wat' as x`)[0].x]
+})
+
+t('Transform columns from', async() => {
+ const sql = postgres({ ...options, transform: { column: { to: postgres.fromCamel, from: postgres.toCamel } } })
+ await sql`create table test (a_test int, b_test text)`
+ await sql`insert into test ${ sql([{ aTest: 1, bTest: 1 }]) }`
+ await sql`update test set ${ sql({ aTest: 2, bTest: 2 }) }`
+ return [
+ 2,
+ (await sql`select ${ sql('aTest', 'bTest') } from test`)[0].aTest,
+ await sql`drop table test`
+ ]
+})
+
+t('Unix socket', async() => {
+ const sql = postgres({
+ ...options,
+ host: '/tmp'
+ })
+
+ return [1, (await sql`select 1 as x`)[0].x]
+})
+
+t('Big result', async() => {
+ return [100000, (await sql`select * from generate_series(1, 100000)`).count]
+})
+
+t('Debug works', async() => {
+ let result
+ const sql = postgres({
+ ...options,
+ debug: (connection_id, str) => result = str
+ })
+
+ await sql`select 1`
+
+ return ['select 1', result]
+})
+
+t('bigint is returned as String', async() => [
+ 'string',
+ typeof (await sql`select 9223372036854777 as x`)[0].x
+])
+
+t('int is returned as Number', async() => [
+ 'number',
+ typeof (await sql`select 123 as x`)[0].x
+])
+
+t('numeric is returned as string', async() => [
+ 'string',
+ typeof (await sql`select 1.2 as x`)[0].x
+])
+
+t('Async stack trace', async() => {
+ const sql = postgres({ ...options, debug: false })
+ return [
+ parseInt(new Error().stack.split('\n')[1].match(':([0-9]+):')[1]) + 1,
+ parseInt(await sql`error`.catch(x => x.stack.split('\n').pop().match(':([0-9]+):')[1]))
+ ]
+})
+
+t('Debug has long async stack trace', async() => {
+ const sql = postgres({ ...options, debug: true })
+
+ return [
+ 'watyo',
+ await yo().catch(x => x.stack.match(/wat|yo/g).join(''))
+ ]
+
+ function yo() {
+ return wat()
+ }
+
+ function wat() {
+ return sql`error`
+ }
+})
+
+t('Error contains query string', async() => [
+ 'selec 1',
+ (await sql`selec 1`.catch(err => err.query))
+])
+
+t('Error contains query serialized parameters', async() => [
+ 1,
+ (await sql`selec ${ 1 }`.catch(err => err.parameters[0]))
+])
+
+t('Error contains query raw parameters', async() => [
+ 1,
+ (await sql`selec ${ 1 }`.catch(err => err.args[0]))
+])
+
+t('Query and parameters on error are not enumerable if debug is not set', async() => {
+ const sql = postgres({ ...options, debug: false })
+
+ return [
+ false,
+ (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') || err.propertyIsEnumerable('query')))
+ ]
+})
+
+t('Query and parameters are enumerable if debug is set', async() => {
+ const sql = postgres({ ...options, debug: true })
+
+ return [
+ true,
+ (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') && err.propertyIsEnumerable('query')))
+ ]
+})
+
+t('connect_timeout works', { timeout: 20 }, async() => {
+ const connect_timeout = 0.2
+ const server = net.createServer()
+ server.listen()
+ const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout })
+ const start = Date.now()
+ let end
+ await sql`select 1`.catch((e) => {
+ if (e.code !== 'CONNECT_TIMEOUT')
+ throw e
+ end = Date.now()
+ })
+ server.close()
+ return [connect_timeout, Math.floor((end - start) / 100) / 10]
+})
+
+t('connect_timeout throws proper error', async() => [
+ 'CONNECT_TIMEOUT',
+ await postgres({
+ ...options,
+ ...login_scram,
+ connect_timeout: 0.001
+ })`select 1`.catch(e => e.code)
+])
+
+t('requests works after single connect_timeout', async() => {
+ let first = true
+
+ const sql = postgres({
+ ...options,
+ ...login_scram,
+ connect_timeout: { valueOf() { return first ? (first = false, 0.001) : 1 } }
+ })
+
+ return [
+ 'CONNECT_TIMEOUT,,1',
+ [
+ await sql`select 1 as x`.then(() => 'success', x => x.code),
+ await delay(10),
+ (await sql`select 1 as x`)[0].x
+ ].join(',')
+ ]
+})
+
+t('Postgres errors are of type PostgresError', async() =>
+ [true, (await sql`bad keyword`.catch(e => e)) instanceof sql.PostgresError]
+)
+
+t('Result has columns spec', async() =>
+ ['x', (await sql`select 1 as x`).columns[0].name]
+)
+
+t('forEach has result as second argument', async() => {
+ let x
+ await sql`select 1 as x`.forEach((_, result) => x = result)
+ return ['x', x.columns[0].name]
+})
+
+t('Result as arrays', async() => {
+ const sql = postgres({
+ ...options,
+ transform: {
+ row: x => Object.values(x)
+ }
+ })
+
+ return ['1,2', (await sql`select 1 as a, 2 as b`)[0].join(',')]
+})
+
+t('Insert empty array', async() => {
+ await sql`create table tester (ints int[])`
+ return [
+ Array.isArray((await sql`insert into tester (ints) values (${ sql.array([]) }) returning *`)[0].ints),
+ true,
+ await sql`drop table tester`
+ ]
+})
+
+t('Insert array in sql()', async() => {
+ await sql`create table tester (ints int[])`
+ return [
+ Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) }) } returning *`)[0].ints),
+ true,
+ await sql`drop table tester`
+ ]
+})
+
+t('Automatically creates prepared statements', async() => {
+ const sql = postgres(options)
+ const result = await sql`select * from pg_prepared_statements`
+  return [true, result.some(x => x.name === result.statement.name)]
+})
+
+t('no_prepare: true disables prepared statements (deprecated)', async() => {
+ const sql = postgres({ ...options, no_prepare: true })
+ const result = await sql`select * from pg_prepared_statements`
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('prepare: false disables prepared statements', async() => {
+ const sql = postgres({ ...options, prepare: false })
+ const result = await sql`select * from pg_prepared_statements`
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('prepare: true enables prepared statements', async() => {
+ const sql = postgres({ ...options, prepare: true })
+ const result = await sql`select * from pg_prepared_statements`
+  return [true, result.some(x => x.name === result.statement.name)]
+})
+
+t('prepares unsafe query when "prepare" option is true', async() => {
+ const sql = postgres({ ...options, prepare: true })
+ const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'], { prepare: true })
+  return [true, result.some(x => x.name === result.statement.name)]
+})
+
+t('does not prepare unsafe query by default', async() => {
+ const sql = postgres({ ...options, prepare: true })
+ const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'])
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('Recreate prepared statements on transformAssignedExpr error', async() => {
+ const insert = () => sql`insert into test (name) values (${ '1' }) returning name`
+ await sql`create table test (name text)`
+ await insert()
+ await sql`alter table test alter column name type int using name::integer`
+ return [
+ 1,
+ (await insert())[0].name,
+ await sql`drop table test`
+ ]
+})
+
+t('Throws correct error when retrying in transactions', async() => {
+ await sql`create table test(x int)`
+ const error = await sql.begin(sql => sql`insert into test (x) values (${ false })`).catch(e => e)
+ return [
+ error.code,
+ '42804',
+ sql`drop table test`
+ ]
+})
+
+t('Recreate prepared statements on RevalidateCachedQuery error', async() => {
+ const select = () => sql`select name from test`
+ await sql`create table test (name text)`
+ await sql`insert into test values ('1')`
+ await select()
+ await sql`alter table test alter column name type int using name::integer`
+ return [
+ 1,
+ (await select())[0].name,
+ await sql`drop table test`
+ ]
+})
+
+
+t('Catches connection config errors', async() => {
+ const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' })
+
+ return [
+ 'wat',
+ await sql`select 1`.catch((e) => e.message)
+ ]
+})
+
+t('Catches connection config errors with end', async() => {
+ const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' })
+
+ return [
+ 'wat',
+ await sql`select 1`.catch((e) => e.message),
+ await sql.end()
+ ]
+})
+
+t('Catches query format errors', async() => [
+ 'wat',
+ await sql.unsafe({ toString: () => { throw new Error('wat') } }).catch((e) => e.message)
+])
+
+t('Multiple hosts', {
+ timeout: 10
+}, async() => {
+ const s1 = postgres({ idle_timeout })
+ , s2 = postgres({ idle_timeout, port: 5433 })
+ , sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout, max: 1 })
+ , result = []
+
+ const x1 = await sql`select 1`
+ result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
+ await s1`select pg_terminate_backend(${ x1.state.pid }::int)`
+ await delay(100)
+
+ const x2 = await sql`select 1`
+ result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
+ await s2`select pg_terminate_backend(${ x2.state.pid }::int)`
+ await delay(100)
+
+ result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
+
+ return ['5432,5433,5432', result.join(',')]
+})
+
+t('Escaping supports schemas and tables', async() => {
+ await sql`create schema a`
+ await sql`create table a.b (c int)`
+ await sql`insert into a.b (c) values (1)`
+ return [
+ 1,
+ (await sql`select ${ sql('a.b.c') } from a.b`)[0].c,
+ await sql`drop table a.b`,
+ await sql`drop schema a`
+ ]
+})
+
+t('Raw method returns rows as arrays', async() => {
+ const [x] = await sql`select 1`.raw()
+ return [
+ Array.isArray(x),
+ true
+ ]
+})
+
+t('Raw method returns values unparsed as Buffer', async() => {
+ const [[x]] = await sql`select 1`.raw()
+ return [
+ x instanceof Uint8Array,
+ true
+ ]
+})
+
+t('Copy read works', async() => {
+ const result = []
+
+ await sql`create table test (x int)`
+ await sql`insert into test select * from generate_series(1,10)`
+ const readable = await sql`copy test to stdout`.readable()
+ readable.on('data', x => result.push(x))
+ await new Promise(r => readable.on('end', r))
+
+ return [
+ result.length,
+ 10,
+ await sql`drop table test`
+ ]
+})
+
+t('Copy write works', { timeout: 2 }, async() => {
+ await sql`create table test (x int)`
+ const writable = await sql`copy test from stdin`.writable()
+
+ writable.write('1\n')
+ writable.write('1\n')
+ writable.end()
+
+ await new Promise(r => writable.on('finish', r))
+
+ return [
+ (await sql`select 1 from test`).length,
+ 2,
+ await sql`drop table test`
+ ]
+})
+
+t('Copy write as first works', async() => {
+ await sql`create table test (x int)`
+ const first = postgres(options)
+ const writable = await first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable()
+ writable.write('1\n')
+ writable.write('1\n')
+ writable.end()
+
+ await new Promise(r => writable.on('finish', r))
+
+ return [
+ (await sql`select 1 from test`).length,
+ 2,
+ await sql`drop table test`
+ ]
+})
+
+t('Copy from file works', async() => {
+ await sql`create table test (x int, y int, z int)`
+ await new Promise(async r => fs
+ .createReadStream(rel('copy.csv'))
+ .pipe(await sql`copy test from stdin`.writable())
+ .on('finish', r)
+ )
+
+ return [
+ JSON.stringify(await sql`select * from test`),
+ '[{"x":1,"y":2,"z":3},{"x":4,"y":5,"z":6}]',
+ await sql`drop table test`
+ ]
+})
+
+t('Copy from works in transaction', async() => {
+ await sql`create table test(x int)`
+ const xs = await sql.begin(async sql => {
+ (await sql`copy test from stdin`.writable()).end('1\n2')
+ await delay(20)
+ return sql`select 1 from test`
+ })
+
+ return [
+ xs.length,
+ 2,
+ await sql`drop table test`
+ ]
+})
+
+t('Copy from abort works', async() => {
+ const sql = postgres(options)
+ const readable = fs.createReadStream(rel('copy.csv'))
+
+ await sql`create table test (x int, y int, z int)`
+ await sql`TRUNCATE TABLE test`
+
+ const writable = await sql`COPY test FROM STDIN`.writable()
+
+ let aborted
+
+ readable
+ .pipe(writable)
+ .on('error', (err) => aborted = err)
+
+ writable.destroy(new Error('abort'))
+ await sql.end()
+
+ return [
+ 'abort',
+ aborted.message,
+ await postgres(options)`drop table test`
+ ]
+})
+
+t('multiple queries before connect', async() => {
+ const sql = postgres({ ...options, max: 2 })
+ const xs = await Promise.all([
+ sql`select 1 as x`,
+ sql`select 2 as x`,
+ sql`select 3 as x`,
+ sql`select 4 as x`
+ ])
+
+ return [
+ '1,2,3,4',
+ xs.map(x => x[0].x).join()
+ ]
+})
+
+t('subscribe', { timeout: 2 }, async() => {
+ const sql = postgres({
+ database: 'postgres_js_test',
+ publications: 'alltables',
+ fetch_types: false
+ })
+
+ await sql.unsafe('create publication alltables for all tables')
+
+ const result = []
+
+ await sql.subscribe('*', (row, info) =>
+ result.push(info.command, row.name || row.id)
+ )
+
+ await sql`
+ create table test (
+ id serial primary key,
+ name text
+ )
+ `
+ await sql`insert into test (name) values ('Murray')`
+ await sql`update test set name = 'Rothbard'`
+ await sql`delete from test`
+ await delay(100)
+ return [
+ 'insert,Murray,update,Rothbard,delete,1',
+ result.join(','),
+ await sql`drop table test`,
+ await sql`drop publication alltables`,
+ await sql.end()
+ ]
+})
+
+t('Execute works', async() => {
+ const result = await new Promise((resolve) => {
+    const sql = postgres({ ...options, fetch_types: false, debug: (id, query) => resolve(query) })
+ sql`select 1`.execute()
+ })
+
+ return [result, 'select 1']
+})
+
+t('Cancel running query works', async() => {
+ const query = sql`select pg_sleep(2)`
+ setTimeout(() => query.cancel(), 50)
+ const error = await query.catch(x => x)
+ return ['57014', error.code]
+})
+
+t('Cancel piped query works', async() => {
+ await sql`select 1`
+ const last = sql`select pg_sleep(0.2)`.execute()
+ const query = sql`select pg_sleep(2) as dig`
+ setTimeout(() => query.cancel(), 100)
+ const error = await query.catch(x => x)
+ await last
+ return ['57014', error.code]
+})
+
+t('Cancel queued query works', async() => {
+ const tx = sql.begin(sql => sql`select pg_sleep(0.2) as hej, 'hejsa'`)
+ const query = sql`select pg_sleep(2) as nej`
+ setTimeout(() => query.cancel(), 50)
+ const error = await query.catch(x => x)
+ await tx
+ return ['57014', error.code]
+})
+
+t('Fragments', async() => [
+ 1,
+ (await sql`
+ ${ sql`select` } 1 as x
+ `)[0].x
+])
+
+t('Result becomes array', async() => [
+ true,
+ (await sql`select 1`).slice() instanceof Array
+])
+
+t('Describe', async() => {
+ const type = (await sql`select ${ 1 }::int as x`.describe()).types[0]
+ return [23, type]
+})
+
+t('Describe a statement', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`select name, age from tester where name like $1 and age > $2`.describe()
+ return [
+ '25,23/name:25,age:23',
+ `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Describe a statement without parameters', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`select name, age from tester`.describe()
+ return [
+ '0,2',
+ `${ r.types.length },${ r.columns.length }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Describe a statement without columns', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`insert into tester (name, age) values ($1, $2)`.describe()
+ return [
+ '2,0',
+ `${ r.types.length },${ r.columns.length }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Large object', async() => {
+ const file = rel('index.js')
+ , md5 = crypto.createHash('md5').update(fs.readFileSync(file)).digest('hex')
+
+ const lo = await sql.largeObject()
+ await new Promise(async r => fs.createReadStream(file).pipe(await lo.writable()).on('finish', r))
+ await lo.seek(0)
+
+ const out = crypto.createHash('md5')
+ await new Promise(r => lo.readable().then(x => x.on('data', x => out.update(x)).on('end', r)))
+
+ return [
+ md5,
+ out.digest('hex'),
+ await lo.close()
+ ]
+})
+
+t('Catches type serialize errors', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: x => x,
+ serialize: () => { throw new Error('watSerialize') }
+ }
+ }
+ })
+
+ return [
+ 'watSerialize',
+ (await sql`select ${ 'wat' }`.catch(e => e.message))
+ ]
+})
+
+t('Catches type parse errors', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: () => { throw new Error('watParse') },
+ serialize: x => x
+ }
+ }
+ })
+
+ return [
+ 'watParse',
+ (await sql`select 'wat'`.catch(e => e.message))
+ ]
+})
+
+t('Catches type serialize errors in transactions', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: x => x,
+ serialize: () => { throw new Error('watSerialize') }
+ }
+ }
+ })
+
+ return [
+ 'watSerialize',
+ (await sql.begin(sql => (
+ sql`select 1`,
+ sql`select ${ 'wat' }`
+ )).catch(e => e.message))
+ ]
+})
+
+t('Catches type parse errors in transactions', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: () => { throw new Error('watParse') },
+ serialize: x => x
+ }
+ }
+ })
+
+ return [
+ 'watParse',
+ (await sql.begin(sql => (
+ sql`select 1`,
+ sql`select 'wat'`
+ )).catch(e => e.message))
+ ]
+})
+
+t('Prevent premature end of connection in transaction', async() => {
+ const sql = postgres({ max_lifetime: 0.1, idle_timeout })
+ const result = await sql.begin(async sql => {
+ await sql`select 1`
+ await delay(200)
+ await sql`select 1`
+ return 'yay'
+ })
+
+
+ return [
+ 'yay',
+ result
+ ]
+})
+
+t('Ensure reconnect after max_lifetime with transactions', { timeout: 5 }, async() => {
+ const sql = postgres({
+ max_lifetime: 0.01,
+ idle_timeout,
+ max: 1
+ })
+
+ let x = 0
+ while (x++ < 10) await sql.begin(sql => sql`select 1 as x`)
+
+ return [true, true]
+})
diff --git a/cjs/tests/select-param.sql b/cjs/tests/select-param.sql
new file mode 100644
index 00000000..d4de2440
--- /dev/null
+++ b/cjs/tests/select-param.sql
@@ -0,0 +1 @@
+select $1 as x
diff --git a/cjs/tests/select.sql b/cjs/tests/select.sql
new file mode 100644
index 00000000..f951e920
--- /dev/null
+++ b/cjs/tests/select.sql
@@ -0,0 +1 @@
+select 1 as x
diff --git a/cjs/tests/test.js b/cjs/tests/test.js
new file mode 100644
index 00000000..a6a83922
--- /dev/null
+++ b/cjs/tests/test.js
@@ -0,0 +1,88 @@
+/* eslint no-console: 0 */
+
+const util = require('util')
+
+let done = 0
+let only = false
+let ignored = 0
+let failed = false
+let promise = Promise.resolve()
+const tests = {}
+ , ignore = {}
+
+const nt = module.exports.nt = () => ignored++
+const ot = module.exports.ot = (...rest) => (only = true, test(true, ...rest))
+const t = module.exports.t = (...rest) => test(false, ...rest)
+t.timeout = 0.5
+
+async function test(o, name, options, fn) {
+ typeof options !== 'object' && (fn = options, options = {})
+ const line = new Error().stack.split('\n')[3].match(':([0-9]+):')[1]
+
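+  // Yield one microtask so every synchronous test registration (including any
+  // ot() "only" calls) has run before deciding whether to skip this test.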
+ await 1
+
+ if (only && !o)
+ return
+
+ tests[line] = { fn, line, name }
+ promise = promise.then(() => Promise.race([
+ new Promise((resolve, reject) =>
+ fn.timer = setTimeout(() => reject('Timed out'), (options.timeout || t.timeout) * 1000)
+ ),
+ failed
+ ? (ignored++, ignore)
+ : fn()
+ ]))
+ .then(async x => {
+ clearTimeout(fn.timer)
+ if (x === ignore)
+ return
+
+ if (!Array.isArray(x))
+ throw new Error('Test should return result array')
+
+ const [expected, got] = await Promise.all(x)
+ if (expected !== got) {
+ failed = true
+ throw new Error(util.inspect(expected) + ' != ' + util.inspect(got))
+ }
+
+ tests[line].succeeded = true
+ process.stdout.write('✅')
+ })
+ .catch(err => {
+ tests[line].failed = failed = true
+ tests[line].error = err instanceof Error ? err : new Error(util.inspect(err))
+ })
+ .then(() => {
+ ++done === Object.keys(tests).length && exit()
+ })
+}
+
+function exit() {
+ console.log('')
+ let success = true
+  Object.values(tests).forEach((x) => {
+    if (x.succeeded)
+      return
+
+    success = false
+    x.cleanup
+      ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup))
+      : console.error('⛔️', x.name + ' at line', x.line, x.failed
+        ? 'failed'
+        : 'never finished', x.error ? '\n' + util.inspect(x.error) : ''
+      )
+  })
+
+ only
+ ? console.error('⚠️', 'Not all tests were run')
+ : ignored
+    ? console.error('⚠️', ignored, 'ignored test' + (ignored === 1 ? '' : 's'))
+ : success
+ ? console.log('All good')
+ : console.error('⚠️', 'Not good')
+
+ !process.exitCode && (!success || only || ignored) && (process.exitCode = 1)
+}
+
diff --git a/deno/mod.js b/deno/mod.js
new file mode 100644
index 00000000..7c9e3bcd
--- /dev/null
+++ b/deno/mod.js
@@ -0,0 +1,2 @@
+// @deno-types="./types/index.d.ts"
+export { default } from './src/index.js'
diff --git a/deno/package.json b/deno/package.json
new file mode 100644
index 00000000..0292b995
--- /dev/null
+++ b/deno/package.json
@@ -0,0 +1 @@
+{"type":"commonjs"}
\ No newline at end of file
diff --git a/deno/polyfills.js b/deno/polyfills.js
new file mode 100644
index 00000000..37eabc66
--- /dev/null
+++ b/deno/polyfills.js
@@ -0,0 +1,162 @@
+/* global Deno */
+
+import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts'
+
+const events = () => ({ data: [], error: [], drain: [], connect: [], secureConnect: [], close: [] })
+
+export const net = {
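+  // Minimal server polyfill for the connect_timeout test: accepts connections
+  // on a fixed port and closes them after 500ms without ever replying.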
+ createServer() {
+ const server = {
+ address() {
+ return { port: 9876 }
+ },
+ async listen() {
+ server.raw = Deno.listen({ port: 9876, transport: 'tcp' })
+ for await (const conn of server.raw)
+ setTimeout(() => conn.close(), 500)
+ },
+ close() {
+ server.raw.close()
+ }
+ }
+ return server
+ },
+ Socket() {
+ let paused
+ , resume
+
+ const socket = {
+ error,
+ success,
+ connect: (...xs) => {
+ socket.closed = false
+ socket.raw = null
+ xs.length === 1
+ ? Deno.connect({ transport: 'unix', path: xs[0] }).then(success, error)
+ : Deno.connect({ transport: 'tcp', port: socket.port = xs[0], hostname: socket.hostname = xs[1] }).then(success, error)
+ },
+ pause: () => {
+ paused = new Promise(r => resume = r)
+ },
+ resume: () => {
+ resume && resume()
+ paused = null
+ },
+ isPaused: () => !!paused,
+ removeAllListeners: () => socket.events = events(),
+ events: events(),
+ raw: null,
+ on: (x, fn) => socket.events[x].push(fn),
+ once: (x, fn) => {
+ if (x === 'data')
+ socket.break = true
+ const e = socket.events[x]
+ e.push(once)
+ once.once = fn
+ function once(...args) {
+ fn(...args)
+ e.indexOf(once) > -1 && e.splice(e.indexOf(once), 1)
+ }
+ },
+ removeListener: (x, fn) => {
+ socket.events[x] = socket.events[x].filter(x => x !== fn && x.once !== fn)
+ },
+ write: (x, cb) => {
+ socket.raw.write(x)
+ .then(() => (cb && cb(null)))
+ .catch(err => {
+ cb && cb()
+ call(socket.events.error, err)
+ })
+ return false
+ },
+ destroy: () => close(true),
+ end: close
+ }
+
+ return socket
+
+ async function success(raw) {
+ const encrypted = socket.encrypted
+ socket.raw = raw
+ socket.encrypted
+ ? call(socket.events.secureConnect)
+ : call(socket.events.connect)
+
+ const b = new Uint8Array(1024)
+ let result
+
+ try {
+ while ((result = !socket.closed && await raw.read(b))) {
+ call(socket.events.data, Buffer.from(b.subarray(0, result)))
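+        // 83 === 'S': the server accepts our SSLRequest with a single 'S' byte,
+        // so leave the plaintext read loop and let the TLS upgrade take over.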
+ if (!encrypted && socket.break && (socket.break = false, b[0] === 83))
+ return socket.break = false
+ paused && await paused
+ }
+ } catch (e) {
+ if (e instanceof Deno.errors.BadResource === false)
+ error(e)
+ }
+
+ if (!socket.encrypted || encrypted)
+ close()
+ }
+
+ function close() {
+ try {
+ socket.raw && socket.raw.close()
+ } catch (e) {
+ if (e instanceof Deno.errors.BadResource === false)
+ call(socket.events.error, e)
+ }
+ closed()
+ }
+
+ function closed() {
+ socket.break = socket.encrypted = false
+ if (socket.closed)
+ return
+
+ call(socket.events.close)
+ socket.closed = true
+ }
+
+ function error(err) {
+ call(socket.events.error, err)
+ socket.raw
+ ? close()
+ : closed()
+ }
+
+ function call(xs, x) {
+ xs.slice().forEach(fn => fn(x))
+ }
+ }
+}
+
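+// Upgrades the already-connected TCP socket with Deno.startTls; on success the
+// read loop restarts with encrypted set, firing 'secureConnect' instead of 'connect'.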
+export const tls = {
+ connect({ socket, ...options }) {
+ socket.encrypted = true
+ Deno.startTls(socket.raw, { hostname: socket.hostname, ...options })
+ .then(socket.success, socket.error)
+ socket.raw = null
+ return socket
+ }
+}
+
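+// setImmediate/clearImmediate approximated with queueMicrotask: each call gets
+// an id in the task set, and removing the id before the microtask runs cancels it.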
+let ids = 1
+const tasks = new Set()
+export const setImmediate = fn => {
+ const id = ids++
+ tasks.add(id)
+ queueMicrotask(() => {
+ if (tasks.has(id)) {
+ fn()
+ tasks.delete(id)
+ }
+ })
+ return id
+}
+
+export const clearImmediate = id => tasks.delete(id)
+
diff --git a/deno/src/bytes.js b/deno/src/bytes.js
new file mode 100644
index 00000000..5037ea03
--- /dev/null
+++ b/deno/src/bytes.js
@@ -0,0 +1,79 @@
+import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts'
+const size = 256
+let buffer = Buffer.allocUnsafe(size)
+
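+// One builder per single-byte frontend message type ('Q' = Query, 'P' = Parse,
+// 'S' = Sync, ...): each writes the type byte and skips 4 bytes for the length,
+// which end() fills in later.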
+const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
+ const v = x.charCodeAt(0)
+ acc[x] = () => {
+ buffer[0] = v
+ b.i = 5
+ return b
+ }
+ return acc
+}, {})
+
+const b = Object.assign(reset, messages, {
+ N: String.fromCharCode(0),
+ i: 0,
+ inc(x) {
+ b.i += x
+ return b
+ },
+ str(x) {
+ const length = Buffer.byteLength(x)
+ fit(length)
+ b.i += buffer.write(x, b.i, length, 'utf8')
+ return b
+ },
+ i16(x) {
+ fit(2)
+ buffer.writeUInt16BE(x, b.i)
+ b.i += 2
+ return b
+ },
+ i32(x, i) {
+ if (i || i === 0) {
+ buffer.writeUInt32BE(x, i)
+ return b
+ }
+ fit(4)
+ buffer.writeUInt32BE(x, b.i)
+ b.i += 4
+ return b
+ },
+ z(x) {
+ fit(x)
+ buffer.fill(0, b.i, b.i + x)
+ b.i += x
+ return b
+ },
+ raw(x) {
+ buffer = Buffer.concat([buffer.slice(0, b.i), x])
+ b.i = buffer.length
+ return b
+ },
+ end(at = 1) {
+ buffer.writeUInt32BE(b.i - at, at)
+ const out = buffer.slice(0, b.i)
+ b.i = 0
+ buffer = Buffer.allocUnsafe(size)
+ return out
+ }
+})
+
+export default b
+
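+// Grow the backing buffer geometrically (~1.5x plus the requested bytes)
+// whenever the next write would not fit.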
+function fit(x) {
+ if (buffer.length - b.i < x) {
+ const prev = buffer
+ , length = prev.length
+
+ buffer = Buffer.allocUnsafe(length + (length >> 1) + x)
+ prev.copy(buffer)
+ }
+}
+
+function reset() {
+ b.i = 0
+ return b
+}
diff --git a/deno/src/connection.js b/deno/src/connection.js
new file mode 100644
index 00000000..b2ff5b9a
--- /dev/null
+++ b/deno/src/connection.js
@@ -0,0 +1,1003 @@
+import { HmacSha256 } from 'https://deno.land/std@0.120.0/hash/sha256.ts'
+import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts'
+import { setImmediate, clearImmediate, net, tls } from '../polyfills.js'
+import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts'
+import Stream from 'https://deno.land/std@0.120.0/node/stream.ts'
+
+import { Identifier, Builder, handleValue, arrayParser, arraySerializer } from './types.js'
+import { Errors } from './errors.js'
+import Result from './result.js'
+import Queue from './queue.js'
+import { Query, CLOSE } from './query.js'
+import b from './bytes.js'
+
+export default Connection
+
+let uid = 1
+
+const Sync = b().S().end()
+ , Flush = b().H().end()
+ , SSLRequest = b().i32(8).i32(80877103).end(8)
+ , ExecuteUnnamed = Buffer.concat([b().E().str(b.N).i32(0).end(), Sync])
+ , DescribeUnnamed = b().D().str('S').str(b.N).end()
+ , noop = () => { /* noop */ }
+
+const retryRoutines = new Set([
+ 'FetchPreparedStatement',
+ 'RevalidateCachedQuery',
+ 'transformAssignedExpr'
+])
+
+const errorFields = {
+ 83 : 'severity_local', // S
+ 86 : 'severity', // V
+ 67 : 'code', // C
+ 77 : 'message', // M
+ 68 : 'detail', // D
+ 72 : 'hint', // H
+ 80 : 'position', // P
+ 112 : 'internal_position', // p
+ 113 : 'internal_query', // q
+ 87 : 'where', // W
+ 115 : 'schema_name', // s
+ 116 : 'table_name', // t
+ 99 : 'column_name', // c
+ 100 : 'data type_name', // d
+ 110 : 'constraint_name', // n
+ 70 : 'file', // F
+ 76 : 'line', // L
+ 82 : 'routine' // R
+}
+
+function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) {
+ const {
+ ssl,
+ max,
+ user,
+ host,
+ port,
+ database,
+ parsers,
+ transform,
+ onnotice,
+ onnotify,
+ onparameter,
+ max_pipeline,
+ keep_alive,
+ backoff,
+ target_session_attrs
+ } = options
+
+ const sent = Queue()
+ , id = uid++
+ , backend = { pid: null, secret: null }
+ , idleTimer = timer(end, options.idle_timeout)
+ , lifeTimer = timer(end, options.max_lifetime)
+ , connectTimer = timer(connectTimedOut, options.connect_timeout)
+
+ let socket = createSocket()
+ , result = new Result()
+ , incoming = Buffer.alloc(0)
+ , needsTypes = options.fetch_types
+ , backendParameters = {}
+ , statements = {}
+ , state = 'closed'
+ , statementId = Math.random().toString(36).slice(2)
+ , statementCount = 1
+ , closedDate = 0
+ , remaining = 0
+ , hostIndex = 0
+ , retries = 0
+ , length = 0
+ , delay = 0
+ , rows = 0
+ , serverSignature = null
+ , nextWriteTimer = null
+ , terminated = false
+ , incomings = null
+ , results = null
+ , initial = null
+ , ending = null
+ , stream = null
+ , chunk = null
+ , ended = null
+ , nonce = null
+ , query = null
+ , final = null
+
+ const connection = {
+ get state() { return state },
+ set state(x) {
+ state = x
+ state === 'open'
+ ? idleTimer.start()
+ : idleTimer.cancel()
+ },
+ connect(query) {
+ initial = query
+ reconnect()
+ },
+ terminate,
+ execute,
+ cancel,
+ end,
+ count: 0,
+ id
+ }
+
+ return connection
+
+ function createSocket() {
+ const x = net.Socket()
+ x.on('error', error)
+ x.on('close', closed)
+ x.on('drain', drain)
+ return x
+ }
+
+ function cancel({ pid, secret }, resolve, reject) {
+ socket.removeAllListeners()
+ socket = net.Socket()
+ socket.on('connect', () => socket.write(b().i32(16).i32(80877102).i32(pid).i32(secret).end(16)))
+ socket.once('error', reject)
+ socket.once('close', resolve)
+ connect()
+ }
+
+ function execute(q) {
+ if (terminated)
+ return q.reject(Errors.connection('CONNECTION_DESTROYED', options))
+
+ if (q.cancelled)
+ return
+
+ try {
+ q.state = backend
+ query
+ ? sent.push(q)
+ : (query = q, query.active = true)
+
+ build(q)
+ return write(toBuffer(q))
+ && !q.describeFirst
+ && sent.length < max_pipeline
+ && (!q.options.onexecute || q.options.onexecute(connection))
+ } catch (error) {
+ sent.length === 0 && write(Sync)
+ errored(error)
+ return true
+ }
+ }
+
+ function toBuffer(q) {
+ if (q.parameters.length >= 65534)
+ throw Errors.generic('MAX_PARAMETERS_EXCEEDED', 'Max number of parameters (65534) exceeded')
+
+ return q.options.simple
+ ? b().Q().str(q.strings[0] + b.N).end()
+ : q.describeFirst
+ ? Buffer.concat([describe(q), Flush])
+ : q.prepare
+ ? q.prepared
+ ? prepared(q)
+ : Buffer.concat([describe(q), prepared(q)])
+ : unnamed(q)
+ }
+
+ function describe(q) {
+ return Buffer.concat([
+ Parse(q.statement.string, q.parameters, q.statement.types, q.statement.name),
+ Describe('S', q.statement.name)
+ ])
+ }
+
+ function prepared(q) {
+ return Buffer.concat([
+ Bind(q.parameters, q.statement.types, q.statement.name, q.cursorName),
+ q.cursorFn
+ ? Execute('', q.cursorRows)
+ : ExecuteUnnamed
+ ])
+ }
+
+ function unnamed(q) {
+ return Buffer.concat([
+ Parse(q.statement.string, q.parameters, q.statement.types),
+ DescribeUnnamed,
+ prepared(q)
+ ])
+ }
+
+ function build(q) {
+ const parameters = []
+ , types = []
+
+ const string = stringify(q, q.strings[0], q.args[0], parameters, types)
+
+ !q.tagged && q.args.forEach(x => handleValue(x, parameters, types))
+
+ q.prepare = options.prepare && ('prepare' in q.options ? q.options.prepare : true)
+ q.string = string
+ q.signature = q.prepare && types + string
+ q.onlyDescribe && (delete statements[q.signature])
+ q.parameters = q.parameters || parameters
+ q.prepared = q.prepare && q.signature in statements
+ q.describeFirst = q.onlyDescribe || (parameters.length && !q.prepared)
+ q.statement = q.prepared
+ ? statements[q.signature]
+ : { string, types, name: q.prepare ? statementId + statementCount++ : '' }
+
+ typeof options.debug === 'function' && options.debug(id, string, parameters, types)
+ }
+
+ function stringify(q, string, value, parameters, types) {
+ for (let i = 1; i < q.strings.length; i++) {
+ string += (
+ value instanceof Query ? fragment(string, value, parameters, types) :
+ value instanceof Identifier ? value.value :
+ value instanceof Builder ? value.build(string, parameters, types, options.transform) :
+ handleValue(value, parameters, types)
+ ) + q.strings[i]
+ value = q.args[i]
+ }
+
+ return string
+ }
+
+ function fragment(string, q, parameters, types) {
+ q.fragment = true
+ return stringify(q, q.strings[0], q.args[0], parameters, types)
+ }
+
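+  // Coalesce outgoing writes: chunks of 1024 bytes or more (or writes with a
+  // callback) flush immediately, smaller ones on the next setImmediate tick.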
+ function write(x, fn) {
+ chunk = chunk ? Buffer.concat([chunk, x]) : Buffer.from(x)
+ if (fn || chunk.length >= 1024)
+ return nextWrite(fn)
+ nextWriteTimer === null && (nextWriteTimer = setImmediate(nextWrite))
+ return true
+ }
+
+ function nextWrite(fn) {
+ const x = socket.write(chunk, fn)
+ nextWriteTimer !== null && clearImmediate(nextWriteTimer)
+ chunk = nextWriteTimer = null
+ return x
+ }
+
+ function connectTimedOut() {
+ errored(Errors.connection('CONNECT_TIMEOUT', options, socket))
+ socket.destroy()
+ }
+
+ async function secure() {
+ write(SSLRequest)
+ const canSSL = await new Promise(r => socket.once('data', x => r(x[0] === 83))) // S
+
+ if (!canSSL && ssl === 'prefer')
+ return connected()
+
+ socket.removeAllListeners()
+ socket = tls.connect({
+ socket,
+ ...(ssl === 'require' || ssl === 'allow' || ssl === 'prefer'
+ ? { rejectUnauthorized: false }
+ : ssl
+ )
+ })
+ socket.on('secureConnect', connected)
+ socket.on('error', error)
+ socket.on('close', closed)
+ socket.on('drain', drain)
+ }
+
+ /* c8 ignore next 3 */
+ function drain() {
+ ondrain(connection)
+ }
+
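+  // Reassemble complete protocol messages from raw socket chunks, buffering
+  // partial reads until the declared 32-bit message length has fully arrived.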
+ function data(x) {
+ if (incomings) {
+ incomings.push(x)
+ remaining -= x.length
+ if (remaining >= 0)
+ return
+ }
+
+ incoming = incomings
+ ? Buffer.concat(incomings, length - remaining)
+ : incoming.length === 0
+ ? x
+ : Buffer.concat([incoming, x], incoming.length + x.length)
+
+ while (incoming.length > 4) {
+ length = incoming.readUInt32BE(1)
+ if (length >= incoming.length) {
+ remaining = length - incoming.length
+ incomings = [incoming]
+ break
+ }
+
+ try {
+ handle(incoming.slice(0, length + 1))
+ } catch (e) {
+ query && (query.cursorFn || query.describeFirst) && write(Sync)
+ errored(e)
+ }
+ incoming = incoming.slice(length + 1)
+ remaining = 0
+ incomings = null
+ }
+ }
+
+ function connect() {
+ terminated = false
+ backendParameters = {}
+ connectTimer.start()
+ socket.on('connect', ssl ? secure : connected)
+
+ if (options.path)
+ return socket.connect(options.path)
+
+ socket.connect(port[hostIndex], host[hostIndex])
+ hostIndex = (hostIndex + 1) % port.length
+ }
+
+ function reconnect() {
+ setTimeout(connect, closedDate ? closedDate + delay - Date.now() : 0)
+ }
+
+ function connected() {
+ try {
+ statements = {}
+ needsTypes = options.fetch_types
+ statementId = Math.random().toString(36).slice(2)
+ statementCount = 1
+ lifeTimer.start()
+ socket.on('data', data)
+      keep_alive && socket.setKeepAlive && socket.setKeepAlive(true, 1000 * keep_alive)
+ const s = StartupMessage()
+ write(s)
+ } catch (err) {
+ error(err)
+ }
+ }
+
+ function error(err) {
+ if (connection.state === 'connecting' && options.host[retries + 1])
+ return
+
+ errored(err)
+ while (sent.length)
+ queryError(sent.shift(), err)
+ }
+
+ function errored(err) {
+ stream && (stream.destroy(err), stream = null)
+ query && queryError(query, err)
+ initial && (queryError(initial, err), initial = null)
+ }
+
+ function queryError(query, err) {
+ query.reject(Object.create(err, {
+ stack: { value: err.stack + query.origin.replace(/.*\n/, '\n'), enumerable: options.debug },
+ query: { value: query.string, enumerable: options.debug },
+ parameters: { value: query.parameters, enumerable: options.debug },
+ args: { value: query.args, enumerable: options.debug },
+ types: { value: query.statement && query.statement.types, enumerable: options.debug }
+ }))
+ }
+
+ function end() {
+ return ending || (
+ !connection.reserved && onend(connection),
+ !connection.reserved && !initial && !query && sent.length === 0
+ ? Promise.resolve(terminate())
+ : ending = new Promise(r => ended = r)
+ )
+ }
+
+ function terminate() {
+ terminated = true
+ if (stream || query || initial || sent.length)
+ error(Errors.connection('CONNECTION_DESTROYED', options))
+
+ clearImmediate(nextWriteTimer)
+ socket.removeListener('data', data)
+ socket.removeListener('connect', connected)
+ socket.readyState !== 'closed' && socket.end(b().X().end())
+ ended && (ended(), ending = ended = null)
+ }
+
+ function closed(hadError) {
+ incoming = Buffer.alloc(0)
+ remaining = 0
+ incomings = null
+ clearImmediate(nextWriteTimer)
+ socket.removeListener('data', data)
+ socket.removeListener('connect', connected)
+ idleTimer.cancel()
+ lifeTimer.cancel()
+ connectTimer.cancel()
+
+ if (socket.encrypted) {
+ socket.removeAllListeners()
+ socket = createSocket()
+ }
+
+ if (initial)
+ return reconnect()
+
+ !hadError && (query || sent.length) && error(Errors.connection('CONNECTION_CLOSED', options, socket))
+ closedDate = Date.now()
+ hadError && options.shared.retries++
+ delay = (typeof backoff === 'function' ? backoff(options.shared.retries) : backoff) * 1000
+ onclose(connection)
+ }
+
+ /* Handlers */
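+  // Route each backend message to its handler by the tag byte at the start of the packet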
+ function handle(xs, x = xs[0]) {
+ (
+ x === 68 ? DataRow : // D
+ x === 100 ? CopyData : // d
+ x === 65 ? NotificationResponse : // A
+ x === 83 ? ParameterStatus : // S
+ x === 90 ? ReadyForQuery : // Z
+ x === 67 ? CommandComplete : // C
+ x === 50 ? BindComplete : // 2
+ x === 49 ? ParseComplete : // 1
+ x === 116 ? ParameterDescription : // t
+ x === 84 ? RowDescription : // T
+ x === 82 ? Authentication : // R
+ x === 110 ? NoData : // n
+ x === 75 ? BackendKeyData : // K
+ x === 69 ? ErrorResponse : // E
+ x === 115 ? PortalSuspended : // s
+ x === 51 ? CloseComplete : // 3
+ x === 71 ? CopyInResponse : // G
+ x === 78 ? NoticeResponse : // N
+ x === 72 ? CopyOutResponse : // H
+ x === 99 ? CopyDone : // c
+ x === 73 ? EmptyQueryResponse : // I
+ x === 86 ? FunctionCallResponse : // V
+ x === 118 ? NegotiateProtocolVersion : // v
+ x === 87 ? CopyBothResponse : // W
+ /* c8 ignore next */
+ UnknownMessage
+ )(xs)
+ }
+
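+  // A DataRow holds one row as 32-bit length-prefixed values starting at byte 7
+  // (after tag, message length and column count); a length of -1 denotes null.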
+ function DataRow(x) {
+ let index = 7
+ let length
+ let column
+ let value
+
+ const row = query.isRaw ? new Array(query.statement.columns.length) : {}
+ for (let i = 0; i < query.statement.columns.length; i++) {
+ column = query.statement.columns[i]
+ length = x.readInt32BE(index)
+ index += 4
+
+ value = length === -1
+ ? null
+ : query.isRaw
+ ? x.slice(index, index += length)
+ : column.parser === undefined
+ ? x.toString('utf8', index, index += length)
+ : column.parser.array === true
+ ? column.parser(x.toString('utf8', index + 1, index += length))
+ : column.parser(x.toString('utf8', index, index += length))
+
+ query.isRaw
+ ? (row[i] = value)
+ : (row[column.name] = transform.value.from ? transform.value.from(value) : value)
+ }
+
+ query.forEachFn
+ ? query.forEachFn(transform.row.from ? transform.row.from(row) : row, result)
+ : (result[rows++] = transform.row.from ? transform.row.from(row) : row)
+ }
+
+ function ParameterStatus(x) {
+ const [k, v] = x.toString('utf8', 5, x.length - 1).split(b.N)
+ backendParameters[k] = v
+ if (options.parameters[k] !== v) {
+ options.parameters[k] = v
+ onparameter && onparameter(k, v)
+ }
+ }
+
+ function ReadyForQuery(x) {
+ query && query.options.simple && query.resolve(results || result)
+ query = results = null
+ result = new Result()
+ connectTimer.cancel()
+
+ if (initial) {
+ if (target_session_attrs) {
+ if (!backendParameters.in_hot_standby || !backendParameters.default_transaction_read_only)
+ return fetchState()
+ else if (tryNext(target_session_attrs, backendParameters))
+ return terminate()
+ }
+
+ if (needsTypes)
+ return fetchArrayTypes()
+
+ execute(initial)
+ options.shared.retries = retries = initial = 0
+ return
+ }
+
+ while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled)
+ Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject)
+
+ if (query)
+ return // Consider opening if able and sent.length < 50
+
+ connection.reserved
+ ? x[5] === 73 // I
+ ? ending
+ ? terminate()
+ : (connection.reserved = null, onopen(connection))
+ : connection.reserved()
+ : ending
+ ? terminate()
+ : onopen(connection)
+ }
+
+ function CommandComplete(x) {
+ rows = 0
+
+ for (let i = x.length - 1; i > 0; i--) {
+ if (x[i] === 32 && x[i + 1] < 58 && result.count === null)
+ result.count = +x.toString('utf8', i + 1, x.length - 1)
+ if (x[i - 1] >= 65) {
+ result.command = x.toString('utf8', 5, i)
+ result.state = backend
+ break
+ }
+ }
+
+ final && (final(), final = null)
+
+ if (result.command === 'BEGIN' && max !== 1 && !connection.reserved)
+ return errored(Errors.generic('UNSAFE_TRANSACTION', 'Only use sql.begin or max: 1'))
+
+ if (query.options.simple)
+ return
+
+ if (query.cursorFn) {
+ result.count && query.cursorFn(result)
+ write(Sync)
+ }
+
+ query.resolve(result)
+ }
+
+ function ParseComplete() {
+ query.parsing = false
+ }
+
+ function BindComplete() {
+ !result.statement && (result.statement = query.statement)
+ result.columns = query.statement.columns
+ }
+
+ function ParameterDescription(x) {
+ const length = x.readUInt16BE(5)
+
+ for (let i = 0; i < length; ++i)
+ !query.statement.types[i] && (query.statement.types[i] = x.readUInt32BE(7 + i * 4))
+
+ query.prepare && (statements[query.signature] = query.statement)
+ query.describeFirst && !query.onlyDescribe && (write(prepared(query)), query.describeFirst = false)
+ }
+
+ function RowDescription(x) {
+ if (result.command) {
+ results = results || [result]
+ results.push(result = new Result())
+ result.count = null
+ query.statement.columns = null
+ }
+
+ const length = x.readUInt16BE(5)
+ let index = 7
+ let start
+
+ query.statement.columns = Array(length)
+
+ for (let i = 0; i < length; ++i) {
+ start = index
+ while (x[index++] !== 0);
+ const type = x.readUInt32BE(index + 6)
+ query.statement.columns[i] = {
+ name: transform.column.from
+ ? transform.column.from(x.toString('utf8', start, index - 1))
+ : x.toString('utf8', start, index - 1),
+ parser: parsers[type],
+ type
+ }
+ index += 18
+ }
+
+ result.statement = query.statement
+ if (query.onlyDescribe)
+ return (query.resolve(query.statement), write(Sync))
+ }
+
+ async function Authentication(x, type = x.readUInt32BE(5)) {
+ (
+ type === 3 ? AuthenticationCleartextPassword :
+ type === 5 ? AuthenticationMD5Password :
+ type === 10 ? SASL :
+ type === 11 ? SASLContinue :
+ type === 12 ? SASLFinal :
+ type !== 0 ? UnknownAuth :
+ noop
+ )(x, type)
+ }
+
+ /* c8 ignore next 5 */
+ async function AuthenticationCleartextPassword() {
+ write(
+ b().p().str(await Pass()).z(1).end()
+ )
+ }
+
+ async function AuthenticationMD5Password(x) {
+ write(
+ b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end()
+ )
+ }
+
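+  // SCRAM-SHA-256 (RFC 7677): send the client-first-message with a fresh nonce;
+  // SASLContinue computes the client proof and SASLFinal checks the server signature.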
+ function SASL() {
+ b().p().str('SCRAM-SHA-256' + b.N)
+ const i = b.i
+ nonce = crypto.randomBytes(18).toString('base64')
+ write(b.inc(4).str('n,,n=*,r=' + nonce).i32(b.i - i - 4, i).end())
+ }
+
+ async function SASLContinue(x) {
+ const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {})
+
+ const saltedPassword = crypto.pbkdf2Sync(
+ await Pass(),
+ Buffer.from(res.s, 'base64'),
+ parseInt(res.i), 32,
+ 'sha256'
+ )
+
+ const clientKey = hmac(saltedPassword, 'Client Key')
+
+ const auth = 'n=*,r=' + nonce + ','
+ + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i
+ + ',c=biws,r=' + res.r
+
+ serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64')
+
+ write(
+ b().p().str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')).end()
+ )
+ }
+
+ function SASLFinal(x) {
+ if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature)
+ return
+ /* c8 ignore next 5 */
+ errored(Errors.generic('SASL_SIGNATURE_MISMATCH', 'The server did not return the correct signature'))
+ socket.destroy()
+ }
+
+ function Pass() {
+ return Promise.resolve(typeof options.pass === 'function'
+ ? options.pass()
+ : options.pass
+ )
+ }
+
+ function NoData() {
+ result.statement = query.statement
+ result.statement.columns = []
+ if (query.onlyDescribe)
+ return (query.resolve(query.statement), write(Sync))
+ }
+
+ function BackendKeyData(x) {
+ backend.pid = x.readUInt32BE(5)
+ backend.secret = x.readUInt32BE(9)
+ }
+
+ async function fetchArrayTypes() {
+ needsTypes = false
+ const types = await new Query([`
+ select b.oid, b.typarray
+ from pg_catalog.pg_type a
+ left join pg_catalog.pg_type b on b.oid = a.typelem
+ where a.typcategory = 'A'
+ group by b.oid, b.typarray
+ order by b.oid
+ `], [], execute)
+ types.forEach(({ oid, typarray }) => addArrayType(oid, typarray))
+ }
+
+ function addArrayType(oid, typarray) {
+ const parser = options.parsers[oid]
+ options.shared.typeArrayMap[oid] = typarray
+ options.parsers[typarray] = (xs) => arrayParser(xs, parser)
+ options.parsers[typarray].array = true
+ options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid])
+ }
+
+ function tryNext(x, xs) {
+ return (
+ (x === 'read-write' && xs.default_transaction_read_only === 'on') ||
+ (x === 'read-only' && xs.default_transaction_read_only === 'off') ||
+ (x === 'primary' && xs.in_hot_standby === 'off') ||
+ (x === 'standby' && xs.in_hot_standby === 'on') ||
+ (x === 'prefer-standby' && xs.in_hot_standby === 'off' && options.host[retries])
+ )
+ }
+
+ function fetchState() {
+ const query = new Query([`
+ show transaction_read_only;
+ select pg_catalog.pg_is_in_recovery()
+ `], [], execute, null, { simple: true })
+ query.resolve = ([[a], [b]]) => {
+ backendParameters.default_transaction_read_only = a.transaction_read_only
+ backendParameters.in_hot_standby = b.pg_is_in_recovery ? 'on' : 'off'
+ }
+ query.execute()
+ }
+
+ function ErrorResponse(x) {
+ query && (query.cursorFn || query.describeFirst) && write(Sync)
+ const error = Errors.postgres(parseError(x))
+ query && query.retried
+ ? errored(query.retried)
+ : query && retryRoutines.has(error.routine)
+ ? retry(query, error)
+ : errored(error)
+ }
+
+ function retry(q, error) {
+ delete statements[q.signature]
+ q.retried = error
+ execute(q)
+ }
+
+ function NotificationResponse(x) {
+ if (!onnotify)
+ return
+
+ let index = 9
+ while (x[index++] !== 0);
+ onnotify(
+ x.toString('utf8', 9, index - 1),
+ x.toString('utf8', index, x.length - 1)
+ )
+ }
+
+ async function PortalSuspended() {
+ try {
+ const x = await Promise.resolve(query.cursorFn(result))
+ rows = 0
+ x === CLOSE
+ ? write(Close(query.portal))
+ : (result = new Result(), write(Execute('', query.cursorRows)))
+ } catch (err) {
+ write(Sync)
+ query.reject(err)
+ }
+ }
+
+ function CloseComplete() {
+ result.count && query.cursorFn(result)
+ query.resolve(result)
+ }
+
+ function CopyInResponse() {
+ stream = new Stream.Writable({
+ write(chunk, encoding, callback) {
+ socket.write(b().d().raw(chunk).end(), callback)
+ },
+ destroy(error, callback) {
+ callback(error)
+ socket.write(b().f().str(error + b.N).end())
+ },
+ final(callback) {
+ socket.write(b().c().end())
+ final = callback
+ }
+ })
+ query.resolve(stream)
+ }
+
+ function CopyOutResponse() {
+ stream = new Stream.Readable({
+ read() { socket.resume() }
+ })
+ query.resolve(stream)
+ }
+
+ /* c8 ignore next 3 */
+ function CopyBothResponse() {
+ stream = new Stream.Duplex({
+ read() { socket.resume() },
+ /* c8 ignore next 11 */
+ write(chunk, encoding, callback) {
+ socket.write(b().d().raw(chunk).end(), callback)
+ },
+ destroy(error, callback) {
+ callback(error)
+ socket.write(b().f().str(error + b.N).end())
+ },
+ final(callback) {
+ socket.write(b().c().end())
+ final = callback
+ }
+ })
+ query.resolve(stream)
+ }
+
+ function CopyData(x) {
+ stream.push(x.slice(5)) || socket.pause()
+ }
+
+ function CopyDone() {
+ stream.push(null)
+ stream = null
+ }
+
+  function NoticeResponse(x) {
+    onnotice
+      ? onnotice(parseError(x))
+      : console.log(parseError(x)) // eslint-disable-line
+  }
+
+ /* c8 ignore next 3 */
+ function EmptyQueryResponse() {
+ /* noop */
+ }
+
+ /* c8 ignore next 3 */
+ function FunctionCallResponse() {
+ errored(Errors.notSupported('FunctionCallResponse'))
+ }
+
+ /* c8 ignore next 3 */
+ function NegotiateProtocolVersion() {
+ errored(Errors.notSupported('NegotiateProtocolVersion'))
+ }
+
+ /* c8 ignore next 3 */
+ function UnknownMessage(x) {
+ console.error('Postgres.js : Unknown Message:', x[0]) // eslint-disable-line
+ }
+
+ /* c8 ignore next 3 */
+ function UnknownAuth(x, type) {
+ console.error('Postgres.js : Unknown Auth:', type) // eslint-disable-line
+ }
+
+ /* Messages */
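+  // Builders for the frontend wire messages (Bind, Parse, Describe, Execute,
+  // Close, StartupMessage), composed with the byte writer from bytes.js.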
+ function Bind(parameters, types, statement = '', portal = '') {
+ let prev
+ , type
+
+ b().B().str(portal + b.N).str(statement + b.N).i16(0).i16(parameters.length)
+
+ parameters.forEach((x, i) => {
+ if (x === null)
+ return b.i32(0xFFFFFFFF)
+
+ type = types[i]
+ parameters[i] = x = type in options.serializers
+ ? options.serializers[type](x)
+ : '' + x
+
+ prev = b.i
+ b.inc(4).str(x).i32(b.i - prev - 4, prev)
+ })
+
+ b.i16(0)
+
+ return b.end()
+ }
+
+ function Parse(str, parameters, types, name = '') {
+ b().P().str(name + b.N).str(str + b.N).i16(parameters.length)
+ parameters.forEach((x, i) => b.i32(types[i] || 0))
+ return b.end()
+ }
+
+ function Describe(x, name = '') {
+ return b().D().str(x).str(name + b.N).end()
+ }
+
+ function Execute(portal = '', rows = 0) {
+ return Buffer.concat([
+ b().E().str(portal + b.N).i32(rows).end(),
+ Flush
+ ])
+ }
+
+ function Close(portal = '') {
+ return Buffer.concat([
+ b().C().str('P').str(portal + b.N).end(),
+ b().S().end()
+ ])
+ }
+
+ function StartupMessage() {
+ return b().inc(4).i16(3).z(2).str(
+ Object.entries(Object.assign({
+ user,
+ database,
+ client_encoding: '\'utf-8\''
+ },
+ options.connection
+ )).filter(([, v]) => v).map(([k, v]) => k + b.N + v).join(b.N)
+ ).z(2).end(0)
+ }
+
+}
+
+function parseError(x) {
+ const error = {}
+ let start = 5
+ for (let i = 5; i < x.length - 1; i++) {
+ if (x[i] === 0) {
+ error[errorFields[x[start]]] = x.toString('utf8', start + 1, i)
+ start = i + 1
+ }
+ }
+ return error
+}
+
+function md5(x) {
+ return crypto.createHash('md5').update(x).digest('hex')
+}
+
+function hmac(key, x) {
+ return Buffer.from(new HmacSha256(key).update(x).digest())
+}
+
+function sha256(x) {
+ return crypto.createHash('sha256').update(x).digest()
+}
+
+function xor(a, b) {
+ const length = Math.max(a.length, b.length)
+ const buffer = Buffer.allocUnsafe(length)
+ for (let i = 0; i < length; i++)
+ buffer[i] = a[i] ^ b[i]
+ return buffer
+}
+
+function timer(fn, seconds) {
+ seconds = typeof seconds === 'function' ? seconds() : seconds
+ if (!seconds)
+ return { cancel: noop, start: noop }
+
+ let timer
+ return {
+ cancel() {
+ timer && (clearTimeout(timer), timer = null)
+ },
+ start() {
+ timer && clearTimeout(timer)
+      timer = setTimeout(done, seconds * 1000, arguments)
+      // Unref the timer where supported so pending timeouts don't hold the Deno process open
+      typeof Deno.unrefTimer === 'function' && Deno.unrefTimer(timer)
+ }
+ }
+
+ function done(args) {
+ fn.apply(null, args)
+ timer = null
+ }
+}
diff --git a/lib/errors.js b/deno/src/errors.js
similarity index 65%
rename from lib/errors.js
rename to deno/src/errors.js
index 16732d44..0ff83c42 100644
--- a/lib/errors.js
+++ b/deno/src/errors.js
@@ -1,4 +1,4 @@
-class PostgresError extends Error {
+export class PostgresError extends Error {
constructor(x) {
super(x.message)
this.name = this.constructor.name
@@ -6,9 +6,7 @@ class PostgresError extends Error {
}
}
-module.exports.PostgresError = PostgresError
-
-module.exports.errors = {
+export const Errors = {
connection,
postgres,
generic,
@@ -16,13 +14,14 @@ module.exports.errors = {
}
function connection(x, options, socket) {
+ const { host, port } = socket || options
const error = Object.assign(
- new Error(('write ' + x + ' ' + (options.path || (socket.host + ':' + socket.port)))),
+ new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
{
code: x,
errno: x,
- address: options.path || socket.host
- }, options.path ? {} : { port: socket.port }
+ address: options.path || host
+ }, options.path ? {} : { port: port }
)
Error.captureStackTrace(error, connection)
return error
@@ -34,12 +33,13 @@ function postgres(x) {
return error
}
-function generic(x) {
- const error = Object.assign(new Error(x.message), x)
+function generic(code, message) {
+ const error = Object.assign(new Error(code + ': ' + message), { code })
Error.captureStackTrace(error, generic)
return error
}
+/* c8 ignore next 10 */
function notSupported(x) {
const error = Object.assign(
new Error(x + ' (B) is not supported'),
diff --git a/deno/src/index.js b/deno/src/index.js
new file mode 100644
index 00000000..82cdeb59
--- /dev/null
+++ b/deno/src/index.js
@@ -0,0 +1,538 @@
+import process from 'https://deno.land/std@0.120.0/node/process.ts'
+import os from 'https://deno.land/std@0.120.0/node/os.ts'
+import fs from 'https://deno.land/std@0.120.0/node/fs.ts'
+import Stream from 'https://deno.land/std@0.120.0/node/stream.ts'
+
+import {
+ mergeUserTypes,
+ inferType,
+ Parameter,
+ Identifier,
+ Builder,
+ toPascal,
+ toCamel,
+ toKebab,
+ fromPascal,
+ fromCamel,
+  fromKebab,
+  BigInt
+} from './types.js'
+
+import Connection from './connection.js'
+import { Query, CLOSE } from './query.js'
+import Queue from './queue.js'
+import { Errors, PostgresError } from './errors.js'
+import Subscribe from './subscribe.js'
+
+Object.assign(Postgres, {
+ PostgresError,
+ toPascal,
+ toCamel,
+ toKebab,
+ fromPascal,
+ fromCamel,
+ fromKebab,
+ BigInt
+})
+
+export default Postgres
+
+function Postgres(a, b) {
+ const options = parseOptions(a, b)
+ , subscribe = Subscribe(Postgres, { ...options })
+
+ let ending = false
+
+ const queries = Queue()
+ , connections = [...Array(options.max)].map(() => Connection(options, { onopen, onend, ondrain, onclose }))
+ , closed = Queue(connections)
+ , reserved = Queue()
+ , open = Queue()
+ , busy = Queue()
+ , full = Queue()
+ , ended = Queue()
+ , connecting = Queue()
+ , queues = { closed, ended, connecting, reserved, open, busy, full }
+
+ const sql = Sql(handler)
+
+ Object.assign(sql, {
+ get parameters() { return options.parameters },
+ largeObject,
+ subscribe,
+ CLOSE,
+ END: CLOSE,
+ PostgresError,
+ options,
+ listen,
+ notify,
+ begin,
+ end
+ })
+
+ return sql
+
+ function Sql(handler, instant) {
+ handler.debug = options.debug
+
+ Object.entries(options.types).reduce((acc, [name, type]) => {
+ acc[name] = (x) => new Parameter(x, type.to)
+ return acc
+ }, typed)
+
+ Object.assign(sql, {
+ types: typed,
+ typed,
+ unsafe,
+ array,
+ json,
+ file
+ })
+
+ return sql
+
+ function typed(value, type) {
+ return new Parameter(value, type)
+ }
+
+ function sql(strings, ...args) {
+ const query = strings && Array.isArray(strings.raw)
+ ? new Query(strings, args, handler, cancel)
+ : typeof strings === 'string' && !args.length
+ ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
+ : new Builder(strings, args)
+ instant && query instanceof Query && query.execute()
+ return query
+ }
+
+ function unsafe(string, args = [], options = {}) {
+ arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+ const query = new Query([string], args, handler, cancel, {
+ prepare: false,
+ ...options,
+ simple: 'simple' in options ? options.simple : args.length === 0
+ })
+ instant && query.execute()
+ return query
+ }
+
+ function file(path, args = [], options = {}) {
+ arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+ const query = new Query([], args, (query) => {
+ fs.readFile(path, 'utf8', (err, string) => {
+ if (err)
+ return query.reject(err)
+
+ query.strings = [string]
+ handler(query)
+ })
+ }, cancel, {
+ ...options,
+ simple: 'simple' in options ? options.simple : args.length === 0
+ })
+ instant && query.execute()
+ return query
+ }
+ }
+
+ async function listen(name, fn) {
+ const sql = listen.sql || (listen.sql = Postgres({
+ ...options,
+ max: 1,
+ idle_timeout: null,
+ max_lifetime: null,
+ fetch_types: false,
+ onclose() {
+ Object.entries(listen.channels).forEach(([channel, { listeners }]) => {
+ delete listen.channels[channel]
+ Promise.all(listeners.map(fn => listen(channel, fn).catch(() => { /* noop */ })))
+ })
+ },
+ onnotify(c, x) {
+ c in listen.channels && listen.channels[c].listeners.forEach(fn => fn(x))
+ }
+ }))
+
+ const channels = listen.channels || (listen.channels = {})
+ , exists = name in channels
+ , channel = exists ? channels[name] : (channels[name] = { listeners: [fn] })
+
+ if (exists) {
+ channel.listeners.push(fn)
+ return Promise.resolve({ ...channel.result, unlisten })
+ }
+
+ channel.result = await sql`listen ${ sql(name) }`
+ channel.result.unlisten = unlisten
+
+ return channel.result
+
+ async function unlisten() {
+ if (name in channels === false)
+ return
+
+ channel.listeners = channel.listeners.filter(x => x !== fn)
+ if (channels[name].listeners.length)
+ return
+
+ delete channels[name]
+ return sql`unlisten ${ sql(name) }`
+ }
+ }
+
+ async function notify(channel, payload) {
+ return await sql`select pg_notify(${ channel }, ${ '' + payload })`
+ }
+
+ async function begin(options, fn) {
+ !fn && (fn = options, options = '')
+ const queries = Queue()
+ let savepoints = 0
+ , connection
+
+    await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute })
+    return await scope(connection, fn)
+
+ async function scope(c, fn, name) {
+ const sql = Sql(handler, true)
+ sql.savepoint = savepoint
+ let errored
+ name && await sql`savepoint ${ sql(name) }`
+ try {
+ const result = await new Promise((resolve, reject) => {
+ errored = reject
+ const x = fn(sql)
+ Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
+ })
+ !name && await sql`commit`
+ return result
+ } catch (e) {
+ await (name
+ ? sql`rollback to ${ sql(name) }`
+ : sql`rollback`
+ )
+ throw e
+ }
+
+ function savepoint(name, fn) {
+ if (name && Array.isArray(name.raw))
+ return savepoint(sql => sql.apply(sql, arguments))
+
+ arguments.length === 1 && (fn = name, name = null)
+ return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
+ }
+
+ function handler(q) {
+ errored && q.catch(errored)
+ c.state === 'full'
+ ? queries.push(q)
+ : c.execute(q) || (c.state = 'full', full.push(c))
+ }
+ }
+
+ function onexecute(c) {
+ queues[c.state].remove(c)
+ c.state = 'reserved'
+ c.reserved = () => queries.length
+ ? c.execute(queries.shift())
+ : c.state = 'reserved'
+ reserved.push(c)
+ connection = c
+ }
+ }
+
+ function largeObject(oid, mode = 0x00020000 | 0x00040000) {
+ return new Promise(async(resolve, reject) => {
+ await sql.begin(async sql => {
+ let finish
+ !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
+ const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
+
+ const lo = {
+ writable,
+ readable,
+ close : () => sql`select lo_close(${ fd })`.then(finish),
+ tell : () => sql`select lo_tell64(${ fd })`,
+ read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
+ write : (x) => sql`select lowrite(${ fd }, ${ x })`,
+ truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
+ seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
+ size : () => sql`
+ select
+ lo_lseek64(${ fd }, location, 0) as position,
+ seek.size
+ from (
+ select
+ lo_lseek64($1, 0, 2) as size,
+ tell.location
+ from (select lo_tell64($1) as location) tell
+ ) seek
+ `
+ }
+
+ resolve(lo)
+
+      return new Promise(r => finish = r)
+
+ async function readable({
+ highWaterMark = 2048 * 8,
+ start = 0,
+ end = Infinity
+ } = {}) {
+ let max = end - start
+ start && await lo.seek(start)
+ return new Stream.Readable({
+ highWaterMark,
+ async read(size) {
+ const l = size > max ? size - max : size
+ max -= size
+ const [{ data }] = await lo.read(l)
+ this.push(data)
+ if (data.length < size)
+ this.push(null)
+ }
+ })
+ }
+
+ async function writable({
+ highWaterMark = 2048 * 8,
+ start = 0
+ } = {}) {
+ start && await lo.seek(start)
+ return new Stream.Writable({
+ highWaterMark,
+ write(chunk, encoding, callback) {
+ lo.write(chunk).then(() => callback(), callback)
+ }
+ })
+ }
+ }).catch(reject)
+ })
+ }
+
+ function json(x) {
+ return new Parameter(x, 3802)
+ }
+
+ function array(x, type) {
+ if (!Array.isArray(x))
+ return array(Array.from(arguments))
+
+ return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
+ }
+
+ function handler(query) {
+ if (ending)
+ return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
+
+ if (open.length)
+ return go(open, query)
+
+ if (closed.length)
+ return connect(closed.shift(), query)
+
+ busy.length
+ ? go(busy, query)
+ : queries.push(query)
+ }
+
+ function go(xs, query) {
+ const c = xs.shift()
+ return c.execute(query)
+ ? (c.state = 'busy', busy.push(c))
+ : (c.state = 'full', full.push(c))
+ }
+
+ function cancel(query) {
+ return new Promise((resolve, reject) => {
+ query.state
+ ? query.active
+ ? Connection(options, {}).cancel(query.state, resolve, reject)
+ : query.cancelled = { resolve, reject }
+ : (
+ queries.remove(query),
+ query.cancelled = true,
+ query.reject(Errors.generic('57014', 'canceling statement due to user request')),
+ resolve()
+ )
+ })
+ }
+
+ async function end({ timeout = null } = {}) {
+ if (ending)
+ return ending
+
+    await 1 // wait a tick so queries started in this same turn are queued before ending
+ let timer
+ return ending = Promise.race([
+ new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
+ Promise.all(connections.map(c => c.end()).concat(
+ listen.sql ? listen.sql.end({ timeout: 0 }) : [],
+ subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
+ ))
+ ]).then(() => clearTimeout(timer))
+ }
+
+ async function destroy(resolve) {
+ await Promise.all(connections.map(c => c.terminate()))
+ while (queries.length)
+ queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
+ resolve()
+ }
+
+ function connect(c, query) {
+ c.state = 'connecting'
+ connecting.push(c)
+ c.connect(query)
+ }
+
+ function onend(c) {
+ queues[c.state].remove(c)
+ c.state = 'ended'
+ ended.push(c)
+ }
+
+ function onopen(c) {
+ queues[c.state].remove(c)
+ if (queries.length === 0)
+ return (c.state = 'open', open.push(c))
+
+ let max = Math.ceil(queries.length / (connecting.length + 1))
+ , ready = true
+
+ while (ready && queries.length && max-- > 0)
+ ready = c.execute(queries.shift())
+
+ ready
+ ? (c.state = 'busy', busy.push(c))
+ : (c.state = 'full', full.push(c))
+ }
+
+ function ondrain(c) {
+ full.remove(c)
+ onopen(c)
+ }
+
+ function onclose(c) {
+ queues[c.state].remove(c)
+ c.state = 'closed'
+ c.reserved = null
+ options.onclose && options.onclose(c.id)
+ queries.length
+ ? connect(c, queries.shift())
+ : queues.closed.push(c)
+ }
+}
+
+function parseOptions(a, b) {
+ if (a && a.shared)
+ return a
+
+ const env = process.env // eslint-disable-line
+ , o = (typeof a === 'string' ? b : a) || {}
+ , { url, multihost } = parseUrl(a, env)
+ , query = url.searchParams
+ , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
+ , port = o.port || url.port || env.PGPORT || 5432
+ , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
+
+ return Object.assign({
+ host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
+ port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
+ path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
+ database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
+ user : user,
+ pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
+ max : o.max || query.get('max') || 10,
+ types : o.types || {},
+ ssl : o.ssl || parseSSL(query.get('sslmode') || query.get('ssl')) || false,
+ idle_timeout : o.idle_timeout || query.get('idle_timeout') || env.PGIDLE_TIMEOUT || warn(o.timeout),
+ connect_timeout : o.connect_timeout || query.get('connect_timeout') || env.PGCONNECT_TIMEOUT || 30,
+ max_lifetime : o.max_lifetime || url.max_lifetime || max_lifetime,
+ max_pipeline : o.max_pipeline || url.max_pipeline || 100,
+ backoff : o.backoff || url.backoff || backoff,
+ keep_alive : o.keep_alive || url.keep_alive || 60,
+ prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true,
+ onnotice : o.onnotice,
+ onnotify : o.onnotify,
+ onclose : o.onclose,
+ onparameter : o.onparameter,
+ transform : parseTransform(o.transform || {}),
+ connection : Object.assign({ application_name: 'postgres.js' }, o.connection),
+ target_session_attrs: tsa(o, url, env),
+ debug : o.debug,
+ fetch_types : 'fetch_types' in o ? o.fetch_types : true,
+ parameters : {},
+ shared : { retries: 0, typeArrayMap: {} }
+ },
+ mergeUserTypes(o.types)
+ )
+}
+
+function tsa(o, url, env) {
+ const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
+ if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
+ return x
+
+ throw new Error('target_session_attrs ' + x + ' is not supported')
+}
+
+function backoff(retries) {
+ return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
+}
+
+function max_lifetime() {
+ return 60 * (30 + Math.random() * 30)
+}
+
+function parseTransform(x) {
+ return {
+ column: {
+ from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
+ to: x.column && x.column.to
+ },
+ value: {
+ from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
+ to: x.value && x.value.to
+ },
+ row: {
+ from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
+ to: x.row && x.row.to
+ }
+ }
+}
+
+function parseSSL(x) {
+ return x !== 'disable' && x !== 'false' && x
+}
+
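+// URL() cannot parse comma-separated multi-host urls, so pull the host list out
+// first and parse the url with only the first host substituted in.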
+function parseUrl(url) {
+ if (typeof url !== 'string')
+ return { url: { searchParams: new Map() } }
+
+ let host = url
+ host = host.slice(host.indexOf('://') + 3)
+ host = host.split(/[?/]/)[0]
+ host = host.slice(host.indexOf('@') + 1)
+
+ return {
+ url: new URL(url.replace(host, host.split(',')[0])),
+ multihost: host.indexOf(',') > -1 && host
+ }
+}
+
+function warn(x) {
+ typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line
+ return x
+}
+
+function osUsername() {
+ try {
+ return os.userInfo().username // eslint-disable-line
+ } catch (_) {
+ return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
+ }
+}
diff --git a/deno/src/query.js b/deno/src/query.js
new file mode 100644
index 00000000..513c044a
--- /dev/null
+++ b/deno/src/query.js
@@ -0,0 +1,161 @@
+const originCache = new Map()
+ , originStackCache = new Map()
+ , originError = Symbol('OriginError')
+
+export const CLOSE = {}
+export class Query extends Promise {
+ constructor(strings, args, handler, canceller, options = {}) {
+ let resolve
+ , reject
+
+ super((a, b) => {
+ resolve = a
+ reject = b
+ })
+
+ this.tagged = Array.isArray(strings.raw)
+ this.strings = strings
+ this.args = args
+ this.handler = handler
+ this.canceller = canceller
+ this.options = options
+
+ this.state = null
+ this.statement = null
+
+ this.resolve = x => (this.active = false, resolve(x))
+ this.reject = x => (this.active = false, reject(x))
+
+ this.active = false
+ this.cancelled = null
+ this.executed = false
+ this.signature = ''
+
+ this[originError] = handler.debug || !this.tagged
+ ? new Error()
+ : cachedError(this.strings)
+ }
+
+ get origin() {
+ return this.handler.debug || !this.tagged
+ ? this[originError].stack
+ : originStackCache.has(this.strings)
+ ? originStackCache.get(this.strings)
+ : originStackCache.set(this.strings, this[originError].stack).get(this.strings)
+ }
+
+ static get [Symbol.species]() {
+ return Promise
+ }
+
+ cancel() {
+ return this.canceller && (this.canceller(this), this.canceller = null)
+ }
+
+ async readable() {
+ this.options.simple = true
+ this.options.prepare = false
+ this.streaming = true
+ return this
+ }
+
+ async writable() {
+ this.options.simple = true
+ this.options.prepare = false
+ this.streaming = true
+ return this
+ }
+
+ cursor(rows = 1, fn) {
+ this.options.simple = false
+ if (typeof rows === 'function') {
+ fn = rows
+ rows = 1
+ }
+
+ this.cursorRows = rows
+
+ if (typeof fn === 'function')
+ return (this.cursorFn = fn, this)
+
+ let prev
+ return {
+ [Symbol.asyncIterator]: () => ({
+ next: () => {
+ if (this.executed && !this.active)
+ return { done: true }
+
+ prev && prev()
+ const promise = new Promise((resolve, reject) => {
+ this.cursorFn = value => {
+ resolve({ value, done: false })
+ return new Promise(r => prev = r)
+ }
+ this.resolve = () => (this.active = false, resolve({ done: true }))
+ this.reject = x => (this.active = false, reject(x))
+ })
+ this.execute()
+ return promise
+ },
+ return() {
+ prev && prev(CLOSE)
+ return { done: true }
+ }
+ })
+ }
+ }
+
+ describe() {
+ this.onlyDescribe = true
+ return this
+ }
+
+ stream() {
+ throw new Error('.stream has been renamed to .forEach')
+ }
+
+ forEach(fn) {
+ this.forEachFn = fn
+ return this
+ }
+
+ raw() {
+ this.isRaw = true
+ return this
+ }
+
+ async handle() {
+ !this.executed && (this.executed = true) && await 1 && this.handler(this)
+ }
+
+ execute() {
+ this.handle()
+ return this
+ }
+
+ then() {
+ this.handle()
+ return super.then.apply(this, arguments)
+ }
+
+ catch() {
+ this.handle()
+ return super.catch.apply(this, arguments)
+ }
+
+ finally() {
+ this.handle()
+ return super.finally.apply(this, arguments)
+ }
+}
+
+function cachedError(xs) {
+ if (originCache.has(xs))
+ return originCache.get(xs)
+
+ const x = Error.stackTraceLimit
+ Error.stackTraceLimit = 4
+ originCache.set(xs, new Error())
+ Error.stackTraceLimit = x
+ return originCache.get(xs)
+}
diff --git a/deno/src/queue.js b/deno/src/queue.js
new file mode 100644
index 00000000..c4ef9716
--- /dev/null
+++ b/deno/src/queue.js
@@ -0,0 +1,31 @@
+export default Queue
+
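+// FIFO queue that avoids Array#shift's O(n) cost by advancing a read index
+// (clearing consumed slots) and resetting the backing array once fully drained.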
+function Queue(initial = []) {
+ let xs = initial.slice()
+ let index = 0
+
+ return {
+ get length() {
+ return xs.length - index
+ },
+ remove: (x) => {
+ const index = xs.indexOf(x)
+ return index === -1
+ ? null
+ : (xs.splice(index, 1), x)
+ },
+ push: (x) => (xs.push(x), x),
+ shift: () => {
+ const out = xs[index++]
+
+ if (index === xs.length) {
+ index = 0
+ xs = []
+ } else {
+ xs[index - 1] = undefined
+ }
+
+ return out
+ }
+ }
+}
diff --git a/deno/src/result.js b/deno/src/result.js
new file mode 100644
index 00000000..31014284
--- /dev/null
+++ b/deno/src/result.js
@@ -0,0 +1,16 @@
+export default class Result extends Array {
+ constructor() {
+ super()
+ Object.defineProperties(this, {
+ count: { value: null, writable: true },
+ state: { value: null, writable: true },
+ command: { value: null, writable: true },
+ columns: { value: null, writable: true },
+ statement: { value: null, writable: true }
+ })
+ }
+
+ static get [Symbol.species]() {
+ return Array
+ }
+}
diff --git a/deno/src/subscribe.js b/deno/src/subscribe.js
new file mode 100644
index 00000000..8b949767
--- /dev/null
+++ b/deno/src/subscribe.js
@@ -0,0 +1,232 @@
+import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts'
+export default function Subscribe(postgres, options) {
+ const listeners = new Map()
+
+ let connection
+
+ return async function subscribe(event, fn) {
+ event = parseEvent(event)
+
+ options.max = 1
+ options.onclose = onclose
+ options.connection = {
+ ...options.connection,
+ replication: 'database'
+ }
+
+ let stream
+ , ended = false
+
+ const sql = postgres(options)
+ , slot = 'postgresjs_' + Math.random().toString(36).slice(2)
+ , end = sql.end
+
+ sql.end = async() => {
+ ended = true
+ stream && (await new Promise(r => (stream.once('end', r), stream.end())))
+ return end()
+ }
+
+ !connection && (subscribe.sql = sql, connection = init(sql, slot, options.publications))
+
+ const fns = listeners.has(event)
+ ? listeners.get(event).add(fn)
+ : listeners.set(event, new Set([fn]))
+
+ const unsubscribe = () => {
+ fns.delete(fn)
+ fns.size === 0 && listeners.delete(event)
+ }
+
+ return connection.then(x => (stream = x, { unsubscribe }))
+
+ async function onclose() {
+ stream = null
+ !ended && (stream = await init(sql, slot, options.publications))
+ }
+ }
+
+ async function init(sql, slot, publications = 'alltables') {
+ if (!publications)
+ throw new Error('Missing publication names')
+
+ const [x] = await sql.unsafe(
+ `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
+ )
+
+ const stream = await sql.unsafe(
+ `START_REPLICATION SLOT ${ slot } LOGICAL ${
+ x.consistent_point
+ } (proto_version '1', publication_names '${ publications }')`
+ ).writable()
+
+ const state = {
+ lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex')))
+ }
+
+ stream.on('data', data)
+ stream.on('error', (error) => {
+ console.error('Logical Replication Error - Reconnecting', error)
+ sql.end()
+ })
+
+ return stream
+
+ function data(x) {
+ if (x[0] === 0x77)
+ parse(x.slice(25), state, sql.options.parsers, handle)
+ else if (x[0] === 0x6b && x[17])
+ pong()
+ }
+
+ function handle(a, b) {
+ const path = b.relation.schema + '.' + b.relation.table
+ call('*', a, b)
+ call('*:' + path, a, b)
+ b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
+ call(b.command, a, b)
+ call(b.command + ':' + path, a, b)
+ b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
+ }
+
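+    // Keepalive reply ('r' = Standby Status Update): echo the last received LSN
+    // as the written/flushed/applied positions, then microseconds since the
+    // Postgres epoch (2000-01-01).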
+ function pong() {
+ const x = Buffer.alloc(34)
+ x[0] = 'r'.charCodeAt(0)
+ x.fill(state.lsn, 1)
+ x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25)
+ stream.write(x)
+ }
+ }
+
+ function call(x, a, b) {
+ listeners.has(x) && listeners.get(x).forEach(fn => fn(a, b, x))
+ }
+}
+
+function Time(x) {
+ return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000)))
+}
+
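+// Decode one pgoutput logical replication message by its tag byte: R(elation),
+// B(egin), I(nsert), U(pdate), D(elete) and friends, passing decoded rows to handle().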
+function parse(x, state, parsers, handle) {
+ const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
+
+ Object.entries({
+ R: x => { // Relation
+ let i = 1
+ const r = state[x.readUInt32BE(i)] = {
+ schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog',
+ table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))),
+ columns: Array(x.readUInt16BE(i += 2)),
+ keys: []
+ }
+ i += 2
+
+ let columnIndex = 0
+ , column
+
+ while (i < x.length) {
+ column = r.columns[columnIndex++] = {
+ key: x[i++],
+ name: String(x.slice(i, i = x.indexOf(0, i))),
+ type: x.readUInt32BE(i += 1),
+ parser: parsers[x.readUInt32BE(i)],
+ atttypmod: x.readUInt32BE(i += 4)
+ }
+
+ column.key && r.keys.push(column)
+ i += 4
+ }
+ },
+ Y: () => { /* noop */ }, // Type
+ O: () => { /* noop */ }, // Origin
+ B: x => { // Begin
+ state.date = Time(x.readBigInt64BE(9))
+ state.lsn = x.slice(1, 9)
+ },
+ I: x => { // Insert
+ let i = 1
+ const relation = state[x.readUInt32BE(i)]
+ const row = {}
+ tuples(x, row, relation.columns, i += 7)
+
+ handle(row, {
+ command: 'insert',
+ relation
+ })
+ },
+ D: x => { // Delete
+ let i = 1
+ const relation = state[x.readUInt32BE(i)]
+ i += 4
+ const key = x[i] === 75
+ const row = key || x[i] === 79
+ ? {}
+ : null
+
+ tuples(x, row, key ? relation.keys : relation.columns, i += 3)
+
+ handle(row, {
+ command: 'delete',
+ relation,
+ key
+ })
+ },
+ U: x => { // Update
+ let i = 1
+ const relation = state[x.readUInt32BE(i)]
+ i += 4
+ const key = x[i] === 75
+ const old = key || x[i] === 79
+ ? {}
+ : null
+
+ old && (i = tuples(x, old, key ? relation.keys : relation.columns, ++i))
+
+ const row = {}
+ i = tuples(x, row, relation.columns, i += 3)
+
+ handle(row, {
+ command: 'update',
+ relation,
+ key,
+ old
+ })
+ },
+ T: () => { /* noop */ }, // Truncate,
+ C: () => { /* noop */ } // Commit
+ }).reduce(char, {})[x[0]](x)
+}
+
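+// Read one tuple's length-prefixed column values; 'n' (110) marks null and
+// 'u' (117) an unchanged TOAST value, which is left undefined.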
+function tuples(x, row, columns, xi) {
+ let type
+ , column
+
+ for (let i = 0; i < columns.length; i++) {
+ type = x[xi++]
+ column = columns[i]
+ row[column.name] = type === 110 // n
+ ? null
+ : type === 117 // u
+ ? undefined
+ : column.parser === undefined
+ ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
+ : column.parser.array === true
+ ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
+ : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
+ }
+
+ return xi
+}
+
+function parseEvent(x) {
+  const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i)
+
+ if (!xs)
+ throw new Error('Malformed subscribe pattern: ' + x)
+
+ const [, command, path, key] = xs
+
+ return (command || '*')
+ + (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '')
+ + (key ? '=' + key : '')
+}
diff --git a/deno/src/types.js b/deno/src/types.js
new file mode 100644
index 00000000..a3dabd10
--- /dev/null
+++ b/deno/src/types.js
@@ -0,0 +1,298 @@
+import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts'
+import { Query } from './query.js'
+import { Errors } from './errors.js'
+
+export const types = {
+ string: {
+ to: 25,
+ from: null, // defaults to string
+ serialize: x => '' + x
+ },
+ number: {
+ to: 0,
+ from: [21, 23, 26, 700, 701],
+ serialize: x => '' + x,
+ parse: x => +x
+ },
+ json: {
+ to: 114,
+ from: [114, 3802],
+ serialize: x => JSON.stringify(x),
+ parse: x => JSON.parse(x)
+ },
+ boolean: {
+ to: 16,
+ from: 16,
+ serialize: x => x === true ? 't' : 'f',
+ parse: x => x === 't'
+ },
+ date: {
+ to: 1184,
+ from: [1082, 1114, 1184],
+ serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
+ parse: x => new Date(x)
+ },
+ bytea: {
+ to: 17,
+ from: 17,
+ serialize: x => '\\x' + Buffer.from(x).toString('hex'),
+ parse: x => Buffer.from(x.slice(2), 'hex')
+ }
+}
+
+export const BigInt = {
+  to: 1700,
+  from: [20, 701, 1700],
+  parse: x => globalThis.BigInt(x), // the exported const shadows the global BigInt in this scope
+  serialize: x => x.toString()
+}
+
+class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }}
+
+export class Identifier extends NotTagged {
+ constructor(value) {
+ super()
+ this.value = escapeIdentifier(value)
+ }
+}
+
+export class Parameter extends NotTagged {
+ constructor(value, type, array) {
+ super()
+ this.value = value
+ this.type = type
+ this.array = array
+ }
+}
+
+export class Builder extends NotTagged {
+ constructor(first, rest) {
+ super()
+ this.first = first
+ this.rest = rest
+ }
+
+ build(before, parameters, types, transform) {
+ const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
+ if (keyword.i === -1)
+ throw new Error('Could not infer helper mode')
+
+ return keyword.fn(this.first, this.rest, parameters, types, transform)
+ }
+}
+
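+// Serialize one interpolated value to a $n placeholder, pushing its value onto
+// parameters and its explicit or inferred type oid onto types.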
+export function handleValue(x, parameters, types) {
+ const value = x instanceof Parameter ? x.value : x
+ if (value === undefined)
+ throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
+
+ return '$' + (types.push(
+ x instanceof Parameter
+ ? (parameters.push(x.value), x.array
+ ? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
+ : x.type
+ )
+ : (parameters.push(x), inferType(x))
+ ))
+}
+
+const defaultHandlers = typeHandlers(types)
+
+function valuesBuilder(first, parameters, types, transform, columns) {
+ let value
+ return first.map(row =>
+ '(' + columns.map(column => {
+ value = row[column]
+ return (
+ value instanceof Query ? value.strings[0] :
+ value instanceof Identifier ? value.value :
+ handleValue(value, parameters, types)
+ )
+ }).join(',') + ')'
+ ).join(',')
+}
+
+function values(first, rest, parameters, types, transform) {
+ const multi = Array.isArray(first[0])
+ const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
+ return valuesBuilder(multi ? first : [first], parameters, types, transform, columns)
+}
+
+const builders = Object.entries({
+ values,
+ in: values,
+
+ update(first, rest, parameters, types, transform) {
+ return (rest.length ? rest.flat() : Object.keys(first)).map(x =>
+ escapeIdentifier(transform.column.to ? transform.column.to(x) : x) +
+ '=' + handleValue(first[x], parameters, types)
+ )
+ },
+
+ select(first, rest, parameters, types, transform) {
+ typeof first === 'string' && (first = [first].concat(rest))
+ if (Array.isArray(first))
+ return first.map(x => escapeIdentifier(transform.column.to ? transform.column.to(x) : x)).join(',')
+
+ let value
+ const columns = rest.length ? rest.flat() : Object.keys(first)
+ return columns.map(x => {
+ value = first[x]
+ return (
+ value instanceof Query ? value.strings[0] :
+ value instanceof Identifier ? value.value :
+ handleValue(value, parameters, types)
+ ) + ' as ' + escapeIdentifier(transform.column.to ? transform.column.to(x) : x)
+ }).join(',')
+ },
+
+ insert(first, rest, parameters, types, transform) {
+ const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first)
+ return '(' + columns.map(x =>
+ escapeIdentifier(transform.column.to ? transform.column.to(x) : x)
+ ).join(',') + ')values' +
+ valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns)
+ }
+}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn]))
+
+function notTagged() {
+ throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
+}
+
+export const serializers = defaultHandlers.serializers
+export const parsers = defaultHandlers.parsers
+
+export const END = {}
+
+function firstIsString(x) {
+ if (Array.isArray(x))
+ return firstIsString(x[0])
+ return typeof x === 'string' ? 1009 : 0
+}
+
+export const mergeUserTypes = function(types) {
+ const user = typeHandlers(types || {})
+ return {
+ serializers: Object.assign({}, serializers, user.serializers),
+ parsers: Object.assign({}, parsers, user.parsers)
+ }
+}
+
+function typeHandlers(types) {
+ return Object.keys(types).reduce((acc, k) => {
+ types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
+ acc.serializers[types[k].to] = types[k].serialize
+ types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize)
+ return acc
+ }, { parsers: {}, serializers: {} })
+}
+
+export const escapeIdentifier = function escape(str) {
+ return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
+}
+
+export const inferType = function inferType(x) {
+ return (
+ x instanceof Parameter ? x.type :
+ x instanceof Date ? 1184 :
+ x instanceof Uint8Array ? 17 :
+ (x === true || x === false) ? 16 :
+ typeof x === 'bigint' ? 1700 :
+ Array.isArray(x) ? inferType(x[0]) :
+ 0
+ )
+}
+
+const escapeBackslash = /\\/g
+const escapeQuote = /"/g
+
+function arrayEscape(x) {
+ return x
+ .replace(escapeBackslash, '\\\\')
+ .replace(escapeQuote, '\\"')
+}
+
+export const arraySerializer = function arraySerializer(xs, serializer) {
+ if (Array.isArray(xs) === false)
+ return xs
+
+ if (!xs.length)
+ return '{}'
+
+ const first = xs[0]
+
+ if (Array.isArray(first) && !first.type)
+ return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}'
+
+ return '{' + xs.map(x =>
+ '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
+ ).join(',') + '}'
+}
+
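+// Parsing is synchronous, so a single module-level state object can be
+// reset and reused for every call to arrayParser.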
+const arrayParserState = {
+ i: 0,
+ char: null,
+ str: '',
+ quoted: false,
+ last: 0
+}
+
+export const arrayParser = function arrayParser(x, parser) {
+ arrayParserState.i = arrayParserState.last = 0
+ return arrayParserLoop(arrayParserState, x, parser)
+}
+
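+// Recursive descent over the array literal: double-quoted elements, nested
+// arrays on '{', and bare elements split on ','.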
+function arrayParserLoop(s, x, parser) {
+ const xs = []
+ for (; s.i < x.length; s.i++) {
+ s.char = x[s.i]
+ if (s.quoted) {
+ if (s.char === '\\') {
+ s.str += x[++s.i]
+ } else if (s.char === '"') {
+ xs.push(parser ? parser(s.str) : s.str)
+ s.str = ''
+ s.quoted = x[s.i + 1] === '"'
+ s.last = s.i + 2
+ } else {
+ s.str += s.char
+ }
+ } else if (s.char === '"') {
+ s.quoted = true
+ } else if (s.char === '{') {
+ s.last = ++s.i
+ xs.push(arrayParserLoop(s, x, parser))
+ } else if (s.char === '}') {
+ s.quoted = false
+ s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
+ s.last = s.i + 1
+ break
+ } else if (s.char === ',' && s.p !== '}' && s.p !== '"') {
+ xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
+ s.last = s.i + 1
+ }
+ s.p = s.char
+ }
+ s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
+ return xs
+}
+
+export const toCamel = x => {
+ let str = x[0]
+ for (let i = 1; i < x.length; i++)
+ str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
+ return str
+}
+
+export const toPascal = x => {
+ let str = x[0].toUpperCase()
+ for (let i = 1; i < x.length; i++)
+ str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
+ return str
+}
+
+export const toKebab = x => x.replace(/_/g, '-')
+
+export const fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
+export const fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
+export const fromKebab = x => x.replace(/-/g, '_')
diff --git a/deno/tests/bootstrap.js b/deno/tests/bootstrap.js
new file mode 100644
index 00000000..d606238a
--- /dev/null
+++ b/deno/tests/bootstrap.js
@@ -0,0 +1,29 @@
+import { spawn } from 'https://deno.land/std@0.120.0/node/child_process.ts'
+
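+// Provision the test cluster: turn on ssl and create the plain, md5 and
+// scram-sha-256 users the tests log in with, then recreate the test database.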
+await exec('psql', ['-c', 'alter system set ssl=on'])
+await exec('psql', ['-c', 'create user postgres_js_test'])
+await exec('psql', ['-c', 'alter system set password_encryption=md5'])
+await exec('psql', ['-c', 'select pg_reload_conf()'])
+await exec('psql', ['-c', 'create user postgres_js_test_md5 with password \'postgres_js_test_md5\''])
+await exec('psql', ['-c', 'alter system set password_encryption=\'scram-sha-256\''])
+await exec('psql', ['-c', 'select pg_reload_conf()'])
+await exec('psql', ['-c', 'create user postgres_js_test_scram with password \'postgres_js_test_scram\''])
+
+await exec('dropdb', ['postgres_js_test'])
+await exec('createdb', ['postgres_js_test'])
+await exec('psql', ['-c', 'grant all on database postgres_js_test to postgres_js_test'])
+
+// Currently unused; delegates to exec below, which already ignores the
+// expected "already exists" / "does not exist" errors.
+function ignore(cmd, args) {
+  return exec(cmd, args)
+}
+
+export async function exec(cmd, args) { // eslint-disable-line
+ let stderr = ''
+ const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) // eslint-disable-line
+ cp.stderr.on('data', x => stderr += x)
+ await new Promise(x => cp.on('exit', x))
+ if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist'))
+ throw new Error(stderr)
+}
diff --git a/deno/tests/copy.csv b/deno/tests/copy.csv
new file mode 100644
index 00000000..6622044e
--- /dev/null
+++ b/deno/tests/copy.csv
@@ -0,0 +1,2 @@
+1 2 3
+4 5 6
diff --git a/deno/tests/index.js b/deno/tests/index.js
new file mode 100644
index 00000000..5a4ea5c6
--- /dev/null
+++ b/deno/tests/index.js
@@ -0,0 +1,1937 @@
+import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts'
+/* eslint no-console: 0 */
+
+import { exec } from './bootstrap.js'
+
+import { t, nt, ot } from './test.js' // eslint-disable-line
+import { net } from '../polyfills.js'
+import fs from 'https://deno.land/std@0.120.0/node/fs.ts'
+import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts'
+
+import postgres from '../src/index.js'
+const delay = ms => new Promise(r => setTimeout(r, ms))
+
+const rel = x => new URL(x, import.meta.url)
+const idle_timeout = 1
+
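+// Shared test options: max: 1 pins everything to a single connection, and
+// idle_timeout / connect_timeout keep the suite from hanging between tests.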
+const login = {
+ user: 'postgres_js_test'
+}
+
+const login_md5 = {
+ user: 'postgres_js_test_md5',
+ pass: 'postgres_js_test_md5'
+}
+
+const login_scram = {
+ user: 'postgres_js_test_scram',
+ pass: 'postgres_js_test_scram'
+}
+
+const options = {
+ db: 'postgres_js_test',
+ user: login.user,
+ pass: login.pass,
+ idle_timeout,
+ connect_timeout: 1,
+ max: 1
+}
+
+const sql = postgres(options)
+
+t('Connects with no options', async() => {
+ const sql = postgres({ max: 1 })
+
+ const result = (await sql`select 1 as x`)[0].x
+ await sql.end()
+
+ return [1, result]
+})
+
+t('Uses default database without slash', async() => {
+ const sql = postgres('postgres://localhost')
+ return [sql.options.user, sql.options.database]
+})
+
+t('Uses default database with slash', async() => {
+ const sql = postgres('postgres://localhost/')
+ return [sql.options.user, sql.options.database]
+})
+
+t('Result is array', async() =>
+ [true, Array.isArray(await sql`select 1`)]
+)
+
+t('Result has count', async() =>
+ [1, (await sql`select 1`).count]
+)
+
+t('Result has command', async() =>
+ ['SELECT', (await sql`select 1`).command]
+)
+
+t('Create table', async() =>
+ ['CREATE TABLE', (await sql`create table test(int int)`).command, await sql`drop table test`]
+)
+
+t('Drop table', { timeout: 2 }, async() => {
+ await sql`create table test(int int)`
+ return ['DROP TABLE', (await sql`drop table test`).command]
+})
+
+t('null', async() =>
+ [null, (await sql`select ${ null } as x`)[0].x]
+)
+
+t('Integer', async() =>
+ ['1', (await sql`select ${ 1 } as x`)[0].x]
+)
+
+t('String', async() =>
+ ['hello', (await sql`select ${ 'hello' } as x`)[0].x]
+)
+
+t('Boolean false', async() =>
+ [false, (await sql`select ${ false } as x`)[0].x]
+)
+
+t('Boolean true', async() =>
+ [true, (await sql`select ${ true } as x`)[0].x]
+)
+
+t('Date', async() => {
+ const now = new Date()
+ return [0, now - (await sql`select ${ now } as x`)[0].x]
+})
+
+t('Json', async() => {
+ const x = (await sql`select ${ sql.json({ a: 'hello', b: 42 }) } as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('implicit json', async() => {
+ const x = (await sql`select ${ { a: 'hello', b: 42 } }::json as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('implicit jsonb', async() => {
+ const x = (await sql`select ${ { a: 'hello', b: 42 } }::jsonb as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('Empty array', async() =>
+ [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)]
+)
+
+t('String array', async() =>
+ ['123', (await sql`select ${ '{1,2,3}' }::int[] as x`)[0].x.join('')]
+)
+
+t('Array of Integer', async() =>
+ ['3', (await sql`select ${ sql.array([1, 2, 3]) } as x`)[0].x[2]]
+)
+
+t('Array of String', async() =>
+ ['c', (await sql`select ${ sql.array(['a', 'b', 'c']) } as x`)[0].x[2]]
+)
+
+t('Array of Date', async() => {
+ const now = new Date()
+ return [now.getTime(), (await sql`select ${ sql.array([now, now, now]) } as x`)[0].x[2].getTime()]
+})
+
+t('Nested array n2', async() =>
+ ['4', (await sql`select ${ sql.array([[1, 2], [3, 4]]) } as x`)[0].x[1][1]]
+)
+
+t('Nested array n3', async() =>
+ ['6', (await sql`select ${ sql.array([[[1, 2]], [[3, 4]], [[5, 6]]]) } as x`)[0].x[2][0][1]]
+)
+
+t('Escape in arrays', async() =>
+ ['Hello "you",c:\\windows', (await sql`select ${ sql.array(['Hello "you"', 'c:\\windows']) } as x`)[0].x.join(',')]
+)
+
+t('Escapes', async() => {
+ return ['hej"hej', Object.keys((await sql`select 1 as ${ sql('hej"hej') }`)[0])[0]]
+})
+
+t('null for int', async() => {
+ await sql`create table test (x int)`
+ return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`]
+})
+
+t('Throws on illegal transactions', async() => {
+ const sql = postgres({ ...options, max: 2, fetch_types: false })
+ const error = await sql`begin`.catch(e => e)
+ return [
+ error.code,
+ 'UNSAFE_TRANSACTION'
+ ]
+})
+
+t('Transaction throws', async() => {
+ await sql`create table test (a int)`
+ return ['22P02', await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql`insert into test values('hej')`
+ }).catch(x => x.code), await sql`drop table test`]
+})
+
+t('Transaction rolls back', async() => {
+ await sql`create table test (a int)`
+ await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql`insert into test values('hej')`
+ }).catch(() => { /* ignore */ })
+ return [0, (await sql`select a from test`).count, await sql`drop table test`]
+})
+
+t('Transaction throws on uncaught savepoint', async() => {
+ await sql`create table test (a int)`
+
+ return ['fail', (await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql.savepoint(async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('fail')
+ })
+ }).catch((err) => err.message)), await sql`drop table test`]
+})
+
+t('Transaction throws on uncaught named savepoint', async() => {
+ await sql`create table test (a int)`
+
+ return ['fail', (await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+    await sql.savepoint('watpoint', async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('fail')
+ })
+ }).catch(() => 'fail')), await sql`drop table test`]
+})
+
+t('Transaction succeeds on caught savepoint', async() => {
+ await sql`create table test (a int)`
+ await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql.savepoint(async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('please rollback')
+ }).catch(() => { /* ignore */ })
+ await sql`insert into test values(3)`
+ })
+
+ return ['2', (await sql`select count(1) from test`)[0].count, await sql`drop table test`]
+})
+
+t('Savepoint returns Result', async() => {
+ let result
+ await sql.begin(async sql => {
+ result = await sql.savepoint(sql =>
+ sql`select 1 as x`
+ )
+ })
+
+ return [1, result[0].x]
+})
+
+t('Transaction requests are executed implicitly', async() => {
+ const sql = postgres({ debug: true, idle_timeout: 1, fetch_types: false })
+ return [
+ 'testing',
+ (await sql.begin(async sql => {
+ sql`select set_config('postgres_js.test', 'testing', true)`
+ return await sql`select current_setting('postgres_js.test') as x`
+ }))[0].x
+ ]
+})
+
+t('Uncaught transaction request errors bubble to the transaction', async() => [
+ '42703',
+ (await sql.begin(sql => (
+ sql`select wat`,
+ sql`select current_setting('postgres_js.test') as x, ${ 1 } as a`
+ )).catch(e => e.code))
+])
+
+t('Parallel transactions', async() => {
+ await sql`create table test (a int)`
+ return ['11', (await Promise.all([
+ sql.begin(sql => sql`select 1`),
+ sql.begin(sql => sql`select 1`)
+ ])).map(x => x.count).join(''), await sql`drop table test`]
+})
+
+t('Many transactions at beginning of connection', async() => {
+ const sql = postgres(options)
+ const xs = await Promise.all(Array.from({ length: 100 }, () => sql.begin(sql => sql`select 1`)))
+ return [100, xs.length]
+})
+
+t('Transactions array', async() => {
+ await sql`create table test (a int)`
+
+ return ['11', (await sql.begin(sql => [
+ sql`select 1`.then(x => x),
+ sql`select 1`
+ ])).map(x => x.count).join(''), await sql`drop table test`]
+})
+
+t('Transaction waits', async() => {
+ await sql`create table test (a int)`
+ await sql.begin(async sql => {
+ await sql`insert into test values(1)`
+ await sql.savepoint(async sql => {
+ await sql`insert into test values(2)`
+ throw new Error('please rollback')
+ }).catch(() => { /* ignore */ })
+ await sql`insert into test values(3)`
+ })
+
+ return ['11', (await Promise.all([
+ sql.begin(sql => sql`select 1`),
+ sql.begin(sql => sql`select 1`)
+ ])).map(x => x.count).join(''), await sql`drop table test`]
+})
+
+t('Helpers in Transaction', async() => {
+ return ['1', (await sql.begin(async sql =>
+ await sql`select ${ sql({ x: 1 }) }`
+ ))[0].x]
+})
+
+t('Undefined values throws', async() => {
+ let error
+
+ await sql`
+ select ${ undefined } as x
+ `.catch(x => error = x.code)
+
+ return ['UNDEFINED_VALUE', error]
+})
+
+t('Null sets to null', async() =>
+ [null, (await sql`select ${ null } as x`)[0].x]
+)
+
+t('Throw syntax error', async() =>
+ ['42601', (await sql`wat 1`.catch(x => x)).code]
+)
+
+t('Connect using uri', async() =>
+ [true, await new Promise((resolve, reject) => {
+ const sql = postgres('postgres://' + login.user + ':' + (login.pass || '') + '@localhost:5432/' + options.db, {
+ idle_timeout
+ })
+ sql`select 1`.then(() => resolve(true), reject)
+ })]
+)
+
+t('Fail with proper error on no host', async() =>
+ ['ECONNREFUSED', (await new Promise((resolve, reject) => {
+ const sql = postgres('postgres://localhost:33333/' + options.db, {
+ idle_timeout
+ })
+ sql`select 1`.then(reject, resolve)
+ })).code]
+)
+
+t('Connect using SSL', async() =>
+ [true, (await new Promise((resolve, reject) => {
+ postgres({
+ ssl: { rejectUnauthorized: false },
+ idle_timeout
+ })`select 1`.then(() => resolve(true), reject)
+ }))]
+)
+
+t('Connect using SSL require', async() =>
+ [true, (await new Promise((resolve, reject) => {
+ postgres({
+ ssl: 'require',
+ idle_timeout
+ })`select 1`.then(() => resolve(true), reject)
+ }))]
+)
+
+t('Connect using SSL prefer', async() => {
+ await exec('psql', ['-c', 'alter system set ssl=off'])
+ await exec('psql', ['-c', 'select pg_reload_conf()'])
+
+ const sql = postgres({
+ ssl: 'prefer',
+ idle_timeout
+ })
+
+ return [
+ 1, (await sql`select 1 as x`)[0].x,
+ await exec('psql', ['-c', 'alter system set ssl=on']),
+ await exec('psql', ['-c', 'select pg_reload_conf()'])
+ ]
+})
+
+t('Reconnect using SSL', { timeout: 2 }, async() => {
+ const sql = postgres({
+ ssl: 'require',
+ idle_timeout: 0.1
+ })
+
+ await sql`select 1`
+ await delay(200)
+
+ return [1, (await sql`select 1 as x`)[0].x]
+})
+
+t('Login without password', async() => {
+ return [true, (await postgres({ ...options, ...login })`select true as x`)[0].x]
+})
+
+t('Login using MD5', async() => {
+ return [true, (await postgres({ ...options, ...login_md5 })`select true as x`)[0].x]
+})
+
+t('Login using scram-sha-256', async() => {
+ return [true, (await postgres({ ...options, ...login_scram })`select true as x`)[0].x]
+})
+
+t('Parallel connections using scram-sha-256', {
+ timeout: 2
+}, async() => {
+ const sql = postgres({ ...options, ...login_scram })
+ return [true, (await Promise.all([
+ sql`select true as x, pg_sleep(0.2)`,
+ sql`select true as x, pg_sleep(0.2)`,
+ sql`select true as x, pg_sleep(0.2)`
+ ]))[0][0].x]
+})
+
+t('Support dynamic password function', async() => {
+ return [true, (await postgres({
+ ...options,
+ ...login_scram,
+ pass: () => 'postgres_js_test_scram'
+ })`select true as x`)[0].x]
+})
+
+t('Support dynamic async password function', async() => {
+ return [true, (await postgres({
+ ...options,
+ ...login_scram,
+ pass: () => Promise.resolve('postgres_js_test_scram')
+ })`select true as x`)[0].x]
+})
+
+t('Point type', async() => {
+ const sql = postgres({
+ ...options,
+ types: {
+ point: {
+ to: 600,
+ from: [600],
+ serialize: ([x, y]) => '(' + x + ',' + y + ')',
+ parse: (x) => x.slice(1, -1).split(',').map(x => +x)
+ }
+ }
+ })
+
+ await sql`create table test (x point)`
+ await sql`insert into test (x) values (${ sql.types.point([10, 20]) })`
+ return [20, (await sql`select x from test`)[0].x[1], await sql`drop table test`]
+})
+
+t('Point type array', async() => {
+ const sql = postgres({
+ ...options,
+ types: {
+ point: {
+ to: 600,
+ from: [600],
+ serialize: ([x, y]) => '(' + x + ',' + y + ')',
+ parse: (x) => x.slice(1, -1).split(',').map(x => +x)
+ }
+ }
+ })
+
+ await sql`create table test (x point[])`
+ await sql`insert into test (x) values (${ sql.array([sql.types.point([10, 20]), sql.types.point([20, 30])]) })`
+ return [30, (await sql`select x from test`)[0].x[1][1], await sql`drop table test`]
+})
+
+t('sql file', async() =>
+ [1, (await sql.file(rel('select.sql')))[0].x]
+)
+
+t('sql file has forEach', async() => {
+ let result
+ await sql
+ .file(rel('select.sql'), { cache: false })
+ .forEach(({ x }) => result = x)
+
+ return [1, result]
+})
+
+t('sql file throws', async() =>
+ ['ENOENT', (await sql.file(rel('selectomondo.sql')).catch(x => x.code))]
+)
+
+t('sql file cached', async() => {
+ await sql.file(rel('select.sql'))
+ await delay(20)
+
+ return [1, (await sql.file(rel('select.sql')))[0].x]
+})
+
+t('Parameters in file', async() => {
+ const result = await sql.file(
+ rel('select-param.sql'),
+ ['hello']
+ )
+ return ['hello', result[0].x]
+})
+
+t('Connection ended promise', async() => {
+ const sql = postgres(options)
+
+ await sql.end()
+
+ return [undefined, await sql.end()]
+})
+
+t('Connection ended timeout', async() => {
+ const sql = postgres(options)
+
+ await sql.end({ timeout: 10 })
+
+ return [undefined, await sql.end()]
+})
+
+t('Connection ended error', async() => {
+ const sql = postgres(options)
+ sql.end()
+ return ['CONNECTION_ENDED', (await sql``.catch(x => x.code))]
+})
+
+t('Connection end does not cancel query', async() => {
+ const sql = postgres(options)
+
+ const promise = sql`select 1 as x`.execute()
+
+ sql.end()
+
+ return [1, (await promise)[0].x]
+})
+
+t('Connection destroyed', async() => {
+ const sql = postgres(options)
+ setTimeout(() => sql.end({ timeout: 0 }), 0)
+ return ['CONNECTION_DESTROYED', await sql``.catch(x => x.code)]
+})
+
+t('Connection destroyed with query before', async() => {
+ const sql = postgres(options)
+ , error = sql`select pg_sleep(0.2)`.catch(err => err.code)
+
+ sql.end({ timeout: 0 })
+ return ['CONNECTION_DESTROYED', await error]
+})
+
+t('transform column', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: x => x.split('').reverse().join('') }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['dlrow_olleh', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('column toPascal', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: postgres.toPascal }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['HelloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('column toCamel', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: postgres.toCamel }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['helloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('column toKebab', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { column: postgres.toKebab }
+ })
+
+ await sql`create table test (hello_world int)`
+ await sql`insert into test values (1)`
+ return ['hello-world', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`]
+})
+
+t('unsafe', async() => {
+ await sql`create table test (x int)`
+ return [1, (await sql.unsafe('insert into test values ($1) returning *', [1]))[0].x, await sql`drop table test`]
+})
+
+t('unsafe simple', async() => {
+ return [1, (await sql.unsafe('select 1 as x'))[0].x]
+})
+
+t('listen and notify', async() => {
+ const sql = postgres(options)
+ , channel = 'hello'
+
+ return ['world', await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .then(() => delay(20))
+ .catch(reject)
+ .then(sql.end)
+ )]
+})
+
+t('double listen', async() => {
+ const sql = postgres(options)
+ , channel = 'hello'
+
+ let count = 0
+
+ await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .catch(reject)
+ ).then(() => count++)
+
+ await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .catch(reject)
+ ).then(() => count++)
+
+ // for coverage
+ sql.listen('weee', () => { /* noop */ }).then(sql.end)
+
+ return [2, count]
+})
+
+t('listen and notify with weird name', async() => {
+ const sql = postgres(options)
+ , channel = 'wat-;ø§'
+
+ return ['world', await new Promise((resolve, reject) =>
+ sql.listen(channel, resolve)
+ .then(() => sql.notify(channel, 'world'))
+ .catch(reject)
+ .then(() => delay(20))
+ .then(sql.end)
+ )]
+})
+
+t('listen and notify with upper case', async() => {
+ const sql = postgres(options)
+ let result
+
+ await sql.listen('withUpperChar', x => result = x)
+ sql.notify('withUpperChar', 'works')
+ await delay(50)
+
+ return [
+ 'works',
+ result,
+ sql.end()
+ ]
+})
+
+t('listen reconnects', { timeout: 2 }, async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const { state: { pid } } = await sql.listen('test', x => xs.push(x))
+ await delay(200)
+ await sql.notify('test', 'a')
+ await sql`select pg_terminate_backend(${ pid }::int)`
+ await delay(200)
+ await sql.notify('test', 'b')
+ await delay(200)
+ sql.end()
+
+ return ['ab', xs.join('')]
+})
+
+t('listen reconnects after connection error', { timeout: 3 }, async() => {
+ const sql = postgres()
+ , xs = []
+
+ const { state: { pid } } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
+ await sql`select pg_terminate_backend(${ pid }::int)`
+ await delay(1000)
+
+ await sql.notify('test', 'b')
+ await delay(50)
+ sql.end()
+
+ return ['ab', xs.join('')]
+})
+
+t('listen result reports correct connection state after reconnection', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const result = await sql.listen('test', x => xs.push(x))
+ const initialPid = result.state.pid
+ await sql.notify('test', 'a')
+ await sql`select pg_terminate_backend(${ initialPid }::int)`
+ await delay(50)
+ sql.end()
+
+ return [result.state.pid !== initialPid, true]
+})
+
+t('unlisten removes subscription', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const { unlisten } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
+ await delay(50)
+ await unlisten()
+ await sql.notify('test', 'b')
+ await delay(50)
+ sql.end()
+
+ return ['a', xs.join('')]
+})
+
+t('listen after unlisten', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ const { unlisten } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
+ await delay(50)
+ await unlisten()
+ await sql.notify('test', 'b')
+ await delay(50)
+ await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'c')
+ await delay(50)
+ sql.end()
+
+ return ['ac', xs.join('')]
+})
+
+t('multiple listeners and unlisten one', async() => {
+ const sql = postgres(options)
+ , xs = []
+
+ await sql.listen('test', x => xs.push('1', x))
+ const s2 = await sql.listen('test', x => xs.push('2', x))
+ await sql.notify('test', 'a')
+ await delay(50)
+ await s2.unlisten()
+ await sql.notify('test', 'b')
+ await delay(50)
+ sql.end()
+
+ return ['1a2a1b', xs.join('')]
+})
+
+t('responds with server parameters (application_name)', async() =>
+ ['postgres.js', await new Promise((resolve, reject) => postgres({
+ ...options,
+ onparameter: (k, v) => k === 'application_name' && resolve(v)
+ })`select 1`.catch(reject))]
+)
+
+t('has server parameters', async() => {
+ return ['postgres.js', (await sql`select 1`.then(() => sql.parameters.application_name))]
+})
+
+t('big query body', async() => {
+ await sql`create table test (x int)`
+ return [1000, (await sql`insert into test ${
+ sql([...Array(1000).keys()].map(x => ({ x })))
+ }`).count, await sql`drop table test`]
+})
+
+t('Throws if more than 65534 parameters', async() => {
+ await sql`create table test (x int)`
+ return ['MAX_PARAMETERS_EXCEEDED', (await sql`insert into test ${
+ sql([...Array(65535).keys()].map(x => ({ x })))
+ }`.catch(e => e.code)), await sql`drop table test`]
+})
+
+t('let postgres do implicit cast of unknown types', async() => {
+ await sql`create table test (x timestamp with time zone)`
+ const [{ x }] = await sql`insert into test values (${ new Date().toISOString() }) returning *`
+ return [true, x instanceof Date, await sql`drop table test`]
+})
+
+t('only allows one statement', async() =>
+ ['42601', await sql`select 1; select 2`.catch(e => e.code)]
+)
+
+t('await sql() throws not tagged error', async() => {
+ let error
+ try {
+ await sql('select 1')
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('sql().then throws not tagged error', async() => {
+ let error
+ try {
+ sql('select 1').then(() => { /* noop */ })
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('sql().catch throws not tagged error', async() => {
+ let error
+ try {
+ await sql('select 1')
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('sql().finally throws not tagged error', async() => {
+ let error
+ try {
+ sql('select 1').finally(() => { /* noop */ })
+ } catch (e) {
+ error = e.code
+ }
+ return ['NOT_TAGGED_CALL', error]
+})
+
+t('little bobby tables', async() => {
+ const name = 'Robert\'); DROP TABLE students;--'
+
+ await sql`create table students (name text, age int)`
+ await sql`insert into students (name) values (${ name })`
+
+ return [
+ name, (await sql`select name from students`)[0].name,
+ await sql`drop table students`
+ ]
+})
+
+t('Connection errors are caught using begin()', {
+ timeout: 2
+}, async() => {
+ let error
+ try {
+ const sql = postgres({ host: 'wat', port: 1337 })
+
+ await sql.begin(async(sql) => {
+ await sql`insert into test (label, value) values (${1}, ${2})`
+ })
+ } catch (err) {
+ error = err
+ }
+
+ return [
+ true,
+ error.code === 'ENOTFOUND' ||
+ error.message === 'failed to lookup address information: nodename nor servname provided, or not known'
+ ]
+})
+
+t('dynamic column name', async() => {
+ return ['!not_valid', Object.keys((await sql`select 1 as ${ sql('!not_valid') }`)[0])[0]]
+})
+
+t('dynamic select as', async() => {
+ return ['2', (await sql`select ${ sql({ a: 1, b: 2 }) }`)[0].b]
+})
+
+t('dynamic select as pluck', async() => {
+ return [undefined, (await sql`select ${ sql({ a: 1, b: 2 }, 'a') }`)[0].b]
+})
+
+t('dynamic insert', async() => {
+ await sql`create table test (a int, b text)`
+ const x = { a: 42, b: 'the answer' }
+
+ return ['the answer', (await sql`insert into test ${ sql(x) } returning *`)[0].b, await sql`drop table test`]
+})
+
+t('dynamic insert pluck', async() => {
+ await sql`create table test (a int, b text)`
+ const x = { a: 42, b: 'the answer' }
+
+ return [null, (await sql`insert into test ${ sql(x, 'a') } returning *`)[0].b, await sql`drop table test`]
+})
+
+t('array insert', async() => {
+ await sql`create table test (a int, b int)`
+ return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`]
+})
+
+t('where parameters in()', async() => {
+ await sql`create table test (x text)`
+ await sql`insert into test values ('a')`
+ return [
+ (await sql`select * from test where x in ${ sql(['a', 'b', 'c']) }`)[0].x,
+ 'a',
+ await sql`drop table test`
+ ]
+})
+
+t('where parameters in() values before', async() => {
+ return [2, (await sql`
+ with rows as (
+ select * from (values (1), (2), (3), (4)) as x(a)
+ )
+ select * from rows where a in ${ sql([3, 4]) }
+ `).count]
+})
+
+t('dynamic multi row insert', async() => {
+ await sql`create table test (a int, b text)`
+ const x = { a: 42, b: 'the answer' }
+
+ return [
+ 'the answer',
+ (await sql`insert into test ${ sql([x, x]) } returning *`)[1].b, await sql`drop table test`
+ ]
+})
+
+t('dynamic update', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (17, 'wrong')`
+
+ return [
+ 'the answer',
+ (await sql`update test set ${ sql({ a: 42, b: 'the answer' }) } returning *`)[0].b, await sql`drop table test`
+ ]
+})
+
+t('dynamic update pluck', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (17, 'wrong')`
+
+ return [
+ 'wrong',
+ (await sql`update test set ${ sql({ a: 42, b: 'the answer' }, 'a') } returning *`)[0].b, await sql`drop table test`
+ ]
+})
+
+t('dynamic select array', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (42, 'yay')`
+ return ['yay', (await sql`select ${ sql(['a', 'b']) } from test`)[0].b, await sql`drop table test`]
+})
+
+t('dynamic select args', async() => {
+ await sql`create table test (a int, b text)`
+ await sql`insert into test (a, b) values (42, 'yay')`
+ return ['yay', (await sql`select ${ sql('a', 'b') } from test`)[0].b, await sql`drop table test`]
+})
+
+t('dynamic values single row', async() => {
+ const [{ b }] = await sql`
+ select * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c)
+ `
+
+ return ['b', b]
+})
+
+t('dynamic values multi row', async() => {
+ const [, { b }] = await sql`
+ select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) as x(a, b, c)
+ `
+
+ return ['b', b]
+})
+
+t('connection parameters', async() => {
+ const sql = postgres({
+ ...options,
+ connection: {
+ 'some.var': 'yay'
+ }
+ })
+
+ return ['yay', (await sql`select current_setting('some.var') as x`)[0].x]
+})
+
+t('Multiple queries', async() => {
+ const sql = postgres(options)
+
+ return [4, (await Promise.all([
+ sql`select 1`,
+ sql`select 2`,
+ sql`select 3`,
+ sql`select 4`
+ ])).length]
+})
+
+t('Multiple statements', async() =>
+ [2, await sql.unsafe(`
+ select 1 as x;
+ select 2 as a;
+ `).then(([, [x]]) => x.a)]
+)
+
+t('throws correct error when authentication fails', async() => {
+ const sql = postgres({
+ ...options,
+ ...login_md5,
+ pass: 'wrong'
+ })
+ return ['28P01', await sql`select 1`.catch(e => e.code)]
+})
+
+t('notice works', async() => {
+ let notice
+ const log = console.log
+ console.log = function(x) {
+ notice = x
+ }
+
+ const sql = postgres(options)
+
+ await sql`create table if not exists users()`
+ await sql`create table if not exists users()`
+
+ console.log = log
+
+ return ['NOTICE', notice.severity]
+})
+
+t('notice hook works', async() => {
+ let notice
+ const sql = postgres({
+ ...options,
+ onnotice: x => notice = x
+ })
+
+ await sql`create table if not exists users()`
+ await sql`create table if not exists users()`
+
+ return ['NOTICE', notice.severity]
+})
+
+t('bytea serializes and parses', async() => {
+ const buf = Buffer.from('wat')
+
+ await sql`create table test (x bytea)`
+ await sql`insert into test values (${ buf })`
+
+ return [
+ buf.toString(),
+ (await sql`select x from test`)[0].x.toString(),
+ await sql`drop table test`
+ ]
+})
+
+t('forEach works', async() => {
+ let result
+ await sql`select 1 as x`.forEach(({ x }) => result = x)
+ return [1, result]
+})
+
+t('forEach returns empty array', async() => {
+ return [0, (await sql`select 1 as x`.forEach(() => { /* noop */ })).length]
+})
+
+t('Cursor works', async() => {
+ const order = []
+ await sql`select 1 as x union select 2 as x`.cursor(async([x]) => {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ })
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Unsafe cursor works', async() => {
+ const order = []
+ await sql.unsafe('select 1 as x union select 2 as x').cursor(async([x]) => {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ })
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Cursor custom n works', async() => {
+ const order = []
+ await sql`select * from generate_series(1,20)`.cursor(10, async(x) => {
+ order.push(x.length)
+ })
+ return ['10,10', order.join(',')]
+})
+
+t('Cursor custom with rest n works', async() => {
+ const order = []
+ await sql`select * from generate_series(1,20)`.cursor(11, async(x) => {
+ order.push(x.length)
+ })
+ return ['11,9', order.join(',')]
+})
+
+t('Cursor custom with less results than batch size works', async() => {
+ const order = []
+ await sql`select * from generate_series(1,20)`.cursor(21, async(x) => {
+ order.push(x.length)
+ })
+ return ['20', order.join(',')]
+})
+
+t('Cursor cancel works', async() => {
+ let result
+ await sql`select * from generate_series(1,10) as x`.cursor(async([{ x }]) => {
+ result = x
+ return sql.CLOSE
+ })
+ return [1, result]
+})
+
+t('Cursor throw works', async() => {
+ const order = []
+ await sql`select 1 as x union select 2 as x`.cursor(async([x]) => {
+ order.push(x.x + 'a')
+ await delay(100)
+ throw new Error('watty')
+ }).catch(() => order.push('err'))
+ return ['1aerr', order.join('')]
+})
+
+t('Cursor error works', async() => [
+ '42601',
+ await sql`wat`.cursor(() => { /* noop */ }).catch((err) => err.code)
+])
+
+t('Multiple Cursors', { timeout: 2 }, async() => {
+ const result = []
+ await sql.begin(async sql => [
+ await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async([row]) => {
+ result.push(row.x)
+ await new Promise(r => setTimeout(r, 200))
+ }),
+ await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async([row]) => {
+ result.push(row.x)
+ await new Promise(r => setTimeout(r, 100))
+ })
+ ])
+
+ return ['1,2,3,4,101,102,103,104', result.join(',')]
+})
+
+t('Cursor as async iterator', async() => {
+ const order = []
+ for await (const [x] of sql`select generate_series(1,2) as x;`.cursor()) {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ }
+
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Cursor as async iterator with break', async() => {
+ const order = []
+ for await (const xs of sql`select generate_series(1,2) as x;`.cursor()) {
+ order.push(xs[0].x + 'a')
+ await delay(10)
+ order.push(xs[0].x + 'b')
+ break
+ }
+
+ return ['1a1b', order.join('')]
+})
+
+t('Async Iterator Unsafe cursor works', async() => {
+ const order = []
+ for await (const [x] of sql.unsafe('select 1 as x union select 2 as x').cursor()) {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ }
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Async Iterator Cursor custom n works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(10))
+ order.push(x.length)
+
+ return ['10,10', order.join(',')]
+})
+
+t('Async Iterator Cursor custom with rest n works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(11))
+ order.push(x.length)
+
+ return ['11,9', order.join(',')]
+})
+
+t('Async Iterator Cursor custom with less results than batch size works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(21))
+ order.push(x.length)
+ return ['20', order.join(',')]
+})
+
+t('Transform row', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { row: () => 1 }
+ })
+
+ return [1, (await sql`select 'wat'`)[0]]
+})
+
+t('Transform row forEach', async() => {
+ let result
+ const sql = postgres({
+ ...options,
+ transform: { row: () => 1 }
+ })
+
+ await sql`select 1`.forEach(x => result = x)
+
+ return [1, result]
+})
+
+t('Transform value', async() => {
+ const sql = postgres({
+ ...options,
+ transform: { value: () => 1 }
+ })
+
+ return [1, (await sql`select 'wat' as x`)[0].x]
+})
+
+t('Transform columns from', async() => {
+ const sql = postgres({ ...options, transform: { column: { to: postgres.fromCamel, from: postgres.toCamel } } })
+ await sql`create table test (a_test int, b_test text)`
+ await sql`insert into test ${ sql([{ aTest: 1, bTest: 1 }]) }`
+ await sql`update test set ${ sql({ aTest: 2, bTest: 2 }) }`
+ return [
+ 2,
+ (await sql`select ${ sql('aTest', 'bTest') } from test`)[0].aTest,
+ await sql`drop table test`
+ ]
+})
+
+t('Unix socket', async() => {
+ const sql = postgres({
+ ...options,
+ host: '/tmp'
+ })
+
+ return [1, (await sql`select 1 as x`)[0].x]
+})
+
+t('Big result', async() => {
+ return [100000, (await sql`select * from generate_series(1, 100000)`).count]
+})
+
+t('Debug works', async() => {
+ let result
+ const sql = postgres({
+ ...options,
+ debug: (connection_id, str) => result = str
+ })
+
+ await sql`select 1`
+
+ return ['select 1', result]
+})
+
+t('bigint is returned as String', async() => [
+ 'string',
+ typeof (await sql`select 9223372036854777 as x`)[0].x
+])
+
+t('int is returned as Number', async() => [
+ 'number',
+ typeof (await sql`select 123 as x`)[0].x
+])
+
+t('numeric is returned as string', async() => [
+ 'string',
+ typeof (await sql`select 1.2 as x`)[0].x
+])
+
+t('Async stack trace', async() => {
+ const sql = postgres({ ...options, debug: false })
+ return [
+ parseInt(new Error().stack.split('\n')[1].match(':([0-9]+):')[1]) + 1,
+ parseInt(await sql`error`.catch(x => x.stack.split('\n').pop().match(':([0-9]+):')[1]))
+ ]
+})
+
+t('Debug has long async stack trace', async() => {
+ const sql = postgres({ ...options, debug: true })
+
+ return [
+ 'watyo',
+ await yo().catch(x => x.stack.match(/wat|yo/g).join(''))
+ ]
+
+ function yo() {
+ return wat()
+ }
+
+ function wat() {
+ return sql`error`
+ }
+})
+
+t('Error contains query string', async() => [
+ 'selec 1',
+ (await sql`selec 1`.catch(err => err.query))
+])
+
+t('Error contains query serialized parameters', async() => [
+ 1,
+ (await sql`selec ${ 1 }`.catch(err => err.parameters[0]))
+])
+
+t('Error contains query raw parameters', async() => [
+ 1,
+ (await sql`selec ${ 1 }`.catch(err => err.args[0]))
+])
+
+t('Query and parameters on error are not enumerable if debug is not set', async() => {
+ const sql = postgres({ ...options, debug: false })
+
+ return [
+ false,
+ (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') || err.propertyIsEnumerable('query')))
+ ]
+})
+
+t('Query and parameters are enumerable if debug is set', async() => {
+ const sql = postgres({ ...options, debug: true })
+
+ return [
+ true,
+ (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') && err.propertyIsEnumerable('query')))
+ ]
+})
+
+t('connect_timeout works', { timeout: 20 }, async() => {
+ const connect_timeout = 0.2
+ const server = net.createServer()
+ server.listen()
+ const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout })
+ const start = Date.now()
+ let end
+ await sql`select 1`.catch((e) => {
+ if (e.code !== 'CONNECT_TIMEOUT')
+ throw e
+ end = Date.now()
+ })
+ server.close()
+ return [connect_timeout, Math.floor((end - start) / 100) / 10]
+})
+
+t('connect_timeout throws proper error', async() => [
+ 'CONNECT_TIMEOUT',
+ await postgres({
+ ...options,
+ ...login_scram,
+ connect_timeout: 0.001
+ })`select 1`.catch(e => e.code)
+])
+
+t('requests works after single connect_timeout', async() => {
+ let first = true
+
+ const sql = postgres({
+ ...options,
+ ...login_scram,
+ connect_timeout: { valueOf() { return first ? (first = false, 0.001) : 1 } }
+ })
+
+ return [
+ 'CONNECT_TIMEOUT,,1',
+ [
+ await sql`select 1 as x`.then(() => 'success', x => x.code),
+ await delay(10),
+ (await sql`select 1 as x`)[0].x
+ ].join(',')
+ ]
+})
+
+t('Postgres errors are of type PostgresError', async() =>
+ [true, (await sql`bad keyword`.catch(e => e)) instanceof sql.PostgresError]
+)
+
+t('Result has columns spec', async() =>
+ ['x', (await sql`select 1 as x`).columns[0].name]
+)
+
+t('forEach has result as second argument', async() => {
+ let x
+ await sql`select 1 as x`.forEach((_, result) => x = result)
+ return ['x', x.columns[0].name]
+})
+
+t('Result as arrays', async() => {
+ const sql = postgres({
+ ...options,
+ transform: {
+ row: x => Object.values(x)
+ }
+ })
+
+ return ['1,2', (await sql`select 1 as a, 2 as b`)[0].join(',')]
+})
+
+t('Insert empty array', async() => {
+ await sql`create table tester (ints int[])`
+ return [
+ Array.isArray((await sql`insert into tester (ints) values (${ sql.array([]) }) returning *`)[0].ints),
+ true,
+ await sql`drop table tester`
+ ]
+})
+
+t('Insert array in sql()', async() => {
+ await sql`create table tester (ints int[])`
+ return [
+ Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) }) } returning *`)[0].ints),
+ true,
+ await sql`drop table tester`
+ ]
+})
+
+t('Automatically creates prepared statements', async() => {
+ const sql = postgres(options)
+ const result = await sql`select * from pg_prepared_statements`
+  return [true, result.some(x => x.name === result.statement.name)]
+})
+
+t('no_prepare: true disables prepared statements (deprecated)', async() => {
+ const sql = postgres({ ...options, no_prepare: true })
+ const result = await sql`select * from pg_prepared_statements`
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('prepare: false disables prepared statements', async() => {
+ const sql = postgres({ ...options, prepare: false })
+ const result = await sql`select * from pg_prepared_statements`
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('prepare: true enables prepared statements', async() => {
+ const sql = postgres({ ...options, prepare: true })
+ const result = await sql`select * from pg_prepared_statements`
+  return [true, result.some(x => x.name === result.statement.name)]
+})
+
+t('prepares unsafe query when "prepare" option is true', async() => {
+ const sql = postgres({ ...options, prepare: true })
+ const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'], { prepare: true })
+  return [true, result.some(x => x.name === result.statement.name)]
+})
+
+t('does not prepare unsafe query by default', async() => {
+ const sql = postgres({ ...options, prepare: true })
+ const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'])
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('Recreate prepared statements on transformAssignedExpr error', async() => {
+ const insert = () => sql`insert into test (name) values (${ '1' }) returning name`
+ await sql`create table test (name text)`
+ await insert()
+ await sql`alter table test alter column name type int using name::integer`
+ return [
+ 1,
+ (await insert())[0].name,
+ await sql`drop table test`
+ ]
+})
+
+t('Throws correct error when retrying in transactions', async() => {
+ await sql`create table test(x int)`
+ const error = await sql.begin(sql => sql`insert into test (x) values (${ false })`).catch(e => e)
+ return [
+ error.code,
+ '42804',
+ sql`drop table test`
+ ]
+})
+
+t('Recreate prepared statements on RevalidateCachedQuery error', async() => {
+ const select = () => sql`select name from test`
+ await sql`create table test (name text)`
+ await sql`insert into test values ('1')`
+ await select()
+ await sql`alter table test alter column name type int using name::integer`
+ return [
+ 1,
+ (await select())[0].name,
+ await sql`drop table test`
+ ]
+})
+
+t('Catches connection config errors', async() => {
+ const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' })
+
+ return [
+ 'wat',
+ await sql`select 1`.catch((e) => e.message)
+ ]
+})
+
+t('Catches connection config errors with end', async() => {
+ const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' })
+
+ return [
+ 'wat',
+ await sql`select 1`.catch((e) => e.message),
+ await sql.end()
+ ]
+})
+
+t('Catches query format errors', async() => [
+ 'wat',
+ await sql.unsafe({ toString: () => { throw new Error('wat') } }).catch((e) => e.message)
+])
+
+t('Multiple hosts', {
+ timeout: 10
+}, async() => {
+ const s1 = postgres({ idle_timeout })
+ , s2 = postgres({ idle_timeout, port: 5433 })
+ , sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout, max: 1 })
+ , result = []
+
+ const x1 = await sql`select 1`
+ result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
+ await s1`select pg_terminate_backend(${ x1.state.pid }::int)`
+ await delay(100)
+
+ const x2 = await sql`select 1`
+ result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
+ await s2`select pg_terminate_backend(${ x2.state.pid }::int)`
+ await delay(100)
+
+ result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
+
+ return ['5432,5433,5432', result.join(',')]
+})
+
+t('Escaping supports schemas and tables', async() => {
+ await sql`create schema a`
+ await sql`create table a.b (c int)`
+ await sql`insert into a.b (c) values (1)`
+ return [
+ 1,
+ (await sql`select ${ sql('a.b.c') } from a.b`)[0].c,
+ await sql`drop table a.b`,
+ await sql`drop schema a`
+ ]
+})
+
+t('Raw method returns rows as arrays', async() => {
+ const [x] = await sql`select 1`.raw()
+ return [
+ Array.isArray(x),
+ true
+ ]
+})
+
+t('Raw method returns values unparsed as Buffer', async() => {
+ const [[x]] = await sql`select 1`.raw()
+ return [
+ x instanceof Uint8Array,
+ true
+ ]
+})
+
+t('Copy read works', async() => {
+ const result = []
+
+ await sql`create table test (x int)`
+ await sql`insert into test select * from generate_series(1,10)`
+ const readable = await sql`copy test to stdout`.readable()
+ readable.on('data', x => result.push(x))
+ await new Promise(r => readable.on('end', r))
+
+ return [
+ result.length,
+ 10,
+ await sql`drop table test`
+ ]
+})
+
+t('Copy write works', { timeout: 2 }, async() => {
+ await sql`create table test (x int)`
+ const writable = await sql`copy test from stdin`.writable()
+
+ writable.write('1\n')
+ writable.write('1\n')
+ writable.end()
+
+ await new Promise(r => writable.on('finish', r))
+
+ return [
+ (await sql`select 1 from test`).length,
+ 2,
+ await sql`drop table test`
+ ]
+})
+
+t('Copy write as first works', async() => {
+ await sql`create table test (x int)`
+ const first = postgres(options)
+ const writable = await first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable()
+ writable.write('1\n')
+ writable.write('1\n')
+ writable.end()
+
+ await new Promise(r => writable.on('finish', r))
+
+ return [
+ (await sql`select 1 from test`).length,
+ 2,
+ await sql`drop table test`
+ ]
+})
+
+nt('Copy from file works', async() => {
+ await sql`create table test (x int, y int, z int)`
+ await new Promise(async r => fs
+ .createReadStream(rel('copy.csv'))
+ .pipe(await sql`copy test from stdin`.writable())
+ .on('finish', r)
+ )
+
+ return [
+ JSON.stringify(await sql`select * from test`),
+ '[{"x":1,"y":2,"z":3},{"x":4,"y":5,"z":6}]',
+ await sql`drop table test`
+ ]
+})
+
+t('Copy from works in transaction', async() => {
+ await sql`create table test(x int)`
+ const xs = await sql.begin(async sql => {
+ (await sql`copy test from stdin`.writable()).end('1\n2')
+ await delay(20)
+ return sql`select 1 from test`
+ })
+
+ return [
+ xs.length,
+ 2,
+ await sql`drop table test`
+ ]
+})
+
+nt('Copy from abort works', async() => {
+ const sql = postgres(options)
+ const readable = fs.createReadStream(rel('copy.csv'))
+
+ await sql`create table test (x int, y int, z int)`
+ await sql`TRUNCATE TABLE test`
+
+ const writable = await sql`COPY test FROM STDIN`.writable()
+
+ let aborted
+
+ readable
+ .pipe(writable)
+ .on('error', (err) => aborted = err)
+
+ writable.destroy(new Error('abort'))
+ await sql.end()
+
+ return [
+ 'abort',
+ aborted.message,
+ await postgres(options)`drop table test`
+ ]
+})
+
+t('multiple queries before connect', async() => {
+ const sql = postgres({ ...options, max: 2 })
+ const xs = await Promise.all([
+ sql`select 1 as x`,
+ sql`select 2 as x`,
+ sql`select 3 as x`,
+ sql`select 4 as x`
+ ])
+
+ return [
+ '1,2,3,4',
+ xs.map(x => x[0].x).join()
+ ]
+})
+
+t('subscribe', { timeout: 2 }, async() => {
+ const sql = postgres({
+ database: 'postgres_js_test',
+ publications: 'alltables',
+ fetch_types: false
+ })
+
+ await sql.unsafe('create publication alltables for all tables')
+
+ const result = []
+
+ await sql.subscribe('*', (row, info) =>
+ result.push(info.command, row.name || row.id)
+ )
+
+ await sql`
+ create table test (
+ id serial primary key,
+ name text
+ )
+ `
+ await sql`insert into test (name) values ('Murray')`
+ await sql`update test set name = 'Rothbard'`
+ await sql`delete from test`
+ await delay(100)
+ return [
+ 'insert,Murray,update,Rothbard,delete,1',
+ result.join(','),
+ await sql`drop table test`,
+ await sql`drop publication alltables`,
+ await sql.end()
+ ]
+})
+
+t('Execute works', async() => {
+ const result = await new Promise((resolve) => {
+    const sql = postgres({ ...options, fetch_types: false, debug: (id, query) => resolve(query) })
+ sql`select 1`.execute()
+ })
+
+ return [result, 'select 1']
+})
+
+t('Cancel running query works', async() => {
+ const query = sql`select pg_sleep(2)`
+ setTimeout(() => query.cancel(), 50)
+ const error = await query.catch(x => x)
+ return ['57014', error.code]
+})
+
+t('Cancel piped query works', async() => {
+ await sql`select 1`
+ const last = sql`select pg_sleep(0.2)`.execute()
+ const query = sql`select pg_sleep(2) as dig`
+ setTimeout(() => query.cancel(), 100)
+ const error = await query.catch(x => x)
+ await last
+ return ['57014', error.code]
+})
+
+t('Cancel queued query works', async() => {
+ const tx = sql.begin(sql => sql`select pg_sleep(0.2) as hej, 'hejsa'`)
+ const query = sql`select pg_sleep(2) as nej`
+ setTimeout(() => query.cancel(), 50)
+ const error = await query.catch(x => x)
+ await tx
+ return ['57014', error.code]
+})
+
+t('Fragments', async() => [
+ 1,
+ (await sql`
+ ${ sql`select` } 1 as x
+ `)[0].x
+])
+
+t('Result becomes array', async() => [
+ true,
+ (await sql`select 1`).slice() instanceof Array
+])
+
+t('Describe', async() => {
+ const type = (await sql`select ${ 1 }::int as x`.describe()).types[0]
+ return [23, type]
+})
+
+t('Describe a statement', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`select name, age from tester where name like $1 and age > $2`.describe()
+ return [
+ '25,23/name:25,age:23',
+ `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Describe a statement without parameters', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`select name, age from tester`.describe()
+ return [
+ '0,2',
+ `${ r.types.length },${ r.columns.length }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Describe a statement without columns', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`insert into tester (name, age) values ($1, $2)`.describe()
+ return [
+ '2,0',
+ `${ r.types.length },${ r.columns.length }`,
+ await sql`drop table tester`
+ ]
+})
+
+nt('Large object', async() => {
+ const file = rel('index.js')
+ , md5 = crypto.createHash('md5').update(fs.readFileSync(file)).digest('hex')
+
+ const lo = await sql.largeObject()
+ await new Promise(async r => fs.createReadStream(file).pipe(await lo.writable()).on('finish', r))
+ await lo.seek(0)
+
+ const out = crypto.createHash('md5')
+ await new Promise(r => lo.readable().then(x => x.on('data', x => out.update(x)).on('end', r)))
+
+ return [
+ md5,
+ out.digest('hex'),
+ await lo.close()
+ ]
+})
+
+t('Catches type serialize errors', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: x => x,
+ serialize: () => { throw new Error('watSerialize') }
+ }
+ }
+ })
+
+ return [
+ 'watSerialize',
+ (await sql`select ${ 'wat' }`.catch(e => e.message))
+ ]
+})
+
+t('Catches type parse errors', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: () => { throw new Error('watParse') },
+ serialize: x => x
+ }
+ }
+ })
+
+ return [
+ 'watParse',
+ (await sql`select 'wat'`.catch(e => e.message))
+ ]
+})
+
+t('Catches type serialize errors in transactions', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: x => x,
+ serialize: () => { throw new Error('watSerialize') }
+ }
+ }
+ })
+
+ return [
+ 'watSerialize',
+ (await sql.begin(sql => (
+ sql`select 1`,
+ sql`select ${ 'wat' }`
+ )).catch(e => e.message))
+ ]
+})
+
+t('Catches type parse errors in transactions', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: () => { throw new Error('watParse') },
+ serialize: x => x
+ }
+ }
+ })
+
+ return [
+ 'watParse',
+ (await sql.begin(sql => (
+ sql`select 1`,
+ sql`select 'wat'`
+ )).catch(e => e.message))
+ ]
+})
+
+t('Prevent premature end of connection in transaction', async() => {
+ const sql = postgres({ max_lifetime: 0.1, idle_timeout })
+ const result = await sql.begin(async sql => {
+ await sql`select 1`
+ await delay(200)
+ await sql`select 1`
+ return 'yay'
+ })
+
+ return [
+ 'yay',
+ result
+ ]
+})
+
+t('Ensure reconnect after max_lifetime with transactions', { timeout: 5000 }, async() => {
+ const sql = postgres({
+ max_lifetime: 0.01,
+ idle_timeout,
+ max: 1
+ })
+
+ let x = 0
+ while (x++ < 10) await sql.begin(sql => sql`select 1 as x`)
+
+ return [true, true]
+})
diff --git a/deno/tests/select-param.sql b/deno/tests/select-param.sql
new file mode 100644
index 00000000..d4de2440
--- /dev/null
+++ b/deno/tests/select-param.sql
@@ -0,0 +1 @@
+select $1 as x
diff --git a/deno/tests/select.sql b/deno/tests/select.sql
new file mode 100644
index 00000000..f951e920
--- /dev/null
+++ b/deno/tests/select.sql
@@ -0,0 +1 @@
+select 1 as x
diff --git a/deno/tests/test.js b/deno/tests/test.js
new file mode 100644
index 00000000..2e36de60
--- /dev/null
+++ b/deno/tests/test.js
@@ -0,0 +1,89 @@
+import process from 'https://deno.land/std@0.120.0/node/process.ts'
+/* eslint no-console: 0 */
+
+import util from 'https://deno.land/std@0.120.0/node/util.ts'
+
+let done = 0
+let only = false
+let ignored = 0
+let failed = false
+let promise = Promise.resolve()
+const tests = {}
+ , ignore = {}
+
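+// t = test, nt = skip (counted as ignored), ot = run only this test.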
+export const nt = () => ignored++
+export const ot = (...rest) => (only = true, test(true, ...rest))
+export const t = (...rest) => test(false, ...rest)
+t.timeout = 0.5
+
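+// Each test returns an [expected, got] tuple; both sides are awaited and
+// compared with strict equality once the test settles.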
+async function test(o, name, options, fn) {
+ typeof options !== 'object' && (fn = options, options = {})
+ const line = new Error().stack.split('\n')[3].match(':([0-9]+):')[1]
+
+ await 1
+
+ if (only && !o)
+ return
+
+ tests[line] = { fn, line, name }
+ promise = promise.then(() => Promise.race([
+ new Promise((resolve, reject) =>
+ fn.timer = setTimeout(() => reject('Timed out'), (options.timeout || t.timeout) * 1000)
+ ),
+ failed
+ ? (ignored++, ignore)
+ : fn()
+ ]))
+ .then(async x => {
+ clearTimeout(fn.timer)
+ if (x === ignore)
+ return
+
+ if (!Array.isArray(x))
+ throw new Error('Test should return result array')
+
+ const [expected, got] = await Promise.all(x)
+ if (expected !== got) {
+ failed = true
+ throw new Error(util.inspect(expected) + ' != ' + util.inspect(got))
+ }
+
+ tests[line].succeeded = true
+ process.stdout.write('✅')
+ })
+ .catch(err => {
+ tests[line].failed = failed = true
+ tests[line].error = err instanceof Error ? err : new Error(util.inspect(err))
+ })
+ .then(() => {
+ ++done === Object.keys(tests).length && exit()
+ })
+}
+
+function exit() {
+ console.log('')
+ let success = true
+  // forEach (not every) so all failed tests are reported, not just the first
+  Object.values(tests).forEach((x) => {
+    if (x.succeeded)
+      return
+
+ success = false
+ x.cleanup
+ ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup))
+ : console.error('⛔️', x.name + ' at line', x.line, x.failed
+ ? 'failed'
+ : 'never finished', x.error ? '\n' + util.inspect(x.error) : ''
+ )
+ })
+
+ only
+ ? console.error('⚠️', 'Not all tests were run')
+ : ignored
+      ? console.error('⚠️', ignored, 'ignored test' + (ignored === 1 ? '' : 's'), '\n')
+ : success
+ ? console.log('All good')
+ : console.error('⚠️', 'Not good')
+
+ !process.exitCode && (!success || only || ignored) && (process.exitCode = 1)
+}
+
diff --git a/lib/backend.js b/lib/backend.js
deleted file mode 100644
index 5248b735..00000000
--- a/lib/backend.js
+++ /dev/null
@@ -1,255 +0,0 @@
-const { errors } = require('./errors.js')
- , { entries, errorFields } = require('./types.js')
-
-const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
- , N = '\u0000'
-
-module.exports = Backend
-
-function Backend({
- onparse,
- onparameter,
- onsuspended,
- oncomplete,
- onerror,
- parsers,
- onauth,
- onready,
- oncopy,
- ondata,
- transform,
- onnotice,
- onnotify
-}) {
- let rows = 0
-
- const backend = entries({
- 1: ParseComplete,
- 2: BindComplete,
- 3: CloseComplete,
- A: NotificationResponse,
- C: CommandComplete,
- c: CopyDone,
- D: DataRow,
- d: CopyData,
- E: ErrorResponse,
- G: CopyInResponse,
- H: CopyOutResponse,
- I: EmptyQueryResponse,
- K: BackendKeyData,
- N: NoticeResponse,
- n: NoData,
- R: Authentication,
- S: ParameterStatus,
- s: PortalSuspended,
- T: RowDescription,
- t: ParameterDescription,
- V: FunctionCallResponse,
- v: NegotiateProtocolVersion,
- W: CopyBothResponse,
- Z: ReadyForQuery
- }).reduce(char, {})
-
- const state = backend.state = {
- status : 'I',
- pid : null,
- secret : null
- }
-
- function ParseComplete() {
- onparse()
- }
-
- /* c8 ignore next 2 */
- function BindComplete() {
- backend.query.result.columns = backend.query.statement.columns
- }
-
- function CloseComplete() { /* No handling needed */ }
-
- function NotificationResponse(x) {
- if (!onnotify)
- return
-
- let index = 9
- while (x[index++] !== 0);
- onnotify(
- x.toString('utf8', 9, index - 1),
- x.toString('utf8', index, x.length - 1)
- )
- }
-
- function CommandComplete(x) {
- rows = 0
-
- if (!backend.query)
- return
-
- for (let i = x.length - 1; i > 0; i--) {
- if (x[i] === 32 && x[i + 1] < 58 && backend.query.result.count === null)
- backend.query.result.count = +x.toString('utf8', i + 1, x.length - 1)
- if (x[i - 1] >= 65) {
- backend.query.result.command = x.toString('utf8', 5, i)
- backend.query.result.state = state
- break
- }
- }
-
- oncomplete()
- }
-
- /* c8 ignore next 3 */
- function CopyDone() {
- backend.query.readable.push(null)
- }
-
- function DataRow(x) {
- let index = 7
- let length
- let column
- let value
-
- const row = backend.query.raw ? new Array(backend.query.statement.columns.length) : {}
- for (let i = 0; i < backend.query.statement.columns.length; i++) {
- column = backend.query.statement.columns[i]
- length = x.readInt32BE(index)
- index += 4
-
- value = length === -1
- ? null
- : backend.query.raw
- ? x.slice(index, index += length)
- : column.parser === undefined
- ? x.toString('utf8', index, index += length)
- : column.parser.array === true
- ? column.parser(x.toString('utf8', index + 1, index += length))
- : column.parser(x.toString('utf8', index, index += length))
-
- backend.query.raw
- ? (row[i] = value)
- : (row[column.name] = transform.value.from ? transform.value.from(value) : value)
- }
-
- backend.query.stream
- ? backend.query.stream(transform.row.from ? transform.row.from(row) : row, backend.query.result)
- : (backend.query.result[rows++] = transform.row.from ? transform.row.from(row) : row)
- }
-
- /* c8 ignore next 3 */
- function CopyData(x) {
- ondata(x.slice(5))
- }
-
- function ErrorResponse(x) {
- onerror(errors.postgres(parseError(x)))
- }
-
- /* c8 ignore next 3 */
- function CopyInResponse() {
- oncopy()
- }
-
- /* c8 ignore next 3 */
- function CopyOutResponse() { /* No handling needed */ }
-
- /* c8 ignore next 3 */
- function EmptyQueryResponse() { /* No handling needed */ }
-
- function BackendKeyData(x) {
- state.pid = x.readInt32BE(5)
- state.secret = x.readInt32BE(9)
- }
-
- function NoticeResponse(x) {
- onnotice
- ? onnotice(parseError(x))
- : console.log(parseError(x)) // eslint-disable-line
- }
-
- function NoData() { /* No handling needed */ }
-
- function Authentication(x) {
- const type = x.readInt32BE(5)
- type !== 0 && onauth(type, x, onerror)
- }
-
- function ParameterStatus(x) {
- const [k, v] = x.toString('utf8', 5, x.length - 1).split(N)
- onparameter(k, v)
- }
-
- function PortalSuspended() {
- onsuspended(backend.query.result)
- backend.query.result = []
- rows = 0
- }
-
- /* c8 ignore next 3 */
- function ParameterDescription() { /* No handling needed */ }
-
- function RowDescription(x) {
- if (backend.query.result.command) {
- backend.query.results = backend.query.results || [backend.query.result]
- backend.query.results.push(backend.query.result = [])
- backend.query.result.count = null
- backend.query.statement.columns = null
- }
-
- if (backend.query.statement.columns)
- return backend.query.result.columns = backend.query.statement.columns
-
- const length = x.readInt16BE(5)
- let index = 7
- let start
-
- backend.query.statement.columns = Array(length)
-
- for (let i = 0; i < length; ++i) {
- start = index
- while (x[index++] !== 0);
- const type = x.readInt32BE(index + 6)
- backend.query.statement.columns[i] = {
- name: transform.column.from
- ? transform.column.from(x.toString('utf8', start, index - 1))
- : x.toString('utf8', start, index - 1),
- parser: parsers[type],
- type
- }
- index += 18
- }
- backend.query.result.columns = backend.query.statement.columns
- }
-
- /* c8 ignore next 3 */
- function FunctionCallResponse() {
- backend.error = errors.notSupported('FunctionCallResponse')
- }
-
- /* c8 ignore next 3 */
- function NegotiateProtocolVersion() {
- backend.error = errors.notSupported('NegotiateProtocolVersion')
- }
-
- /* c8 ignore next 3 */
- function CopyBothResponse() {
- oncopy()
- }
-
- function ReadyForQuery() {
- onready(backend.error)
- }
-
- return backend
-}
-
-function parseError(x) {
- const error = {}
- let start = 5
- for (let i = 5; i < x.length - 1; i++) {
- if (x[i] === 0) {
- error[errorFields[x[start]]] = x.toString('utf8', start + 1, i)
- start = i + 1
- }
- }
- return error
-}
diff --git a/lib/connection.js b/lib/connection.js
deleted file mode 100644
index 3f5c8360..00000000
--- a/lib/connection.js
+++ /dev/null
@@ -1,472 +0,0 @@
-const net = require('net')
-const tls = require('tls')
-const frontend = require('./frontend.js')
-const Backend = require('./backend.js')
-const Queue = require('./queue.js')
-const { END, retryRoutines } = require('./types.js')
-const { errors } = require('./errors.js')
-
-module.exports = Connection
-
-let count = 1
-
-function Connection(options = {}) {
- const statements = new Map()
- const {
- onparameter,
- transform,
- idle_timeout,
- connect_timeout,
- onnotify,
- onnotice,
- onclose,
- parsers
- } = options
- let buffer = Buffer.alloc(0)
- let length = 0
- let messages = []
- let timer
- let statement_id = 1
- let ended
- let open = false
- let ready = false
- let write = false
- let next = false
- let connect_timer
- let buffers = null
- let remaining = 0
-
- const queries = Queue()
- , id = count++
- , uid = Math.random().toString(36).slice(2)
-
- const socket = postgresSocket(options, {
- ready,
- data,
- error,
- close,
- cleanup
- })
-
- const connection = { send, end, destroy, socket }
-
- const backend = Backend({
- onparse,
- onparameter,
- onsuspended,
- oncomplete,
- onerror,
- transform,
- parsers,
- onnotify,
- onnotice,
- onready,
- onauth,
- oncopy,
- ondata,
- error
- })
-
- function onsuspended(x, done) {
- new Promise(r => r(x.length && backend.query.cursor(
- backend.query.cursor.rows === 1 ? x[0] : x
- ))).then(x => {
- x === END || done
- ? socket.write(frontend.Close())
- : socket.write(frontend.ExecuteCursor(backend.query.cursor.rows))
- }).catch(err => {
- backend.query.reject(err)
- socket.write(frontend.Close())
- })
- }
-
- function oncomplete() {
- backend.query.cursor && onsuspended(backend.query.result, true)
- }
-
- function onerror(x) {
- if (!backend.query)
- return error(x)
-
- backend.error = x
- backend.query.cursor && socket.write(frontend.Sync)
- }
-
- function onparse() {
- if (backend.query && backend.query.statement.sig)
- statements.set(backend.query.statement.sig, backend.query.statement)
- }
-
- function onauth(type, x, onerror) {
- Promise.resolve(
- typeof options.pass === 'function'
- ? options.pass()
- : options.pass
- ).then(pass =>
- socket.write(frontend.auth(type, x, options, pass))
- ).catch(onerror)
- }
-
- function end() {
- clearTimeout(timer)
- const promise = new Promise((resolve) => {
- ended = () => resolve(socket.end())
- })
-
- process.nextTick(() => (ready || !backend.query) && ended())
-
- return promise
- }
-
- function destroy() {
- error(errors.connection('CONNECTION_DESTROYED', options, socket))
- socket.destroy()
- }
-
- function error(err) {
- backend.query && backend.query.reject(err)
- let q
- while ((q = queries.shift()))
- q.reject(err)
- }
-
- function retry(query) {
- query.retried = true
- statements.delete(query.sig)
- ready = true
- backend.query = backend.error = null
- send(query, { sig: query.sig, str: query.str, args: query.args })
- }
-
- function send(query, { sig, str, args = [] }) {
- try {
- query.sig = sig
- query.str = str
- query.args = args
- query.result = []
- query.result.count = null
- idle_timeout && clearTimeout(timer)
-
- typeof options.debug === 'function' && options.debug(id, str, args)
- const buffer = query.simple
- ? simple(str, query)
- : statements.has(sig)
- ? prepared(statements.get(sig), args, query)
- : prepare(sig, str, args, query)
-
- ready
- ? (backend.query = query, ready = false)
- : queries.push(query)
-
- open
- ? socket.write(buffer)
- : (messages.push(buffer), connect())
- } catch (err) {
- query.reject(err)
- idle()
- }
- }
-
- function connect() {
- connect_timeout && (
- clearTimeout(connect_timer),
- connect_timer = setTimeout(connectTimedOut, connect_timeout * 1000).unref()
- )
- socket.connect()
- }
-
- function connectTimedOut() {
- error(errors.connection('CONNECT_TIMEOUT', options, socket))
- socket.destroy()
- }
-
- function simple(str, query) {
- query.statement = {}
- return frontend.Query(str)
- }
-
- function prepared(statement, args, query) {
- query.statement = statement
- return Buffer.concat([
- frontend.Bind(query.statement.name, args),
- query.cursor
- ? frontend.Describe('P')
- : Buffer.alloc(0),
- query.cursor
- ? frontend.ExecuteCursor(query.cursor.rows)
- : frontend.Execute
- ])
- }
-
- function prepare(sig, str, args, query) {
- query.statement = { name: sig ? 'p' + uid + statement_id++ : '', sig }
- return Buffer.concat([
- frontend.Parse(query.statement.name, str, args),
- frontend.Bind(query.statement.name, args),
- query.cursor
- ? frontend.Describe('P')
- : frontend.Describe('S', query.statement.name),
- query.cursor
- ? frontend.ExecuteCursor(query.cursor.rows)
- : frontend.Execute
- ])
- }
-
- function idle() {
- if (idle_timeout && !backend.query && queries.length === 0) {
- clearTimeout(timer)
- timer = setTimeout(socket.end, idle_timeout * 1000)
- }
- }
-
- function onready(err) {
- clearTimeout(connect_timer)
- if (err) {
- if (backend.query) {
- if (!backend.query.retried && retryRoutines[err.routine])
- return retry(backend.query)
-
- err.stack += backend.query.origin.replace(/.*\n/, '\n')
- Object.defineProperty(err, 'query', {
- value: backend.query.str,
- enumerable: !!options.debug
- })
- Object.defineProperty(err, 'parameters', {
- value: backend.query.args,
- enumerable: !!options.debug
- })
- backend.query.reject(err)
- } else {
- error(err)
- }
- } else if (backend.query) {
- backend.query.resolve(backend.query.results || backend.query.result)
- }
-
- backend.query = backend.error = null
- idle()
-
- if (!open) {
- if (multi())
- return
-
- messages.forEach(x => socket.write(x))
- messages = []
- open = true
- }
-
- backend.query = queries.shift()
- ready = !backend.query
- ready && ended && ended()
- }
-
- function oncopy() {
- backend.query.writable.push = ({ chunk, error, callback }) => {
- error
- ? socket.write(frontend.CopyFail(error))
- : chunk === null
- ? socket.write(frontend.CopyDone())
- : socket.write(frontend.CopyData(chunk), callback)
- }
- backend.query.writable.forEach(backend.query.writable.push)
- }
-
- function ondata(x) {
- !backend.query.readable.push(x) && socket.pause()
- }
-
- function multi() {
- if (next)
- return (next = false, true)
-
- if (!write && options.target_session_attrs === 'read-write') {
- backend.query = {
- origin: '',
- result: [],
- statement: {},
- resolve: ([{ transaction_read_only }]) => transaction_read_only === 'on'
- ? (next = true, socket.destroy())
- : (write = true, socket.success()),
- reject: error
- }
- socket.write(frontend.Query('show transaction_read_only'))
- return true
- }
- }
-
- function data(x) {
- if (buffers) {
- buffers.push(x)
- remaining -= x.length
- if (remaining >= 0)
- return
- }
-
- buffer = buffers
- ? Buffer.concat(buffers, length - remaining)
- : buffer.length === 0
- ? x
- : Buffer.concat([buffer, x], buffer.length + x.length)
-
- while (buffer.length > 4) {
- length = buffer.readInt32BE(1)
- if (length >= buffer.length) {
- remaining = length - buffer.length
- buffers = [buffer]
- break
- }
-
- backend[buffer[0]](buffer.slice(0, length + 1))
- buffer = buffer.slice(length + 1)
- remaining = 0
- buffers = null
- }
- }
-
- function close() {
- clearTimeout(connect_timer)
- error(errors.connection('CONNECTION_CLOSED', options, socket))
- messages = []
- onclose && onclose()
- }
-
- function cleanup() {
- statements.clear()
- open = ready = write = false
- }
-
- /* c8 ignore next */
- return connection
-}
-
-function postgresSocket(options, {
- error,
- close,
- cleanup,
- data
-}) {
- let socket
- let ended = false
- let closed = true
- let succeeded = false
- let next = null
- let buffer
- let i = 0
- let retries = 0
-
- function onclose(err) {
- retries++
- oncleanup()
- !ended && !succeeded && i < options.host.length
- ? connect()
- : err instanceof Error
- ? (error(err), close())
- : close()
- i >= options.host.length && (i = 0)
- }
-
- function oncleanup() {
- socket.removeListener('data', data)
- socket.removeListener('close', onclose)
- socket.removeListener('error', onclose)
- socket.removeListener('connect', ready)
- socket.removeListener('secureConnect', ready)
- closed = true
- cleanup()
- }
-
- async function connect() {
- if (!closed)
- return
-
- retries && await new Promise(r =>
- setTimeout(r, Math.min((0.5 + Math.random()) * Math.pow(1.3, retries) * 10, 10000))
- )
-
- closed = succeeded = false
-
- socket = options.path
- ? net.connect(options.path)
- : net.connect(
- x.port = options.port[i],
- x.host = options.host[i++]
- ).setKeepAlive(true, 1000 * 60)
-
- if (!options.ssl)
- return attach(socket)
-
- socket.once('connect', () => socket.write(frontend.SSLRequest))
- socket.once('error', onclose)
- socket.once('close', onclose)
- socket.once('data', x => {
- socket.removeListener('error', onclose)
- socket.removeListener('close', onclose)
- x.toString() === 'S'
- ? attach(tls.connect(Object.assign({ socket }, ssl(options.ssl))))
- : options.ssl === 'prefer'
- ? (attach(socket), ready())
- : /* c8 ignore next */ error('Server does not support SSL')
- })
- }
-
- function ssl(x) {
- return x === 'require' || x === 'allow' || x === 'prefer'
- ? { rejectUnauthorized: false }
- : x
- }
-
- function attach(x) {
- socket = x
- socket.on('data', data)
- socket.once('error', onclose)
- socket.once('connect', ready)
- socket.once('secureConnect', ready)
- socket.once('close', onclose)
- }
-
- function ready() {
- retries = 0
- try {
- socket.write(frontend.StartupMessage(options))
- } catch (e) {
- error(e)
- socket.end()
- }
- }
-
- const x = {
- success: () => {
- retries = 0
- succeeded = true
- i >= options.host.length && (i = 0)
- },
- pause: () => socket.pause(),
- resume: () => socket.resume(),
- isPaused: () => socket.isPaused(),
- write: (x, callback) => {
- buffer = buffer ? Buffer.concat([buffer, x]) : Buffer.from(x)
- if (buffer.length >= 1024)
- return write(callback)
- next === null && (next = setImmediate(write))
- callback && callback()
- },
- destroy: () => {
- socket && socket.destroy()
- return Promise.resolve()
- },
- end: () => {
- ended = true
- return new Promise(r => socket && !closed ? (socket.once('close', r), socket.end()) : r())
- },
- connect
- }
-
- function write(callback) {
- socket.write(buffer, callback)
- next !== null && clearImmediate(next)
- buffer = next = null
- }
-
- /* c8 ignore next */
- return x
-}
diff --git a/lib/frontend.js b/lib/frontend.js
deleted file mode 100644
index 8a980c18..00000000
--- a/lib/frontend.js
+++ /dev/null
@@ -1,249 +0,0 @@
-const crypto = require('crypto')
-const bytes = require('./bytes.js')
-const { entries } = require('./types.js')
-const { errors } = require('./errors.js')
-
-const N = String.fromCharCode(0)
-const empty = Buffer.alloc(0)
-const Sync = bytes.S().end()
-const Flush = bytes.H().end()
-const Execute = Buffer.concat([
- bytes.E().str(N).i32(0).end(),
- bytes.S().end()
-])
-
-const SSLRequest = bytes.i32(8).i32(80877103).end(8)
-
-const authNames = {
- 2 : 'KerberosV5',
- 3 : 'CleartextPassword',
- 5 : 'MD5Password',
- 6 : 'SCMCredential',
- 7 : 'GSS',
- 8 : 'GSSContinue',
- 9 : 'SSPI',
- 10: 'SASL',
- 11: 'SASLContinue',
- 12: 'SASLFinal'
-}
-
-const auths = {
- 3 : AuthenticationCleartextPassword,
- 5 : AuthenticationMD5Password,
- 10: SASL,
- 11: SASLContinue,
- 12: SASLFinal
-}
-
-module.exports = {
- StartupMessage,
- SSLRequest,
- auth,
- Bind,
- Sync,
- Flush,
- Parse,
- Query,
- Close,
- Execute,
- ExecuteCursor,
- Describe,
- CopyData,
- CopyDone,
- CopyFail
-}
-
-function StartupMessage({ user, database, connection }) {
- return bytes
- .inc(4)
- .i16(3)
- .z(2)
- .str(entries(Object.assign({
- user,
- database,
- client_encoding: '\'utf-8\''
- },
- connection
- )).filter(([, v]) => v).map(([k, v]) => k + N + v).join(N))
- .z(2)
- .end(0)
-}
-
-function auth(type, x, options, pass) {
- if (type in auths)
- return auths[type](type, x, options, pass)
- /* c8 ignore next */
- throw errors.generic({
- message: 'Auth type ' + (authNames[type] || type) + ' not implemented',
- type: authNames[type] || type,
- code: 'AUTH_TYPE_NOT_IMPLEMENTED'
- })
-}
-
-function AuthenticationCleartextPassword(type, x, options, pass) {
- return bytes
- .p()
- .str(pass)
- .z(1)
- .end()
-}
-
-function AuthenticationMD5Password(type, x, options, pass) {
- return bytes
- .p()
- .str('md5' + md5(Buffer.concat([Buffer.from(md5(pass + options.user)), x.slice(9)])))
- .z(1)
- .end()
-}
-
-function SASL(type, x, options) {
- bytes
- .p()
- .str('SCRAM-SHA-256' + N)
-
- const i = bytes.i
-
- options.nonce = crypto.randomBytes(18).toString('base64')
-
- return bytes
- .inc(4)
- .str('n,,n=*,r=' + options.nonce)
- .i32(bytes.i - i - 4, i)
- .end()
-}
-
-function SASLContinue(type, x, options, pass) {
- const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {})
-
- const saltedPassword = crypto.pbkdf2Sync(
- pass,
- Buffer.from(res.s, 'base64'),
- parseInt(res.i), 32,
- 'sha256'
- )
-
- const clientKey = hmac(saltedPassword, 'Client Key')
-
- const auth = 'n=*,r=' + options.nonce + ','
- + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i
- + ',c=biws,r=' + res.r
-
- options.serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64')
-
- return bytes.p()
- .str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64'))
- .end()
-}
-
-function SASLFinal(type, x, options) {
- if (x.toString('utf8', 9).split(N, 1)[0].slice(2) === options.serverSignature)
- return empty
- /* c8 ignore next 4 */
- throw errors.generic({
- message: 'The server did not return the correct signature',
- code: 'SASL_SIGNATURE_MISMATCH'
- })
-}
-
-function Query(x) {
- return bytes
- .Q()
- .str(x + N)
- .end()
-}
-
-function CopyData(x) {
- return bytes
- .d()
- .raw(x)
- .end()
-}
-
-function CopyDone() {
- return bytes
- .c()
- .end()
-}
-
-function CopyFail(err) {
- return bytes
- .f()
- .str(String(err) + N)
- .end()
-}
-
-function Bind(name, args) {
- let prev
-
- bytes
- .B()
- .str(N)
- .str(name + N)
- .i16(0)
- .i16(args.length)
-
- args.forEach(x => {
- if (x.value == null)
- return bytes.i32(0xFFFFFFFF)
-
- prev = bytes.i
- bytes
- .inc(4)
- .str(x.value)
- .i32(bytes.i - prev - 4, prev)
- })
-
- bytes.i16(0)
-
- return bytes.end()
-}
-
-function Parse(name, str, args) {
- bytes
- .P()
- .str(name + N)
- .str(str + N)
- .i16(args.length)
-
- args.forEach(x => bytes.i32(x.type))
-
- return bytes.end()
-}
-
-function Describe(x, name = '') {
- return bytes.D().str(x).str(name + N).end()
-}
-
-function ExecuteCursor(rows) {
- return Buffer.concat([
- bytes.E().str(N).i32(rows).end(),
- bytes.H().end()
- ])
-}
-
-function Close() {
- return Buffer.concat([
- bytes.C().str('P').str(N).end(),
- bytes.S().end()
- ])
-}
-
-function md5(x) {
- return crypto.createHash('md5').update(x).digest('hex')
-}
-
-function hmac(key, x) {
- return crypto.createHmac('sha256', key).update(x).digest()
-}
-
-function sha256(x) {
- return crypto.createHash('sha256').update(x).digest()
-}
-
-function xor(a, b) {
- const length = Math.max(a.length, b.length)
- const buffer = Buffer.allocUnsafe(length)
- for (let i = 0; i < length; i++)
- buffer[i] = a[i] ^ b[i]
- return buffer
-}
diff --git a/lib/index.js b/lib/index.js
deleted file mode 100644
index 358ece9e..00000000
--- a/lib/index.js
+++ /dev/null
@@ -1,711 +0,0 @@
-const fs = require('fs')
-const Url = require('url')
-const Stream = require('stream')
-const Connection = require('./connection.js')
-const Queue = require('./queue.js')
-const Subscribe = require('./subscribe.js')
-const { errors, PostgresError } = require('./errors.js')
-const {
- mergeUserTypes,
- arraySerializer,
- arrayParser,
- fromPascal,
- fromCamel,
- fromKebab,
- inferType,
- toPascal,
- toCamel,
- toKebab,
- entries,
- escape,
- types,
- END
-} = require('./types.js')
-
-const notPromise = {
- P: {},
- finally: notTagged,
- then: notTagged,
- catch: notTagged
-}
-
-function notTagged() {
- throw errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' })
-}
-
-Object.assign(Postgres, {
- PostgresError,
- toPascal,
- toCamel,
- toKebab,
- fromPascal,
- fromCamel,
- fromKebab,
- BigInt: {
- to: 20,
- from: [20],
- parse: x => BigInt(x), // eslint-disable-line
- serialize: x => x.toString()
- }
-})
-
-const originCache = new Map()
-
-module.exports = Postgres
-
-function Postgres(a, b) {
- if (arguments.length && !a)
- throw new Error(a + ' - is not a url or connection object')
-
- const options = parseOptions(a, b)
-
- const max = Math.max(1, options.max)
- , subscribe = Subscribe(Postgres, a, b)
- , transform = options.transform
- , connections = Queue()
- , all = []
- , queries = Queue()
- , listeners = {}
- , typeArrayMap = {}
- , files = {}
- , isInsert = /(^|[^)(])\s*insert\s+into\s+[^\s]+\s*$/i
- , isSelect = /(^|[^)(])\s*select\s*$/i
-
- let ready = false
- , ended = null
- , arrayTypesPromise = options.fetch_types ? null : Promise.resolve([])
- , slots = max
- , listener
-
- function postgres(xs) {
- return query({ tagged: true, prepare: options.prepare }, getConnection(), xs, Array.from(arguments).slice(1))
- }
-
- Object.assign(postgres, {
- options: Object.assign({}, options, { pass: null }),
- parameters: {},
- subscribe,
- listen,
- begin,
- end
- })
-
- addTypes(postgres)
-
- const onparameter = options.onparameter
- options.onparameter = (k, v) => {
- if (postgres.parameters[k] !== v) {
- postgres.parameters[k] = v
- onparameter && onparameter(k, v)
- }
- }
-
- return postgres
-
- function begin(options, fn) {
- if (!fn) {
- fn = options
- options = ''
- }
-
- return new Promise((resolve, reject) => {
- const connection = getConnection(true)
- , query = { resolve, reject, fn, begin: 'begin ' + options.replace(/[^a-z ]/ig, '') }
-
- connection
- ? transaction(query, connection)
- : queries.push(query)
- })
- }
-
- function transaction({
- resolve,
- reject,
- fn,
- begin = '',
- savepoint = ''
- }, connection) {
- begin && (connection.savepoints = 0)
- addTypes(scoped, connection)
- scoped.savepoint = (name, fn) => new Promise((resolve, reject) => {
- transaction({
- savepoint: 'savepoint s' + connection.savepoints++ + '_' + (fn ? name : ''),
- resolve,
- reject,
- fn: fn || name
- }, connection)
- })
-
- query({}, connection, begin || savepoint)
- .then(() => {
- const result = fn(scoped)
- return Array.isArray(result)
- ? Promise.all(result)
- : result
- })
- .then((x) =>
- begin
- ? scoped`commit`.then(() => resolve(x))
- : resolve(x)
- )
- .catch((err) => {
- query({}, connection,
- begin
- ? 'rollback'
- : 'rollback to ' + savepoint
- )
- .then(() => reject(err), reject)
- })
- .then(begin && (() => {
- connections.push(connection)
- next(connection)
- }))
-
- function scoped(xs) {
- return query({ tagged: true }, connection, xs, Array.from(arguments).slice(1))
- }
- }
-
- function next() {
- let c
- , x
-
- while (
- (x = queries.peek())
- && (c = x.query && x.query.connection || getConnection(queries.peek().fn))
- && queries.shift()
- ) {
- x.fn
- ? transaction(x, c)
- : send(c, x.query, x.xs, x.args)
-
- x.query && x.query.connection && x.query.writable && (c.blocked = true)
- }
- }
-
- function query(query, connection, xs, args) {
- query.origin = options.debug ? new Error().stack : cachedError(xs)
- query.prepare = 'prepare' in query ? query.prepare : options.prepare
- if (query.tagged && (!Array.isArray(xs) || !Array.isArray(xs.raw)))
- return nested(xs, args)
-
- const promise = new Promise((resolve, reject) => {
- query.resolve = resolve
- query.reject = reject
- ended !== null
- ? reject(errors.connection('CONNECTION_ENDED', options, options))
- : ready
- ? send(connection, query, xs, args)
- : fetchArrayTypes(connection).then(() => send(connection, query, xs, args)).catch(reject)
- })
-
- addMethods(promise, query)
-
- return promise
- }
-
- function cachedError(xs) {
- if (originCache.has(xs))
- return originCache.get(xs)
-
- const x = Error.stackTraceLimit
- Error.stackTraceLimit = 4
- originCache.set(xs, new Error().stack)
- Error.stackTraceLimit = x
- return originCache.get(xs)
- }
-
- function nested(first, rest) {
- const o = Object.create(notPromise)
- o.first = first
- o.rest = rest.reduce((acc, val) => acc.concat(val), [])
- return o
- }
-
- function send(connection, query, xs, args) {
- connection && (query.connection = connection)
- if (!connection || connection.blocked)
- return queries.push({ query, xs, args, connection })
-
- connection.blocked = query.blocked
- process.nextTick(connection.send, query, query.tagged ? parseTagged(query, xs, args) : parseUnsafe(query, xs, args))
- }
-
- function getConnection(reserve) {
- const connection = slots ? createConnection(options) : connections.shift()
- !reserve && connection && connections.push(connection)
- return connection
- }
-
- function createConnection(options) {
- slots--
- // The options object gets cloned as the as the authentication in the frontend.js mutates the
- // options to persist a nonce and signature, which are unique per connection.
- const connection = Connection({ ...options })
- all.push(connection)
- return connection
- }
-
- function array(xs) {
- const o = Object.create(notPromise)
- o.array = xs
- return o
- }
-
- function json(value) {
- return {
- type: types.json.to,
- value
- }
- }
-
- function fetchArrayTypes(connection) {
- return arrayTypesPromise || (arrayTypesPromise =
- new Promise((resolve, reject) => {
- send(connection, { resolve, reject, simple: true, tagged: false, prepare: false, origin: new Error().stack }, `
- select b.oid, b.typarray
- from pg_catalog.pg_type a
- left join pg_catalog.pg_type b on b.oid = a.typelem
- where a.typcategory = 'A'
- group by b.oid, b.typarray
- order by b.oid
- `)
- }).catch(err => {
- arrayTypesPromise = null
- throw err
- }).then(types => {
- types.forEach(({ oid, typarray }) => addArrayType(oid, typarray))
- ready = true
- })
- )
- }
-
- function addArrayType(oid, typarray) {
- const parser = options.parsers[oid]
-
- typeArrayMap[oid] = typarray
- options.parsers[typarray] = (xs) => arrayParser(xs, parser)
- options.parsers[typarray].array = true
- options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid])
- }
-
- function addTypes(sql, connection) {
- Object.assign(sql, {
- END,
- PostgresError,
- types: {},
- notify,
- unsafe,
- array,
- file,
- json
- })
-
- function notify(channel, payload) {
- return sql`select pg_notify(${ channel }, ${ '' + payload })`
- }
-
- function unsafe(xs, args, queryOptions) {
- const prepare = queryOptions && queryOptions.prepare || false
- return query({ simple: !args, prepare }, connection || getConnection(), xs, args || [])
- }
-
- function file(path, args, options = {}) {
- if (!Array.isArray(args)) {
- options = args || {}
- args = null
- }
-
- if ('cache' in options === false)
- options.cache = true
-
- const file = files[path]
- const q = { tagged: false, simple: !args }
-
- if (options.cache && typeof file === 'string')
- return query(q, connection || getConnection(), file, args || [])
-
- const promise = ((options.cache && file) || (files[path] = new Promise((resolve, reject) => {
- fs.readFile(path, 'utf8', (err, str) => {
- if (err)
- return reject(err)
-
- files[path] = str
- resolve(str)
- })
- }))).then(str => query(q, connection || getConnection(), str, args || []))
-
- addMethods(promise, q)
-
- return promise
- }
-
- options.types && entries(options.types).forEach(([name, type]) => {
- sql.types[name] = (x) => ({ type: type.to, value: x })
- })
- }
-
- function addMethods(promise, query) {
- promise.readable = () => readable(promise, query)
- promise.writable = () => writable(promise, query)
- promise.raw = () => (query.raw = true, promise)
- promise.stream = (fn) => (query.stream = fn, promise)
- promise.cursor = cursor(promise, query)
- }
-
- function cursor(promise, query) {
- return (rows, fn) => {
- if (typeof rows === 'function') {
- fn = rows
- rows = 1
- }
- fn.rows = rows
- query.cursor = fn
- query.simple = false
- return promise
- }
- }
-
- function readable(promise, query) {
- query.connection
- ? query.connection.blocked = true
- : query.blocked = true
-
- const read = () => query.connection.socket.isPaused() && query.connection.socket.resume()
- promise.catch(err => query.readable.destroy(err)).then(() => {
- query.connection.blocked = false
- read()
- next()
- })
- return query.readable = new Stream.Readable({ read })
- }
-
- function writable(promise, query) {
- query.connection
- ? query.connection.blocked = true
- : query.blocked = true
- let error
- query.prepare = false
- query.simple = true
- query.writable = []
- promise.catch(err => error = err).then(() => {
- query.connection.blocked = false
- next()
- })
- return query.readable = new Stream.Duplex({
- read() { /* backpressure handling not possible */ },
- write(chunk, encoding, callback) {
- error
- ? callback(error)
- : query.writable.push({ chunk, callback })
- },
- destroy(error, callback) {
- callback(error)
- query.writable.push({ error })
- },
- final(callback) {
- if (error)
- return callback(error)
-
- query.writable.push({ chunk: null })
- promise.then(() => callback(), callback)
- }
- })
- }
-
- function listen(channel, fn) {
- const listener = getListener()
-
- if (channel in listeners) {
- listeners[channel].push(fn)
- return Promise.resolve(Object.create(listener.result, {
- unlisten: { value: unlisten }
- }))
- }
-
- listeners[channel] = [fn]
-
- return query({}, listener.conn, 'listen ' + escape(channel))
- .then((result) => {
- Object.assign(listener.result, result)
- return Object.create(listener.result, {
- unlisten: { value: unlisten }
- })
- })
-
- function unlisten() {
- if (!listeners[channel])
- return Promise.resolve()
-
- listeners[channel] = listeners[channel].filter(handler => handler !== fn)
-
- if (listeners[channel].length)
- return Promise.resolve()
-
- delete listeners[channel]
- return query({}, getListener().conn, 'unlisten ' + escape(channel)).then(() => undefined)
- }
- }
-
- function getListener() {
- if (listener)
- return listener
-
- const conn = Connection(Object.assign({
- onnotify: (c, x) => c in listeners && listeners[c].forEach(fn => fn(x)),
- onclose: () => {
- Object.entries(listeners).forEach(([channel, fns]) => {
- delete listeners[channel]
- Promise.all(fns.map(fn => listen(channel, fn).catch(() => { /* noop */ })))
- })
- listener = null
- }
- },
- options
- ))
- listener = { conn, result: {} }
- all.push(conn)
- return listener
- }
-
- function end({ timeout = null } = {}) {
- if (ended)
- return ended
-
- let destroy
-
- return ended = Promise.race([
- Promise.resolve(arrayTypesPromise).then(() => Promise.all(
- (subscribe.sql ? [subscribe.sql.end({ timeout: 0 })] : []).concat(all.map(c => c.end()))
- ))
- ].concat(
- timeout === 0 || timeout > 0
- ? new Promise(r => destroy = setTimeout(() => (
- subscribe.sql && subscribe.sql.end({ timeout }),
- all.map(c => c.destroy()),
- r()
- ), timeout * 1000))
- : []
- ))
- .then(() => clearTimeout(destroy))
- }
-
- function parseUnsafe(query, str, args = []) {
- const types = []
- , xargs = []
-
- args.forEach(x => parseValue(x, xargs, types))
-
- return {
- sig: query.prepare && types + str,
- str,
- args: xargs
- }
- }
-
- function parseTagged(query, xs, args = []) {
- const xargs = []
- , types = []
-
- let str = xs[0]
- let arg
-
- for (let i = 1; i < xs.length; i++) {
- arg = args[i - 1]
- str += parseArg(str, arg, xargs, types) + xs[i]
- }
-
- return {
- sig: query.prepare && !xargs.dynamic && types + str,
- str: str.trim(),
- args: xargs
- }
- }
-
- function parseArg(str, arg, xargs, types) {
- return arg && arg.P === notPromise.P
- ? arg.array
- ? parseArray(arg.array, xargs, types)
- : parseHelper(str, arg, xargs, types)
- : parseValue(arg, xargs, types)
- }
-
- function parseArray(array, xargs, types) {
- return array.length === 0 ? '\'{}\'' : 'array[' + array.map((x) => Array.isArray(x)
- ? parseArray(x, xargs, types)
- : parseValue(x, xargs, types)
- ).join(',') + ']'
- }
-
- function parseHelper(str, { first, rest }, xargs, types) {
- xargs.dynamic = true
- if (first !== null && typeof first === 'object' && typeof first[0] !== 'string') {
- if (isInsert.test(str))
- return insertHelper(first, rest, xargs, types)
- else if (isSelect.test(str))
- return selectHelper(first, rest, xargs, types)
- else if (!Array.isArray(first))
- return equalsHelper(first, rest, xargs, types)
- }
-
- return escapeHelper(Array.isArray(first) ? first : [first].concat(rest))
- }
-
- function selectHelper(first, columns, xargs, types) {
- return entries(first).reduce((acc, [k, v]) =>
- acc + (!columns.length || columns.indexOf(k) > -1
- ? (acc ? ',' : '') + parseValue(v, xargs, types) + ' as ' + escape(
- transform.column.to ? transform.column.to(k) : k
- )
- : ''
- ),
- ''
- )
- }
-
- function insertHelper(first, columns, xargs, types) {
- first = Array.isArray(first) ? first : [first]
- columns = columns.length ? columns : Object.keys(first[0])
- return '(' + escapeHelper(columns) + ') values ' +
- first.reduce((acc, row) =>
- acc + (acc ? ',' : '') + '(' +
- columns.reduce((acc, k) => acc + (acc ? ',' : '') + parseValue(row[k], xargs, types), '') +
- ')',
- ''
- )
- }
-
- function equalsHelper(first, columns, xargs, types) {
- return (columns.length ? columns : Object.keys(first)).reduce((acc, k) =>
- acc + (acc ? ',' : '') + escape(
- transform.column.to ? transform.column.to(k) : k
- ) + ' = ' + parseValue(first[k], xargs, types),
- ''
- )
- }
-
- function escapeHelper(xs) {
- return xs.reduce((acc, x) => acc + (acc ? ',' : '') + escape(
- transform.column.to ? transform.column.to(x) : x
- ), '')
- }
-
- function parseValue(x, xargs, types) {
- if (x === undefined)
- throw errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' })
-
- return Array.isArray(x)
- ? x.reduce((acc, x) => acc + (acc ? ',' : '') + addValue(x, xargs, types), '')
- : x && x.P === notPromise.P
- ? parseArg('', x, xargs, types)
- : addValue(x, xargs, types)
- }
-
- function addValue(x, xargs, types) {
- const type = getType(x)
- , i = types.push(type.type)
-
- if (i > 65534)
- throw errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' })
-
- xargs.push(type)
- return '$' + i
- }
-
- function getType(x) {
- if (x == null)
- return { type: 0, value: x, raw: x }
-
- const value = x.type ? x.value : x
- , type = x.type || inferType(value)
-
- return {
- type,
- value: (options.serializers[type] || types.string.serialize)(value),
- raw: x
- }
- }
-}
-
-function parseOptions(a, b) {
- const env = process.env // eslint-disable-line
- , o = (typeof a === 'string' ? b : a) || {}
- , { url, multihost } = parseUrl(a, env)
- , auth = (url.auth || '').split(':')
- , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
- , port = o.port || url.port || env.PGPORT || 5432
- , user = o.user || o.username || auth[0] || env.PGUSERNAME || env.PGUSER || osUsername()
-
- return Object.assign({
- host : host.split(',').map(x => x.split(':')[0]),
- port : host.split(',').map(x => x.split(':')[1] || port),
- path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
- database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
- user : user,
- pass : o.pass || o.password || auth[1] || env.PGPASSWORD || '',
- max : o.max || url.query.max || 10,
- types : o.types || {},
- ssl : o.ssl || parseSSL(url.query.sslmode || url.query.ssl) || false,
- idle_timeout : o.idle_timeout || url.query.idle_timeout || env.PGIDLE_TIMEOUT || warn(o.timeout),
- connect_timeout : o.connect_timeout || url.query.connect_timeout || env.PGCONNECT_TIMEOUT || 30,
- prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true,
- onnotice : o.onnotice,
- onparameter : o.onparameter,
- transform : parseTransform(o.transform || {}),
- connection : Object.assign({ application_name: 'postgres.js' }, o.connection),
- target_session_attrs: o.target_session_attrs || url.query.target_session_attrs || env.PGTARGETSESSIONATTRS,
- debug : o.debug,
- fetch_types : 'fetch_types' in o ? o.fetch_types : true
- },
- mergeUserTypes(o.types)
- )
-}
-
-function parseTransform(x) {
- return {
- column: {
- from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
- to: x.column && x.column.to
- },
- value: {
- from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
- to: x.value && x.value.to
- },
- row: {
- from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
- to: x.row && x.row.to
- }
- }
-}
-
-function parseSSL(x) {
- return x !== 'disable' && x !== 'false' && x
-}
-
-function parseUrl(url) {
- if (typeof url !== 'string')
- return { url: { query: {} } }
-
- let host = url
- host = host.slice(host.indexOf('://') + 3)
- host = host.split(/[?/]/)[0]
- host = host.slice(host.indexOf('@') + 1)
-
- return {
- url: Url.parse(url.replace(host, host.split(',')[0]), true),
- multihost: host.indexOf(',') > -1 && host
- }
-}
-
-function warn(x) {
- typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line
- return x
-}
-
-function osUsername() {
- try {
- return require('os').userInfo().username // eslint-disable-line
- } catch (_) {
- return
- }
-}
diff --git a/lib/types.js b/lib/types.js
deleted file mode 100644
index a94a8932..00000000
--- a/lib/types.js
+++ /dev/null
@@ -1,204 +0,0 @@
-const char = module.exports.char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
-const entries = o => Object.keys(o).map(x => [x, o[x]])
-
-// These were the fastest ways to do it in Node.js v12.11.1 (add tests to revise if this changes)
-const types = module.exports.types = {
- string: {
- to: 25,
- from: null, // defaults to string
- serialize: x => '' + x
- },
- number: {
- to: 0,
- from: [21, 23, 26, 700, 701],
- serialize: x => '' + x,
- parse: x => +x
- },
- json: {
- to: 3802,
- from: [114, 3802],
- serialize: x => JSON.stringify(x),
- parse: x => JSON.parse(x)
- },
- boolean: {
- to: 16,
- from: 16,
- serialize: x => x === true ? 't' : 'f',
- parse: x => x === 't'
- },
- date: {
- to: 1184,
- from: [1082, 1114, 1184],
- serialize: x => x.toISOString(),
- parse: x => new Date(x)
- },
- bytea: {
- to: 17,
- from: 17,
- serialize: x => '\\x' + Buffer.from(x.buffer, x.byteOffset, x.byteLength).toString('hex'),
- parse: x => Buffer.from(x.slice(2), 'hex')
- }
-}
-
-const defaultHandlers = typeHandlers(types)
-
-const serializers = module.exports.serializers = defaultHandlers.serializers
-const parsers = module.exports.parsers = defaultHandlers.parsers
-
-module.exports.entries = entries
-
-module.exports.END = {}
-
-module.exports.mergeUserTypes = function(types) {
- const user = typeHandlers(types || {})
- return {
- serializers: Object.assign({}, serializers, user.serializers),
- parsers: Object.assign({}, parsers, user.parsers)
- }
-}
-
-function typeHandlers(types) {
- return Object.keys(types).reduce((acc, k) => {
- types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
- acc.serializers[types[k].to] = types[k].serialize
- return acc
- }, { parsers: {}, serializers: {} })
-}
-
-module.exports.escape = function escape(str) {
- return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
-}
-
-const type = {
- number: 0,
- bigint: 20,
- boolean: 16
-}
-
-module.exports.inferType = function inferType(x) {
- return (x && x.type) || (x instanceof Date
- ? 1184
- : Array.isArray(x)
- ? inferType(x[0])
- : x instanceof Buffer
- ? 17
- : type[typeof x] || 0)
-}
-
-const escapeBackslash = /\\/g
-const escapeQuote = /"/g
-
-function arrayEscape(x) {
- return x
- .replace(escapeBackslash, '\\\\')
- .replace(escapeQuote, '\\"')
-}
-
-module.exports.arraySerializer = function arraySerializer(xs, serializer) {
- if (!xs.length)
- return '{}'
-
- const first = xs[0]
-
- if (Array.isArray(first) && !first.type)
- return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}'
-
- return '{' + xs.map(x =>
- '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
- ).join(',') + '}'
-}
-
-const arrayParserState = {
- i: 0,
- char: null,
- str: '',
- quoted: false,
- last: 0
-}
-
-module.exports.arrayParser = function arrayParser(x, parser) {
- arrayParserState.i = arrayParserState.last = 0
- return arrayParserLoop(arrayParserState, x, parser)
-}
-
-function arrayParserLoop(s, x, parser) {
- const xs = []
- for (; s.i < x.length; s.i++) {
- s.char = x[s.i]
- if (s.quoted) {
- if (s.char === '\\') {
- s.str += x[++s.i]
- } else if (s.char === '"') {
- xs.push(parser ? parser(s.str) : s.str)
- s.str = ''
- s.quoted = x[s.i + 1] === '"'
- s.last = s.i + 2
- } else {
- s.str += s.char
- }
- } else if (s.char === '"') {
- s.quoted = true
- } else if (s.char === '{') {
- s.last = ++s.i
- xs.push(arrayParserLoop(s, x, parser))
- } else if (s.char === '}') {
- s.quoted = false
- s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
- s.last = s.i + 1
- break
- } else if (s.char === ',' && s.p !== '}' && s.p !== '"') {
- xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
- s.last = s.i + 1
- }
- s.p = s.char
- }
- s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
- return xs
-}
-
-module.exports.toCamel = x => {
- let str = x[0]
- for (let i = 1; i < x.length; i++)
- str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
- return str
-}
-
-module.exports.toPascal = x => {
- let str = x[0].toUpperCase()
- for (let i = 1; i < x.length; i++)
- str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
- return str
-}
-
-module.exports.toKebab = x => x.replace(/_/g, '-')
-
-module.exports.fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
-module.exports.fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
-module.exports.fromKebab = x => x.replace(/-/g, '_')
-
-module.exports.errorFields = entries({
- S: 'severity_local',
- V: 'severity',
- C: 'code',
- M: 'message',
- D: 'detail',
- H: 'hint',
- P: 'position',
- p: 'internal_position',
- q: 'internal_query',
- W: 'where',
- s: 'schema_name',
- t: 'table_name',
- c: 'column_name',
- d: 'data type_name',
- n: 'constraint_name',
- F: 'file',
- L: 'line',
- R: 'routine'
-}).reduce(char, {})
-
-module.exports.retryRoutines = {
- FetchPreparedStatement: true,
- RevalidateCachedQuery: true,
- transformAssignedExpr: true
-}
diff --git a/package.json b/package.json
index 4bcbef2f..2d323201 100644
--- a/package.json
+++ b/package.json
@@ -1,23 +1,43 @@
{
"name": "postgres",
- "version": "2.0.0-beta.11",
+ "version": "3.0.0-rc.2",
"description": "Fastest full featured PostgreSQL client for Node.js",
- "main": "lib/index.js",
+ "type": "module",
+ "module": "src/index.js",
+ "main": "cjs/src/index.js",
+ "exports": {
+ "import": "./src/index.js",
+ "default": "./cjs/src/index.js"
+ },
"types": "types/index.d.ts",
"typings": "types/index.d.ts",
- "type": "commonjs",
"scripts": {
- "test": "node tests/index.js",
- "lint": "eslint lib && eslint tests",
- "prepublishOnly": "npm run lint && npm test"
+ "build": "npm run build:cjs && npm run build:deno",
+ "build:cjs": "node transpile.cjs",
+ "build:deno": "node transpile.deno.js",
+ "test": "npm run test:esm && npm run test:cjs && npm run test:deno",
+ "test:esm": "node tests/index.js",
+ "test:cjs": "npm run build:cjs && pushd cjs/tests && node index.js && popd",
+ "test:deno": "npm run build:deno && pushd deno/tests && deno run --unstable --allow-all --unsafely-ignore-certificate-errors index.js && popd",
+ "lint": "eslint src && eslint tests",
+ "prepare": "npm run build",
+ "prepublishOnly": "npm run lint"
},
"files": [
- "/lib",
+ "/cjs/src",
+ "/cjs/package.json",
+ "/src",
"/types"
],
- "author": "Rasmus Porsager ",
+ "author": "Rasmus Porsager (https://www.porsager.com)",
+ "funding": {
+ "type": "individual",
+ "url": "https://github.com/sponsors/porsager"
+ },
"license": "Unlicense",
"repository": "porsager/postgres",
+ "homepage": "https://github.com/porsager/postgres",
+ "bugs": "https://github.com/porsager/postgres/issues",
"keywords": [
"driver",
"postgresql",
diff --git a/src/bytes.js b/src/bytes.js
new file mode 100644
index 00000000..6effd6e6
--- /dev/null
+++ b/src/bytes.js
@@ -0,0 +1,78 @@
+const size = 256
+let buffer = Buffer.allocUnsafe(size)
+
+const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
+ const v = x.charCodeAt(0)
+ acc[x] = () => {
+ buffer[0] = v
+ b.i = 5
+ return b
+ }
+ return acc
+}, {})
+
+const b = Object.assign(reset, messages, {
+ N: String.fromCharCode(0),
+ i: 0,
+ inc(x) {
+ b.i += x
+ return b
+ },
+ str(x) {
+ const length = Buffer.byteLength(x)
+ fit(length)
+ b.i += buffer.write(x, b.i, length, 'utf8')
+ return b
+ },
+ i16(x) {
+ fit(2)
+ buffer.writeUInt16BE(x, b.i)
+ b.i += 2
+ return b
+ },
+ i32(x, i) {
+ if (i || i === 0) {
+ buffer.writeUInt32BE(x, i)
+ return b
+ }
+ fit(4)
+ buffer.writeUInt32BE(x, b.i)
+ b.i += 4
+ return b
+ },
+ z(x) {
+ fit(x)
+ buffer.fill(0, b.i, b.i + x)
+ b.i += x
+ return b
+ },
+ raw(x) {
+ buffer = Buffer.concat([buffer.slice(0, b.i), x])
+ b.i = buffer.length
+ return b
+ },
+ end(at = 1) {
+ buffer.writeUInt32BE(b.i - at, at)
+ const out = buffer.slice(0, b.i)
+ b.i = 0
+ buffer = Buffer.allocUnsafe(size)
+ return out
+ }
+})
+
+export default b
+
+function fit(x) {
+ if (buffer.length - b.i < x) {
+ const prev = buffer
+ , length = prev.length
+
+ buffer = Buffer.allocUnsafe(length + (length >> 1) + x)
+ prev.copy(buffer)
+ }
+}
+
+function reset() {
+ b.i = 0
+ return b
+}
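`src/bytes.js` above is a small chained builder for frontend protocol messages: each one-letter method (`B`, `Q`, `S`, …) stamps the message-type byte and reserves four bytes for the length, which `end()` back-fills once the body is written. A minimal sketch of how `connection.js` uses it to build a simple-protocol Query message (see `toBuffer` further down):

```js
import b from './bytes.js'

// 'Q' = simple Query; the SQL text is NUL-terminated with b.N, and end()
// back-fills the 4-byte length at offset 1 (length excludes the type byte)
const queryMessage = b().Q().str('select 1' + b.N).end()
```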
diff --git a/src/connection.js b/src/connection.js
new file mode 100644
index 00000000..c6dcc2e9
--- /dev/null
+++ b/src/connection.js
@@ -0,0 +1,1000 @@
+import net from 'net'
+import tls from 'tls'
+import crypto from 'crypto'
+import Stream from 'stream'
+
+import { Identifier, Builder, handleValue, arrayParser, arraySerializer } from './types.js'
+import { Errors } from './errors.js'
+import Result from './result.js'
+import Queue from './queue.js'
+import { Query, CLOSE } from './query.js'
+import b from './bytes.js'
+
+export default Connection
+
+let uid = 1
+
+const Sync = b().S().end()
+ , Flush = b().H().end()
+ , SSLRequest = b().i32(8).i32(80877103).end(8)
+ , ExecuteUnnamed = Buffer.concat([b().E().str(b.N).i32(0).end(), Sync])
+ , DescribeUnnamed = b().D().str('S').str(b.N).end()
+ , noop = () => { /* noop */ }
+
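+// Error routines that indicate a cached prepared statement went stale
+// (e.g. after a schema change); such queries get one transparent retry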
+const retryRoutines = new Set([
+ 'FetchPreparedStatement',
+ 'RevalidateCachedQuery',
+ 'transformAssignedExpr'
+])
+
+const errorFields = {
+ 83 : 'severity_local', // S
+ 86 : 'severity', // V
+ 67 : 'code', // C
+ 77 : 'message', // M
+ 68 : 'detail', // D
+ 72 : 'hint', // H
+ 80 : 'position', // P
+ 112 : 'internal_position', // p
+ 113 : 'internal_query', // q
+ 87 : 'where', // W
+ 115 : 'schema_name', // s
+ 116 : 'table_name', // t
+ 99 : 'column_name', // c
+ 100 : 'data type_name', // d
+ 110 : 'constraint_name', // n
+ 70 : 'file', // F
+ 76 : 'line', // L
+ 82 : 'routine' // R
+}
+
+function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) {
+ const {
+ ssl,
+ max,
+ user,
+ host,
+ port,
+ database,
+ parsers,
+ transform,
+ onnotice,
+ onnotify,
+ onparameter,
+ max_pipeline,
+ keep_alive,
+ backoff,
+ target_session_attrs
+ } = options
+
+ const sent = Queue()
+ , id = uid++
+ , backend = { pid: null, secret: null }
+ , idleTimer = timer(end, options.idle_timeout)
+ , lifeTimer = timer(end, options.max_lifetime)
+ , connectTimer = timer(connectTimedOut, options.connect_timeout)
+
+ let socket = createSocket()
+ , result = new Result()
+ , incoming = Buffer.alloc(0)
+ , needsTypes = options.fetch_types
+ , backendParameters = {}
+ , statements = {}
+ , state = 'closed'
+ , statementId = Math.random().toString(36).slice(2)
+ , statementCount = 1
+ , closedDate = 0
+ , remaining = 0
+ , hostIndex = 0
+ , retries = 0
+ , length = 0
+ , delay = 0
+ , rows = 0
+ , serverSignature = null
+ , nextWriteTimer = null
+ , terminated = false
+ , incomings = null
+ , results = null
+ , initial = null
+ , ending = null
+ , stream = null
+ , chunk = null
+ , ended = null
+ , nonce = null
+ , query = null
+ , final = null
+
+ const connection = {
+ get state() { return state },
+ set state(x) {
+ state = x
+ state === 'open'
+ ? idleTimer.start()
+ : idleTimer.cancel()
+ },
+ connect(query) {
+ initial = query
+ reconnect()
+ },
+ terminate,
+ execute,
+ cancel,
+ end,
+ count: 0,
+ id
+ }
+
+ return connection
+
+ function createSocket() {
+ const x = net.Socket()
+ x.on('error', error)
+ x.on('close', closed)
+ x.on('drain', drain)
+ return x
+ }
+
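+  // Cancellation uses a separate socket: a CancelRequest (code 80877102)
+  // carrying the pid/secret pair the backend sent in BackendKeyData at startup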
+ function cancel({ pid, secret }, resolve, reject) {
+ socket.removeAllListeners()
+ socket = net.Socket()
+ socket.on('connect', () => socket.write(b().i32(16).i32(80877102).i32(pid).i32(secret).end(16)))
+ socket.once('error', reject)
+ socket.once('close', resolve)
+ connect()
+ }
+
+ function execute(q) {
+ if (terminated)
+ return q.reject(Errors.connection('CONNECTION_DESTROYED', options))
+
+ if (q.cancelled)
+ return
+
+ try {
+ q.state = backend
+ query
+ ? sent.push(q)
+ : (query = q, query.active = true)
+
+ build(q)
+ return write(toBuffer(q))
+ && !q.describeFirst
+ && sent.length < max_pipeline
+ && (!q.options.onexecute || q.options.onexecute(connection))
+ } catch (error) {
+ sent.length === 0 && write(Sync)
+ errored(error)
+ return true
+ }
+ }
+
+ function toBuffer(q) {
+ if (q.parameters.length >= 65534)
+ throw Errors.generic('MAX_PARAMETERS_EXCEEDED', 'Max number of parameters (65534) exceeded')
+
+ return q.options.simple
+ ? b().Q().str(q.strings[0] + b.N).end()
+ : q.describeFirst
+ ? Buffer.concat([describe(q), Flush])
+ : q.prepare
+ ? q.prepared
+ ? prepared(q)
+ : Buffer.concat([describe(q), prepared(q)])
+ : unnamed(q)
+ }
+
+ function describe(q) {
+ return Buffer.concat([
+ Parse(q.statement.string, q.parameters, q.statement.types, q.statement.name),
+ Describe('S', q.statement.name)
+ ])
+ }
+
+ function prepared(q) {
+ return Buffer.concat([
+ Bind(q.parameters, q.statement.types, q.statement.name, q.cursorName),
+ q.cursorFn
+ ? Execute('', q.cursorRows)
+ : ExecuteUnnamed
+ ])
+ }
+
+ function unnamed(q) {
+ return Buffer.concat([
+ Parse(q.statement.string, q.parameters, q.statement.types),
+ DescribeUnnamed,
+ prepared(q)
+ ])
+ }
+
+ function build(q) {
+ const parameters = []
+ , types = []
+
+ const string = stringify(q, q.strings[0], q.args[0], parameters, types)
+
+ !q.tagged && q.args.forEach(x => handleValue(x, parameters, types))
+
+ q.prepare = options.prepare && ('prepare' in q.options ? q.options.prepare : true)
+ q.string = string
+ q.signature = q.prepare && types + string
+ q.onlyDescribe && (delete statements[q.signature])
+ q.parameters = q.parameters || parameters
+ q.prepared = q.prepare && q.signature in statements
+ q.describeFirst = q.onlyDescribe || (parameters.length && !q.prepared)
+ q.statement = q.prepared
+ ? statements[q.signature]
+ : { string, types, name: q.prepare ? statementId + statementCount++ : '' }
+
+ typeof options.debug === 'function' && options.debug(id, string, parameters, types)
+ }
+
+ function stringify(q, string, value, parameters, types) {
+ for (let i = 1; i < q.strings.length; i++) {
+ string += (
+ value instanceof Query ? fragment(string, value, parameters, types) :
+ value instanceof Identifier ? value.value :
+ value instanceof Builder ? value.build(string, parameters, types, options.transform) :
+ handleValue(value, parameters, types)
+ ) + q.strings[i]
+ value = q.args[i]
+ }
+
+ return string
+ }
+
+ function fragment(string, q, parameters, types) {
+ q.fragment = true
+ return stringify(q, q.strings[0], q.args[0], parameters, types)
+ }
+
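+  // Coalesce small writes: chunks accumulate until 1024 bytes or an explicit
+  // callback forces a flush; otherwise a setImmediate flushes on the next tick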
+ function write(x, fn) {
+ chunk = chunk ? Buffer.concat([chunk, x]) : Buffer.from(x)
+ if (fn || chunk.length >= 1024)
+ return nextWrite(fn)
+ nextWriteTimer === null && (nextWriteTimer = setImmediate(nextWrite))
+ return true
+ }
+
+ function nextWrite(fn) {
+ const x = socket.write(chunk, fn)
+ nextWriteTimer !== null && clearImmediate(nextWriteTimer)
+ chunk = nextWriteTimer = null
+ return x
+ }
+
+ function connectTimedOut() {
+ errored(Errors.connection('CONNECT_TIMEOUT', options, socket))
+ socket.destroy()
+ }
+
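+  // SSL negotiation: send SSLRequest and read a single byte back — 'S' means
+  // the server accepts TLS; anything else falls back when ssl === 'prefer'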
+ async function secure() {
+ write(SSLRequest)
+ const canSSL = await new Promise(r => socket.once('data', x => r(x[0] === 83))) // S
+
+ if (!canSSL && ssl === 'prefer')
+ return connected()
+
+ socket.removeAllListeners()
+ socket = tls.connect({
+ socket,
+ ...(ssl === 'require' || ssl === 'allow' || ssl === 'prefer'
+ ? { rejectUnauthorized: false }
+ : ssl
+ )
+ })
+ socket.on('secureConnect', connected)
+ socket.on('error', error)
+ socket.on('close', closed)
+ socket.on('drain', drain)
+ }
+
+ /* c8 ignore next 3 */
+ function drain() {
+ ondrain(connection)
+ }
+
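+  // Frame incoming bytes into protocol messages: byte 0 is the type, bytes
+  // 1-4 the big-endian length (which includes itself), buffering partials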
+ function data(x) {
+ if (incomings) {
+ incomings.push(x)
+ remaining -= x.length
+ if (remaining >= 0)
+ return
+ }
+
+ incoming = incomings
+ ? Buffer.concat(incomings, length - remaining)
+ : incoming.length === 0
+ ? x
+ : Buffer.concat([incoming, x], incoming.length + x.length)
+
+ while (incoming.length > 4) {
+ length = incoming.readUInt32BE(1)
+ if (length >= incoming.length) {
+ remaining = length - incoming.length
+ incomings = [incoming]
+ break
+ }
+
+ try {
+ handle(incoming.slice(0, length + 1))
+ } catch (e) {
+ query && (query.cursorFn || query.describeFirst) && write(Sync)
+ errored(e)
+ }
+ incoming = incoming.slice(length + 1)
+ remaining = 0
+ incomings = null
+ }
+ }
+
+ function connect() {
+ terminated = false
+ backendParameters = {}
+ connectTimer.start()
+ socket.on('connect', ssl ? secure : connected)
+
+ if (options.path)
+ return socket.connect(options.path)
+
+ socket.connect(port[hostIndex], host[hostIndex])
+ hostIndex = (hostIndex + 1) % port.length
+ }
+
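+  // Wait out the remainder of the backoff window (set in closed()) before
+  // dialing the next attempt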
+ function reconnect() {
+ setTimeout(connect, closedDate ? closedDate + delay - Date.now() : 0)
+ }
+
+ function connected() {
+ try {
+ statements = {}
+ needsTypes = options.fetch_types
+ statementId = Math.random().toString(36).slice(2)
+ statementCount = 1
+ lifeTimer.start()
+ socket.on('data', data)
+ socket.setKeepAlive(true, 1000 * keep_alive)
+ const s = StartupMessage()
+ write(s)
+ } catch (err) {
+ error(err)
+ }
+ }
+
+ function error(err) {
+ if (connection.state === 'connecting' && options.host[retries + 1])
+ return
+
+ errored(err)
+ while (sent.length)
+ queryError(sent.shift(), err)
+ }
+
+ function errored(err) {
+ stream && (stream.destroy(err), stream = null)
+ query && queryError(query, err)
+ initial && (queryError(initial, err), initial = null)
+ }
+
+ function queryError(query, err) {
+ query.reject(Object.create(err, {
+ stack: { value: err.stack + query.origin.replace(/.*\n/, '\n'), enumerable: options.debug },
+ query: { value: query.string, enumerable: options.debug },
+ parameters: { value: query.parameters, enumerable: options.debug },
+ args: { value: query.args, enumerable: options.debug },
+ types: { value: query.statement && query.statement.types, enumerable: options.debug }
+ }))
+ }
+
+ function end() {
+ return ending || (
+ !connection.reserved && onend(connection),
+ !connection.reserved && !initial && !query && sent.length === 0
+ ? Promise.resolve(terminate())
+ : ending = new Promise(r => ended = r)
+ )
+ }
+
+ function terminate() {
+ terminated = true
+ if (stream || query || initial || sent.length)
+ error(Errors.connection('CONNECTION_DESTROYED', options))
+
+ clearImmediate(nextWriteTimer)
+ socket.removeListener('data', data)
+ socket.removeListener('connect', connected)
+ socket.readyState !== 'closed' && socket.end(b().X().end())
+ ended && (ended(), ending = ended = null)
+ }
+
+ function closed(hadError) {
+ incoming = Buffer.alloc(0)
+ remaining = 0
+ incomings = null
+ clearImmediate(nextWriteTimer)
+ socket.removeListener('data', data)
+ socket.removeListener('connect', connected)
+ idleTimer.cancel()
+ lifeTimer.cancel()
+ connectTimer.cancel()
+
+ if (socket.encrypted) {
+ socket.removeAllListeners()
+ socket = createSocket()
+ }
+
+ if (initial)
+ return reconnect()
+
+ !hadError && (query || sent.length) && error(Errors.connection('CONNECTION_CLOSED', options, socket))
+ closedDate = Date.now()
+ hadError && options.shared.retries++
+ delay = (typeof backoff === 'function' ? backoff(options.shared.retries) : backoff) * 1000
+ onclose(connection)
+ }
+
+ /* Handlers */
+ function handle(xs, x = xs[0]) {
+ (
+ x === 68 ? DataRow : // D
+ x === 100 ? CopyData : // d
+ x === 65 ? NotificationResponse : // A
+ x === 83 ? ParameterStatus : // S
+ x === 90 ? ReadyForQuery : // Z
+ x === 67 ? CommandComplete : // C
+ x === 50 ? BindComplete : // 2
+ x === 49 ? ParseComplete : // 1
+ x === 116 ? ParameterDescription : // t
+ x === 84 ? RowDescription : // T
+ x === 82 ? Authentication : // R
+ x === 110 ? NoData : // n
+ x === 75 ? BackendKeyData : // K
+ x === 69 ? ErrorResponse : // E
+ x === 115 ? PortalSuspended : // s
+ x === 51 ? CloseComplete : // 3
+ x === 71 ? CopyInResponse : // G
+ x === 78 ? NoticeResponse : // N
+ x === 72 ? CopyOutResponse : // H
+ x === 99 ? CopyDone : // c
+ x === 73 ? EmptyQueryResponse : // I
+ x === 86 ? FunctionCallResponse : // V
+ x === 118 ? NegotiateProtocolVersion : // v
+ x === 87 ? CopyBothResponse : // W
+ /* c8 ignore next */
+ UnknownMessage
+ )(xs)
+ }
+
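+  // Each DataRow column is a 4-byte length (-1 denotes null) followed by
+  // the value, decoded with the column's registered parser when one exists.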
+ function DataRow(x) {
+ let index = 7
+ let length
+ let column
+ let value
+
+ const row = query.isRaw ? new Array(query.statement.columns.length) : {}
+ for (let i = 0; i < query.statement.columns.length; i++) {
+ column = query.statement.columns[i]
+ length = x.readInt32BE(index)
+ index += 4
+
+ value = length === -1
+ ? null
+ : query.isRaw
+ ? x.slice(index, index += length)
+ : column.parser === undefined
+ ? x.toString('utf8', index, index += length)
+ : column.parser.array === true
+ ? column.parser(x.toString('utf8', index + 1, index += length))
+ : column.parser(x.toString('utf8', index, index += length))
+
+ query.isRaw
+ ? (row[i] = value)
+ : (row[column.name] = transform.value.from ? transform.value.from(value) : value)
+ }
+
+ query.forEachFn
+ ? query.forEachFn(transform.row.from ? transform.row.from(row) : row, result)
+ : (result[rows++] = transform.row.from ? transform.row.from(row) : row)
+ }
+
+ function ParameterStatus(x) {
+ const [k, v] = x.toString('utf8', 5, x.length - 1).split(b.N)
+ backendParameters[k] = v
+ if (options.parameters[k] !== v) {
+ options.parameters[k] = v
+ onparameter && onparameter(k, v)
+ }
+ }
+
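+  // ReadyForQuery marks the end of a request cycle: resolve simple queries,
+  // verify target_session_attrs on the initial connection, then hand the
+  // connection back to the pool or pick up the next sent query.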
+ function ReadyForQuery(x) {
+ query && query.options.simple && query.resolve(results || result)
+ query = results = null
+ result = new Result()
+ connectTimer.cancel()
+
+ if (initial) {
+ if (target_session_attrs) {
+ if (!backendParameters.in_hot_standby || !backendParameters.default_transaction_read_only)
+ return fetchState()
+ else if (tryNext(target_session_attrs, backendParameters))
+ return terminate()
+ }
+
+ if (needsTypes)
+ return fetchArrayTypes()
+
+ execute(initial)
+ options.shared.retries = retries = initial = 0
+ return
+ }
+
+ while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled)
+ Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject)
+
+ if (query)
+ return // Consider opening if able and sent.length < 50
+
+ connection.reserved
+ ? x[5] === 73 // I
+ ? ending
+ ? terminate()
+ : (connection.reserved = null, onopen(connection))
+ : connection.reserved()
+ : ending
+ ? terminate()
+ : onopen(connection)
+ }
+
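+  // CommandComplete carries a tag like 'INSERT 0 1' or 'SELECT 5'; scan
+  // backwards to split the trailing row count from the command name.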
+ function CommandComplete(x) {
+ rows = 0
+
+ for (let i = x.length - 1; i > 0; i--) {
+ if (x[i] === 32 && x[i + 1] < 58 && result.count === null)
+ result.count = +x.toString('utf8', i + 1, x.length - 1)
+ if (x[i - 1] >= 65) {
+ result.command = x.toString('utf8', 5, i)
+ result.state = backend
+ break
+ }
+ }
+
+ final && (final(), final = null)
+
+ if (result.command === 'BEGIN' && max !== 1 && !connection.reserved)
+ return errored(Errors.generic('UNSAFE_TRANSACTION', 'Only use sql.begin or max: 1'))
+
+ if (query.options.simple)
+ return
+
+ if (query.cursorFn) {
+ result.count && query.cursorFn(result)
+ write(Sync)
+ }
+
+ query.resolve(result)
+ }
+
+ function ParseComplete() {
+ query.parsing = false
+ }
+
+ function BindComplete() {
+ !result.statement && (result.statement = query.statement)
+ result.columns = query.statement.columns
+ }
+
+ function ParameterDescription(x) {
+ const length = x.readUInt16BE(5)
+
+ for (let i = 0; i < length; ++i)
+ !query.statement.types[i] && (query.statement.types[i] = x.readUInt32BE(7 + i * 4))
+
+ query.prepare && (statements[query.signature] = query.statement)
+ query.describeFirst && !query.onlyDescribe && (write(prepared(query)), query.describeFirst = false)
+ }
+
+ function RowDescription(x) {
+ if (result.command) {
+ results = results || [result]
+ results.push(result = new Result())
+ result.count = null
+ query.statement.columns = null
+ }
+
+ const length = x.readUInt16BE(5)
+ let index = 7
+ let start
+
+ query.statement.columns = Array(length)
+
+ for (let i = 0; i < length; ++i) {
+ start = index
+ while (x[index++] !== 0);
+ const type = x.readUInt32BE(index + 6)
+ query.statement.columns[i] = {
+ name: transform.column.from
+ ? transform.column.from(x.toString('utf8', start, index - 1))
+ : x.toString('utf8', start, index - 1),
+ parser: parsers[type],
+ type
+ }
+ index += 18
+ }
+
+ result.statement = query.statement
+ if (query.onlyDescribe)
+ return (query.resolve(query.statement), write(Sync))
+ }
+
+ async function Authentication(x, type = x.readUInt32BE(5)) {
+ (
+ type === 3 ? AuthenticationCleartextPassword :
+ type === 5 ? AuthenticationMD5Password :
+ type === 10 ? SASL :
+ type === 11 ? SASLContinue :
+ type === 12 ? SASLFinal :
+ type !== 0 ? UnknownAuth :
+ noop
+ )(x, type)
+ }
+
+ /* c8 ignore next 5 */
+ async function AuthenticationCleartextPassword() {
+ write(
+ b().p().str(await Pass()).z(1).end()
+ )
+ }
+
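+  // MD5 auth: respond with 'md5' + md5(md5(password + username) + salt),
+  // where the 4-byte salt starts at offset 9 of the Authentication message.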
+ async function AuthenticationMD5Password(x) {
+ write(
+ b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end()
+ )
+ }
+
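+  // SCRAM-SHA-256 (RFC 7677): SASL sends the client-first message with a
+  // random nonce, SASLContinue derives the client proof from the salted
+  // password, and SASLFinal checks the server signature before proceeding.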
+ function SASL() {
+ b().p().str('SCRAM-SHA-256' + b.N)
+ const i = b.i
+ nonce = crypto.randomBytes(18).toString('base64')
+ write(b.inc(4).str('n,,n=*,r=' + nonce).i32(b.i - i - 4, i).end())
+ }
+
+ async function SASLContinue(x) {
+ const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {})
+
+ const saltedPassword = crypto.pbkdf2Sync(
+ await Pass(),
+ Buffer.from(res.s, 'base64'),
+ parseInt(res.i), 32,
+ 'sha256'
+ )
+
+ const clientKey = hmac(saltedPassword, 'Client Key')
+
+ const auth = 'n=*,r=' + nonce + ','
+ + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i
+ + ',c=biws,r=' + res.r
+
+ serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64')
+
+ write(
+ b().p().str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')).end()
+ )
+ }
+
+ function SASLFinal(x) {
+ if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature)
+ return
+ /* c8 ignore next 5 */
+ errored(Errors.generic('SASL_SIGNATURE_MISMATCH', 'The server did not return the correct signature'))
+ socket.destroy()
+ }
+
+ function Pass() {
+ return Promise.resolve(typeof options.pass === 'function'
+ ? options.pass()
+ : options.pass
+ )
+ }
+
+ function NoData() {
+ result.statement = query.statement
+ result.statement.columns = []
+ if (query.onlyDescribe)
+ return (query.resolve(query.statement), write(Sync))
+ }
+
+ function BackendKeyData(x) {
+ backend.pid = x.readUInt32BE(5)
+ backend.secret = x.readUInt32BE(9)
+ }
+
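+  // Fetches element/array oid pairs from pg_catalog so array columns can be
+  // parsed and serialized using their element type's handlers.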
+ async function fetchArrayTypes() {
+ needsTypes = false
+ const types = await new Query([`
+ select b.oid, b.typarray
+ from pg_catalog.pg_type a
+ left join pg_catalog.pg_type b on b.oid = a.typelem
+ where a.typcategory = 'A'
+ group by b.oid, b.typarray
+ order by b.oid
+ `], [], execute)
+ types.forEach(({ oid, typarray }) => addArrayType(oid, typarray))
+ }
+
+ function addArrayType(oid, typarray) {
+ const parser = options.parsers[oid]
+ options.shared.typeArrayMap[oid] = typarray
+ options.parsers[typarray] = (xs) => arrayParser(xs, parser)
+ options.parsers[typarray].array = true
+ options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid])
+ }
+
+ function tryNext(x, xs) {
+ return (
+ (x === 'read-write' && xs.default_transaction_read_only === 'on') ||
+ (x === 'read-only' && xs.default_transaction_read_only === 'off') ||
+ (x === 'primary' && xs.in_hot_standby === 'off') ||
+ (x === 'standby' && xs.in_hot_standby === 'on') ||
+ (x === 'prefer-standby' && xs.in_hot_standby === 'off' && options.host[retries])
+ )
+ }
+
+ function fetchState() {
+ const query = new Query([`
+ show transaction_read_only;
+ select pg_catalog.pg_is_in_recovery()
+ `], [], execute, null, { simple: true })
+ query.resolve = ([[a], [b]]) => {
+ backendParameters.default_transaction_read_only = a.transaction_read_only
+ backendParameters.in_hot_standby = b.pg_is_in_recovery ? 'on' : 'off'
+ }
+ query.execute()
+ }
+
+ function ErrorResponse(x) {
+ query && (query.cursorFn || query.describeFirst) && write(Sync)
+ const error = Errors.postgres(parseError(x))
+ query && query.retried
+ ? errored(query.retried)
+ : query && retryRoutines.has(error.routine)
+ ? retry(query, error)
+ : errored(error)
+ }
+
+ function retry(q, error) {
+ delete statements[q.signature]
+ q.retried = error
+ execute(q)
+ }
+
+ function NotificationResponse(x) {
+ if (!onnotify)
+ return
+
+ let index = 9
+ while (x[index++] !== 0);
+ onnotify(
+ x.toString('utf8', 9, index - 1),
+ x.toString('utf8', index, x.length - 1)
+ )
+ }
+
+ async function PortalSuspended() {
+ try {
+ const x = await Promise.resolve(query.cursorFn(result))
+ rows = 0
+ x === CLOSE
+ ? write(Close(query.portal))
+ : (result = new Result(), write(Execute('', query.cursorRows)))
+ } catch (err) {
+ write(Sync)
+ query.reject(err)
+ }
+ }
+
+ function CloseComplete() {
+ result.count && query.cursorFn(result)
+ query.resolve(result)
+ }
+
+ function CopyInResponse() {
+ stream = new Stream.Writable({
+ write(chunk, encoding, callback) {
+ socket.write(b().d().raw(chunk).end(), callback)
+ },
+ destroy(error, callback) {
+ callback(error)
+ socket.write(b().f().str(error + b.N).end())
+ },
+ final(callback) {
+ socket.write(b().c().end())
+ final = callback
+ }
+ })
+ query.resolve(stream)
+ }
+
+ function CopyOutResponse() {
+ stream = new Stream.Readable({
+ read() { socket.resume() }
+ })
+ query.resolve(stream)
+ }
+
+ /* c8 ignore next 3 */
+ function CopyBothResponse() {
+ stream = new Stream.Duplex({
+ read() { socket.resume() },
+ /* c8 ignore next 11 */
+ write(chunk, encoding, callback) {
+ socket.write(b().d().raw(chunk).end(), callback)
+ },
+ destroy(error, callback) {
+ callback(error)
+ socket.write(b().f().str(error + b.N).end())
+ },
+ final(callback) {
+ socket.write(b().c().end())
+ final = callback
+ }
+ })
+ query.resolve(stream)
+ }
+
+ function CopyData(x) {
+ stream.push(x.slice(5)) || socket.pause()
+ }
+
+ function CopyDone() {
+ stream.push(null)
+ stream = null
+ }
+
+ function NoticeResponse(x) {
+ onnotice
+ ? onnotice(parseError(x))
+ : console.log(parseError(x)) // eslint-disable-line
+  }
+
+ /* c8 ignore next 3 */
+ function EmptyQueryResponse() {
+ /* noop */
+ }
+
+ /* c8 ignore next 3 */
+ function FunctionCallResponse() {
+ errored(Errors.notSupported('FunctionCallResponse'))
+ }
+
+ /* c8 ignore next 3 */
+ function NegotiateProtocolVersion() {
+ errored(Errors.notSupported('NegotiateProtocolVersion'))
+ }
+
+ /* c8 ignore next 3 */
+ function UnknownMessage(x) {
+ console.error('Postgres.js : Unknown Message:', x[0]) // eslint-disable-line
+ }
+
+ /* c8 ignore next 3 */
+ function UnknownAuth(x, type) {
+ console.error('Postgres.js : Unknown Auth:', type) // eslint-disable-line
+ }
+
+ /* Messages */
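+  // Builders for frontend protocol messages, written through the shared
+  // binary builder `b`. Bind serializes every parameter with its type's
+  // serializer and encodes null as length -1 (0xFFFFFFFF).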
+ function Bind(parameters, types, statement = '', portal = '') {
+ let prev
+ , type
+
+ b().B().str(portal + b.N).str(statement + b.N).i16(0).i16(parameters.length)
+
+ parameters.forEach((x, i) => {
+ if (x === null)
+ return b.i32(0xFFFFFFFF)
+
+ type = types[i]
+ parameters[i] = x = type in options.serializers
+ ? options.serializers[type](x)
+ : '' + x
+
+ prev = b.i
+ b.inc(4).str(x).i32(b.i - prev - 4, prev)
+ })
+
+ b.i16(0)
+
+ return b.end()
+ }
+
+ function Parse(str, parameters, types, name = '') {
+ b().P().str(name + b.N).str(str + b.N).i16(parameters.length)
+ parameters.forEach((x, i) => b.i32(types[i] || 0))
+ return b.end()
+ }
+
+ function Describe(x, name = '') {
+ return b().D().str(x).str(name + b.N).end()
+ }
+
+ function Execute(portal = '', rows = 0) {
+ return Buffer.concat([
+ b().E().str(portal + b.N).i32(rows).end(),
+ Flush
+ ])
+ }
+
+ function Close(portal = '') {
+ return Buffer.concat([
+ b().C().str('P').str(portal + b.N).end(),
+ b().S().end()
+ ])
+ }
+
+ function StartupMessage() {
+ return b().inc(4).i16(3).z(2).str(
+ Object.entries(Object.assign({
+ user,
+ database,
+ client_encoding: '\'utf-8\''
+ },
+ options.connection
+ )).filter(([, v]) => v).map(([k, v]) => k + b.N + v).join(b.N)
+ ).z(2).end(0)
+ }
+
+}
+
+function parseError(x) {
+ const error = {}
+ let start = 5
+ for (let i = 5; i < x.length - 1; i++) {
+ if (x[i] === 0) {
+ error[errorFields[x[start]]] = x.toString('utf8', start + 1, i)
+ start = i + 1
+ }
+ }
+ return error
+}
+
+function md5(x) {
+ return crypto.createHash('md5').update(x).digest('hex')
+}
+
+function hmac(key, x) {
+ return crypto.createHmac('sha256', key).update(x).digest()
+}
+
+function sha256(x) {
+ return crypto.createHash('sha256').update(x).digest()
+}
+
+function xor(a, b) {
+ const length = Math.max(a.length, b.length)
+ const buffer = Buffer.allocUnsafe(length)
+ for (let i = 0; i < length; i++)
+ buffer[i] = a[i] ^ b[i]
+ return buffer
+}
+
+function timer(fn, seconds) {
+ seconds = typeof seconds === 'function' ? seconds() : seconds
+ if (!seconds)
+ return { cancel: noop, start: noop }
+
+ let timer
+ return {
+ cancel() {
+ timer && (clearTimeout(timer), timer = null)
+ },
+ start() {
+ timer && clearTimeout(timer)
+ timer = setTimeout(done, seconds * 1000, arguments).unref()
+ }
+ }
+
+ function done(args) {
+ fn.apply(null, args)
+ timer = null
+ }
+}
diff --git a/src/errors.js b/src/errors.js
new file mode 100644
index 00000000..0ff83c42
--- /dev/null
+++ b/src/errors.js
@@ -0,0 +1,53 @@
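+// PostgresError exposes the fields of the server's ErrorResponse (code,
+// severity, detail, ...) directly on a regular Error instance.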
+export class PostgresError extends Error {
+ constructor(x) {
+ super(x.message)
+ this.name = this.constructor.name
+ Object.assign(this, x)
+ }
+}
+
+export const Errors = {
+ connection,
+ postgres,
+ generic,
+ notSupported
+}
+
+function connection(x, options, socket) {
+ const { host, port } = socket || options
+ const error = Object.assign(
+ new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
+ {
+ code: x,
+ errno: x,
+ address: options.path || host
+ }, options.path ? {} : { port: port }
+ )
+ Error.captureStackTrace(error, connection)
+ return error
+}
+
+function postgres(x) {
+ const error = new PostgresError(x)
+ Error.captureStackTrace(error, postgres)
+ return error
+}
+
+function generic(code, message) {
+ const error = Object.assign(new Error(code + ': ' + message), { code })
+ Error.captureStackTrace(error, generic)
+ return error
+}
+
+/* c8 ignore next 10 */
+function notSupported(x) {
+ const error = Object.assign(
+ new Error(x + ' (B) is not supported'),
+ {
+ code: 'MESSAGE_NOT_SUPPORTED',
+ name: x
+ }
+ )
+ Error.captureStackTrace(error, notSupported)
+ return error
+}
diff --git a/src/index.js b/src/index.js
new file mode 100644
index 00000000..691a2c97
--- /dev/null
+++ b/src/index.js
@@ -0,0 +1,537 @@
+import os from 'os'
+import fs from 'fs'
+import Stream from 'stream'
+
+import {
+ mergeUserTypes,
+ inferType,
+ Parameter,
+ Identifier,
+ Builder,
+ toPascal,
+ toCamel,
+ toKebab,
+ fromPascal,
+ fromCamel,
+ fromKebab
+} from './types.js'
+
+import Connection from './connection.js'
+import { Query, CLOSE } from './query.js'
+import Queue from './queue.js'
+import { Errors, PostgresError } from './errors.js'
+import Subscribe from './subscribe.js'
+
+Object.assign(Postgres, {
+ PostgresError,
+ toPascal,
+ toCamel,
+ toKebab,
+ fromPascal,
+ fromCamel,
+ fromKebab,
+ BigInt
+})
+
+export default Postgres
+
+function Postgres(a, b) {
+ const options = parseOptions(a, b)
+ , subscribe = Subscribe(Postgres, { ...options })
+
+ let ending = false
+
+ const queries = Queue()
+ , connections = [...Array(options.max)].map(() => Connection(options, { onopen, onend, ondrain, onclose }))
+ , closed = Queue(connections)
+ , reserved = Queue()
+ , open = Queue()
+ , busy = Queue()
+ , full = Queue()
+ , ended = Queue()
+ , connecting = Queue()
+ , queues = { closed, ended, connecting, reserved, open, busy, full }
+
+ const sql = Sql(handler)
+
+ Object.assign(sql, {
+ get parameters() { return options.parameters },
+ largeObject,
+ subscribe,
+ CLOSE,
+ END: CLOSE,
+ PostgresError,
+ options,
+ listen,
+ notify,
+ begin,
+ end
+ })
+
+ return sql
+
+ function Sql(handler, instant) {
+ handler.debug = options.debug
+
+ Object.entries(options.types).reduce((acc, [name, type]) => {
+ acc[name] = (x) => new Parameter(x, type.to)
+ return acc
+ }, typed)
+
+ Object.assign(sql, {
+ types: typed,
+ typed,
+ unsafe,
+ array,
+ json,
+ file
+ })
+
+ return sql
+
+ function typed(value, type) {
+ return new Parameter(value, type)
+ }
+
+ function sql(strings, ...args) {
+ const query = strings && Array.isArray(strings.raw)
+ ? new Query(strings, args, handler, cancel)
+ : typeof strings === 'string' && !args.length
+ ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
+ : new Builder(strings, args)
+ instant && query instanceof Query && query.execute()
+ return query
+ }
+
+ function unsafe(string, args = [], options = {}) {
+ arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+ const query = new Query([string], args, handler, cancel, {
+ prepare: false,
+ ...options,
+ simple: 'simple' in options ? options.simple : args.length === 0
+ })
+ instant && query.execute()
+ return query
+ }
+
+ function file(path, args = [], options = {}) {
+ arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+ const query = new Query([], args, (query) => {
+ fs.readFile(path, 'utf8', (err, string) => {
+ if (err)
+ return query.reject(err)
+
+ query.strings = [string]
+ handler(query)
+ })
+ }, cancel, {
+ ...options,
+ simple: 'simple' in options ? options.simple : args.length === 0
+ })
+ instant && query.execute()
+ return query
+ }
+ }
+
+ async function listen(name, fn) {
+ const sql = listen.sql || (listen.sql = Postgres({
+ ...options,
+ max: 1,
+ idle_timeout: null,
+ max_lifetime: null,
+ fetch_types: false,
+ onclose() {
+ Object.entries(listen.channels).forEach(([channel, { listeners }]) => {
+ delete listen.channels[channel]
+ Promise.all(listeners.map(fn => listen(channel, fn).catch(() => { /* noop */ })))
+ })
+ },
+ onnotify(c, x) {
+ c in listen.channels && listen.channels[c].listeners.forEach(fn => fn(x))
+ }
+ }))
+
+ const channels = listen.channels || (listen.channels = {})
+ , exists = name in channels
+ , channel = exists ? channels[name] : (channels[name] = { listeners: [fn] })
+
+ if (exists) {
+ channel.listeners.push(fn)
+ return Promise.resolve({ ...channel.result, unlisten })
+ }
+
+ channel.result = await sql`listen ${ sql(name) }`
+ channel.result.unlisten = unlisten
+
+ return channel.result
+
+ async function unlisten() {
+ if (name in channels === false)
+ return
+
+ channel.listeners = channel.listeners.filter(x => x !== fn)
+ if (channels[name].listeners.length)
+ return
+
+ delete channels[name]
+ return sql`unlisten ${ sql(name) }`
+ }
+ }
+
+ async function notify(channel, payload) {
+ return await sql`select pg_notify(${ channel }, ${ '' + payload })`
+ }
+
+ async function begin(options, fn) {
+ !fn && (fn = options, options = '')
+ const queries = Queue()
+ let savepoints = 0
+ , connection
+
+    await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute })
+    return await scope(connection, fn)
+
+ async function scope(c, fn, name) {
+ const sql = Sql(handler, true)
+ sql.savepoint = savepoint
+ let errored
+ name && await sql`savepoint ${ sql(name) }`
+ try {
+ const result = await new Promise((resolve, reject) => {
+ errored = reject
+ const x = fn(sql)
+ Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
+ })
+ !name && await sql`commit`
+ return result
+ } catch (e) {
+ await (name
+ ? sql`rollback to ${ sql(name) }`
+ : sql`rollback`
+ )
+ throw e
+ }
+
+ function savepoint(name, fn) {
+ if (name && Array.isArray(name.raw))
+ return savepoint(sql => sql.apply(sql, arguments))
+
+ arguments.length === 1 && (fn = name, name = null)
+ return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
+ }
+
+ function handler(q) {
+ errored && q.catch(errored)
+ c.state === 'full'
+ ? queries.push(q)
+ : c.execute(q) || (c.state = 'full', full.push(c))
+ }
+ }
+
+ function onexecute(c) {
+ queues[c.state].remove(c)
+ c.state = 'reserved'
+ c.reserved = () => queries.length
+ ? c.execute(queries.shift())
+ : c.state = 'reserved'
+ reserved.push(c)
+ connection = c
+ }
+ }
+
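+  // Wraps the server-side large object functions in a single transaction
+  // which is held open until close() is called. The default mode is
+  // INV_WRITE | INV_READ (0x00020000 | 0x00040000).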
+ function largeObject(oid, mode = 0x00020000 | 0x00040000) {
+ return new Promise(async(resolve, reject) => {
+ await sql.begin(async sql => {
+ let finish
+ !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
+ const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
+
+ const lo = {
+ writable,
+ readable,
+ close : () => sql`select lo_close(${ fd })`.then(finish),
+ tell : () => sql`select lo_tell64(${ fd })`,
+ read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
+ write : (x) => sql`select lowrite(${ fd }, ${ x })`,
+ truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
+ seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
+ size : () => sql`
+ select
+ lo_lseek64(${ fd }, location, 0) as position,
+ seek.size
+ from (
+ select
+ lo_lseek64($1, 0, 2) as size,
+ tell.location
+ from (select lo_tell64($1) as location) tell
+ ) seek
+ `
+ }
+
+ resolve(lo)
+
+ return new Promise(async r => finish = r)
+
+ async function readable({
+ highWaterMark = 2048 * 8,
+ start = 0,
+ end = Infinity
+ } = {}) {
+ let max = end - start
+ start && await lo.seek(start)
+ return new Stream.Readable({
+ highWaterMark,
+ async read(size) {
+ const l = size > max ? size - max : size
+ max -= size
+ const [{ data }] = await lo.read(l)
+ this.push(data)
+ if (data.length < size)
+ this.push(null)
+ }
+ })
+ }
+
+ async function writable({
+ highWaterMark = 2048 * 8,
+ start = 0
+ } = {}) {
+ start && await lo.seek(start)
+ return new Stream.Writable({
+ highWaterMark,
+ write(chunk, encoding, callback) {
+ lo.write(chunk).then(() => callback(), callback)
+ }
+ })
+ }
+ }).catch(reject)
+ })
+ }
+
+ function json(x) {
+ return new Parameter(x, 3802)
+ }
+
+ function array(x, type) {
+ if (!Array.isArray(x))
+ return array(Array.from(arguments))
+
+ return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
+ }
+
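+  // Routes a query to the pool: prefer an open idle connection, otherwise
+  // connect a closed one, otherwise pipeline onto a busy connection, and
+  // queue the query once every connection is full.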
+ function handler(query) {
+ if (ending)
+ return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
+
+ if (open.length)
+ return go(open, query)
+
+ if (closed.length)
+ return connect(closed.shift(), query)
+
+ busy.length
+ ? go(busy, query)
+ : queries.push(query)
+ }
+
+ function go(xs, query) {
+ const c = xs.shift()
+ return c.execute(query)
+ ? (c.state = 'busy', busy.push(c))
+ : (c.state = 'full', full.push(c))
+ }
+
+ function cancel(query) {
+ return new Promise((resolve, reject) => {
+ query.state
+ ? query.active
+ ? Connection(options, {}).cancel(query.state, resolve, reject)
+ : query.cancelled = { resolve, reject }
+ : (
+ queries.remove(query),
+ query.cancelled = true,
+ query.reject(Errors.generic('57014', 'canceling statement due to user request')),
+ resolve()
+ )
+ })
+ }
+
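+  // Graceful shutdown: waits for every connection (and any dedicated
+  // listen/subscribe instances) to end, destroying whatever remains if the
+  // optional timeout (in seconds) elapses first.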
+ async function end({ timeout = null } = {}) {
+ if (ending)
+ return ending
+
+ await 1
+ let timer
+ return ending = Promise.race([
+ new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
+ Promise.all(connections.map(c => c.end()).concat(
+ listen.sql ? listen.sql.end({ timeout: 0 }) : [],
+ subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
+ ))
+ ]).then(() => clearTimeout(timer))
+ }
+
+ async function destroy(resolve) {
+ await Promise.all(connections.map(c => c.terminate()))
+ while (queries.length)
+ queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
+ resolve()
+ }
+
+ function connect(c, query) {
+ c.state = 'connecting'
+ connecting.push(c)
+ c.connect(query)
+ }
+
+ function onend(c) {
+ queues[c.state].remove(c)
+ c.state = 'ended'
+ ended.push(c)
+ }
+
+ function onopen(c) {
+ queues[c.state].remove(c)
+ if (queries.length === 0)
+ return (c.state = 'open', open.push(c))
+
+ let max = Math.ceil(queries.length / (connecting.length + 1))
+ , ready = true
+
+ while (ready && queries.length && max-- > 0)
+ ready = c.execute(queries.shift())
+
+ ready
+ ? (c.state = 'busy', busy.push(c))
+ : (c.state = 'full', full.push(c))
+ }
+
+ function ondrain(c) {
+ full.remove(c)
+ onopen(c)
+ }
+
+ function onclose(c) {
+ queues[c.state].remove(c)
+ c.state = 'closed'
+ c.reserved = null
+ options.onclose && options.onclose(c.id)
+ queries.length
+ ? connect(c, queries.shift())
+ : queues.closed.push(c)
+ }
+}
+
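+// Resolves connection options with the precedence: explicit options object,
+// then connection url parameters, then PG* environment variables, then
+// defaults mirroring psql.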
+function parseOptions(a, b) {
+ if (a && a.shared)
+ return a
+
+ const env = process.env // eslint-disable-line
+ , o = (typeof a === 'string' ? b : a) || {}
+ , { url, multihost } = parseUrl(a, env)
+ , query = url.searchParams
+ , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
+ , port = o.port || url.port || env.PGPORT || 5432
+ , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
+
+ return Object.assign({
+ host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
+ port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
+ path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
+ database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
+ user : user,
+ pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
+ max : o.max || query.get('max') || 10,
+ types : o.types || {},
+ ssl : o.ssl || parseSSL(query.get('sslmode') || query.get('ssl')) || false,
+ idle_timeout : o.idle_timeout || query.get('idle_timeout') || env.PGIDLE_TIMEOUT || warn(o.timeout),
+ connect_timeout : o.connect_timeout || query.get('connect_timeout') || env.PGCONNECT_TIMEOUT || 30,
+ max_lifetime : o.max_lifetime || url.max_lifetime || max_lifetime,
+ max_pipeline : o.max_pipeline || url.max_pipeline || 100,
+ backoff : o.backoff || url.backoff || backoff,
+ keep_alive : o.keep_alive || url.keep_alive || 60,
+ prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true,
+ onnotice : o.onnotice,
+ onnotify : o.onnotify,
+ onclose : o.onclose,
+ onparameter : o.onparameter,
+ transform : parseTransform(o.transform || {}),
+ connection : Object.assign({ application_name: 'postgres.js' }, o.connection),
+ target_session_attrs: tsa(o, url, env),
+ debug : o.debug,
+ fetch_types : 'fetch_types' in o ? o.fetch_types : true,
+ parameters : {},
+ shared : { retries: 0, typeArrayMap: {} }
+ },
+ mergeUserTypes(o.types)
+ )
+}
+
+function tsa(o, url, env) {
+ const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
+ if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
+ return x
+
+ throw new Error('target_session_attrs ' + x + ' is not supported')
+}
+
+function backoff(retries) {
+ return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
+}
+
+function max_lifetime() {
+ return 60 * (30 + Math.random() * 30)
+}
+
+function parseTransform(x) {
+ return {
+ column: {
+ from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
+ to: x.column && x.column.to
+ },
+ value: {
+ from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
+ to: x.value && x.value.to
+ },
+ row: {
+ from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
+ to: x.row && x.row.to
+ }
+ }
+}
+
+function parseSSL(x) {
+ return x !== 'disable' && x !== 'false' && x
+}
+
+function parseUrl(url) {
+ if (typeof url !== 'string')
+ return { url: { searchParams: new Map() } }
+
+ let host = url
+ host = host.slice(host.indexOf('://') + 3)
+ host = host.split(/[?/]/)[0]
+ host = host.slice(host.indexOf('@') + 1)
+
+ return {
+ url: new URL(url.replace(host, host.split(',')[0])),
+ multihost: host.indexOf(',') > -1 && host
+ }
+}
+
+function warn(x) {
+ typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line
+ return x
+}
+
+function osUsername() {
+ try {
+ return os.userInfo().username // eslint-disable-line
+ } catch (_) {
+ return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
+ }
+}
diff --git a/src/query.js b/src/query.js
new file mode 100644
index 00000000..513c044a
--- /dev/null
+++ b/src/query.js
@@ -0,0 +1,161 @@
+const originCache = new Map()
+ , originStackCache = new Map()
+ , originError = Symbol('OriginError')
+
+export const CLOSE = {}
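+// A Query is a lazily executed promise: nothing is sent to the server until
+// then/catch/finally or execute() runs, so methods like cursor(), forEach()
+// and raw() can configure the query first.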
+export class Query extends Promise {
+ constructor(strings, args, handler, canceller, options = {}) {
+ let resolve
+ , reject
+
+ super((a, b) => {
+ resolve = a
+ reject = b
+ })
+
+ this.tagged = Array.isArray(strings.raw)
+ this.strings = strings
+ this.args = args
+ this.handler = handler
+ this.canceller = canceller
+ this.options = options
+
+ this.state = null
+ this.statement = null
+
+ this.resolve = x => (this.active = false, resolve(x))
+ this.reject = x => (this.active = false, reject(x))
+
+ this.active = false
+ this.cancelled = null
+ this.executed = false
+ this.signature = ''
+
+ this[originError] = handler.debug || !this.tagged
+ ? new Error()
+ : cachedError(this.strings)
+ }
+
+ get origin() {
+ return this.handler.debug || !this.tagged
+ ? this[originError].stack
+ : originStackCache.has(this.strings)
+ ? originStackCache.get(this.strings)
+ : originStackCache.set(this.strings, this[originError].stack).get(this.strings)
+ }
+
+ static get [Symbol.species]() {
+ return Promise
+ }
+
+ cancel() {
+ return this.canceller && (this.canceller(this), this.canceller = null)
+ }
+
+ async readable() {
+ this.options.simple = true
+ this.options.prepare = false
+ this.streaming = true
+ return this
+ }
+
+ async writable() {
+ this.options.simple = true
+ this.options.prepare = false
+ this.streaming = true
+ return this
+ }
+
+ cursor(rows = 1, fn) {
+ this.options.simple = false
+ if (typeof rows === 'function') {
+ fn = rows
+ rows = 1
+ }
+
+ this.cursorRows = rows
+
+ if (typeof fn === 'function')
+ return (this.cursorFn = fn, this)
+
+ let prev
+ return {
+ [Symbol.asyncIterator]: () => ({
+ next: () => {
+ if (this.executed && !this.active)
+ return { done: true }
+
+ prev && prev()
+ const promise = new Promise((resolve, reject) => {
+ this.cursorFn = value => {
+ resolve({ value, done: false })
+ return new Promise(r => prev = r)
+ }
+ this.resolve = () => (this.active = false, resolve({ done: true }))
+ this.reject = x => (this.active = false, reject(x))
+ })
+ this.execute()
+ return promise
+ },
+ return() {
+ prev && prev(CLOSE)
+ return { done: true }
+ }
+ })
+ }
+ }
+
+ describe() {
+ this.onlyDescribe = true
+ return this
+ }
+
+ stream() {
+ throw new Error('.stream has been renamed to .forEach')
+ }
+
+ forEach(fn) {
+ this.forEachFn = fn
+ return this
+ }
+
+ raw() {
+ this.isRaw = true
+ return this
+ }
+
+ async handle() {
+ !this.executed && (this.executed = true) && await 1 && this.handler(this)
+ }
+
+ execute() {
+ this.handle()
+ return this
+ }
+
+ then() {
+ this.handle()
+ return super.then.apply(this, arguments)
+ }
+
+ catch() {
+ this.handle()
+ return super.catch.apply(this, arguments)
+ }
+
+ finally() {
+ this.handle()
+ return super.finally.apply(this, arguments)
+ }
+}
+
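+// Capturing stack traces is expensive, so for tagged template queries the
+// origin Error is cached per template strings object and limited to 4
+// frames; a full Error is only created in debug mode or for untagged calls.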
+function cachedError(xs) {
+ if (originCache.has(xs))
+ return originCache.get(xs)
+
+ const x = Error.stackTraceLimit
+ Error.stackTraceLimit = 4
+ originCache.set(xs, new Error())
+ Error.stackTraceLimit = x
+ return originCache.get(xs)
+}
diff --git a/src/queue.js b/src/queue.js
new file mode 100644
index 00000000..c4ef9716
--- /dev/null
+++ b/src/queue.js
@@ -0,0 +1,31 @@
+export default Queue
+
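+// Minimal FIFO queue with O(1) shift: consumed slots are cleared and the
+// backing array is only reset once fully drained. Callers are expected to
+// check length before calling shift.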
+function Queue(initial = []) {
+ let xs = initial.slice()
+ let index = 0
+
+ return {
+ get length() {
+ return xs.length - index
+ },
+ remove: (x) => {
+ const index = xs.indexOf(x)
+ return index === -1
+ ? null
+ : (xs.splice(index, 1), x)
+ },
+ push: (x) => (xs.push(x), x),
+ shift: () => {
+ const out = xs[index++]
+
+ if (index === xs.length) {
+ index = 0
+ xs = []
+ } else {
+ xs[index - 1] = undefined
+ }
+
+ return out
+ }
+ }
+}
diff --git a/src/result.js b/src/result.js
new file mode 100644
index 00000000..31014284
--- /dev/null
+++ b/src/result.js
@@ -0,0 +1,16 @@
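+// A Result is an array of rows with query metadata attached as
+// non-enumerable properties. Symbol.species is Array so map/slice/filter
+// return plain arrays rather than Result instances.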
+export default class Result extends Array {
+ constructor() {
+ super()
+ Object.defineProperties(this, {
+ count: { value: null, writable: true },
+ state: { value: null, writable: true },
+ command: { value: null, writable: true },
+ columns: { value: null, writable: true },
+ statement: { value: null, writable: true }
+ })
+ }
+
+ static get [Symbol.species]() {
+ return Array
+ }
+}
diff --git a/lib/subscribe.js b/src/subscribe.js
similarity index 78%
rename from lib/subscribe.js
rename to src/subscribe.js
index 0a5b4899..b81c7c3a 100644
--- a/lib/subscribe.js
+++ b/src/subscribe.js
@@ -1,4 +1,4 @@
-module.exports = function(postgres, a, b) {
+export default function Subscribe(postgres, options) {
const listeners = new Map()
let connection
@@ -6,16 +6,27 @@ module.exports = function(postgres, a, b) {
return async function subscribe(event, fn) {
event = parseEvent(event)
- const options = typeof a === 'string' ? b : a || {}
options.max = 1
+ options.onclose = onclose
options.connection = {
...options.connection,
replication: 'database'
}
- const sql = postgres(a, b)
+ let stream
+ , ended = false
- !connection && (subscribe.sql = sql, connection = init(sql, options.publications))
+ const sql = postgres(options)
+ , slot = 'postgresjs_' + Math.random().toString(36).slice(2)
+ , end = sql.end
+
+ sql.end = async() => {
+ ended = true
+ stream && (await new Promise(r => (stream.once('end', r), stream.end())))
+ return end()
+ }
+
+ !connection && (subscribe.sql = sql, connection = init(sql, slot, options.publications))
const fns = listeners.has(event)
? listeners.get(event).add(fn)
@@ -26,19 +37,23 @@ module.exports = function(postgres, a, b) {
fns.size === 0 && listeners.delete(event)
}
- return connection.then(() => ({ unsubscribe }))
+ return connection.then(x => (stream = x, { unsubscribe }))
+
+ async function onclose() {
+ stream = null
+ !ended && (stream = await init(sql, slot, options.publications))
+ }
}
- async function init(sql, publications = 'alltables') {
+ async function init(sql, slot, publications = 'alltables') {
if (!publications)
throw new Error('Missing publication names')
- const slot = 'postgresjs_' + Math.random().toString(36).slice(2)
const [x] = await sql.unsafe(
`CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
)
- const stream = sql.unsafe(
+ const stream = await sql.unsafe(
`START_REPLICATION SLOT ${ slot } LOGICAL ${
x.consistent_point
} (proto_version '1', publication_names '${ publications }')`
@@ -49,6 +64,12 @@ module.exports = function(postgres, a, b) {
}
stream.on('data', data)
+ stream.on('error', (error) => {
+ console.error('Logical Replication Error - Reconnecting', error)
+ sql.end()
+ })
+
+ return stream
function data(x) {
if (x[0] === 0x77)
@@ -91,10 +112,10 @@ function parse(x, state, parsers, handle) {
Object.entries({
R: x => { // Relation
let i = 1
- const r = state[x.readInt32BE(i)] = {
+ const r = state[x.readUInt32BE(i)] = {
schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog',
table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))),
- columns: Array(x.readInt16BE(i += 2)),
+ columns: Array(x.readUInt16BE(i += 2)),
keys: []
}
i += 2
@@ -106,9 +127,9 @@ function parse(x, state, parsers, handle) {
column = r.columns[columnIndex++] = {
key: x[i++],
name: String(x.slice(i, i = x.indexOf(0, i))),
- type: x.readInt32BE(i += 1),
- parser: parsers[x.readInt32BE(i)],
- atttypmod: x.readInt32BE(i += 4)
+ type: x.readUInt32BE(i += 1),
+ parser: parsers[x.readUInt32BE(i)],
+ atttypmod: x.readUInt32BE(i += 4)
}
column.key && r.keys.push(column)
@@ -123,7 +144,7 @@ function parse(x, state, parsers, handle) {
},
I: x => { // Insert
let i = 1
- const relation = state[x.readInt32BE(i)]
+ const relation = state[x.readUInt32BE(i)]
const row = {}
tuples(x, row, relation.columns, i += 7)
@@ -134,7 +155,7 @@ function parse(x, state, parsers, handle) {
},
D: x => { // Delete
let i = 1
- const relation = state[x.readInt32BE(i)]
+ const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
const row = key || x[i] === 79
@@ -151,7 +172,7 @@ function parse(x, state, parsers, handle) {
},
U: x => { // Update
let i = 1
- const relation = state[x.readInt32BE(i)]
+ const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
const old = key || x[i] === 79
@@ -187,10 +208,10 @@ function tuples(x, row, columns, xi) {
: type === 117 // u
? undefined
: column.parser === undefined
- ? x.toString('utf8', xi + 4, xi += 4 + x.readInt32BE(xi))
+ ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
: column.parser.array === true
- ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readInt32BE(xi)))
- : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readInt32BE(xi)))
+ ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
+ : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
}
return xi
diff --git a/src/types.js b/src/types.js
new file mode 100644
index 00000000..c806acb6
--- /dev/null
+++ b/src/types.js
@@ -0,0 +1,297 @@
+import { Query } from './query.js'
+import { Errors } from './errors.js'
+
+export const types = {
+ string: {
+ to: 25,
+ from: null, // defaults to string
+ serialize: x => '' + x
+ },
+ number: {
+ to: 0,
+ from: [21, 23, 26, 700, 701],
+ serialize: x => '' + x,
+ parse: x => +x
+ },
+ json: {
+ to: 114,
+ from: [114, 3802],
+ serialize: x => JSON.stringify(x),
+ parse: x => JSON.parse(x)
+ },
+ boolean: {
+ to: 16,
+ from: 16,
+ serialize: x => x === true ? 't' : 'f',
+ parse: x => x === 't'
+ },
+ date: {
+ to: 1184,
+ from: [1082, 1114, 1184],
+ serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
+ parse: x => new Date(x)
+ },
+ bytea: {
+ to: 17,
+ from: 17,
+ serialize: x => '\\x' + Buffer.from(x).toString('hex'),
+ parse: x => Buffer.from(x.slice(2), 'hex')
+ }
+}
+
+export const BigInt = {
+ to: 1700,
+ from: [20, 701, 1700],
+ parse: x => BigInt(x), // eslint-disable-line
+ serialize: x => x.toString()
+}
+
+class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }}
+
+export class Identifier extends NotTagged {
+ constructor(value) {
+ super()
+ this.value = escapeIdentifier(value)
+ }
+}
+
+export class Parameter extends NotTagged {
+ constructor(value, type, array) {
+ super()
+ this.value = value
+ this.type = type
+ this.array = array
+ }
+}
+
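+// Builder powers the dynamic query helpers: build() looks at the sql text
+// preceding the interpolation and dispatches to the helper (values, in,
+// update, select or insert) whose keyword occurs last before it.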
+export class Builder extends NotTagged {
+ constructor(first, rest) {
+ super()
+ this.first = first
+ this.rest = rest
+ }
+
+ build(before, parameters, types, transform) {
+ const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
+ if (keyword.i === -1)
+ throw new Error('Could not infer helper mode')
+
+ return keyword.fn(this.first, this.rest, parameters, types, transform)
+ }
+}
+
+export function handleValue(x, parameters, types) {
+ const value = x instanceof Parameter ? x.value : x
+ if (value === undefined)
+ throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
+
+ return '$' + (types.push(
+ x instanceof Parameter
+ ? (parameters.push(x.value), x.array
+ ? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
+ : x.type
+ )
+ : (parameters.push(x), inferType(x))
+ ))
+}
+
+const defaultHandlers = typeHandlers(types)
+
+function valuesBuilder(first, parameters, types, transform, columns) {
+ let value
+ return first.map(row =>
+ '(' + columns.map(column => {
+ value = row[column]
+ return (
+ value instanceof Query ? value.strings[0] :
+ value instanceof Identifier ? value.value :
+ handleValue(value, parameters, types)
+ )
+ }).join(',') + ')'
+ ).join(',')
+}
+
+function values(first, rest, parameters, types, transform) {
+ const multi = Array.isArray(first[0])
+ const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
+ return valuesBuilder(multi ? first : [first], parameters, types, transform, columns)
+}
+
+const builders = Object.entries({
+ values,
+ in: values,
+
+ update(first, rest, parameters, types, transform) {
+ return (rest.length ? rest.flat() : Object.keys(first)).map(x =>
+ escapeIdentifier(transform.column.to ? transform.column.to(x) : x) +
+ '=' + handleValue(first[x], parameters, types)
+ )
+ },
+
+ select(first, rest, parameters, types, transform) {
+ typeof first === 'string' && (first = [first].concat(rest))
+ if (Array.isArray(first))
+ return first.map(x => escapeIdentifier(transform.column.to ? transform.column.to(x) : x)).join(',')
+
+ let value
+ const columns = rest.length ? rest.flat() : Object.keys(first)
+ return columns.map(x => {
+ value = first[x]
+ return (
+ value instanceof Query ? value.strings[0] :
+ value instanceof Identifier ? value.value :
+ handleValue(value, parameters, types)
+ ) + ' as ' + escapeIdentifier(transform.column.to ? transform.column.to(x) : x)
+ }).join(',')
+ },
+
+ insert(first, rest, parameters, types, transform) {
+ const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first)
+ return '(' + columns.map(x =>
+ escapeIdentifier(transform.column.to ? transform.column.to(x) : x)
+ ).join(',') + ')values' +
+ valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns)
+ }
+}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn]))
+
+function notTagged() {
+ throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
+}
+
+export const serializers = defaultHandlers.serializers
+export const parsers = defaultHandlers.parsers
+
+export const END = {}
+
+function firstIsString(x) {
+ if (Array.isArray(x))
+ return firstIsString(x[0])
+ return typeof x === 'string' ? 1009 : 0
+}
+
+export const mergeUserTypes = function(types) {
+ const user = typeHandlers(types || {})
+ return {
+ serializers: Object.assign({}, serializers, user.serializers),
+ parsers: Object.assign({}, parsers, user.parsers)
+ }
+}
+
+function typeHandlers(types) {
+ return Object.keys(types).reduce((acc, k) => {
+ types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
+ acc.serializers[types[k].to] = types[k].serialize
+ types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize)
+ return acc
+ }, { parsers: {}, serializers: {} })
+}
+
+export const escapeIdentifier = function escape(str) {
+ return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
+}
+
+export const inferType = function inferType(x) {
+ return (
+ x instanceof Parameter ? x.type :
+ x instanceof Date ? 1184 :
+ x instanceof Uint8Array ? 17 :
+ (x === true || x === false) ? 16 :
+ typeof x === 'bigint' ? 1700 :
+ Array.isArray(x) ? inferType(x[0]) :
+ 0
+ )
+}
+
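+// PostgreSQL array literals look like {"a","b",{"c"}}; backslashes and
+// double quotes inside elements are escaped with a backslash.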
+const escapeBackslash = /\\/g
+const escapeQuote = /"/g
+
+function arrayEscape(x) {
+ return x
+ .replace(escapeBackslash, '\\\\')
+ .replace(escapeQuote, '\\"')
+}
+
+export const arraySerializer = function arraySerializer(xs, serializer) {
+ if (Array.isArray(xs) === false)
+ return xs
+
+ if (!xs.length)
+ return '{}'
+
+ const first = xs[0]
+
+ if (Array.isArray(first) && !first.type)
+ return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}'
+
+ return '{' + xs.map(x =>
+ '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
+ ).join(',') + '}'
+}
+
+const arrayParserState = {
+ i: 0,
+ char: null,
+ str: '',
+ quoted: false,
+ last: 0
+}
+
+export const arrayParser = function arrayParser(x, parser) {
+ arrayParserState.i = arrayParserState.last = 0
+ return arrayParserLoop(arrayParserState, x, parser)
+}
+
+function arrayParserLoop(s, x, parser) {
+ const xs = []
+ for (; s.i < x.length; s.i++) {
+ s.char = x[s.i]
+ if (s.quoted) {
+ if (s.char === '\\') {
+ s.str += x[++s.i]
+ } else if (s.char === '"') {
+ xs.push(parser ? parser(s.str) : s.str)
+ s.str = ''
+ s.quoted = x[s.i + 1] === '"'
+ s.last = s.i + 2
+ } else {
+ s.str += s.char
+ }
+ } else if (s.char === '"') {
+ s.quoted = true
+ } else if (s.char === '{') {
+ s.last = ++s.i
+ xs.push(arrayParserLoop(s, x, parser))
+ } else if (s.char === '}') {
+ s.quoted = false
+ s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
+ s.last = s.i + 1
+ break
+ } else if (s.char === ',' && s.p !== '}' && s.p !== '"') {
+ xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
+ s.last = s.i + 1
+ }
+ s.p = s.char
+ }
+ s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
+ return xs
+}
+
+export const toCamel = x => {
+ let str = x[0]
+ for (let i = 1; i < x.length; i++)
+ str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
+ return str
+}
+
+export const toPascal = x => {
+ let str = x[0].toUpperCase()
+ for (let i = 1; i < x.length; i++)
+ str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
+ return str
+}
+
+export const toKebab = x => x.replace(/_/g, '-')
+
+export const fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
+export const fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
+export const fromKebab = x => x.replace(/-/g, '_')
diff --git a/tests/bootstrap.js b/tests/bootstrap.js
index e25cc862..6a4fa4c1 100644
--- a/tests/bootstrap.js
+++ b/tests/bootstrap.js
@@ -1,23 +1,29 @@
-const cp = require('child_process')
+import { spawn, spawnSync } from 'child_process'
-exec('psql -c "create user postgres_js_test"')
-exec('psql -c "alter system set password_encryption=md5"')
-exec('psql -c "select pg_reload_conf()"')
-exec('psql -c "create user postgres_js_test_md5 with password \'postgres_js_test_md5\'"')
-exec('psql -c "alter system set password_encryption=\'scram-sha-256\'"')
-exec('psql -c "select pg_reload_conf()"')
-exec('psql -c "create user postgres_js_test_scram with password \'postgres_js_test_scram\'"')
+exec('psql', ['-c', 'alter system set ssl=on'])
+exec('psql', ['-c', 'create user postgres_js_test'])
+exec('psql', ['-c', 'alter system set password_encryption=md5'])
+exec('psql', ['-c', 'select pg_reload_conf()'])
+exec('psql', ['-c', 'create user postgres_js_test_md5 with password \'postgres_js_test_md5\''])
+exec('psql', ['-c', 'alter system set password_encryption=\'scram-sha-256\''])
+exec('psql', ['-c', 'select pg_reload_conf()'])
+exec('psql', ['-c', 'create user postgres_js_test_scram with password \'postgres_js_test_scram\''])
-cp.execSync('dropdb postgres_js_test;createdb postgres_js_test')
-;['postgres_js_test', 'postgres_js_test', 'postgres_js_test', 'postgres_js_test'].forEach(x =>
- cp.execSync('psql -c "grant all on database postgres_js_test to ' + x + '"')
-)
+exec('dropdb', ['postgres_js_test'])
+exec('createdb', ['postgres_js_test'])
+exec('psql', ['-c', 'grant all on database postgres_js_test to postgres_js_test'])
-function exec(cmd) {
- try {
- cp.execSync(cmd, { stdio: 'pipe', encoding: 'utf8' })
- } catch (err) {
- if (err.stderr.indexOf('already exists') === -1)
- throw err
- }
+export function exec(cmd, args) {
+ const { stderr } = spawnSync(cmd, args, { stdio: 'pipe', encoding: 'utf8' })
+ if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist'))
+    throw new Error(stderr)
+}
+
+async function execAsync(cmd, args) { // eslint-disable-line
+ let stderr = ''
+  const cp = spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' })
+ cp.stderr.on('data', x => stderr += x)
+ await new Promise(x => cp.on('exit', x))
+ if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist'))
+ throw new Error(stderr)
}
diff --git a/tests/index.js b/tests/index.js
index ab897273..876f85ec 100644
--- a/tests/index.js
+++ b/tests/index.js
@@ -1,17 +1,18 @@
/* eslint no-console: 0 */
-require('./bootstrap.js')
+import { exec } from './bootstrap.js'
-const { t, not, ot } = require('./test.js') // eslint-disable-line
-const cp = require('child_process')
-const path = require('path')
-const net = require('net')
-const fs = require('fs')
+import { t, nt, ot } from './test.js' // eslint-disable-line
+import net from 'net'
+import fs from 'fs'
+import crypto from 'crypto'
-/** @type {import('../types')} */
-const postgres = require('../lib')
+import postgres from '../src/index.js'
const delay = ms => new Promise(r => setTimeout(r, ms))
+const rel = x => new URL(x, import.meta.url)
+const idle_timeout = 1
+
const login = {
user: 'postgres_js_test'
}
@@ -30,15 +31,15 @@ const options = {
db: 'postgres_js_test',
user: login.user,
pass: login.pass,
- idle_timeout: 0.2,
- debug: false,
+ idle_timeout,
+ connect_timeout: 1,
max: 1
}
const sql = postgres(options)
t('Connects with no options', async() => {
- const sql = postgres()
+ const sql = postgres({ max: 1 })
const result = (await sql`select 1 as x`)[0].x
await sql.end()
@@ -72,7 +73,7 @@ t('Create table', async() =>
['CREATE TABLE', (await sql`create table test(int int)`).command, await sql`drop table test`]
)
-t('Drop table', async() => {
+t('Drop table', { timeout: 2 }, async() => {
await sql`create table test(int int)`
return ['DROP TABLE', (await sql`drop table test`).command]
})
@@ -103,12 +104,26 @@ t('Date', async() => {
})
t('Json', async() => {
- const x = (await sql`select ${ sql.json({ a: 1, b: 'hello' }) } as x`)[0].x
- return [true, x.a === 1 && x.b === 'hello']
+ const x = (await sql`select ${ sql.json({ a: 'hello', b: 42 }) } as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('implicit json', async() => {
+ const x = (await sql`select ${ { a: 'hello', b: 42 } }::json as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
+})
+
+t('implicit jsonb', async() => {
+ const x = (await sql`select ${ { a: 'hello', b: 42 } }::jsonb as x`)[0].x
+ return ['hello,42', [x.a, x.b].join()]
})
t('Empty array', async() =>
- [true, Array.isArray((await sql`select ${ sql.array([]) }::int[] as x`)[0].x)]
+ [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)]
+)
+
+t('String array', async() =>
+ ['123', (await sql`select ${ '{1,2,3}' }::int[] as x`)[0].x.join('')]
)
t('Array of Integer', async() =>
@@ -145,6 +160,15 @@ t('null for int', async() => {
return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`]
})
+t('Throws on illegal transactions', async() => {
+ const sql = postgres({ ...options, max: 2, fetch_types: false })
+ const error = await sql`begin`.catch(e => e)
+ return [
+ error.code,
+ 'UNSAFE_TRANSACTION'
+ ]
+})
+
t('Transaction throws', async() => {
await sql`create table test (a int)`
return ['22P02', await sql.begin(async sql => {
@@ -171,7 +195,7 @@ t('Transaction throws on uncaught savepoint', async() => {
await sql`insert into test values(2)`
throw new Error('fail')
})
- }).catch(() => 'fail')), await sql`drop table test`]
+ }).catch((err) => err.message)), await sql`drop table test`]
})
t('Transaction throws on uncaught named savepoint', async() => {
@@ -179,7 +203,7 @@ t('Transaction throws on uncaught named savepoint', async() => {
return ['fail', (await sql.begin(async sql => {
await sql`insert into test values(1)`
- await sql.savepoint('watpoint', async sql => {
+    await sql.savepoint('watpoint', async sql => {
await sql`insert into test values(2)`
throw new Error('fail')
})
@@ -211,6 +235,25 @@ t('Savepoint returns Result', async() => {
return [1, result[0].x]
})
+t('Transaction requests are executed implicitly', async() => {
+ const sql = postgres({ debug: true, idle_timeout: 1, fetch_types: false })
+ return [
+ 'testing',
+ (await sql.begin(async sql => {
+ sql`select set_config('postgres_js.test', 'testing', true)`
+ return await sql`select current_setting('postgres_js.test') as x`
+ }))[0].x
+ ]
+})
+
+t('Uncaught transaction request errors bubble to transaction', async() => [
+ '42703',
+ (await sql.begin(sql => (
+ sql`select wat`,
+ sql`select current_setting('postgres_js.test') as x, ${ 1 } as a`
+ )).catch(e => e.code))
+])
+
t('Parallel transactions', async() => {
await sql`create table test (a int)`
return ['11', (await Promise.all([
@@ -219,6 +262,12 @@ t('Parallel transactions', async() => {
])).map(x => x.count).join(''), await sql`drop table test`]
})
+t('Many transactions at beginning of connection', async() => {
+ const sql = postgres(options)
+ const xs = await Promise.all(Array.from({ length: 100 }, () => sql.begin(sql => sql`select 1`)))
+ return [100, xs.length]
+})
+
t('Transactions array', async() => {
await sql`create table test (a int)`
@@ -272,7 +321,7 @@ t('Throw syntax error', async() =>
t('Connect using uri', async() =>
[true, await new Promise((resolve, reject) => {
const sql = postgres('postgres://' + login.user + ':' + (login.pass || '') + '@localhost:5432/' + options.db, {
- idle_timeout: options.idle_timeout
+ idle_timeout
})
sql`select 1`.then(() => resolve(true), reject)
})]
@@ -281,7 +330,7 @@ t('Connect using uri', async() =>
t('Fail with proper error on no host', async() =>
['ECONNREFUSED', (await new Promise((resolve, reject) => {
const sql = postgres('postgres://localhost:33333/' + options.db, {
- idle_timeout: options.idle_timeout
+ idle_timeout
})
sql`select 1`.then(reject, resolve)
})).code]
@@ -291,7 +340,7 @@ t('Connect using SSL', async() =>
[true, (await new Promise((resolve, reject) => {
postgres({
ssl: { rejectUnauthorized: false },
- idle_timeout: options.idle_timeout
+ idle_timeout
})`select 1`.then(() => resolve(true), reject)
}))]
)
@@ -300,27 +349,39 @@ t('Connect using SSL require', async() =>
[true, (await new Promise((resolve, reject) => {
postgres({
ssl: 'require',
- idle_timeout: options.idle_timeout
+ idle_timeout
})`select 1`.then(() => resolve(true), reject)
}))]
)
t('Connect using SSL prefer', async() => {
- cp.execSync('psql -c "alter system set ssl=off"')
- cp.execSync('psql -c "select pg_reload_conf()"')
+ await exec('psql', ['-c', 'alter system set ssl=off'])
+ await exec('psql', ['-c', 'select pg_reload_conf()'])
const sql = postgres({
ssl: 'prefer',
- idle_timeout: options.idle_timeout
+ idle_timeout
})
return [
1, (await sql`select 1 as x`)[0].x,
- cp.execSync('psql -c "alter system set ssl=on"'),
- cp.execSync('psql -c "select pg_reload_conf()"')
+ await exec('psql', ['-c', 'alter system set ssl=on']),
+ await exec('psql', ['-c', 'select pg_reload_conf()'])
]
})
+t('Reconnect using SSL', { timeout: 2 }, async() => {
+ const sql = postgres({
+ ssl: 'require',
+ idle_timeout: 0.1
+ })
+
+ await sql`select 1`
+ await delay(200)
+
+ return [1, (await sql`select 1 as x`)[0].x]
+})
+
t('Login without password', async() => {
return [true, (await postgres({ ...options, ...login })`select true as x`)[0].x]
})
@@ -334,7 +395,7 @@ t('Login using scram-sha-256', async() => {
})
t('Parallel connections using scram-sha-256', {
- timeout: 2000
+ timeout: 2
}, async() => {
const sql = postgres({ ...options, ...login_scram })
return [true, (await Promise.all([
@@ -397,32 +458,32 @@ t('Point type array', async() => {
})
t('sql file', async() =>
- [1, (await sql.file(path.join(__dirname, 'select.sql')))[0].x]
+ [1, (await sql.file(rel('select.sql')))[0].x]
)
-t('sql file can stream', async() => {
+t('sql file has forEach', async() => {
let result
await sql
- .file(path.join(__dirname, 'select.sql'), { cache: false })
- .stream(({ x }) => result = x)
+ .file(rel('select.sql'), { cache: false })
+ .forEach(({ x }) => result = x)
return [1, result]
})
t('sql file throws', async() =>
- ['ENOENT', (await sql.file('./selectomondo.sql').catch(x => x.code))]
+ ['ENOENT', (await sql.file(rel('selectomondo.sql')).catch(x => x.code))]
)
t('sql file cached', async() => {
- await sql.file(path.join(__dirname, 'select.sql'))
+ await sql.file(rel('select.sql'))
await delay(20)
- return [1, (await sql.file(path.join(__dirname, 'select.sql')))[0].x]
+ return [1, (await sql.file(rel('select.sql')))[0].x]
})
t('Parameters in file', async() => {
const result = await sql.file(
- path.join(__dirname, 'select-param.sql'),
+ rel('select-param.sql'),
['hello']
)
return ['hello', result[0].x]
@@ -453,7 +514,8 @@ t('Connection ended error', async() => {
t('Connection end does not cancel query', async() => {
const sql = postgres(options)
- const promise = sql`select 1 as x`
+ const promise = sql`select 1 as x`.execute()
+
sql.end()
return [1, (await promise)[0].x]
@@ -533,6 +595,7 @@ t('listen and notify', async() => {
return ['world', await new Promise((resolve, reject) =>
sql.listen(channel, resolve)
.then(() => sql.notify(channel, 'world'))
+ .then(() => delay(20))
.catch(reject)
.then(sql.end)
)]
@@ -570,54 +633,51 @@ t('listen and notify with weird name', async() => {
sql.listen(channel, resolve)
.then(() => sql.notify(channel, 'world'))
.catch(reject)
+ .then(() => delay(20))
.then(sql.end)
)]
})
t('listen and notify with upper case', async() => {
+ const sql = postgres(options)
let result
- const { unlisten } = await sql.listen('withUpperChar', x => result = x)
+ await sql.listen('withUpperChar', x => result = x)
sql.notify('withUpperChar', 'works')
await delay(50)
return [
'works',
result,
- unlisten()
+ sql.end()
]
})
-t('listen reconnects', async() => {
- const listener = postgres(options)
+t('listen reconnects', { timeout: 2 }, async() => {
+ const sql = postgres(options)
, xs = []
- const { state: { pid } } = await listener.listen('test', x => xs.push(x))
+ const { state: { pid } } = await sql.listen('test', x => xs.push(x))
+ await delay(200)
await sql.notify('test', 'a')
await sql`select pg_terminate_backend(${ pid }::int)`
- await delay(50)
+ await delay(200)
await sql.notify('test', 'b')
- await delay(50)
- listener.end()
+ await delay(200)
+ sql.end()
return ['ab', xs.join('')]
})
-t('listen reconnects after connection error', { timeout: 2000 }, async() => {
+t('listen reconnects after connection error', { timeout: 3 }, async() => {
const sql = postgres()
, xs = []
- const a = (await sql`show data_directory`)[0].data_directory
-
const { state: { pid } } = await sql.listen('test', x => xs.push(x))
await sql.notify('test', 'a')
await sql`select pg_terminate_backend(${ pid }::int)`
-
- cp.execSync('pg_ctl stop -D "' + a + '"')
- await delay(50)
- cp.execSync('pg_ctl start -D "' + a + '" -w -l "' + a + '/postgresql.log"')
- await delay(50)
+ await delay(1000)
await sql.notify('test', 'b')
await delay(50)
@@ -627,64 +687,64 @@ t('listen reconnects after connection error', { timeout: 2000 }, async() => {
})
t('listen result reports correct connection state after reconnection', async() => {
- const listener = postgres(options)
+ const sql = postgres(options)
, xs = []
- const result = await listener.listen('test', x => xs.push(x))
+ const result = await sql.listen('test', x => xs.push(x))
const initialPid = result.state.pid
await sql.notify('test', 'a')
await sql`select pg_terminate_backend(${ initialPid }::int)`
await delay(50)
- listener.end()
+ sql.end()
return [result.state.pid !== initialPid, true]
})
t('unlisten removes subscription', async() => {
- const listener = postgres(options)
+ const sql = postgres(options)
, xs = []
- const { unlisten } = await listener.listen('test', x => xs.push(x))
- await listener.notify('test', 'a')
+ const { unlisten } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
await delay(50)
await unlisten()
- await listener.notify('test', 'b')
+ await sql.notify('test', 'b')
await delay(50)
- listener.end()
+ sql.end()
return ['a', xs.join('')]
})
t('listen after unlisten', async() => {
- const listener = postgres(options)
+ const sql = postgres(options)
, xs = []
- const { unlisten } = await listener.listen('test', x => xs.push(x))
- await listener.notify('test', 'a')
+ const { unlisten } = await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'a')
await delay(50)
await unlisten()
- await listener.notify('test', 'b')
+ await sql.notify('test', 'b')
await delay(50)
- await listener.listen('test', x => xs.push(x))
- await listener.notify('test', 'c')
+ await sql.listen('test', x => xs.push(x))
+ await sql.notify('test', 'c')
await delay(50)
- listener.end()
+ sql.end()
return ['ac', xs.join('')]
})
t('multiple listeners and unlisten one', async() => {
- const listener = postgres(options)
+ const sql = postgres(options)
, xs = []
- await listener.listen('test', x => xs.push('1', x))
- const s2 = await listener.listen('test', x => xs.push('2', x))
- await listener.notify('test', 'a')
+ await sql.listen('test', x => xs.push('1', x))
+ const s2 = await sql.listen('test', x => xs.push('2', x))
+ await sql.notify('test', 'a')
await delay(50)
await s2.unlisten()
- await listener.notify('test', 'b')
+ await sql.notify('test', 'b')
await delay(50)
- listener.end()
+ sql.end()
return ['1a2a1b', xs.join('')]
})
@@ -777,22 +837,24 @@ t('little bobby tables', async() => {
})
t('Connection errors are caught using begin()', {
- timeout: 20000
+ timeout: 2
}, async() => {
let error
try {
- const sql = postgres({ host: 'wat' })
+ const sql = postgres({ host: 'wat', port: 1337 })
await sql.begin(async(sql) => {
await sql`insert into test (label, value) values (${1}, ${2})`
})
-
- await sql.end()
} catch (err) {
error = err
}
- return ['ENOTFOUND', error.code]
+ return [
+ true,
+ error.code === 'ENOTFOUND' ||
+ error.message === 'failed to lookup address information: nodename nor servname provided, or not known'
+ ]
})
t('dynamic column name', async() => {
@@ -823,15 +885,25 @@ t('dynamic insert pluck', async() => {
t('array insert', async() => {
await sql`create table test (a int, b int)`
- return [2, (await sql`insert into test (a, b) values (${ [1, 2] }) returning *`)[0].b, await sql`drop table test`]
+ return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`]
})
-t('parameters in()', async() => {
+t('where parameters in()', async() => {
+ await sql`create table test (x text)`
+ await sql`insert into test values ('a')`
+ return [
+ (await sql`select * from test where x in ${ sql(['a', 'b', 'c']) }`)[0].x,
+ 'a',
+ await sql`drop table test`
+ ]
+})
+
+t('where parameters in() values before', async() => {
return [2, (await sql`
with rows as (
select * from (values (1), (2), (3), (4)) as x(a)
)
- select * from rows where a in (${ [3, 4] })
+ select * from rows where a in ${ sql([3, 4]) }
`).count]
})
@@ -877,6 +949,22 @@ t('dynamic select args', async() => {
return ['yay', (await sql`select ${ sql('a', 'b') } from test`)[0].b, await sql`drop table test`]
})
+t('dynamic values single row', async() => {
+ const [{ b }] = await sql`
+ select * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c)
+ `
+
+ return ['b', b]
+})
+
+t('dynamic values multi row', async() => {
+ const [, { b }] = await sql`
+ select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) as x(a, b, c)
+ `
+
+ return ['b', b]
+})
+
t('connection parameters', async() => {
const sql = postgres({
...options,
@@ -952,25 +1040,25 @@ t('bytea serializes and parses', async() => {
await sql`insert into test values (${ buf })`
return [
- 0,
- Buffer.compare(buf, (await sql`select x from test`)[0].x),
+ buf.toString(),
+ (await sql`select x from test`)[0].x.toString(),
await sql`drop table test`
]
})
-t('Stream works', async() => {
+t('forEach works', async() => {
let result
- await sql`select 1 as x`.stream(({ x }) => result = x)
+ await sql`select 1 as x`.forEach(({ x }) => result = x)
return [1, result]
})
-t('Stream returns empty array', async() => {
- return [0, (await sql`select 1 as x`.stream(() => { /* noop */ })).length]
+t('forEach returns empty array', async() => {
+ return [0, (await sql`select 1 as x`.forEach(() => { /* noop */ })).length]
})
t('Cursor works', async() => {
const order = []
- await sql`select 1 as x union select 2 as x`.cursor(async(x) => {
+ await sql`select 1 as x union select 2 as x`.cursor(async([x]) => {
order.push(x.x + 'a')
await delay(100)
order.push(x.x + 'b')
@@ -980,7 +1068,7 @@ t('Cursor works', async() => {
t('Unsafe cursor works', async() => {
const order = []
- await sql.unsafe('select 1 as x union select 2 as x').cursor(async(x) => {
+ await sql.unsafe('select 1 as x union select 2 as x').cursor(async([x]) => {
order.push(x.x + 'a')
await delay(100)
order.push(x.x + 'b')
@@ -1014,16 +1102,16 @@ t('Cursor custom with less results than batch size works', async() => {
t('Cursor cancel works', async() => {
let result
- await sql`select * from generate_series(1,10) as x`.cursor(async({ x }) => {
+ await sql`select * from generate_series(1,10) as x`.cursor(async([{ x }]) => {
result = x
- return sql.END
+ return sql.CLOSE
})
return [1, result]
})
t('Cursor throw works', async() => {
const order = []
- await sql`select 1 as x union select 2 as x`.cursor(async(x) => {
+ await sql`select 1 as x union select 2 as x`.cursor(async([x]) => {
order.push(x.x + 'a')
await delay(100)
throw new Error('watty')
@@ -1031,11 +1119,83 @@ t('Cursor throw works', async() => {
return ['1aerr', order.join('')]
})
-t('Cursor throw works', async() => [
- 'err',
- await sql`wat`.cursor(() => { /* noop */ }).catch(() => 'err')
+t('Cursor error works', async() => [
+ '42601',
+ await sql`wat`.cursor(() => { /* noop */ }).catch((err) => err.code)
])
+t('Multiple Cursors', { timeout: 2 }, async() => {
+ const result = []
+ await sql.begin(async sql => [
+ await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async([row]) => {
+ result.push(row.x)
+ await new Promise(r => setTimeout(r, 200))
+ }),
+ await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async([row]) => {
+ result.push(row.x)
+ await new Promise(r => setTimeout(r, 100))
+ })
+ ])
+
+ return ['1,2,3,4,101,102,103,104', result.join(',')]
+})
+
+t('Cursor as async iterator', async() => {
+ const order = []
+ for await (const [x] of sql`select generate_series(1,2) as x;`.cursor()) {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ }
+
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Cursor as async iterator with break', async() => {
+ const order = []
+ for await (const xs of sql`select generate_series(1,2) as x;`.cursor()) {
+ order.push(xs[0].x + 'a')
+ await delay(10)
+ order.push(xs[0].x + 'b')
+ break
+ }
+
+ return ['1a1b', order.join('')]
+})
+
+t('Async Iterator Unsafe cursor works', async() => {
+ const order = []
+ for await (const [x] of sql.unsafe('select 1 as x union select 2 as x').cursor()) {
+ order.push(x.x + 'a')
+ await delay(100)
+ order.push(x.x + 'b')
+ }
+ return ['1a1b2a2b', order.join('')]
+})
+
+t('Async Iterator Cursor custom n works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(10))
+ order.push(x.length)
+
+ return ['10,10', order.join(',')]
+})
+
+t('Async Iterator Cursor custom with rest n works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(11))
+ order.push(x.length)
+
+ return ['11,9', order.join(',')]
+})
+
+t('Async Iterator Cursor custom with less results than batch size works', async() => {
+ const order = []
+ for await (const x of sql`select * from generate_series(1,20)`.cursor(21))
+ order.push(x.length)
+ return ['20', order.join(',')]
+})
+
t('Transform row', async() => {
const sql = postgres({
...options,
@@ -1045,14 +1205,14 @@ t('Transform row', async() => {
return [1, (await sql`select 'wat'`)[0]]
})
-t('Transform row stream', async() => {
+t('Transform row forEach', async() => {
let result
const sql = postgres({
...options,
transform: { row: () => 1 }
})
- await sql`select 1`.stream(x => result = x)
+ await sql`select 1`.forEach(x => result = x)
return [1, result]
})
@@ -1121,8 +1281,8 @@ t('numeric is returned as string', async() => [
t('Async stack trace', async() => {
const sql = postgres({ ...options, debug: false })
return [
- parseInt(new Error().stack.split('\n')[1].split(':')[1]) + 1,
- parseInt(await sql`select.sql`.catch(x => x.stack.split('\n').pop().split(':')[1]))
+ parseInt(new Error().stack.split('\n')[1].match(':([0-9]+):')[1]) + 1,
+ parseInt(await sql`error`.catch(x => x.stack.split('\n').pop().match(':([0-9]+):')[1]))
]
})
@@ -1139,7 +1299,7 @@ t('Debug has long async stack trace', async() => {
}
function wat() {
- return sql`selec 1`
+ return sql`error`
}
})
@@ -1149,24 +1309,16 @@ t('Error contains query string', async() => [
])
t('Error contains query serialized parameters', async() => [
- '1',
- (await sql`selec ${ 1 }`.catch(err => err.parameters[0].value))
+ 1,
+ (await sql`selec ${ 1 }`.catch(err => err.parameters[0]))
])
t('Error contains query raw parameters', async() => [
1,
- (await sql`selec ${ 1 }`.catch(err => err.parameters[0].raw))
+ (await sql`selec ${ 1 }`.catch(err => err.args[0]))
])
-t('Query string is not enumerable', async() => {
- const sql = postgres({ ...options, debug: false })
- return [
- -1,
- (await sql`selec 1`.catch(err => Object.keys(err).indexOf('query')))
- ]
-})
-
-t('Query and parameters are not enumerable if debug is not set', async() => {
+t('Query and parameters on error are not enumerable if debug is not set', async() => {
const sql = postgres({ ...options, debug: false })
return [
@@ -1184,11 +1336,11 @@ t('Query and parameters are enumerable if debug is set', async() => {
]
})
-t('connect_timeout works', async() => {
+t('connect_timeout works', { timeout: 20 }, async() => {
const connect_timeout = 0.2
const server = net.createServer()
server.listen()
- const sql = postgres({ port: server.address().port, connect_timeout })
+ const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout })
const start = Date.now()
let end
await sql`select 1`.catch((e) => {
@@ -1221,8 +1373,8 @@ t('requests works after single connect_timeout', async() => {
return [
'CONNECT_TIMEOUT,,1',
[
- await sql`select 1 as x`.catch(x => x.code),
- await new Promise(r => setTimeout(r, 10)),
+ await sql`select 1 as x`.then(() => 'success', x => x.code),
+ await delay(10),
(await sql`select 1 as x`)[0].x
].join(',')
]
@@ -1236,9 +1388,9 @@ t('Result has columns spec', async() =>
['x', (await sql`select 1 as x`).columns[0].name]
)
-t('Stream has result as second argument', async() => {
+t('forEach has result as second argument', async() => {
let x
- await sql`select 1 as x`.stream((_, result) => x = result)
+ await sql`select 1 as x`.forEach((_, result) => x = result)
return ['x', x.columns[0].name]
})
@@ -1265,48 +1417,84 @@ t('Insert empty array', async() => {
t('Insert array in sql()', async() => {
await sql`create table tester (ints int[])`
return [
- Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) })} returning *`)[0].ints),
+ Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) }) } returning *`)[0].ints),
true,
await sql`drop table tester`
]
})
t('Automatically creates prepared statements', async() => {
- const sql = postgres({ ...options, no_prepare: false })
+ const sql = postgres(options)
const result = await sql`select * from pg_prepared_statements`
- return [result[0].statement, 'select * from pg_prepared_statements']
+  return [true, result.some(x => x.name === result.statement.name)]
})
-t('no_prepare: true disables prepared transactions (deprecated)', async() => {
+t('no_prepare: true disables prepared statements (deprecated)', async() => {
const sql = postgres({ ...options, no_prepare: true })
const result = await sql`select * from pg_prepared_statements`
- return [0, result.count]
+  return [false, result.some(x => x.name === result.statement.name)]
})
-t('prepare: false disables prepared transactions', async() => {
+t('prepare: false disables prepared statements', async() => {
const sql = postgres({ ...options, prepare: false })
const result = await sql`select * from pg_prepared_statements`
- return [0, result.count]
+  return [false, result.some(x => x.name === result.statement.name)]
})
-t('prepare: true enables prepared transactions', async() => {
+t('prepare: true enables prepared statements', async() => {
const sql = postgres({ ...options, prepare: true })
const result = await sql`select * from pg_prepared_statements`
- return [result[0].statement, 'select * from pg_prepared_statements']
+  return [true, result.some(x => x.name === result.statement.name)]
})
t('prepares unsafe query when "prepare" option is true', async() => {
const sql = postgres({ ...options, prepare: true })
const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'], { prepare: true })
- return [result[0].statement, 'select * from pg_prepared_statements where name <> $1']
+  return [true, result.some(x => x.name === result.statement.name)]
})
t('does not prepare unsafe query by default', async() => {
const sql = postgres({ ...options, prepare: true })
const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'])
- return [0, result.count]
+  return [false, result.some(x => x.name === result.statement.name)]
+})
+
+t('Recreate prepared statements on transformAssignedExpr error', async() => {
+ const insert = () => sql`insert into test (name) values (${ '1' }) returning name`
+ await sql`create table test (name text)`
+ await insert()
+ await sql`alter table test alter column name type int using name::integer`
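+  // the type change invalidates the cached prepared statement; the retried insert should be re-prepared transparently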
+ return [
+ 1,
+ (await insert())[0].name,
+ await sql`drop table test`
+ ]
+})
+
+t('Throws correct error when retrying in transactions', async() => {
+ await sql`create table test(x int)`
+ const error = await sql.begin(sql => sql`insert into test (x) values (${ false })`).catch(e => e)
+ return [
+ error.code,
+ '42804',
+ sql`drop table test`
+ ]
+})
+
+t('Recreate prepared statements on RevalidateCachedQuery error', async() => {
+ const select = () => sql`select name from test`
+ await sql`create table test (name text)`
+ await sql`insert into test values ('1')`
+ await select()
+ await sql`alter table test alter column name type int using name::integer`
+ return [
+ 1,
+ (await select())[0].name,
+ await sql`drop table test`
+ ]
})
+
t('Catches connection config errors', async() => {
const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' })
@@ -1332,22 +1520,24 @@ t('Catches query format errors', async() => [
])
t('Multiple hosts', {
- timeout: 10000
+ timeout: 10
}, async() => {
- const sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout: options.idle_timeout })
+ const s1 = postgres({ idle_timeout })
+ , s2 = postgres({ idle_timeout, port: 5433 })
+ , sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout, max: 1 })
, result = []
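+  // s1 and s2 connect directly to ports 5432/5433 so the test can terminate the multi-host connection's backend externally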
- const a = (await sql`show data_directory`)[0].data_directory
+ const x1 = await sql`select 1`
result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
- cp.execSync('pg_ctl stop -D "' + a + '"')
+ await s1`select pg_terminate_backend(${ x1.state.pid }::int)`
+ await delay(100)
- const b = (await sql`show data_directory`)[0].data_directory
+ const x2 = await sql`select 1`
result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
- cp.execSync('pg_ctl start -D "' + a + '" -w -l "' + a + '/postgresql.log"')
- cp.execSync('pg_ctl stop -D "' + b + '"')
+ await s2`select pg_terminate_backend(${ x2.state.pid }::int)`
+ await delay(100)
result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x)
- cp.execSync('pg_ctl start -o "-p 5433" -D "' + b + '" -w -l "' + b + '/postgresql.log"')
return ['5432,5433,5432', result.join(',')]
})
@@ -1375,7 +1565,7 @@ t('Raw method returns rows as arrays', async() => {
t('Raw method returns values unparsed as Buffer', async() => {
const [[x]] = await sql`select 1`.raw()
return [
- x instanceof Buffer,
+ x instanceof Uint8Array,
true
]
})
@@ -1385,7 +1575,7 @@ t('Copy read works', async() => {
await sql`create table test (x int)`
await sql`insert into test select * from generate_series(1,10)`
- const readable = sql`copy test to stdout`.readable()
+ const readable = await sql`copy test to stdout`.readable()
readable.on('data', x => result.push(x))
await new Promise(r => readable.on('end', r))
@@ -1396,9 +1586,9 @@ t('Copy read works', async() => {
]
})
-t('Copy write works', async() => {
+t('Copy write works', { timeout: 2 }, async() => {
await sql`create table test (x int)`
- const writable = sql`copy test from stdin`.writable()
+ const writable = await sql`copy test from stdin`.writable()
writable.write('1\n')
writable.write('1\n')
@@ -1416,7 +1606,7 @@ t('Copy write works', async() => {
t('Copy write as first works', async() => {
await sql`create table test (x int)`
const first = postgres(options)
- const writable = first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable()
+ const writable = await first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable()
writable.write('1\n')
writable.write('1\n')
writable.end()
@@ -1430,12 +1620,11 @@ t('Copy write as first works', async() => {
]
})
-
t('Copy from file works', async() => {
await sql`create table test (x int, y int, z int)`
- await new Promise(r => fs
- .createReadStream(path.join(__dirname, 'copy.csv'))
- .pipe(sql`copy test from stdin`.writable())
+ await new Promise(async r => fs
+ .createReadStream(rel('copy.csv'))
+ .pipe(await sql`copy test from stdin`.writable())
.on('finish', r)
)
@@ -1449,7 +1638,8 @@ t('Copy from file works', async() => {
t('Copy from works in transaction', async() => {
await sql`create table test(x int)`
const xs = await sql.begin(async sql => {
- sql`copy test from stdin`.writable().end('1\n2')
+ (await sql`copy test from stdin`.writable()).end('1\n2')
+ await delay(20)
return sql`select 1 from test`
})
@@ -1462,54 +1652,29 @@ t('Copy from works in transaction', async() => {
t('Copy from abort works', async() => {
const sql = postgres(options)
- const readable = fs.createReadStream(path.join(__dirname, 'copy.csv'))
+ const readable = fs.createReadStream(rel('copy.csv'))
await sql`create table test (x int, y int, z int)`
await sql`TRUNCATE TABLE test`
- const writable = sql`COPY test FROM STDIN`.writable()
+ const writable = await sql`COPY test FROM STDIN`.writable()
let aborted
readable
.pipe(writable)
- .on('error', () => aborted = true)
+ .on('error', (err) => aborted = err)
writable.destroy(new Error('abort'))
await sql.end()
return [
- aborted,
- true,
+ 'abort',
+ aborted.message,
await postgres(options)`drop table test`
]
})
-t('Recreate prepared statements on transformAssignedExpr error', async() => {
- const insert = () => sql`insert into test (name) values (${ '1' }) returning name`
- await sql`create table test (name text)`
- await insert()
- await sql`alter table test alter column name type int using name::integer`
- return [
- 1,
- (await insert())[0].name,
- await sql`drop table test`
- ]
-})
-
-t('Recreate prepared statements on RevalidateCachedQuery error', async() => {
- const select = () => sql`select name from test`
- await sql`create table test (name text)`
- await sql`insert into test values ('1')`
- await select()
- await sql`alter table test alter column name type int using name::integer`
- return [
- 1,
- (await select())[0].name,
- await sql`drop table test`
- ]
-})
-
t('multiple queries before connect', async() => {
const sql = postgres({ ...options, max: 2 })
const xs = await Promise.all([
@@ -1525,10 +1690,11 @@ t('multiple queries before connect', async() => {
]
})
-t('subscribe', { timeout: 1000 }, async() => {
+t('subscribe', { timeout: 2 }, async() => {
const sql = postgres({
database: 'postgres_js_test',
- publications: 'alltables'
+ publications: 'alltables',
+ fetch_types: false
})
await sql.unsafe('create publication alltables for all tables')
@@ -1557,3 +1723,214 @@ t('subscribe', { timeout: 1000 }, async() => {
await sql.end()
]
})
+
+t('Execute works', async() => {
+ const result = await new Promise((resolve) => {
+ const sql = postgres({ ...options, fetch_types: false, debug:(id, query) => resolve(query) })
+ sql`select 1`.execute()
+ })
+
+ return [result, 'select 1']
+})
+
+t('Cancel running query works', async() => {
+ const query = sql`select pg_sleep(2)`
+ setTimeout(() => query.cancel(), 50)
+ const error = await query.catch(x => x)
+ return ['57014', error.code]
+})
+
+t('Cancel piped query works', async() => {
+ await sql`select 1`
+ const last = sql`select pg_sleep(0.2)`.execute()
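+  // keep one query in flight so the cancel targets a query that is piped behind it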
+ const query = sql`select pg_sleep(2) as dig`
+ setTimeout(() => query.cancel(), 100)
+ const error = await query.catch(x => x)
+ await last
+ return ['57014', error.code]
+})
+
+t('Cancel queued query works', async() => {
+ const tx = sql.begin(sql => sql`select pg_sleep(0.2) as hej, 'hejsa'`)
+ const query = sql`select pg_sleep(2) as nej`
+ setTimeout(() => query.cancel(), 50)
+ const error = await query.catch(x => x)
+ await tx
+ return ['57014', error.code]
+})
+
+t('Fragments', async() => [
+ 1,
+ (await sql`
+ ${ sql`select` } 1 as x
+ `)[0].x
+])
+
+t('Result becomes array', async() => [
+ true,
+ (await sql`select 1`).slice() instanceof Array
+])
+
+t('Describe', async() => {
+ const type = (await sql`select ${ 1 }::int as x`.describe()).types[0]
+ return [23, type]
+})
+
+t('Describe a statement', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`select name, age from tester where name like $1 and age > $2`.describe()
+ return [
+ '25,23/name:25,age:23',
+ `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Describe a statement without parameters', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`select name, age from tester`.describe()
+ return [
+ '0,2',
+ `${ r.types.length },${ r.columns.length }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Describe a statement without columns', async() => {
+ await sql`create table tester (name text, age int)`
+ const r = await sql`insert into tester (name, age) values ($1, $2)`.describe()
+ return [
+ '2,0',
+ `${ r.types.length },${ r.columns.length }`,
+ await sql`drop table tester`
+ ]
+})
+
+t('Large object', async() => {
+ const file = rel('index.js')
+ , md5 = crypto.createHash('md5').update(fs.readFileSync(file)).digest('hex')
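+  // stream this file into a server-side large object, read it back and compare md5 checksums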
+
+ const lo = await sql.largeObject()
+ await new Promise(async r => fs.createReadStream(file).pipe(await lo.writable()).on('finish', r))
+ await lo.seek(0)
+
+ const out = crypto.createHash('md5')
+ await new Promise(r => lo.readable().then(x => x.on('data', x => out.update(x)).on('end', r)))
+
+ return [
+ md5,
+ out.digest('hex'),
+ await lo.close()
+ ]
+})
+
+t('Catches type serialize errors', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: x => x,
+ serialize: () => { throw new Error('watSerialize') }
+ }
+ }
+ })
+
+ return [
+ 'watSerialize',
+ (await sql`select ${ 'wat' }`.catch(e => e.message))
+ ]
+})
+
+t('Catches type parse errors', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: () => { throw new Error('watParse') },
+ serialize: x => x
+ }
+ }
+ })
+
+ return [
+ 'watParse',
+ (await sql`select 'wat'`.catch(e => e.message))
+ ]
+})
+
+t('Catches type serialize errors in transactions', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: x => x,
+ serialize: () => { throw new Error('watSerialize') }
+ }
+ }
+ })
+
+ return [
+ 'watSerialize',
+ (await sql.begin(sql => (
+ sql`select 1`,
+ sql`select ${ 'wat' }`
+ )).catch(e => e.message))
+ ]
+})
+
+t('Catches type parse errors in transactions', async() => {
+ const sql = postgres({
+ idle_timeout,
+ types: {
+ text: {
+ from: 25,
+ to: 25,
+ parse: () => { throw new Error('watParse') },
+ serialize: x => x
+ }
+ }
+ })
+
+ return [
+ 'watParse',
+ (await sql.begin(sql => (
+ sql`select 1`,
+ sql`select 'wat'`
+ )).catch(e => e.message))
+ ]
+})
+
+t('Prevent premature end of connection in transaction', async() => {
+ const sql = postgres({ max_lifetime: 0.1, idle_timeout })
+ const result = await sql.begin(async sql => {
+ await sql`select 1`
+ await delay(200)
+ await sql`select 1`
+ return 'yay'
+ })
+
+ return [
+ 'yay',
+ result
+ ]
+})
+
+t('Ensure reconnect after max_lifetime with transactions', { timeout: 5 }, async() => {
+ const sql = postgres({
+ max_lifetime: 0.01,
+ idle_timeout,
+ max: 1
+ })
+
+ let x = 0
+ while (x++ < 10) await sql.begin(sql => sql`select 1 as x`)
+
+ return [true, true]
+})
diff --git a/tests/test.js b/tests/test.js
index 05583e61..09da8abc 100644
--- a/tests/test.js
+++ b/tests/test.js
@@ -1,22 +1,24 @@
/* eslint no-console: 0 */
-const util = require('util')
+import util from 'util'
let done = 0
let only = false
let ignored = 0
+let failed = false
let promise = Promise.resolve()
const tests = {}
+ , ignore = {}
-module.exports.not = () => ignored++
-module.exports.ot = (...rest) => (only = true, test(true, ...rest))
-
-const t = module.exports.t = (...rest) => test(false, ...rest)
-t.timeout = 500
+export const nt = () => ignored++
+export const ot = (...rest) => (only = true, test(true, ...rest))
+export const t = (...rest) => test(false, ...rest)
+t.timeout = 0.5
async function test(o, name, options, fn) {
typeof options !== 'object' && (fn = options, options = {})
- const line = new Error().stack.split('\n')[3].split(':')[1]
+ const line = new Error().stack.split('\n')[3].match(':([0-9]+):')[1]
+
await 1
if (only && !o)
@@ -25,22 +27,31 @@ async function test(o, name, options, fn) {
tests[line] = { fn, line, name }
promise = promise.then(() => Promise.race([
new Promise((resolve, reject) =>
- fn.timer = setTimeout(() => reject('Timed out'), options.timeout || t.timeout).unref()
+ fn.timer = setTimeout(() => reject('Timed out'), (options.timeout || t.timeout) * 1000)
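+      // timeout options are now given in seconds, hence the * 1000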
),
- fn()
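+      // after the first failure the remaining tests are skipped and counted as ignored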
+ failed
+ ? (ignored++, ignore)
+ : fn()
]))
- .then((x) => {
+ .then(async x => {
+ clearTimeout(fn.timer)
+ if (x === ignore)
+ return
+
if (!Array.isArray(x))
throw new Error('Test should return result array')
- const [expected, got] = x
- if (expected !== got)
- throw new Error(expected + ' != ' + util.inspect(got))
+ const [expected, got] = await Promise.all(x)
+ if (expected !== got) {
+ failed = true
+ throw new Error(util.inspect(expected) + ' != ' + util.inspect(got))
+ }
+
tests[line].succeeded = true
process.stdout.write('✅')
})
.catch(err => {
- tests[line].failed = true
+ tests[line].failed = failed = true
tests[line].error = err instanceof Error ? err : new Error(util.inspect(err))
})
.then(() => {
@@ -48,24 +59,20 @@ async function test(o, name, options, fn) {
})
}
-process.on('exit', exit)
-
-process.on('SIGINT', exit)
-
function exit() {
- process.removeAllListeners('exit')
console.log('')
let success = true
- Object.values(tests).forEach((x) => {
- if (!x.succeeded) {
- success = false
- x.cleanup
- ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup))
- : console.error('⛔️', x.name + ' at line', x.line, x.failed
- ? 'failed'
- : 'never finished', '\n', util.inspect(x.error)
- )
- }
+ Object.values(tests).every((x) => {
+ if (x.succeeded)
+ return true
+
+ success = false
+ x.cleanup
+ ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup))
+ : console.error('⛔️', x.name + ' at line', x.line, x.failed
+ ? 'failed'
+ : 'never finished', x.error ? '\n' + util.inspect(x.error) : ''
+ )
})
only
@@ -78,3 +85,4 @@ function exit() {
!process.exitCode && (!success || only || ignored) && (process.exitCode = 1)
}
+
diff --git a/transpile.cjs b/transpile.cjs
new file mode 100644
index 00000000..3cf80805
--- /dev/null
+++ b/transpile.cjs
@@ -0,0 +1,43 @@
+const fs = require('fs')
+ , path = require('path')
+
+const empty = x => fs.readdirSync(x).forEach(f => fs.unlinkSync(path.join(x, f)))
+ , ensureEmpty = x => !fs.existsSync(x) ? fs.mkdirSync(x) : empty(x)
+ , root = 'cjs'
+ , src = path.join(root, 'src')
+ , tests = path.join(root, 'tests')
+
+!fs.existsSync(root) && fs.mkdirSync(root)
+ensureEmpty(src)
+ensureEmpty(tests)
+
+fs.readdirSync('src').forEach(name =>
+ fs.writeFileSync(
+ path.join(src, name),
+ transpile(fs.readFileSync(path.join('src', name), 'utf8'))
+ )
+)
+
+fs.readdirSync('tests').forEach(name =>
+ fs.writeFileSync(
+ path.join(tests, name),
+ name.endsWith('.js')
+ ? transpile(fs.readFileSync(path.join('tests', name), 'utf8'))
+ : fs.readFileSync(path.join('tests', name), 'utf8')
+ )
+)
+
+fs.writeFileSync(path.join(root, 'package.json'), JSON.stringify({ type: 'commonjs' }))
+
+function transpile(x) {
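+  // naive regex-based ESM -> CommonJS rewrite; it assumes the consistent import/export style used throughout this repo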
+ return x.replace(/export default function ([^(]+)/, 'module.exports = $1;function $1')
+ .replace(/export class ([a-z0-9_$]+)/gi, 'const $1 = module.exports.$1 = class $1')
+ .replace(/export default /, 'module.exports = ')
+ .replace(/export {/g, 'module.exports = {')
+ .replace(/export const ([a-z0-9_$]+)/gi, 'const $1 = module.exports.$1')
+ .replace(/export function ([a-z0-9_$]+)/gi, 'module.exports.$1 = $1;function $1')
+ .replace(/import {([^{}]*?)} from (['"].*?['"])/gi, 'const {$1} = require($2)')
+ .replace(/import (.*?) from (['"].*?['"])/gi, 'const $1 = require($2)')
+ .replace(/import (['"].*?['"])/gi, 'require($1)')
+ .replace('new URL(x, import.meta.url)', 'require("path").join(__dirname, x)')
+}
diff --git a/transpile.deno.js b/transpile.deno.js
new file mode 100644
index 00000000..364c19d4
--- /dev/null
+++ b/transpile.deno.js
@@ -0,0 +1,78 @@
+import fs from 'fs'
+import path from 'path'
+
+const empty = x => fs.readdirSync(x).forEach(f => fs.unlinkSync(path.join(x, f)))
+ , ensureEmpty = x => !fs.existsSync(x) ? fs.mkdirSync(x) : empty(x)
+ , root = 'deno'
+ , src = path.join(root, 'src')
+ , tests = path.join(root, 'tests')
+
+ensureEmpty(src)
+ensureEmpty(tests)
+
+fs.readdirSync('src').forEach(name =>
+ fs.writeFileSync(
+ path.join(src, name),
+ transpile(fs.readFileSync(path.join('src', name), 'utf8'), name, 'src')
+ )
+)
+
+fs.readdirSync('tests').forEach(name =>
+ fs.writeFileSync(
+ path.join(tests, name),
+ name.endsWith('.js')
+ ? transpile(fs.readFileSync(path.join('tests', name), 'utf8'), name, 'tests')
+ : fs.readFileSync(path.join('tests', name), 'utf8')
+ )
+)
+
+fs.writeFileSync(path.join(root, 'package.json'), JSON.stringify({ type: 'commonjs' }))
+
+function transpile(x, name, folder) {
+ if (folder === 'tests') {
+ if (name === 'bootstrap.js') {
+ x = x.replace('export function exec(', 'function ignore(')
+ .replace('async function execAsync(', 'export async function exec(')
+ .replace(/\nexec\(/g, '\nawait exec(')
+ .replace('{ spawnSync }', '{ spawn }')
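+      // net effect: the deno test bootstrap always goes through the async exec built on spawn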
+ }
+
+ if (name === 'index.js') {
+      // Ignore tests that use node stream creation functions not yet supported in deno
+ x = x.replace(/(t\('Copy from file works)/, 'n$1')
+ .replace(/(t\('Copy from abort works)/, 'n$1')
+ .replace(/(t\('Large object)/, 'n$1')
+ }
+ }
+
+ const buffer = x.includes('Buffer')
+ ? 'import { Buffer } from \'https://deno.land/std@0.120.0/node/buffer.ts\'\n'
+ : ''
+
+ const process = x.includes('process.')
+ ? 'import process from \'https://deno.land/std@0.120.0/node/process.ts\'\n'
+ : ''
+
+ const timers = x.includes('setImmediate')
+ ? 'import { setImmediate, clearImmediate } from \'../polyfills.js\'\n'
+ : ''
+
+ const hmac = x.includes('createHmac')
+ ? 'import { HmacSha256 } from \'https://deno.land/std@0.120.0/hash/sha256.ts\'\n'
+ : ''
+
+ return hmac + buffer + process + timers + x
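+    // deno timers have no unref(); track the handle and use Deno.unrefTimer instead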
+ .replace(/setTimeout\((.*)\)\.unref\(\)/g, '(window.timer = setTimeout($1), Deno.unrefTimer(window.timer), window.timer)')
+ .replace(
+ 'crypto.createHmac(\'sha256\', key).update(x).digest()',
+ 'Buffer.from(new HmacSha256(key).update(x).digest())'
+ )
+ .replace(
+ 'query.writable.push({ chunk, callback })',
+ '(query.writable.push({ chunk }), callback())'
+ )
+ .replace(/.setKeepAlive\([^)]+\)/g, '')
+ .replace(/import net from 'net'/, 'import { net } from \'../polyfills.js\'')
+ .replace(/import tls from 'tls'/, 'import { tls } from \'../polyfills.js\'')
+ .replace(/ from '([a-z_]+)'/g, ' from \'https://deno.land/std@0.120.0/node/$1.ts\'')
+}
diff --git a/types/index.d.ts b/types/index.d.ts
index 4f2c2a6d..92ee9e2e 100644
--- a/types/index.d.ts
+++ b/types/index.d.ts
@@ -16,124 +16,223 @@ declare function postgres(url: string, options?:
* Connection options of Postgres.
*/
interface BaseOptions<T extends JSToPostgresTypeMap> {
- /** Postgres ip address or domain name */
+ /** Postgres ip address[s] or domain name[s] */
host: string | string[];
- /** Postgres server port */
+ /** Postgres server[s] port[s] */
port: number | number[];
- /** Name of database to connect to */
+ /** unix socket path (usually '/tmp') */
+ path: string | undefined;
+ /**
+ * Name of database to connect to
+ * @default process.env['PGDATABASE'] || options.user
+ */
database: string;
- /** Username of database user */
+ /**
+ * Username of database user
+ * @default process.env['PGUSERNAME'] || process.env['PGUSER'] || require('os').userInfo().username
+ */
user: string;
- /** True; or options for tls.connect */
- ssl: 'require' | 'prefer' | boolean | object;
- /** Max number of connections */
+ /**
+   * true, allow, prefer, require or tls.connect options
+ * @default false
+ */
+ ssl: 'require' | 'allow' | 'prefer' | boolean | object;
+ /**
+ * Max number of connections
+ * @default 10
+ */
max: number;
- /** Idle connection timeout in seconds */
+ /**
+ * Idle connection timeout in seconds
+ * @default process.env['PGIDLE_TIMEOUT']
+ */
idle_timeout: number | undefined;
- /** Connect timeout in seconds */
+ /**
+ * Connect timeout in seconds
+ * @default process.env['PGCONNECT_TIMEOUT']
+ */
connect_timeout: number;
/** Array of custom types; see more below */
  types: PostgresTypeList<T>;
- /**
- * Disable prepared mode
- * @deprecated use "prepare" option instead
- */
- no_prepare: boolean;
/**
* Enables prepare mode.
* @default true
*/
prepare: boolean;
- /** Defaults to console.log */
+ /**
+ * Called when a notice is received
+ * @default console.log
+ */
onnotice: (notice: postgres.Notice) => void;
- /** (key; value) when server param change */
+  /** (key; value) when a server param changes */
onparameter: (key: string, value: any) => void;
/** Is called with (connection; query; parameters) */
debug: boolean | ((connection: number, query: string, parameters: any[]) => void);
/** Transform hooks */
transform: {
- /** Transforms incoming column names */
- column?: (column: string) => string;
- /** Transforms incoming row values */
- value?: (value: any) => any;
+ /** Transforms incoming and outgoing column names */
+ column?: ((column: string) => string) | {
+ /** SQL to JS */
+ from?: (column: string) => string;
+ /** JS to SQL */
+ to?: (column: string) => string;
+ };
+ /** Transforms incoming and outgoing row values */
+ value?: ((value: any) => any) | {
+ /** SQL to JS */
+ from?: (value: unknown) => any;
+ // /** JS to SQL */
+ // to?: (value: unknown) => any; // unused
+ };
/** Transforms entire rows */
- row?: (row: postgres.Row) => any;
+ row?: ((row: postgres.Row) => any) | {
+ /** SQL to JS */
+ from?: (row: postgres.Row) => any;
+ // /** JS to SQL */
+ // to?: (row: postgres.Row) => any; // unused
+ };
};
/** Connection parameters */
  connection: Partial<ConnectionParameters>;
+ /**
+ * Use 'read-write' with multiple hosts to ensure only connecting to primary
+ * @default process.env['PGTARGETSESSIONATTRS']
+ */
+ target_session_attrs: undefined | 'read-write' | 'read-only' | 'primary' | 'standby' | 'prefer-standby';
+ /**
+ * Automatically fetches types on connect
+ * @default true
+ */
+ fetch_types: boolean;
+ /**
+ * Publications to subscribe to (only relevant when calling `sql.subscribe()`)
+ * @default 'alltables'
+ */
+ publications: string
}
type PostgresTypeList<T> = {
- [name in keyof T]: T[name] extends (...args: any) => unknown
+ [name in keyof T]: T[name] extends (...args: any) => postgres.SerializableParameter
    ? postgres.PostgresType<T[name]>
- : postgres.PostgresType;
+ : postgres.PostgresType<(...args: any) => postgres.SerializableParameter>;
};
interface JSToPostgresTypeMap {
[name: string]: unknown;
}
-declare class PostgresError extends Error {
- name: 'PostgresError';
- severity_local: string;
- severity: string;
- code: string;
- position: string;
- file: string;
- line: string;
- routine: string;
-
- detail?: string;
- hint?: string;
- internal_position?: string;
- internal_query?: string;
- where?: string;
- schema_name?: string;
- table_name?: string;
- column_name?: string;
- data?: string;
- type_name?: string;
- constraint_name?: string;
-
- // Disable user-side creation of PostgresError
- private constructor();
+declare const PRIVATE: unique symbol;
+
+declare class NotAPromise {
+ private [PRIVATE]: never; // prevent user-side interface implementation
+
+ /**
+ * @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
+ * @throws NOT_TAGGED_CALL
+ */
+ private then(): never;
+ /**
+ * @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
+ * @throws NOT_TAGGED_CALL
+ */
+ private catch(): never;
+ /**
+ * @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
+ * @throws NOT_TAGGED_CALL
+ */
+ private finally(): never;
}
type UnwrapPromiseArray<T> = T extends any[] ? {
  [k in keyof T]: T[k] extends Promise<infer R> ? R : T[k]
} : T;
-type PostgresErrorType = typeof PostgresError
-
declare namespace postgres {
- export const PostgresError: PostgresErrorType;
+ class PostgresError extends Error {
+ name: 'PostgresError';
+ severity_local: string;
+ severity: string;
+ code: string;
+ position: string;
+ file: string;
+ line: string;
+ routine: string;
+
+ detail?: string;
+ hint?: string;
+ internal_position?: string;
+ internal_query?: string;
+ where?: string;
+ schema_name?: string;
+ table_name?: string;
+ column_name?: string;
+ data?: string;
+ type_name?: string;
+ constraint_name?: string;
+
+ /** Only set when debug is enabled */
+ query: string;
+ /** Only set when debug is enabled */
+ parameters: any[];
+
+ // Disable user-side creation of PostgresError
+ private constructor();
+ }
/**
- * Convert a string to Pascal case.
- * @param str THe string to convert
- * @returns The new string in Pascal case
+ * Convert a snake_case string to PascalCase.
+   * @param str The snake_case string to convert
+ * @returns The new string in PascalCase
*/
function toPascal(str: string): string;
/**
- * Convert a string to Camel case.
- * @param str THe string to convert
- * @returns The new string in Camel case
+ * Convert a PascalCase string to snake_case.
+   * @param str The PascalCase string to convert
+ * @returns The new string in snake_case
+ */
+ function fromPascal(str: string): string;
+ /**
+ * Convert a snake_case string to camelCase.
+   * @param str The snake_case string to convert
+ * @returns The new string in camelCase
*/
function toCamel(str: string): string;
/**
- * Convert a string to Kebab case.
- * @param str THe string to convert
- * @returns The new string in Kebab case
+ * Convert a camelCase string to snake_case.
+   * @param str The camelCase string to convert
+ * @returns The new string in snake_case
+ */
+ function fromCamel(str: string): string;
+ /**
+ * Convert a snake_case string to kebab-case.
+   * @param str The snake_case string to convert
+ * @returns The new string in kebab-case
*/
function toKebab(str: string): string;
+ /**
+ * Convert a kebab-case string to snake_case.
+   * @param str The kebab-case string to convert
+ * @returns The new string in snake_case
+ */
+ function fromKebab(str: string): string;
const BigInt: PostgresType<(number: bigint) => string>;
+  interface PostgresType<T extends (...args: any) => unknown> {
+ to: number;
+ from: number[];
+ serialize: T;
+ parse: (raw: string) => unknown;
+ }
+
interface ConnectionParameters {
- /** Default application_name */
+ /**
+ * Default application_name
+ * @default 'postgres.js'
+ */
application_name: string;
/** Other connection parameters */
- [name: string]: any;
+ [name: string]: string;
}
  interface Options<T extends JSToPostgresTypeMap> extends Partial<BaseOptions<T>> {
@@ -141,18 +240,31 @@ declare namespace postgres {
host?: string;
/** @inheritdoc */
port?: number;
- /** unix socket path (usually '/tmp') */
- path?: string | (() => string);
+ /** @inheritdoc */
+ path?: string;
/** Password of database user (an alias for `password`) */
    pass?: Options<T>['password'];
- /** Password of database user */
+ /**
+ * Password of database user
+ * @default process.env['PGPASSWORD']
+ */
+  password?: string | (() => string | Promise<string>);
/** Name of database to connect to (an alias for `database`) */
    db?: Options<T>['database'];
- /** Username of database user (an alias for `username`) */
+ /** Username of database user (an alias for `user`) */
    username?: Options<T>['user'];
/** Postgres ip address or domain name (an alias for `host`) */
    hostname?: Options<T>['host'];
+ /**
+ * Disable prepared mode
+ * @deprecated use "prepare" option instead
+ */
+ no_prepare?: boolean;
+ /**
+ * Idle connection timeout in seconds
+ * @deprecated use "idle_timeout" option instead
+ */
+  timeout?: Options<T>['idle_timeout'];
}
  interface ParsedOptions<T extends JSToPostgresTypeMap> extends BaseOptions<T> {
@@ -162,22 +274,35 @@ declare namespace postgres {
port: number[];
/** @inheritdoc */
pass: null;
- serializers: { [oid: number]: T[keyof T] };
- parsers: { [oid: number]: T[keyof T] };
+ /** @inheritdoc */
+ transform: Transform;
+  serializers: Record<number, (...args: any) => SerializableParameter>;
+  parsers: Record<number, (...args: any) => unknown>;
}
- interface Notice {
- [field: string]: string;
+ interface Transform {
+ /** Transforms incoming column names */
+ column: {
+ from: ((column: string) => string) | undefined;
+ to: ((column: string) => string) | undefined;
+ };
+ /** Transforms incoming row values */
+ value: {
+ from: ((value: any) => any) | undefined;
+ to: undefined; // (value: any) => any
+ };
+ /** Transforms entire rows */
+ row: {
+ from: ((row: postgres.Row) => any) | undefined;
+ to: undefined; // (row: postgres.Row) => any
+ };
}
-  interface PostgresType<T extends (...args: any) => any = (...args: any) => any> {
- to: number;
- from: number[];
- serialize: T;
-    parse: (raw: ReturnType<T>) => unknown;
+ interface Notice {
+ [field: string]: string;
}
-  interface Parameter<T = SerializableParameter> {
+  interface Parameter<T = SerializableParameter> extends NotAPromise {
/**
* PostgreSQL OID of the type
*/
@@ -197,7 +322,7 @@ declare namespace postgres {
}
interface ConnectionError extends globalThis.Error {
- code: never
+ code:
| 'CONNECTION_DESTROYED'
| 'CONNECT_TIMEOUT'
| 'CONNECTION_CLOSED'
@@ -209,17 +334,12 @@ declare namespace postgres {
interface NotSupportedError extends globalThis.Error {
code: 'MESSAGE_NOT_SUPPORTED';
- name: never
- | 'CopyInResponse'
- | 'CopyOutResponse'
- | 'ParameterDescription'
- | 'FunctionCallResponse'
- | 'NegotiateProtocolVersion'
- | 'CopyBothResponse';
+ name: string;
}
interface GenericError extends globalThis.Error {
- code: never
+ code:
+ | '57014' // canceling statement due to user request
| 'NOT_TAGGED_CALL'
| 'UNDEFINED_VALUE'
| 'MAX_PARAMETERS_EXCEEDED'
@@ -229,17 +349,7 @@ declare namespace postgres {
interface AuthNotImplementedError extends globalThis.Error {
code: 'AUTH_TYPE_NOT_IMPLEMENTED';
- type: number
- | 'KerberosV5'
- | 'CleartextPassword'
- | 'MD5Password'
- | 'SCMCredential'
- | 'GSS'
- | 'GSSContinue'
- | 'SSPI'
- | 'SASL'
- | 'SASLContinue'
- | 'SASLFinal';
+ type: number | string;
message: string;
}
@@ -250,6 +360,50 @@ declare namespace postgres {
| GenericError
| AuthNotImplementedError;
+ interface ColumnInfo {
+ key: number;
+ name: string;
+ type: number;
+ parser?(raw: string): unknown;
+ atttypmod: number;
+ }
+
+ interface RelationInfo {
+ schema: string;
+ table: string;
+ columns: ColumnInfo[];
+ keys: ColumnInfo[];
+ }
+
+ type ReplicationEvent =
+ | { command: 'insert', relation: RelationInfo }
+ | { command: 'delete', relation: RelationInfo, key: boolean }
+ | { command: 'update', relation: RelationInfo, key: boolean, old: Row | null };
+
+ interface SubscriptionHandle {
+ unsubscribe(): void;
+ }
+
+ interface LargeObject {
+ writable(options?: {
+ highWaterMark?: number,
+ start?: number
+    }): Promise<import('node:stream').Writable>;
+ readable(options?: {
+ highWaterMark?: number,
+ start?: number,
+ end?: number
+    }): Promise<import('node:stream').Readable>;
+
+    close(): Promise<void>;
+    tell(): Promise<void>;
+    read(size: number): Promise<void>;
+    write(buffer: Uint8Array): Promise<[{ data: Uint8Array }]>;
+    truncate(size: number): Promise<void>;
+    seek(offset: number, whence?: number): Promise<void>;
+ size(): Promise<[{ position: bigint, size: bigint }]>;
+ }
+
type Serializable = null
| boolean
| number
@@ -261,7 +415,8 @@ declare namespace postgres {
    | Helper<any>
    | Parameter<any>
| ArrayParameter
- | SerializableParameter[];
+    | Record<string, any> // implicit JSON
+ | readonly SerializableParameter[];
type HelperSerializable = { [index: string]: SerializableParameter } | { [index: string]: SerializableParameter }[];
@@ -277,10 +432,6 @@ declare namespace postgres {
[column: string]: any;
}
-  interface UnlabeledRow<T = unknown> {
- '?column?': T;
- }
-
type MaybeRow = Row | undefined;
type TransformRow = T extends Serializable
@@ -292,20 +443,31 @@ declare namespace postgres {
  interface Column<T extends string> {
name: T;
type: number;
- parser(raw: string): string;
+ parser?(raw: string): unknown;
}
  type ColumnList<T> = (T extends string ? Column<T> : never)[];
interface State {
- state: 'I';
+ status: string;
pid: number;
secret: number;
}
+ interface Statement {
+ /** statement unique name */
+ name: string;
+ /** sql query */
+ string: string;
+ /** parameters types */
+ types: number[];
+    columns: ColumnList<string>;
+ }
+
  interface ResultMeta<T extends number | null> {
count: T; // For tuples
command: string;
+ statement: Statement;
state: State;
}
@@ -314,13 +476,37 @@ declare namespace postgres {
}
  type ExecutionResult<T> = [] & ResultQueryMeta<number, keyof NonNullable<T>>;
-  type RowList<T extends readonly MaybeRow[]> = T & Iterable<NonNullable<T[number]>> & ResultQueryMeta<T['length'], T>;
+  type RawRowList<T extends readonly any[]> = Buffer[][] & Iterable<Buffer[][]> & ResultQueryMeta<number, T>;
+  type RowList<T extends readonly any[]> = T & Iterable<NonNullable<T[number]>> & ResultQueryMeta<T['length'], T>;
+
+  interface PendingQueryModifiers<TRow extends readonly any[]> {
+ readable(): import('node:stream').Readable;
+ writable(): import('node:stream').Writable;
+
+ execute(): this;
+ cancel(): void;
+
+ /**
+ * @deprecated `.stream` has been renamed to `.forEach`
+ * @throws
+ */
+    stream(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): never;
+    forEach(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
+
+    cursor(rows?: number): AsyncIterable<NonNullable<TRow[number]>[]>;
+    cursor(cb: (row: [NonNullable<TRow[number]>]) => void): Promise<ExecutionResult<TRow[number]>>;
+    cursor(rows: number, cb: (rows: NonNullable<TRow[number]>[]) => void): Promise<ExecutionResult<TRow[number]>>;
+ }
-  interface PendingQuery<TRow extends readonly MaybeRow[]> extends Promise<RowList<TRow>> {
-    stream(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
-    cursor(cb: (row: NonNullable<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
-    cursor(size: 1, cb: (row: NonNullable<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
-    cursor(size: number, cb: (rows: NonNullable<TRow[number]>[]) => void): Promise<ExecutionResult<TRow[number]>>;
+  interface PendingDescribeQuery extends Promise<Statement> {
+ }
+
+  interface PendingRawQuery<TRow extends readonly MaybeRow[]> extends Promise<RawRowList<TRow>>, PendingQueryModifiers<TRow> {
+ }
+
+  interface PendingQuery<TRow extends readonly MaybeRow[]> extends Promise<RowList<TRow>>, PendingQueryModifiers<TRow> {
+    describe(): PendingDescribeQuery;
+    raw(): PendingRawQuery<TRow>;
}
  interface PendingRequest extends Promise<[] & ResultMeta<null>> { }
@@ -330,7 +516,7 @@ declare namespace postgres {
    unlisten(): Promise<void>
}
-  interface Helper<T, U extends any[] = T[]> {
+  interface Helper<T, U extends any[] = T[]> extends NotAPromise {
first: T;
rest: U;
}
@@ -343,7 +529,7 @@ declare namespace postgres {
* @param args Interpoled values of the template string
* @returns A promise resolving to the result of your query
*/
-  <T extends any[] = Row[]>(template: TemplateStringsArray, ...args: SerializableParameter[]): PendingQuery<AsRowList<T>>;
+  <T extends readonly any[] = Row[]>(template: TemplateStringsArray, ...args: SerializableParameter[]): PendingQuery<AsRowList<T>>;
/**
* Escape column names
@@ -361,18 +547,10 @@ declare namespace postgres {
*/
  >(objOrArray: T, ...keys: U[]): Helper<T, U>;
- END: {}; // FIXME unique symbol ?
+ CLOSE: {};
+ END: this['CLOSE'];
PostgresError: typeof PostgresError;
-  array<T extends SerializableParameter[] = SerializableParameter[]>(value: T): ArrayParameter<T>;
-  begin<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
-  begin<T>(options: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
-  end(options?: { timeout?: number }): Promise<void>;
-  file<T extends any[] = Row[]>(path: string, options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
-  file<T extends any[] = Row[]>(path: string, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
-  json(value: any): Parameter;
-  listen(channel: string, cb: (value?: string) => void): ListenRequest;
-  notify(channel: string, payload: string): PendingRequest;
  options: ParsedOptions<TTypes>;
parameters: ConnectionParameters;
types: {
@@ -380,22 +558,38 @@ declare namespace postgres {
? (...args: Parameters) => postgres.Parameter>
    : (...args: any) => postgres.Parameter<any>;
};
-  unsafe<T extends any[] = Row[]>(query: string, parameters?: SerializableParameter[], queryOptions?: UnsafeQueryOptions): PendingQuery<AsRowList<T>>;
+
+  unsafe<T extends any[] = (Row & Iterable<Row>)[]>(query: string, parameters?: SerializableParameter[], queryOptions?: UnsafeQueryOptions): PendingQuery<AsRowList<T>>;
+  end(options?: { timeout?: number }): Promise<void>;
+
+ listen(channel: string, cb: (value: string) => void): ListenRequest;
+ notify(channel: string, payload: string): PendingRequest;
+
+  subscribe(event: string, cb: (row: Row | null, info: ReplicationEvent) => void): Promise<SubscriptionHandle>;
+
+  largeObject(oid?: number, /** @default 0x00020000 | 0x00040000 */ mode?: number): Promise<LargeObject>;
+
+  begin<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
+  begin<T>(options: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
+
+  array<T extends SerializableParameter[] = SerializableParameter[]>(value: T, type?: number): ArrayParameter<T>;
+  file<T extends any[] = Row[]>(path: string | Buffer | URL | number, options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
+  file<T extends any[] = Row[]>(path: string | Buffer | URL | number, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
+ json(value: any): Parameter;
+ }
+
+ interface UnsafeQueryOptions {
+ /**
+ * When executes query as prepared statement.
+ * @default false
+ */
+ prepare?: boolean;
}
  interface TransactionSql<TTypes extends JSToPostgresTypeMap> extends Sql<TTypes> {
    savepoint<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
    savepoint<T>(name: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
}
-
-}
-
-interface UnsafeQueryOptions {
- /**
- * When executes query as prepared statement.
- * @default false
- */
- prepare?: boolean;
}
export = postgres;
diff --git a/types/package.json b/types/package.json
new file mode 100644
index 00000000..49a279aa
--- /dev/null
+++ b/types/package.json
@@ -0,0 +1,5 @@
+{
+ "devDependencies": {
+ "@types/node": "^16"
+ }
+}