From 5704fe14ffe7f3991187fa093aa45552e5b9ff1a Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 21 Sep 2021 11:17:23 +0200 Subject: [PATCH 01/51] Rewrite --- .eslintrc.json | 3 +- README.md | 32 +- cjs/package.json | 1 + {lib => cjs/src}/bytes.js | 10 +- cjs/src/connection.js | 980 +++++++++++++++++++ cjs/src/errors.js | 53 + cjs/src/index.js | 540 ++++++++++ cjs/src/query.js | 141 +++ {lib => cjs/src}/queue.js | 13 +- cjs/src/result.js | 16 + {lib => cjs/src}/subscribe.js | 29 +- cjs/src/types.js | 298 ++++++ cjs/tests/bootstrap.js | 29 + cjs/tests/copy.csv | 2 + cjs/tests/index.js | 1737 ++++++++++++++++++++++++++++++++ cjs/tests/select-param.sql | 1 + cjs/tests/select.sql | 1 + cjs/tests/test.js | 88 ++ deno/package.json | 1 + deno/polyfills.js | 162 +++ deno/src/bytes.js | 79 ++ deno/src/connection.js | 983 +++++++++++++++++++ {lib => deno/src}/errors.js | 16 +- deno/src/index.js | 541 ++++++++++ deno/src/query.js | 141 +++ deno/src/queue.js | 31 + deno/src/result.js | 16 + deno/src/subscribe.js | 210 ++++ deno/src/types.js | 299 ++++++ deno/tests/bootstrap.js | 29 + deno/tests/copy.csv | 2 + deno/tests/index.js | 1738 +++++++++++++++++++++++++++++++++ deno/tests/select-param.sql | 1 + deno/tests/select.sql | 1 + deno/tests/test.js | 89 ++ lib/backend.js | 255 ----- lib/connection.js | 472 --------- lib/frontend.js | 249 ----- lib/index.js | 711 -------------- lib/types.js | 204 ---- package.json | 25 +- src/bytes.js | 78 ++ src/connection.js | 980 +++++++++++++++++++ src/errors.js | 53 + src/index.js | 540 ++++++++++ src/query.js | 141 +++ src/queue.js | 31 + src/result.js | 16 + src/subscribe.js | 209 ++++ src/types.js | 298 ++++++ tests/bootstrap.js | 44 +- tests/index.js | 522 ++++++---- tests/test.js | 62 +- transpile.cjs | 43 + transpile.deno.js | 78 ++ 55 files changed, 11165 insertions(+), 2159 deletions(-) create mode 100644 cjs/package.json rename {lib => cjs/src}/bytes.js (86%) create mode 100644 cjs/src/connection.js create mode 100644 
cjs/src/errors.js create mode 100644 cjs/src/index.js create mode 100644 cjs/src/query.js rename {lib => cjs/src}/queue.js (57%) create mode 100644 cjs/src/result.js rename {lib => cjs/src}/subscribe.js (88%) create mode 100644 cjs/src/types.js create mode 100644 cjs/tests/bootstrap.js create mode 100644 cjs/tests/copy.csv create mode 100644 cjs/tests/index.js create mode 100644 cjs/tests/select-param.sql create mode 100644 cjs/tests/select.sql create mode 100644 cjs/tests/test.js create mode 100644 deno/package.json create mode 100644 deno/polyfills.js create mode 100644 deno/src/bytes.js create mode 100644 deno/src/connection.js rename {lib => deno/src}/errors.js (73%) create mode 100644 deno/src/index.js create mode 100644 deno/src/query.js create mode 100644 deno/src/queue.js create mode 100644 deno/src/result.js create mode 100644 deno/src/subscribe.js create mode 100644 deno/src/types.js create mode 100644 deno/tests/bootstrap.js create mode 100644 deno/tests/copy.csv create mode 100644 deno/tests/index.js create mode 100644 deno/tests/select-param.sql create mode 100644 deno/tests/select.sql create mode 100644 deno/tests/test.js delete mode 100644 lib/backend.js delete mode 100644 lib/connection.js delete mode 100644 lib/frontend.js delete mode 100644 lib/index.js delete mode 100644 lib/types.js create mode 100644 src/bytes.js create mode 100644 src/connection.js create mode 100644 src/errors.js create mode 100644 src/index.js create mode 100644 src/query.js create mode 100644 src/queue.js create mode 100644 src/result.js create mode 100644 src/subscribe.js create mode 100644 src/types.js create mode 100644 transpile.cjs create mode 100644 transpile.deno.js diff --git a/.eslintrc.json b/.eslintrc.json index 9fc6ad36..f117263c 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -93,6 +93,7 @@ "Property": true, "VariableDeclarator": true, "ImportDeclaration": true, + "TernaryExpressions": true, "Comments": true } } @@ -221,7 +222,7 @@ ], "max-params": [ 2, - 4 
+ 5 ], "max-statements-per-line": 0, "new-cap": [ diff --git a/README.md b/README.md index ef6b6cc6..cb99ff19 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Fastest full PostgreSQL nodejs client - [🚀 Fastest full featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results) -- 🚯 1250 LOC - 0 dependencies +- 🚯 1850 LOC - 0 dependencies - 🏷 ES6 Tagged Template Strings at the core - 🏄‍♀️ Simple surface API - 💬 Chat on [Gitter](https://gitter.im/porsager/postgres) @@ -71,7 +71,7 @@ const sql = postgres('postgres://username:password@host:port/database', { }, target_session_attrs : null, // Use 'read-write' with multiple hosts to // ensure only connecting to primary - fetch_array_types : true, // Disable automatically fetching array types + fetch_types : true, // Automatically fetches types on connect // on initial connection. }) ``` @@ -96,7 +96,7 @@ Connection uri strings with multiple hosts works like in [`psql multiple host ur Connecting to the specified hosts/ports will be tried in order, and on a successfull connection retries will be reset. This ensures that hosts can come up and down seamless to your application. -If you specify `target_session_attrs: 'read-write'` or `PGTARGETSESSIONATTRS=read-write` Postgres.js will only connect to a writeable host allowing for zero down time failovers. +If you specify `target_session_attrs: 'primary'` or `PGTARGETSESSIONATTRS=primary` Postgres.js will only connect to a the primary host allowing for zero down time failovers. ### Auto fetching of array types @@ -104,7 +104,7 @@ When Postgres.js first connects to the database it automatically fetches array t If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled. -You can disable fetching array types by setting `fetch_array_types` to `false` when creating an instance. +You can disable fetching array types by setting `fetch_types` to `false` when creating an instance. 
### Environment Variables for Options @@ -219,14 +219,14 @@ const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` All the public API is typed. Also, TypeScript support is still in beta. Feel free to open an issue if you have trouble with types. -## Stream ```sql` `.stream(fn) -> Promise``` +## forEach ```sql` `.forEach(fn) -> Promise``` -If you want to handle rows returned by a query one by one, you can use `.stream` which returns a promise that resolves once there are no more rows. +If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. ```js await sql` select created_at, name from events -`.stream(row => { +`.forEach(row => { // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' } }) @@ -242,7 +242,7 @@ Use cursors if you need to throttle the amount of rows being returned from a que await sql` select * from generate_series(1,4) as x -`.cursor(async row => { +`.cursor(async ([row]) => { // row = { x: 1 } await http.request('https://example.com/wat', { row }) }) @@ -306,7 +306,7 @@ sql.notify('news', JSON.stringify({ no: 'this', is: 'news' })) ``` -## Tagged template function ``` sql`` ``` +## Tagged template function ``` sql`` ``` [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) are not just ordinary template literal strings. They allow the function to handle any parameters within before interpolation. This means that they can be used to enforce a safe way of writing queries, which is what Postgres.js does. Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholders `$1, $2, ...` and then sent to the database as a parameter to let it handle any need for escaping / casting. This also means you cannot write dynamic queries or concat queries together by simple string manipulation. 
To enable dynamic queries in a safe way, the `sql` function doubles as a regular function which escapes any value properly. It also includes overloads for common cases of inserting, selecting, updating and querying. @@ -331,7 +331,7 @@ sql` ` // Is translated into this query: -insert into users (name, age) values ($1, $2) +insert into users ("name", "age") values ($1, $2) ``` @@ -356,6 +356,9 @@ sql` sql(users, 'name', 'age') } ` + +// Is translated into this query: +insert into users ("name", "age") values ($1, $2), ($3, $4) ``` #### Update @@ -376,7 +379,7 @@ sql` ` // Is translated into this query: -update users set name = $1 where id = $2 +update users set "name" = $1 where id = $2 ``` #### Select @@ -392,7 +395,7 @@ sql` ` // Is translated into this query: -select name, age from users +select "name", "age" from users ``` #### Dynamic table name @@ -402,11 +405,11 @@ select name, age from users const table = 'users' sql` - select id from ${sql(table)} + select id from ${ sql(table) } ` // Is translated into this query: -select id from users +select id from "users" ``` #### Arrays `sql.array(Array)` @@ -770,6 +773,7 @@ This error is thrown if the startup phase of the connection (tcp, protocol negot Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that supports Postgres.js for migrations: +- https://github.com/porsager/postgres-shift - https://github.com/lukeed/ley ## Thank you diff --git a/cjs/package.json b/cjs/package.json new file mode 100644 index 00000000..0292b995 --- /dev/null +++ b/cjs/package.json @@ -0,0 +1 @@ +{"type":"commonjs"} \ No newline at end of file diff --git a/lib/bytes.js b/cjs/src/bytes.js similarity index 86% rename from lib/bytes.js rename to cjs/src/bytes.js index c4ec3152..38fe13b7 100644 --- a/lib/bytes.js +++ b/cjs/src/bytes.js @@ -1,7 +1,7 @@ const size = 256 let buffer = Buffer.allocUnsafe(size) -const messages = ['B', 'C', 'Q', 'P', 'F', 'p', 'D', 'E', 'H', 'S', 'd', 'c', 
'f'].reduce((acc, x) => { +const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => { const v = x.charCodeAt(0) acc[x] = () => { buffer[0] = v @@ -11,7 +11,8 @@ const messages = ['B', 'C', 'Q', 'P', 'F', 'p', 'D', 'E', 'H', 'S', 'd', 'c', 'f return acc }, {}) -const b = Object.assign(messages, { +const b = Object.assign(reset, messages, { + N: String.fromCharCode(0), i: 0, inc(x) { b.i += x @@ -70,3 +71,8 @@ function fit(x) { prev.copy(buffer) } } + +function reset() { + b.i = 0 + return b +} diff --git a/cjs/src/connection.js b/cjs/src/connection.js new file mode 100644 index 00000000..62be51ed --- /dev/null +++ b/cjs/src/connection.js @@ -0,0 +1,980 @@ +const net = require('net') +const tls = require('tls') +const crypto = require('crypto') +const Stream = require('stream') + +const { Identifier, Builder, handleValue, arrayParser, arraySerializer, CLOSE } = require('./types.js') +const { Errors } = require('./errors.js') +const Result = require('./result.js') +const Queue = require('./queue.js') +const Query = require('./query.js') +const b = require('./bytes.js') + +module.exports = Connection + +let uid = 1 + +const Sync = b().S().end() + , Flush = b().H().end() + , SSLRequest = b().i32(8).i32(80877103).end(8) + , ExecuteUnnamed = Buffer.concat([b().E().str(b.N).i32(0).end(), Sync]) + , DescribeUnnamed = b().D().str('S').str(b.N).end() + , noop = () => { /* noop */ } + +const retryRoutines = new Set([ + 'FetchPreparedStatement', + 'RevalidateCachedQuery', + 'transformAssignedExpr' +]) + +const errorFields = { + 83 : 'severity_local', // S + 86 : 'severity', // V + 67 : 'code', // C + 77 : 'message', // M + 68 : 'detail', // D + 72 : 'hint', // H + 80 : 'position', // P + 112 : 'internal_position', // p + 113 : 'internal_query', // q + 87 : 'where', // W + 115 : 'schema_name', // s + 116 : 'table_name', // t + 99 : 'column_name', // c + 100 : 'data type_name', // d + 110 : 'constraint_name', // n + 70 : 'file', // F + 76 : 'line', // L + 82 : 'routine' // 
R +} + +function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) { + const { + ssl, + user, + host, + port, + database, + parsers, + transform, + onnotice, + onnotify, + onparameter, + max_pipeline, + keep_alive, + backoff, + target_session_attrs + } = options + + const sent = Queue() + , id = uid++ + , backend = { pid: null, secret: null } + , idleTimer = timer(end, options.idle_timeout) + , lifeTimer = timer(end, options.max_lifetime) + , connectTimer = timer(connectTimedOut, options.connect_timeout) + + let socket = createSocket() + , result = new Result() + , incoming = Buffer.alloc(0) + , needsTypes = options.fetch_types + , backendParameters = {} + , statements = {} + , state = 'closed' + , statementId = Math.random().toString(36).slice(2) + , statementCount = 1 + , closedDate = 0 + , remaining = 0 + , hostIndex = 0 + , retries = 0 + , length = 0 + , delay = 0 + , rows = 0 + , serverSignature = null + , nextWriteTimer = null + , incomings = null + , results = null + , initial = null + , ending = null + , stream = null + , chunk = null + , ended = null + , nonce = null + , query = null + , final = null + + const connection = { + get state() { return state }, + set state(x) { + state = x + state === 'open' + ? 
idleTimer.start() + : idleTimer.cancel() + }, + connect(query) { + initial = query + reconnect() + }, + terminate, + execute, + cancel, + end, + count: 0, + id + } + + return connection + + function createSocket() { + const x = net.Socket() + x.setKeepAlive(true, 1000 * keep_alive) + x.on('error', error) + x.on('close', closed) + x.on('drain', drain) + return x + } + + function cancel({ pid, secret }, resolve, reject) { + socket.removeAllListeners() + socket = net.Socket() + socket.on('connect', () => socket.write(b().i32(16).i32(80877102).i32(pid).i32(secret).end(16))) + socket.once('error', reject) + socket.once('close', resolve) + connect() + } + + function execute(q) { + if (q.cancelled) + return + + try { + q.state = backend + query + ? sent.push(q) + : (query = q, query.active = true) + + build(q) + return write(toBuffer(q)) + && !q.describeFirst + && sent.length < max_pipeline + && (!q.options.onexecute || q.options.onexecute(connection)) + } catch (error) { + sent.length === 0 && write(Sync) + errored(error) + return true + } + } + + function toBuffer(q) { + if (q.parameters.length >= 65534) + throw Errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) + + return q.options.simple + ? b().Q().str(q.strings[0] + b.N).end() + : q.describeFirst + ? Buffer.concat([describe(q), Flush]) + : q.prepare + ? q.prepared + ? prepared(q) + : Buffer.concat([describe(q), prepared(q)]) + : unnamed(q) + } + + function describe(q) { + return Buffer.concat([ + Parse(q.statement.string, q.parameters, q.statement.types, q.statement.name), + Describe('S', q.statement.name) + ]) + } + + function prepared(q) { + return Buffer.concat([ + Bind(q.parameters, q.statement.types, q.statement.name, q.cursorName), + q.cursorFn + ? 
Execute('', q.cursorRows) + : ExecuteUnnamed + ]) + } + + function unnamed(q) { + return Buffer.concat([ + Parse(q.statement.string, q.parameters, q.statement.types), + DescribeUnnamed, + prepared(q) + ]) + } + + function build(q) { + const parameters = [] + , types = [] + + const string = stringify(q, q.strings[0], q.args[0], parameters, types) + + !q.tagged && q.args.forEach(x => handleValue(x, parameters, types)) + + q.prepare = options.prepare && ('prepare' in q.options ? q.options.prepare : true) + q.string = string + q.signature = q.prepare && types + string + q.onlyDescribe && (delete statements[q.signature]) + q.parameters = q.parameters || parameters + q.prepared = q.prepare && q.signature in statements + q.describeFirst = q.onlyDescribe || (parameters.length && !q.prepared) + q.statement = q.prepared + ? statements[q.signature] + : { string, types, name: q.prepare ? statementId + statementCount++ : '' } + + typeof options.debug === 'function' && options.debug(id, string, parameters, types) + } + + function stringify(q, string, value, parameters, types) { + for (let i = 1; i < q.strings.length; i++) { + string += ( + value instanceof Query ? fragment(string, value, parameters, types) : + value instanceof Identifier ? value.value : + value instanceof Builder ? value.build(string, parameters, types, options.transform) : + handleValue(value, parameters, types) + ) + q.strings[i] + value = q.args[i] + } + + return string + } + + function fragment(string, q, parameters, types) { + q.fragment = true + return stringify(q, string + q.strings[0], q.args[0], parameters, types) + } + + function write(x, fn) { + chunk = chunk ? 
Buffer.concat([chunk, x]) : Buffer.from(x) + if (fn || chunk.length >= 1024) + return nextWrite(fn) + nextWriteTimer === null && (nextWriteTimer = setImmediate(nextWrite)) + return true + } + + function nextWrite(fn) { + const x = socket.write(chunk, fn) + nextWriteTimer !== null && clearImmediate(nextWriteTimer) + chunk = nextWriteTimer = null + return x + } + + function connectTimedOut() { + errored(Errors.connection('CONNECT_TIMEOUT', options, socket)) + socket.destroy() + } + + async function secure() { + write(SSLRequest) + const canSSL = await new Promise(r => socket.once('data', x => r(x[0] === 83))) // S + + if (!canSSL && ssl === 'prefer') + return connected() + + socket.removeAllListeners() + socket = tls.connect({ + socket, + ...(ssl === 'require' || ssl === 'allow' || ssl === 'prefer' + ? { rejectUnauthorized: false } + : ssl + ) + }) + socket.on('secureConnect', connected) + socket.on('error', error) + socket.on('close', closed) + socket.on('drain', drain) + } + + /* c8 ignore next 3 */ + function drain() { + ondrain(connection) + } + + function data(x) { + if (incomings) { + incomings.push(x) + remaining -= x.length + if (remaining >= 0) + return + } + + incoming = incomings + ? Buffer.concat(incomings, length - remaining) + : incoming.length === 0 + ? x + : Buffer.concat([incoming, x], incoming.length + x.length) + + while (incoming.length > 4) { + length = incoming.readUInt32BE(1) + if (length >= incoming.length) { + remaining = length - incoming.length + incomings = [incoming] + break + } + + handle(incoming.slice(0, length + 1)) + incoming = incoming.slice(length + 1) + remaining = 0 + incomings = null + } + } + + function connect() { + backendParameters = {} + connectTimer.start() + socket.on('connect', ssl ? 
secure : connected) + + if (options.path) + return socket.connect(options.path) + + socket.connect(port[hostIndex], host[hostIndex]) + hostIndex = (hostIndex + 1) % port.length + } + + function reconnect() { + setTimeout(connect, closedDate ? closedDate + delay - Date.now() : 0) + } + + function connected() { + try { + statements = {} + needsTypes = options.fetch_types + statementId = Math.random().toString(36).slice(2) + statementCount = 1 + lifeTimer.start() + socket.on('data', data) + const s = StartupMessage() + write(s) + } catch (err) { + error(err) + } + } + + function error(err) { + if (connection.state === 'connecting' && options.host[retries + 1]) + return + + errored(err) + while (sent.length) + queryError(sent.shift(), err) + } + + function errored(err) { + query && queryError(query, err) + initial && (queryError(initial, err), initial = null) + } + + function queryError(query, err) { + if (err.query) + return + + err.stack += query.origin.replace(/.*\n/, '\n') + Object.defineProperties(err, { + query: { value: query.string, enumerable: options.debug }, + parameters: { value: query.parameters, enumerable: options.debug }, + args: { value: query.args, enumerable: options.debug }, + types: { value: query.statement && query.statement.types, enumerable: options.debug } + }) + query.reject(err) + } + + function end() { + return ending || ( + !connection.reserved && onend(connection), + !connection.reserved && !initial && !query && sent.length === 0 + ? 
Promise.resolve(terminate()) + : ending = new Promise(r => ended = r) + ) + } + + function terminate() { + if (query || initial || sent.length) + error(Errors.connection('CONNECTION_DESTROYED', options)) + + clearImmediate(nextWriteTimer) + socket.removeListener('data', data) + socket.removeListener('connect', connected) + socket.readyState !== 'closed' && socket.end(b().X().end()) + ended && (ended(), ending = ended = null) + } + + function closed(hadError) { + clearImmediate(nextWriteTimer) + socket.removeListener('data', data) + socket.removeListener('connect', connected) + idleTimer.cancel() + lifeTimer.cancel() + connectTimer.cancel() + + if (socket.encrypted) { + socket.removeAllListeners() + socket = createSocket() + } + + if (initial) + return reconnect() + + !hadError && (query || sent.length) && error(Errors.connection('CONNECTION_CLOSED', options, socket)) + closedDate = Date.now() + hadError && options.shared.retries++ + delay = (typeof backoff === 'function' ? backoff(options.shared.retries) : backoff) * 1000 + onclose(connection) + } + + /* Handlers */ + function handle(xs, x = xs[0]) { + ( + x === 68 ? DataRow : // D + x === 100 ? CopyData : // d + x === 65 ? NotificationResponse : // A + x === 83 ? ParameterStatus : // S + x === 90 ? ReadyForQuery : // Z + x === 67 ? CommandComplete : // C + x === 50 ? BindComplete : // 2 + x === 49 ? ParseComplete : // 1 + x === 116 ? ParameterDescription : // t + x === 84 ? RowDescription : // T + x === 82 ? Authentication : // R + x === 110 ? NoData : // n + x === 75 ? BackendKeyData : // K + x === 69 ? ErrorResponse : // E + x === 115 ? PortalSuspended : // s + x === 51 ? CloseComplete : // 3 + x === 71 ? CopyInResponse : // G + x === 78 ? NoticeResponse : // N + x === 72 ? CopyOutResponse : // H + x === 99 ? CopyDone : // c + x === 73 ? EmptyQueryResponse : // I + x === 86 ? FunctionCallResponse : // V + x === 118 ? NegotiateProtocolVersion : // v + x === 87 ? 
CopyBothResponse : // W + /* c8 ignore next */ + UnknownMessage + )(xs) + } + + function DataRow(x) { + let index = 7 + let length + let column + let value + + const row = query.isRaw ? new Array(query.statement.columns.length) : {} + for (let i = 0; i < query.statement.columns.length; i++) { + column = query.statement.columns[i] + length = x.readInt32BE(index) + index += 4 + + value = length === -1 + ? null + : query.isRaw + ? x.slice(index, index += length) + : column.parser === undefined + ? x.toString('utf8', index, index += length) + : column.parser.array === true + ? column.parser(x.toString('utf8', index + 1, index += length)) + : column.parser(x.toString('utf8', index, index += length)) + + query.isRaw + ? (row[i] = value) + : (row[column.name] = transform.value.from ? transform.value.from(value) : value) + } + + query.forEachFn + ? query.forEachFn(transform.row.from ? transform.row.from(row) : row, result) + : (result[rows++] = transform.row.from ? transform.row.from(row) : row) + } + + function ParameterStatus(x) { + const [k, v] = x.toString('utf8', 5, x.length - 1).split(b.N) + backendParameters[k] = v + if (options.parameters[k] !== v) { + options.parameters[k] = v + onparameter && onparameter(k, v) + } + } + + function ReadyForQuery() { + query && query.options.simple && query.resolve(results || result) + query = results = null + result = new Result() + connectTimer.cancel() + + if (initial) { + if (target_session_attrs) { + if (!backendParameters.in_hot_standby || !backendParameters.default_transaction_read_only) + return fetchState() + else if (tryNext(target_session_attrs, backendParameters)) + return terminate() + } + + if (needsTypes) + return fetchArrayTypes() + + execute(initial) + options.shared.retries = retries = initial = 0 + return + } + + while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled) + Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject) + + if (query) // 
Consider opening if able and sent.length < 50 + return + + ending + ? terminate() + : onopen(connection) + + } + + function CommandComplete(x) { + rows = 0 + + for (let i = x.length - 1; i > 0; i--) { + if (x[i] === 32 && x[i + 1] < 58 && result.count === null) + result.count = +x.toString('utf8', i + 1, x.length - 1) + if (x[i - 1] >= 65) { + result.command = x.toString('utf8', 5, i) + result.state = backend + break + } + } + + final && (final(), final = null) + + if (query.options.simple) + return + + if (query.cursorFn) { + result.count && query.cursorFn(result) + write(Sync) + } + + query.resolve(result) + } + + function ParseComplete() { + query.parsing = false + } + + function BindComplete() { + !result.statement && (result.statement = query.statement) + result.columns = query.statement.columns + } + + function ParameterDescription(x) { + const length = x.readUInt16BE(5) + + for (let i = 0; i < length; ++i) + !query.statement.types[i] && (query.statement.types[i] = x.readUInt32BE(7 + i * 4)) + + query.prepare && (statements[query.signature] = query.statement) + query.describeFirst && !query.onlyDescribe && (write(prepared(query)), query.describeFirst = false) + } + + function RowDescription(x) { + if (result.command) { + results = results || [result] + results.push(result = new Result()) + result.count = null + query.statement.columns = null + } + + const length = x.readUInt16BE(5) + let index = 7 + let start + + query.statement.columns = Array(length) + + for (let i = 0; i < length; ++i) { + start = index + while (x[index++] !== 0); + const type = x.readUInt32BE(index + 6) + query.statement.columns[i] = { + name: transform.column.from + ? 
transform.column.from(x.toString('utf8', start, index - 1)) + : x.toString('utf8', start, index - 1), + parser: parsers[type], + type + } + index += 18 + } + + result.statement = query.statement + if (query.onlyDescribe) + return (query.resolve(query.statement), write(Sync)) + } + + async function Authentication(x, type = x.readUInt32BE(5)) { + ( + type === 3 ? AuthenticationCleartextPassword : + type === 5 ? AuthenticationMD5Password : + type === 10 ? SASL : + type === 11 ? SASLContinue : + type === 12 ? SASLFinal : + type !== 0 ? UnknownAuth : + noop + )(x, type) + } + + /* c8 ignore next 5 */ + async function AuthenticationCleartextPassword() { + write( + b().p().str(await Pass()).z(1).end() + ) + } + + async function AuthenticationMD5Password(x) { + write( + b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end() + ) + } + + function SASL() { + b().p().str('SCRAM-SHA-256' + b.N) + const i = b.i + nonce = crypto.randomBytes(18).toString('base64') + write(b.inc(4).str('n,,n=*,r=' + nonce).i32(b.i - i - 4, i).end()) + } + + async function SASLContinue(x) { + const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {}) + + const saltedPassword = crypto.pbkdf2Sync( + await Pass(), + Buffer.from(res.s, 'base64'), + parseInt(res.i), 32, + 'sha256' + ) + + const clientKey = hmac(saltedPassword, 'Client Key') + + const auth = 'n=*,r=' + nonce + ',' + + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i + + ',c=biws,r=' + res.r + + serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64') + + write( + b().p().str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')).end() + ) + } + + function SASLFinal(x) { + if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature) + return + /* c8 ignore next 5 */ + errored(Errors.generic({ + message: 'The server did not return the correct signature', + code: 
'SASL_SIGNATURE_MISMATCH' + })) + socket.destroy() + } + + function Pass() { + return Promise.resolve(typeof options.pass === 'function' + ? options.pass() + : options.pass + ) + } + + function NoData() { + result.statement = query.statement + result.statement.columns = [] + if (query.onlyDescribe) + return (query.resolve(query.statement), write(Sync)) + } + + function BackendKeyData(x) { + backend.pid = x.readUInt32BE(5) + backend.secret = x.readUInt32BE(9) + } + + async function fetchArrayTypes() { + needsTypes = false + const types = await new Query([` + select b.oid, b.typarray + from pg_catalog.pg_type a + left join pg_catalog.pg_type b on b.oid = a.typelem + where a.typcategory = 'A' + group by b.oid, b.typarray + order by b.oid + `], [], execute) + types.forEach(({ oid, typarray }) => addArrayType(oid, typarray)) + } + + function addArrayType(oid, typarray) { + const parser = options.parsers[oid] + options.shared.typeArrayMap[oid] = typarray + options.parsers[typarray] = (xs) => arrayParser(xs, parser) + options.parsers[typarray].array = true + options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid]) + } + + function tryNext(x, xs) { + return ( + (x === 'read-write' && xs.default_transaction_read_only === 'on') || + (x === 'read-only' && xs.default_transaction_read_only === 'off') || + (x === 'primary' && xs.in_hot_standby === 'off') || + (x === 'standby' && xs.in_hot_standby === 'on') || + (x === 'prefer-standby' && xs.in_hot_standby === 'off' && options.host[retries]) + ) + } + + function fetchState() { + const query = new Query([` + show transaction_read_only; + select pg_catalog.pg_is_in_recovery() + `], [], execute, null, { simple: true }) + query.resolve = ([[a], [b]]) => { + backendParameters.default_transaction_read_only = a.transaction_read_only + backendParameters.in_hot_standby = b.pg_is_in_recovery ? 
'on' : 'off' + } + query.execute() + } + + function ErrorResponse(x) { + query && (query.cursorFn || query.describeFirst) && write(Sync) + const error = Errors.postgres(parseError(x)) + query && !query.retried && retryRoutines.has(error.routine) + ? retry(query) + : errored(error) + } + + function retry(q) { + delete statements[q.signature] + q.retried = true + execute(q) + } + + function NotificationResponse(x) { + if (!onnotify) + return + + let index = 9 + while (x[index++] !== 0); + onnotify( + x.toString('utf8', 9, index - 1), + x.toString('utf8', index, x.length - 1) + ) + } + + async function PortalSuspended() { + try { + const x = await Promise.resolve(query.cursorFn(result)) + rows = 0 + x === CLOSE + ? write(Close(query.portal)) + : (result = new Result(), write(Execute('', query.cursorRows))) + } catch (err) { + write(Sync) + query.reject(err) + } + } + + function CloseComplete() { + result.count && query.cursorFn(result) + query.resolve(result) + } + + function CopyInResponse() { + stream = new Stream.Writable({ + write(chunk, encoding, callback) { + socket.write(b().d().raw(chunk).end(), callback) + }, + destroy(error, callback) { + callback(error) + socket.write(b().f().str(error + b.N).end()) + }, + final(callback) { + socket.write(b().c().end()) + final = callback + } + }) + query.resolve(stream) + } + + function CopyOutResponse() { + stream = new Stream.Readable({ + read() { socket.resume() } + }) + query.resolve(stream) + } + + /* c8 ignore next 3 */ + function CopyBothResponse() { + stream = new Stream.Readable({ + read() { socket.resume() }, + /* c8 ignore next 11 */ + write(chunk, encoding, callback) { + socket.write(b().d().raw(chunk).end(), callback) + }, + destroy(error, callback) { + callback(error) + socket.write(b().f().str(error + b.N).end()) + }, + final(callback) { + socket.write(b().c().end()) + final = callback + } + }) + query.resolve(stream) + } + + function CopyData(x) { + stream.push(x.slice(5)) || socket.pause() + } + + function 
CopyDone() { + stream.push(null) + } + + function NoticeResponse(x) { + onnotice + ? onnotice(parseError(x)) + : console.log(parseError(x)) // eslint-disable-line + + } + + /* c8 ignore next 3 */ + function EmptyQueryResponse() { + + } + + /* c8 ignore next 3 */ + function FunctionCallResponse() { + errored(Errors.notSupported('FunctionCallResponse')) + } + + /* c8 ignore next 3 */ + function NegotiateProtocolVersion() { + errored(Errors.notSupported('NegotiateProtocolVersion')) + } + + /* c8 ignore next 3 */ + function UnknownMessage(x) { + console.error('Unknown message', x) + } + + /* c8 ignore next 3 */ + function UnknownAuth(x, type) { + console.error('Unknown auth', type) + } + + /* Messages */ + function Bind(parameters, types, statement = '', portal = '') { + let prev + , type + + b().B().str(portal + b.N).str(statement + b.N).i16(0).i16(parameters.length) + + parameters.forEach((x, i) => { + if (x === null) + return b.i32(0xFFFFFFFF) + + type = types[i] + parameters[i] = x = type in options.serializers + ? 
options.serializers[type](x) + : '' + x + + prev = b.i + b.inc(4).str(x).i32(b.i - prev - 4, prev) + }) + + b.i16(0) + + return b.end() + } + + function Parse(str, parameters, types, name = '') { + b().P().str(name + b.N).str(str + b.N).i16(parameters.length) + parameters.forEach((x, i) => b.i32(types[i] || 0)) + return b.end() + } + + function Describe(x, name = '') { + return b().D().str(x).str(name + b.N).end() + } + + function Execute(portal = '', rows = 0) { + return Buffer.concat([ + b().E().str(portal + b.N).i32(rows).end(), + Flush + ]) + } + + function Close(portal = '') { + return Buffer.concat([ + b().C().str('P').str(portal + b.N).end(), + b().S().end() + ]) + } + + function StartupMessage() { + return b().inc(4).i16(3).z(2).str( + Object.entries(Object.assign({ + user, + database, + client_encoding: '\'utf-8\'' + }, + options.connection + )).filter(([, v]) => v).map(([k, v]) => k + b.N + v).join(b.N) + ).z(2).end(0) + } + +} + +function parseError(x) { + const error = {} + let start = 5 + for (let i = 5; i < x.length - 1; i++) { + if (x[i] === 0) { + error[errorFields[x[start]]] = x.toString('utf8', start + 1, i) + start = i + 1 + } + } + return error +} + +function md5(x) { + return crypto.createHash('md5').update(x).digest('hex') +} + +function hmac(key, x) { + return crypto.createHmac('sha256', key).update(x).digest() +} + +function sha256(x) { + return crypto.createHash('sha256').update(x).digest() +} + +function xor(a, b) { + const length = Math.max(a.length, b.length) + const buffer = Buffer.allocUnsafe(length) + for (let i = 0; i < length; i++) + buffer[i] = a[i] ^ b[i] + return buffer +} + +function timer(fn, seconds) { + seconds = typeof seconds === 'function' ? 
seconds() : seconds + if (!seconds) + return { cancel: noop, start: noop } + + let timer + return { + cancel() { + timer && (clearTimeout(timer), timer = null) + }, + start() { + timer && clearTimeout(timer) + timer = setTimeout(done, seconds * 1000, arguments).unref() + } + } + + function done(args) { + fn.apply(null, args) + timer = null + } +} diff --git a/cjs/src/errors.js b/cjs/src/errors.js new file mode 100644 index 00000000..0de66340 --- /dev/null +++ b/cjs/src/errors.js @@ -0,0 +1,53 @@ +class PostgresError extends Error { + constructor(x) { + super(x.message) + this.name = this.constructor.name + Object.assign(this, x) + } +};module.exports.PostgresError = PostgresError + +const Errors = module.exports.Errors = { + connection, + postgres, + generic, + notSupported +} + +function connection(x, options, socket) { + const { host, port } = socket || options + const error = Object.assign( + new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))), + { + code: x, + errno: x, + address: options.path || host + }, options.path ? 
{} : { port: port } + ) + Error.captureStackTrace(error, connection) + return error +} + +function postgres(x) { + const error = new PostgresError(x) + Error.captureStackTrace(error, postgres) + return error +} + +function generic(x) { + const error = Object.assign(new Error(x.message), x) + Error.captureStackTrace(error, generic) + return error +} + +/* c8 ignore next 10 */ +function notSupported(x) { + const error = Object.assign( + new Error(x + ' (B) is not supported'), + { + code: 'MESSAGE_NOT_SUPPORTED', + name: x + } + ) + Error.captureStackTrace(error, notSupported) + return error +} diff --git a/cjs/src/index.js b/cjs/src/index.js new file mode 100644 index 00000000..c4e945f0 --- /dev/null +++ b/cjs/src/index.js @@ -0,0 +1,540 @@ +const os = require('os') +const fs = require('fs') +const Stream = require('stream') + +const { + mergeUserTypes, + inferType, + Parameter, + Identifier, + Builder, + toPascal, + toCamel, + toKebab, + fromPascal, + fromCamel, + fromKebab, + CLOSE +} = require('./types.js') + +const Connection = require('./connection.js') +const Query = require('./query.js') +const Queue = require('./queue.js') +const { Errors, PostgresError } = require('./errors.js') +const Subscribe = require('./subscribe.js') + +Object.assign(Postgres, { + PostgresError, + toPascal, + toCamel, + toKebab, + fromPascal, + fromCamel, + fromKebab, + BigInt +}) + +module.exports = Postgres + +function Postgres(a, b) { + const options = parseOptions(a, b) + , subscribe = Subscribe(Postgres, { ...options }) + + let ending = false + + const queries = Queue() + , connections = [...Array(options.max)].map(() => Connection(options, { onopen, onend, ondrain, onclose })) + , closed = Queue(connections) + , reserved = Queue() + , open = Queue() + , busy = Queue() + , full = Queue() + , ended = Queue() + , connecting = Queue() + , queues = { closed, ended, connecting, reserved, open, busy, full } + + const sql = Sql(handler) + + Object.assign(sql, { + get parameters() { 
return options.parameters }, + largeObject, + subscribe, + CLOSE, + END: CLOSE, + PostgresError, + options, + listen, + notify, + begin, + end + }) + + return sql + + function Sql(handler, instant) { + handler.debug = options.debug + + Object.assign(sql, { + types: Object.entries(options.types).reduce((acc, [name, type]) => { + acc[name] = (x) => new Parameter(x, type.to) + return acc + }, {}), + unsafe, + array, + json, + file + }) + + return sql + + function sql(strings, ...args) { + const query = strings && Array.isArray(strings.raw) + ? new Query(strings, args, handler, cancel) + : typeof strings === 'string' && !args.length + ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings) + : new Builder(strings, args) + instant && query instanceof Query && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + + function unsafe(string, args = [], options = {}) { + arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) + const query = new Query([string], args, handler, cancel, { + prepare: false, + ...options, + simple: 'simple' in options ? options.simple : args.length === 0 + }) + instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + + function file(path, args = [], options = { cache: true }) { + arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) + const query = new Query([], args, (query) => { + fs.readFile(path, 'utf8', (err, string) => { + if (err) + return query.reject(err) + + query.strings = [string] + handler(query) + }) + }, cancel, { + ...options, + simple: 'simple' in options ? 
options.simple : args.length === 0 + }) + instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + } + + async function listen(name, fn) { + const sql = listen.sql || (listen.sql = Postgres({ + ...options, + max: 1, + idle_timeout: null, + max_lifetime: null, + fetch_types: false, + onclose() { + Object.entries(listen.channels).forEach(([channel, { listeners }]) => { + delete listen.channels[channel] + Promise.all(listeners.map(fn => listen(channel, fn).catch(() => { /* noop */ }))) + }) + }, + onnotify(c, x) { + c in listen.channels && listen.channels[c].listeners.forEach(fn => fn(x)) + } + })) + + const channels = listen.channels || (listen.channels = {}) + , exists = name in channels + , channel = exists ? channels[name] : (channels[name] = { listeners: [fn] }) + + if (exists) { + channel.listeners.push(fn) + return Promise.resolve({ ...channel.result, unlisten }) + } + + channel.result = await sql`listen ${ sql(name) }` + channel.result.unlisten = unlisten + + return channel.result + + async function unlisten() { + if (name in channels === false) + return + + channel.listeners = channel.listeners.filter(x => x !== fn) + if (channels[name].listeners.length) + return + + delete channels[name] + return sql`unlisten ${ sql(name) }` + } + } + + async function notify(channel, payload) { + return await sql`select pg_notify(${ channel }, ${ '' + payload })` + } + + async function begin(options, fn) { + !fn && (fn = options, options = '') + return new Promise(async(resolve, reject) => { + await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).catch(reject) + + function onexecute(c) { + const queries = Queue() + let savepoints = 0 + + queues[c.state].remove(c) + c.state = 'reserved' + c.reserved = () => queries.length && handler(queries.shift()) + reserved.push(c) + + const sql = Sql(handler, true) + sql.savepoint = savepoint + + start() + + return false + + async function start() { + try { + const xs 
= fn(sql) + const result = await (Array.isArray(xs) ? Promise.all(xs) : xs) + await sql`commit` + resolve(result) + } catch (error) { + await sql`rollback`.catch(reject) + reject(error) + } + c.reserved = null + onopen(c) + } + + async function savepoint(name, fn) { + if (name && Array.isArray(name.raw)) + return savepoint(sql => sql.apply(sql, arguments)) + + try { + arguments.length === 1 && (fn = name, name = null) + name = 's' + savepoints++ + (name ? '_' + name : '') + await sql`savepoint ${ sql(name) }` + } catch (err) { + reject(err) + } + try { + return await Promise.resolve(fn(sql)) + } catch (err) { + await sql`rollback to ${ sql(name) }` + throw err + } + } + + function handler(query) { + c.state === 'full' + ? queries.push(query) + : c.execute(query) + } + } + }) + } + + function largeObject(oid, mode = 0x00020000 | 0x00040000) { + return new Promise(async(resolve, reject) => { + await sql.begin(async sql => { + let finish + !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`) + const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd` + + const lo = { + writable, + readable, + close : () => sql`select lo_close(${ fd })`.then(finish), + tell : () => sql`select lo_tell64(${ fd })`, + read : (x) => sql`select loread(${ fd }, ${ x }) as data`, + write : (x) => sql`select lowrite(${ fd }, ${ x })`, + truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`, + seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`, + size : () => sql` + select + lo_lseek64(${ fd }, location, 0) as position, + seek.size + from ( + select + lo_lseek64($1, 0, 2) as size, + tell.location + from (select lo_tell64($1) as location) tell + ) seek + ` + } + + resolve(lo) + + return new Promise(async r => finish = r) + + async function readable({ + highWaterMark = 2048 * 8, + start = 0, + end = Infinity + } = {}) { + let max = end - start + start && await lo.seek(start) + return new Stream.Readable({ + highWaterMark, + async read(size) { 
+ const l = size > max ? size - max : size + max -= size + const [{ data }] = await lo.read(l) + this.push(data) + if (data.length < size) + this.push(null) + } + }) + } + + async function writable({ + highWaterMark = 2048 * 8, + start = 0 + } = {}) { + start && await lo.seek(start) + return new Stream.Writable({ + highWaterMark, + write(chunk, encoding, callback) { + lo.write(chunk).then(() => callback(), callback) + } + }) + } + }).catch(reject) + }) + } + + function json(x) { + return new Parameter(x, 114) + } + + function array(x, type) { + if (!Array.isArray(x)) + return array(Array.from(arguments)) + + return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap) + } + + function handler(query) { + if (ending) + return query.reject(Errors.connection('CONNECTION_ENDED', options, options)) + + if (open.length) + return go(open, query) + + if (closed.length) + return connect(closed.shift(), query) + + busy.length + ? go(busy, query) + : queries.push(query) + } + + function go(xs, query) { + const c = xs.shift() + return c.execute(query) + ? (c.state = 'busy', busy.push(c)) + : (c.state = 'full', full.push(c)) + } + + function cancel(query) { + return new Promise((resolve, reject) => { + query.state + ? query.active + ? Connection(options, {}).cancel(query.state, resolve, reject) + : query.cancelled = { resolve, reject } + : ( + queries.remove(query), + query.cancelled = true, + query.reject(Errors.generic({ code: '57014', message: 'canceling statement due to user request' })), + resolve() + ) + }) + } + + function end({ timeout = null } = {}) { + if (ending) + return ending + + let timer + return ending = Promise.race([ + new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))), + Promise.all(connections.map(c => c.end()).concat( + listen.sql ? listen.sql.end({ timeout: 0 }) : [], + subscribe.sql ? 
subscribe.sql.end({ timeout: 0 }) : [] + )) + ]).then(() => clearTimeout(timer)) + } + + async function destroy(resolve) { + await Promise.all(connections.map(c => c.terminate())) + while (queries.length) + queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options)) + resolve() + } + + function connect(c, query) { + c.state = 'connecting' + connecting.push(c) + c.connect(query) + } + + function onend(c) { + queues[c.state].remove(c) + c.state = 'ended' + ended.push(c) + } + + function onopen(c) { + queues[c.state].remove(c) + + if (c.reserved) { + c.state = 'reserved' + c.reserved() + reserved.push(c) + return + } + + if (queries.length === 0) + return (c.state = 'open', open.push(c)) + + let max = Math.ceil(queries.length / (connecting.length + 1)) + , ready = true + + while (ready && queries.length && max-- > 0) + ready = c.execute(queries.shift()) + + ready + ? (c.state = 'busy', busy.push(c)) + : (c.state = 'full', full.push(c)) + } + + function ondrain(c) { + full.remove(c) + onopen(c) + } + + function onclose(c) { + queues[c.state].remove(c) + c.state = 'closed' + c.reserved = null + options.onclose && options.onclose(c.id) + queries.length + ? connect(c, queries.shift()) + : queues.closed.push(c) + } +} + +function parseOptions(a, b) { + if (a && a.shared) + return a + + const env = process.env // eslint-disable-line + , o = (typeof a === 'string' ? b : a) || {} + , { url, multihost } = parseUrl(a, env) + , query = url.searchParams + , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost' + , port = o.port || url.port || env.PGPORT || 5432 + , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername() + + return Object.assign({ + host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]), + port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)), + path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' 
+ port, + database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user, + user : user, + pass : o.pass || o.password || url.password || env.PGPASSWORD || '', + max : o.max || query.get('max') || 10, + types : o.types || {}, + ssl : o.ssl || parseSSL(query.get('sslmode') || query.get('ssl')) || false, + idle_timeout : o.idle_timeout || query.get('idle_timeout') || env.PGIDLE_TIMEOUT || warn(o.timeout), + connect_timeout : o.connect_timeout || query.get('connect_timeout') || env.PGCONNECT_TIMEOUT || 30, + max_lifetime : o.max_lifetime || url.max_lifetime || max_lifetime, + max_pipeline : o.max_pipeline || url.max_pipeline || 100, + backoff : o.backoff || url.backoff || backoff, + keep_alive : o.keep_alive || url.keep_alive || 60, + prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true, + onnotice : o.onnotice, + onnotify : o.onnotify, + onclose : o.onclose, + onparameter : o.onparameter, + transform : parseTransform(o.transform || {}), + connection : Object.assign({ application_name: 'postgres.js' }, o.connection), + target_session_attrs: tsa(o, url, env), + debug : o.debug, + fetch_types : 'fetch_types' in o ? o.fetch_types : true, + parameters : {}, + shared : { retries: 0, typeArrayMap: {} } + }, + mergeUserTypes(o.types) + ) +} + +function tsa(o, url, env) { + const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS + if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x)) + return x + + throw new Error('target_session_attrs ' + x + ' is not supported') +} + +function backoff(retries) { + return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20) +} + +function max_lifetime() { + return 60 * (30 + Math.random() * 30) +} + +function parseTransform(x) { + return { + column: { + from: typeof x.column === 'function' ? 
x.column : x.column && x.column.from, + to: x.column && x.column.to + }, + value: { + from: typeof x.value === 'function' ? x.value : x.value && x.value.from, + to: x.value && x.value.to + }, + row: { + from: typeof x.row === 'function' ? x.row : x.row && x.row.from, + to: x.row && x.row.to + } + } +} + +function parseSSL(x) { + return x !== 'disable' && x !== 'false' && x +} + +function parseUrl(url) { + if (typeof url !== 'string') + return { url: { searchParams: new Map() } } + + let host = url + host = host.slice(host.indexOf('://') + 3) + host = host.split(/[?/]/)[0] + host = host.slice(host.indexOf('@') + 1) + + return { + url: new URL(url.replace(host, host.split(',')[0])), + multihost: host.indexOf(',') > -1 && host + } +} + +function warn(x) { + typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line + return x +} + +function osUsername() { + try { + return os.userInfo().username // eslint-disable-line + } catch (_) { + return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line + } +} diff --git a/cjs/src/query.js b/cjs/src/query.js new file mode 100644 index 00000000..86fbaf54 --- /dev/null +++ b/cjs/src/query.js @@ -0,0 +1,141 @@ +const originCache = new Map() + +module.exports = class Query extends Promise { + constructor(strings, args, handler, canceller, options = {}) { + let resolve + , reject + + super((a, b) => { + resolve = a + reject = b + }) + + this.tagged = Array.isArray(strings.raw) + this.strings = strings + this.args = args + this.handler = handler + this.canceller = canceller + this.options = options + + this.state = null + this.statement = null + + this.resolve = x => (this.active = false, resolve(x)) + this.reject = x => (this.active = false, reject(x)) + + this.active = false + this.cancelled = null + this.executed = false + this.signature = '' + + this.origin = handler.debug ? 
new Error().stack : cachedError(this.strings) + } + + static get [Symbol.species]() { + return Promise + } + + cancel() { + return this.canceller && (this.canceller(this), this.canceller = null) + } + + async readable() { + this.options.simple = true + this.options.prepare = false + this.streaming = true + return this + } + + async writable() { + this.options.simple = true + this.options.prepare = false + this.streaming = true + return this + } + + cursor(rows = 1, fn) { + this.options.simple = false + if (typeof rows === 'function') { + fn = rows + rows = 1 + } + + this.cursorRows = rows + + if (typeof fn === 'function') + return (this.cursorFn = fn, this) + + let prev + return { + [Symbol.asyncIterator]: () => ({ + next: () => { + prev && prev() + const promise = new Promise((resolve, reject) => { + this.cursorFn = x => { + resolve({ value: x, done: false }) + return new Promise(r => prev = r) + } + this.resolve = () => (this.active = false, resolve({ done: true })) + this.reject = x => (this.active = false, reject(x)) + }) + this.execute() + return promise + } + }) + } + } + + describe() { + this.onlyDescribe = true + return this + } + + stream() { + throw new Error('.stream has been renamed to .forEach') + } + + forEach(fn) { + this.forEachFn = fn + return this + } + + raw() { + this.isRaw = true + return this + } + + handle() { + !this.executed && this.handler((this.executed = true, this)) + } + + execute() { + this.handle() + return this + } + + then() { + this.handle() + return super.then.apply(this, arguments) + } + + catch() { + this.handle() + return super.catch.apply(this, arguments) + } + + finally() { + this.handle() + return super.finally.apply(this, arguments) + } +} + +function cachedError(xs) { + if (originCache.has(xs)) + return originCache.get(xs) + + const x = Error.stackTraceLimit + Error.stackTraceLimit = 4 + originCache.set(xs, new Error().stack) + Error.stackTraceLimit = x + return originCache.get(xs) +} diff --git a/lib/queue.js 
b/cjs/src/queue.js similarity index 57% rename from lib/queue.js rename to cjs/src/queue.js index 7a6f2b46..8438f5da 100644 --- a/lib/queue.js +++ b/cjs/src/queue.js @@ -1,15 +1,20 @@ module.exports = Queue -function Queue() { - let xs = [] +function Queue(initial = []) { + let xs = initial.slice() let index = 0 return { get length() { return xs.length - index }, - push: (x) => xs.push(x), - peek: () => xs[index], + remove: (x) => { + const index = xs.indexOf(x) + return index === -1 + ? null + : (xs.splice(index, 1), x) + }, + push: (x) => (xs.push(x), x), shift: () => { const out = xs[index++] diff --git a/cjs/src/result.js b/cjs/src/result.js new file mode 100644 index 00000000..6146daa2 --- /dev/null +++ b/cjs/src/result.js @@ -0,0 +1,16 @@ +module.exports = class Result extends Array { + constructor() { + super() + Object.defineProperties(this, { + count: { value: null, writable: true }, + state: { value: null, writable: true }, + command: { value: null, writable: true }, + columns: { value: null, writable: true }, + statement: { value: null, writable: true } + }) + } + + static get [Symbol.species]() { + return Array + } +} diff --git a/lib/subscribe.js b/cjs/src/subscribe.js similarity index 88% rename from lib/subscribe.js rename to cjs/src/subscribe.js index 0a5b4899..9ec5aafd 100644 --- a/lib/subscribe.js +++ b/cjs/src/subscribe.js @@ -1,4 +1,4 @@ -module.exports = function(postgres, a, b) { +module.exports = Subscribe;function Subscribe(postgres, options) { const listeners = new Map() let connection @@ -6,14 +6,13 @@ module.exports = function(postgres, a, b) { return async function subscribe(event, fn) { event = parseEvent(event) - const options = typeof a === 'string' ? 
b : a || {} options.max = 1 options.connection = { ...options.connection, replication: 'database' } - const sql = postgres(a, b) + const sql = postgres(options) !connection && (subscribe.sql = sql, connection = init(sql, options.publications)) @@ -38,7 +37,7 @@ module.exports = function(postgres, a, b) { `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT` ) - const stream = sql.unsafe( + const stream = await sql.unsafe( `START_REPLICATION SLOT ${ slot } LOGICAL ${ x.consistent_point } (proto_version '1', publication_names '${ publications }')` @@ -91,10 +90,10 @@ function parse(x, state, parsers, handle) { Object.entries({ R: x => { // Relation let i = 1 - const r = state[x.readInt32BE(i)] = { + const r = state[x.readUInt32BE(i)] = { schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog', table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))), - columns: Array(x.readInt16BE(i += 2)), + columns: Array(x.readUInt16BE(i += 2)), keys: [] } i += 2 @@ -106,9 +105,9 @@ function parse(x, state, parsers, handle) { column = r.columns[columnIndex++] = { key: x[i++], name: String(x.slice(i, i = x.indexOf(0, i))), - type: x.readInt32BE(i += 1), - parser: parsers[x.readInt32BE(i)], - atttypmod: x.readInt32BE(i += 4) + type: x.readUInt32BE(i += 1), + parser: parsers[x.readUInt32BE(i)], + atttypmod: x.readUInt32BE(i += 4) } column.key && r.keys.push(column) @@ -123,7 +122,7 @@ function parse(x, state, parsers, handle) { }, I: x => { // Insert let i = 1 - const relation = state[x.readInt32BE(i)] + const relation = state[x.readUInt32BE(i)] const row = {} tuples(x, row, relation.columns, i += 7) @@ -134,7 +133,7 @@ function parse(x, state, parsers, handle) { }, D: x => { // Delete let i = 1 - const relation = state[x.readInt32BE(i)] + const relation = state[x.readUInt32BE(i)] i += 4 const key = x[i] === 75 const row = key || x[i] === 79 @@ -151,7 +150,7 @@ function parse(x, state, parsers, handle) { }, U: x => { // Update let i = 1 - const 
relation = state[x.readInt32BE(i)] + const relation = state[x.readUInt32BE(i)] i += 4 const key = x[i] === 75 const old = key || x[i] === 79 @@ -187,10 +186,10 @@ function tuples(x, row, columns, xi) { : type === 117 // u ? undefined : column.parser === undefined - ? x.toString('utf8', xi + 4, xi += 4 + x.readInt32BE(xi)) + ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)) : column.parser.array === true - ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readInt32BE(xi))) - : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readInt32BE(xi))) + ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi))) + : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))) } return xi diff --git a/cjs/src/types.js b/cjs/src/types.js new file mode 100644 index 00000000..fa70d8d8 --- /dev/null +++ b/cjs/src/types.js @@ -0,0 +1,298 @@ +const Query = require('./query.js') +const { Errors } = require('./errors.js') + +const types = module.exports.types = { + string: { + to: 25, + from: null, // defaults to string + serialize: x => '' + x + }, + number: { + to: 0, + from: [21, 23, 26, 700], + serialize: x => '' + x, + parse: x => +x + }, + json: { + to: 114, + from: [114, 3802], + serialize: x => JSON.stringify(x), + parse: x => JSON.parse(x) + }, + boolean: { + to: 16, + from: 16, + serialize: x => x === true ? 't' : 'f', + parse: x => x === 't' + }, + date: { + to: 1184, + from: [1082, 1114, 1184], + serialize: x => (x instanceof Date ? 
x : new Date(x)).toISOString(), + parse: x => new Date(x) + }, + bytea: { + to: 17, + from: 17, + serialize: x => '\\x' + Buffer.from(x).toString('hex'), + parse: x => Buffer.from(x.slice(2), 'hex') + } +} + +const BigInt = module.exports.BigInt = { + to: 1700, + from: [20, 701, 1700], + parse: x => BigInt(x), // eslint-disable-line + serialize: x => x.toString() +} + +class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }} + +class Identifier extends NotTagged { + constructor(value) { + super() + this.value = escapeIdentifier(value) + } +};module.exports.Identifier = Identifier + +class Parameter extends NotTagged { + constructor(value, type, array) { + super() + this.value = value + this.type = type + this.array = array + } +};module.exports.Parameter = Parameter + +class Builder extends NotTagged { + constructor(first, rest) { + super() + this.first = first + this.rest = rest + } + + build(before, parameters, types, transform) { + const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop() + if (keyword.i === -1) + throw new Error('WTF') + + return keyword.fn(this.first, this.rest, parameters, types, transform) + } +};module.exports.Builder = Builder + +module.exports.handleValue = handleValue;function handleValue(x, parameters, types) { + if (Array.isArray(x)) + return x.map(x => handleValue(x, parameters, types)).join(',') + + const value = x instanceof Parameter ? x.value : x + if (value === undefined) + throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) + + return '$' + (types.push( + x instanceof Parameter + ? (parameters.push(x.value), x.array + ? 
x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value) + : x.type + ) + : (parameters.push(x), inferType(x)) + )) +} + +const CLOSE = module.exports.CLOSE = {} + +const defaultHandlers = typeHandlers(types) + +function valuesBuilder(first, parameters, types, transform, columns) { + let value + return first.map(row => + '(' + columns.map(column => { + value = row[column] + return ( + value instanceof Query ? value.strings[0] : + value instanceof Identifier ? value.value : + handleValue(value, parameters, types) + ) + }).join(',') + ')' + ).join(',') +} + +const builders = Object.entries({ + valuesBuilder, + + update(first, rest, parameters, types, transform) { + return (rest.length ? rest.flat() : Object.keys(first)).map(x => + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + + '=' + handleValue(first[x], parameters, types) + ) + }, + + select(first, rest, parameters, types, transform) { + typeof first === 'string' && (first = [first].concat(rest)) + if (Array.isArray(first)) + return first.map(x => escapeIdentifier(transform.column.to ? transform.column.to(x) : x)).join(',') + + let value + const columns = rest.length ? rest.flat() : Object.keys(first) + return columns.map(x => { + value = first[x] + return ( + value instanceof Query ? value.strings[0] : + value instanceof Identifier ? value.value : + handleValue(value, parameters, types) + ) + ' as ' + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + }).join(',') + }, + + values(first, rest, parameters, types, transform) { + const multi = Array.isArray(first[0]) + const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) + return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) + }, + + insert(first, rest, parameters, types, transform) { + const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? 
first[0] : first) + return '(' + columns.map(x => + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + ).join(',') + ')values' + + valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns) + } +}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x), fn])) + +function notTagged() { + throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) +} + +const serializers = module.exports.serializers = defaultHandlers.serializers +const parsers = module.exports.parsers = defaultHandlers.parsers + +const END = module.exports.END = {} + +function firstIsString(x) { + if (Array.isArray(x)) + return firstIsString(x[0]) + return typeof x === 'string' ? 1009 : 0 +} + +const mergeUserTypes = module.exports.mergeUserTypes = function(types) { + const user = typeHandlers(types || {}) + return { + serializers: Object.assign({}, serializers, user.serializers), + parsers: Object.assign({}, parsers, user.parsers) + } +} + +function typeHandlers(types) { + return Object.keys(types).reduce((acc, k) => { + types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse) + acc.serializers[types[k].to] = types[k].serialize + types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize) + return acc + }, { parsers: {}, serializers: {} }) +} + +const escapeIdentifier = module.exports.escapeIdentifier = function escape(str) { + return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"' +} + +const inferType = module.exports.inferType = function inferType(x) { + return ( + x instanceof Parameter ? x.type : + x instanceof Date ? 1184 : + x instanceof Uint8Array ? 17 : + (x === true || x === false) ? 16 : + typeof x === 'bigint' ? 1700 : + Array.isArray(x) ? 
inferType(x[0]) : + 0 + ) +} + +const escapeBackslash = /\\/g +const escapeQuote = /"/g + +function arrayEscape(x) { + return x + .replace(escapeBackslash, '\\\\') + .replace(escapeQuote, '\\"') +} + +const arraySerializer = module.exports.arraySerializer = function arraySerializer(xs, serializer) { + if (!xs.length) + return '{}' + + const first = xs[0] + + if (Array.isArray(first) && !first.type) + return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}' + + return '{' + xs.map(x => + '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"' + ).join(',') + '}' +} + +const arrayParserState = { + i: 0, + char: null, + str: '', + quoted: false, + last: 0 +} + +const arrayParser = module.exports.arrayParser = function arrayParser(x, parser) { + arrayParserState.i = arrayParserState.last = 0 + return arrayParserLoop(arrayParserState, x, parser) +} + +function arrayParserLoop(s, x, parser) { + const xs = [] + for (; s.i < x.length; s.i++) { + s.char = x[s.i] + if (s.quoted) { + if (s.char === '\\') { + s.str += x[++s.i] + } else if (s.char === '"') { + xs.push(parser ? parser(s.str) : s.str) + s.str = '' + s.quoted = x[s.i + 1] === '"' + s.last = s.i + 2 + } else { + s.str += s.char + } + } else if (s.char === '"') { + s.quoted = true + } else if (s.char === '{') { + s.last = ++s.i + xs.push(arrayParserLoop(s, x, parser)) + } else if (s.char === '}') { + s.quoted = false + s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) + s.last = s.i + 1 + break + } else if (s.char === ',' && s.p !== '}' && s.p !== '"') { + xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) + s.last = s.i + 1 + } + s.p = s.char + } + s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1)) + return xs +} + +const toCamel = module.exports.toCamel = x => { + let str = x[0] + for (let i = 1; i < x.length; i++) + str += x[i] === '_' ? 
x[++i].toUpperCase() : x[i] + return str +} + +const toPascal = module.exports.toPascal = x => { + let str = x[0].toUpperCase() + for (let i = 1; i < x.length; i++) + str += x[i] === '_' ? x[++i].toUpperCase() : x[i] + return str +} + +const toKebab = module.exports.toKebab = x => x.replace(/_/g, '-') + +const fromCamel = module.exports.fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase() +const fromPascal = module.exports.fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase() +const fromKebab = module.exports.fromKebab = x => x.replace(/-/g, '_') diff --git a/cjs/tests/bootstrap.js b/cjs/tests/bootstrap.js new file mode 100644 index 00000000..d7c0dc44 --- /dev/null +++ b/cjs/tests/bootstrap.js @@ -0,0 +1,29 @@ +const { spawnSync } = require('child_process') + +exec('psql', ['-c', 'alter system set ssl=on']) +exec('psql', ['-c', 'create user postgres_js_test']) +exec('psql', ['-c', 'alter system set password_encryption=md5']) +exec('psql', ['-c', 'select pg_reload_conf()']) +exec('psql', ['-c', 'create user postgres_js_test_md5 with password \'postgres_js_test_md5\'']) +exec('psql', ['-c', 'alter system set password_encryption=\'scram-sha-256\'']) +exec('psql', ['-c', 'select pg_reload_conf()']) +exec('psql', ['-c', 'create user postgres_js_test_scram with password \'postgres_js_test_scram\'']) + +exec('dropdb', ['postgres_js_test']) +exec('createdb', ['postgres_js_test']) +exec('psql', ['-c', 'grant all on database postgres_js_test to postgres_js_test']) + +module.exports.exec = exec;function exec(cmd, args) { + const { stderr } = spawnSync(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) + throw stderr +} + +async function execAsync(cmd, args) { + let stderr = '' + const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + cp.stderr.on('data', x => stderr += x) + await new Promise(x => cp.on('exit', x)) + if (stderr && 
!stderr.includes('already exists') && !stderr.includes('does not exist')) + throw new Error(stderr) +} diff --git a/cjs/tests/copy.csv b/cjs/tests/copy.csv new file mode 100644 index 00000000..6622044e --- /dev/null +++ b/cjs/tests/copy.csv @@ -0,0 +1,2 @@ +1 2 3 +4 5 6 diff --git a/cjs/tests/index.js b/cjs/tests/index.js new file mode 100644 index 00000000..12a43194 --- /dev/null +++ b/cjs/tests/index.js @@ -0,0 +1,1737 @@ +/* eslint no-console: 0 */ + +const { exec } = require('./bootstrap.js') + +const { t, nt, ot } = require('./test.js') // eslint-disable-line +const cp = require('child_process') +const path = require('path') +const net = require('net') +const fs = require('fs') +const crypto = require('crypto') + +/** @type {import('../types')} */ +const postgres = require('../src/index.js') +const delay = ms => new Promise(r => setTimeout(r, ms)) + +const rel = x => path.join(__dirname, x) +const idle_timeout = 1 + +const login = { + user: 'postgres_js_test' +} + +const login_md5 = { + user: 'postgres_js_test_md5', + pass: 'postgres_js_test_md5' +} + +const login_scram = { + user: 'postgres_js_test_scram', + pass: 'postgres_js_test_scram' +} + +const options = { + db: 'postgres_js_test', + user: login.user, + pass: login.pass, + idle_timeout, + connect_timeout: 1, + max: 1 +} + +const sql = postgres(options) + +t('Connects with no options', async() => { + const sql = postgres({ max: 1 }) + + const result = (await sql`select 1 as x`)[0].x + await sql.end() + + return [1, result] +}) + +t('Uses default database without slash', async() => { + const sql = postgres('postgres://localhost') + return [sql.options.user, sql.options.database] +}) + +t('Uses default database with slash', async() => { + const sql = postgres('postgres://localhost/') + return [sql.options.user, sql.options.database] +}) + +t('Result is array', async() => + [true, Array.isArray(await sql`select 1`)] +) + +t('Result has count', async() => + [1, (await sql`select 1`).count] +) + +t('Result 
has command', async() => + ['SELECT', (await sql`select 1`).command] +) + +t('Create table', async() => + ['CREATE TABLE', (await sql`create table test(int int)`).command, await sql`drop table test`] +) + +t('Drop table', { timeout: 2 }, async() => { + await sql`create table test(int int)` + return ['DROP TABLE', (await sql`drop table test`).command] +}) + +t('null', async() => + [null, (await sql`select ${ null } as x`)[0].x] +) + +t('Integer', async() => + ['1', (await sql`select ${ 1 } as x`)[0].x] +) + +t('String', async() => + ['hello', (await sql`select ${ 'hello' } as x`)[0].x] +) + +t('Boolean false', async() => + [false, (await sql`select ${ false } as x`)[0].x] +) + +t('Boolean true', async() => + [true, (await sql`select ${ true } as x`)[0].x] +) + +t('Date', async() => { + const now = new Date() + return [0, now - (await sql`select ${ now } as x`)[0].x] +}) + +t('Json', async() => { + const x = (await sql`select ${ sql.json({ a: 'hello', b: 42 }) } as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('implicit json', async() => { + const x = (await sql`select ${ { a: 'hello', b: 42 } }::json as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('implicit jsonb', async() => { + const x = (await sql`select ${ { a: 'hello', b: 42 } }::jsonb as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('Empty array', async() => + [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)] +) + +t('Array of Integer', async() => + ['3', (await sql`select ${ sql.array([1, 2, 3]) } as x`)[0].x[2]] +) + +t('Array of String', async() => + ['c', (await sql`select ${ sql.array(['a', 'b', 'c']) } as x`)[0].x[2]] +) + +t('Array of Date', async() => { + const now = new Date() + return [now.getTime(), (await sql`select ${ sql.array([now, now, now]) } as x`)[0].x[2].getTime()] +}) + +t('Nested array n2', async() => + ['4', (await sql`select ${ sql.array([[1, 2], [3, 4]]) } as x`)[0].x[1][1]] +) + +t('Nested array n3', async() => + 
['6', (await sql`select ${ sql.array([[[1, 2]], [[3, 4]], [[5, 6]]]) } as x`)[0].x[2][0][1]]
+)
+
+t('Escape in arrays', async() =>
+  ['Hello "you",c:\\windows', (await sql`select ${ sql.array(['Hello "you"', 'c:\\windows']) } as x`)[0].x.join(',')]
+)
+
+t('Escapes', async() => {
+  return ['hej"hej', Object.keys((await sql`select 1 as ${ sql('hej"hej') }`)[0])[0]]
+})
+
+t('null for int', async() => {
+  await sql`create table test (x int)`
+  return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`]
+})
+
+t('Transaction throws', async() => {
+  await sql`create table test (a int)`
+  return ['22P02', await sql.begin(async sql => {
+    await sql`insert into test values(1)`
+    await sql`insert into test values('hej')`
+  }).catch(x => x.code), await sql`drop table test`]
+})
+
+t('Transaction rolls back', async() => {
+  await sql`create table test (a int)`
+  await sql.begin(async sql => {
+    await sql`insert into test values(1)`
+    await sql`insert into test values('hej')`
+  }).catch(() => { /* ignore */ })
+  return [0, (await sql`select a from test`).count, await sql`drop table test`]
+})
+
+t('Transaction throws on uncaught savepoint', async() => {
+  await sql`create table test (a int)`
+
+  return ['fail', (await sql.begin(async sql => {
+    await sql`insert into test values(1)`
+    await sql.savepoint(async sql => {
+      await sql`insert into test values(2)`
+      throw new Error('fail')
+    })
+  }).catch((err) => err.message)), await sql`drop table test`]
+})
+
+t('Transaction throws on uncaught named savepoint', async() => {
+  await sql`create table test (a int)`
+
+  return ['fail', (await sql.begin(async sql => {
+    await sql`insert into test values(1)`
+    await sql.savepoint('watpoint', async sql => {
+      await sql`insert into test values(2)`
+      throw new Error('fail')
+    })
+  }).catch(() => 'fail')), await sql`drop table test`]
+})
+
+t('Transaction succeeds on caught savepoint', async() => {
+  await sql`create table test (a int)`
+  await 
sql.begin(async sql => { + await sql`insert into test values(1)` + await sql.savepoint(async sql => { + await sql`insert into test values(2)` + throw new Error('please rollback') + }).catch(() => { /* ignore */ }) + await sql`insert into test values(3)` + }) + + return ['2', (await sql`select count(1) from test`)[0].count, await sql`drop table test`] +}) + +t('Savepoint returns Result', async() => { + let result + await sql.begin(async sql => { + result = await sql.savepoint(sql => + sql`select 1 as x` + ) + }) + + return [1, result[0].x] +}) + +t('Transaction requests are executed implicitly', async() => [ + 'testing', + (await sql.begin(async sql => { + sql`select set_config('postgres_js.test', 'testing', true)` + return await sql`select current_setting('postgres_js.test') as x` + }))[0].x +]) + +t('Parallel transactions', async() => { + await sql`create table test (a int)` + return ['11', (await Promise.all([ + sql.begin(sql => sql`select 1`), + sql.begin(sql => sql`select 1`) + ])).map(x => x.count).join(''), await sql`drop table test`] +}) + +t('Transactions array', async() => { + await sql`create table test (a int)` + + return ['11', (await sql.begin(sql => [ + sql`select 1`.then(x => x), + sql`select 1` + ])).map(x => x.count).join(''), await sql`drop table test`] +}) + +t('Transaction waits', async() => { + await sql`create table test (a int)` + await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql.savepoint(async sql => { + await sql`insert into test values(2)` + throw new Error('please rollback') + }).catch(() => { /* ignore */ }) + await sql`insert into test values(3)` + }) + + return ['11', (await Promise.all([ + sql.begin(sql => sql`select 1`), + sql.begin(sql => sql`select 1`) + ])).map(x => x.count).join(''), await sql`drop table test`] +}) + +t('Helpers in Transaction', async() => { + return ['1', (await sql.begin(async sql => + await sql`select ${ sql({ x: 1 }) }` + ))[0].x] +}) + +t('Undefined values throws', async() 
=> { + let error + + await sql` + select ${ undefined } as x + `.catch(x => error = x.code) + + return ['UNDEFINED_VALUE', error] +}) + +t('Null sets to null', async() => + [null, (await sql`select ${ null } as x`)[0].x] +) + +t('Throw syntax error', async() => + ['42601', (await sql`wat 1`.catch(x => x)).code] +) + +t('Connect using uri', async() => + [true, await new Promise((resolve, reject) => { + const sql = postgres('postgres://' + login.user + ':' + (login.pass || '') + '@localhost:5432/' + options.db, { + idle_timeout + }) + sql`select 1`.then(() => resolve(true), reject) + })] +) + +t('Fail with proper error on no host', async() => + ['ECONNREFUSED', (await new Promise((resolve, reject) => { + const sql = postgres('postgres://localhost:33333/' + options.db, { + idle_timeout + }) + sql`select 1`.then(reject, resolve) + })).code] +) + +t('Connect using SSL', async() => + [true, (await new Promise((resolve, reject) => { + postgres({ + ssl: { rejectUnauthorized: false }, + idle_timeout + })`select 1`.then(() => resolve(true), reject) + }))] +) + +t('Connect using SSL require', async() => + [true, (await new Promise((resolve, reject) => { + postgres({ + ssl: 'require', + idle_timeout + })`select 1`.then(() => resolve(true), reject) + }))] +) + +t('Connect using SSL prefer', async() => { + await exec('psql', ['-c', 'alter system set ssl=off']) + await exec('psql', ['-c', 'select pg_reload_conf()']) + + const sql = postgres({ + ssl: 'prefer', + idle_timeout + }) + + return [ + 1, (await sql`select 1 as x`)[0].x, + await exec('psql', ['-c', 'alter system set ssl=on']), + await exec('psql', ['-c', 'select pg_reload_conf()']) + ] +}) + +t('Reconnect using SSL', { timeout: 2 }, async() => { + const sql = postgres({ + ssl: 'require', + idle_timeout: 0.1 + }) + + await sql`select 1` + await delay(200) + + return [1, (await sql`select 1 as x`)[0].x] +}) + +t('Login without password', async() => { + return [true, (await postgres({ ...options, ...login })`select true as 
x`)[0].x] +}) + +t('Login using MD5', async() => { + return [true, (await postgres({ ...options, ...login_md5 })`select true as x`)[0].x] +}) + +t('Login using scram-sha-256', async() => { + return [true, (await postgres({ ...options, ...login_scram })`select true as x`)[0].x] +}) + +t('Parallel connections using scram-sha-256', { + timeout: 2 +}, async() => { + const sql = postgres({ ...options, ...login_scram }) + return [true, (await Promise.all([ + sql`select true as x, pg_sleep(0.2)`, + sql`select true as x, pg_sleep(0.2)`, + sql`select true as x, pg_sleep(0.2)` + ]))[0][0].x] +}) + +t('Support dynamic password function', async() => { + return [true, (await postgres({ + ...options, + ...login_scram, + pass: () => 'postgres_js_test_scram' + })`select true as x`)[0].x] +}) + +t('Support dynamic async password function', async() => { + return [true, (await postgres({ + ...options, + ...login_scram, + pass: () => Promise.resolve('postgres_js_test_scram') + })`select true as x`)[0].x] +}) + +t('Point type', async() => { + const sql = postgres({ + ...options, + types: { + point: { + to: 600, + from: [600], + serialize: ([x, y]) => '(' + x + ',' + y + ')', + parse: (x) => x.slice(1, -1).split(',').map(x => +x) + } + } + }) + + await sql`create table test (x point)` + await sql`insert into test (x) values (${ sql.types.point([10, 20]) })` + return [20, (await sql`select x from test`)[0].x[1], await sql`drop table test`] +}) + +t('Point type array', async() => { + const sql = postgres({ + ...options, + types: { + point: { + to: 600, + from: [600], + serialize: ([x, y]) => '(' + x + ',' + y + ')', + parse: (x) => x.slice(1, -1).split(',').map(x => +x) + } + } + }) + + await sql`create table test (x point[])` + await sql`insert into test (x) values (${ sql.array([sql.types.point([10, 20]), sql.types.point([20, 30])]) })` + return [30, (await sql`select x from test`)[0].x[1][1], await sql`drop table test`] +}) + +t('sql file', async() => + [1, (await 
sql.file(rel('select.sql')))[0].x] +) + +t('sql file has forEach', async() => { + let result + await sql + .file(rel('select.sql'), { cache: false }) + .forEach(({ x }) => result = x) + + return [1, result] +}) + +t('sql file throws', async() => + ['ENOENT', (await sql.file(rel('selectomondo.sql')).catch(x => x.code))] +) + +t('sql file cached', async() => { + await sql.file(rel('select.sql')) + await delay(20) + + return [1, (await sql.file(rel('select.sql')))[0].x] +}) + +t('Parameters in file', async() => { + const result = await sql.file( + rel('select-param.sql'), + ['hello'] + ) + return ['hello', result[0].x] +}) + +t('Connection ended promise', async() => { + const sql = postgres(options) + + await sql.end() + + return [undefined, await sql.end()] +}) + +t('Connection ended timeout', async() => { + const sql = postgres(options) + + await sql.end({ timeout: 10 }) + + return [undefined, await sql.end()] +}) + +t('Connection ended error', async() => { + const sql = postgres(options) + sql.end() + return ['CONNECTION_ENDED', (await sql``.catch(x => x.code))] +}) + +t('Connection end does not cancel query', async() => { + const sql = postgres(options) + + const promise = sql`select 1 as x`.execute() + + sql.end() + + return [1, (await promise)[0].x] +}) + +t('Connection destroyed', async() => { + const sql = postgres(options) + setTimeout(() => sql.end({ timeout: 0 }), 0) + return ['CONNECTION_DESTROYED', await sql``.catch(x => x.code)] +}) + +t('Connection destroyed with query before', async() => { + const sql = postgres(options) + , error = sql`select pg_sleep(0.2)`.catch(err => err.code) + + sql.end({ timeout: 0 }) + return ['CONNECTION_DESTROYED', await error] +}) + +t('transform column', async() => { + const sql = postgres({ + ...options, + transform: { column: x => x.split('').reverse().join('') } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['dlrow_olleh', Object.keys((await sql`select * from 
test`)[0])[0], await sql`drop table test`] +}) + +t('column toPascal', async() => { + const sql = postgres({ + ...options, + transform: { column: postgres.toPascal } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['HelloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('column toCamel', async() => { + const sql = postgres({ + ...options, + transform: { column: postgres.toCamel } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['helloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('column toKebab', async() => { + const sql = postgres({ + ...options, + transform: { column: postgres.toKebab } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['hello-world', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('unsafe', async() => { + await sql`create table test (x int)` + return [1, (await sql.unsafe('insert into test values ($1) returning *', [1]))[0].x, await sql`drop table test`] +}) + +t('unsafe simple', async() => { + return [1, (await sql.unsafe('select 1 as x'))[0].x] +}) + +t('listen and notify', async() => { + const sql = postgres(options) + , channel = 'hello' + + return ['world', await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .then(() => delay(20)) + .catch(reject) + .then(sql.end) + )] +}) + +t('double listen', async() => { + const sql = postgres(options) + , channel = 'hello' + + let count = 0 + + await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .catch(reject) + ).then(() => count++) + + await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .catch(reject) + 
).then(() => count++) + + // for coverage + sql.listen('weee', () => { /* noop */ }).then(sql.end) + + return [2, count] +}) + +t('listen and notify with weird name', async() => { + const sql = postgres(options) + , channel = 'wat-;ø§' + + return ['world', await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .catch(reject) + .then(() => delay(20)) + .then(sql.end) + )] +}) + +t('listen and notify with upper case', async() => { + const sql = postgres(options) + let result + + await sql.listen('withUpperChar', x => result = x) + sql.notify('withUpperChar', 'works') + await delay(50) + + return [ + 'works', + result, + sql.end() + ] +}) + +t('listen reconnects', { timeout: 4 }, async() => { + const sql = postgres(options) + , xs = [] + + const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await sql`select pg_terminate_backend(${ pid }::int)` + await delay(200) + await sql.notify('test', 'b') + await delay(200) + sql.end() + + return ['ab', xs.join('')] +}) + + +t('listen reconnects after connection error', { timeout: 3 }, async() => { + const sql = postgres() + , xs = [] + + const a = (await sql`show data_directory`)[0].data_directory + + const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await sql`select pg_terminate_backend(${ pid }::int)` + await delay(1000) + + await sql.notify('test', 'b') + await delay(50) + sql.end() + + return ['ab', xs.join('')] +}) + +t('listen result reports correct connection state after reconnection', async() => { + const sql = postgres(options) + , xs = [] + + const result = await sql.listen('test', x => xs.push(x)) + const initialPid = result.state.pid + await sql.notify('test', 'a') + await sql`select pg_terminate_backend(${ initialPid }::int)` + await delay(50) + sql.end() + + return [result.state.pid !== initialPid, true] +}) + +t('unlisten removes subscription', async() 
=> { + const sql = postgres(options) + , xs = [] + + const { unlisten } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await delay(50) + await unlisten() + await sql.notify('test', 'b') + await delay(50) + sql.end() + + return ['a', xs.join('')] +}) + +t('listen after unlisten', async() => { + const sql = postgres(options) + , xs = [] + + const { unlisten } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await delay(50) + await unlisten() + await sql.notify('test', 'b') + await delay(50) + await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'c') + await delay(50) + sql.end() + + return ['ac', xs.join('')] +}) + +t('multiple listeners and unlisten one', async() => { + const sql = postgres(options) + , xs = [] + + await sql.listen('test', x => xs.push('1', x)) + const s2 = await sql.listen('test', x => xs.push('2', x)) + await sql.notify('test', 'a') + await delay(50) + await s2.unlisten() + await sql.notify('test', 'b') + await delay(50) + sql.end() + + return ['1a2a1b', xs.join('')] +}) + +t('responds with server parameters (application_name)', async() => + ['postgres.js', await new Promise((resolve, reject) => postgres({ + ...options, + onparameter: (k, v) => k === 'application_name' && resolve(v) + })`select 1`.catch(reject))] +) + +t('has server parameters', async() => { + return ['postgres.js', (await sql`select 1`.then(() => sql.parameters.application_name))] +}) + +t('big query body', async() => { + await sql`create table test (x int)` + return [1000, (await sql`insert into test ${ + sql([...Array(1000).keys()].map(x => ({ x }))) + }`).count, await sql`drop table test`] +}) + +t('Throws if more than 65534 parameters', async() => { + await sql`create table test (x int)` + return ['MAX_PARAMETERS_EXCEEDED', (await sql`insert into test ${ + sql([...Array(65535).keys()].map(x => ({ x }))) + }`.catch(e => e.code)), await sql`drop table test`] +}) + +t('let postgres do implicit cast of 
unknown types', async() => { + await sql`create table test (x timestamp with time zone)` + const [{ x }] = await sql`insert into test values (${ new Date().toISOString() }) returning *` + return [true, x instanceof Date, await sql`drop table test`] +}) + +t('only allows one statement', async() => + ['42601', await sql`select 1; select 2`.catch(e => e.code)] +) + +t('await sql() throws not tagged error', async() => { + let error + try { + await sql('select 1') + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('sql().then throws not tagged error', async() => { + let error + try { + sql('select 1').then(() => { /* noop */ }) + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('sql().catch throws not tagged error', async() => { + let error + try { + await sql('select 1') + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('sql().finally throws not tagged error', async() => { + let error + try { + sql('select 1').finally(() => { /* noop */ }) + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('little bobby tables', async() => { + const name = 'Robert\'); DROP TABLE students;--' + + await sql`create table students (name text, age int)` + await sql`insert into students (name) values (${ name })` + + return [ + name, (await sql`select name from students`)[0].name, + await sql`drop table students` + ] +}) + +t('Connection errors are caught using begin()', { + timeout: 2 +}, async() => { + let error + try { + const sql = postgres({ host: 'wat', port: 1337 }) + + await sql.begin(async(sql) => { + await sql`insert into test (label, value) values (${1}, ${2})` + }) + } catch (err) { + error = err + } + + return [ + true, + error.code === 'ENOTFOUND' || + error.message === 'failed to lookup address information: nodename nor servname provided, or not known' + ] +}) + +t('dynamic column name', async() => { + return ['!not_valid', Object.keys((await sql`select 
1 as ${ sql('!not_valid') }`)[0])[0]] +}) + +t('dynamic select as', async() => { + return ['2', (await sql`select ${ sql({ a: 1, b: 2 }) }`)[0].b] +}) + +t('dynamic select as pluck', async() => { + return [undefined, (await sql`select ${ sql({ a: 1, b: 2 }, 'a') }`)[0].b] +}) + +t('dynamic insert', async() => { + await sql`create table test (a int, b text)` + const x = { a: 42, b: 'the answer' } + + return ['the answer', (await sql`insert into test ${ sql(x) } returning *`)[0].b, await sql`drop table test`] +}) + +t('dynamic insert pluck', async() => { + await sql`create table test (a int, b text)` + const x = { a: 42, b: 'the answer' } + + return [null, (await sql`insert into test ${ sql(x, 'a') } returning *`)[0].b, await sql`drop table test`] +}) + +t('array insert', async() => { + await sql`create table test (a int, b int)` + return [2, (await sql`insert into test (a, b) values (${ [1, 2] }) returning *`)[0].b, await sql`drop table test`] +}) + +t('parameters in()', async() => { + return [2, (await sql` + with rows as ( + select * from (values (1), (2), (3), (4)) as x(a) + ) + select * from rows where a in (${ [3, 4] }) + `).count] +}) + +t('dynamic multi row insert', async() => { + await sql`create table test (a int, b text)` + const x = { a: 42, b: 'the answer' } + + return [ + 'the answer', + (await sql`insert into test ${ sql([x, x]) } returning *`)[1].b, await sql`drop table test` + ] +}) + +t('dynamic update', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (17, 'wrong')` + + return [ + 'the answer', + (await sql`update test set ${ sql({ a: 42, b: 'the answer' }) } returning *`)[0].b, await sql`drop table test` + ] +}) + +t('dynamic update pluck', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (17, 'wrong')` + + return [ + 'wrong', + (await sql`update test set ${ sql({ a: 42, b: 'the answer' }, 'a') } returning *`)[0].b, await sql`drop table 
test` + ] +}) + +t('dynamic select array', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (42, 'yay')` + return ['yay', (await sql`select ${ sql(['a', 'b']) } from test`)[0].b, await sql`drop table test`] +}) + +t('dynamic select args', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (42, 'yay')` + return ['yay', (await sql`select ${ sql('a', 'b') } from test`)[0].b, await sql`drop table test`] +}) + +t('dynamic values single row', async() => { + const [{ b }] = await sql` + select * from (values ${ sql(['a', 'b', 'c']) }) AS x(a, b, c) + ` + + return ['b', b] +}) + +t('dynamic values multi row', async() => { + const [_, { b }] = await sql` + select * from (values ${ sql([['a', 'b', 'c'],['a', 'b', 'c']]) }) AS x(a, b, c) + ` + + return ['b', b] +}) + +t('connection parameters', async() => { + const sql = postgres({ + ...options, + connection: { + 'some.var': 'yay' + } + }) + + return ['yay', (await sql`select current_setting('some.var') as x`)[0].x] +}) + +t('Multiple queries', async() => { + const sql = postgres(options) + + return [4, (await Promise.all([ + sql`select 1`, + sql`select 2`, + sql`select 3`, + sql`select 4` + ])).length] +}) + +t('Multiple statements', async() => + [2, await sql.unsafe(` + select 1 as x; + select 2 as a; + `).then(([, [x]]) => x.a)] +) + +t('throws correct error when authentication fails', async() => { + const sql = postgres({ + ...options, + ...login_md5, + pass: 'wrong' + }) + return ['28P01', await sql`select 1`.catch(e => e.code)] +}) + +t('notice works', async() => { + let notice + const log = console.log + console.log = function(x) { + notice = x + } + + const sql = postgres(options) + + await sql`create table if not exists users()` + await sql`create table if not exists users()` + + console.log = log + + return ['NOTICE', notice.severity] +}) + +t('notice hook works', async() => { + let notice + const sql = 
postgres({ + ...options, + onnotice: x => notice = x + }) + + await sql`create table if not exists users()` + await sql`create table if not exists users()` + + return ['NOTICE', notice.severity] +}) + +t('bytea serializes and parses', async() => { + const buf = Buffer.from('wat') + + await sql`create table test (x bytea)` + await sql`insert into test values (${ buf })` + + return [ + buf.toString(), + (await sql`select x from test`)[0].x.toString(), + await sql`drop table test` + ] +}) + +t('forEach works', async() => { + let result + await sql`select 1 as x`.forEach(({ x }) => result = x) + return [1, result] +}) + +t('forEach returns empty array', async() => { + return [0, (await sql`select 1 as x`.forEach(() => { /* noop */ })).length] +}) + +t('Cursor works', async() => { + const order = [] + await sql`select 1 as x union select 2 as x`.cursor(async([x]) => { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + }) + return ['1a1b2a2b', order.join('')] +}) + +t('Unsafe cursor works', async() => { + const order = [] + await sql.unsafe('select 1 as x union select 2 as x').cursor(async([x]) => { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + }) + return ['1a1b2a2b', order.join('')] +}) + +t('Cursor custom n works', async() => { + const order = [] + await sql`select * from generate_series(1,20)`.cursor(10, async(x) => { + order.push(x.length) + }) + return ['10,10', order.join(',')] +}) + +t('Cursor custom with rest n works', async() => { + const order = [] + await sql`select * from generate_series(1,20)`.cursor(11, async(x) => { + order.push(x.length) + }) + return ['11,9', order.join(',')] +}) + +t('Cursor custom with less results than batch size works', async() => { + const order = [] + await sql`select * from generate_series(1,20)`.cursor(21, async(x) => { + order.push(x.length) + }) + return ['20', order.join(',')] +}) + +t('Cursor cancel works', async() => { + let result + await sql`select * from generate_series(1,10) as 
x`.cursor(async([{ x }]) => { + result = x + return sql.CLOSE + }) + return [1, result] +}) + +t('Cursor throw works', async() => { + const order = [] + await sql`select 1 as x union select 2 as x`.cursor(async([x]) => { + order.push(x.x + 'a') + await delay(100) + throw new Error('watty') + }).catch(() => order.push('err')) + return ['1aerr', order.join('')] +}) + +t('Cursor error works', async() => [ + '42601', + await sql`wat`.cursor(() => { /* noop */ }).catch((err) => err.code) +]) + +t('Multiple Cursors', { timeout: 2 }, async() => { + const result = [] + const xs = await sql.begin(async sql => [ + await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async ([row]) => { + result.push(row.x) + await new Promise(r => setTimeout(r, 200)) + }), + await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async ([row]) => { + result.push(row.x) + await new Promise(r => setTimeout(r, 100)) + }) + ]) + + return ['1,2,3,4,101,102,103,104', result.join(',')] +}) + +t('Cursor as async iterator', async() => { + const order = [] + for await (const [x] of sql`select generate_series(1,2) as x;`.cursor()) { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + } + + return ['1a1b2a2b', order.join('')] +}) + +t('Transform row', async() => { + const sql = postgres({ + ...options, + transform: { row: () => 1 } + }) + + return [1, (await sql`select 'wat'`)[0]] +}) + +t('Transform row forEach', async() => { + let result + const sql = postgres({ + ...options, + transform: { row: () => 1 } + }) + + await sql`select 1`.forEach(x => result = x) + + return [1, result] +}) + +t('Transform value', async() => { + const sql = postgres({ + ...options, + transform: { value: () => 1 } + }) + + return [1, (await sql`select 'wat' as x`)[0].x] +}) + +t('Transform columns from', async() => { + const sql = postgres({ ...options, transform: { column: { to: postgres.fromCamel, from: postgres.toCamel } } }) + await sql`create table test (a_test int, 
b_test text)` + await sql`insert into test ${ sql([{ aTest: 1, bTest: 1 }]) }` + await sql`update test set ${ sql({ aTest: 2, bTest: 2 }) }` + return [ + 2, + (await sql`select ${ sql('aTest', 'bTest') } from test`)[0].aTest, + await sql`drop table test` + ] +}) + +t('Unix socket', async() => { + const sql = postgres({ + ...options, + host: '/tmp' + }) + + return [1, (await sql`select 1 as x`)[0].x] +}) + +t('Big result', async() => { + return [100000, (await sql`select * from generate_series(1, 100000)`).count] +}) + +t('Debug works', async() => { + let result + const sql = postgres({ + ...options, + debug: (connection_id, str) => result = str + }) + + await sql`select 1` + + return ['select 1', result] +}) + +t('bigint is returned as String', async() => [ + 'string', + typeof (await sql`select 9223372036854777 as x`)[0].x +]) + +t('int is returned as Number', async() => [ + 'number', + typeof (await sql`select 123 as x`)[0].x +]) + +t('numeric is returned as string', async() => [ + 'string', + typeof (await sql`select 1.2 as x`)[0].x +]) + +t('Async stack trace', async() => { + const sql = postgres({ ...options, debug: false }) + return [ + parseInt(new Error().stack.split('\n')[1].match(':([0-9]+):')[1]) + 1, + parseInt(await sql`error`.catch(x => x.stack.split('\n').pop().match(':([0-9]+):')[1])) + ] +}) + +t('Debug has long async stack trace', async() => { + const sql = postgres({ ...options, debug: true }) + + return [ + 'watyo', + await yo().catch(x => x.stack.match(/wat|yo/g).join('')) + ] + + function yo() { + return wat() + } + + function wat() { + return sql`error` + } +}) + +t('Error contains query string', async() => [ + 'selec 1', + (await sql`selec 1`.catch(err => err.query)) +]) + +t('Error contains query serialized parameters', async() => [ + 1, + (await sql`selec ${ 1 }`.catch(err => err.parameters[0])) +]) + +t('Error contains query raw parameters', async() => [ + 1, + (await sql`selec ${ 1 }`.catch(err => err.args[0])) +]) + +t('Query and 
parameters on errorare not enumerable if debug is not set', async() => { + const sql = postgres({ ...options, debug: false }) + + return [ + false, + (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') || err.propertyIsEnumerable('query'))) + ] +}) + +t('Query and parameters are enumerable if debug is set', async() => { + const sql = postgres({ ...options, debug: true }) + + return [ + true, + (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') && err.propertyIsEnumerable('query'))) + ] +}) + +t('connect_timeout works', { timeout: 20 }, async() => { + const connect_timeout = 0.2 + const server = net.createServer() + server.listen() + const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout }) + const start = Date.now() + let end + await sql`select 1`.catch((e) => { + if (e.code !== 'CONNECT_TIMEOUT') + throw e + end = Date.now() + }) + server.close() + return [connect_timeout, Math.floor((end - start) / 100) / 10] +}) + +t('connect_timeout throws proper error', async() => [ + 'CONNECT_TIMEOUT', + await postgres({ + ...options, + ...login_scram, + connect_timeout: 0.001 + })`select 1`.catch(e => e.code) +]) + +t('requests works after single connect_timeout', async() => { + let first = true + + const sql = postgres({ + ...options, + ...login_scram, + connect_timeout: { valueOf() { return first ? 
(first = false, 0.01) : 1 } } + }) + + return [ + 'CONNECT_TIMEOUT,,1', + [ + await sql`select 1 as x`.catch(x => x.code), + await delay(10), + (await sql`select 1 as x`)[0].x + ].join(',') + ] +}) + +t('Postgres errors are of type PostgresError', async() => + [true, (await sql`bad keyword`.catch(e => e)) instanceof sql.PostgresError] +) + +t('Result has columns spec', async() => + ['x', (await sql`select 1 as x`).columns[0].name] +) + +t('forEach has result as second argument', async() => { + let x + await sql`select 1 as x`.forEach((_, result) => x = result) + return ['x', x.columns[0].name] +}) + +t('Result as arrays', async() => { + const sql = postgres({ + ...options, + transform: { + row: x => Object.values(x) + } + }) + + return ['1,2', (await sql`select 1 as a, 2 as b`)[0].join(',')] +}) + +t('Insert empty array', async() => { + await sql`create table tester (ints int[])` + return [ + Array.isArray((await sql`insert into tester (ints) values (${ sql.array([]) }) returning *`)[0].ints), + true, + await sql`drop table tester` + ] +}) + +t('Insert array in sql()', async() => { + await sql`create table tester (ints int[])` + return [ + Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) }) } returning *`)[0].ints), + true, + await sql`drop table tester` + ] +}) + +t('Automatically creates prepared statements', async() => { + const sql = postgres(options) + const result = await sql`select * from pg_prepared_statements` + return [true, result.some(x => x.name = result.statement.name)] +}) + +t('no_prepare: true disables prepared statements (deprecated)', async() => { + const sql = postgres({ ...options, no_prepare: true }) + const result = await sql`select * from pg_prepared_statements` + return [false, result.some(x => x.name = result.statement.name)] +}) + +t('prepare: false disables prepared statements', async() => { + const sql = postgres({ ...options, prepare: false }) + const result = await sql`select * from pg_prepared_statements` + 
return [false, result.some(x => x.name = result.statement.name)] +}) + +t('prepare: true enables prepared statements', async() => { + const sql = postgres({ ...options, prepare: true }) + const result = await sql`select * from pg_prepared_statements` + return [true, result.some(x => x.name = result.statement.name)] +}) + +t('prepares unsafe query when "prepare" option is true', async() => { + const sql = postgres({ ...options, prepare: true }) + const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'], { prepare: true }) + return [true, result.some(x => x.name = result.statement.name)] +}) + +t('does not prepare unsafe query by default', async() => { + const sql = postgres({ ...options, prepare: true }) + const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla']) + return [false, result.some(x => x.name = result.statement.name)] +}) + +t('Recreate prepared statements on transformAssignedExpr error', async() => { + const insert = () => sql`insert into test (name) values (${ '1' }) returning name` + await sql`create table test (name text)` + await insert() + await sql`alter table test alter column name type int using name::integer` + return [ + 1, + (await insert())[0].name, + await sql`drop table test` + ] +}) + +t('Recreate prepared statements on RevalidateCachedQuery error', async() => { + const select = () => sql`select name from test` + await sql`create table test (name text)` + await sql`insert into test values ('1')` + await select() + await sql`alter table test alter column name type int using name::integer` + return [ + 1, + (await select())[0].name, + await sql`drop table test` + ] +}) + + +t('Catches connection config errors', async() => { + const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' }) + + return [ + 'wat', + await sql`select 1`.catch((e) => e.message) + ] +}) + +t('Catches connection config errors with end', 
async() => { + const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' }) + + return [ + 'wat', + await sql`select 1`.catch((e) => e.message), + await sql.end() + ] +}) + +t('Catches query format errors', async() => [ + 'wat', + await sql.unsafe({ toString: () => { throw new Error('wat') } }).catch((e) => e.message) +]) + +t('Multiple hosts', { + timeout: 10 +}, async() => { + const s1 = postgres({ idle_timeout }) + , s2 = postgres({ idle_timeout, port: 5433 }) + , sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout, max: 1 }) + , result = [] + + const x1 = await sql`select 1` + result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) + await s1`select pg_terminate_backend(${ x1.state.pid }::int)` + await delay(100) + + const x2 = await sql`select 1` + result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) + await s2`select pg_terminate_backend(${ x2.state.pid }::int)` + await delay(100) + + result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) + + return ['5432,5433,5432', result.join(',')] +}) + +t('Escaping supports schemas and tables', async() => { + await sql`create schema a` + await sql`create table a.b (c int)` + await sql`insert into a.b (c) values (1)` + return [ + 1, + (await sql`select ${ sql('a.b.c') } from a.b`)[0].c, + await sql`drop table a.b`, + await sql`drop schema a` + ] +}) + +t('Raw method returns rows as arrays', async() => { + const [x] = await sql`select 1`.raw() + return [ + Array.isArray(x), + true + ] +}) + +t('Raw method returns values unparsed as Buffer', async() => { + const [[x]] = await sql`select 1`.raw() + return [ + x instanceof Uint8Array, + true + ] +}) + +t('Copy read works', async() => { + const result = [] + + await sql`create table test (x int)` + await sql`insert into test select * from generate_series(1,10)` + const readable = await sql`copy test to 
stdout`.readable() + readable.on('data', x => result.push(x)) + await new Promise(r => readable.on('end', r)) + + return [ + result.length, + 10, + await sql`drop table test` + ] +}) + +t('Copy write works', { timeout: 2 }, async() => { + await sql`create table test (x int)` + const writable = await sql`copy test from stdin`.writable() + + writable.write('1\n') + writable.write('1\n') + writable.end() + + await new Promise(r => writable.on('finish', r)) + + return [ + (await sql`select 1 from test`).length, + 2, + await sql`drop table test` + ] +}) + +t('Copy write as first works', async() => { + await sql`create table test (x int)` + const first = postgres(options) + const writable = await first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable() + writable.write('1\n') + writable.write('1\n') + writable.end() + + await new Promise(r => writable.on('finish', r)) + + return [ + (await sql`select 1 from test`).length, + 2, + await sql`drop table test` + ] +}) + + +t('Copy from file works', async() => { + await sql`create table test (x int, y int, z int)` + await new Promise(async r => fs + .createReadStream(rel('copy.csv')) + .pipe(await sql`copy test from stdin`.writable()) + .on('finish', r) + ) + + return [ + JSON.stringify(await sql`select * from test`), + '[{"x":1,"y":2,"z":3},{"x":4,"y":5,"z":6}]', + await sql`drop table test` + ] +}) + +t('Copy from works in transaction', async() => { + await sql`create table test(x int)` + const xs = await sql.begin(async sql => { + (await sql`copy test from stdin`.writable()).end('1\n2') + await delay(20) + return sql`select 1 from test` + }) + + return [ + xs.length, + 2, + await sql`drop table test` + ] +}) + +t('Copy from abort works', async() => { + const sql = postgres(options) + const readable = fs.createReadStream(rel('copy.csv')) + + await sql`create table test (x int, y int, z int)` + await sql`TRUNCATE TABLE test` + + const writable = await sql`COPY test FROM STDIN`.writable() + + let 
aborted + + readable + .pipe(writable) + .on('error', (err) => aborted = err) + + writable.destroy(new Error('abort')) + await sql.end() + + return [ + 'abort', + aborted.message, + await postgres(options)`drop table test` + ] +}) + +t('multiple queries before connect', async() => { + const sql = postgres({ ...options, max: 2 }) + const xs = await Promise.all([ + sql`select 1 as x`, + sql`select 2 as x`, + sql`select 3 as x`, + sql`select 4 as x` + ]) + + return [ + '1,2,3,4', + xs.map(x => x[0].x).join() + ] +}) + +t('subscribe', { timeout: 2 }, async() => { + const sql = postgres({ + database: 'postgres_js_test', + publications: 'alltables', + fetch_types: false + }) + + await sql.unsafe('create publication alltables for all tables') + + const result = [] + + await sql.subscribe('*', (row, info) => + result.push(info.command, row.name || row.id) + ) + + await sql` + create table test ( + id serial primary key, + name text + ) + ` + await sql`insert into test (name) values ('Murray')` + await sql`update test set name = 'Rothbard'` + await sql`delete from test` + await delay(100) + return [ + 'insert,Murray,update,Rothbard,delete,1', + result.join(','), + await sql`drop table test`, + await sql`drop publication alltables`, + await sql.end() + ] +}) + +t('Execute works', async() => { + const result = await new Promise((resolve) => { + const sql = postgres({ ...options, fetch_types: false, debug (id, query) { + resolve(query) + }}) + sql`select 1`.execute() + }) + + return [result, 'select 1'] +}) + +t('Cancel running query works', async() => { + const query = sql`select pg_sleep(2)` + setTimeout(() => query.cancel(), 50) + const error = await query.catch(x => x) + return ['57014', error.code] +}) + +t('Cancel piped query works', async() => { + await sql`select 1` + const last = sql`select pg_sleep(0.2)`.execute() + const query = sql`select pg_sleep(2) as dig` + setTimeout(() => query.cancel(), 100) + const error = await query.catch(x => x) + await last + return 
['57014', error.code] +}) + +t('Cancel queued query works', async() => { + const tx = sql.begin(sql => sql`select pg_sleep(0.2) as hej, 'hejsa'`) + const query = sql`select pg_sleep(2) as nej` + setTimeout(() => query.cancel(), 50) + const error = await query.catch(x => x) + await tx + return ['57014', error.code] +}) + +t('Fragments', async() => [ + 1, + (await sql` + ${ sql`select` } 1 as x + `)[0].x +]) + +t('Result becomes array', async() => [ + true, + (await sql`select 1`).slice() instanceof Array +]) + +t('Describe', async() => { + const type = (await sql`select ${ 1 }::int as x`.describe()).types[0] + return [23, type] +}) + +t('Describe a statement', async() => { + await sql`create table tester (name text, age int)` + const r = await sql`select name, age from tester where name like $1 and age > $2`.describe() + return [ + '25,23/name:25,age:23', + `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`, + await sql`drop table tester` + ] + }) + +t('Describe a statement without parameters', async() => { + await sql`create table tester (name text, age int)` + const r = await sql`select name, age from tester`.describe() + return [ + '0,2', + `${ r.types.length },${ r.columns.length }`, + await sql`drop table tester` + ] + }) + +t('Describe a statement without columns', async () => { + await sql`create table tester (name text, age int)` + const r = await sql`insert into tester (name, age) values ($1, $2)`.describe() + return [ + '2,0', + `${ r.types.length },${ r.columns.length }`, + await sql`drop table tester` + ] + }) + +t('Large object', async() => { + const file = rel('index.js') + , md5 = crypto.createHash('md5').update(fs.readFileSync(file)).digest('hex') + + const lo = await sql.largeObject() + await new Promise(async r => fs.createReadStream(file).pipe(await lo.writable()).on('finish', r)) + await lo.seek(0) + + const out = crypto.createHash('md5') + await new Promise(r => lo.readable().then(x => x.on('data', x => 
out.update(x)).on('end', r))) + + return [ + md5, + out.digest('hex'), + await lo.close() + ] +}) diff --git a/cjs/tests/select-param.sql b/cjs/tests/select-param.sql new file mode 100644 index 00000000..d4de2440 --- /dev/null +++ b/cjs/tests/select-param.sql @@ -0,0 +1 @@ +select $1 as x diff --git a/cjs/tests/select.sql b/cjs/tests/select.sql new file mode 100644 index 00000000..f951e920 --- /dev/null +++ b/cjs/tests/select.sql @@ -0,0 +1 @@ +select 1 as x diff --git a/cjs/tests/test.js b/cjs/tests/test.js new file mode 100644 index 00000000..b282e871 --- /dev/null +++ b/cjs/tests/test.js @@ -0,0 +1,88 @@ +/* eslint no-console: 0 */ + +const util = require('util') + +let done = 0 +let only = false +let ignored = 0 +let failed = false +let promise = Promise.resolve() +const tests = {} + , ignore = {} + +const nt = module.exports.nt = () => ignored++ +const ot = module.exports.ot = (...rest) => (only = true, test(true, ...rest)) +const t = module.exports.t = (...rest) => test(false, ...rest) +t.timeout = 0.5 + +async function test(o, name, options, fn) { + typeof options !== 'object' && (fn = options, options = {}) + const line = new Error().stack.split('\n')[3].match(':([0-9]+):')[1] + + await 1 + + if (only && !o) + return + + tests[line] = { fn, line, name } + promise = promise.then(() => Promise.race([ + new Promise((resolve, reject) => + fn.timer = setTimeout(() => reject('Timed out'), (options.timeout || t.timeout) * 1000) + ), + failed + ? (ignored++, ignore) + : fn() + ])) + .then((x) => { + clearTimeout(fn.timer) + if (x === ignore) + return + + if (!Array.isArray(x)) + throw new Error('Test should return result array') + + const [expected, got] = x + if (expected !== got) { + failed = true + throw new Error(util.inspect(expected) + ' != ' + util.inspect(got)) + } + + tests[line].succeeded = true + process.stdout.write('✅') + }) + .catch(err => { + tests[line].failed = failed = true + tests[line].error = err instanceof Error ? 
err : new Error(util.inspect(err)) + }) + .then(() => { + ++done === Object.keys(tests).length && exit() + }) +} + +function exit() { + console.log('') + let success = true + Object.values(tests).every((x) => { + if (x.succeeded) + return true + + success = false + x.cleanup + ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup)) + : console.error('⛔️', x.name + ' at line', x.line, x.failed + ? 'failed' + : 'never finished', x.error ? '\n' + util.inspect(x.error) : '' + ) + }) + + only + ? console.error('⚠️', 'Not all tests were run') + : ignored + ? console.error('⚠️', ignored, 'ignored test' + (ignored === 1 ? '' : 's', '\n')) + : success + ? console.log('All good') + : console.error('⚠️', 'Not good') + + !process.exitCode && (!success || only || ignored) && (process.exitCode = 1) +} + diff --git a/deno/package.json b/deno/package.json new file mode 100644 index 00000000..0292b995 --- /dev/null +++ b/deno/package.json @@ -0,0 +1 @@ +{"type":"commonjs"} \ No newline at end of file diff --git a/deno/polyfills.js b/deno/polyfills.js new file mode 100644 index 00000000..37eabc66 --- /dev/null +++ b/deno/polyfills.js @@ -0,0 +1,162 @@ +/* global Deno */ + +import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' + +const events = () => ({ data: [], error: [], drain: [], connect: [], secureConnect: [], close: [] }) + +export const net = { + createServer() { + const server = { + address() { + return { port: 9876 } + }, + async listen() { + server.raw = Deno.listen({ port: 9876, transport: 'tcp' }) + for await (const conn of server.raw) + setTimeout(() => conn.close(), 500) + }, + close() { + server.raw.close() + } + } + return server + }, + Socket() { + let paused + , resume + + const socket = { + error, + success, + connect: (...xs) => { + socket.closed = false + socket.raw = null + xs.length === 1 + ? 
Deno.connect({ transport: 'unix', path: xs[0] }).then(success, error) + : Deno.connect({ transport: 'tcp', port: socket.port = xs[0], hostname: socket.hostname = xs[1] }).then(success, error) + }, + pause: () => { + paused = new Promise(r => resume = r) + }, + resume: () => { + resume && resume() + paused = null + }, + isPaused: () => !!paused, + removeAllListeners: () => socket.events = events(), + events: events(), + raw: null, + on: (x, fn) => socket.events[x].push(fn), + once: (x, fn) => { + if (x === 'data') + socket.break = true + const e = socket.events[x] + e.push(once) + once.once = fn + function once(...args) { + fn(...args) + e.indexOf(once) > -1 && e.splice(e.indexOf(once), 1) + } + }, + removeListener: (x, fn) => { + socket.events[x] = socket.events[x].filter(x => x !== fn && x.once !== fn) + }, + write: (x, cb) => { + socket.raw.write(x) + .then(() => (cb && cb(null))) + .catch(err => { + cb && cb() + call(socket.events.error, err) + }) + return false + }, + destroy: () => close(true), + end: close + } + + return socket + + async function success(raw) { + const encrypted = socket.encrypted + socket.raw = raw + socket.encrypted + ? 
call(socket.events.secureConnect) + : call(socket.events.connect) + + const b = new Uint8Array(1024) + let result + + try { + while ((result = !socket.closed && await raw.read(b))) { + call(socket.events.data, Buffer.from(b.subarray(0, result))) + if (!encrypted && socket.break && (socket.break = false, b[0] === 83)) + return socket.break = false + paused && await paused + } + } catch (e) { + if (e instanceof Deno.errors.BadResource === false) + error(e) + } + + if (!socket.encrypted || encrypted) + close() + } + + function close() { + try { + socket.raw && socket.raw.close() + } catch (e) { + if (e instanceof Deno.errors.BadResource === false) + call(socket.events.error, e) + } + closed() + } + + function closed() { + socket.break = socket.encrypted = false + if (socket.closed) + return + + call(socket.events.close) + socket.closed = true + } + + function error(err) { + call(socket.events.error, err) + socket.raw + ? close() + : closed() + } + + function call(xs, x) { + xs.slice().forEach(fn => fn(x)) + } + } +} + +export const tls = { + connect({ socket, ...options }) { + socket.encrypted = true + Deno.startTls(socket.raw, { hostname: socket.hostname, ...options }) + .then(socket.success, socket.error) + socket.raw = null + return socket + } +} + +let ids = 1 +const tasks = new Set() +export const setImmediate = fn => { + const id = ids++ + tasks.add(id) + queueMicrotask(() => { + if (tasks.has(id)) { + fn() + tasks.delete(id) + } + }) + return id +} + +export const clearImmediate = id => tasks.delete(id) + diff --git a/deno/src/bytes.js b/deno/src/bytes.js new file mode 100644 index 00000000..5037ea03 --- /dev/null +++ b/deno/src/bytes.js @@ -0,0 +1,79 @@ +import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' +const size = 256 +let buffer = Buffer.allocUnsafe(size) + +const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => { + const v = x.charCodeAt(0) + acc[x] = () => { + buffer[0] = v + b.i = 5 + return b + } + return acc +}, {}) + 
+const b = Object.assign(reset, messages, { + N: String.fromCharCode(0), + i: 0, + inc(x) { + b.i += x + return b + }, + str(x) { + const length = Buffer.byteLength(x) + fit(length) + b.i += buffer.write(x, b.i, length, 'utf8') + return b + }, + i16(x) { + fit(2) + buffer.writeUInt16BE(x, b.i) + b.i += 2 + return b + }, + i32(x, i) { + if (i || i === 0) { + buffer.writeUInt32BE(x, i) + return b + } + fit(4) + buffer.writeUInt32BE(x, b.i) + b.i += 4 + return b + }, + z(x) { + fit(x) + buffer.fill(0, b.i, b.i + x) + b.i += x + return b + }, + raw(x) { + buffer = Buffer.concat([buffer.slice(0, b.i), x]) + b.i = buffer.length + return b + }, + end(at = 1) { + buffer.writeUInt32BE(b.i - at, at) + const out = buffer.slice(0, b.i) + b.i = 0 + buffer = Buffer.allocUnsafe(size) + return out + } +}) + +export default b + +function fit(x) { + if (buffer.length - b.i < x) { + const prev = buffer + , length = prev.length + + buffer = Buffer.allocUnsafe(length + (length >> 1) + x) + prev.copy(buffer) + } +} + +function reset() { + b.i = 0 + return b +} diff --git a/deno/src/connection.js b/deno/src/connection.js new file mode 100644 index 00000000..61a2bc15 --- /dev/null +++ b/deno/src/connection.js @@ -0,0 +1,983 @@ +import { HmacSha256 } from 'https://deno.land/std@0.120.0/hash/sha256.ts' +import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' +import { setImmediate, clearImmediate } from '../polyfills.js' +import { net } from '../polyfills.js' +import { tls } from '../polyfills.js' +import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts' +import Stream from 'https://deno.land/std@0.120.0/node/stream.ts' + +import { Identifier, Builder, handleValue, arrayParser, arraySerializer, CLOSE } from './types.js' +import { Errors } from './errors.js' +import Result from './result.js' +import Queue from './queue.js' +import Query from './query.js' +import b from './bytes.js' + +export default Connection + +let uid = 1 + +const Sync = b().S().end() + , Flush = 
b().H().end() + , SSLRequest = b().i32(8).i32(80877103).end(8) + , ExecuteUnnamed = Buffer.concat([b().E().str(b.N).i32(0).end(), Sync]) + , DescribeUnnamed = b().D().str('S').str(b.N).end() + , noop = () => { /* noop */ } + +const retryRoutines = new Set([ + 'FetchPreparedStatement', + 'RevalidateCachedQuery', + 'transformAssignedExpr' +]) + +const errorFields = { + 83 : 'severity_local', // S + 86 : 'severity', // V + 67 : 'code', // C + 77 : 'message', // M + 68 : 'detail', // D + 72 : 'hint', // H + 80 : 'position', // P + 112 : 'internal_position', // p + 113 : 'internal_query', // q + 87 : 'where', // W + 115 : 'schema_name', // s + 116 : 'table_name', // t + 99 : 'column_name', // c + 100 : 'data type_name', // d + 110 : 'constraint_name', // n + 70 : 'file', // F + 76 : 'line', // L + 82 : 'routine' // R +} + +function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) { + const { + ssl, + user, + host, + port, + database, + parsers, + transform, + onnotice, + onnotify, + onparameter, + max_pipeline, + keep_alive, + backoff, + target_session_attrs + } = options + + const sent = Queue() + , id = uid++ + , backend = { pid: null, secret: null } + , idleTimer = timer(end, options.idle_timeout) + , lifeTimer = timer(end, options.max_lifetime) + , connectTimer = timer(connectTimedOut, options.connect_timeout) + + let socket = createSocket() + , result = new Result() + , incoming = Buffer.alloc(0) + , needsTypes = options.fetch_types + , backendParameters = {} + , statements = {} + , state = 'closed' + , statementId = Math.random().toString(36).slice(2) + , statementCount = 1 + , closedDate = 0 + , remaining = 0 + , hostIndex = 0 + , retries = 0 + , length = 0 + , delay = 0 + , rows = 0 + , serverSignature = null + , nextWriteTimer = null + , incomings = null + , results = null + , initial = null + , ending = null + , stream = null + , chunk = null + , ended = null + , nonce = null + , query = null + , final = null + + const 
connection = { + get state() { return state }, + set state(x) { + state = x + state === 'open' + ? idleTimer.start() + : idleTimer.cancel() + }, + connect(query) { + initial = query + reconnect() + }, + terminate, + execute, + cancel, + end, + count: 0, + id + } + + return connection + + function createSocket() { + const x = net.Socket() + x + x.on('error', error) + x.on('close', closed) + x.on('drain', drain) + return x + } + + function cancel({ pid, secret }, resolve, reject) { + socket.removeAllListeners() + socket = net.Socket() + socket.on('connect', () => socket.write(b().i32(16).i32(80877102).i32(pid).i32(secret).end(16))) + socket.once('error', reject) + socket.once('close', resolve) + connect() + } + + function execute(q) { + if (q.cancelled) + return + + try { + q.state = backend + query + ? sent.push(q) + : (query = q, query.active = true) + + build(q) + return write(toBuffer(q)) + && !q.describeFirst + && sent.length < max_pipeline + && (!q.options.onexecute || q.options.onexecute(connection)) + } catch (error) { + sent.length === 0 && write(Sync) + errored(error) + return true + } + } + + function toBuffer(q) { + if (q.parameters.length >= 65534) + throw Errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) + + return q.options.simple + ? b().Q().str(q.strings[0] + b.N).end() + : q.describeFirst + ? Buffer.concat([describe(q), Flush]) + : q.prepare + ? q.prepared + ? prepared(q) + : Buffer.concat([describe(q), prepared(q)]) + : unnamed(q) + } + + function describe(q) { + return Buffer.concat([ + Parse(q.statement.string, q.parameters, q.statement.types, q.statement.name), + Describe('S', q.statement.name) + ]) + } + + function prepared(q) { + return Buffer.concat([ + Bind(q.parameters, q.statement.types, q.statement.name, q.cursorName), + q.cursorFn + ? 
Execute('', q.cursorRows) + : ExecuteUnnamed + ]) + } + + function unnamed(q) { + return Buffer.concat([ + Parse(q.statement.string, q.parameters, q.statement.types), + DescribeUnnamed, + prepared(q) + ]) + } + + function build(q) { + const parameters = [] + , types = [] + + const string = stringify(q, q.strings[0], q.args[0], parameters, types) + + !q.tagged && q.args.forEach(x => handleValue(x, parameters, types)) + + q.prepare = options.prepare && ('prepare' in q.options ? q.options.prepare : true) + q.string = string + q.signature = q.prepare && types + string + q.onlyDescribe && (delete statements[q.signature]) + q.parameters = q.parameters || parameters + q.prepared = q.prepare && q.signature in statements + q.describeFirst = q.onlyDescribe || (parameters.length && !q.prepared) + q.statement = q.prepared + ? statements[q.signature] + : { string, types, name: q.prepare ? statementId + statementCount++ : '' } + + typeof options.debug === 'function' && options.debug(id, string, parameters, types) + } + + function stringify(q, string, value, parameters, types) { + for (let i = 1; i < q.strings.length; i++) { + string += ( + value instanceof Query ? fragment(string, value, parameters, types) : + value instanceof Identifier ? value.value : + value instanceof Builder ? value.build(string, parameters, types, options.transform) : + handleValue(value, parameters, types) + ) + q.strings[i] + value = q.args[i] + } + + return string + } + + function fragment(string, q, parameters, types) { + q.fragment = true + return stringify(q, string + q.strings[0], q.args[0], parameters, types) + } + + function write(x, fn) { + chunk = chunk ? 
Buffer.concat([chunk, x]) : Buffer.from(x) + if (fn || chunk.length >= 1024) + return nextWrite(fn) + nextWriteTimer === null && (nextWriteTimer = setImmediate(nextWrite)) + return true + } + + function nextWrite(fn) { + const x = socket.write(chunk, fn) + nextWriteTimer !== null && clearImmediate(nextWriteTimer) + chunk = nextWriteTimer = null + return x + } + + function connectTimedOut() { + errored(Errors.connection('CONNECT_TIMEOUT', options, socket)) + socket.destroy() + } + + async function secure() { + write(SSLRequest) + const canSSL = await new Promise(r => socket.once('data', x => r(x[0] === 83))) // S + + if (!canSSL && ssl === 'prefer') + return connected() + + socket.removeAllListeners() + socket = tls.connect({ + socket, + ...(ssl === 'require' || ssl === 'allow' || ssl === 'prefer' + ? { rejectUnauthorized: false } + : ssl + ) + }) + socket.on('secureConnect', connected) + socket.on('error', error) + socket.on('close', closed) + socket.on('drain', drain) + } + + /* c8 ignore next 3 */ + function drain() { + ondrain(connection) + } + + function data(x) { + if (incomings) { + incomings.push(x) + remaining -= x.length + if (remaining >= 0) + return + } + + incoming = incomings + ? Buffer.concat(incomings, length - remaining) + : incoming.length === 0 + ? x + : Buffer.concat([incoming, x], incoming.length + x.length) + + while (incoming.length > 4) { + length = incoming.readUInt32BE(1) + if (length >= incoming.length) { + remaining = length - incoming.length + incomings = [incoming] + break + } + + handle(incoming.slice(0, length + 1)) + incoming = incoming.slice(length + 1) + remaining = 0 + incomings = null + } + } + + function connect() { + backendParameters = {} + connectTimer.start() + socket.on('connect', ssl ? 
secure : connected) + + if (options.path) + return socket.connect(options.path) + + socket.connect(port[hostIndex], host[hostIndex]) + hostIndex = (hostIndex + 1) % port.length + } + + function reconnect() { + setTimeout(connect, closedDate ? closedDate + delay - Date.now() : 0) + } + + function connected() { + try { + statements = {} + needsTypes = options.fetch_types + statementId = Math.random().toString(36).slice(2) + statementCount = 1 + lifeTimer.start() + socket.on('data', data) + const s = StartupMessage() + write(s) + } catch (err) { + error(err) + } + } + + function error(err) { + if (connection.state === 'connecting' && options.host[retries + 1]) + return + + errored(err) + while (sent.length) + queryError(sent.shift(), err) + } + + function errored(err) { + query && queryError(query, err) + initial && (queryError(initial, err), initial = null) + } + + function queryError(query, err) { + if (err.query) + return + + err.stack += query.origin.replace(/.*\n/, '\n') + Object.defineProperties(err, { + query: { value: query.string, enumerable: options.debug }, + parameters: { value: query.parameters, enumerable: options.debug }, + args: { value: query.args, enumerable: options.debug }, + types: { value: query.statement && query.statement.types, enumerable: options.debug } + }) + query.reject(err) + } + + function end() { + return ending || ( + !connection.reserved && onend(connection), + !connection.reserved && !initial && !query && sent.length === 0 + ? 
Promise.resolve(terminate()) + : ending = new Promise(r => ended = r) + ) + } + + function terminate() { + if (query || initial || sent.length) + error(Errors.connection('CONNECTION_DESTROYED', options)) + + clearImmediate(nextWriteTimer) + socket.removeListener('data', data) + socket.removeListener('connect', connected) + socket.readyState !== 'closed' && socket.end(b().X().end()) + ended && (ended(), ending = ended = null) + } + + function closed(hadError) { + clearImmediate(nextWriteTimer) + socket.removeListener('data', data) + socket.removeListener('connect', connected) + idleTimer.cancel() + lifeTimer.cancel() + connectTimer.cancel() + + if (socket.encrypted) { + socket.removeAllListeners() + socket = createSocket() + } + + if (initial) + return reconnect() + + !hadError && (query || sent.length) && error(Errors.connection('CONNECTION_CLOSED', options, socket)) + closedDate = Date.now() + hadError && options.shared.retries++ + delay = (typeof backoff === 'function' ? backoff(options.shared.retries) : backoff) * 1000 + onclose(connection) + } + + /* Handlers */ + function handle(xs, x = xs[0]) { + ( + x === 68 ? DataRow : // D + x === 100 ? CopyData : // d + x === 65 ? NotificationResponse : // A + x === 83 ? ParameterStatus : // S + x === 90 ? ReadyForQuery : // Z + x === 67 ? CommandComplete : // C + x === 50 ? BindComplete : // 2 + x === 49 ? ParseComplete : // 1 + x === 116 ? ParameterDescription : // t + x === 84 ? RowDescription : // T + x === 82 ? Authentication : // R + x === 110 ? NoData : // n + x === 75 ? BackendKeyData : // K + x === 69 ? ErrorResponse : // E + x === 115 ? PortalSuspended : // s + x === 51 ? CloseComplete : // 3 + x === 71 ? CopyInResponse : // G + x === 78 ? NoticeResponse : // N + x === 72 ? CopyOutResponse : // H + x === 99 ? CopyDone : // c + x === 73 ? EmptyQueryResponse : // I + x === 86 ? FunctionCallResponse : // V + x === 118 ? NegotiateProtocolVersion : // v + x === 87 ? 
CopyBothResponse : // W + /* c8 ignore next */ + UnknownMessage + )(xs) + } + + function DataRow(x) { + let index = 7 + let length + let column + let value + + const row = query.isRaw ? new Array(query.statement.columns.length) : {} + for (let i = 0; i < query.statement.columns.length; i++) { + column = query.statement.columns[i] + length = x.readInt32BE(index) + index += 4 + + value = length === -1 + ? null + : query.isRaw + ? x.slice(index, index += length) + : column.parser === undefined + ? x.toString('utf8', index, index += length) + : column.parser.array === true + ? column.parser(x.toString('utf8', index + 1, index += length)) + : column.parser(x.toString('utf8', index, index += length)) + + query.isRaw + ? (row[i] = value) + : (row[column.name] = transform.value.from ? transform.value.from(value) : value) + } + + query.forEachFn + ? query.forEachFn(transform.row.from ? transform.row.from(row) : row, result) + : (result[rows++] = transform.row.from ? transform.row.from(row) : row) + } + + function ParameterStatus(x) { + const [k, v] = x.toString('utf8', 5, x.length - 1).split(b.N) + backendParameters[k] = v + if (options.parameters[k] !== v) { + options.parameters[k] = v + onparameter && onparameter(k, v) + } + } + + function ReadyForQuery() { + query && query.options.simple && query.resolve(results || result) + query = results = null + result = new Result() + connectTimer.cancel() + + if (initial) { + if (target_session_attrs) { + if (!backendParameters.in_hot_standby || !backendParameters.default_transaction_read_only) + return fetchState() + else if (tryNext(target_session_attrs, backendParameters)) + return terminate() + } + + if (needsTypes) + return fetchArrayTypes() + + execute(initial) + options.shared.retries = retries = initial = 0 + return + } + + while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled) + Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject) + + if (query) // 
Consider opening if able and sent.length < 50 + return + + ending + ? terminate() + : onopen(connection) + + } + + function CommandComplete(x) { + rows = 0 + + for (let i = x.length - 1; i > 0; i--) { + if (x[i] === 32 && x[i + 1] < 58 && result.count === null) + result.count = +x.toString('utf8', i + 1, x.length - 1) + if (x[i - 1] >= 65) { + result.command = x.toString('utf8', 5, i) + result.state = backend + break + } + } + + final && (final(), final = null) + + if (query.options.simple) + return + + if (query.cursorFn) { + result.count && query.cursorFn(result) + write(Sync) + } + + query.resolve(result) + } + + function ParseComplete() { + query.parsing = false + } + + function BindComplete() { + !result.statement && (result.statement = query.statement) + result.columns = query.statement.columns + } + + function ParameterDescription(x) { + const length = x.readUInt16BE(5) + + for (let i = 0; i < length; ++i) + !query.statement.types[i] && (query.statement.types[i] = x.readUInt32BE(7 + i * 4)) + + query.prepare && (statements[query.signature] = query.statement) + query.describeFirst && !query.onlyDescribe && (write(prepared(query)), query.describeFirst = false) + } + + function RowDescription(x) { + if (result.command) { + results = results || [result] + results.push(result = new Result()) + result.count = null + query.statement.columns = null + } + + const length = x.readUInt16BE(5) + let index = 7 + let start + + query.statement.columns = Array(length) + + for (let i = 0; i < length; ++i) { + start = index + while (x[index++] !== 0); + const type = x.readUInt32BE(index + 6) + query.statement.columns[i] = { + name: transform.column.from + ? 
transform.column.from(x.toString('utf8', start, index - 1)) + : x.toString('utf8', start, index - 1), + parser: parsers[type], + type + } + index += 18 + } + + result.statement = query.statement + if (query.onlyDescribe) + return (query.resolve(query.statement), write(Sync)) + } + + async function Authentication(x, type = x.readUInt32BE(5)) { + ( + type === 3 ? AuthenticationCleartextPassword : + type === 5 ? AuthenticationMD5Password : + type === 10 ? SASL : + type === 11 ? SASLContinue : + type === 12 ? SASLFinal : + type !== 0 ? UnknownAuth : + noop + )(x, type) + } + + /* c8 ignore next 5 */ + async function AuthenticationCleartextPassword() { + write( + b().p().str(await Pass()).z(1).end() + ) + } + + async function AuthenticationMD5Password(x) { + write( + b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end() + ) + } + + function SASL() { + b().p().str('SCRAM-SHA-256' + b.N) + const i = b.i + nonce = crypto.randomBytes(18).toString('base64') + write(b.inc(4).str('n,,n=*,r=' + nonce).i32(b.i - i - 4, i).end()) + } + + async function SASLContinue(x) { + const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {}) + + const saltedPassword = crypto.pbkdf2Sync( + await Pass(), + Buffer.from(res.s, 'base64'), + parseInt(res.i), 32, + 'sha256' + ) + + const clientKey = hmac(saltedPassword, 'Client Key') + + const auth = 'n=*,r=' + nonce + ',' + + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i + + ',c=biws,r=' + res.r + + serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64') + + write( + b().p().str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')).end() + ) + } + + function SASLFinal(x) { + if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature) + return + /* c8 ignore next 5 */ + errored(Errors.generic({ + message: 'The server did not return the correct signature', + code: 
'SASL_SIGNATURE_MISMATCH' + })) + socket.destroy() + } + + function Pass() { + return Promise.resolve(typeof options.pass === 'function' + ? options.pass() + : options.pass + ) + } + + function NoData() { + result.statement = query.statement + result.statement.columns = [] + if (query.onlyDescribe) + return (query.resolve(query.statement), write(Sync)) + } + + function BackendKeyData(x) { + backend.pid = x.readUInt32BE(5) + backend.secret = x.readUInt32BE(9) + } + + async function fetchArrayTypes() { + needsTypes = false + const types = await new Query([` + select b.oid, b.typarray + from pg_catalog.pg_type a + left join pg_catalog.pg_type b on b.oid = a.typelem + where a.typcategory = 'A' + group by b.oid, b.typarray + order by b.oid + `], [], execute) + types.forEach(({ oid, typarray }) => addArrayType(oid, typarray)) + } + + function addArrayType(oid, typarray) { + const parser = options.parsers[oid] + options.shared.typeArrayMap[oid] = typarray + options.parsers[typarray] = (xs) => arrayParser(xs, parser) + options.parsers[typarray].array = true + options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid]) + } + + function tryNext(x, xs) { + return ( + (x === 'read-write' && xs.default_transaction_read_only === 'on') || + (x === 'read-only' && xs.default_transaction_read_only === 'off') || + (x === 'primary' && xs.in_hot_standby === 'off') || + (x === 'standby' && xs.in_hot_standby === 'on') || + (x === 'prefer-standby' && xs.in_hot_standby === 'off' && options.host[retries]) + ) + } + + function fetchState() { + const query = new Query([` + show transaction_read_only; + select pg_catalog.pg_is_in_recovery() + `], [], execute, null, { simple: true }) + query.resolve = ([[a], [b]]) => { + backendParameters.default_transaction_read_only = a.transaction_read_only + backendParameters.in_hot_standby = b.pg_is_in_recovery ? 
'on' : 'off' + } + query.execute() + } + + function ErrorResponse(x) { + query && (query.cursorFn || query.describeFirst) && write(Sync) + const error = Errors.postgres(parseError(x)) + query && !query.retried && retryRoutines.has(error.routine) + ? retry(query) + : errored(error) + } + + function retry(q) { + delete statements[q.signature] + q.retried = true + execute(q) + } + + function NotificationResponse(x) { + if (!onnotify) + return + + let index = 9 + while (x[index++] !== 0); + onnotify( + x.toString('utf8', 9, index - 1), + x.toString('utf8', index, x.length - 1) + ) + } + + async function PortalSuspended() { + try { + const x = await Promise.resolve(query.cursorFn(result)) + rows = 0 + x === CLOSE + ? write(Close(query.portal)) + : (result = new Result(), write(Execute('', query.cursorRows))) + } catch (err) { + write(Sync) + query.reject(err) + } + } + + function CloseComplete() { + result.count && query.cursorFn(result) + query.resolve(result) + } + + function CopyInResponse() { + stream = new Stream.Writable({ + write(chunk, encoding, callback) { + socket.write(b().d().raw(chunk).end(), callback) + }, + destroy(error, callback) { + callback(error) + socket.write(b().f().str(error + b.N).end()) + }, + final(callback) { + socket.write(b().c().end()) + final = callback + } + }) + query.resolve(stream) + } + + function CopyOutResponse() { + stream = new Stream.Readable({ + read() { socket.resume() } + }) + query.resolve(stream) + } + + /* c8 ignore next 3 */ + function CopyBothResponse() { + stream = new Stream.Readable({ + read() { socket.resume() }, + /* c8 ignore next 11 */ + write(chunk, encoding, callback) { + socket.write(b().d().raw(chunk).end(), callback) + }, + destroy(error, callback) { + callback(error) + socket.write(b().f().str(error + b.N).end()) + }, + final(callback) { + socket.write(b().c().end()) + final = callback + } + }) + query.resolve(stream) + } + + function CopyData(x) { + stream.push(x.slice(5)) || socket.pause() + } + + function 
CopyDone() { + stream.push(null) + } + + function NoticeResponse(x) { + onnotice + ? onnotice(parseError(x)) + : console.log(parseError(x)) // eslint-disable-line + + } + + /* c8 ignore next 3 */ + function EmptyQueryResponse() { + + } + + /* c8 ignore next 3 */ + function FunctionCallResponse() { + errored(Errors.notSupported('FunctionCallResponse')) + } + + /* c8 ignore next 3 */ + function NegotiateProtocolVersion() { + errored(Errors.notSupported('NegotiateProtocolVersion')) + } + + /* c8 ignore next 3 */ + function UnknownMessage(x) { + console.error('Unknown message', x) + } + + /* c8 ignore next 3 */ + function UnknownAuth(x, type) { + console.error('Unknown auth', type) + } + + /* Messages */ + function Bind(parameters, types, statement = '', portal = '') { + let prev + , type + + b().B().str(portal + b.N).str(statement + b.N).i16(0).i16(parameters.length) + + parameters.forEach((x, i) => { + if (x === null) + return b.i32(0xFFFFFFFF) + + type = types[i] + parameters[i] = x = type in options.serializers + ? 
options.serializers[type](x) + : '' + x + + prev = b.i + b.inc(4).str(x).i32(b.i - prev - 4, prev) + }) + + b.i16(0) + + return b.end() + } + + function Parse(str, parameters, types, name = '') { + b().P().str(name + b.N).str(str + b.N).i16(parameters.length) + parameters.forEach((x, i) => b.i32(types[i] || 0)) + return b.end() + } + + function Describe(x, name = '') { + return b().D().str(x).str(name + b.N).end() + } + + function Execute(portal = '', rows = 0) { + return Buffer.concat([ + b().E().str(portal + b.N).i32(rows).end(), + Flush + ]) + } + + function Close(portal = '') { + return Buffer.concat([ + b().C().str('P').str(portal + b.N).end(), + b().S().end() + ]) + } + + function StartupMessage() { + return b().inc(4).i16(3).z(2).str( + Object.entries(Object.assign({ + user, + database, + client_encoding: '\'utf-8\'' + }, + options.connection + )).filter(([, v]) => v).map(([k, v]) => k + b.N + v).join(b.N) + ).z(2).end(0) + } + +} + +function parseError(x) { + const error = {} + let start = 5 + for (let i = 5; i < x.length - 1; i++) { + if (x[i] === 0) { + error[errorFields[x[start]]] = x.toString('utf8', start + 1, i) + start = i + 1 + } + } + return error +} + +function md5(x) { + return crypto.createHash('md5').update(x).digest('hex') +} + +function hmac(key, x) { + return Buffer.from(new HmacSha256(key).update(x).digest()) +} + +function sha256(x) { + return crypto.createHash('sha256').update(x).digest() +} + +function xor(a, b) { + const length = Math.max(a.length, b.length) + const buffer = Buffer.allocUnsafe(length) + for (let i = 0; i < length; i++) + buffer[i] = a[i] ^ b[i] + return buffer +} + +function timer(fn, seconds) { + seconds = typeof seconds === 'function' ? 
seconds() : seconds + if (!seconds) + return { cancel: noop, start: noop } + + let timer + return { + cancel() { + timer && (clearTimeout(timer), timer = null) + }, + start() { + timer && clearTimeout(timer) + timer = (window.timer = setTimeout(done, seconds * 1000, arguments), Deno.unrefTimer(window.timer), window.timer) + } + } + + function done(args) { + fn.apply(null, args) + timer = null + } +} diff --git a/lib/errors.js b/deno/src/errors.js similarity index 73% rename from lib/errors.js rename to deno/src/errors.js index 16732d44..ed12202d 100644 --- a/lib/errors.js +++ b/deno/src/errors.js @@ -1,14 +1,12 @@ -class PostgresError extends Error { +export class PostgresError extends Error { constructor(x) { super(x.message) this.name = this.constructor.name Object.assign(this, x) } -} - -module.exports.PostgresError = PostgresError +}; -module.exports.errors = { +export const Errors = { connection, postgres, generic, @@ -16,13 +14,14 @@ module.exports.errors = { } function connection(x, options, socket) { + const { host, port } = socket || options const error = Object.assign( - new Error(('write ' + x + ' ' + (options.path || (socket.host + ':' + socket.port)))), + new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))), { code: x, errno: x, - address: options.path || socket.host - }, options.path ? {} : { port: socket.port } + address: options.path || host + }, options.path ? 
{} : { port: port } ) Error.captureStackTrace(error, connection) return error @@ -40,6 +39,7 @@ function generic(x) { return error } +/* c8 ignore next 10 */ function notSupported(x) { const error = Object.assign( new Error(x + ' (B) is not supported'), diff --git a/deno/src/index.js b/deno/src/index.js new file mode 100644 index 00000000..0667e218 --- /dev/null +++ b/deno/src/index.js @@ -0,0 +1,541 @@ +import process from 'https://deno.land/std@0.120.0/node/process.ts' +import os from 'https://deno.land/std@0.120.0/node/os.ts' +import fs from 'https://deno.land/std@0.120.0/node/fs.ts' +import Stream from 'https://deno.land/std@0.120.0/node/stream.ts' + +import { + mergeUserTypes, + inferType, + Parameter, + Identifier, + Builder, + toPascal, + toCamel, + toKebab, + fromPascal, + fromCamel, + fromKebab, + CLOSE +} from './types.js' + +import Connection from './connection.js' +import Query from './query.js' +import Queue from './queue.js' +import { Errors, PostgresError } from './errors.js' +import Subscribe from './subscribe.js' + +Object.assign(Postgres, { + PostgresError, + toPascal, + toCamel, + toKebab, + fromPascal, + fromCamel, + fromKebab, + BigInt +}) + +export default Postgres + +function Postgres(a, b) { + const options = parseOptions(a, b) + , subscribe = Subscribe(Postgres, { ...options }) + + let ending = false + + const queries = Queue() + , connections = [...Array(options.max)].map(() => Connection(options, { onopen, onend, ondrain, onclose })) + , closed = Queue(connections) + , reserved = Queue() + , open = Queue() + , busy = Queue() + , full = Queue() + , ended = Queue() + , connecting = Queue() + , queues = { closed, ended, connecting, reserved, open, busy, full } + + const sql = Sql(handler) + + Object.assign(sql, { + get parameters() { return options.parameters }, + largeObject, + subscribe, + CLOSE, + END: CLOSE, + PostgresError, + options, + listen, + notify, + begin, + end + }) + + return sql + + function Sql(handler, instant) { + 
handler.debug = options.debug + + Object.assign(sql, { + types: Object.entries(options.types).reduce((acc, [name, type]) => { + acc[name] = (x) => new Parameter(x, type.to) + return acc + }, {}), + unsafe, + array, + json, + file + }) + + return sql + + function sql(strings, ...args) { + const query = strings && Array.isArray(strings.raw) + ? new Query(strings, args, handler, cancel) + : typeof strings === 'string' && !args.length + ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings) + : new Builder(strings, args) + instant && query instanceof Query && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + + function unsafe(string, args = [], options = {}) { + arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) + const query = new Query([string], args, handler, cancel, { + prepare: false, + ...options, + simple: 'simple' in options ? options.simple : args.length === 0 + }) + instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + + function file(path, args = [], options = { cache: true }) { + arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) + const query = new Query([], args, (query) => { + fs.readFile(path, 'utf8', (err, string) => { + if (err) + return query.reject(err) + + query.strings = [string] + handler(query) + }) + }, cancel, { + ...options, + simple: 'simple' in options ? 
options.simple : args.length === 0 + }) + instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + } + + async function listen(name, fn) { + const sql = listen.sql || (listen.sql = Postgres({ + ...options, + max: 1, + idle_timeout: null, + max_lifetime: null, + fetch_types: false, + onclose() { + Object.entries(listen.channels).forEach(([channel, { listeners }]) => { + delete listen.channels[channel] + Promise.all(listeners.map(fn => listen(channel, fn).catch(() => { /* noop */ }))) + }) + }, + onnotify(c, x) { + c in listen.channels && listen.channels[c].listeners.forEach(fn => fn(x)) + } + })) + + const channels = listen.channels || (listen.channels = {}) + , exists = name in channels + , channel = exists ? channels[name] : (channels[name] = { listeners: [fn] }) + + if (exists) { + channel.listeners.push(fn) + return Promise.resolve({ ...channel.result, unlisten }) + } + + channel.result = await sql`listen ${ sql(name) }` + channel.result.unlisten = unlisten + + return channel.result + + async function unlisten() { + if (name in channels === false) + return + + channel.listeners = channel.listeners.filter(x => x !== fn) + if (channels[name].listeners.length) + return + + delete channels[name] + return sql`unlisten ${ sql(name) }` + } + } + + async function notify(channel, payload) { + return await sql`select pg_notify(${ channel }, ${ '' + payload })` + } + + async function begin(options, fn) { + !fn && (fn = options, options = '') + return new Promise(async(resolve, reject) => { + await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).catch(reject) + + function onexecute(c) { + const queries = Queue() + let savepoints = 0 + + queues[c.state].remove(c) + c.state = 'reserved' + c.reserved = () => queries.length && handler(queries.shift()) + reserved.push(c) + + const sql = Sql(handler, true) + sql.savepoint = savepoint + + start() + + return false + + async function start() { + try { + const xs 
= fn(sql) + const result = await (Array.isArray(xs) ? Promise.all(xs) : xs) + await sql`commit` + resolve(result) + } catch (error) { + await sql`rollback`.catch(reject) + reject(error) + } + c.reserved = null + onopen(c) + } + + async function savepoint(name, fn) { + if (name && Array.isArray(name.raw)) + return savepoint(sql => sql.apply(sql, arguments)) + + try { + arguments.length === 1 && (fn = name, name = null) + name = 's' + savepoints++ + (name ? '_' + name : '') + await sql`savepoint ${ sql(name) }` + } catch (err) { + reject(err) + } + try { + return await Promise.resolve(fn(sql)) + } catch (err) { + await sql`rollback to ${ sql(name) }` + throw err + } + } + + function handler(query) { + c.state === 'full' + ? queries.push(query) + : c.execute(query) + } + } + }) + } + + function largeObject(oid, mode = 0x00020000 | 0x00040000) { + return new Promise(async(resolve, reject) => { + await sql.begin(async sql => { + let finish + !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`) + const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd` + + const lo = { + writable, + readable, + close : () => sql`select lo_close(${ fd })`.then(finish), + tell : () => sql`select lo_tell64(${ fd })`, + read : (x) => sql`select loread(${ fd }, ${ x }) as data`, + write : (x) => sql`select lowrite(${ fd }, ${ x })`, + truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`, + seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`, + size : () => sql` + select + lo_lseek64(${ fd }, location, 0) as position, + seek.size + from ( + select + lo_lseek64($1, 0, 2) as size, + tell.location + from (select lo_tell64($1) as location) tell + ) seek + ` + } + + resolve(lo) + + return new Promise(async r => finish = r) + + async function readable({ + highWaterMark = 2048 * 8, + start = 0, + end = Infinity + } = {}) { + let max = end - start + start && await lo.seek(start) + return new Stream.Readable({ + highWaterMark, + async read(size) { 
+ const l = size > max ? size - max : size + max -= size + const [{ data }] = await lo.read(l) + this.push(data) + if (data.length < size) + this.push(null) + } + }) + } + + async function writable({ + highWaterMark = 2048 * 8, + start = 0 + } = {}) { + start && await lo.seek(start) + return new Stream.Writable({ + highWaterMark, + write(chunk, encoding, callback) { + lo.write(chunk).then(() => callback(), callback) + } + }) + } + }).catch(reject) + }) + } + + function json(x) { + return new Parameter(x, 114) + } + + function array(x, type) { + if (!Array.isArray(x)) + return array(Array.from(arguments)) + + return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap) + } + + function handler(query) { + if (ending) + return query.reject(Errors.connection('CONNECTION_ENDED', options, options)) + + if (open.length) + return go(open, query) + + if (closed.length) + return connect(closed.shift(), query) + + busy.length + ? go(busy, query) + : queries.push(query) + } + + function go(xs, query) { + const c = xs.shift() + return c.execute(query) + ? (c.state = 'busy', busy.push(c)) + : (c.state = 'full', full.push(c)) + } + + function cancel(query) { + return new Promise((resolve, reject) => { + query.state + ? query.active + ? Connection(options, {}).cancel(query.state, resolve, reject) + : query.cancelled = { resolve, reject } + : ( + queries.remove(query), + query.cancelled = true, + query.reject(Errors.generic({ code: '57014', message: 'canceling statement due to user request' })), + resolve() + ) + }) + } + + function end({ timeout = null } = {}) { + if (ending) + return ending + + let timer + return ending = Promise.race([ + new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))), + Promise.all(connections.map(c => c.end()).concat( + listen.sql ? listen.sql.end({ timeout: 0 }) : [], + subscribe.sql ? 
subscribe.sql.end({ timeout: 0 }) : [] + )) + ]).then(() => clearTimeout(timer)) + } + + async function destroy(resolve) { + await Promise.all(connections.map(c => c.terminate())) + while (queries.length) + queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options)) + resolve() + } + + function connect(c, query) { + c.state = 'connecting' + connecting.push(c) + c.connect(query) + } + + function onend(c) { + queues[c.state].remove(c) + c.state = 'ended' + ended.push(c) + } + + function onopen(c) { + queues[c.state].remove(c) + + if (c.reserved) { + c.state = 'reserved' + c.reserved() + reserved.push(c) + return + } + + if (queries.length === 0) + return (c.state = 'open', open.push(c)) + + let max = Math.ceil(queries.length / (connecting.length + 1)) + , ready = true + + while (ready && queries.length && max-- > 0) + ready = c.execute(queries.shift()) + + ready + ? (c.state = 'busy', busy.push(c)) + : (c.state = 'full', full.push(c)) + } + + function ondrain(c) { + full.remove(c) + onopen(c) + } + + function onclose(c) { + queues[c.state].remove(c) + c.state = 'closed' + c.reserved = null + options.onclose && options.onclose(c.id) + queries.length + ? connect(c, queries.shift()) + : queues.closed.push(c) + } +} + +function parseOptions(a, b) { + if (a && a.shared) + return a + + const env = process.env // eslint-disable-line + , o = (typeof a === 'string' ? b : a) || {} + , { url, multihost } = parseUrl(a, env) + , query = url.searchParams + , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost' + , port = o.port || url.port || env.PGPORT || 5432 + , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername() + + return Object.assign({ + host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]), + port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)), + path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' 
+ port, + database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user, + user : user, + pass : o.pass || o.password || url.password || env.PGPASSWORD || '', + max : o.max || query.get('max') || 10, + types : o.types || {}, + ssl : o.ssl || parseSSL(query.get('sslmode') || query.get('ssl')) || false, + idle_timeout : o.idle_timeout || query.get('idle_timeout') || env.PGIDLE_TIMEOUT || warn(o.timeout), + connect_timeout : o.connect_timeout || query.get('connect_timeout') || env.PGCONNECT_TIMEOUT || 30, + max_lifetime : o.max_lifetime || url.max_lifetime || max_lifetime, + max_pipeline : o.max_pipeline || url.max_pipeline || 100, + backoff : o.backoff || url.backoff || backoff, + keep_alive : o.keep_alive || url.keep_alive || 60, + prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true, + onnotice : o.onnotice, + onnotify : o.onnotify, + onclose : o.onclose, + onparameter : o.onparameter, + transform : parseTransform(o.transform || {}), + connection : Object.assign({ application_name: 'postgres.js' }, o.connection), + target_session_attrs: tsa(o, url, env), + debug : o.debug, + fetch_types : 'fetch_types' in o ? o.fetch_types : true, + parameters : {}, + shared : { retries: 0, typeArrayMap: {} } + }, + mergeUserTypes(o.types) + ) +} + +function tsa(o, url, env) { + const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS + if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x)) + return x + + throw new Error('target_session_attrs ' + x + ' is not supported') +} + +function backoff(retries) { + return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20) +} + +function max_lifetime() { + return 60 * (30 + Math.random() * 30) +} + +function parseTransform(x) { + return { + column: { + from: typeof x.column === 'function' ? 
x.column : x.column && x.column.from, + to: x.column && x.column.to + }, + value: { + from: typeof x.value === 'function' ? x.value : x.value && x.value.from, + to: x.value && x.value.to + }, + row: { + from: typeof x.row === 'function' ? x.row : x.row && x.row.from, + to: x.row && x.row.to + } + } +} + +function parseSSL(x) { + return x !== 'disable' && x !== 'false' && x +} + +function parseUrl(url) { + if (typeof url !== 'string') + return { url: { searchParams: new Map() } } + + let host = url + host = host.slice(host.indexOf('://') + 3) + host = host.split(/[?/]/)[0] + host = host.slice(host.indexOf('@') + 1) + + return { + url: new URL(url.replace(host, host.split(',')[0])), + multihost: host.indexOf(',') > -1 && host + } +} + +function warn(x) { + typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line + return x +} + +function osUsername() { + try { + return os.userInfo().username // eslint-disable-line + } catch (_) { + return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line + } +} diff --git a/deno/src/query.js b/deno/src/query.js new file mode 100644 index 00000000..1b5826c5 --- /dev/null +++ b/deno/src/query.js @@ -0,0 +1,141 @@ +const originCache = new Map() + +export default class Query extends Promise { + constructor(strings, args, handler, canceller, options = {}) { + let resolve + , reject + + super((a, b) => { + resolve = a + reject = b + }) + + this.tagged = Array.isArray(strings.raw) + this.strings = strings + this.args = args + this.handler = handler + this.canceller = canceller + this.options = options + + this.state = null + this.statement = null + + this.resolve = x => (this.active = false, resolve(x)) + this.reject = x => (this.active = false, reject(x)) + + this.active = false + this.cancelled = null + this.executed = false + this.signature = '' + + this.origin = handler.debug ? 
new Error().stack : cachedError(this.strings) + } + + static get [Symbol.species]() { + return Promise + } + + cancel() { + return this.canceller && (this.canceller(this), this.canceller = null) + } + + async readable() { + this.options.simple = true + this.options.prepare = false + this.streaming = true + return this + } + + async writable() { + this.options.simple = true + this.options.prepare = false + this.streaming = true + return this + } + + cursor(rows = 1, fn) { + this.options.simple = false + if (typeof rows === 'function') { + fn = rows + rows = 1 + } + + this.cursorRows = rows + + if (typeof fn === 'function') + return (this.cursorFn = fn, this) + + let prev + return { + [Symbol.asyncIterator]: () => ({ + next: () => { + prev && prev() + const promise = new Promise((resolve, reject) => { + this.cursorFn = x => { + resolve({ value: x, done: false }) + return new Promise(r => prev = r) + } + this.resolve = () => (this.active = false, resolve({ done: true })) + this.reject = x => (this.active = false, reject(x)) + }) + this.execute() + return promise + } + }) + } + } + + describe() { + this.onlyDescribe = true + return this + } + + stream() { + throw new Error('.stream has been renamed to .forEach') + } + + forEach(fn) { + this.forEachFn = fn + return this + } + + raw() { + this.isRaw = true + return this + } + + handle() { + !this.executed && this.handler((this.executed = true, this)) + } + + execute() { + this.handle() + return this + } + + then() { + this.handle() + return super.then.apply(this, arguments) + } + + catch() { + this.handle() + return super.catch.apply(this, arguments) + } + + finally() { + this.handle() + return super.finally.apply(this, arguments) + } +} + +function cachedError(xs) { + if (originCache.has(xs)) + return originCache.get(xs) + + const x = Error.stackTraceLimit + Error.stackTraceLimit = 4 + originCache.set(xs, new Error().stack) + Error.stackTraceLimit = x + return originCache.get(xs) +} diff --git a/deno/src/queue.js 
b/deno/src/queue.js new file mode 100644 index 00000000..c4ef9716 --- /dev/null +++ b/deno/src/queue.js @@ -0,0 +1,31 @@ +export default Queue + +function Queue(initial = []) { + let xs = initial.slice() + let index = 0 + + return { + get length() { + return xs.length - index + }, + remove: (x) => { + const index = xs.indexOf(x) + return index === -1 + ? null + : (xs.splice(index, 1), x) + }, + push: (x) => (xs.push(x), x), + shift: () => { + const out = xs[index++] + + if (index === xs.length) { + index = 0 + xs = [] + } else { + xs[index - 1] = undefined + } + + return out + } + } +} diff --git a/deno/src/result.js b/deno/src/result.js new file mode 100644 index 00000000..31014284 --- /dev/null +++ b/deno/src/result.js @@ -0,0 +1,16 @@ +export default class Result extends Array { + constructor() { + super() + Object.defineProperties(this, { + count: { value: null, writable: true }, + state: { value: null, writable: true }, + command: { value: null, writable: true }, + columns: { value: null, writable: true }, + statement: { value: null, writable: true } + }) + } + + static get [Symbol.species]() { + return Array + } +} diff --git a/deno/src/subscribe.js b/deno/src/subscribe.js new file mode 100644 index 00000000..e54414ab --- /dev/null +++ b/deno/src/subscribe.js @@ -0,0 +1,210 @@ +import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' +export default function Subscribe(postgres, options) { + const listeners = new Map() + + let connection + + return async function subscribe(event, fn) { + event = parseEvent(event) + + options.max = 1 + options.connection = { + ...options.connection, + replication: 'database' + } + + const sql = postgres(options) + + !connection && (subscribe.sql = sql, connection = init(sql, options.publications)) + + const fns = listeners.has(event) + ? 
listeners.get(event).add(fn) + : listeners.set(event, new Set([fn])).get(event) + + const unsubscribe = () => { + fns.delete(fn) + fns.size === 0 && listeners.delete(event) + } + + return connection.then(() => ({ unsubscribe })) + } + + async function init(sql, publications = 'alltables') { + if (!publications) + throw new Error('Missing publication names') + + const slot = 'postgresjs_' + Math.random().toString(36).slice(2) + const [x] = await sql.unsafe( + `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT` + ) + + const stream = await sql.unsafe( + `START_REPLICATION SLOT ${ slot } LOGICAL ${ + x.consistent_point + } (proto_version '1', publication_names '${ publications }')` + ).writable() + + const state = { + lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex'))) + } + + stream.on('data', data) + + function data(x) { + if (x[0] === 0x77) + parse(x.slice(25), state, sql.options.parsers, handle) + else if (x[0] === 0x6b && x[17]) + pong() + } + + function handle(a, b) { + const path = b.relation.schema + '.' 
+ b.relation.table + call('*', a, b) + call('*:' + path, a, b) + b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b) + call(b.command, a, b) + call(b.command + ':' + path, a, b) + b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b) + } + + function pong() { + const x = Buffer.alloc(34) + x[0] = 'r'.charCodeAt(0) + x.fill(state.lsn, 1) + x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25) + stream.write(x) + } + } + + function call(x, a, b) { + listeners.has(x) && listeners.get(x).forEach(fn => fn(a, b, x)) + } +} + +function Time(x) { + return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000))) +} + +function parse(x, state, parsers, handle) { + const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc) + + Object.entries({ + R: x => { // Relation + let i = 1 + const r = state[x.readUInt32BE(i)] = { + schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog', + table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))), + columns: Array(x.readUInt16BE(i += 2)), + keys: [] + } + i += 2 + + let columnIndex = 0 + , column + + while (i < x.length) { + column = r.columns[columnIndex++] = { + key: x[i++], + name: String(x.slice(i, i = x.indexOf(0, i))), + type: x.readUInt32BE(i += 1), + parser: parsers[x.readUInt32BE(i)], + atttypmod: x.readUInt32BE(i += 4) + } + + column.key && r.keys.push(column) + i += 4 + } + }, + Y: () => { /* noop */ }, // Type + O: () => { /* noop */ }, // Origin + B: x => { // Begin + state.date = Time(x.readBigInt64BE(9)) + state.lsn = x.slice(1, 9) + }, + I: x => { // Insert + let i = 1 + const relation = state[x.readUInt32BE(i)] + const row = {} + tuples(x, row, relation.columns, i += 7) + + handle(row, { + command: 'insert', + relation + }) + }, + D: x => { // Delete + let i = 1 + const relation = state[x.readUInt32BE(i)] + i += 4 + const key = x[i] === 75 + const row = key || x[i] === 79 + ? 
{} + : null + + tuples(x, row, key ? relation.keys : relation.columns, i += 3) + + handle(row, { + command: 'delete', + relation, + key + }) + }, + U: x => { // Update + let i = 1 + const relation = state[x.readUInt32BE(i)] + i += 4 + const key = x[i] === 75 + const old = key || x[i] === 79 + ? {} + : null + + old && (i = tuples(x, old, key ? relation.keys : relation.columns, ++i)) + + const row = {} + i = tuples(x, row, relation.columns, i += 3) + + handle(row, { + command: 'update', + relation, + key, + old + }) + }, + T: () => { /* noop */ }, // Truncate, + C: () => { /* noop */ } // Commit + }).reduce(char, {})[x[0]](x) +} + +function tuples(x, row, columns, xi) { + let type + , column + + for (let i = 0; i < columns.length; i++) { + type = x[xi++] + column = columns[i] + row[column.name] = type === 110 // n + ? null + : type === 117 // u + ? undefined + : column.parser === undefined + ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)) + : column.parser.array === true + ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi))) + : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))) + } + + return xi +} + +function parseEvent(x) { + const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i) || [] + + if (!xs) + throw new Error('Malformed subscribe pattern: ' + x) + + const [, command, path, key] = xs + + return (command || '*') + + (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '') + + (key ? 
'=' + key : '') +} diff --git a/deno/src/types.js b/deno/src/types.js new file mode 100644 index 00000000..778ff580 --- /dev/null +++ b/deno/src/types.js @@ -0,0 +1,299 @@ +import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' +import Query from './query.js' +import { Errors } from './errors.js' + +export const types = { + string: { + to: 25, + from: null, // defaults to string + serialize: x => '' + x + }, + number: { + to: 0, + from: [21, 23, 26, 700], + serialize: x => '' + x, + parse: x => +x + }, + json: { + to: 114, + from: [114, 3802], + serialize: x => JSON.stringify(x), + parse: x => JSON.parse(x) + }, + boolean: { + to: 16, + from: 16, + serialize: x => x === true ? 't' : 'f', + parse: x => x === 't' + }, + date: { + to: 1184, + from: [1082, 1114, 1184], + serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(), + parse: x => new Date(x) + }, + bytea: { + to: 17, + from: 17, + serialize: x => '\\x' + Buffer.from(x).toString('hex'), + parse: x => Buffer.from(x.slice(2), 'hex') + } +} + +export const BigInt = { + to: 1700, + from: [20, 701, 1700], + parse: x => BigInt(x), // eslint-disable-line + serialize: x => x.toString() +} + +class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }} + +export class Identifier extends NotTagged { + constructor(value) { + super() + this.value = escapeIdentifier(value) + } +}; + +export class Parameter extends NotTagged { + constructor(value, type, array) { + super() + this.value = value + this.type = type + this.array = array + } +}; + +export class Builder extends NotTagged { + constructor(first, rest) { + super() + this.first = first + this.rest = rest + } + + build(before, parameters, types, transform) { + const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop() + if (keyword.i === -1) + throw new Error('WTF') + + return keyword.fn(this.first, this.rest, parameters, types, transform) + } +}; + +export function 
handleValue(x, parameters, types) { + if (Array.isArray(x)) + return x.map(x => handleValue(x, parameters, types)).join(',') + + const value = x instanceof Parameter ? x.value : x + if (value === undefined) + throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) + + return '$' + (types.push( + x instanceof Parameter + ? (parameters.push(x.value), x.array + ? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value) + : x.type + ) + : (parameters.push(x), inferType(x)) + )) +} + +export const CLOSE = {} + +const defaultHandlers = typeHandlers(types) + +function valuesBuilder(first, parameters, types, transform, columns) { + let value + return first.map(row => + '(' + columns.map(column => { + value = row[column] + return ( + value instanceof Query ? value.strings[0] : + value instanceof Identifier ? value.value : + handleValue(value, parameters, types) + ) + }).join(',') + ')' + ).join(',') +} + +const builders = Object.entries({ + valuesBuilder, + + update(first, rest, parameters, types, transform) { + return (rest.length ? rest.flat() : Object.keys(first)).map(x => + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + + '=' + handleValue(first[x], parameters, types) + ) + }, + + select(first, rest, parameters, types, transform) { + typeof first === 'string' && (first = [first].concat(rest)) + if (Array.isArray(first)) + return first.map(x => escapeIdentifier(transform.column.to ? transform.column.to(x) : x)).join(',') + + let value + const columns = rest.length ? rest.flat() : Object.keys(first) + return columns.map(x => { + value = first[x] + return ( + value instanceof Query ? value.strings[0] : + value instanceof Identifier ? value.value : + handleValue(value, parameters, types) + ) + ' as ' + escapeIdentifier(transform.column.to ? 
transform.column.to(x) : x) + }).join(',') + }, + + values(first, rest, parameters, types, transform) { + const multi = Array.isArray(first[0]) + const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) + return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) + }, + + insert(first, rest, parameters, types, transform) { + const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first) + return '(' + columns.map(x => + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + ).join(',') + ')values' + + valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns) + } +}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x), fn])) + +function notTagged() { + throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) +} + +export const serializers = defaultHandlers.serializers +export const parsers = defaultHandlers.parsers + +export const END = {} + +function firstIsString(x) { + if (Array.isArray(x)) + return firstIsString(x[0]) + return typeof x === 'string' ? 1009 : 0 +} + +export const mergeUserTypes = function(types) { + const user = typeHandlers(types || {}) + return { + serializers: Object.assign({}, serializers, user.serializers), + parsers: Object.assign({}, parsers, user.parsers) + } +} + +function typeHandlers(types) { + return Object.keys(types).reduce((acc, k) => { + types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse) + acc.serializers[types[k].to] = types[k].serialize + types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize) + return acc + }, { parsers: {}, serializers: {} }) +} + +export const escapeIdentifier = function escape(str) { + return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"' +} + +export const inferType = function inferType(x) { + return ( + x instanceof Parameter ? 
x.type : + x instanceof Date ? 1184 : + x instanceof Uint8Array ? 17 : + (x === true || x === false) ? 16 : + typeof x === 'bigint' ? 1700 : + Array.isArray(x) ? inferType(x[0]) : + 0 + ) +} + +const escapeBackslash = /\\/g +const escapeQuote = /"/g + +function arrayEscape(x) { + return x + .replace(escapeBackslash, '\\\\') + .replace(escapeQuote, '\\"') +} + +export const arraySerializer = function arraySerializer(xs, serializer) { + if (!xs.length) + return '{}' + + const first = xs[0] + + if (Array.isArray(first) && !first.type) + return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}' + + return '{' + xs.map(x => + '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"' + ).join(',') + '}' +} + +const arrayParserState = { + i: 0, + char: null, + str: '', + quoted: false, + last: 0 +} + +export const arrayParser = function arrayParser(x, parser) { + arrayParserState.i = arrayParserState.last = 0 + return arrayParserLoop(arrayParserState, x, parser) +} + +function arrayParserLoop(s, x, parser) { + const xs = [] + for (; s.i < x.length; s.i++) { + s.char = x[s.i] + if (s.quoted) { + if (s.char === '\\') { + s.str += x[++s.i] + } else if (s.char === '"') { + xs.push(parser ? parser(s.str) : s.str) + s.str = '' + s.quoted = x[s.i + 1] === '"' + s.last = s.i + 2 + } else { + s.str += s.char + } + } else if (s.char === '"') { + s.quoted = true + } else if (s.char === '{') { + s.last = ++s.i + xs.push(arrayParserLoop(s, x, parser)) + } else if (s.char === '}') { + s.quoted = false + s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) + s.last = s.i + 1 + break + } else if (s.char === ',' && s.p !== '}' && s.p !== '"') { + xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) + s.last = s.i + 1 + } + s.p = s.char + } + s.last < s.i && xs.push(parser ? 
parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1)) + return xs +} + +export const toCamel = x => { + let str = x[0] + for (let i = 1; i < x.length; i++) + str += x[i] === '_' ? x[++i].toUpperCase() : x[i] + return str +} + +export const toPascal = x => { + let str = x[0].toUpperCase() + for (let i = 1; i < x.length; i++) + str += x[i] === '_' ? x[++i].toUpperCase() : x[i] + return str +} + +export const toKebab = x => x.replace(/_/g, '-') + +export const fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase() +export const fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase() +export const fromKebab = x => x.replace(/-/g, '_') diff --git a/deno/tests/bootstrap.js b/deno/tests/bootstrap.js new file mode 100644 index 00000000..210a14f3 --- /dev/null +++ b/deno/tests/bootstrap.js @@ -0,0 +1,29 @@ +import { spawn } from 'https://deno.land/std@0.120.0/node/child_process.ts' + +await exec('psql', ['-c', 'alter system set ssl=on']) +await exec('psql', ['-c', 'create user postgres_js_test']) +await exec('psql', ['-c', 'alter system set password_encryption=md5']) +await exec('psql', ['-c', 'select pg_reload_conf()']) +await exec('psql', ['-c', 'create user postgres_js_test_md5 with password \'postgres_js_test_md5\'']) +await exec('psql', ['-c', 'alter system set password_encryption=\'scram-sha-256\'']) +await exec('psql', ['-c', 'select pg_reload_conf()']) +await exec('psql', ['-c', 'create user postgres_js_test_scram with password \'postgres_js_test_scram\'']) + +await exec('dropdb', ['postgres_js_test']) +await exec('createdb', ['postgres_js_test']) +await exec('psql', ['-c', 'grant all on database postgres_js_test to postgres_js_test']) + +function ignore(cmd, args) { + const { stderr } = spawnSync(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) + throw stderr +} + +export async function exec(cmd, args) { + let stderr = '' + const cp = 
await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + cp.stderr.on('data', x => stderr += x) + await new Promise(x => cp.on('exit', x)) + if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) + throw new Error(stderr) +} diff --git a/deno/tests/copy.csv b/deno/tests/copy.csv new file mode 100644 index 00000000..6622044e --- /dev/null +++ b/deno/tests/copy.csv @@ -0,0 +1,2 @@ +1 2 3 +4 5 6 diff --git a/deno/tests/index.js b/deno/tests/index.js new file mode 100644 index 00000000..99f34edb --- /dev/null +++ b/deno/tests/index.js @@ -0,0 +1,1738 @@ +import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' +/* eslint no-console: 0 */ + +import { exec } from './bootstrap.js' + +import { t, nt, ot } from './test.js' // eslint-disable-line +import cp from 'https://deno.land/std@0.120.0/node/child_process.ts' +import path from 'https://deno.land/std@0.120.0/node/path.ts' +import { net } from '../polyfills.js' +import fs from 'https://deno.land/std@0.120.0/node/fs.ts' +import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts' + +/** @type {import('../types')} */ +import postgres from '../src/index.js' +const delay = ms => new Promise(r => setTimeout(r, ms)) + +const rel = x => new URL(x, import.meta.url) +const idle_timeout = 1 + +const login = { + user: 'postgres_js_test' +} + +const login_md5 = { + user: 'postgres_js_test_md5', + pass: 'postgres_js_test_md5' +} + +const login_scram = { + user: 'postgres_js_test_scram', + pass: 'postgres_js_test_scram' +} + +const options = { + db: 'postgres_js_test', + user: login.user, + pass: login.pass, + idle_timeout, + connect_timeout: 1, + max: 1 +} + +const sql = postgres(options) + +t('Connects with no options', async() => { + const sql = postgres({ max: 1 }) + + const result = (await sql`select 1 as x`)[0].x + await sql.end() + + return [1, result] +}) + +t('Uses default database without slash', async() => { + const sql = postgres('postgres://localhost') + return 
[sql.options.user, sql.options.database] +}) + +t('Uses default database with slash', async() => { + const sql = postgres('postgres://localhost/') + return [sql.options.user, sql.options.database] +}) + +t('Result is array', async() => + [true, Array.isArray(await sql`select 1`)] +) + +t('Result has count', async() => + [1, (await sql`select 1`).count] +) + +t('Result has command', async() => + ['SELECT', (await sql`select 1`).command] +) + +t('Create table', async() => + ['CREATE TABLE', (await sql`create table test(int int)`).command, await sql`drop table test`] +) + +t('Drop table', { timeout: 2 }, async() => { + await sql`create table test(int int)` + return ['DROP TABLE', (await sql`drop table test`).command] +}) + +t('null', async() => + [null, (await sql`select ${ null } as x`)[0].x] +) + +t('Integer', async() => + ['1', (await sql`select ${ 1 } as x`)[0].x] +) + +t('String', async() => + ['hello', (await sql`select ${ 'hello' } as x`)[0].x] +) + +t('Boolean false', async() => + [false, (await sql`select ${ false } as x`)[0].x] +) + +t('Boolean true', async() => + [true, (await sql`select ${ true } as x`)[0].x] +) + +t('Date', async() => { + const now = new Date() + return [0, now - (await sql`select ${ now } as x`)[0].x] +}) + +t('Json', async() => { + const x = (await sql`select ${ sql.json({ a: 'hello', b: 42 }) } as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('implicit json', async() => { + const x = (await sql`select ${ { a: 'hello', b: 42 } }::json as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('implicit jsonb', async() => { + const x = (await sql`select ${ { a: 'hello', b: 42 } }::jsonb as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('Empty array', async() => + [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)] +) + +t('Array of Integer', async() => + ['3', (await sql`select ${ sql.array([1, 2, 3]) } as x`)[0].x[2]] +) + +t('Array of String', async() => + ['c', (await sql`select ${ 
sql.array(['a', 'b', 'c']) } as x`)[0].x[2]] +) + +t('Array of Date', async() => { + const now = new Date() + return [now.getTime(), (await sql`select ${ sql.array([now, now, now]) } as x`)[0].x[2].getTime()] +}) + +t('Nested array n2', async() => + ['4', (await sql`select ${ sql.array([[1, 2], [3, 4]]) } as x`)[0].x[1][1]] +) + +t('Nested array n3', async() => + ['6', (await sql`select ${ sql.array([[[1, 2]], [[3, 4]], [[5, 6]]]) } as x`)[0].x[2][0][1]] +) + +t('Escape in arrays', async() => + ['Hello "you",c:\\windows', (await sql`select ${ sql.array(['Hello "you"', 'c:\\windows']) } as x`)[0].x.join(',')] +) + +t('Escapes', async() => { + return ['hej"hej', Object.keys((await sql`select 1 as ${ sql('hej"hej') }`)[0])[0]] +}) + +t('null for int', async() => { + await sql`create table test (x int)` + return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`] +}) + +t('Transaction throws', async() => { + await sql`create table test (a int)` + return ['22P02', await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql`insert into test values('hej')` + }).catch(x => x.code), await sql`drop table test`] +}) + +t('Transaction rolls back', async() => { + await sql`create table test (a int)` + await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql`insert into test values('hej')` + }).catch(() => { /* ignore */ }) + return [0, (await sql`select a from test`).count, await sql`drop table test`] +}) + +t('Transaction throws on uncaught savepoint', async() => { + await sql`create table test (a int)` + + return ['fail', (await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql.savepoint(async sql => { + await sql`insert into test values(2)` + throw new Error('fail') + }) + }).catch((err) => err.message)), await sql`drop table test`] +}) + +t('Transaction throws on uncaught named savepoint', async() => { + await sql`create table test (a int)` + + return ['fail', 
(await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql.savepoint('watpoint', async sql => { + await sql`insert into test values(2)` + throw new Error('fail') + }) + }).catch(() => 'fail')), await sql`drop table test`] +}) + + t('Transaction succeeds on caught savepoint', async() => { + await sql`create table test (a int)` + await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql.savepoint(async sql => { + await sql`insert into test values(2)` + throw new Error('please rollback') + }).catch(() => { /* ignore */ }) + await sql`insert into test values(3)` + }) + + return ['2', (await sql`select count(1) from test`)[0].count, await sql`drop table test`] +}) + + t('Savepoint returns Result', async() => { + let result + await sql.begin(async sql => { + result = await sql.savepoint(sql => + sql`select 1 as x` + ) + }) + + return [1, result[0].x] +}) + + t('Transaction requests are executed implicitly', async() => [ + 'testing', + (await sql.begin(async sql => { + sql`select set_config('postgres_js.test', 'testing', true)` + return await sql`select current_setting('postgres_js.test') as x` + }))[0].x +]) + + t('Parallel transactions', async() => { + await sql`create table test (a int)` + return ['11', (await Promise.all([ + sql.begin(sql => sql`select 1`), + sql.begin(sql => sql`select 1`) + ])).map(x => x.count).join(''), await sql`drop table test`] +}) + + t('Transactions array', async() => { + await sql`create table test (a int)` + + return ['11', (await sql.begin(sql => [ + sql`select 1`.then(x => x), + sql`select 1` + ])).map(x => x.count).join(''), await sql`drop table test`] +}) + + t('Transaction waits', async() => { + await sql`create table test (a int)` + await sql.begin(async sql => { + await sql`insert into test values(1)` + await sql.savepoint(async sql => { + await sql`insert into test values(2)` + throw new Error('please rollback') + }).catch(() => { /* ignore */ }) + await sql`insert into test values(3)` + }) 
+ + return ['11', (await Promise.all([ + sql.begin(sql => sql`select 1`), + sql.begin(sql => sql`select 1`) + ])).map(x => x.count).join(''), await sql`drop table test`] +}) + +t('Helpers in Transaction', async() => { + return ['1', (await sql.begin(async sql => + await sql`select ${ sql({ x: 1 }) }` + ))[0].x] +}) + +t('Undefined values throws', async() => { + let error + + await sql` + select ${ undefined } as x + `.catch(x => error = x.code) + + return ['UNDEFINED_VALUE', error] +}) + +t('Null sets to null', async() => + [null, (await sql`select ${ null } as x`)[0].x] +) + +t('Throw syntax error', async() => + ['42601', (await sql`wat 1`.catch(x => x)).code] +) + +t('Connect using uri', async() => + [true, await new Promise((resolve, reject) => { + const sql = postgres('postgres://' + login.user + ':' + (login.pass || '') + '@localhost:5432/' + options.db, { + idle_timeout + }) + sql`select 1`.then(() => resolve(true), reject) + })] +) + +t('Fail with proper error on no host', async() => + ['ECONNREFUSED', (await new Promise((resolve, reject) => { + const sql = postgres('postgres://localhost:33333/' + options.db, { + idle_timeout + }) + sql`select 1`.then(reject, resolve) + })).code] +) + +t('Connect using SSL', async() => + [true, (await new Promise((resolve, reject) => { + postgres({ + ssl: { rejectUnauthorized: false }, + idle_timeout + })`select 1`.then(() => resolve(true), reject) + }))] +) + +t('Connect using SSL require', async() => + [true, (await new Promise((resolve, reject) => { + postgres({ + ssl: 'require', + idle_timeout + })`select 1`.then(() => resolve(true), reject) + }))] +) + +t('Connect using SSL prefer', async() => { + await exec('psql', ['-c', 'alter system set ssl=off']) + await exec('psql', ['-c', 'select pg_reload_conf()']) + + const sql = postgres({ + ssl: 'prefer', + idle_timeout + }) + + return [ + 1, (await sql`select 1 as x`)[0].x, + await exec('psql', ['-c', 'alter system set ssl=on']), + await exec('psql', ['-c', 'select 
pg_reload_conf()']) + ] +}) + +t('Reconnect using SSL', { timeout: 2 }, async() => { + const sql = postgres({ + ssl: 'require', + idle_timeout: 0.1 + }) + + await sql`select 1` + await delay(200) + + return [1, (await sql`select 1 as x`)[0].x] +}) + +t('Login without password', async() => { + return [true, (await postgres({ ...options, ...login })`select true as x`)[0].x] +}) + +t('Login using MD5', async() => { + return [true, (await postgres({ ...options, ...login_md5 })`select true as x`)[0].x] +}) + +t('Login using scram-sha-256', async() => { + return [true, (await postgres({ ...options, ...login_scram })`select true as x`)[0].x] +}) + +t('Parallel connections using scram-sha-256', { + timeout: 2 +}, async() => { + const sql = postgres({ ...options, ...login_scram }) + return [true, (await Promise.all([ + sql`select true as x, pg_sleep(0.2)`, + sql`select true as x, pg_sleep(0.2)`, + sql`select true as x, pg_sleep(0.2)` + ]))[0][0].x] +}) + +t('Support dynamic password function', async() => { + return [true, (await postgres({ + ...options, + ...login_scram, + pass: () => 'postgres_js_test_scram' + })`select true as x`)[0].x] +}) + +t('Support dynamic async password function', async() => { + return [true, (await postgres({ + ...options, + ...login_scram, + pass: () => Promise.resolve('postgres_js_test_scram') + })`select true as x`)[0].x] +}) + +t('Point type', async() => { + const sql = postgres({ + ...options, + types: { + point: { + to: 600, + from: [600], + serialize: ([x, y]) => '(' + x + ',' + y + ')', + parse: (x) => x.slice(1, -1).split(',').map(x => +x) + } + } + }) + + await sql`create table test (x point)` + await sql`insert into test (x) values (${ sql.types.point([10, 20]) })` + return [20, (await sql`select x from test`)[0].x[1], await sql`drop table test`] +}) + +t('Point type array', async() => { + const sql = postgres({ + ...options, + types: { + point: { + to: 600, + from: [600], + serialize: ([x, y]) => '(' + x + ',' + y + ')', + parse: (x) 
=> x.slice(1, -1).split(',').map(x => +x) + } + } + }) + + await sql`create table test (x point[])` + await sql`insert into test (x) values (${ sql.array([sql.types.point([10, 20]), sql.types.point([20, 30])]) })` + return [30, (await sql`select x from test`)[0].x[1][1], await sql`drop table test`] +}) + +t('sql file', async() => + [1, (await sql.file(rel('select.sql')))[0].x] +) + +t('sql file has forEach', async() => { + let result + await sql + .file(rel('select.sql'), { cache: false }) + .forEach(({ x }) => result = x) + + return [1, result] +}) + +t('sql file throws', async() => + ['ENOENT', (await sql.file(rel('selectomondo.sql')).catch(x => x.code))] +) + +t('sql file cached', async() => { + await sql.file(rel('select.sql')) + await delay(20) + + return [1, (await sql.file(rel('select.sql')))[0].x] +}) + +t('Parameters in file', async() => { + const result = await sql.file( + rel('select-param.sql'), + ['hello'] + ) + return ['hello', result[0].x] +}) + +t('Connection ended promise', async() => { + const sql = postgres(options) + + await sql.end() + + return [undefined, await sql.end()] +}) + +t('Connection ended timeout', async() => { + const sql = postgres(options) + + await sql.end({ timeout: 10 }) + + return [undefined, await sql.end()] +}) + +t('Connection ended error', async() => { + const sql = postgres(options) + sql.end() + return ['CONNECTION_ENDED', (await sql``.catch(x => x.code))] +}) + +t('Connection end does not cancel query', async() => { + const sql = postgres(options) + + const promise = sql`select 1 as x`.execute() + + sql.end() + + return [1, (await promise)[0].x] +}) + +t('Connection destroyed', async() => { + const sql = postgres(options) + setTimeout(() => sql.end({ timeout: 0 }), 0) + return ['CONNECTION_DESTROYED', await sql``.catch(x => x.code)] +}) + +t('Connection destroyed with query before', async() => { + const sql = postgres(options) + , error = sql`select pg_sleep(0.2)`.catch(err => err.code) + + sql.end({ timeout: 0 }) + 
return ['CONNECTION_DESTROYED', await error] +}) + +t('transform column', async() => { + const sql = postgres({ + ...options, + transform: { column: x => x.split('').reverse().join('') } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['dlrow_olleh', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('column toPascal', async() => { + const sql = postgres({ + ...options, + transform: { column: postgres.toPascal } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['HelloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('column toCamel', async() => { + const sql = postgres({ + ...options, + transform: { column: postgres.toCamel } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['helloWorld', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('column toKebab', async() => { + const sql = postgres({ + ...options, + transform: { column: postgres.toKebab } + }) + + await sql`create table test (hello_world int)` + await sql`insert into test values (1)` + return ['hello-world', Object.keys((await sql`select * from test`)[0])[0], await sql`drop table test`] +}) + +t('unsafe', async() => { + await sql`create table test (x int)` + return [1, (await sql.unsafe('insert into test values ($1) returning *', [1]))[0].x, await sql`drop table test`] +}) + +t('unsafe simple', async() => { + return [1, (await sql.unsafe('select 1 as x'))[0].x] +}) + +t('listen and notify', async() => { + const sql = postgres(options) + , channel = 'hello' + + return ['world', await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .then(() => delay(20)) + .catch(reject) + .then(sql.end) + )] +}) + +t('double listen', async() => { + const sql = 
postgres(options) + , channel = 'hello' + + let count = 0 + + await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .catch(reject) + ).then(() => count++) + + await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .catch(reject) + ).then(() => count++) + + // for coverage + sql.listen('weee', () => { /* noop */ }).then(sql.end) + + return [2, count] +}) + +t('listen and notify with weird name', async() => { + const sql = postgres(options) + , channel = 'wat-;ø§' + + return ['world', await new Promise((resolve, reject) => + sql.listen(channel, resolve) + .then(() => sql.notify(channel, 'world')) + .catch(reject) + .then(() => delay(20)) + .then(sql.end) + )] +}) + +t('listen and notify with upper case', async() => { + const sql = postgres(options) + let result + + await sql.listen('withUpperChar', x => result = x) + sql.notify('withUpperChar', 'works') + await delay(50) + + return [ + 'works', + result, + sql.end() + ] +}) + +t('listen reconnects', { timeout: 4 }, async() => { + const sql = postgres(options) + , xs = [] + + const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await sql`select pg_terminate_backend(${ pid }::int)` + await delay(200) + await sql.notify('test', 'b') + await delay(200) + sql.end() + + return ['ab', xs.join('')] +}) + + +t('listen reconnects after connection error', { timeout: 3 }, async() => { + const sql = postgres() + , xs = [] + + const a = (await sql`show data_directory`)[0].data_directory + + const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await sql`select pg_terminate_backend(${ pid }::int)` + await delay(1000) + + await sql.notify('test', 'b') + await delay(50) + sql.end() + + return ['ab', xs.join('')] +}) + +t('listen result reports correct connection state after reconnection', async() => { + const sql = 
postgres(options) + , xs = [] + + const result = await sql.listen('test', x => xs.push(x)) + const initialPid = result.state.pid + await sql.notify('test', 'a') + await sql`select pg_terminate_backend(${ initialPid }::int)` + await delay(50) + sql.end() + + return [result.state.pid !== initialPid, true] +}) + +t('unlisten removes subscription', async() => { + const sql = postgres(options) + , xs = [] + + const { unlisten } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await delay(50) + await unlisten() + await sql.notify('test', 'b') + await delay(50) + sql.end() + + return ['a', xs.join('')] +}) + +t('listen after unlisten', async() => { + const sql = postgres(options) + , xs = [] + + const { unlisten } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') + await delay(50) + await unlisten() + await sql.notify('test', 'b') + await delay(50) + await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'c') + await delay(50) + sql.end() + + return ['ac', xs.join('')] +}) + +t('multiple listeners and unlisten one', async() => { + const sql = postgres(options) + , xs = [] + + await sql.listen('test', x => xs.push('1', x)) + const s2 = await sql.listen('test', x => xs.push('2', x)) + await sql.notify('test', 'a') + await delay(50) + await s2.unlisten() + await sql.notify('test', 'b') + await delay(50) + sql.end() + + return ['1a2a1b', xs.join('')] +}) + +t('responds with server parameters (application_name)', async() => + ['postgres.js', await new Promise((resolve, reject) => postgres({ + ...options, + onparameter: (k, v) => k === 'application_name' && resolve(v) + })`select 1`.catch(reject))] +) + +t('has server parameters', async() => { + return ['postgres.js', (await sql`select 1`.then(() => sql.parameters.application_name))] +}) + +t('big query body', async() => { + await sql`create table test (x int)` + return [1000, (await sql`insert into test ${ + sql([...Array(1000).keys()].map(x => ({ x }))) + 
}`).count, await sql`drop table test`] +}) + +t('Throws if more than 65534 parameters', async() => { + await sql`create table test (x int)` + return ['MAX_PARAMETERS_EXCEEDED', (await sql`insert into test ${ + sql([...Array(65535).keys()].map(x => ({ x }))) + }`.catch(e => e.code)), await sql`drop table test`] +}) + +t('let postgres do implicit cast of unknown types', async() => { + await sql`create table test (x timestamp with time zone)` + const [{ x }] = await sql`insert into test values (${ new Date().toISOString() }) returning *` + return [true, x instanceof Date, await sql`drop table test`] +}) + +t('only allows one statement', async() => + ['42601', await sql`select 1; select 2`.catch(e => e.code)] +) + +t('await sql() throws not tagged error', async() => { + let error + try { + await sql('select 1') + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('sql().then throws not tagged error', async() => { + let error + try { + sql('select 1').then(() => { /* noop */ }) + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('sql().catch throws not tagged error', async() => { + let error + try { + await sql('select 1') + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('sql().finally throws not tagged error', async() => { + let error + try { + sql('select 1').finally(() => { /* noop */ }) + } catch (e) { + error = e.code + } + return ['NOT_TAGGED_CALL', error] +}) + +t('little bobby tables', async() => { + const name = 'Robert\'); DROP TABLE students;--' + + await sql`create table students (name text, age int)` + await sql`insert into students (name) values (${ name })` + + return [ + name, (await sql`select name from students`)[0].name, + await sql`drop table students` + ] +}) + +t('Connection errors are caught using begin()', { + timeout: 2 +}, async() => { + let error + try { + const sql = postgres({ host: 'wat', port: 1337 }) + + await sql.begin(async(sql) => { + await 
sql`insert into test (label, value) values (${1}, ${2})` + }) + } catch (err) { + error = err + } + + return [ + true, + error.code === 'ENOTFOUND' || + error.message === 'failed to lookup address information: nodename nor servname provided, or not known' + ] +}) + +t('dynamic column name', async() => { + return ['!not_valid', Object.keys((await sql`select 1 as ${ sql('!not_valid') }`)[0])[0]] +}) + +t('dynamic select as', async() => { + return ['2', (await sql`select ${ sql({ a: 1, b: 2 }) }`)[0].b] +}) + +t('dynamic select as pluck', async() => { + return [undefined, (await sql`select ${ sql({ a: 1, b: 2 }, 'a') }`)[0].b] +}) + +t('dynamic insert', async() => { + await sql`create table test (a int, b text)` + const x = { a: 42, b: 'the answer' } + + return ['the answer', (await sql`insert into test ${ sql(x) } returning *`)[0].b, await sql`drop table test`] +}) + +t('dynamic insert pluck', async() => { + await sql`create table test (a int, b text)` + const x = { a: 42, b: 'the answer' } + + return [null, (await sql`insert into test ${ sql(x, 'a') } returning *`)[0].b, await sql`drop table test`] +}) + +t('array insert', async() => { + await sql`create table test (a int, b int)` + return [2, (await sql`insert into test (a, b) values (${ [1, 2] }) returning *`)[0].b, await sql`drop table test`] +}) + +t('parameters in()', async() => { + return [2, (await sql` + with rows as ( + select * from (values (1), (2), (3), (4)) as x(a) + ) + select * from rows where a in (${ [3, 4] }) + `).count] +}) + +t('dynamic multi row insert', async() => { + await sql`create table test (a int, b text)` + const x = { a: 42, b: 'the answer' } + + return [ + 'the answer', + (await sql`insert into test ${ sql([x, x]) } returning *`)[1].b, await sql`drop table test` + ] +}) + +t('dynamic update', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (17, 'wrong')` + + return [ + 'the answer', + (await sql`update test set ${ sql({ a: 42, b: 
'the answer' }) } returning *`)[0].b, await sql`drop table test` + ] +}) + +t('dynamic update pluck', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (17, 'wrong')` + + return [ + 'wrong', + (await sql`update test set ${ sql({ a: 42, b: 'the answer' }, 'a') } returning *`)[0].b, await sql`drop table test` + ] +}) + +t('dynamic select array', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (42, 'yay')` + return ['yay', (await sql`select ${ sql(['a', 'b']) } from test`)[0].b, await sql`drop table test`] +}) + +t('dynamic select args', async() => { + await sql`create table test (a int, b text)` + await sql`insert into test (a, b) values (42, 'yay')` + return ['yay', (await sql`select ${ sql('a', 'b') } from test`)[0].b, await sql`drop table test`] +}) + +t('dynamic values single row', async() => { + const [{ b }] = await sql` + select * from (values ${ sql(['a', 'b', 'c']) }) AS x(a, b, c) + ` + + return ['b', b] +}) + +t('dynamic values multi row', async() => { + const [_, { b }] = await sql` + select * from (values ${ sql([['a', 'b', 'c'],['a', 'b', 'c']]) }) AS x(a, b, c) + ` + + return ['b', b] +}) + +t('connection parameters', async() => { + const sql = postgres({ + ...options, + connection: { + 'some.var': 'yay' + } + }) + + return ['yay', (await sql`select current_setting('some.var') as x`)[0].x] +}) + +t('Multiple queries', async() => { + const sql = postgres(options) + + return [4, (await Promise.all([ + sql`select 1`, + sql`select 2`, + sql`select 3`, + sql`select 4` + ])).length] +}) + +t('Multiple statements', async() => + [2, await sql.unsafe(` + select 1 as x; + select 2 as a; + `).then(([, [x]]) => x.a)] +) + +t('throws correct error when authentication fails', async() => { + const sql = postgres({ + ...options, + ...login_md5, + pass: 'wrong' + }) + return ['28P01', await sql`select 1`.catch(e => e.code)] +}) + +t('notice works', async() => { + 
let notice + const log = console.log + console.log = function(x) { + notice = x + } + + const sql = postgres(options) + + await sql`create table if not exists users()` + await sql`create table if not exists users()` + + console.log = log + + return ['NOTICE', notice.severity] +}) + +t('notice hook works', async() => { + let notice + const sql = postgres({ + ...options, + onnotice: x => notice = x + }) + + await sql`create table if not exists users()` + await sql`create table if not exists users()` + + return ['NOTICE', notice.severity] +}) + +t('bytea serializes and parses', async() => { + const buf = Buffer.from('wat') + + await sql`create table test (x bytea)` + await sql`insert into test values (${ buf })` + + return [ + buf.toString(), + (await sql`select x from test`)[0].x.toString(), + await sql`drop table test` + ] +}) + +t('forEach works', async() => { + let result + await sql`select 1 as x`.forEach(({ x }) => result = x) + return [1, result] +}) + +t('forEach returns empty array', async() => { + return [0, (await sql`select 1 as x`.forEach(() => { /* noop */ })).length] +}) + +t('Cursor works', async() => { + const order = [] + await sql`select 1 as x union select 2 as x`.cursor(async([x]) => { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + }) + return ['1a1b2a2b', order.join('')] +}) + +t('Unsafe cursor works', async() => { + const order = [] + await sql.unsafe('select 1 as x union select 2 as x').cursor(async([x]) => { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + }) + return ['1a1b2a2b', order.join('')] +}) + +t('Cursor custom n works', async() => { + const order = [] + await sql`select * from generate_series(1,20)`.cursor(10, async(x) => { + order.push(x.length) + }) + return ['10,10', order.join(',')] +}) + +t('Cursor custom with rest n works', async() => { + const order = [] + await sql`select * from generate_series(1,20)`.cursor(11, async(x) => { + order.push(x.length) + }) + return ['11,9', 
order.join(',')] +}) + +t('Cursor custom with less results than batch size works', async() => { + const order = [] + await sql`select * from generate_series(1,20)`.cursor(21, async(x) => { + order.push(x.length) + }) + return ['20', order.join(',')] +}) + +t('Cursor cancel works', async() => { + let result + await sql`select * from generate_series(1,10) as x`.cursor(async([{ x }]) => { + result = x + return sql.CLOSE + }) + return [1, result] +}) + +t('Cursor throw works', async() => { + const order = [] + await sql`select 1 as x union select 2 as x`.cursor(async([x]) => { + order.push(x.x + 'a') + await delay(100) + throw new Error('watty') + }).catch(() => order.push('err')) + return ['1aerr', order.join('')] +}) + +t('Cursor error works', async() => [ + '42601', + await sql`wat`.cursor(() => { /* noop */ }).catch((err) => err.code) +]) + +t('Multiple Cursors', { timeout: 2 }, async() => { + const result = [] + const xs = await sql.begin(async sql => [ + await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async ([row]) => { + result.push(row.x) + await new Promise(r => setTimeout(r, 200)) + }), + await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async ([row]) => { + result.push(row.x) + await new Promise(r => setTimeout(r, 100)) + }) + ]) + + return ['1,2,3,4,101,102,103,104', result.join(',')] +}) + +t('Cursor as async iterator', async() => { + const order = [] + for await (const [x] of sql`select generate_series(1,2) as x;`.cursor()) { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + } + + return ['1a1b2a2b', order.join('')] +}) + +t('Transform row', async() => { + const sql = postgres({ + ...options, + transform: { row: () => 1 } + }) + + return [1, (await sql`select 'wat'`)[0]] +}) + +t('Transform row forEach', async() => { + let result + const sql = postgres({ + ...options, + transform: { row: () => 1 } + }) + + await sql`select 1`.forEach(x => result = x) + + return [1, result] +}) + 
+t('Transform value', async() => { + const sql = postgres({ + ...options, + transform: { value: () => 1 } + }) + + return [1, (await sql`select 'wat' as x`)[0].x] +}) + +t('Transform columns from', async() => { + const sql = postgres({ ...options, transform: { column: { to: postgres.fromCamel, from: postgres.toCamel } } }) + await sql`create table test (a_test int, b_test text)` + await sql`insert into test ${ sql([{ aTest: 1, bTest: 1 }]) }` + await sql`update test set ${ sql({ aTest: 2, bTest: 2 }) }` + return [ + 2, + (await sql`select ${ sql('aTest', 'bTest') } from test`)[0].aTest, + await sql`drop table test` + ] +}) + +t('Unix socket', async() => { + const sql = postgres({ + ...options, + host: '/tmp' + }) + + return [1, (await sql`select 1 as x`)[0].x] +}) + +t('Big result', async() => { + return [100000, (await sql`select * from generate_series(1, 100000)`).count] +}) + +t('Debug works', async() => { + let result + const sql = postgres({ + ...options, + debug: (connection_id, str) => result = str + }) + + await sql`select 1` + + return ['select 1', result] +}) + +t('bigint is returned as String', async() => [ + 'string', + typeof (await sql`select 9223372036854777 as x`)[0].x +]) + +t('int is returned as Number', async() => [ + 'number', + typeof (await sql`select 123 as x`)[0].x +]) + +t('numeric is returned as string', async() => [ + 'string', + typeof (await sql`select 1.2 as x`)[0].x +]) + +t('Async stack trace', async() => { + const sql = postgres({ ...options, debug: false }) + return [ + parseInt(new Error().stack.split('\n')[1].match(':([0-9]+):')[1]) + 1, + parseInt(await sql`error`.catch(x => x.stack.split('\n').pop().match(':([0-9]+):')[1])) + ] +}) + +t('Debug has long async stack trace', async() => { + const sql = postgres({ ...options, debug: true }) + + return [ + 'watyo', + await yo().catch(x => x.stack.match(/wat|yo/g).join('')) + ] + + function yo() { + return wat() + } + + function wat() { + return sql`error` + } +}) + +t('Error contains 
query string', async() => [ + 'selec 1', + (await sql`selec 1`.catch(err => err.query)) +]) + +t('Error contains query serialized parameters', async() => [ + 1, + (await sql`selec ${ 1 }`.catch(err => err.parameters[0])) +]) + +t('Error contains query raw parameters', async() => [ + 1, + (await sql`selec ${ 1 }`.catch(err => err.args[0])) +]) + +t('Query and parameters on errorare not enumerable if debug is not set', async() => { + const sql = postgres({ ...options, debug: false }) + + return [ + false, + (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') || err.propertyIsEnumerable('query'))) + ] +}) + +t('Query and parameters are enumerable if debug is set', async() => { + const sql = postgres({ ...options, debug: true }) + + return [ + true, + (await sql`selec ${ 1 }`.catch(err => err.propertyIsEnumerable('parameters') && err.propertyIsEnumerable('query'))) + ] +}) + +t('connect_timeout works', { timeout: 20 }, async() => { + const connect_timeout = 0.2 + const server = net.createServer() + server.listen() + const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout }) + const start = Date.now() + let end + await sql`select 1`.catch((e) => { + if (e.code !== 'CONNECT_TIMEOUT') + throw e + end = Date.now() + }) + server.close() + return [connect_timeout, Math.floor((end - start) / 100) / 10] +}) + +t('connect_timeout throws proper error', async() => [ + 'CONNECT_TIMEOUT', + await postgres({ + ...options, + ...login_scram, + connect_timeout: 0.001 + })`select 1`.catch(e => e.code) +]) + +t('requests works after single connect_timeout', async() => { + let first = true + + const sql = postgres({ + ...options, + ...login_scram, + connect_timeout: { valueOf() { return first ? 
(first = false, 0.01) : 1 } } + }) + + return [ + 'CONNECT_TIMEOUT,,1', + [ + await sql`select 1 as x`.catch(x => x.code), + await delay(10), + (await sql`select 1 as x`)[0].x + ].join(',') + ] +}) + +t('Postgres errors are of type PostgresError', async() => + [true, (await sql`bad keyword`.catch(e => e)) instanceof sql.PostgresError] +) + +t('Result has columns spec', async() => + ['x', (await sql`select 1 as x`).columns[0].name] +) + +t('forEach has result as second argument', async() => { + let x + await sql`select 1 as x`.forEach((_, result) => x = result) + return ['x', x.columns[0].name] +}) + +t('Result as arrays', async() => { + const sql = postgres({ + ...options, + transform: { + row: x => Object.values(x) + } + }) + + return ['1,2', (await sql`select 1 as a, 2 as b`)[0].join(',')] +}) + +t('Insert empty array', async() => { + await sql`create table tester (ints int[])` + return [ + Array.isArray((await sql`insert into tester (ints) values (${ sql.array([]) }) returning *`)[0].ints), + true, + await sql`drop table tester` + ] +}) + +t('Insert array in sql()', async() => { + await sql`create table tester (ints int[])` + return [ + Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) }) } returning *`)[0].ints), + true, + await sql`drop table tester` + ] +}) + +t('Automatically creates prepared statements', async() => { + const sql = postgres(options) + const result = await sql`select * from pg_prepared_statements` + return [true, result.some(x => x.name = result.statement.name)] +}) + +t('no_prepare: true disables prepared statements (deprecated)', async() => { + const sql = postgres({ ...options, no_prepare: true }) + const result = await sql`select * from pg_prepared_statements` + return [false, result.some(x => x.name = result.statement.name)] +}) + +t('prepare: false disables prepared statements', async() => { + const sql = postgres({ ...options, prepare: false }) + const result = await sql`select * from pg_prepared_statements` + 
return [false, result.some(x => x.name = result.statement.name)] +}) + +t('prepare: true enables prepared statements', async() => { + const sql = postgres({ ...options, prepare: true }) + const result = await sql`select * from pg_prepared_statements` + return [true, result.some(x => x.name = result.statement.name)] +}) + +t('prepares unsafe query when "prepare" option is true', async() => { + const sql = postgres({ ...options, prepare: true }) + const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'], { prepare: true }) + return [true, result.some(x => x.name = result.statement.name)] +}) + +t('does not prepare unsafe query by default', async() => { + const sql = postgres({ ...options, prepare: true }) + const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla']) + return [false, result.some(x => x.name = result.statement.name)] +}) + +t('Recreate prepared statements on transformAssignedExpr error', async() => { + const insert = () => sql`insert into test (name) values (${ '1' }) returning name` + await sql`create table test (name text)` + await insert() + await sql`alter table test alter column name type int using name::integer` + return [ + 1, + (await insert())[0].name, + await sql`drop table test` + ] +}) + +t('Recreate prepared statements on RevalidateCachedQuery error', async() => { + const select = () => sql`select name from test` + await sql`create table test (name text)` + await sql`insert into test values ('1')` + await select() + await sql`alter table test alter column name type int using name::integer` + return [ + 1, + (await select())[0].name, + await sql`drop table test` + ] +}) + + +t('Catches connection config errors', async() => { + const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' }) + + return [ + 'wat', + await sql`select 1`.catch((e) => e.message) + ] +}) + +t('Catches connection config errors with end', 
async() => { + const sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' }) + + return [ + 'wat', + await sql`select 1`.catch((e) => e.message), + await sql.end() + ] +}) + +t('Catches query format errors', async() => [ + 'wat', + await sql.unsafe({ toString: () => { throw new Error('wat') } }).catch((e) => e.message) +]) + +t('Multiple hosts', { + timeout: 10 +}, async() => { + const s1 = postgres({ idle_timeout }) + , s2 = postgres({ idle_timeout, port: 5433 }) + , sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout, max: 1 }) + , result = [] + + const x1 = await sql`select 1` + result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) + await s1`select pg_terminate_backend(${ x1.state.pid }::int)` + await delay(100) + + const x2 = await sql`select 1` + result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) + await s2`select pg_terminate_backend(${ x2.state.pid }::int)` + await delay(100) + + result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) + + return ['5432,5433,5432', result.join(',')] +}) + +t('Escaping supports schemas and tables', async() => { + await sql`create schema a` + await sql`create table a.b (c int)` + await sql`insert into a.b (c) values (1)` + return [ + 1, + (await sql`select ${ sql('a.b.c') } from a.b`)[0].c, + await sql`drop table a.b`, + await sql`drop schema a` + ] +}) + +t('Raw method returns rows as arrays', async() => { + const [x] = await sql`select 1`.raw() + return [ + Array.isArray(x), + true + ] +}) + +t('Raw method returns values unparsed as Buffer', async() => { + const [[x]] = await sql`select 1`.raw() + return [ + x instanceof Uint8Array, + true + ] +}) + +t('Copy read works', async() => { + const result = [] + + await sql`create table test (x int)` + await sql`insert into test select * from generate_series(1,10)` + const readable = await sql`copy test to 
stdout`.readable() + readable.on('data', x => result.push(x)) + await new Promise(r => readable.on('end', r)) + + return [ + result.length, + 10, + await sql`drop table test` + ] +}) + +t('Copy write works', { timeout: 2 }, async() => { + await sql`create table test (x int)` + const writable = await sql`copy test from stdin`.writable() + + writable.write('1\n') + writable.write('1\n') + writable.end() + + await new Promise(r => writable.on('finish', r)) + + return [ + (await sql`select 1 from test`).length, + 2, + await sql`drop table test` + ] +}) + +t('Copy write as first works', async() => { + await sql`create table test (x int)` + const first = postgres(options) + const writable = await first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable() + writable.write('1\n') + writable.write('1\n') + writable.end() + + await new Promise(r => writable.on('finish', r)) + + return [ + (await sql`select 1 from test`).length, + 2, + await sql`drop table test` + ] +}) + + +nt('Copy from file works', async() => { + await sql`create table test (x int, y int, z int)` + await new Promise(async r => fs + .createReadStream(rel('copy.csv')) + .pipe(await sql`copy test from stdin`.writable()) + .on('finish', r) + ) + + return [ + JSON.stringify(await sql`select * from test`), + '[{"x":1,"y":2,"z":3},{"x":4,"y":5,"z":6}]', + await sql`drop table test` + ] +}) + +t('Copy from works in transaction', async() => { + await sql`create table test(x int)` + const xs = await sql.begin(async sql => { + (await sql`copy test from stdin`.writable()).end('1\n2') + await delay(20) + return sql`select 1 from test` + }) + + return [ + xs.length, + 2, + await sql`drop table test` + ] +}) + +nt('Copy from abort works', async() => { + const sql = postgres(options) + const readable = fs.createReadStream(rel('copy.csv')) + + await sql`create table test (x int, y int, z int)` + await sql`TRUNCATE TABLE test` + + const writable = await sql`COPY test FROM STDIN`.writable() + + let 
aborted + + readable + .pipe(writable) + .on('error', (err) => aborted = err) + + writable.destroy(new Error('abort')) + await sql.end() + + return [ + 'abort', + aborted.message, + await postgres(options)`drop table test` + ] +}) + +t('multiple queries before connect', async() => { + const sql = postgres({ ...options, max: 2 }) + const xs = await Promise.all([ + sql`select 1 as x`, + sql`select 2 as x`, + sql`select 3 as x`, + sql`select 4 as x` + ]) + + return [ + '1,2,3,4', + xs.map(x => x[0].x).join() + ] +}) + +t('subscribe', { timeout: 2 }, async() => { + const sql = postgres({ + database: 'postgres_js_test', + publications: 'alltables', + fetch_types: false + }) + + await sql.unsafe('create publication alltables for all tables') + + const result = [] + + await sql.subscribe('*', (row, info) => + result.push(info.command, row.name || row.id) + ) + + await sql` + create table test ( + id serial primary key, + name text + ) + ` + await sql`insert into test (name) values ('Murray')` + await sql`update test set name = 'Rothbard'` + await sql`delete from test` + await delay(100) + return [ + 'insert,Murray,update,Rothbard,delete,1', + result.join(','), + await sql`drop table test`, + await sql`drop publication alltables`, + await sql.end() + ] +}) + +t('Execute works', async() => { + const result = await new Promise((resolve) => { + const sql = postgres({ ...options, fetch_types: false, debug (id, query) { + resolve(query) + }}) + sql`select 1`.execute() + }) + + return [result, 'select 1'] +}) + +t('Cancel running query works', async() => { + const query = sql`select pg_sleep(2)` + setTimeout(() => query.cancel(), 50) + const error = await query.catch(x => x) + return ['57014', error.code] +}) + +t('Cancel piped query works', async() => { + await sql`select 1` + const last = sql`select pg_sleep(0.2)`.execute() + const query = sql`select pg_sleep(2) as dig` + setTimeout(() => query.cancel(), 100) + const error = await query.catch(x => x) + await last + return 
['57014', error.code] +}) + +t('Cancel queued query works', async() => { + const tx = sql.begin(sql => sql`select pg_sleep(0.2) as hej, 'hejsa'`) + const query = sql`select pg_sleep(2) as nej` + setTimeout(() => query.cancel(), 50) + const error = await query.catch(x => x) + await tx + return ['57014', error.code] +}) + +t('Fragments', async() => [ + 1, + (await sql` + ${ sql`select` } 1 as x + `)[0].x +]) + +t('Result becomes array', async() => [ + true, + (await sql`select 1`).slice() instanceof Array +]) + +t('Describe', async() => { + const type = (await sql`select ${ 1 }::int as x`.describe()).types[0] + return [23, type] +}) + +t('Describe a statement', async() => { + await sql`create table tester (name text, age int)` + const r = await sql`select name, age from tester where name like $1 and age > $2`.describe() + return [ + '25,23/name:25,age:23', + `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`, + await sql`drop table tester` + ] + }) + +t('Describe a statement without parameters', async() => { + await sql`create table tester (name text, age int)` + const r = await sql`select name, age from tester`.describe() + return [ + '0,2', + `${ r.types.length },${ r.columns.length }`, + await sql`drop table tester` + ] + }) + +t('Describe a statement without columns', async () => { + await sql`create table tester (name text, age int)` + const r = await sql`insert into tester (name, age) values ($1, $2)`.describe() + return [ + '2,0', + `${ r.types.length },${ r.columns.length }`, + await sql`drop table tester` + ] + }) + +nt('Large object', async() => { + const file = rel('index.js') + , md5 = crypto.createHash('md5').update(fs.readFileSync(file)).digest('hex') + + const lo = await sql.largeObject() + await new Promise(async r => fs.createReadStream(file).pipe(await lo.writable()).on('finish', r)) + await lo.seek(0) + + const out = crypto.createHash('md5') + await new Promise(r => lo.readable().then(x => x.on('data', x => 
out.update(x)).on('end', r))) + + return [ + md5, + out.digest('hex'), + await lo.close() + ] +}) diff --git a/deno/tests/select-param.sql b/deno/tests/select-param.sql new file mode 100644 index 00000000..d4de2440 --- /dev/null +++ b/deno/tests/select-param.sql @@ -0,0 +1 @@ +select $1 as x diff --git a/deno/tests/select.sql b/deno/tests/select.sql new file mode 100644 index 00000000..f951e920 --- /dev/null +++ b/deno/tests/select.sql @@ -0,0 +1 @@ +select 1 as x diff --git a/deno/tests/test.js b/deno/tests/test.js new file mode 100644 index 00000000..4b8eca74 --- /dev/null +++ b/deno/tests/test.js @@ -0,0 +1,89 @@ +import process from 'https://deno.land/std@0.120.0/node/process.ts' +/* eslint no-console: 0 */ + +import util from 'https://deno.land/std@0.120.0/node/util.ts' + +let done = 0 +let only = false +let ignored = 0 +let failed = false +let promise = Promise.resolve() +const tests = {} + , ignore = {} + +export const nt = () => ignored++ +export const ot = (...rest) => (only = true, test(true, ...rest)) +export const t = (...rest) => test(false, ...rest) +t.timeout = 0.5 + +async function test(o, name, options, fn) { + typeof options !== 'object' && (fn = options, options = {}) + const line = new Error().stack.split('\n')[3].match(':([0-9]+):')[1] + + await 1 + + if (only && !o) + return + + tests[line] = { fn, line, name } + promise = promise.then(() => Promise.race([ + new Promise((resolve, reject) => + fn.timer = setTimeout(() => reject('Timed out'), (options.timeout || t.timeout) * 1000) + ), + failed + ? 
(ignored++, ignore) + : fn() + ])) + .then((x) => { + clearTimeout(fn.timer) + if (x === ignore) + return + + if (!Array.isArray(x)) + throw new Error('Test should return result array') + + const [expected, got] = x + if (expected !== got) { + failed = true + throw new Error(util.inspect(expected) + ' != ' + util.inspect(got)) + } + + tests[line].succeeded = true + process.stdout.write('✅') + }) + .catch(err => { + tests[line].failed = failed = true + tests[line].error = err instanceof Error ? err : new Error(util.inspect(err)) + }) + .then(() => { + ++done === Object.keys(tests).length && exit() + }) +} + +function exit() { + console.log('') + let success = true + Object.values(tests).every((x) => { + if (x.succeeded) + return true + + success = false + x.cleanup + ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup)) + : console.error('⛔️', x.name + ' at line', x.line, x.failed + ? 'failed' + : 'never finished', x.error ? '\n' + util.inspect(x.error) : '' + ) + }) + + only + ? console.error('⚠️', 'Not all tests were run') + : ignored + ? console.error('⚠️', ignored, 'ignored test' + (ignored === 1 ? '' : 's', '\n')) + : success + ? 
console.log('All good') + : console.error('⚠️', 'Not good') + + !process.exitCode && (!success || only || ignored) && (process.exitCode = 1) +} + diff --git a/lib/backend.js b/lib/backend.js deleted file mode 100644 index 5248b735..00000000 --- a/lib/backend.js +++ /dev/null @@ -1,255 +0,0 @@ -const { errors } = require('./errors.js') - , { entries, errorFields } = require('./types.js') - -const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc) - , N = '\u0000' - -module.exports = Backend - -function Backend({ - onparse, - onparameter, - onsuspended, - oncomplete, - onerror, - parsers, - onauth, - onready, - oncopy, - ondata, - transform, - onnotice, - onnotify -}) { - let rows = 0 - - const backend = entries({ - 1: ParseComplete, - 2: BindComplete, - 3: CloseComplete, - A: NotificationResponse, - C: CommandComplete, - c: CopyDone, - D: DataRow, - d: CopyData, - E: ErrorResponse, - G: CopyInResponse, - H: CopyOutResponse, - I: EmptyQueryResponse, - K: BackendKeyData, - N: NoticeResponse, - n: NoData, - R: Authentication, - S: ParameterStatus, - s: PortalSuspended, - T: RowDescription, - t: ParameterDescription, - V: FunctionCallResponse, - v: NegotiateProtocolVersion, - W: CopyBothResponse, - Z: ReadyForQuery - }).reduce(char, {}) - - const state = backend.state = { - status : 'I', - pid : null, - secret : null - } - - function ParseComplete() { - onparse() - } - - /* c8 ignore next 2 */ - function BindComplete() { - backend.query.result.columns = backend.query.statement.columns - } - - function CloseComplete() { /* No handling needed */ } - - function NotificationResponse(x) { - if (!onnotify) - return - - let index = 9 - while (x[index++] !== 0); - onnotify( - x.toString('utf8', 9, index - 1), - x.toString('utf8', index, x.length - 1) - ) - } - - function CommandComplete(x) { - rows = 0 - - if (!backend.query) - return - - for (let i = x.length - 1; i > 0; i--) { - if (x[i] === 32 && x[i + 1] < 58 && backend.query.result.count === null) - 
backend.query.result.count = +x.toString('utf8', i + 1, x.length - 1) - if (x[i - 1] >= 65) { - backend.query.result.command = x.toString('utf8', 5, i) - backend.query.result.state = state - break - } - } - - oncomplete() - } - - /* c8 ignore next 3 */ - function CopyDone() { - backend.query.readable.push(null) - } - - function DataRow(x) { - let index = 7 - let length - let column - let value - - const row = backend.query.raw ? new Array(backend.query.statement.columns.length) : {} - for (let i = 0; i < backend.query.statement.columns.length; i++) { - column = backend.query.statement.columns[i] - length = x.readInt32BE(index) - index += 4 - - value = length === -1 - ? null - : backend.query.raw - ? x.slice(index, index += length) - : column.parser === undefined - ? x.toString('utf8', index, index += length) - : column.parser.array === true - ? column.parser(x.toString('utf8', index + 1, index += length)) - : column.parser(x.toString('utf8', index, index += length)) - - backend.query.raw - ? (row[i] = value) - : (row[column.name] = transform.value.from ? transform.value.from(value) : value) - } - - backend.query.stream - ? backend.query.stream(transform.row.from ? transform.row.from(row) : row, backend.query.result) - : (backend.query.result[rows++] = transform.row.from ? transform.row.from(row) : row) - } - - /* c8 ignore next 3 */ - function CopyData(x) { - ondata(x.slice(5)) - } - - function ErrorResponse(x) { - onerror(errors.postgres(parseError(x))) - } - - /* c8 ignore next 3 */ - function CopyInResponse() { - oncopy() - } - - /* c8 ignore next 3 */ - function CopyOutResponse() { /* No handling needed */ } - - /* c8 ignore next 3 */ - function EmptyQueryResponse() { /* No handling needed */ } - - function BackendKeyData(x) { - state.pid = x.readInt32BE(5) - state.secret = x.readInt32BE(9) - } - - function NoticeResponse(x) { - onnotice - ? 
onnotice(parseError(x)) - : console.log(parseError(x)) // eslint-disable-line - } - - function NoData() { /* No handling needed */ } - - function Authentication(x) { - const type = x.readInt32BE(5) - type !== 0 && onauth(type, x, onerror) - } - - function ParameterStatus(x) { - const [k, v] = x.toString('utf8', 5, x.length - 1).split(N) - onparameter(k, v) - } - - function PortalSuspended() { - onsuspended(backend.query.result) - backend.query.result = [] - rows = 0 - } - - /* c8 ignore next 3 */ - function ParameterDescription() { /* No handling needed */ } - - function RowDescription(x) { - if (backend.query.result.command) { - backend.query.results = backend.query.results || [backend.query.result] - backend.query.results.push(backend.query.result = []) - backend.query.result.count = null - backend.query.statement.columns = null - } - - if (backend.query.statement.columns) - return backend.query.result.columns = backend.query.statement.columns - - const length = x.readInt16BE(5) - let index = 7 - let start - - backend.query.statement.columns = Array(length) - - for (let i = 0; i < length; ++i) { - start = index - while (x[index++] !== 0); - const type = x.readInt32BE(index + 6) - backend.query.statement.columns[i] = { - name: transform.column.from - ? 
transform.column.from(x.toString('utf8', start, index - 1)) - : x.toString('utf8', start, index - 1), - parser: parsers[type], - type - } - index += 18 - } - backend.query.result.columns = backend.query.statement.columns - } - - /* c8 ignore next 3 */ - function FunctionCallResponse() { - backend.error = errors.notSupported('FunctionCallResponse') - } - - /* c8 ignore next 3 */ - function NegotiateProtocolVersion() { - backend.error = errors.notSupported('NegotiateProtocolVersion') - } - - /* c8 ignore next 3 */ - function CopyBothResponse() { - oncopy() - } - - function ReadyForQuery() { - onready(backend.error) - } - - return backend -} - -function parseError(x) { - const error = {} - let start = 5 - for (let i = 5; i < x.length - 1; i++) { - if (x[i] === 0) { - error[errorFields[x[start]]] = x.toString('utf8', start + 1, i) - start = i + 1 - } - } - return error -} diff --git a/lib/connection.js b/lib/connection.js deleted file mode 100644 index 3f5c8360..00000000 --- a/lib/connection.js +++ /dev/null @@ -1,472 +0,0 @@ -const net = require('net') -const tls = require('tls') -const frontend = require('./frontend.js') -const Backend = require('./backend.js') -const Queue = require('./queue.js') -const { END, retryRoutines } = require('./types.js') -const { errors } = require('./errors.js') - -module.exports = Connection - -let count = 1 - -function Connection(options = {}) { - const statements = new Map() - const { - onparameter, - transform, - idle_timeout, - connect_timeout, - onnotify, - onnotice, - onclose, - parsers - } = options - let buffer = Buffer.alloc(0) - let length = 0 - let messages = [] - let timer - let statement_id = 1 - let ended - let open = false - let ready = false - let write = false - let next = false - let connect_timer - let buffers = null - let remaining = 0 - - const queries = Queue() - , id = count++ - , uid = Math.random().toString(36).slice(2) - - const socket = postgresSocket(options, { - ready, - data, - error, - close, - cleanup - 
}) - - const connection = { send, end, destroy, socket } - - const backend = Backend({ - onparse, - onparameter, - onsuspended, - oncomplete, - onerror, - transform, - parsers, - onnotify, - onnotice, - onready, - onauth, - oncopy, - ondata, - error - }) - - function onsuspended(x, done) { - new Promise(r => r(x.length && backend.query.cursor( - backend.query.cursor.rows === 1 ? x[0] : x - ))).then(x => { - x === END || done - ? socket.write(frontend.Close()) - : socket.write(frontend.ExecuteCursor(backend.query.cursor.rows)) - }).catch(err => { - backend.query.reject(err) - socket.write(frontend.Close()) - }) - } - - function oncomplete() { - backend.query.cursor && onsuspended(backend.query.result, true) - } - - function onerror(x) { - if (!backend.query) - return error(x) - - backend.error = x - backend.query.cursor && socket.write(frontend.Sync) - } - - function onparse() { - if (backend.query && backend.query.statement.sig) - statements.set(backend.query.statement.sig, backend.query.statement) - } - - function onauth(type, x, onerror) { - Promise.resolve( - typeof options.pass === 'function' - ? 
options.pass() - : options.pass - ).then(pass => - socket.write(frontend.auth(type, x, options, pass)) - ).catch(onerror) - } - - function end() { - clearTimeout(timer) - const promise = new Promise((resolve) => { - ended = () => resolve(socket.end()) - }) - - process.nextTick(() => (ready || !backend.query) && ended()) - - return promise - } - - function destroy() { - error(errors.connection('CONNECTION_DESTROYED', options, socket)) - socket.destroy() - } - - function error(err) { - backend.query && backend.query.reject(err) - let q - while ((q = queries.shift())) - q.reject(err) - } - - function retry(query) { - query.retried = true - statements.delete(query.sig) - ready = true - backend.query = backend.error = null - send(query, { sig: query.sig, str: query.str, args: query.args }) - } - - function send(query, { sig, str, args = [] }) { - try { - query.sig = sig - query.str = str - query.args = args - query.result = [] - query.result.count = null - idle_timeout && clearTimeout(timer) - - typeof options.debug === 'function' && options.debug(id, str, args) - const buffer = query.simple - ? simple(str, query) - : statements.has(sig) - ? prepared(statements.get(sig), args, query) - : prepare(sig, str, args, query) - - ready - ? (backend.query = query, ready = false) - : queries.push(query) - - open - ? socket.write(buffer) - : (messages.push(buffer), connect()) - } catch (err) { - query.reject(err) - idle() - } - } - - function connect() { - connect_timeout && ( - clearTimeout(connect_timer), - connect_timer = setTimeout(connectTimedOut, connect_timeout * 1000).unref() - ) - socket.connect() - } - - function connectTimedOut() { - error(errors.connection('CONNECT_TIMEOUT', options, socket)) - socket.destroy() - } - - function simple(str, query) { - query.statement = {} - return frontend.Query(str) - } - - function prepared(statement, args, query) { - query.statement = statement - return Buffer.concat([ - frontend.Bind(query.statement.name, args), - query.cursor - ? 
frontend.Describe('P') - : Buffer.alloc(0), - query.cursor - ? frontend.ExecuteCursor(query.cursor.rows) - : frontend.Execute - ]) - } - - function prepare(sig, str, args, query) { - query.statement = { name: sig ? 'p' + uid + statement_id++ : '', sig } - return Buffer.concat([ - frontend.Parse(query.statement.name, str, args), - frontend.Bind(query.statement.name, args), - query.cursor - ? frontend.Describe('P') - : frontend.Describe('S', query.statement.name), - query.cursor - ? frontend.ExecuteCursor(query.cursor.rows) - : frontend.Execute - ]) - } - - function idle() { - if (idle_timeout && !backend.query && queries.length === 0) { - clearTimeout(timer) - timer = setTimeout(socket.end, idle_timeout * 1000) - } - } - - function onready(err) { - clearTimeout(connect_timer) - if (err) { - if (backend.query) { - if (!backend.query.retried && retryRoutines[err.routine]) - return retry(backend.query) - - err.stack += backend.query.origin.replace(/.*\n/, '\n') - Object.defineProperty(err, 'query', { - value: backend.query.str, - enumerable: !!options.debug - }) - Object.defineProperty(err, 'parameters', { - value: backend.query.args, - enumerable: !!options.debug - }) - backend.query.reject(err) - } else { - error(err) - } - } else if (backend.query) { - backend.query.resolve(backend.query.results || backend.query.result) - } - - backend.query = backend.error = null - idle() - - if (!open) { - if (multi()) - return - - messages.forEach(x => socket.write(x)) - messages = [] - open = true - } - - backend.query = queries.shift() - ready = !backend.query - ready && ended && ended() - } - - function oncopy() { - backend.query.writable.push = ({ chunk, error, callback }) => { - error - ? socket.write(frontend.CopyFail(error)) - : chunk === null - ? 
socket.write(frontend.CopyDone()) - : socket.write(frontend.CopyData(chunk), callback) - } - backend.query.writable.forEach(backend.query.writable.push) - } - - function ondata(x) { - !backend.query.readable.push(x) && socket.pause() - } - - function multi() { - if (next) - return (next = false, true) - - if (!write && options.target_session_attrs === 'read-write') { - backend.query = { - origin: '', - result: [], - statement: {}, - resolve: ([{ transaction_read_only }]) => transaction_read_only === 'on' - ? (next = true, socket.destroy()) - : (write = true, socket.success()), - reject: error - } - socket.write(frontend.Query('show transaction_read_only')) - return true - } - } - - function data(x) { - if (buffers) { - buffers.push(x) - remaining -= x.length - if (remaining >= 0) - return - } - - buffer = buffers - ? Buffer.concat(buffers, length - remaining) - : buffer.length === 0 - ? x - : Buffer.concat([buffer, x], buffer.length + x.length) - - while (buffer.length > 4) { - length = buffer.readInt32BE(1) - if (length >= buffer.length) { - remaining = length - buffer.length - buffers = [buffer] - break - } - - backend[buffer[0]](buffer.slice(0, length + 1)) - buffer = buffer.slice(length + 1) - remaining = 0 - buffers = null - } - } - - function close() { - clearTimeout(connect_timer) - error(errors.connection('CONNECTION_CLOSED', options, socket)) - messages = [] - onclose && onclose() - } - - function cleanup() { - statements.clear() - open = ready = write = false - } - - /* c8 ignore next */ - return connection -} - -function postgresSocket(options, { - error, - close, - cleanup, - data -}) { - let socket - let ended = false - let closed = true - let succeeded = false - let next = null - let buffer - let i = 0 - let retries = 0 - - function onclose(err) { - retries++ - oncleanup() - !ended && !succeeded && i < options.host.length - ? connect() - : err instanceof Error - ? 
(error(err), close()) - : close() - i >= options.host.length && (i = 0) - } - - function oncleanup() { - socket.removeListener('data', data) - socket.removeListener('close', onclose) - socket.removeListener('error', onclose) - socket.removeListener('connect', ready) - socket.removeListener('secureConnect', ready) - closed = true - cleanup() - } - - async function connect() { - if (!closed) - return - - retries && await new Promise(r => - setTimeout(r, Math.min((0.5 + Math.random()) * Math.pow(1.3, retries) * 10, 10000)) - ) - - closed = succeeded = false - - socket = options.path - ? net.connect(options.path) - : net.connect( - x.port = options.port[i], - x.host = options.host[i++] - ).setKeepAlive(true, 1000 * 60) - - if (!options.ssl) - return attach(socket) - - socket.once('connect', () => socket.write(frontend.SSLRequest)) - socket.once('error', onclose) - socket.once('close', onclose) - socket.once('data', x => { - socket.removeListener('error', onclose) - socket.removeListener('close', onclose) - x.toString() === 'S' - ? attach(tls.connect(Object.assign({ socket }, ssl(options.ssl)))) - : options.ssl === 'prefer' - ? (attach(socket), ready()) - : /* c8 ignore next */ error('Server does not support SSL') - }) - } - - function ssl(x) { - return x === 'require' || x === 'allow' || x === 'prefer' - ? { rejectUnauthorized: false } - : x - } - - function attach(x) { - socket = x - socket.on('data', data) - socket.once('error', onclose) - socket.once('connect', ready) - socket.once('secureConnect', ready) - socket.once('close', onclose) - } - - function ready() { - retries = 0 - try { - socket.write(frontend.StartupMessage(options)) - } catch (e) { - error(e) - socket.end() - } - } - - const x = { - success: () => { - retries = 0 - succeeded = true - i >= options.host.length && (i = 0) - }, - pause: () => socket.pause(), - resume: () => socket.resume(), - isPaused: () => socket.isPaused(), - write: (x, callback) => { - buffer = buffer ? 
Buffer.concat([buffer, x]) : Buffer.from(x) - if (buffer.length >= 1024) - return write(callback) - next === null && (next = setImmediate(write)) - callback && callback() - }, - destroy: () => { - socket && socket.destroy() - return Promise.resolve() - }, - end: () => { - ended = true - return new Promise(r => socket && !closed ? (socket.once('close', r), socket.end()) : r()) - }, - connect - } - - function write(callback) { - socket.write(buffer, callback) - next !== null && clearImmediate(next) - buffer = next = null - } - - /* c8 ignore next */ - return x -} diff --git a/lib/frontend.js b/lib/frontend.js deleted file mode 100644 index 8a980c18..00000000 --- a/lib/frontend.js +++ /dev/null @@ -1,249 +0,0 @@ -const crypto = require('crypto') -const bytes = require('./bytes.js') -const { entries } = require('./types.js') -const { errors } = require('./errors.js') - -const N = String.fromCharCode(0) -const empty = Buffer.alloc(0) -const Sync = bytes.S().end() -const Flush = bytes.H().end() -const Execute = Buffer.concat([ - bytes.E().str(N).i32(0).end(), - bytes.S().end() -]) - -const SSLRequest = bytes.i32(8).i32(80877103).end(8) - -const authNames = { - 2 : 'KerberosV5', - 3 : 'CleartextPassword', - 5 : 'MD5Password', - 6 : 'SCMCredential', - 7 : 'GSS', - 8 : 'GSSContinue', - 9 : 'SSPI', - 10: 'SASL', - 11: 'SASLContinue', - 12: 'SASLFinal' -} - -const auths = { - 3 : AuthenticationCleartextPassword, - 5 : AuthenticationMD5Password, - 10: SASL, - 11: SASLContinue, - 12: SASLFinal -} - -module.exports = { - StartupMessage, - SSLRequest, - auth, - Bind, - Sync, - Flush, - Parse, - Query, - Close, - Execute, - ExecuteCursor, - Describe, - CopyData, - CopyDone, - CopyFail -} - -function StartupMessage({ user, database, connection }) { - return bytes - .inc(4) - .i16(3) - .z(2) - .str(entries(Object.assign({ - user, - database, - client_encoding: '\'utf-8\'' - }, - connection - )).filter(([, v]) => v).map(([k, v]) => k + N + v).join(N)) - .z(2) - .end(0) -} - -function 
auth(type, x, options, pass) { - if (type in auths) - return auths[type](type, x, options, pass) - /* c8 ignore next */ - throw errors.generic({ - message: 'Auth type ' + (authNames[type] || type) + ' not implemented', - type: authNames[type] || type, - code: 'AUTH_TYPE_NOT_IMPLEMENTED' - }) -} - -function AuthenticationCleartextPassword(type, x, options, pass) { - return bytes - .p() - .str(pass) - .z(1) - .end() -} - -function AuthenticationMD5Password(type, x, options, pass) { - return bytes - .p() - .str('md5' + md5(Buffer.concat([Buffer.from(md5(pass + options.user)), x.slice(9)]))) - .z(1) - .end() -} - -function SASL(type, x, options) { - bytes - .p() - .str('SCRAM-SHA-256' + N) - - const i = bytes.i - - options.nonce = crypto.randomBytes(18).toString('base64') - - return bytes - .inc(4) - .str('n,,n=*,r=' + options.nonce) - .i32(bytes.i - i - 4, i) - .end() -} - -function SASLContinue(type, x, options, pass) { - const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {}) - - const saltedPassword = crypto.pbkdf2Sync( - pass, - Buffer.from(res.s, 'base64'), - parseInt(res.i), 32, - 'sha256' - ) - - const clientKey = hmac(saltedPassword, 'Client Key') - - const auth = 'n=*,r=' + options.nonce + ',' - + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i - + ',c=biws,r=' + res.r - - options.serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64') - - return bytes.p() - .str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')) - .end() -} - -function SASLFinal(type, x, options) { - if (x.toString('utf8', 9).split(N, 1)[0].slice(2) === options.serverSignature) - return empty - /* c8 ignore next 4 */ - throw errors.generic({ - message: 'The server did not return the correct signature', - code: 'SASL_SIGNATURE_MISMATCH' - }) -} - -function Query(x) { - return bytes - .Q() - .str(x + N) - .end() -} - -function CopyData(x) { - return bytes - .d() - .raw(x) - .end() 
-} - -function CopyDone() { - return bytes - .c() - .end() -} - -function CopyFail(err) { - return bytes - .f() - .str(String(err) + N) - .end() -} - -function Bind(name, args) { - let prev - - bytes - .B() - .str(N) - .str(name + N) - .i16(0) - .i16(args.length) - - args.forEach(x => { - if (x.value == null) - return bytes.i32(0xFFFFFFFF) - - prev = bytes.i - bytes - .inc(4) - .str(x.value) - .i32(bytes.i - prev - 4, prev) - }) - - bytes.i16(0) - - return bytes.end() -} - -function Parse(name, str, args) { - bytes - .P() - .str(name + N) - .str(str + N) - .i16(args.length) - - args.forEach(x => bytes.i32(x.type)) - - return bytes.end() -} - -function Describe(x, name = '') { - return bytes.D().str(x).str(name + N).end() -} - -function ExecuteCursor(rows) { - return Buffer.concat([ - bytes.E().str(N).i32(rows).end(), - bytes.H().end() - ]) -} - -function Close() { - return Buffer.concat([ - bytes.C().str('P').str(N).end(), - bytes.S().end() - ]) -} - -function md5(x) { - return crypto.createHash('md5').update(x).digest('hex') -} - -function hmac(key, x) { - return crypto.createHmac('sha256', key).update(x).digest() -} - -function sha256(x) { - return crypto.createHash('sha256').update(x).digest() -} - -function xor(a, b) { - const length = Math.max(a.length, b.length) - const buffer = Buffer.allocUnsafe(length) - for (let i = 0; i < length; i++) - buffer[i] = a[i] ^ b[i] - return buffer -} diff --git a/lib/index.js b/lib/index.js deleted file mode 100644 index 358ece9e..00000000 --- a/lib/index.js +++ /dev/null @@ -1,711 +0,0 @@ -const fs = require('fs') -const Url = require('url') -const Stream = require('stream') -const Connection = require('./connection.js') -const Queue = require('./queue.js') -const Subscribe = require('./subscribe.js') -const { errors, PostgresError } = require('./errors.js') -const { - mergeUserTypes, - arraySerializer, - arrayParser, - fromPascal, - fromCamel, - fromKebab, - inferType, - toPascal, - toCamel, - toKebab, - entries, - escape, 
- types, - END -} = require('./types.js') - -const notPromise = { - P: {}, - finally: notTagged, - then: notTagged, - catch: notTagged -} - -function notTagged() { - throw errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) -} - -Object.assign(Postgres, { - PostgresError, - toPascal, - toCamel, - toKebab, - fromPascal, - fromCamel, - fromKebab, - BigInt: { - to: 20, - from: [20], - parse: x => BigInt(x), // eslint-disable-line - serialize: x => x.toString() - } -}) - -const originCache = new Map() - -module.exports = Postgres - -function Postgres(a, b) { - if (arguments.length && !a) - throw new Error(a + ' - is not a url or connection object') - - const options = parseOptions(a, b) - - const max = Math.max(1, options.max) - , subscribe = Subscribe(Postgres, a, b) - , transform = options.transform - , connections = Queue() - , all = [] - , queries = Queue() - , listeners = {} - , typeArrayMap = {} - , files = {} - , isInsert = /(^|[^)(])\s*insert\s+into\s+[^\s]+\s*$/i - , isSelect = /(^|[^)(])\s*select\s*$/i - - let ready = false - , ended = null - , arrayTypesPromise = options.fetch_types ? 
null : Promise.resolve([]) - , slots = max - , listener - - function postgres(xs) { - return query({ tagged: true, prepare: options.prepare }, getConnection(), xs, Array.from(arguments).slice(1)) - } - - Object.assign(postgres, { - options: Object.assign({}, options, { pass: null }), - parameters: {}, - subscribe, - listen, - begin, - end - }) - - addTypes(postgres) - - const onparameter = options.onparameter - options.onparameter = (k, v) => { - if (postgres.parameters[k] !== v) { - postgres.parameters[k] = v - onparameter && onparameter(k, v) - } - } - - return postgres - - function begin(options, fn) { - if (!fn) { - fn = options - options = '' - } - - return new Promise((resolve, reject) => { - const connection = getConnection(true) - , query = { resolve, reject, fn, begin: 'begin ' + options.replace(/[^a-z ]/ig, '') } - - connection - ? transaction(query, connection) - : queries.push(query) - }) - } - - function transaction({ - resolve, - reject, - fn, - begin = '', - savepoint = '' - }, connection) { - begin && (connection.savepoints = 0) - addTypes(scoped, connection) - scoped.savepoint = (name, fn) => new Promise((resolve, reject) => { - transaction({ - savepoint: 'savepoint s' + connection.savepoints++ + '_' + (fn ? name : ''), - resolve, - reject, - fn: fn || name - }, connection) - }) - - query({}, connection, begin || savepoint) - .then(() => { - const result = fn(scoped) - return Array.isArray(result) - ? Promise.all(result) - : result - }) - .then((x) => - begin - ? scoped`commit`.then(() => resolve(x)) - : resolve(x) - ) - .catch((err) => { - query({}, connection, - begin - ? 
'rollback' - : 'rollback to ' + savepoint - ) - .then(() => reject(err), reject) - }) - .then(begin && (() => { - connections.push(connection) - next(connection) - })) - - function scoped(xs) { - return query({ tagged: true }, connection, xs, Array.from(arguments).slice(1)) - } - } - - function next() { - let c - , x - - while ( - (x = queries.peek()) - && (c = x.query && x.query.connection || getConnection(queries.peek().fn)) - && queries.shift() - ) { - x.fn - ? transaction(x, c) - : send(c, x.query, x.xs, x.args) - - x.query && x.query.connection && x.query.writable && (c.blocked = true) - } - } - - function query(query, connection, xs, args) { - query.origin = options.debug ? new Error().stack : cachedError(xs) - query.prepare = 'prepare' in query ? query.prepare : options.prepare - if (query.tagged && (!Array.isArray(xs) || !Array.isArray(xs.raw))) - return nested(xs, args) - - const promise = new Promise((resolve, reject) => { - query.resolve = resolve - query.reject = reject - ended !== null - ? reject(errors.connection('CONNECTION_ENDED', options, options)) - : ready - ? send(connection, query, xs, args) - : fetchArrayTypes(connection).then(() => send(connection, query, xs, args)).catch(reject) - }) - - addMethods(promise, query) - - return promise - } - - function cachedError(xs) { - if (originCache.has(xs)) - return originCache.get(xs) - - const x = Error.stackTraceLimit - Error.stackTraceLimit = 4 - originCache.set(xs, new Error().stack) - Error.stackTraceLimit = x - return originCache.get(xs) - } - - function nested(first, rest) { - const o = Object.create(notPromise) - o.first = first - o.rest = rest.reduce((acc, val) => acc.concat(val), []) - return o - } - - function send(connection, query, xs, args) { - connection && (query.connection = connection) - if (!connection || connection.blocked) - return queries.push({ query, xs, args, connection }) - - connection.blocked = query.blocked - process.nextTick(connection.send, query, query.tagged ? 
parseTagged(query, xs, args) : parseUnsafe(query, xs, args)) - } - - function getConnection(reserve) { - const connection = slots ? createConnection(options) : connections.shift() - !reserve && connection && connections.push(connection) - return connection - } - - function createConnection(options) { - slots-- - // The options object gets cloned as the as the authentication in the frontend.js mutates the - // options to persist a nonce and signature, which are unique per connection. - const connection = Connection({ ...options }) - all.push(connection) - return connection - } - - function array(xs) { - const o = Object.create(notPromise) - o.array = xs - return o - } - - function json(value) { - return { - type: types.json.to, - value - } - } - - function fetchArrayTypes(connection) { - return arrayTypesPromise || (arrayTypesPromise = - new Promise((resolve, reject) => { - send(connection, { resolve, reject, simple: true, tagged: false, prepare: false, origin: new Error().stack }, ` - select b.oid, b.typarray - from pg_catalog.pg_type a - left join pg_catalog.pg_type b on b.oid = a.typelem - where a.typcategory = 'A' - group by b.oid, b.typarray - order by b.oid - `) - }).catch(err => { - arrayTypesPromise = null - throw err - }).then(types => { - types.forEach(({ oid, typarray }) => addArrayType(oid, typarray)) - ready = true - }) - ) - } - - function addArrayType(oid, typarray) { - const parser = options.parsers[oid] - - typeArrayMap[oid] = typarray - options.parsers[typarray] = (xs) => arrayParser(xs, parser) - options.parsers[typarray].array = true - options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid]) - } - - function addTypes(sql, connection) { - Object.assign(sql, { - END, - PostgresError, - types: {}, - notify, - unsafe, - array, - file, - json - }) - - function notify(channel, payload) { - return sql`select pg_notify(${ channel }, ${ '' + payload })` - } - - function unsafe(xs, args, queryOptions) { - const prepare = 
queryOptions && queryOptions.prepare || false - return query({ simple: !args, prepare }, connection || getConnection(), xs, args || []) - } - - function file(path, args, options = {}) { - if (!Array.isArray(args)) { - options = args || {} - args = null - } - - if ('cache' in options === false) - options.cache = true - - const file = files[path] - const q = { tagged: false, simple: !args } - - if (options.cache && typeof file === 'string') - return query(q, connection || getConnection(), file, args || []) - - const promise = ((options.cache && file) || (files[path] = new Promise((resolve, reject) => { - fs.readFile(path, 'utf8', (err, str) => { - if (err) - return reject(err) - - files[path] = str - resolve(str) - }) - }))).then(str => query(q, connection || getConnection(), str, args || [])) - - addMethods(promise, q) - - return promise - } - - options.types && entries(options.types).forEach(([name, type]) => { - sql.types[name] = (x) => ({ type: type.to, value: x }) - }) - } - - function addMethods(promise, query) { - promise.readable = () => readable(promise, query) - promise.writable = () => writable(promise, query) - promise.raw = () => (query.raw = true, promise) - promise.stream = (fn) => (query.stream = fn, promise) - promise.cursor = cursor(promise, query) - } - - function cursor(promise, query) { - return (rows, fn) => { - if (typeof rows === 'function') { - fn = rows - rows = 1 - } - fn.rows = rows - query.cursor = fn - query.simple = false - return promise - } - } - - function readable(promise, query) { - query.connection - ? query.connection.blocked = true - : query.blocked = true - - const read = () => query.connection.socket.isPaused() && query.connection.socket.resume() - promise.catch(err => query.readable.destroy(err)).then(() => { - query.connection.blocked = false - read() - next() - }) - return query.readable = new Stream.Readable({ read }) - } - - function writable(promise, query) { - query.connection - ? 
query.connection.blocked = true - : query.blocked = true - let error - query.prepare = false - query.simple = true - query.writable = [] - promise.catch(err => error = err).then(() => { - query.connection.blocked = false - next() - }) - return query.readable = new Stream.Duplex({ - read() { /* backpressure handling not possible */ }, - write(chunk, encoding, callback) { - error - ? callback(error) - : query.writable.push({ chunk, callback }) - }, - destroy(error, callback) { - callback(error) - query.writable.push({ error }) - }, - final(callback) { - if (error) - return callback(error) - - query.writable.push({ chunk: null }) - promise.then(() => callback(), callback) - } - }) - } - - function listen(channel, fn) { - const listener = getListener() - - if (channel in listeners) { - listeners[channel].push(fn) - return Promise.resolve(Object.create(listener.result, { - unlisten: { value: unlisten } - })) - } - - listeners[channel] = [fn] - - return query({}, listener.conn, 'listen ' + escape(channel)) - .then((result) => { - Object.assign(listener.result, result) - return Object.create(listener.result, { - unlisten: { value: unlisten } - }) - }) - - function unlisten() { - if (!listeners[channel]) - return Promise.resolve() - - listeners[channel] = listeners[channel].filter(handler => handler !== fn) - - if (listeners[channel].length) - return Promise.resolve() - - delete listeners[channel] - return query({}, getListener().conn, 'unlisten ' + escape(channel)).then(() => undefined) - } - } - - function getListener() { - if (listener) - return listener - - const conn = Connection(Object.assign({ - onnotify: (c, x) => c in listeners && listeners[c].forEach(fn => fn(x)), - onclose: () => { - Object.entries(listeners).forEach(([channel, fns]) => { - delete listeners[channel] - Promise.all(fns.map(fn => listen(channel, fn).catch(() => { /* noop */ }))) - }) - listener = null - } - }, - options - )) - listener = { conn, result: {} } - all.push(conn) - return listener - } - 
- function end({ timeout = null } = {}) { - if (ended) - return ended - - let destroy - - return ended = Promise.race([ - Promise.resolve(arrayTypesPromise).then(() => Promise.all( - (subscribe.sql ? [subscribe.sql.end({ timeout: 0 })] : []).concat(all.map(c => c.end())) - )) - ].concat( - timeout === 0 || timeout > 0 - ? new Promise(r => destroy = setTimeout(() => ( - subscribe.sql && subscribe.sql.end({ timeout }), - all.map(c => c.destroy()), - r() - ), timeout * 1000)) - : [] - )) - .then(() => clearTimeout(destroy)) - } - - function parseUnsafe(query, str, args = []) { - const types = [] - , xargs = [] - - args.forEach(x => parseValue(x, xargs, types)) - - return { - sig: query.prepare && types + str, - str, - args: xargs - } - } - - function parseTagged(query, xs, args = []) { - const xargs = [] - , types = [] - - let str = xs[0] - let arg - - for (let i = 1; i < xs.length; i++) { - arg = args[i - 1] - str += parseArg(str, arg, xargs, types) + xs[i] - } - - return { - sig: query.prepare && !xargs.dynamic && types + str, - str: str.trim(), - args: xargs - } - } - - function parseArg(str, arg, xargs, types) { - return arg && arg.P === notPromise.P - ? arg.array - ? parseArray(arg.array, xargs, types) - : parseHelper(str, arg, xargs, types) - : parseValue(arg, xargs, types) - } - - function parseArray(array, xargs, types) { - return array.length === 0 ? '\'{}\'' : 'array[' + array.map((x) => Array.isArray(x) - ? parseArray(x, xargs, types) - : parseValue(x, xargs, types) - ).join(',') + ']' - } - - function parseHelper(str, { first, rest }, xargs, types) { - xargs.dynamic = true - if (first !== null && typeof first === 'object' && typeof first[0] !== 'string') { - if (isInsert.test(str)) - return insertHelper(first, rest, xargs, types) - else if (isSelect.test(str)) - return selectHelper(first, rest, xargs, types) - else if (!Array.isArray(first)) - return equalsHelper(first, rest, xargs, types) - } - - return escapeHelper(Array.isArray(first) ? 
first : [first].concat(rest)) - } - - function selectHelper(first, columns, xargs, types) { - return entries(first).reduce((acc, [k, v]) => - acc + (!columns.length || columns.indexOf(k) > -1 - ? (acc ? ',' : '') + parseValue(v, xargs, types) + ' as ' + escape( - transform.column.to ? transform.column.to(k) : k - ) - : '' - ), - '' - ) - } - - function insertHelper(first, columns, xargs, types) { - first = Array.isArray(first) ? first : [first] - columns = columns.length ? columns : Object.keys(first[0]) - return '(' + escapeHelper(columns) + ') values ' + - first.reduce((acc, row) => - acc + (acc ? ',' : '') + '(' + - columns.reduce((acc, k) => acc + (acc ? ',' : '') + parseValue(row[k], xargs, types), '') + - ')', - '' - ) - } - - function equalsHelper(first, columns, xargs, types) { - return (columns.length ? columns : Object.keys(first)).reduce((acc, k) => - acc + (acc ? ',' : '') + escape( - transform.column.to ? transform.column.to(k) : k - ) + ' = ' + parseValue(first[k], xargs, types), - '' - ) - } - - function escapeHelper(xs) { - return xs.reduce((acc, x) => acc + (acc ? ',' : '') + escape( - transform.column.to ? transform.column.to(x) : x - ), '') - } - - function parseValue(x, xargs, types) { - if (x === undefined) - throw errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) - - return Array.isArray(x) - ? x.reduce((acc, x) => acc + (acc ? ',' : '') + addValue(x, xargs, types), '') - : x && x.P === notPromise.P - ? parseArg('', x, xargs, types) - : addValue(x, xargs, types) - } - - function addValue(x, xargs, types) { - const type = getType(x) - , i = types.push(type.type) - - if (i > 65534) - throw errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) - - xargs.push(type) - return '$' + i - } - - function getType(x) { - if (x == null) - return { type: 0, value: x, raw: x } - - const value = x.type ? 
x.value : x - , type = x.type || inferType(value) - - return { - type, - value: (options.serializers[type] || types.string.serialize)(value), - raw: x - } - } -} - -function parseOptions(a, b) { - const env = process.env // eslint-disable-line - , o = (typeof a === 'string' ? b : a) || {} - , { url, multihost } = parseUrl(a, env) - , auth = (url.auth || '').split(':') - , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost' - , port = o.port || url.port || env.PGPORT || 5432 - , user = o.user || o.username || auth[0] || env.PGUSERNAME || env.PGUSER || osUsername() - - return Object.assign({ - host : host.split(',').map(x => x.split(':')[0]), - port : host.split(',').map(x => x.split(':')[1] || port), - path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port, - database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user, - user : user, - pass : o.pass || o.password || auth[1] || env.PGPASSWORD || '', - max : o.max || url.query.max || 10, - types : o.types || {}, - ssl : o.ssl || parseSSL(url.query.sslmode || url.query.ssl) || false, - idle_timeout : o.idle_timeout || url.query.idle_timeout || env.PGIDLE_TIMEOUT || warn(o.timeout), - connect_timeout : o.connect_timeout || url.query.connect_timeout || env.PGCONNECT_TIMEOUT || 30, - prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true, - onnotice : o.onnotice, - onparameter : o.onparameter, - transform : parseTransform(o.transform || {}), - connection : Object.assign({ application_name: 'postgres.js' }, o.connection), - target_session_attrs: o.target_session_attrs || url.query.target_session_attrs || env.PGTARGETSESSIONATTRS, - debug : o.debug, - fetch_types : 'fetch_types' in o ? o.fetch_types : true - }, - mergeUserTypes(o.types) - ) -} - -function parseTransform(x) { - return { - column: { - from: typeof x.column === 'function' ? 
x.column : x.column && x.column.from, - to: x.column && x.column.to - }, - value: { - from: typeof x.value === 'function' ? x.value : x.value && x.value.from, - to: x.value && x.value.to - }, - row: { - from: typeof x.row === 'function' ? x.row : x.row && x.row.from, - to: x.row && x.row.to - } - } -} - -function parseSSL(x) { - return x !== 'disable' && x !== 'false' && x -} - -function parseUrl(url) { - if (typeof url !== 'string') - return { url: { query: {} } } - - let host = url - host = host.slice(host.indexOf('://') + 3) - host = host.split(/[?/]/)[0] - host = host.slice(host.indexOf('@') + 1) - - return { - url: Url.parse(url.replace(host, host.split(',')[0]), true), - multihost: host.indexOf(',') > -1 && host - } -} - -function warn(x) { - typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line - return x -} - -function osUsername() { - try { - return require('os').userInfo().username // eslint-disable-line - } catch (_) { - return - } -} diff --git a/lib/types.js b/lib/types.js deleted file mode 100644 index a94a8932..00000000 --- a/lib/types.js +++ /dev/null @@ -1,204 +0,0 @@ -const char = module.exports.char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc) -const entries = o => Object.keys(o).map(x => [x, o[x]]) - -// These were the fastest ways to do it in Node.js v12.11.1 (add tests to revise if this changes) -const types = module.exports.types = { - string: { - to: 25, - from: null, // defaults to string - serialize: x => '' + x - }, - number: { - to: 0, - from: [21, 23, 26, 700, 701], - serialize: x => '' + x, - parse: x => +x - }, - json: { - to: 3802, - from: [114, 3802], - serialize: x => JSON.stringify(x), - parse: x => JSON.parse(x) - }, - boolean: { - to: 16, - from: 16, - serialize: x => x === true ? 
't' : 'f', - parse: x => x === 't' - }, - date: { - to: 1184, - from: [1082, 1114, 1184], - serialize: x => x.toISOString(), - parse: x => new Date(x) - }, - bytea: { - to: 17, - from: 17, - serialize: x => '\\x' + Buffer.from(x.buffer, x.byteOffset, x.byteLength).toString('hex'), - parse: x => Buffer.from(x.slice(2), 'hex') - } -} - -const defaultHandlers = typeHandlers(types) - -const serializers = module.exports.serializers = defaultHandlers.serializers -const parsers = module.exports.parsers = defaultHandlers.parsers - -module.exports.entries = entries - -module.exports.END = {} - -module.exports.mergeUserTypes = function(types) { - const user = typeHandlers(types || {}) - return { - serializers: Object.assign({}, serializers, user.serializers), - parsers: Object.assign({}, parsers, user.parsers) - } -} - -function typeHandlers(types) { - return Object.keys(types).reduce((acc, k) => { - types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse) - acc.serializers[types[k].to] = types[k].serialize - return acc - }, { parsers: {}, serializers: {} }) -} - -module.exports.escape = function escape(str) { - return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"' -} - -const type = { - number: 0, - bigint: 20, - boolean: 16 -} - -module.exports.inferType = function inferType(x) { - return (x && x.type) || (x instanceof Date - ? 1184 - : Array.isArray(x) - ? inferType(x[0]) - : x instanceof Buffer - ? 17 - : type[typeof x] || 0) -} - -const escapeBackslash = /\\/g -const escapeQuote = /"/g - -function arrayEscape(x) { - return x - .replace(escapeBackslash, '\\\\') - .replace(escapeQuote, '\\"') -} - -module.exports.arraySerializer = function arraySerializer(xs, serializer) { - if (!xs.length) - return '{}' - - const first = xs[0] - - if (Array.isArray(first) && !first.type) - return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}' - - return '{' + xs.map(x => - '"' + arrayEscape(serializer ? serializer(x.type ? 
x.value : x) : '' + x) + '"' - ).join(',') + '}' -} - -const arrayParserState = { - i: 0, - char: null, - str: '', - quoted: false, - last: 0 -} - -module.exports.arrayParser = function arrayParser(x, parser) { - arrayParserState.i = arrayParserState.last = 0 - return arrayParserLoop(arrayParserState, x, parser) -} - -function arrayParserLoop(s, x, parser) { - const xs = [] - for (; s.i < x.length; s.i++) { - s.char = x[s.i] - if (s.quoted) { - if (s.char === '\\') { - s.str += x[++s.i] - } else if (s.char === '"') { - xs.push(parser ? parser(s.str) : s.str) - s.str = '' - s.quoted = x[s.i + 1] === '"' - s.last = s.i + 2 - } else { - s.str += s.char - } - } else if (s.char === '"') { - s.quoted = true - } else if (s.char === '{') { - s.last = ++s.i - xs.push(arrayParserLoop(s, x, parser)) - } else if (s.char === '}') { - s.quoted = false - s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) - s.last = s.i + 1 - break - } else if (s.char === ',' && s.p !== '}' && s.p !== '"') { - xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) - s.last = s.i + 1 - } - s.p = s.char - } - s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1)) - return xs -} - -module.exports.toCamel = x => { - let str = x[0] - for (let i = 1; i < x.length; i++) - str += x[i] === '_' ? x[++i].toUpperCase() : x[i] - return str -} - -module.exports.toPascal = x => { - let str = x[0].toUpperCase() - for (let i = 1; i < x.length; i++) - str += x[i] === '_' ? 
x[++i].toUpperCase() : x[i] - return str -} - -module.exports.toKebab = x => x.replace(/_/g, '-') - -module.exports.fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase() -module.exports.fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase() -module.exports.fromKebab = x => x.replace(/-/g, '_') - -module.exports.errorFields = entries({ - S: 'severity_local', - V: 'severity', - C: 'code', - M: 'message', - D: 'detail', - H: 'hint', - P: 'position', - p: 'internal_position', - q: 'internal_query', - W: 'where', - s: 'schema_name', - t: 'table_name', - c: 'column_name', - d: 'data type_name', - n: 'constraint_name', - F: 'file', - L: 'line', - R: 'routine' -}).reduce(char, {}) - -module.exports.retryRoutines = { - FetchPreparedStatement: true, - RevalidateCachedQuery: true, - transformAssignedExpr: true -} diff --git a/package.json b/package.json index 4bcbef2f..70417497 100644 --- a/package.json +++ b/package.json @@ -2,17 +2,30 @@ "name": "postgres", "version": "2.0.0-beta.11", "description": "Fastest full featured PostgreSQL client for Node.js", - "main": "lib/index.js", + "type": "module", + "module": "src/index.js", + "main": "cjs/src/index.js", + "exports": { + "import": "./src/index.js", + "default": "./cjs/src/index.js" + }, "types": "types/index.d.ts", "typings": "types/index.d.ts", - "type": "commonjs", "scripts": { - "test": "node tests/index.js", - "lint": "eslint lib && eslint tests", - "prepublishOnly": "npm run lint && npm test" + "build": "npm run build:cjs && npm run build:deno", + "build:cjs": "node transpile.cjs", + "build:deno": "node transpile.deno.js", + "test:cjs": "npm run build:cjs && pushd cjs/tests && node index.js && popd", + "test:cjs": "npm run build:cjs && pushd cjs/tests && node index.js && popd", + "test:deno": "npm run build:deno && pushd deno/tests && deno run --unstable --allow-all --unsafely-ignore-certificate-errors index.js && popd", + "test": "node tests/index.js && npm run test:cjs && npm 
run test:deno", + "lint": "eslint src && eslint tests", + "prepare": "npm run build", + "prepublishOnly": "npm run lint" }, "files": [ - "/lib", + "/cjs", + "/src", "/types" ], "author": "Rasmus Porsager ", diff --git a/src/bytes.js b/src/bytes.js new file mode 100644 index 00000000..6effd6e6 --- /dev/null +++ b/src/bytes.js @@ -0,0 +1,78 @@ +const size = 256 +let buffer = Buffer.allocUnsafe(size) + +const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => { + const v = x.charCodeAt(0) + acc[x] = () => { + buffer[0] = v + b.i = 5 + return b + } + return acc +}, {}) + +const b = Object.assign(reset, messages, { + N: String.fromCharCode(0), + i: 0, + inc(x) { + b.i += x + return b + }, + str(x) { + const length = Buffer.byteLength(x) + fit(length) + b.i += buffer.write(x, b.i, length, 'utf8') + return b + }, + i16(x) { + fit(2) + buffer.writeUInt16BE(x, b.i) + b.i += 2 + return b + }, + i32(x, i) { + if (i || i === 0) { + buffer.writeUInt32BE(x, i) + return b + } + fit(4) + buffer.writeUInt32BE(x, b.i) + b.i += 4 + return b + }, + z(x) { + fit(x) + buffer.fill(0, b.i, b.i + x) + b.i += x + return b + }, + raw(x) { + buffer = Buffer.concat([buffer.slice(0, b.i), x]) + b.i = buffer.length + return b + }, + end(at = 1) { + buffer.writeUInt32BE(b.i - at, at) + const out = buffer.slice(0, b.i) + b.i = 0 + buffer = Buffer.allocUnsafe(size) + return out + } +}) + +export default b + +function fit(x) { + if (buffer.length - b.i < x) { + const prev = buffer + , length = prev.length + + buffer = Buffer.allocUnsafe(length + (length >> 1) + x) + prev.copy(buffer) + } +} + +function reset() { + b.i = 0 + return b +} diff --git a/src/connection.js b/src/connection.js new file mode 100644 index 00000000..61685c50 --- /dev/null +++ b/src/connection.js @@ -0,0 +1,980 @@ +import net from 'net' +import tls from 'tls' +import crypto from 'crypto' +import Stream from 'stream' + +import { Identifier, Builder, handleValue, arrayParser, arraySerializer, CLOSE } from './types.js' 
+import { Errors } from './errors.js' +import Result from './result.js' +import Queue from './queue.js' +import Query from './query.js' +import b from './bytes.js' + +export default Connection + +let uid = 1 + +const Sync = b().S().end() + , Flush = b().H().end() + , SSLRequest = b().i32(8).i32(80877103).end(8) + , ExecuteUnnamed = Buffer.concat([b().E().str(b.N).i32(0).end(), Sync]) + , DescribeUnnamed = b().D().str('S').str(b.N).end() + , noop = () => { /* noop */ } + +const retryRoutines = new Set([ + 'FetchPreparedStatement', + 'RevalidateCachedQuery', + 'transformAssignedExpr' +]) + +const errorFields = { + 83 : 'severity_local', // S + 86 : 'severity', // V + 67 : 'code', // C + 77 : 'message', // M + 68 : 'detail', // D + 72 : 'hint', // H + 80 : 'position', // P + 112 : 'internal_position', // p + 113 : 'internal_query', // q + 87 : 'where', // W + 115 : 'schema_name', // s + 116 : 'table_name', // t + 99 : 'column_name', // c + 100 : 'data type_name', // d + 110 : 'constraint_name', // n + 70 : 'file', // F + 76 : 'line', // L + 82 : 'routine' // R +} + +function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) { + const { + ssl, + user, + host, + port, + database, + parsers, + transform, + onnotice, + onnotify, + onparameter, + max_pipeline, + keep_alive, + backoff, + target_session_attrs + } = options + + const sent = Queue() + , id = uid++ + , backend = { pid: null, secret: null } + , idleTimer = timer(end, options.idle_timeout) + , lifeTimer = timer(end, options.max_lifetime) + , connectTimer = timer(connectTimedOut, options.connect_timeout) + + let socket = createSocket() + , result = new Result() + , incoming = Buffer.alloc(0) + , needsTypes = options.fetch_types + , backendParameters = {} + , statements = {} + , state = 'closed' + , statementId = Math.random().toString(36).slice(2) + , statementCount = 1 + , closedDate = 0 + , remaining = 0 + , hostIndex = 0 + , retries = 0 + , length = 0 + , delay = 0 + , 
rows = 0 + , serverSignature = null + , nextWriteTimer = null + , incomings = null + , results = null + , initial = null + , ending = null + , stream = null + , chunk = null + , ended = null + , nonce = null + , query = null + , final = null + + const connection = { + get state() { return state }, + set state(x) { + state = x + state === 'open' + ? idleTimer.start() + : idleTimer.cancel() + }, + connect(query) { + initial = query + reconnect() + }, + terminate, + execute, + cancel, + end, + count: 0, + id + } + + return connection + + function createSocket() { + const x = net.Socket() + x.setKeepAlive(true, 1000 * keep_alive) + x.on('error', error) + x.on('close', closed) + x.on('drain', drain) + return x + } + + function cancel({ pid, secret }, resolve, reject) { + socket.removeAllListeners() + socket = net.Socket() + socket.on('connect', () => socket.write(b().i32(16).i32(80877102).i32(pid).i32(secret).end(16))) + socket.once('error', reject) + socket.once('close', resolve) + connect() + } + + function execute(q) { + if (q.cancelled) + return + + try { + q.state = backend + query + ? sent.push(q) + : (query = q, query.active = true) + + build(q) + return write(toBuffer(q)) + && !q.describeFirst + && sent.length < max_pipeline + && (!q.options.onexecute || q.options.onexecute(connection)) + } catch (error) { + sent.length === 0 && write(Sync) + errored(error) + return true + } + } + + function toBuffer(q) { + if (q.parameters.length >= 65534) + throw Errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) + + return q.options.simple + ? b().Q().str(q.strings[0] + b.N).end() + : q.describeFirst + ? Buffer.concat([describe(q), Flush]) + : q.prepare + ? q.prepared + ? 
prepared(q) + : Buffer.concat([describe(q), prepared(q)]) + : unnamed(q) + } + + function describe(q) { + return Buffer.concat([ + Parse(q.statement.string, q.parameters, q.statement.types, q.statement.name), + Describe('S', q.statement.name) + ]) + } + + function prepared(q) { + return Buffer.concat([ + Bind(q.parameters, q.statement.types, q.statement.name, q.cursorName), + q.cursorFn + ? Execute('', q.cursorRows) + : ExecuteUnnamed + ]) + } + + function unnamed(q) { + return Buffer.concat([ + Parse(q.statement.string, q.parameters, q.statement.types), + DescribeUnnamed, + prepared(q) + ]) + } + + function build(q) { + const parameters = [] + , types = [] + + const string = stringify(q, q.strings[0], q.args[0], parameters, types) + + !q.tagged && q.args.forEach(x => handleValue(x, parameters, types)) + + q.prepare = options.prepare && ('prepare' in q.options ? q.options.prepare : true) + q.string = string + q.signature = q.prepare && types + string + q.onlyDescribe && (delete statements[q.signature]) + q.parameters = q.parameters || parameters + q.prepared = q.prepare && q.signature in statements + q.describeFirst = q.onlyDescribe || (parameters.length && !q.prepared) + q.statement = q.prepared + ? statements[q.signature] + : { string, types, name: q.prepare ? statementId + statementCount++ : '' } + + typeof options.debug === 'function' && options.debug(id, string, parameters, types) + } + + function stringify(q, string, value, parameters, types) { + for (let i = 1; i < q.strings.length; i++) { + string += ( + value instanceof Query ? fragment(string, value, parameters, types) : + value instanceof Identifier ? value.value : + value instanceof Builder ? 
value.build(string, parameters, types, options.transform) : + handleValue(value, parameters, types) + ) + q.strings[i] + value = q.args[i] + } + + return string + } + + function fragment(string, q, parameters, types) { + q.fragment = true + return stringify(q, string + q.strings[0], q.args[0], parameters, types) + } + + function write(x, fn) { + chunk = chunk ? Buffer.concat([chunk, x]) : Buffer.from(x) + if (fn || chunk.length >= 1024) + return nextWrite(fn) + nextWriteTimer === null && (nextWriteTimer = setImmediate(nextWrite)) + return true + } + + function nextWrite(fn) { + const x = socket.write(chunk, fn) + nextWriteTimer !== null && clearImmediate(nextWriteTimer) + chunk = nextWriteTimer = null + return x + } + + function connectTimedOut() { + errored(Errors.connection('CONNECT_TIMEOUT', options, socket)) + socket.destroy() + } + + async function secure() { + write(SSLRequest) + const canSSL = await new Promise(r => socket.once('data', x => r(x[0] === 83))) // S + + if (!canSSL && ssl === 'prefer') + return connected() + + socket.removeAllListeners() + socket = tls.connect({ + socket, + ...(ssl === 'require' || ssl === 'allow' || ssl === 'prefer' + ? { rejectUnauthorized: false } + : ssl + ) + }) + socket.on('secureConnect', connected) + socket.on('error', error) + socket.on('close', closed) + socket.on('drain', drain) + } + + /* c8 ignore next 3 */ + function drain() { + ondrain(connection) + } + + function data(x) { + if (incomings) { + incomings.push(x) + remaining -= x.length + if (remaining >= 0) + return + } + + incoming = incomings + ? Buffer.concat(incomings, length - remaining) + : incoming.length === 0 + ? 
x + : Buffer.concat([incoming, x], incoming.length + x.length) + + while (incoming.length > 4) { + length = incoming.readUInt32BE(1) + if (length >= incoming.length) { + remaining = length - incoming.length + incomings = [incoming] + break + } + + handle(incoming.slice(0, length + 1)) + incoming = incoming.slice(length + 1) + remaining = 0 + incomings = null + } + } + + function connect() { + backendParameters = {} + connectTimer.start() + socket.on('connect', ssl ? secure : connected) + + if (options.path) + return socket.connect(options.path) + + socket.connect(port[hostIndex], host[hostIndex]) + hostIndex = (hostIndex + 1) % port.length + } + + function reconnect() { + setTimeout(connect, closedDate ? closedDate + delay - Date.now() : 0) + } + + function connected() { + try { + statements = {} + needsTypes = options.fetch_types + statementId = Math.random().toString(36).slice(2) + statementCount = 1 + lifeTimer.start() + socket.on('data', data) + const s = StartupMessage() + write(s) + } catch (err) { + error(err) + } + } + + function error(err) { + if (connection.state === 'connecting' && options.host[retries + 1]) + return + + errored(err) + while (sent.length) + queryError(sent.shift(), err) + } + + function errored(err) { + query && queryError(query, err) + initial && (queryError(initial, err), initial = null) + } + + function queryError(query, err) { + if (err.query) + return + + err.stack += query.origin.replace(/.*\n/, '\n') + Object.defineProperties(err, { + query: { value: query.string, enumerable: options.debug }, + parameters: { value: query.parameters, enumerable: options.debug }, + args: { value: query.args, enumerable: options.debug }, + types: { value: query.statement && query.statement.types, enumerable: options.debug } + }) + query.reject(err) + } + + function end() { + return ending || ( + !connection.reserved && onend(connection), + !connection.reserved && !initial && !query && sent.length === 0 + ? 
Promise.resolve(terminate()) + : ending = new Promise(r => ended = r) + ) + } + + function terminate() { + if (query || initial || sent.length) + error(Errors.connection('CONNECTION_DESTROYED', options)) + + clearImmediate(nextWriteTimer) + socket.removeListener('data', data) + socket.removeListener('connect', connected) + socket.readyState !== 'closed' && socket.end(b().X().end()) + ended && (ended(), ending = ended = null) + } + + function closed(hadError) { + clearImmediate(nextWriteTimer) + socket.removeListener('data', data) + socket.removeListener('connect', connected) + idleTimer.cancel() + lifeTimer.cancel() + connectTimer.cancel() + + if (socket.encrypted) { + socket.removeAllListeners() + socket = createSocket() + } + + if (initial) + return reconnect() + + !hadError && (query || sent.length) && error(Errors.connection('CONNECTION_CLOSED', options, socket)) + closedDate = Date.now() + hadError && options.shared.retries++ + delay = (typeof backoff === 'function' ? backoff(options.shared.retries) : backoff) * 1000 + onclose(connection) + } + + /* Handlers */ + function handle(xs, x = xs[0]) { + ( + x === 68 ? DataRow : // D + x === 100 ? CopyData : // d + x === 65 ? NotificationResponse : // A + x === 83 ? ParameterStatus : // S + x === 90 ? ReadyForQuery : // Z + x === 67 ? CommandComplete : // C + x === 50 ? BindComplete : // 2 + x === 49 ? ParseComplete : // 1 + x === 116 ? ParameterDescription : // t + x === 84 ? RowDescription : // T + x === 82 ? Authentication : // R + x === 110 ? NoData : // n + x === 75 ? BackendKeyData : // K + x === 69 ? ErrorResponse : // E + x === 115 ? PortalSuspended : // s + x === 51 ? CloseComplete : // 3 + x === 71 ? CopyInResponse : // G + x === 78 ? NoticeResponse : // N + x === 72 ? CopyOutResponse : // H + x === 99 ? CopyDone : // c + x === 73 ? EmptyQueryResponse : // I + x === 86 ? FunctionCallResponse : // V + x === 118 ? NegotiateProtocolVersion : // v + x === 87 ? 
CopyBothResponse : // W + /* c8 ignore next */ + UnknownMessage + )(xs) + } + + function DataRow(x) { + let index = 7 + let length + let column + let value + + const row = query.isRaw ? new Array(query.statement.columns.length) : {} + for (let i = 0; i < query.statement.columns.length; i++) { + column = query.statement.columns[i] + length = x.readInt32BE(index) + index += 4 + + value = length === -1 + ? null + : query.isRaw + ? x.slice(index, index += length) + : column.parser === undefined + ? x.toString('utf8', index, index += length) + : column.parser.array === true + ? column.parser(x.toString('utf8', index + 1, index += length)) + : column.parser(x.toString('utf8', index, index += length)) + + query.isRaw + ? (row[i] = value) + : (row[column.name] = transform.value.from ? transform.value.from(value) : value) + } + + query.forEachFn + ? query.forEachFn(transform.row.from ? transform.row.from(row) : row, result) + : (result[rows++] = transform.row.from ? transform.row.from(row) : row) + } + + function ParameterStatus(x) { + const [k, v] = x.toString('utf8', 5, x.length - 1).split(b.N) + backendParameters[k] = v + if (options.parameters[k] !== v) { + options.parameters[k] = v + onparameter && onparameter(k, v) + } + } + + function ReadyForQuery() { + query && query.options.simple && query.resolve(results || result) + query = results = null + result = new Result() + connectTimer.cancel() + + if (initial) { + if (target_session_attrs) { + if (!backendParameters.in_hot_standby || !backendParameters.default_transaction_read_only) + return fetchState() + else if (tryNext(target_session_attrs, backendParameters)) + return terminate() + } + + if (needsTypes) + return fetchArrayTypes() + + execute(initial) + options.shared.retries = retries = initial = 0 + return + } + + while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled) + Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject) + + if (query) // 
Consider opening if able and sent.length < 50 + return + + ending + ? terminate() + : onopen(connection) + + } + + function CommandComplete(x) { + rows = 0 + + for (let i = x.length - 1; i > 0; i--) { + if (x[i] === 32 && x[i + 1] < 58 && result.count === null) + result.count = +x.toString('utf8', i + 1, x.length - 1) + if (x[i - 1] >= 65) { + result.command = x.toString('utf8', 5, i) + result.state = backend + break + } + } + + final && (final(), final = null) + + if (query.options.simple) + return + + if (query.cursorFn) { + result.count && query.cursorFn(result) + write(Sync) + } + + query.resolve(result) + } + + function ParseComplete() { + query.parsing = false + } + + function BindComplete() { + !result.statement && (result.statement = query.statement) + result.columns = query.statement.columns + } + + function ParameterDescription(x) { + const length = x.readUInt16BE(5) + + for (let i = 0; i < length; ++i) + !query.statement.types[i] && (query.statement.types[i] = x.readUInt32BE(7 + i * 4)) + + query.prepare && (statements[query.signature] = query.statement) + query.describeFirst && !query.onlyDescribe && (write(prepared(query)), query.describeFirst = false) + } + + function RowDescription(x) { + if (result.command) { + results = results || [result] + results.push(result = new Result()) + result.count = null + query.statement.columns = null + } + + const length = x.readUInt16BE(5) + let index = 7 + let start + + query.statement.columns = Array(length) + + for (let i = 0; i < length; ++i) { + start = index + while (x[index++] !== 0); + const type = x.readUInt32BE(index + 6) + query.statement.columns[i] = { + name: transform.column.from + ? 
transform.column.from(x.toString('utf8', start, index - 1)) + : x.toString('utf8', start, index - 1), + parser: parsers[type], + type + } + index += 18 + } + + result.statement = query.statement + if (query.onlyDescribe) + return (query.resolve(query.statement), write(Sync)) + } + + async function Authentication(x, type = x.readUInt32BE(5)) { + ( + type === 3 ? AuthenticationCleartextPassword : + type === 5 ? AuthenticationMD5Password : + type === 10 ? SASL : + type === 11 ? SASLContinue : + type === 12 ? SASLFinal : + type !== 0 ? UnknownAuth : + noop + )(x, type) + } + + /* c8 ignore next 5 */ + async function AuthenticationCleartextPassword() { + write( + b().p().str(await Pass()).z(1).end() + ) + } + + async function AuthenticationMD5Password(x) { + write( + b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end() + ) + } + + function SASL() { + b().p().str('SCRAM-SHA-256' + b.N) + const i = b.i + nonce = crypto.randomBytes(18).toString('base64') + write(b.inc(4).str('n,,n=*,r=' + nonce).i32(b.i - i - 4, i).end()) + } + + async function SASLContinue(x) { + const res = x.toString('utf8', 9).split(',').reduce((acc, x) => (acc[x[0]] = x.slice(2), acc), {}) + + const saltedPassword = crypto.pbkdf2Sync( + await Pass(), + Buffer.from(res.s, 'base64'), + parseInt(res.i), 32, + 'sha256' + ) + + const clientKey = hmac(saltedPassword, 'Client Key') + + const auth = 'n=*,r=' + nonce + ',' + + 'r=' + res.r + ',s=' + res.s + ',i=' + res.i + + ',c=biws,r=' + res.r + + serverSignature = hmac(hmac(saltedPassword, 'Server Key'), auth).toString('base64') + + write( + b().p().str('c=biws,r=' + res.r + ',p=' + xor(clientKey, hmac(sha256(clientKey), auth)).toString('base64')).end() + ) + } + + function SASLFinal(x) { + if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature) + return + /* c8 ignore next 5 */ + errored(Errors.generic({ + message: 'The server did not return the correct signature', + code: 
'SASL_SIGNATURE_MISMATCH' + })) + socket.destroy() + } + + function Pass() { + return Promise.resolve(typeof options.pass === 'function' + ? options.pass() + : options.pass + ) + } + + function NoData() { + result.statement = query.statement + result.statement.columns = [] + if (query.onlyDescribe) + return (query.resolve(query.statement), write(Sync)) + } + + function BackendKeyData(x) { + backend.pid = x.readUInt32BE(5) + backend.secret = x.readUInt32BE(9) + } + + async function fetchArrayTypes() { + needsTypes = false + const types = await new Query([` + select b.oid, b.typarray + from pg_catalog.pg_type a + left join pg_catalog.pg_type b on b.oid = a.typelem + where a.typcategory = 'A' + group by b.oid, b.typarray + order by b.oid + `], [], execute) + types.forEach(({ oid, typarray }) => addArrayType(oid, typarray)) + } + + function addArrayType(oid, typarray) { + const parser = options.parsers[oid] + options.shared.typeArrayMap[oid] = typarray + options.parsers[typarray] = (xs) => arrayParser(xs, parser) + options.parsers[typarray].array = true + options.serializers[typarray] = (xs) => arraySerializer(xs, options.serializers[oid]) + } + + function tryNext(x, xs) { + return ( + (x === 'read-write' && xs.default_transaction_read_only === 'on') || + (x === 'read-only' && xs.default_transaction_read_only === 'off') || + (x === 'primary' && xs.in_hot_standby === 'off') || + (x === 'standby' && xs.in_hot_standby === 'on') || + (x === 'prefer-standby' && xs.in_hot_standby === 'off' && options.host[retries]) + ) + } + + function fetchState() { + const query = new Query([` + show transaction_read_only; + select pg_catalog.pg_is_in_recovery() + `], [], execute, null, { simple: true }) + query.resolve = ([[a], [b]]) => { + backendParameters.default_transaction_read_only = a.transaction_read_only + backendParameters.in_hot_standby = b.pg_is_in_recovery ? 
'on' : 'off' + } + query.execute() + } + + function ErrorResponse(x) { + query && (query.cursorFn || query.describeFirst) && write(Sync) + const error = Errors.postgres(parseError(x)) + query && !query.retried && retryRoutines.has(error.routine) + ? retry(query) + : errored(error) + } + + function retry(q) { + delete statements[q.signature] + q.retried = true + execute(q) + } + + function NotificationResponse(x) { + if (!onnotify) + return + + let index = 9 + while (x[index++] !== 0); + onnotify( + x.toString('utf8', 9, index - 1), + x.toString('utf8', index, x.length - 1) + ) + } + + async function PortalSuspended() { + try { + const x = await Promise.resolve(query.cursorFn(result)) + rows = 0 + x === CLOSE + ? write(Close(query.portal)) + : (result = new Result(), write(Execute('', query.cursorRows))) + } catch (err) { + write(Sync) + query.reject(err) + } + } + + function CloseComplete() { + result.count && query.cursorFn(result) + query.resolve(result) + } + + function CopyInResponse() { + stream = new Stream.Writable({ + write(chunk, encoding, callback) { + socket.write(b().d().raw(chunk).end(), callback) + }, + destroy(error, callback) { + callback(error) + socket.write(b().f().str(error + b.N).end()) + }, + final(callback) { + socket.write(b().c().end()) + final = callback + } + }) + query.resolve(stream) + } + + function CopyOutResponse() { + stream = new Stream.Readable({ + read() { socket.resume() } + }) + query.resolve(stream) + } + + /* c8 ignore next 3 */ + function CopyBothResponse() { + stream = new Stream.Readable({ + read() { socket.resume() }, + /* c8 ignore next 11 */ + write(chunk, encoding, callback) { + socket.write(b().d().raw(chunk).end(), callback) + }, + destroy(error, callback) { + callback(error) + socket.write(b().f().str(error + b.N).end()) + }, + final(callback) { + socket.write(b().c().end()) + final = callback + } + }) + query.resolve(stream) + } + + function CopyData(x) { + stream.push(x.slice(5)) || socket.pause() + } + + function 
CopyDone() { + stream.push(null) + } + + function NoticeResponse(x) { + onnotice + ? onnotice(parseError(x)) + : console.log(parseError(x)) // eslint-disable-line + + } + + /* c8 ignore next 3 */ + function EmptyQueryResponse() { + + } + + /* c8 ignore next 3 */ + function FunctionCallResponse() { + errored(Errors.notSupported('FunctionCallResponse')) + } + + /* c8 ignore next 3 */ + function NegotiateProtocolVersion() { + errored(Errors.notSupported('NegotiateProtocolVersion')) + } + + /* c8 ignore next 3 */ + function UnknownMessage(x) { + console.error('Unknown message', x) + } + + /* c8 ignore next 3 */ + function UnknownAuth(x, type) { + console.error('Unknown auth', type) + } + + /* Messages */ + function Bind(parameters, types, statement = '', portal = '') { + let prev + , type + + b().B().str(portal + b.N).str(statement + b.N).i16(0).i16(parameters.length) + + parameters.forEach((x, i) => { + if (x === null) + return b.i32(0xFFFFFFFF) + + type = types[i] + parameters[i] = x = type in options.serializers + ? 
options.serializers[type](x) + : '' + x + + prev = b.i + b.inc(4).str(x).i32(b.i - prev - 4, prev) + }) + + b.i16(0) + + return b.end() + } + + function Parse(str, parameters, types, name = '') { + b().P().str(name + b.N).str(str + b.N).i16(parameters.length) + parameters.forEach((x, i) => b.i32(types[i] || 0)) + return b.end() + } + + function Describe(x, name = '') { + return b().D().str(x).str(name + b.N).end() + } + + function Execute(portal = '', rows = 0) { + return Buffer.concat([ + b().E().str(portal + b.N).i32(rows).end(), + Flush + ]) + } + + function Close(portal = '') { + return Buffer.concat([ + b().C().str('P').str(portal + b.N).end(), + b().S().end() + ]) + } + + function StartupMessage() { + return b().inc(4).i16(3).z(2).str( + Object.entries(Object.assign({ + user, + database, + client_encoding: '\'utf-8\'' + }, + options.connection + )).filter(([, v]) => v).map(([k, v]) => k + b.N + v).join(b.N) + ).z(2).end(0) + } + +} + +function parseError(x) { + const error = {} + let start = 5 + for (let i = 5; i < x.length - 1; i++) { + if (x[i] === 0) { + error[errorFields[x[start]]] = x.toString('utf8', start + 1, i) + start = i + 1 + } + } + return error +} + +function md5(x) { + return crypto.createHash('md5').update(x).digest('hex') +} + +function hmac(key, x) { + return crypto.createHmac('sha256', key).update(x).digest() +} + +function sha256(x) { + return crypto.createHash('sha256').update(x).digest() +} + +function xor(a, b) { + const length = Math.max(a.length, b.length) + const buffer = Buffer.allocUnsafe(length) + for (let i = 0; i < length; i++) + buffer[i] = a[i] ^ b[i] + return buffer +} + +function timer(fn, seconds) { + seconds = typeof seconds === 'function' ? 
seconds() : seconds + if (!seconds) + return { cancel: noop, start: noop } + + let timer + return { + cancel() { + timer && (clearTimeout(timer), timer = null) + }, + start() { + timer && clearTimeout(timer) + timer = setTimeout(done, seconds * 1000, arguments).unref() + } + } + + function done(args) { + fn.apply(null, args) + timer = null + } +} diff --git a/src/errors.js b/src/errors.js new file mode 100644 index 00000000..ed12202d --- /dev/null +++ b/src/errors.js @@ -0,0 +1,53 @@ +export class PostgresError extends Error { + constructor(x) { + super(x.message) + this.name = this.constructor.name + Object.assign(this, x) + } +}; + +export const Errors = { + connection, + postgres, + generic, + notSupported +} + +function connection(x, options, socket) { + const { host, port } = socket || options + const error = Object.assign( + new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))), + { + code: x, + errno: x, + address: options.path || host + }, options.path ? {} : { port: port } + ) + Error.captureStackTrace(error, connection) + return error +} + +function postgres(x) { + const error = new PostgresError(x) + Error.captureStackTrace(error, postgres) + return error +} + +function generic(x) { + const error = Object.assign(new Error(x.message), x) + Error.captureStackTrace(error, generic) + return error +} + +/* c8 ignore next 10 */ +function notSupported(x) { + const error = Object.assign( + new Error(x + ' (B) is not supported'), + { + code: 'MESSAGE_NOT_SUPPORTED', + name: x + } + ) + Error.captureStackTrace(error, notSupported) + return error +} diff --git a/src/index.js b/src/index.js new file mode 100644 index 00000000..43831e91 --- /dev/null +++ b/src/index.js @@ -0,0 +1,540 @@ +import os from 'os' +import fs from 'fs' +import Stream from 'stream' + +import { + mergeUserTypes, + inferType, + Parameter, + Identifier, + Builder, + toPascal, + toCamel, + toKebab, + fromPascal, + fromCamel, + fromKebab, + CLOSE +} from './types.js' + +import 
Connection from './connection.js' +import Query from './query.js' +import Queue from './queue.js' +import { Errors, PostgresError } from './errors.js' +import Subscribe from './subscribe.js' + +Object.assign(Postgres, { + PostgresError, + toPascal, + toCamel, + toKebab, + fromPascal, + fromCamel, + fromKebab, + BigInt +}) + +export default Postgres + +function Postgres(a, b) { + const options = parseOptions(a, b) + , subscribe = Subscribe(Postgres, { ...options }) + + let ending = false + + const queries = Queue() + , connections = [...Array(options.max)].map(() => Connection(options, { onopen, onend, ondrain, onclose })) + , closed = Queue(connections) + , reserved = Queue() + , open = Queue() + , busy = Queue() + , full = Queue() + , ended = Queue() + , connecting = Queue() + , queues = { closed, ended, connecting, reserved, open, busy, full } + + const sql = Sql(handler) + + Object.assign(sql, { + get parameters() { return options.parameters }, + largeObject, + subscribe, + CLOSE, + END: CLOSE, + PostgresError, + options, + listen, + notify, + begin, + end + }) + + return sql + + function Sql(handler, instant) { + handler.debug = options.debug + + Object.assign(sql, { + types: Object.entries(options.types).reduce((acc, [name, type]) => { + acc[name] = (x) => new Parameter(x, type.to) + return acc + }, {}), + unsafe, + array, + json, + file + }) + + return sql + + function sql(strings, ...args) { + const query = strings && Array.isArray(strings.raw) + ? new Query(strings, args, handler, cancel) + : typeof strings === 'string' && !args.length + ? new Identifier(options.transform.column.to ? 
options.transform.column.to(strings) : strings) + : new Builder(strings, args) + instant && query instanceof Query && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + + function unsafe(string, args = [], options = {}) { + arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) + const query = new Query([string], args, handler, cancel, { + prepare: false, + ...options, + simple: 'simple' in options ? options.simple : args.length === 0 + }) + instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + + function file(path, args = [], options = { cache: true }) { + arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) + const query = new Query([], args, (query) => { + fs.readFile(path, 'utf8', (err, string) => { + if (err) + return query.reject(err) + + query.strings = [string] + handler(query) + }) + }, cancel, { + ...options, + simple: 'simple' in options ? options.simple : args.length === 0 + }) + instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + return query + } + } + + async function listen(name, fn) { + const sql = listen.sql || (listen.sql = Postgres({ + ...options, + max: 1, + idle_timeout: null, + max_lifetime: null, + fetch_types: false, + onclose() { + Object.entries(listen.channels).forEach(([channel, { listeners }]) => { + delete listen.channels[channel] + Promise.all(listeners.map(fn => listen(channel, fn).catch(() => { /* noop */ }))) + }) + }, + onnotify(c, x) { + c in listen.channels && listen.channels[c].listeners.forEach(fn => fn(x)) + } + })) + + const channels = listen.channels || (listen.channels = {}) + , exists = name in channels + , channel = exists ? 
channels[name] : (channels[name] = { listeners: [fn] }) + + if (exists) { + channel.listeners.push(fn) + return Promise.resolve({ ...channel.result, unlisten }) + } + + channel.result = await sql`listen ${ sql(name) }` + channel.result.unlisten = unlisten + + return channel.result + + async function unlisten() { + if (name in channels === false) + return + + channel.listeners = channel.listeners.filter(x => x !== fn) + if (channels[name].listeners.length) + return + + delete channels[name] + return sql`unlisten ${ sql(name) }` + } + } + + async function notify(channel, payload) { + return await sql`select pg_notify(${ channel }, ${ '' + payload })` + } + + async function begin(options, fn) { + !fn && (fn = options, options = '') + return new Promise(async(resolve, reject) => { + await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).catch(reject) + + function onexecute(c) { + const queries = Queue() + let savepoints = 0 + + queues[c.state].remove(c) + c.state = 'reserved' + c.reserved = () => queries.length && handler(queries.shift()) + reserved.push(c) + + const sql = Sql(handler, true) + sql.savepoint = savepoint + + start() + + return false + + async function start() { + try { + const xs = fn(sql) + const result = await (Array.isArray(xs) ? Promise.all(xs) : xs) + await sql`commit` + resolve(result) + } catch (error) { + await sql`rollback`.catch(reject) + reject(error) + } + c.reserved = null + onopen(c) + } + + async function savepoint(name, fn) { + if (name && Array.isArray(name.raw)) + return savepoint(sql => sql.apply(sql, arguments)) + + try { + arguments.length === 1 && (fn = name, name = null) + name = 's' + savepoints++ + (name ? '_' + name : '') + await sql`savepoint ${ sql(name) }` + } catch (err) { + reject(err) + } + try { + return await Promise.resolve(fn(sql)) + } catch (err) { + await sql`rollback to ${ sql(name) }` + throw err + } + } + + function handler(query) { + c.state === 'full' + ? 
queries.push(query) + : c.execute(query) + } + } + }) + } + + function largeObject(oid, mode = 0x00020000 | 0x00040000) { + return new Promise(async(resolve, reject) => { + await sql.begin(async sql => { + let finish + !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`) + const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd` + + const lo = { + writable, + readable, + close : () => sql`select lo_close(${ fd })`.then(finish), + tell : () => sql`select lo_tell64(${ fd })`, + read : (x) => sql`select loread(${ fd }, ${ x }) as data`, + write : (x) => sql`select lowrite(${ fd }, ${ x })`, + truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`, + seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`, + size : () => sql` + select + lo_lseek64(${ fd }, location, 0) as position, + seek.size + from ( + select + lo_lseek64($1, 0, 2) as size, + tell.location + from (select lo_tell64($1) as location) tell + ) seek + ` + } + + resolve(lo) + + return new Promise(async r => finish = r) + + async function readable({ + highWaterMark = 2048 * 8, + start = 0, + end = Infinity + } = {}) { + let max = end - start + start && await lo.seek(start) + return new Stream.Readable({ + highWaterMark, + async read(size) { + const l = size > max ? size - max : size + max -= size + const [{ data }] = await lo.read(l) + this.push(data) + if (data.length < size) + this.push(null) + } + }) + } + + async function writable({ + highWaterMark = 2048 * 8, + start = 0 + } = {}) { + start && await lo.seek(start) + return new Stream.Writable({ + highWaterMark, + write(chunk, encoding, callback) { + lo.write(chunk).then(() => callback(), callback) + } + }) + } + }).catch(reject) + }) + } + + function json(x) { + return new Parameter(x, 114) + } + + function array(x, type) { + if (!Array.isArray(x)) + return array(Array.from(arguments)) + + return new Parameter(x, type || (x.length ? 
inferType(x) || 25 : 0), options.shared.typeArrayMap) + } + + function handler(query) { + if (ending) + return query.reject(Errors.connection('CONNECTION_ENDED', options, options)) + + if (open.length) + return go(open, query) + + if (closed.length) + return connect(closed.shift(), query) + + busy.length + ? go(busy, query) + : queries.push(query) + } + + function go(xs, query) { + const c = xs.shift() + return c.execute(query) + ? (c.state = 'busy', busy.push(c)) + : (c.state = 'full', full.push(c)) + } + + function cancel(query) { + return new Promise((resolve, reject) => { + query.state + ? query.active + ? Connection(options, {}).cancel(query.state, resolve, reject) + : query.cancelled = { resolve, reject } + : ( + queries.remove(query), + query.cancelled = true, + query.reject(Errors.generic({ code: '57014', message: 'canceling statement due to user request' })), + resolve() + ) + }) + } + + function end({ timeout = null } = {}) { + if (ending) + return ending + + let timer + return ending = Promise.race([ + new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))), + Promise.all(connections.map(c => c.end()).concat( + listen.sql ? listen.sql.end({ timeout: 0 }) : [], + subscribe.sql ? 
subscribe.sql.end({ timeout: 0 }) : [] + )) + ]).then(() => clearTimeout(timer)) + } + + async function destroy(resolve) { + await Promise.all(connections.map(c => c.terminate())) + while (queries.length) + queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options)) + resolve() + } + + function connect(c, query) { + c.state = 'connecting' + connecting.push(c) + c.connect(query) + } + + function onend(c) { + queues[c.state].remove(c) + c.state = 'ended' + ended.push(c) + } + + function onopen(c) { + queues[c.state].remove(c) + + if (c.reserved) { + c.state = 'reserved' + c.reserved() + reserved.push(c) + return + } + + if (queries.length === 0) + return (c.state = 'open', open.push(c)) + + let max = Math.ceil(queries.length / (connecting.length + 1)) + , ready = true + + while (ready && queries.length && max-- > 0) + ready = c.execute(queries.shift()) + + ready + ? (c.state = 'busy', busy.push(c)) + : (c.state = 'full', full.push(c)) + } + + function ondrain(c) { + full.remove(c) + onopen(c) + } + + function onclose(c) { + queues[c.state].remove(c) + c.state = 'closed' + c.reserved = null + options.onclose && options.onclose(c.id) + queries.length + ? connect(c, queries.shift()) + : queues.closed.push(c) + } +} + +function parseOptions(a, b) { + if (a && a.shared) + return a + + const env = process.env // eslint-disable-line + , o = (typeof a === 'string' ? b : a) || {} + , { url, multihost } = parseUrl(a, env) + , query = url.searchParams + , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost' + , port = o.port || url.port || env.PGPORT || 5432 + , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername() + + return Object.assign({ + host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]), + port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)), + path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' 
+ port, + database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user, + user : user, + pass : o.pass || o.password || url.password || env.PGPASSWORD || '', + max : o.max || query.get('max') || 10, + types : o.types || {}, + ssl : o.ssl || parseSSL(query.get('sslmode') || query.get('ssl')) || false, + idle_timeout : o.idle_timeout || query.get('idle_timeout') || env.PGIDLE_TIMEOUT || warn(o.timeout), + connect_timeout : o.connect_timeout || query.get('connect_timeout') || env.PGCONNECT_TIMEOUT || 30, + max_lifetime : o.max_lifetime || url.max_lifetime || max_lifetime, + max_pipeline : o.max_pipeline || url.max_pipeline || 100, + backoff : o.backoff || url.backoff || backoff, + keep_alive : o.keep_alive || url.keep_alive || 60, + prepare : 'prepare' in o ? o.prepare : 'no_prepare' in o ? !o.no_prepare : true, + onnotice : o.onnotice, + onnotify : o.onnotify, + onclose : o.onclose, + onparameter : o.onparameter, + transform : parseTransform(o.transform || {}), + connection : Object.assign({ application_name: 'postgres.js' }, o.connection), + target_session_attrs: tsa(o, url, env), + debug : o.debug, + fetch_types : 'fetch_types' in o ? o.fetch_types : true, + parameters : {}, + shared : { retries: 0, typeArrayMap: {} } + }, + mergeUserTypes(o.types) + ) +} + +function tsa(o, url, env) { + const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS + if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x)) + return x + + throw new Error('target_session_attrs ' + x + ' is not supported') +} + +function backoff(retries) { + return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20) +} + +function max_lifetime() { + return 60 * (30 + Math.random() * 30) +} + +function parseTransform(x) { + return { + column: { + from: typeof x.column === 'function' ? 
x.column : x.column && x.column.from, + to: x.column && x.column.to + }, + value: { + from: typeof x.value === 'function' ? x.value : x.value && x.value.from, + to: x.value && x.value.to + }, + row: { + from: typeof x.row === 'function' ? x.row : x.row && x.row.from, + to: x.row && x.row.to + } + } +} + +function parseSSL(x) { + return x !== 'disable' && x !== 'false' && x +} + +function parseUrl(url) { + if (typeof url !== 'string') + return { url: { searchParams: new Map() } } + + let host = url + host = host.slice(host.indexOf('://') + 3) + host = host.split(/[?/]/)[0] + host = host.slice(host.indexOf('@') + 1) + + return { + url: new URL(url.replace(host, host.split(',')[0])), + multihost: host.indexOf(',') > -1 && host + } +} + +function warn(x) { + typeof x !== 'undefined' && console.log('The timeout option is deprecated, use idle_timeout instead') // eslint-disable-line + return x +} + +function osUsername() { + try { + return os.userInfo().username // eslint-disable-line + } catch (_) { + return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line + } +} diff --git a/src/query.js b/src/query.js new file mode 100644 index 00000000..1b5826c5 --- /dev/null +++ b/src/query.js @@ -0,0 +1,141 @@ +const originCache = new Map() + +export default class Query extends Promise { + constructor(strings, args, handler, canceller, options = {}) { + let resolve + , reject + + super((a, b) => { + resolve = a + reject = b + }) + + this.tagged = Array.isArray(strings.raw) + this.strings = strings + this.args = args + this.handler = handler + this.canceller = canceller + this.options = options + + this.state = null + this.statement = null + + this.resolve = x => (this.active = false, resolve(x)) + this.reject = x => (this.active = false, reject(x)) + + this.active = false + this.cancelled = null + this.executed = false + this.signature = '' + + this.origin = handler.debug ? 
new Error().stack : cachedError(this.strings) + } + + static get [Symbol.species]() { + return Promise + } + + cancel() { + return this.canceller && (this.canceller(this), this.canceller = null) + } + + async readable() { + this.options.simple = true + this.options.prepare = false + this.streaming = true + return this + } + + async writable() { + this.options.simple = true + this.options.prepare = false + this.streaming = true + return this + } + + cursor(rows = 1, fn) { + this.options.simple = false + if (typeof rows === 'function') { + fn = rows + rows = 1 + } + + this.cursorRows = rows + + if (typeof fn === 'function') + return (this.cursorFn = fn, this) + + let prev + return { + [Symbol.asyncIterator]: () => ({ + next: () => { + prev && prev() + const promise = new Promise((resolve, reject) => { + this.cursorFn = x => { + resolve({ value: x, done: false }) + return new Promise(r => prev = r) + } + this.resolve = () => (this.active = false, resolve({ done: true })) + this.reject = x => (this.active = false, reject(x)) + }) + this.execute() + return promise + } + }) + } + } + + describe() { + this.onlyDescribe = true + return this + } + + stream() { + throw new Error('.stream has been renamed to .forEach') + } + + forEach(fn) { + this.forEachFn = fn + return this + } + + raw() { + this.isRaw = true + return this + } + + handle() { + !this.executed && this.handler((this.executed = true, this)) + } + + execute() { + this.handle() + return this + } + + then() { + this.handle() + return super.then.apply(this, arguments) + } + + catch() { + this.handle() + return super.catch.apply(this, arguments) + } + + finally() { + this.handle() + return super.finally.apply(this, arguments) + } +} + +function cachedError(xs) { + if (originCache.has(xs)) + return originCache.get(xs) + + const x = Error.stackTraceLimit + Error.stackTraceLimit = 4 + originCache.set(xs, new Error().stack) + Error.stackTraceLimit = x + return originCache.get(xs) +} diff --git a/src/queue.js 
b/src/queue.js new file mode 100644 index 00000000..c4ef9716 --- /dev/null +++ b/src/queue.js @@ -0,0 +1,31 @@ +export default Queue + +function Queue(initial = []) { + let xs = initial.slice() + let index = 0 + + return { + get length() { + return xs.length - index + }, + remove: (x) => { + const index = xs.indexOf(x) + return index === -1 + ? null + : (xs.splice(index, 1), x) + }, + push: (x) => (xs.push(x), x), + shift: () => { + const out = xs[index++] + + if (index === xs.length) { + index = 0 + xs = [] + } else { + xs[index - 1] = undefined + } + + return out + } + } +} diff --git a/src/result.js b/src/result.js new file mode 100644 index 00000000..31014284 --- /dev/null +++ b/src/result.js @@ -0,0 +1,16 @@ +export default class Result extends Array { + constructor() { + super() + Object.defineProperties(this, { + count: { value: null, writable: true }, + state: { value: null, writable: true }, + command: { value: null, writable: true }, + columns: { value: null, writable: true }, + statement: { value: null, writable: true } + }) + } + + static get [Symbol.species]() { + return Array + } +} diff --git a/src/subscribe.js b/src/subscribe.js new file mode 100644 index 00000000..da238d7f --- /dev/null +++ b/src/subscribe.js @@ -0,0 +1,209 @@ +export default function Subscribe(postgres, options) { + const listeners = new Map() + + let connection + + return async function subscribe(event, fn) { + event = parseEvent(event) + + options.max = 1 + options.connection = { + ...options.connection, + replication: 'database' + } + + const sql = postgres(options) + + !connection && (subscribe.sql = sql, connection = init(sql, options.publications)) + + const fns = listeners.has(event) + ? 
listeners.get(event).add(fn) + : listeners.set(event, new Set([fn])) + + const unsubscribe = () => { + fns.delete(fn) + fns.size === 0 && listeners.delete(event) + } + + return connection.then(() => ({ unsubscribe })) + } + + async function init(sql, publications = 'alltables') { + if (!publications) + throw new Error('Missing publication names') + + const slot = 'postgresjs_' + Math.random().toString(36).slice(2) + const [x] = await sql.unsafe( + `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT` + ) + + const stream = await sql.unsafe( + `START_REPLICATION SLOT ${ slot } LOGICAL ${ + x.consistent_point + } (proto_version '1', publication_names '${ publications }')` + ).writable() + + const state = { + lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex'))) + } + + stream.on('data', data) + + function data(x) { + if (x[0] === 0x77) + parse(x.slice(25), state, sql.options.parsers, handle) + else if (x[0] === 0x6b && x[17]) + pong() + } + + function handle(a, b) { + const path = b.relation.schema + '.' 
+ b.relation.table + call('*', a, b) + call('*:' + path, a, b) + b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b) + call(b.command, a, b) + call(b.command + ':' + path, a, b) + b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b) + } + + function pong() { + const x = Buffer.alloc(34) + x[0] = 'r'.charCodeAt(0) + x.fill(state.lsn, 1) + x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25) + stream.write(x) + } + } + + function call(x, a, b) { + listeners.has(x) && listeners.get(x).forEach(fn => fn(a, b, x)) + } +} + +function Time(x) { + return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000))) +} + +function parse(x, state, parsers, handle) { + const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc) + + Object.entries({ + R: x => { // Relation + let i = 1 + const r = state[x.readUInt32BE(i)] = { + schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog', + table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))), + columns: Array(x.readUInt16BE(i += 2)), + keys: [] + } + i += 2 + + let columnIndex = 0 + , column + + while (i < x.length) { + column = r.columns[columnIndex++] = { + key: x[i++], + name: String(x.slice(i, i = x.indexOf(0, i))), + type: x.readUInt32BE(i += 1), + parser: parsers[x.readUInt32BE(i)], + atttypmod: x.readUInt32BE(i += 4) + } + + column.key && r.keys.push(column) + i += 4 + } + }, + Y: () => { /* noop */ }, // Type + O: () => { /* noop */ }, // Origin + B: x => { // Begin + state.date = Time(x.readBigInt64BE(9)) + state.lsn = x.slice(1, 9) + }, + I: x => { // Insert + let i = 1 + const relation = state[x.readUInt32BE(i)] + const row = {} + tuples(x, row, relation.columns, i += 7) + + handle(row, { + command: 'insert', + relation + }) + }, + D: x => { // Delete + let i = 1 + const relation = state[x.readUInt32BE(i)] + i += 4 + const key = x[i] === 75 + const row = key || x[i] === 79 + ? 
{} + : null + + tuples(x, row, key ? relation.keys : relation.columns, i += 3) + + handle(row, { + command: 'delete', + relation, + key + }) + }, + U: x => { // Update + let i = 1 + const relation = state[x.readUInt32BE(i)] + i += 4 + const key = x[i] === 75 + const old = key || x[i] === 79 + ? {} + : null + + old && (i = tuples(x, old, key ? relation.keys : relation.columns, ++i)) + + const row = {} + i = tuples(x, row, relation.columns, i += 3) + + handle(row, { + command: 'update', + relation, + key, + old + }) + }, + T: () => { /* noop */ }, // Truncate, + C: () => { /* noop */ } // Commit + }).reduce(char, {})[x[0]](x) +} + +function tuples(x, row, columns, xi) { + let type + , column + + for (let i = 0; i < columns.length; i++) { + type = x[xi++] + column = columns[i] + row[column.name] = type === 110 // n + ? null + : type === 117 // u + ? undefined + : column.parser === undefined + ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)) + : column.parser.array === true + ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi))) + : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))) + } + + return xi +} + +function parseEvent(x) { + const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i) || [] + + if (!xs) + throw new Error('Malformed subscribe pattern: ' + x) + + const [, command, path, key] = xs + + return (command || '*') + + (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '') + + (key ? 
'=' + key : '') +} diff --git a/src/types.js b/src/types.js new file mode 100644 index 00000000..9eb76a4f --- /dev/null +++ b/src/types.js @@ -0,0 +1,298 @@ +import Query from './query.js' +import { Errors } from './errors.js' + +export const types = { + string: { + to: 25, + from: null, // defaults to string + serialize: x => '' + x + }, + number: { + to: 0, + from: [21, 23, 26, 700], + serialize: x => '' + x, + parse: x => +x + }, + json: { + to: 114, + from: [114, 3802], + serialize: x => JSON.stringify(x), + parse: x => JSON.parse(x) + }, + boolean: { + to: 16, + from: 16, + serialize: x => x === true ? 't' : 'f', + parse: x => x === 't' + }, + date: { + to: 1184, + from: [1082, 1114, 1184], + serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(), + parse: x => new Date(x) + }, + bytea: { + to: 17, + from: 17, + serialize: x => '\\x' + Buffer.from(x).toString('hex'), + parse: x => Buffer.from(x.slice(2), 'hex') + } +} + +export const BigInt = { + to: 1700, + from: [20, 701, 1700], + parse: x => BigInt(x), // eslint-disable-line + serialize: x => x.toString() +} + +class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }} + +export class Identifier extends NotTagged { + constructor(value) { + super() + this.value = escapeIdentifier(value) + } +}; + +export class Parameter extends NotTagged { + constructor(value, type, array) { + super() + this.value = value + this.type = type + this.array = array + } +}; + +export class Builder extends NotTagged { + constructor(first, rest) { + super() + this.first = first + this.rest = rest + } + + build(before, parameters, types, transform) { + const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop() + if (keyword.i === -1) + throw new Error('WTF') + + return keyword.fn(this.first, this.rest, parameters, types, transform) + } +}; + +export function handleValue(x, parameters, types) { + if (Array.isArray(x)) + return x.map(x => 
handleValue(x, parameters, types)).join(',') + + const value = x instanceof Parameter ? x.value : x + if (value === undefined) + throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) + + return '$' + (types.push( + x instanceof Parameter + ? (parameters.push(x.value), x.array + ? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value) + : x.type + ) + : (parameters.push(x), inferType(x)) + )) +} + +export const CLOSE = {} + +const defaultHandlers = typeHandlers(types) + +function valuesBuilder(first, parameters, types, transform, columns) { + let value + return first.map(row => + '(' + columns.map(column => { + value = row[column] + return ( + value instanceof Query ? value.strings[0] : + value instanceof Identifier ? value.value : + handleValue(value, parameters, types) + ) + }).join(',') + ')' + ).join(',') +} + +const builders = Object.entries({ + valuesBuilder, + + update(first, rest, parameters, types, transform) { + return (rest.length ? rest.flat() : Object.keys(first)).map(x => + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + + '=' + handleValue(first[x], parameters, types) + ) + }, + + select(first, rest, parameters, types, transform) { + typeof first === 'string' && (first = [first].concat(rest)) + if (Array.isArray(first)) + return first.map(x => escapeIdentifier(transform.column.to ? transform.column.to(x) : x)).join(',') + + let value + const columns = rest.length ? rest.flat() : Object.keys(first) + return columns.map(x => { + value = first[x] + return ( + value instanceof Query ? value.strings[0] : + value instanceof Identifier ? value.value : + handleValue(value, parameters, types) + ) + ' as ' + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + }).join(',') + }, + + values(first, rest, parameters, types, transform) { + const multi = Array.isArray(first[0]) + const columns = rest.length ? rest.flat() : Object.keys(multi ? 
first[0] : first) + return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) + }, + + insert(first, rest, parameters, types, transform) { + const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first) + return '(' + columns.map(x => + escapeIdentifier(transform.column.to ? transform.column.to(x) : x) + ).join(',') + ')values' + + valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns) + } +}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x), fn])) + +function notTagged() { + throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) +} + +export const serializers = defaultHandlers.serializers +export const parsers = defaultHandlers.parsers + +export const END = {} + +function firstIsString(x) { + if (Array.isArray(x)) + return firstIsString(x[0]) + return typeof x === 'string' ? 1009 : 0 +} + +export const mergeUserTypes = function(types) { + const user = typeHandlers(types || {}) + return { + serializers: Object.assign({}, serializers, user.serializers), + parsers: Object.assign({}, parsers, user.parsers) + } +} + +function typeHandlers(types) { + return Object.keys(types).reduce((acc, k) => { + types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse) + acc.serializers[types[k].to] = types[k].serialize + types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize) + return acc + }, { parsers: {}, serializers: {} }) +} + +export const escapeIdentifier = function escape(str) { + return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"' +} + +export const inferType = function inferType(x) { + return ( + x instanceof Parameter ? x.type : + x instanceof Date ? 1184 : + x instanceof Uint8Array ? 17 : + (x === true || x === false) ? 16 : + typeof x === 'bigint' ? 1700 : + Array.isArray(x) ? 
inferType(x[0]) : + 0 + ) +} + +const escapeBackslash = /\\/g +const escapeQuote = /"/g + +function arrayEscape(x) { + return x + .replace(escapeBackslash, '\\\\') + .replace(escapeQuote, '\\"') +} + +export const arraySerializer = function arraySerializer(xs, serializer) { + if (!xs.length) + return '{}' + + const first = xs[0] + + if (Array.isArray(first) && !first.type) + return '{' + xs.map(x => arraySerializer(x, serializer)).join(',') + '}' + + return '{' + xs.map(x => + '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"' + ).join(',') + '}' +} + +const arrayParserState = { + i: 0, + char: null, + str: '', + quoted: false, + last: 0 +} + +export const arrayParser = function arrayParser(x, parser) { + arrayParserState.i = arrayParserState.last = 0 + return arrayParserLoop(arrayParserState, x, parser) +} + +function arrayParserLoop(s, x, parser) { + const xs = [] + for (; s.i < x.length; s.i++) { + s.char = x[s.i] + if (s.quoted) { + if (s.char === '\\') { + s.str += x[++s.i] + } else if (s.char === '"') { + xs.push(parser ? parser(s.str) : s.str) + s.str = '' + s.quoted = x[s.i + 1] === '"' + s.last = s.i + 2 + } else { + s.str += s.char + } + } else if (s.char === '"') { + s.quoted = true + } else if (s.char === '{') { + s.last = ++s.i + xs.push(arrayParserLoop(s, x, parser)) + } else if (s.char === '}') { + s.quoted = false + s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) + s.last = s.i + 1 + break + } else if (s.char === ',' && s.p !== '}' && s.p !== '"') { + xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i)) + s.last = s.i + 1 + } + s.p = s.char + } + s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1)) + return xs +} + +export const toCamel = x => { + let str = x[0] + for (let i = 1; i < x.length; i++) + str += x[i] === '_' ? 
x[++i].toUpperCase() : x[i] + return str +} + +export const toPascal = x => { + let str = x[0].toUpperCase() + for (let i = 1; i < x.length; i++) + str += x[i] === '_' ? x[++i].toUpperCase() : x[i] + return str +} + +export const toKebab = x => x.replace(/_/g, '-') + +export const fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase() +export const fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase() +export const fromKebab = x => x.replace(/-/g, '_') diff --git a/tests/bootstrap.js b/tests/bootstrap.js index e25cc862..e90bce4d 100644 --- a/tests/bootstrap.js +++ b/tests/bootstrap.js @@ -1,23 +1,29 @@ -const cp = require('child_process') +import { spawnSync } from 'child_process' -exec('psql -c "create user postgres_js_test"') -exec('psql -c "alter system set password_encryption=md5"') -exec('psql -c "select pg_reload_conf()"') -exec('psql -c "create user postgres_js_test_md5 with password \'postgres_js_test_md5\'"') -exec('psql -c "alter system set password_encryption=\'scram-sha-256\'"') -exec('psql -c "select pg_reload_conf()"') -exec('psql -c "create user postgres_js_test_scram with password \'postgres_js_test_scram\'"') +exec('psql', ['-c', 'alter system set ssl=on']) +exec('psql', ['-c', 'create user postgres_js_test']) +exec('psql', ['-c', 'alter system set password_encryption=md5']) +exec('psql', ['-c', 'select pg_reload_conf()']) +exec('psql', ['-c', 'create user postgres_js_test_md5 with password \'postgres_js_test_md5\'']) +exec('psql', ['-c', 'alter system set password_encryption=\'scram-sha-256\'']) +exec('psql', ['-c', 'select pg_reload_conf()']) +exec('psql', ['-c', 'create user postgres_js_test_scram with password \'postgres_js_test_scram\'']) -cp.execSync('dropdb postgres_js_test;createdb postgres_js_test') -;['postgres_js_test', 'postgres_js_test', 'postgres_js_test', 'postgres_js_test'].forEach(x => - cp.execSync('psql -c "grant all on database postgres_js_test to ' + x + '"') -) +exec('dropdb', 
['postgres_js_test']) +exec('createdb', ['postgres_js_test']) +exec('psql', ['-c', 'grant all on database postgres_js_test to postgres_js_test']) -function exec(cmd) { - try { - cp.execSync(cmd, { stdio: 'pipe', encoding: 'utf8' }) - } catch (err) { - if (err.stderr.indexOf('already exists') === -1) - throw err - } +export function exec(cmd, args) { + const { stderr } = spawnSync(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) + throw stderr +} + +async function execAsync(cmd, args) { + let stderr = '' + const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + cp.stderr.on('data', x => stderr += x) + await new Promise(x => cp.on('exit', x)) + if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) + throw new Error(stderr) } diff --git a/tests/index.js b/tests/index.js index ab897273..4d5f7ce0 100644 --- a/tests/index.js +++ b/tests/index.js @@ -1,17 +1,21 @@ /* eslint no-console: 0 */ -require('./bootstrap.js') +import { exec } from './bootstrap.js' -const { t, not, ot } = require('./test.js') // eslint-disable-line -const cp = require('child_process') -const path = require('path') -const net = require('net') -const fs = require('fs') +import { t, nt, ot } from './test.js' // eslint-disable-line +import cp from 'child_process' +import path from 'path' +import net from 'net' +import fs from 'fs' +import crypto from 'crypto' /** @type {import('../types')} */ -const postgres = require('../lib') +import postgres from '../src/index.js' const delay = ms => new Promise(r => setTimeout(r, ms)) +const rel = x => new URL(x, import.meta.url) +const idle_timeout = 1 + const login = { user: 'postgres_js_test' } @@ -30,15 +34,15 @@ const options = { db: 'postgres_js_test', user: login.user, pass: login.pass, - idle_timeout: 0.2, - debug: false, + idle_timeout, + connect_timeout: 1, max: 1 } const sql = postgres(options) t('Connects with no 
options', async() => { - const sql = postgres() + const sql = postgres({ max: 1 }) const result = (await sql`select 1 as x`)[0].x await sql.end() @@ -72,7 +76,7 @@ t('Create table', async() => ['CREATE TABLE', (await sql`create table test(int int)`).command, await sql`drop table test`] ) -t('Drop table', async() => { +t('Drop table', { timeout: 2 }, async() => { await sql`create table test(int int)` return ['DROP TABLE', (await sql`drop table test`).command] }) @@ -103,12 +107,22 @@ t('Date', async() => { }) t('Json', async() => { - const x = (await sql`select ${ sql.json({ a: 1, b: 'hello' }) } as x`)[0].x - return [true, x.a === 1 && x.b === 'hello'] + const x = (await sql`select ${ sql.json({ a: 'hello', b: 42 }) } as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('implicit json', async() => { + const x = (await sql`select ${ { a: 'hello', b: 42 } }::json as x`)[0].x + return ['hello,42', [x.a, x.b].join()] +}) + +t('implicit jsonb', async() => { + const x = (await sql`select ${ { a: 'hello', b: 42 } }::jsonb as x`)[0].x + return ['hello,42', [x.a, x.b].join()] }) t('Empty array', async() => - [true, Array.isArray((await sql`select ${ sql.array([]) }::int[] as x`)[0].x)] + [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)] ) t('Array of Integer', async() => @@ -171,7 +185,7 @@ t('Transaction throws on uncaught savepoint', async() => { await sql`insert into test values(2)` throw new Error('fail') }) - }).catch(() => 'fail')), await sql`drop table test`] + }).catch((err) => err.message)), await sql`drop table test`] }) t('Transaction throws on uncaught named savepoint', async() => { @@ -179,7 +193,7 @@ t('Transaction throws on uncaught named savepoint', async() => { return ['fail', (await sql.begin(async sql => { await sql`insert into test values(1)` - await sql.savepoint('watpoint', async sql => { + await sql.savepoit('watpoint', async sql => { await sql`insert into test values(2)` throw new Error('fail') }) @@ -211,6 
+225,14 @@ t('Savepoint returns Result', async() => { return [1, result[0].x] }) +t('Transaction requests are executed implicitly', async() => [ + 'testing', + (await sql.begin(async sql => { + sql`select set_config('postgres_js.test', 'testing', true)` + return await sql`select current_setting('postgres_js.test') as x` + }))[0].x +]) + t('Parallel transactions', async() => { await sql`create table test (a int)` return ['11', (await Promise.all([ @@ -272,7 +294,7 @@ t('Throw syntax error', async() => t('Connect using uri', async() => [true, await new Promise((resolve, reject) => { const sql = postgres('postgres://' + login.user + ':' + (login.pass || '') + '@localhost:5432/' + options.db, { - idle_timeout: options.idle_timeout + idle_timeout }) sql`select 1`.then(() => resolve(true), reject) })] @@ -281,7 +303,7 @@ t('Connect using uri', async() => t('Fail with proper error on no host', async() => ['ECONNREFUSED', (await new Promise((resolve, reject) => { const sql = postgres('postgres://localhost:33333/' + options.db, { - idle_timeout: options.idle_timeout + idle_timeout }) sql`select 1`.then(reject, resolve) })).code] @@ -291,7 +313,7 @@ t('Connect using SSL', async() => [true, (await new Promise((resolve, reject) => { postgres({ ssl: { rejectUnauthorized: false }, - idle_timeout: options.idle_timeout + idle_timeout })`select 1`.then(() => resolve(true), reject) }))] ) @@ -300,27 +322,39 @@ t('Connect using SSL require', async() => [true, (await new Promise((resolve, reject) => { postgres({ ssl: 'require', - idle_timeout: options.idle_timeout + idle_timeout })`select 1`.then(() => resolve(true), reject) }))] ) t('Connect using SSL prefer', async() => { - cp.execSync('psql -c "alter system set ssl=off"') - cp.execSync('psql -c "select pg_reload_conf()"') + await exec('psql', ['-c', 'alter system set ssl=off']) + await exec('psql', ['-c', 'select pg_reload_conf()']) const sql = postgres({ ssl: 'prefer', - idle_timeout: options.idle_timeout + idle_timeout }) return 
[ 1, (await sql`select 1 as x`)[0].x, - cp.execSync('psql -c "alter system set ssl=on"'), - cp.execSync('psql -c "select pg_reload_conf()"') + await exec('psql', ['-c', 'alter system set ssl=on']), + await exec('psql', ['-c', 'select pg_reload_conf()']) ] }) +t('Reconnect using SSL', { timeout: 2 }, async() => { + const sql = postgres({ + ssl: 'require', + idle_timeout: 0.1 + }) + + await sql`select 1` + await delay(200) + + return [1, (await sql`select 1 as x`)[0].x] +}) + t('Login without password', async() => { return [true, (await postgres({ ...options, ...login })`select true as x`)[0].x] }) @@ -334,7 +368,7 @@ t('Login using scram-sha-256', async() => { }) t('Parallel connections using scram-sha-256', { - timeout: 2000 + timeout: 2 }, async() => { const sql = postgres({ ...options, ...login_scram }) return [true, (await Promise.all([ @@ -397,32 +431,32 @@ t('Point type array', async() => { }) t('sql file', async() => - [1, (await sql.file(path.join(__dirname, 'select.sql')))[0].x] + [1, (await sql.file(rel('select.sql')))[0].x] ) -t('sql file can stream', async() => { +t('sql file has forEach', async() => { let result await sql - .file(path.join(__dirname, 'select.sql'), { cache: false }) - .stream(({ x }) => result = x) + .file(rel('select.sql'), { cache: false }) + .forEach(({ x }) => result = x) return [1, result] }) t('sql file throws', async() => - ['ENOENT', (await sql.file('./selectomondo.sql').catch(x => x.code))] + ['ENOENT', (await sql.file(rel('selectomondo.sql')).catch(x => x.code))] ) t('sql file cached', async() => { - await sql.file(path.join(__dirname, 'select.sql')) + await sql.file(rel('select.sql')) await delay(20) - return [1, (await sql.file(path.join(__dirname, 'select.sql')))[0].x] + return [1, (await sql.file(rel('select.sql')))[0].x] }) t('Parameters in file', async() => { const result = await sql.file( - path.join(__dirname, 'select-param.sql'), + rel('select-param.sql'), ['hello'] ) return ['hello', result[0].x] @@ -453,7 +487,8 @@ 
t('Connection ended error', async() => { t('Connection end does not cancel query', async() => { const sql = postgres(options) - const promise = sql`select 1 as x` + const promise = sql`select 1 as x`.execute() + sql.end() return [1, (await promise)[0].x] @@ -533,6 +568,7 @@ t('listen and notify', async() => { return ['world', await new Promise((resolve, reject) => sql.listen(channel, resolve) .then(() => sql.notify(channel, 'world')) + .then(() => delay(20)) .catch(reject) .then(sql.end) )] @@ -570,41 +606,43 @@ t('listen and notify with weird name', async() => { sql.listen(channel, resolve) .then(() => sql.notify(channel, 'world')) .catch(reject) + .then(() => delay(20)) .then(sql.end) )] }) t('listen and notify with upper case', async() => { + const sql = postgres(options) let result - const { unlisten } = await sql.listen('withUpperChar', x => result = x) + await sql.listen('withUpperChar', x => result = x) sql.notify('withUpperChar', 'works') await delay(50) return [ 'works', result, - unlisten() + sql.end() ] }) -t('listen reconnects', async() => { - const listener = postgres(options) +t('listen reconnects', { timeout: 4 }, async() => { + const sql = postgres(options) , xs = [] - const { state: { pid } } = await listener.listen('test', x => xs.push(x)) + const { state: { pid } } = await sql.listen('test', x => xs.push(x)) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` - await delay(50) + await delay(200) await sql.notify('test', 'b') - await delay(50) - listener.end() + await delay(200) + sql.end() return ['ab', xs.join('')] }) -t('listen reconnects after connection error', { timeout: 2000 }, async() => { +t('listen reconnects after connection error', { timeout: 3 }, async() => { const sql = postgres() , xs = [] @@ -613,11 +651,7 @@ t('listen reconnects after connection error', { timeout: 2000 }, async() => { const { state: { pid } } = await sql.listen('test', x => xs.push(x)) await sql.notify('test', 'a') await sql`select 
pg_terminate_backend(${ pid }::int)` - - cp.execSync('pg_ctl stop -D "' + a + '"') - await delay(50) - cp.execSync('pg_ctl start -D "' + a + '" -w -l "' + a + '/postgresql.log"') - await delay(50) + await delay(1000) await sql.notify('test', 'b') await delay(50) @@ -627,64 +661,64 @@ t('listen reconnects after connection error', { timeout: 2000 }, async() => { }) t('listen result reports correct connection state after reconnection', async() => { - const listener = postgres(options) + const sql = postgres(options) , xs = [] - const result = await listener.listen('test', x => xs.push(x)) + const result = await sql.listen('test', x => xs.push(x)) const initialPid = result.state.pid await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ initialPid }::int)` await delay(50) - listener.end() + sql.end() return [result.state.pid !== initialPid, true] }) t('unlisten removes subscription', async() => { - const listener = postgres(options) + const sql = postgres(options) , xs = [] - const { unlisten } = await listener.listen('test', x => xs.push(x)) - await listener.notify('test', 'a') + const { unlisten } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') await delay(50) await unlisten() - await listener.notify('test', 'b') + await sql.notify('test', 'b') await delay(50) - listener.end() + sql.end() return ['a', xs.join('')] }) t('listen after unlisten', async() => { - const listener = postgres(options) + const sql = postgres(options) , xs = [] - const { unlisten } = await listener.listen('test', x => xs.push(x)) - await listener.notify('test', 'a') + const { unlisten } = await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'a') await delay(50) await unlisten() - await listener.notify('test', 'b') + await sql.notify('test', 'b') await delay(50) - await listener.listen('test', x => xs.push(x)) - await listener.notify('test', 'c') + await sql.listen('test', x => xs.push(x)) + await sql.notify('test', 'c') await delay(50) 
- listener.end() + sql.end() return ['ac', xs.join('')] }) t('multiple listeners and unlisten one', async() => { - const listener = postgres(options) + const sql = postgres(options) , xs = [] - await listener.listen('test', x => xs.push('1', x)) - const s2 = await listener.listen('test', x => xs.push('2', x)) - await listener.notify('test', 'a') + await sql.listen('test', x => xs.push('1', x)) + const s2 = await sql.listen('test', x => xs.push('2', x)) + await sql.notify('test', 'a') await delay(50) await s2.unlisten() - await listener.notify('test', 'b') + await sql.notify('test', 'b') await delay(50) - listener.end() + sql.end() return ['1a2a1b', xs.join('')] }) @@ -777,22 +811,24 @@ t('little bobby tables', async() => { }) t('Connection errors are caught using begin()', { - timeout: 20000 + timeout: 2 }, async() => { let error try { - const sql = postgres({ host: 'wat' }) + const sql = postgres({ host: 'wat', port: 1337 }) await sql.begin(async(sql) => { await sql`insert into test (label, value) values (${1}, ${2})` }) - - await sql.end() } catch (err) { error = err } - return ['ENOTFOUND', error.code] + return [ + true, + error.code === 'ENOTFOUND' || + error.message === 'failed to lookup address information: nodename nor servname provided, or not known' + ] }) t('dynamic column name', async() => { @@ -877,6 +913,22 @@ t('dynamic select args', async() => { return ['yay', (await sql`select ${ sql('a', 'b') } from test`)[0].b, await sql`drop table test`] }) +t('dynamic values single row', async() => { + const [{ b }] = await sql` + select * from (values ${ sql(['a', 'b', 'c']) }) AS x(a, b, c) + ` + + return ['b', b] +}) + +t('dynamic values multi row', async() => { + const [_, { b }] = await sql` + select * from (values ${ sql([['a', 'b', 'c'],['a', 'b', 'c']]) }) AS x(a, b, c) + ` + + return ['b', b] +}) + t('connection parameters', async() => { const sql = postgres({ ...options, @@ -952,25 +1004,25 @@ t('bytea serializes and parses', async() => { await 
sql`insert into test values (${ buf })` return [ - 0, - Buffer.compare(buf, (await sql`select x from test`)[0].x), + buf.toString(), + (await sql`select x from test`)[0].x.toString(), await sql`drop table test` ] }) -t('Stream works', async() => { +t('forEach works', async() => { let result - await sql`select 1 as x`.stream(({ x }) => result = x) + await sql`select 1 as x`.forEach(({ x }) => result = x) return [1, result] }) -t('Stream returns empty array', async() => { - return [0, (await sql`select 1 as x`.stream(() => { /* noop */ })).length] +t('forEach returns empty array', async() => { + return [0, (await sql`select 1 as x`.forEach(() => { /* noop */ })).length] }) t('Cursor works', async() => { const order = [] - await sql`select 1 as x union select 2 as x`.cursor(async(x) => { + await sql`select 1 as x union select 2 as x`.cursor(async([x]) => { order.push(x.x + 'a') await delay(100) order.push(x.x + 'b') @@ -980,7 +1032,7 @@ t('Cursor works', async() => { t('Unsafe cursor works', async() => { const order = [] - await sql.unsafe('select 1 as x union select 2 as x').cursor(async(x) => { + await sql.unsafe('select 1 as x union select 2 as x').cursor(async([x]) => { order.push(x.x + 'a') await delay(100) order.push(x.x + 'b') @@ -1014,16 +1066,16 @@ t('Cursor custom with less results than batch size works', async() => { t('Cursor cancel works', async() => { let result - await sql`select * from generate_series(1,10) as x`.cursor(async({ x }) => { + await sql`select * from generate_series(1,10) as x`.cursor(async([{ x }]) => { result = x - return sql.END + return sql.CLOSE }) return [1, result] }) t('Cursor throw works', async() => { const order = [] - await sql`select 1 as x union select 2 as x`.cursor(async(x) => { + await sql`select 1 as x union select 2 as x`.cursor(async([x]) => { order.push(x.x + 'a') await delay(100) throw new Error('watty') @@ -1031,11 +1083,38 @@ t('Cursor throw works', async() => { return ['1aerr', order.join('')] }) -t('Cursor throw 
works', async() => [ - 'err', - await sql`wat`.cursor(() => { /* noop */ }).catch(() => 'err') +t('Cursor error works', async() => [ + '42601', + await sql`wat`.cursor(() => { /* noop */ }).catch((err) => err.code) ]) +t('Multiple Cursors', { timeout: 2 }, async() => { + const result = [] + const xs = await sql.begin(async sql => [ + await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async ([row]) => { + result.push(row.x) + await new Promise(r => setTimeout(r, 200)) + }), + await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async ([row]) => { + result.push(row.x) + await new Promise(r => setTimeout(r, 100)) + }) + ]) + + return ['1,2,3,4,101,102,103,104', result.join(',')] +}) + +t('Cursor as async iterator', async() => { + const order = [] + for await (const [x] of sql`select generate_series(1,2) as x;`.cursor()) { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + } + + return ['1a1b2a2b', order.join('')] +}) + t('Transform row', async() => { const sql = postgres({ ...options, @@ -1045,14 +1124,14 @@ t('Transform row', async() => { return [1, (await sql`select 'wat'`)[0]] }) -t('Transform row stream', async() => { +t('Transform row forEach', async() => { let result const sql = postgres({ ...options, transform: { row: () => 1 } }) - await sql`select 1`.stream(x => result = x) + await sql`select 1`.forEach(x => result = x) return [1, result] }) @@ -1121,8 +1200,8 @@ t('numeric is returned as string', async() => [ t('Async stack trace', async() => { const sql = postgres({ ...options, debug: false }) return [ - parseInt(new Error().stack.split('\n')[1].split(':')[1]) + 1, - parseInt(await sql`select.sql`.catch(x => x.stack.split('\n').pop().split(':')[1])) + parseInt(new Error().stack.split('\n')[1].match(':([0-9]+):')[1]) + 1, + parseInt(await sql`error`.catch(x => x.stack.split('\n').pop().match(':([0-9]+):')[1])) ] }) @@ -1139,7 +1218,7 @@ t('Debug has long async stack trace', async() => { } function 
wat() { - return sql`selec 1` + return sql`error` } }) @@ -1149,24 +1228,16 @@ t('Error contains query string', async() => [ ]) t('Error contains query serialized parameters', async() => [ - '1', - (await sql`selec ${ 1 }`.catch(err => err.parameters[0].value)) + 1, + (await sql`selec ${ 1 }`.catch(err => err.parameters[0])) ]) t('Error contains query raw parameters', async() => [ 1, - (await sql`selec ${ 1 }`.catch(err => err.parameters[0].raw)) + (await sql`selec ${ 1 }`.catch(err => err.args[0])) ]) -t('Query string is not enumerable', async() => { - const sql = postgres({ ...options, debug: false }) - return [ - -1, - (await sql`selec 1`.catch(err => Object.keys(err).indexOf('query'))) - ] -}) - -t('Query and parameters are not enumerable if debug is not set', async() => { +t('Query and parameters on errorare not enumerable if debug is not set', async() => { const sql = postgres({ ...options, debug: false }) return [ @@ -1184,11 +1255,11 @@ t('Query and parameters are enumerable if debug is set', async() => { ] }) -t('connect_timeout works', async() => { +t('connect_timeout works', { timeout: 20 }, async() => { const connect_timeout = 0.2 const server = net.createServer() server.listen() - const sql = postgres({ port: server.address().port, connect_timeout }) + const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout }) const start = Date.now() let end await sql`select 1`.catch((e) => { @@ -1215,14 +1286,14 @@ t('requests works after single connect_timeout', async() => { const sql = postgres({ ...options, ...login_scram, - connect_timeout: { valueOf() { return first ? (first = false, 0.001) : 1 } } + connect_timeout: { valueOf() { return first ? 
(first = false, 0.01) : 1 } } }) return [ 'CONNECT_TIMEOUT,,1', [ await sql`select 1 as x`.catch(x => x.code), - await new Promise(r => setTimeout(r, 10)), + await delay(10), (await sql`select 1 as x`)[0].x ].join(',') ] @@ -1236,9 +1307,9 @@ t('Result has columns spec', async() => ['x', (await sql`select 1 as x`).columns[0].name] ) -t('Stream has result as second argument', async() => { +t('forEach has result as second argument', async() => { let x - await sql`select 1 as x`.stream((_, result) => x = result) + await sql`select 1 as x`.forEach((_, result) => x = result) return ['x', x.columns[0].name] }) @@ -1265,48 +1336,74 @@ t('Insert empty array', async() => { t('Insert array in sql()', async() => { await sql`create table tester (ints int[])` return [ - Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) })} returning *`)[0].ints), + Array.isArray((await sql`insert into tester ${ sql({ ints: sql.array([]) }) } returning *`)[0].ints), true, await sql`drop table tester` ] }) t('Automatically creates prepared statements', async() => { - const sql = postgres({ ...options, no_prepare: false }) + const sql = postgres(options) const result = await sql`select * from pg_prepared_statements` - return [result[0].statement, 'select * from pg_prepared_statements'] + return [true, result.some(x => x.name = result.statement.name)] }) -t('no_prepare: true disables prepared transactions (deprecated)', async() => { +t('no_prepare: true disables prepared statements (deprecated)', async() => { const sql = postgres({ ...options, no_prepare: true }) const result = await sql`select * from pg_prepared_statements` - return [0, result.count] + return [false, result.some(x => x.name = result.statement.name)] }) -t('prepare: false disables prepared transactions', async() => { +t('prepare: false disables prepared statements', async() => { const sql = postgres({ ...options, prepare: false }) const result = await sql`select * from pg_prepared_statements` - return [0, 
result.count] + return [false, result.some(x => x.name = result.statement.name)] }) -t('prepare: true enables prepared transactions', async() => { +t('prepare: true enables prepared statements', async() => { const sql = postgres({ ...options, prepare: true }) const result = await sql`select * from pg_prepared_statements` - return [result[0].statement, 'select * from pg_prepared_statements'] + return [true, result.some(x => x.name = result.statement.name)] }) t('prepares unsafe query when "prepare" option is true', async() => { const sql = postgres({ ...options, prepare: true }) const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla'], { prepare: true }) - return [result[0].statement, 'select * from pg_prepared_statements where name <> $1'] + return [true, result.some(x => x.name = result.statement.name)] }) t('does not prepare unsafe query by default', async() => { const sql = postgres({ ...options, prepare: true }) const result = await sql.unsafe('select * from pg_prepared_statements where name <> $1', ['bla']) - return [0, result.count] + return [false, result.some(x => x.name = result.statement.name)] +}) + +t('Recreate prepared statements on transformAssignedExpr error', async() => { + const insert = () => sql`insert into test (name) values (${ '1' }) returning name` + await sql`create table test (name text)` + await insert() + await sql`alter table test alter column name type int using name::integer` + return [ + 1, + (await insert())[0].name, + await sql`drop table test` + ] }) +t('Recreate prepared statements on RevalidateCachedQuery error', async() => { + const select = () => sql`select name from test` + await sql`create table test (name text)` + await sql`insert into test values ('1')` + await select() + await sql`alter table test alter column name type int using name::integer` + return [ + 1, + (await select())[0].name, + await sql`drop table test` + ] +}) + + t('Catches connection config errors', async() => { const 
sql = postgres({ ...options, user: { toString: () => { throw new Error('wat') } }, database: 'prut' }) @@ -1332,22 +1429,24 @@ t('Catches query format errors', async() => [ ]) t('Multiple hosts', { - timeout: 10000 + timeout: 10 }, async() => { - const sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout: options.idle_timeout }) + const s1 = postgres({ idle_timeout }) + , s2 = postgres({ idle_timeout, port: 5433 }) + , sql = postgres('postgres://localhost:5432,localhost:5433', { idle_timeout, max: 1 }) , result = [] - const a = (await sql`show data_directory`)[0].data_directory + const x1 = await sql`select 1` result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) - cp.execSync('pg_ctl stop -D "' + a + '"') + await s1`select pg_terminate_backend(${ x1.state.pid }::int)` + await delay(100) - const b = (await sql`show data_directory`)[0].data_directory + const x2 = await sql`select 1` result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) - cp.execSync('pg_ctl start -D "' + a + '" -w -l "' + a + '/postgresql.log"') - cp.execSync('pg_ctl stop -D "' + b + '"') + await s2`select pg_terminate_backend(${ x2.state.pid }::int)` + await delay(100) result.push((await sql`select setting as x from pg_settings where name = 'port'`)[0].x) - cp.execSync('pg_ctl start -o "-p 5433" -D "' + b + '" -w -l "' + b + '/postgresql.log"') return ['5432,5433,5432', result.join(',')] }) @@ -1375,7 +1474,7 @@ t('Raw method returns rows as arrays', async() => { t('Raw method returns values unparsed as Buffer', async() => { const [[x]] = await sql`select 1`.raw() return [ - x instanceof Buffer, + x instanceof Uint8Array, true ] }) @@ -1385,7 +1484,7 @@ t('Copy read works', async() => { await sql`create table test (x int)` await sql`insert into test select * from generate_series(1,10)` - const readable = sql`copy test to stdout`.readable() + const readable = await sql`copy test to stdout`.readable() 
readable.on('data', x => result.push(x)) await new Promise(r => readable.on('end', r)) @@ -1396,9 +1495,9 @@ t('Copy read works', async() => { ] }) -t('Copy write works', async() => { +t('Copy write works', { timeout: 2 }, async() => { await sql`create table test (x int)` - const writable = sql`copy test from stdin`.writable() + const writable = await sql`copy test from stdin`.writable() writable.write('1\n') writable.write('1\n') @@ -1416,7 +1515,7 @@ t('Copy write works', async() => { t('Copy write as first works', async() => { await sql`create table test (x int)` const first = postgres(options) - const writable = first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable() + const writable = await first`COPY test FROM STDIN WITH(FORMAT csv, HEADER false, DELIMITER ',')`.writable() writable.write('1\n') writable.write('1\n') writable.end() @@ -1433,9 +1532,9 @@ t('Copy write as first works', async() => { t('Copy from file works', async() => { await sql`create table test (x int, y int, z int)` - await new Promise(r => fs - .createReadStream(path.join(__dirname, 'copy.csv')) - .pipe(sql`copy test from stdin`.writable()) + await new Promise(async r => fs + .createReadStream(rel('copy.csv')) + .pipe(await sql`copy test from stdin`.writable()) .on('finish', r) ) @@ -1449,7 +1548,8 @@ t('Copy from file works', async() => { t('Copy from works in transaction', async() => { await sql`create table test(x int)` const xs = await sql.begin(async sql => { - sql`copy test from stdin`.writable().end('1\n2') + (await sql`copy test from stdin`.writable()).end('1\n2') + await delay(20) return sql`select 1 from test` }) @@ -1462,54 +1562,29 @@ t('Copy from works in transaction', async() => { t('Copy from abort works', async() => { const sql = postgres(options) - const readable = fs.createReadStream(path.join(__dirname, 'copy.csv')) + const readable = fs.createReadStream(rel('copy.csv')) await sql`create table test (x int, y int, z int)` await sql`TRUNCATE 
TABLE test` - const writable = sql`COPY test FROM STDIN`.writable() + const writable = await sql`COPY test FROM STDIN`.writable() let aborted readable .pipe(writable) - .on('error', () => aborted = true) + .on('error', (err) => aborted = err) writable.destroy(new Error('abort')) await sql.end() return [ - aborted, - true, + 'abort', + aborted.message, await postgres(options)`drop table test` ] }) -t('Recreate prepared statements on transformAssignedExpr error', async() => { - const insert = () => sql`insert into test (name) values (${ '1' }) returning name` - await sql`create table test (name text)` - await insert() - await sql`alter table test alter column name type int using name::integer` - return [ - 1, - (await insert())[0].name, - await sql`drop table test` - ] -}) - -t('Recreate prepared statements on RevalidateCachedQuery error', async() => { - const select = () => sql`select name from test` - await sql`create table test (name text)` - await sql`insert into test values ('1')` - await select() - await sql`alter table test alter column name type int using name::integer` - return [ - 1, - (await select())[0].name, - await sql`drop table test` - ] -}) - t('multiple queries before connect', async() => { const sql = postgres({ ...options, max: 2 }) const xs = await Promise.all([ @@ -1525,10 +1600,11 @@ t('multiple queries before connect', async() => { ] }) -t('subscribe', { timeout: 1000 }, async() => { +t('subscribe', { timeout: 2 }, async() => { const sql = postgres({ database: 'postgres_js_test', - publications: 'alltables' + publications: 'alltables', + fetch_types: false }) await sql.unsafe('create publication alltables for all tables') @@ -1557,3 +1633,105 @@ t('subscribe', { timeout: 1000 }, async() => { await sql.end() ] }) + +t('Execute works', async() => { + const result = await new Promise((resolve) => { + const sql = postgres({ ...options, fetch_types: false, debug (id, query) { + resolve(query) + }}) + sql`select 1`.execute() + }) + + return [result, 
'select 1'] +}) + +t('Cancel running query works', async() => { + const query = sql`select pg_sleep(2)` + setTimeout(() => query.cancel(), 50) + const error = await query.catch(x => x) + return ['57014', error.code] +}) + +t('Cancel piped query works', async() => { + await sql`select 1` + const last = sql`select pg_sleep(0.2)`.execute() + const query = sql`select pg_sleep(2) as dig` + setTimeout(() => query.cancel(), 100) + const error = await query.catch(x => x) + await last + return ['57014', error.code] +}) + +t('Cancel queued query works', async() => { + const tx = sql.begin(sql => sql`select pg_sleep(0.2) as hej, 'hejsa'`) + const query = sql`select pg_sleep(2) as nej` + setTimeout(() => query.cancel(), 50) + const error = await query.catch(x => x) + await tx + return ['57014', error.code] +}) + +t('Fragments', async() => [ + 1, + (await sql` + ${ sql`select` } 1 as x + `)[0].x +]) + +t('Result becomes array', async() => [ + true, + (await sql`select 1`).slice() instanceof Array +]) + +t('Describe', async() => { + const type = (await sql`select ${ 1 }::int as x`.describe()).types[0] + return [23, type] +}) + +t('Describe a statement', async() => { + await sql`create table tester (name text, age int)` + const r = await sql`select name, age from tester where name like $1 and age > $2`.describe() + return [ + '25,23/name:25,age:23', + `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`, + await sql`drop table tester` + ] + }) + +t('Describe a statement without parameters', async() => { + await sql`create table tester (name text, age int)` + const r = await sql`select name, age from tester`.describe() + return [ + '0,2', + `${ r.types.length },${ r.columns.length }`, + await sql`drop table tester` + ] + }) + +t('Describe a statement without columns', async () => { + await sql`create table tester (name text, age int)` + const r = await sql`insert into tester (name, age) values ($1, $2)`.describe() + return [ + '2,0', + `${ 
r.types.length },${ r.columns.length }`, + await sql`drop table tester` + ] + }) + +t('Large object', async() => { + const file = rel('index.js') + , md5 = crypto.createHash('md5').update(fs.readFileSync(file)).digest('hex') + + const lo = await sql.largeObject() + await new Promise(async r => fs.createReadStream(file).pipe(await lo.writable()).on('finish', r)) + await lo.seek(0) + + const out = crypto.createHash('md5') + await new Promise(r => lo.readable().then(x => x.on('data', x => out.update(x)).on('end', r))) + + return [ + md5, + out.digest('hex'), + await lo.close() + ] +}) diff --git a/tests/test.js b/tests/test.js index 05583e61..d184cdd8 100644 --- a/tests/test.js +++ b/tests/test.js @@ -1,22 +1,24 @@ /* eslint no-console: 0 */ -const util = require('util') +import util from 'util' let done = 0 let only = false let ignored = 0 +let failed = false let promise = Promise.resolve() const tests = {} + , ignore = {} -module.exports.not = () => ignored++ -module.exports.ot = (...rest) => (only = true, test(true, ...rest)) - -const t = module.exports.t = (...rest) => test(false, ...rest) -t.timeout = 500 +export const nt = () => ignored++ +export const ot = (...rest) => (only = true, test(true, ...rest)) +export const t = (...rest) => test(false, ...rest) +t.timeout = 0.5 async function test(o, name, options, fn) { typeof options !== 'object' && (fn = options, options = {}) - const line = new Error().stack.split('\n')[3].split(':')[1] + const line = new Error().stack.split('\n')[3].match(':([0-9]+):')[1] + await 1 if (only && !o) @@ -25,22 +27,31 @@ async function test(o, name, options, fn) { tests[line] = { fn, line, name } promise = promise.then(() => Promise.race([ new Promise((resolve, reject) => - fn.timer = setTimeout(() => reject('Timed out'), options.timeout || t.timeout).unref() + fn.timer = setTimeout(() => reject('Timed out'), (options.timeout || t.timeout) * 1000) ), - fn() + failed + ? 
(ignored++, ignore) + : fn() ])) .then((x) => { + clearTimeout(fn.timer) + if (x === ignore) + return + if (!Array.isArray(x)) throw new Error('Test should return result array') const [expected, got] = x - if (expected !== got) - throw new Error(expected + ' != ' + util.inspect(got)) + if (expected !== got) { + failed = true + throw new Error(util.inspect(expected) + ' != ' + util.inspect(got)) + } + tests[line].succeeded = true process.stdout.write('✅') }) .catch(err => { - tests[line].failed = true + tests[line].failed = failed = true tests[line].error = err instanceof Error ? err : new Error(util.inspect(err)) }) .then(() => { @@ -48,24 +59,20 @@ async function test(o, name, options, fn) { }) } -process.on('exit', exit) - -process.on('SIGINT', exit) - function exit() { - process.removeAllListeners('exit') console.log('') let success = true - Object.values(tests).forEach((x) => { - if (!x.succeeded) { - success = false - x.cleanup - ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup)) - : console.error('⛔️', x.name + ' at line', x.line, x.failed - ? 'failed' - : 'never finished', '\n', util.inspect(x.error) - ) - } + Object.values(tests).every((x) => { + if (x.succeeded) + return true + + success = false + x.cleanup + ? console.error('⛔️', x.name + ' at line', x.line, 'cleanup failed', '\n', util.inspect(x.cleanup)) + : console.error('⛔️', x.name + ' at line', x.line, x.failed + ? 'failed' + : 'never finished', x.error ? '\n' + util.inspect(x.error) : '' + ) }) only @@ -78,3 +85,4 @@ function exit() { !process.exitCode && (!success || only || ignored) && (process.exitCode = 1) } + diff --git a/transpile.cjs b/transpile.cjs new file mode 100644 index 00000000..1ee35626 --- /dev/null +++ b/transpile.cjs @@ -0,0 +1,43 @@ +const fs = require('fs') + , path = require('path') + +const empty = x => fs.readdirSync(x).forEach(f => fs.unlinkSync(path.join(x, f))) + , ensureEmpty = x => !fs.existsSync(x) ? 
fs.mkdirSync(x) : empty(x) + , root = 'cjs' + , src = path.join(root, 'src') + , tests = path.join(root, 'tests') + +!fs.existsSync(root) && fs.mkdirSync(root) +ensureEmpty(src) +ensureEmpty(tests) + +fs.readdirSync('src').forEach(name => + fs.writeFileSync( + path.join(src, name), + transpile(fs.readFileSync(path.join('src', name), 'utf8')) + ) +) + +fs.readdirSync('tests').forEach(name => + fs.writeFileSync( + path.join(tests, name), + name.endsWith('.js') + ? transpile(fs.readFileSync(path.join('tests', name), 'utf8')) + : fs.readFileSync(path.join('tests', name), 'utf8') + ) +) + +fs.writeFileSync(path.join(root, 'package.json'), JSON.stringify({ type: 'commonjs' })) + +function transpile(x) { + return x.replace(/export default function ([^(]+)/, 'module.exports = $1;function $1') + .replace(/export class ([^ ]+) ([^;]+?);/g, 'class $1 $2;module.exports.$1 = $1') + .replace(/export default /, 'module.exports = ') + .replace(/export {/g, 'module.exports = {') + .replace(/export const ([a-z0-9_$]+)/gi, 'const $1 = module.exports.$1') + .replace(/export function ([a-z0-9_$]+)/gi, 'module.exports.$1 = $1;function $1') + .replace(/import {([^{}]*?)} from (['"].*?['"])/gi, 'const {$1} = require($2)') + .replace(/import (.*?) from (['"].*?['"])/gi, 'const $1 = require($2)') + .replace(/import (['"].*?['"])/gi, 'require($1)') + .replace('new URL(x, import.meta.url)', 'path.join(__dirname, x)') +} diff --git a/transpile.deno.js b/transpile.deno.js new file mode 100644 index 00000000..364c19d4 --- /dev/null +++ b/transpile.deno.js @@ -0,0 +1,78 @@ +import fs from 'fs' +import path from 'path' + +const empty = x => fs.readdirSync(x).forEach(f => fs.unlinkSync(path.join(x, f))) + , ensureEmpty = x => !fs.existsSync(x) ? 
fs.mkdirSync(x) : empty(x) + , root = 'deno' + , src = path.join(root, 'src') + , tests = path.join(root, 'tests') + +ensureEmpty(src) +ensureEmpty(tests) + +fs.readdirSync('src').forEach(name => + fs.writeFileSync( + path.join(src, name), + transpile(fs.readFileSync(path.join('src', name), 'utf8'), name, 'src') + ) +) + +fs.readdirSync('tests').forEach(name => + fs.writeFileSync( + path.join(tests, name), + name.endsWith('.js') + ? transpile(fs.readFileSync(path.join('tests', name), 'utf8'), name, 'tests') + : fs.readFileSync(path.join('tests', name), 'utf8') + ) +) + +fs.writeFileSync(path.join(root, 'package.json'), JSON.stringify({ type: 'commonjs' })) + +function transpile(x, name, folder) { + if (folder === 'tests') { + if (name === 'bootstrap.js') { + x = x.replace('export function exec(', 'function ignore(') + .replace('async function execAsync(', 'export async function exec(') + .replace(/\nexec\(/g, '\nawait exec(') + .replace('{ spawnSync }', '{ spawn }') + } + + if (name === 'index.js') { + // Ignore tests that use node create stream functions not supported in deno yet + x = x.replace(/(t\('Copy from file works)/, 'n$1') + .replace(/(t\('Copy from abort works)/, 'n$1') + .replace(/(t\('Large object)/, 'n$1') + } + } + + const buffer = x.includes('Buffer') + ? 'import { Buffer } from \'https://deno.land/std@0.120.0/node/buffer.ts\'\n' + : '' + + const process = x.includes('process.') + ? 'import process from \'https://deno.land/std@0.120.0/node/process.ts\'\n' + : '' + + const timers = x.includes('setImmediate') + ? 'import { setImmediate, clearImmediate } from \'../polyfills.js\'\n' + : '' + + const hmac = x.includes('createHmac') + ? 
'import { HmacSha256 } from \'https://deno.land/std@0.120.0/hash/sha256.ts\'\n' + : '' + + return hmac + buffer + process + timers + x + .replace(/setTimeout\((.*)\)\.unref\(\)/g, '(window.timer = setTimeout($1), Deno.unrefTimer(window.timer), window.timer)') + .replace( + 'crypto.createHmac(\'sha256\', key).update(x).digest()', + 'Buffer.from(new HmacSha256(key).update(x).digest())' + ) + .replace( + 'query.writable.push({ chunk, callback })', + '(query.writable.push({ chunk }), callback())' + ) + .replace(/.setKeepAlive\([^)]+\)/g, '') + .replace(/import net from 'net'/, 'import { net } from \'../polyfills.js\'') + .replace(/import tls from 'tls'/, 'import { tls } from \'../polyfills.js\'') + .replace(/ from '([a-z_]+)'/g, ' from \'https://deno.land/std@0.120.0/node/$1.ts\'') +} From 872529a5702f6440297b0df106e2baa992b217e9 Mon Sep 17 00:00:00 2001 From: Minigugus <43109623+Minigugus@users.noreply.github.com> Date: Wed, 12 Jan 2022 15:50:09 +0100 Subject: [PATCH 02/51] Fix and update types (#257) * Fix and update types --- README.md | 5 +- tests/index.js | 3 +- types/index.d.ts | 454 ++++++++++++++++++++++++++++++++------------- types/package.json | 5 + 4 files changed, 332 insertions(+), 135 deletions(-) create mode 100644 types/package.json diff --git a/README.md b/README.md index cb99ff19..601a277e 100644 --- a/README.md +++ b/README.md @@ -213,11 +213,10 @@ return user // => User // NOTE: const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]` -// vs -const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // ok but should fail +const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // don't fail : `second: User | undefined` ``` -All the public API is typed. Also, TypeScript support is still in beta. Feel free to open an issue if you have trouble with types. 
+We do our best to type all the public API, however types are not always updated when features are added ou changed. Feel free to open an issue if you have trouble with types. ## forEach ```sql` `.forEach(fn) -> Promise``` diff --git a/tests/index.js b/tests/index.js index 4d5f7ce0..786286a1 100644 --- a/tests/index.js +++ b/tests/index.js @@ -9,8 +9,7 @@ import net from 'net' import fs from 'fs' import crypto from 'crypto' -/** @type {import('../types')} */ -import postgres from '../src/index.js' +import postgres from '..' const delay = ms => new Promise(r => setTimeout(r, ms)) const rel = x => new URL(x, import.meta.url) diff --git a/types/index.d.ts b/types/index.d.ts index 4f2c2a6d..f5719589 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -16,124 +16,223 @@ declare function postgres(url: string, options?: * Connection options of Postgres. */ interface BaseOptions { - /** Postgres ip address or domain name */ + /** Postgres ip address[s] or domain name[s] */ host: string | string[]; - /** Postgres server port */ + /** Postgres server[s] port[s] */ port: number | number[]; - /** Name of database to connect to */ + /** unix socket path (usually '/tmp') */ + path: string | undefined; + /** + * Name of database to connect to + * @default process.env['PGDATABASE'] || options.user + */ database: string; - /** Username of database user */ + /** + * Username of database user + * @default process.env['PGUSERNAME'] || process.env['PGUSER'] || require('os').userInfo().username + */ user: string; - /** True; or options for tls.connect */ - ssl: 'require' | 'prefer' | boolean | object; - /** Max number of connections */ + /** + * true, prefer, require or tls.connect options + * @default false + */ + ssl: 'require' | 'allow' | 'prefer' | boolean | object; + /** + * Max number of connections + * @default 10 + */ max: number; - /** Idle connection timeout in seconds */ + /** + * Idle connection timeout in seconds + * @default process.env['PGIDLE_TIMEOUT'] + */ 
idle_timeout: number | undefined; - /** Connect timeout in seconds */ + /** + * Connect timeout in seconds + * @default process.env['PGCONNECT_TIMEOUT'] + */ connect_timeout: number; /** Array of custom types; see more below */ types: PostgresTypeList; - /** - * Disable prepared mode - * @deprecated use "prepare" option instead - */ - no_prepare: boolean; /** * Enables prepare mode. * @default true */ prepare: boolean; - /** Defaults to console.log */ + /** + * Called when a notice is received + * @default console.log + */ onnotice: (notice: postgres.Notice) => void; - /** (key; value) when server param change */ + /** (key; value) when a server param change */ onparameter: (key: string, value: any) => void; /** Is called with (connection; query; parameters) */ debug: boolean | ((connection: number, query: string, parameters: any[]) => void); /** Transform hooks */ transform: { - /** Transforms incoming column names */ - column?: (column: string) => string; - /** Transforms incoming row values */ - value?: (value: any) => any; + /** Transforms incoming and outgoing column names */ + column?: ((column: string) => string) | { + /** SQL to JS */ + from?: (column: string) => string; + /** JS to SQL */ + to?: (column: string) => string; + }; + /** Transforms incoming and outgoing row values */ + value?: ((value: any) => any) | { + /** SQL to JS */ + from?: (value: unknown) => any; + // /** JS to SQL */ + // to?: (value: unknown) => any; // unused + }; /** Transforms entire rows */ - row?: (row: postgres.Row) => any; + row?: ((row: postgres.Row) => any) | { + /** SQL to JS */ + from?: (row: postgres.Row) => any; + // /** JS to SQL */ + // to?: (row: postgres.Row) => any; // unused + }; }; /** Connection parameters */ connection: Partial; + /** + * Use 'read-write' with multiple hosts to ensure only connecting to primary + * @default process.env['PGTARGETSESSIONATTRS'] + */ + target_session_attrs: undefined | 'read-write' | 'read-only' | 'primary' | 'standby' | 
'prefer-standby'; + /** + * Automatically fetches types on connect + * @default true + */ + fetch_types: boolean; + /** + * Publications to subscribe to (only relevant when calling `sql.subscribe()`) + * @default 'alltables' + */ + publications: string } type PostgresTypeList = { - [name in keyof T]: T[name] extends (...args: any) => unknown + [name in keyof T]: T[name] extends (...args: any) => postgres.SerializableParameter ? postgres.PostgresType - : postgres.PostgresType; + : postgres.PostgresType<(...args: any) => postgres.SerializableParameter>; }; interface JSToPostgresTypeMap { [name: string]: unknown; } -declare class PostgresError extends Error { - name: 'PostgresError'; - severity_local: string; - severity: string; - code: string; - position: string; - file: string; - line: string; - routine: string; - - detail?: string; - hint?: string; - internal_position?: string; - internal_query?: string; - where?: string; - schema_name?: string; - table_name?: string; - column_name?: string; - data?: string; - type_name?: string; - constraint_name?: string; - - // Disable user-side creation of PostgresError - private constructor(); +declare const PRIVATE: unique symbol; + +declare class NotAPromise { + private [PRIVATE]: never; // prevent user-side interface implementation + + /** + * @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;``` + * @throws NOT_TAGGED_CALL + */ + private then(): never; + /** + * @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;``` + * @throws NOT_TAGGED_CALL + */ + private catch(): never; + /** + * @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;``` + * @throws NOT_TAGGED_CALL + */ + private finally(): never; } type UnwrapPromiseArray = T extends any[] ? 
{ [k in keyof T]: T[k] extends Promise ? R : T[k] } : T; -type PostgresErrorType = typeof PostgresError - declare namespace postgres { - export const PostgresError: PostgresErrorType; + class PostgresError extends Error { + name: 'PostgresError'; + severity_local: string; + severity: string; + code: string; + position: string; + file: string; + line: string; + routine: string; + + detail?: string; + hint?: string; + internal_position?: string; + internal_query?: string; + where?: string; + schema_name?: string; + table_name?: string; + column_name?: string; + data?: string; + type_name?: string; + constraint_name?: string; + + /** Only set when debug is enabled */ + query: string; + /** Only set when debug is enabled */ + parameters: any[]; + + // Disable user-side creation of PostgresError + private constructor(); + } /** - * Convert a string to Pascal case. - * @param str THe string to convert - * @returns The new string in Pascal case + * Convert a snake_case string to PascalCase. + * @param str The string from snake_case to convert + * @returns The new string in PascalCase */ function toPascal(str: string): string; /** - * Convert a string to Camel case. - * @param str THe string to convert - * @returns The new string in Camel case + * Convert a PascalCase string to snake_case. + * @param str The string from snake_case to convert + * @returns The new string in snake_case + */ + function fromPascal(str: string): string; + /** + * Convert a snake_case string to camelCase. + * @param str The string from snake_case to convert + * @returns The new string in camelCase */ function toCamel(str: string): string; /** - * Convert a string to Kebab case. - * @param str THe string to convert - * @returns The new string in Kebab case + * Convert a camelCase string to snake_case. + * @param str The string from snake_case to convert + * @returns The new string in snake_case + */ + function fromCamel(str: string): string; + /** + * Convert a snake_case string to kebab-case. 
+ * @param str The string from snake_case to convert + * @returns The new string in kebab-case */ function toKebab(str: string): string; + /** + * Convert a kebab-case string to snake_case. + * @param str The string from snake_case to convert + * @returns The new string in snake_case + */ + function fromKebab(str: string): string; const BigInt: PostgresType<(number: bigint) => string>; + interface PostgresType unknown> { + to: number; + from: number[]; + serialize: T; + parse: (raw: string) => unknown; + } + interface ConnectionParameters { - /** Default application_name */ + /** + * Default application_name + * @default 'postgres.js' + */ application_name: string; /** Other connection parameters */ - [name: string]: any; + [name: string]: string; } interface Options extends Partial> { @@ -141,18 +240,31 @@ declare namespace postgres { host?: string; /** @inheritdoc */ port?: number; - /** unix socket path (usually '/tmp') */ - path?: string | (() => string); + /** @inheritdoc */ + path?: string; /** Password of database user (an alias for `password`) */ pass?: Options['password']; - /** Password of database user */ + /** + * Password of database user + * @default process.env['PGPASSWORD'] + */ password?: string | (() => string | Promise); /** Name of database to connect to (an alias for `database`) */ db?: Options['database']; - /** Username of database user (an alias for `username`) */ + /** Username of database user (an alias for `user`) */ username?: Options['user']; /** Postgres ip address or domain name (an alias for `host`) */ hostname?: Options['host']; + /** + * Disable prepared mode + * @deprecated use "prepare" option instead + */ + no_prepare?: boolean; + /** + * Idle connection timeout in seconds + * @deprecated use "idle_timeout" option instead + */ + timeout?: Options['idle_timeout']; } interface ParsedOptions extends BaseOptions { @@ -162,22 +274,35 @@ declare namespace postgres { port: number[]; /** @inheritdoc */ pass: null; - serializers: { [oid: 
number]: T[keyof T] }; - parsers: { [oid: number]: T[keyof T] }; + /** @inheritdoc */ + transform: Transform; + serializers: Record SerializableParameter>; + parsers: Record unknown>; } - interface Notice { - [field: string]: string; + interface Transform { + /** Transforms incoming column names */ + column: { + from: ((column: string) => string) | undefined; + to: ((column: string) => string) | undefined; + }; + /** Transforms incoming row values */ + value: { + from: ((value: any) => any) | undefined; + to: undefined; // (value: any) => any + }; + /** Transforms entire rows */ + row: { + from: ((row: postgres.Row) => any) | undefined; + to: undefined; // (row: postgres.Row) => any + }; } - interface PostgresType any = (...args: any) => any> { - to: number; - from: number[]; - serialize: T; - parse: (raw: ReturnType) => unknown; + interface Notice { + [field: string]: string; } - interface Parameter { + interface Parameter extends NotAPromise { /** * PostgreSQL OID of the type */ @@ -197,7 +322,7 @@ declare namespace postgres { } interface ConnectionError extends globalThis.Error { - code: never + code: | 'CONNECTION_DESTROYED' | 'CONNECT_TIMEOUT' | 'CONNECTION_CLOSED' @@ -209,17 +334,12 @@ declare namespace postgres { interface NotSupportedError extends globalThis.Error { code: 'MESSAGE_NOT_SUPPORTED'; - name: never - | 'CopyInResponse' - | 'CopyOutResponse' - | 'ParameterDescription' - | 'FunctionCallResponse' - | 'NegotiateProtocolVersion' - | 'CopyBothResponse'; + name: string; } interface GenericError extends globalThis.Error { - code: never + code: + | '57014' // canceling statement due to user request | 'NOT_TAGGED_CALL' | 'UNDEFINED_VALUE' | 'MAX_PARAMETERS_EXCEEDED' @@ -229,17 +349,7 @@ declare namespace postgres { interface AuthNotImplementedError extends globalThis.Error { code: 'AUTH_TYPE_NOT_IMPLEMENTED'; - type: number - | 'KerberosV5' - | 'CleartextPassword' - | 'MD5Password' - | 'SCMCredential' - | 'GSS' - | 'GSSContinue' - | 'SSPI' - | 'SASL' - | 
'SASLContinue' - | 'SASLFinal'; + type: number | string; message: string; } @@ -250,6 +360,50 @@ declare namespace postgres { | GenericError | AuthNotImplementedError; + interface ColumnInfo { + key: number; + name: string; + type: number; + parser?(raw: string): unknown; + atttypmod: number; + } + + interface RelationInfo { + schema: string; + table: string; + columns: ColumnInfo[]; + keys: ColumnInfo[]; + } + + type ReplicationEvent = + | { command: 'insert', relation: RelationInfo } + | { command: 'delete', relation: RelationInfo, key: boolean } + | { command: 'update', relation: RelationInfo, key: boolean, old: Row | null }; + + interface SubscriptionHandle { + unsubscribe(): void; + } + + interface LargeObject { + writable(options?: { + highWaterMark?: number, + start?: number + }): Promise; + readable(options?: { + highWaterMark?: number, + start?: number, + end?: number + }): Promise; + + close(): Promise; + tell(): Promise; + read(size: number): Promise; + write(buffer: Uint8Array): Promise<[{ data: Uint8Array }]>; + truncate(size: number): Promise; + seek(offset: number, whence?: number): Promise; + size(): Promise<[{ position: bigint, size: bigint }]>; + } + type Serializable = null | boolean | number @@ -261,7 +415,8 @@ declare namespace postgres { | Helper | Parameter | ArrayParameter - | SerializableParameter[]; + | Record // implicit JSON + | readonly SerializableParameter[]; type HelperSerializable = { [index: string]: SerializableParameter } | { [index: string]: SerializableParameter }[]; @@ -277,10 +432,6 @@ declare namespace postgres { [column: string]: any; } - interface UnlabeledRow { - '?column?': T; - } - type MaybeRow = Row | undefined; type TransformRow = T extends Serializable @@ -292,20 +443,31 @@ declare namespace postgres { interface Column { name: T; type: number; - parser(raw: string): string; + parser?(raw: string): unknown; } type ColumnList = (T extends string ? 
Column : never)[]; interface State { - state: 'I'; + status: string; pid: number; secret: number; } + interface Statement { + /** statement unique name */ + name: string; + /** sql query */ + string: string; + /** parameters types */ + types: number[]; + columns: ColumnList; + } + interface ResultMeta { count: T; // For tuples command: string; + statement: Statement; state: State; } @@ -314,13 +476,37 @@ declare namespace postgres { } type ExecutionResult = [] & ResultQueryMeta>; - type RowList = T & Iterable> & ResultQueryMeta; + type RawRowList = Buffer[][] & Iterable & ResultQueryMeta; + type RowList = T & Iterable> & ResultQueryMeta; + + interface PendingQueryModifiers { + readable(): import('node:stream').Readable; + writable(): import('node:stream').Writable; + + execute(): this; + cancel(): void; + + /** + * @deprecated `.stream` has been renamed to `.forEach` + * @throws + */ + stream(cb: (row: NonNullable, result: ExecutionResult) => void): never; + forEach(cb: (row: NonNullable, result: ExecutionResult) => void): Promise>; + + cursor(rows?: number): AsyncIterable[]>; + cursor(cb: (row: [NonNullable]) => void): Promise>; + cursor(rows: number, cb: (rows: NonNullable[]) => void): Promise>; + } - interface PendingQuery extends Promise> { - stream(cb: (row: NonNullable, result: ExecutionResult) => void): Promise>; - cursor(cb: (row: NonNullable) => void): Promise>; - cursor(size: 1, cb: (row: NonNullable) => void): Promise>; - cursor(size: number, cb: (rows: NonNullable[]) => void): Promise>; + interface PendingDescribeQuery extends Promise { + } + + interface PendingRawQuery extends Promise>, PendingQueryModifiers { + } + + interface PendingQuery extends Promise>, PendingQueryModifiers { + describe(): PendingDescribeQuery; + raw(): PendingRawQuery; } interface PendingRequest extends Promise<[] & ResultMeta> { } @@ -330,7 +516,7 @@ declare namespace postgres { unlisten(): Promise } - interface Helper { + interface Helper extends NotAPromise { first: T; rest: 
U; } @@ -343,7 +529,7 @@ declare namespace postgres { * @param args Interpoled values of the template string * @returns A promise resolving to the result of your query */ - (template: TemplateStringsArray, ...args: SerializableParameter[]): PendingQuery>; + (template: TemplateStringsArray, ...args: SerializableParameter[]): PendingQuery>; /** * Escape column names @@ -361,18 +547,10 @@ declare namespace postgres { */ >(objOrArray: T, ...keys: U[]): Helper; - END: {}; // FIXME unique symbol ? + CLOSE: {}; + END: this['CLOSE']; PostgresError: typeof PostgresError; - array(value: T): ArrayParameter; - begin(cb: (sql: TransactionSql) => T | Promise): Promise>; - begin(options: string, cb: (sql: TransactionSql) => T | Promise): Promise>; - end(options?: { timeout?: number }): Promise; - file(path: string, options?: { cache?: boolean }): PendingQuery>; - file(path: string, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery>; - json(value: any): Parameter; - listen(channel: string, cb: (value?: string) => void): ListenRequest; - notify(channel: string, payload: string): PendingRequest; options: ParsedOptions; parameters: ConnectionParameters; types: { @@ -380,22 +558,38 @@ declare namespace postgres { ? 
(...args: Parameters) => postgres.Parameter> : (...args: any) => postgres.Parameter; }; - unsafe(query: string, parameters?: SerializableParameter[], queryOptions?: UnsafeQueryOptions): PendingQuery>; + + unsafe)[]>(query: string, parameters?: SerializableParameter[], queryOptions?: UnsafeQueryOptions): PendingQuery>; + end(options?: { timeout?: number }): Promise; + + listen(channel: string, cb: (value: string) => void): ListenRequest; + notify(channel: string, payload: string): PendingRequest; + + subscribe(event: string, cb: (row: Row | null, info: ReplicationEvent) => void): Promise; + + largeObject(oid?: number, /** @default 0x00020000 | 0x00040000 */ mode?: number): Promise; + + begin(cb: (sql: TransactionSql) => T | Promise): Promise>; + begin(options: string, cb: (sql: TransactionSql) => T | Promise): Promise>; + + array(value: T, type?: number): ArrayParameter; + file(path: import('node:fs').PathOrFileDescriptor, options?: { cache?: boolean }): PendingQuery>; + file(path: import('node:fs').PathOrFileDescriptor, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery>; + json(value: any): Parameter; + } + + interface UnsafeQueryOptions { + /** + * When executes query as prepared statement. + * @default false + */ + prepare?: boolean; } interface TransactionSql extends Sql { savepoint(cb: (sql: TransactionSql) => T | Promise): Promise>; savepoint(name: string, cb: (sql: TransactionSql) => T | Promise): Promise>; } - -} - -interface UnsafeQueryOptions { - /** - * When executes query as prepared statement. 
- * @default false - */ - prepare?: boolean; } export = postgres; diff --git a/types/package.json b/types/package.json new file mode 100644 index 00000000..49a279aa --- /dev/null +++ b/types/package.json @@ -0,0 +1,5 @@ +{ + "devDependencies": { + "@types/node": "^16" + } +} From 8e0812ad73dad9887e5840592d52cda212b21515 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Thu, 13 Jan 2022 18:29:57 +0100 Subject: [PATCH 03/51] Fix transaction edge cases and error handling --- src/index.js | 122 +++++++++++++++++++++++++------------------------ src/query.js | 4 +- tests/index.js | 23 +++++++--- 3 files changed, 81 insertions(+), 68 deletions(-) diff --git a/src/index.js b/src/index.js index 43831e91..2f63f7e0 100644 --- a/src/index.js +++ b/src/index.js @@ -93,7 +93,7 @@ function Postgres(a, b) { : typeof strings === 'string' && !args.length ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings) : new Builder(strings, args) - instant && query instanceof Query && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query instanceof Query && query.execute() return query } @@ -104,7 +104,7 @@ function Postgres(a, b) { ...options, simple: 'simple' in options ? options.simple : args.length === 0 }) - instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query.execute() return query } @@ -122,7 +122,7 @@ function Postgres(a, b) { ...options, simple: 'simple' in options ? 
options.simple : args.length === 0 }) - instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query.execute() return query } } @@ -178,65 +178,66 @@ function Postgres(a, b) { async function begin(options, fn) { !fn && (fn = options, options = '') - return new Promise(async(resolve, reject) => { - await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).catch(reject) - - function onexecute(c) { - const queries = Queue() - let savepoints = 0 - - queues[c.state].remove(c) - c.state = 'reserved' - c.reserved = () => queries.length && handler(queries.shift()) - reserved.push(c) - - const sql = Sql(handler, true) - sql.savepoint = savepoint - - start() - - return false - - async function start() { - try { - const xs = fn(sql) - const result = await (Array.isArray(xs) ? Promise.all(xs) : xs) - await sql`commit` - resolve(result) - } catch (error) { - await sql`rollback`.catch(reject) - reject(error) - } - c.reserved = null - onopen(c) - } + const queries = Queue() + let savepoints = 0 + , connection + + try { + await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }) + return await scope(connection, fn) + } catch (error) { + throw error + } finally { + if (connection) { + connection.reserved = null + onopen(connection) + } + } - async function savepoint(name, fn) { - if (name && Array.isArray(name.raw)) - return savepoint(sql => sql.apply(sql, arguments)) - - try { - arguments.length === 1 && (fn = name, name = null) - name = 's' + savepoints++ + (name ? 
'_' + name : '') - await sql`savepoint ${ sql(name) }` - } catch (err) { - reject(err) - } - try { - return await Promise.resolve(fn(sql)) - } catch (err) { - await sql`rollback to ${ sql(name) }` - throw err - } - } + async function scope(c, fn, name) { + const sql = Sql(handler, true) + sql.savepoint = savepoint + let errored + name && await sql`savepoint ${ sql(name) }` + try { + const result = await new Promise((resolve, reject) => { + errored = reject + const x = fn(sql) + Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject) + }) + !name && await sql`commit` + return result + } catch (e) { + await (name + ? sql`rollback to ${ sql(name) }` + : sql`rollback` + ) + throw e + } - function handler(query) { - c.state === 'full' - ? queries.push(query) - : c.execute(query) - } + function savepoint(name, fn) { + if (name && Array.isArray(name.raw)) + return savepoint(sql => sql.apply(sql, arguments)) + + arguments.length === 1 && (fn = name, name = null) + return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : '')) } - }) + + function handler(q) { + errored && q.catch(errored) + c.state === 'full' + ? 
queries.push(q) + : c.execute(q) || (c.state = 'full', full.push(c)) + } + } + + function onexecute(c) { + queues[c.state].remove(c) + c.state = 'reserved' + c.reserved = () => queries.length && c.execute(queries.shift()) + reserved.push(c) + connection = c + } } function largeObject(oid, mode = 0x00020000 | 0x00040000) { @@ -356,10 +357,11 @@ function Postgres(a, b) { }) } - function end({ timeout = null } = {}) { + async function end({ timeout = null } = {}) { if (ending) return ending + await 1 let timer return ending = Promise.race([ new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))), diff --git a/src/query.js b/src/query.js index 1b5826c5..816fd184 100644 --- a/src/query.js +++ b/src/query.js @@ -104,8 +104,8 @@ export default class Query extends Promise { return this } - handle() { - !this.executed && this.handler((this.executed = true, this)) + async handle() { + !this.executed && (this.executed = true) && await 1 && this.handler(this) } execute() { diff --git a/tests/index.js b/tests/index.js index 786286a1..93b2539d 100644 --- a/tests/index.js +++ b/tests/index.js @@ -224,12 +224,23 @@ t('Savepoint returns Result', async() => { return [1, result[0].x] }) -t('Transaction requests are executed implicitly', async() => [ - 'testing', - (await sql.begin(async sql => { - sql`select set_config('postgres_js.test', 'testing', true)` - return await sql`select current_setting('postgres_js.test') as x` - }))[0].x +t('Transaction requests are executed implicitly', async() => { + const sql = postgres({ debug: true, idle_timeout: 1, fetch_types: false }) + return [ + 'testing', + (await sql.begin(async sql => { + sql`select set_config('postgres_js.test', 'testing', true)` + return await sql`select current_setting('postgres_js.test') as x` + }))[0].x + ] +}) + +t('Uncaught transaction request errors bubbles to transaction', async() => [ + '42703', + (await sql.begin(sql => ( + sql`select wat`, + sql`select 
current_setting('postgres_js.test') as x, ${ 1 } as a` + )).catch(e => e.code)) ]) t('Parallel transactions', async() => { From 5366d87873d8a029725b97b21e3a6186a99edc9d Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Thu, 13 Jan 2022 18:30:52 +0100 Subject: [PATCH 04/51] Clean up --- .eslintrc.json | 2 +- cjs/src/index.js | 122 ++++++++++++++++++++++---------------------- cjs/src/query.js | 4 +- cjs/tests/index.js | 38 +++++++++----- deno/src/index.js | 122 ++++++++++++++++++++++---------------------- deno/src/query.js | 4 +- deno/tests/index.js | 38 +++++++++----- package.json | 4 +- src/connection.js | 6 +-- src/errors.js | 2 +- src/types.js | 6 +-- tests/bootstrap.js | 4 +- tests/index.js | 15 +++--- 13 files changed, 198 insertions(+), 169 deletions(-) diff --git a/.eslintrc.json b/.eslintrc.json index f117263c..4a50f178 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -5,7 +5,7 @@ "node": true }, "parserOptions": { - "ecmaVersion": 9, + "ecmaVersion": 2020, "sourceType": "module" }, "rules": { diff --git a/cjs/src/index.js b/cjs/src/index.js index c4e945f0..12bf4930 100644 --- a/cjs/src/index.js +++ b/cjs/src/index.js @@ -93,7 +93,7 @@ function Postgres(a, b) { : typeof strings === 'string' && !args.length ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings) : new Builder(strings, args) - instant && query instanceof Query && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query instanceof Query && query.execute() return query } @@ -104,7 +104,7 @@ function Postgres(a, b) { ...options, simple: 'simple' in options ? options.simple : args.length === 0 }) - instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query.execute() return query } @@ -122,7 +122,7 @@ function Postgres(a, b) { ...options, simple: 'simple' in options ? 
options.simple : args.length === 0 }) - instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query.execute() return query } } @@ -178,65 +178,66 @@ function Postgres(a, b) { async function begin(options, fn) { !fn && (fn = options, options = '') - return new Promise(async(resolve, reject) => { - await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).catch(reject) - - function onexecute(c) { - const queries = Queue() - let savepoints = 0 - - queues[c.state].remove(c) - c.state = 'reserved' - c.reserved = () => queries.length && handler(queries.shift()) - reserved.push(c) - - const sql = Sql(handler, true) - sql.savepoint = savepoint - - start() - - return false - - async function start() { - try { - const xs = fn(sql) - const result = await (Array.isArray(xs) ? Promise.all(xs) : xs) - await sql`commit` - resolve(result) - } catch (error) { - await sql`rollback`.catch(reject) - reject(error) - } - c.reserved = null - onopen(c) - } + const queries = Queue() + let savepoints = 0 + , connection + + try { + await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }) + return await scope(connection, fn) + } catch (error) { + throw error + } finally { + if (connection) { + connection.reserved = null + onopen(connection) + } + } - async function savepoint(name, fn) { - if (name && Array.isArray(name.raw)) - return savepoint(sql => sql.apply(sql, arguments)) - - try { - arguments.length === 1 && (fn = name, name = null) - name = 's' + savepoints++ + (name ? 
'_' + name : '') - await sql`savepoint ${ sql(name) }` - } catch (err) { - reject(err) - } - try { - return await Promise.resolve(fn(sql)) - } catch (err) { - await sql`rollback to ${ sql(name) }` - throw err - } - } + async function scope(c, fn, name) { + const sql = Sql(handler, true) + sql.savepoint = savepoint + let errored + name && await sql`savepoint ${ sql(name) }` + try { + const result = await new Promise((resolve, reject) => { + errored = reject + const x = fn(sql) + Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject) + }) + !name && await sql`commit` + return result + } catch (e) { + await (name + ? sql`rollback to ${ sql(name) }` + : sql`rollback` + ) + throw e + } - function handler(query) { - c.state === 'full' - ? queries.push(query) - : c.execute(query) - } + function savepoint(name, fn) { + if (name && Array.isArray(name.raw)) + return savepoint(sql => sql.apply(sql, arguments)) + + arguments.length === 1 && (fn = name, name = null) + return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : '')) } - }) + + function handler(q) { + errored && q.catch(errored) + c.state === 'full' + ? 
queries.push(q) + : c.execute(q) || (c.state = 'full', full.push(c)) + } + } + + function onexecute(c) { + queues[c.state].remove(c) + c.state = 'reserved' + c.reserved = () => queries.length && c.execute(queries.shift()) + reserved.push(c) + connection = c + } } function largeObject(oid, mode = 0x00020000 | 0x00040000) { @@ -356,10 +357,11 @@ function Postgres(a, b) { }) } - function end({ timeout = null } = {}) { + async function end({ timeout = null } = {}) { if (ending) return ending + await 1 let timer return ending = Promise.race([ new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))), diff --git a/cjs/src/query.js b/cjs/src/query.js index 86fbaf54..748e0718 100644 --- a/cjs/src/query.js +++ b/cjs/src/query.js @@ -104,8 +104,8 @@ module.exports = class Query extends Promise { return this } - handle() { - !this.executed && this.handler((this.executed = true, this)) + async handle() { + !this.executed && (this.executed = true) && await 1 && this.handler(this) } execute() { diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 12a43194..6ac8be3f 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -225,12 +225,23 @@ t('Savepoint returns Result', async() => { return [1, result[0].x] }) -t('Transaction requests are executed implicitly', async() => [ - 'testing', - (await sql.begin(async sql => { - sql`select set_config('postgres_js.test', 'testing', true)` - return await sql`select current_setting('postgres_js.test') as x` - }))[0].x +t('Transaction requests are executed implicitly', async() => { + const sql = postgres({ debug: true, idle_timeout: 1, fetch_types: false }) + return [ + 'testing', + (await sql.begin(async sql => { + sql`select set_config('postgres_js.test', 'testing', true)` + return await sql`select current_setting('postgres_js.test') as x` + }))[0].x + ] +}) + +t('Uncaught transaction request errors bubbles to transaction', async() => [ + '42703', + (await sql.begin(sql => ( + sql`select wat`, + 
sql`select current_setting('postgres_js.test') as x, ${ 1 } as a` + )).catch(e => e.code)) ]) t('Parallel transactions', async() => { @@ -626,11 +637,12 @@ t('listen and notify with upper case', async() => { ] }) -t('listen reconnects', { timeout: 4 }, async() => { +t('listen reconnects', { timeout: 2 }, async() => { const sql = postgres(options) , xs = [] const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await delay(200) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` await delay(200) @@ -1286,13 +1298,13 @@ t('requests works after single connect_timeout', async() => { const sql = postgres({ ...options, ...login_scram, - connect_timeout: { valueOf() { return first ? (first = false, 0.01) : 1 } } + connect_timeout: { valueOf() { return first ? (first = false, 0.001) : 1 } } }) return [ 'CONNECT_TIMEOUT,,1', [ - await sql`select 1 as x`.catch(x => x.code), + await sql`select 1 as x`.then(() => 'success', x => x.code), await delay(10), (await sql`select 1 as x`)[0].x ].join(',') @@ -1696,7 +1708,7 @@ t('Describe a statement', async() => { `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`, await sql`drop table tester` ] - }) +}) t('Describe a statement without parameters', async() => { await sql`create table tester (name text, age int)` @@ -1706,9 +1718,9 @@ t('Describe a statement without parameters', async() => { `${ r.types.length },${ r.columns.length }`, await sql`drop table tester` ] - }) +}) -t('Describe a statement without columns', async () => { +t('Describe a statement without columns', async() => { await sql`create table tester (name text, age int)` const r = await sql`insert into tester (name, age) values ($1, $2)`.describe() return [ @@ -1716,7 +1728,7 @@ t('Describe a statement without columns', async () => { `${ r.types.length },${ r.columns.length }`, await sql`drop table tester` ] - }) +}) t('Large object', async() => { const file = rel('index.js') diff --git 
a/deno/src/index.js b/deno/src/index.js index 0667e218..6b0b31ef 100644 --- a/deno/src/index.js +++ b/deno/src/index.js @@ -94,7 +94,7 @@ function Postgres(a, b) { : typeof strings === 'string' && !args.length ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings) : new Builder(strings, args) - instant && query instanceof Query && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query instanceof Query && query.execute() return query } @@ -105,7 +105,7 @@ function Postgres(a, b) { ...options, simple: 'simple' in options ? options.simple : args.length === 0 }) - instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query.execute() return query } @@ -123,7 +123,7 @@ function Postgres(a, b) { ...options, simple: 'simple' in options ? options.simple : args.length === 0 }) - instant && Promise.resolve().then(() => { !query.fragment && query.execute() }) + instant && query.execute() return query } } @@ -179,65 +179,66 @@ function Postgres(a, b) { async function begin(options, fn) { !fn && (fn = options, options = '') - return new Promise(async(resolve, reject) => { - await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).catch(reject) - - function onexecute(c) { - const queries = Queue() - let savepoints = 0 - - queues[c.state].remove(c) - c.state = 'reserved' - c.reserved = () => queries.length && handler(queries.shift()) - reserved.push(c) - - const sql = Sql(handler, true) - sql.savepoint = savepoint - - start() - - return false - - async function start() { - try { - const xs = fn(sql) - const result = await (Array.isArray(xs) ? 
Promise.all(xs) : xs) - await sql`commit` - resolve(result) - } catch (error) { - await sql`rollback`.catch(reject) - reject(error) - } - c.reserved = null - onopen(c) - } + const queries = Queue() + let savepoints = 0 + , connection + + try { + await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }) + return await scope(connection, fn) + } catch (error) { + throw error + } finally { + if (connection) { + connection.reserved = null + onopen(connection) + } + } - async function savepoint(name, fn) { - if (name && Array.isArray(name.raw)) - return savepoint(sql => sql.apply(sql, arguments)) - - try { - arguments.length === 1 && (fn = name, name = null) - name = 's' + savepoints++ + (name ? '_' + name : '') - await sql`savepoint ${ sql(name) }` - } catch (err) { - reject(err) - } - try { - return await Promise.resolve(fn(sql)) - } catch (err) { - await sql`rollback to ${ sql(name) }` - throw err - } - } + async function scope(c, fn, name) { + const sql = Sql(handler, true) + sql.savepoint = savepoint + let errored + name && await sql`savepoint ${ sql(name) }` + try { + const result = await new Promise((resolve, reject) => { + errored = reject + const x = fn(sql) + Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject) + }) + !name && await sql`commit` + return result + } catch (e) { + await (name + ? sql`rollback to ${ sql(name) }` + : sql`rollback` + ) + throw e + } - function handler(query) { - c.state === 'full' - ? queries.push(query) - : c.execute(query) - } + function savepoint(name, fn) { + if (name && Array.isArray(name.raw)) + return savepoint(sql => sql.apply(sql, arguments)) + + arguments.length === 1 && (fn = name, name = null) + return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : '')) } - }) + + function handler(q) { + errored && q.catch(errored) + c.state === 'full' + ? 
queries.push(q) + : c.execute(q) || (c.state = 'full', full.push(c)) + } + } + + function onexecute(c) { + queues[c.state].remove(c) + c.state = 'reserved' + c.reserved = () => queries.length && c.execute(queries.shift()) + reserved.push(c) + connection = c + } } function largeObject(oid, mode = 0x00020000 | 0x00040000) { @@ -357,10 +358,11 @@ function Postgres(a, b) { }) } - function end({ timeout = null } = {}) { + async function end({ timeout = null } = {}) { if (ending) return ending + await 1 let timer return ending = Promise.race([ new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))), diff --git a/deno/src/query.js b/deno/src/query.js index 1b5826c5..816fd184 100644 --- a/deno/src/query.js +++ b/deno/src/query.js @@ -104,8 +104,8 @@ export default class Query extends Promise { return this } - handle() { - !this.executed && this.handler((this.executed = true, this)) + async handle() { + !this.executed && (this.executed = true) && await 1 && this.handler(this) } execute() { diff --git a/deno/tests/index.js b/deno/tests/index.js index 99f34edb..6cdd01ca 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -226,12 +226,23 @@ t('Savepoint returns Result', async() => { return [1, result[0].x] }) -t('Transaction requests are executed implicitly', async() => [ - 'testing', - (await sql.begin(async sql => { - sql`select set_config('postgres_js.test', 'testing', true)` - return await sql`select current_setting('postgres_js.test') as x` - }))[0].x +t('Transaction requests are executed implicitly', async() => { + const sql = postgres({ debug: true, idle_timeout: 1, fetch_types: false }) + return [ + 'testing', + (await sql.begin(async sql => { + sql`select set_config('postgres_js.test', 'testing', true)` + return await sql`select current_setting('postgres_js.test') as x` + }))[0].x + ] +}) + +t('Uncaught transaction request errors bubbles to transaction', async() => [ + '42703', + (await sql.begin(sql => ( + sql`select wat`, + 
sql`select current_setting('postgres_js.test') as x, ${ 1 } as a` + )).catch(e => e.code)) ]) t('Parallel transactions', async() => { @@ -627,11 +638,12 @@ t('listen and notify with upper case', async() => { ] }) -t('listen reconnects', { timeout: 4 }, async() => { +t('listen reconnects', { timeout: 2 }, async() => { const sql = postgres(options) , xs = [] const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await delay(200) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` await delay(200) @@ -1287,13 +1299,13 @@ t('requests works after single connect_timeout', async() => { const sql = postgres({ ...options, ...login_scram, - connect_timeout: { valueOf() { return first ? (first = false, 0.01) : 1 } } + connect_timeout: { valueOf() { return first ? (first = false, 0.001) : 1 } } }) return [ 'CONNECT_TIMEOUT,,1', [ - await sql`select 1 as x`.catch(x => x.code), + await sql`select 1 as x`.then(() => 'success', x => x.code), await delay(10), (await sql`select 1 as x`)[0].x ].join(',') @@ -1697,7 +1709,7 @@ t('Describe a statement', async() => { `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`, await sql`drop table tester` ] - }) +}) t('Describe a statement without parameters', async() => { await sql`create table tester (name text, age int)` @@ -1707,9 +1719,9 @@ t('Describe a statement without parameters', async() => { `${ r.types.length },${ r.columns.length }`, await sql`drop table tester` ] - }) +}) -t('Describe a statement without columns', async () => { +t('Describe a statement without columns', async() => { await sql`create table tester (name text, age int)` const r = await sql`insert into tester (name, age) values ($1, $2)`.describe() return [ @@ -1717,7 +1729,7 @@ t('Describe a statement without columns', async () => { `${ r.types.length },${ r.columns.length }`, await sql`drop table tester` ] - }) +}) nt('Large object', async() => { const file = rel('index.js') diff --git 
a/package.json b/package.json index 70417497..c10c0705 100644 --- a/package.json +++ b/package.json @@ -15,10 +15,10 @@ "build": "npm run build:cjs && npm run build:deno", "build:cjs": "node transpile.cjs", "build:deno": "node transpile.deno.js", - "test:cjs": "npm run build:cjs && pushd cjs/tests && node index.js && popd", + "test": "npm run test:esm && npm run test:cjs && npm run test:deno", + "test:esm": "node tests/index.js", "test:cjs": "npm run build:cjs && pushd cjs/tests && node index.js && popd", "test:deno": "npm run build:deno && pushd deno/tests && deno run --unstable --allow-all --unsafely-ignore-certificate-errors index.js && popd", - "test": "node tests/index.js && npm run test:cjs && npm run test:deno", "lint": "eslint src && eslint tests", "prepare": "npm run build", "prepublishOnly": "npm run lint" diff --git a/src/connection.js b/src/connection.js index 61685c50..6f62c1eb 100644 --- a/src/connection.js +++ b/src/connection.js @@ -839,7 +839,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function EmptyQueryResponse() { - + /* noop */ } /* c8 ignore next 3 */ @@ -854,12 +854,12 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function UnknownMessage(x) { - console.error('Unknown message', x) + console.error('Postgres.js : Unknown Message:', x[0]) // eslint-disable-line } /* c8 ignore next 3 */ function UnknownAuth(x, type) { - console.error('Unknown auth', type) + console.error('Postgres.js : Unknown Auth:', type) // eslint-disable-line } /* Messages */ diff --git a/src/errors.js b/src/errors.js index ed12202d..1402b850 100644 --- a/src/errors.js +++ b/src/errors.js @@ -4,7 +4,7 @@ export class PostgresError extends Error { this.name = this.constructor.name Object.assign(this, x) } -}; +}; // eslint-disable-line export const Errors = { connection, diff --git a/src/types.js b/src/types.js index 9eb76a4f..e49180e9 100644 --- 
a/src/types.js +++ b/src/types.js @@ -53,7 +53,7 @@ export class Identifier extends NotTagged { super() this.value = escapeIdentifier(value) } -}; +}; // eslint-disable-line export class Parameter extends NotTagged { constructor(value, type, array) { @@ -62,7 +62,7 @@ export class Parameter extends NotTagged { this.type = type this.array = array } -}; +}; // eslint-disable-line export class Builder extends NotTagged { constructor(first, rest) { @@ -78,7 +78,7 @@ export class Builder extends NotTagged { return keyword.fn(this.first, this.rest, parameters, types, transform) } -}; +}; // eslint-disable-line export function handleValue(x, parameters, types) { if (Array.isArray(x)) diff --git a/tests/bootstrap.js b/tests/bootstrap.js index e90bce4d..6a4fa4c1 100644 --- a/tests/bootstrap.js +++ b/tests/bootstrap.js @@ -19,9 +19,9 @@ export function exec(cmd, args) { throw stderr } -async function execAsync(cmd, args) { +async function execAsync(cmd, args) { // eslint-disable-line let stderr = '' - const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) // eslint-disable-line cp.stderr.on('data', x => stderr += x) await new Promise(x => cp.on('exit', x)) if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) diff --git a/tests/index.js b/tests/index.js index 93b2539d..41cbe1a2 100644 --- a/tests/index.js +++ b/tests/index.js @@ -636,11 +636,12 @@ t('listen and notify with upper case', async() => { ] }) -t('listen reconnects', { timeout: 4 }, async() => { +t('listen reconnects', { timeout: 2 }, async() => { const sql = postgres(options) , xs = [] const { state: { pid } } = await sql.listen('test', x => xs.push(x)) + await delay(200) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` await delay(200) @@ -1296,13 +1297,13 @@ t('requests works after single connect_timeout', async() => { const sql = postgres({ ...options, 
...login_scram, - connect_timeout: { valueOf() { return first ? (first = false, 0.01) : 1 } } + connect_timeout: { valueOf() { return first ? (first = false, 0.001) : 1 } } }) return [ 'CONNECT_TIMEOUT,,1', [ - await sql`select 1 as x`.catch(x => x.code), + await sql`select 1 as x`.then(() => 'success', x => x.code), await delay(10), (await sql`select 1 as x`)[0].x ].join(',') @@ -1706,7 +1707,7 @@ t('Describe a statement', async() => { `${ r.types.join(',') }/${ r.columns.map(c => `${c.name}:${c.type}`).join(',') }`, await sql`drop table tester` ] - }) +}) t('Describe a statement without parameters', async() => { await sql`create table tester (name text, age int)` @@ -1716,9 +1717,9 @@ t('Describe a statement without parameters', async() => { `${ r.types.length },${ r.columns.length }`, await sql`drop table tester` ] - }) +}) -t('Describe a statement without columns', async () => { +t('Describe a statement without columns', async() => { await sql`create table tester (name text, age int)` const r = await sql`insert into tester (name, age) values ($1, $2)`.describe() return [ @@ -1726,7 +1727,7 @@ t('Describe a statement without columns', async () => { `${ r.types.length },${ r.columns.length }`, await sql`drop table tester` ] - }) +}) t('Large object', async() => { const file = rel('index.js') From 963f79ca186d044ba596225a5e735f268d22f54e Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Thu, 13 Jan 2022 23:54:10 +0100 Subject: [PATCH 05/51] Handle some edge case errors better --- cjs/src/connection.js | 19 ++++++---- cjs/src/errors.js | 2 +- cjs/src/types.js | 6 +-- cjs/tests/bootstrap.js | 4 +- cjs/tests/index.js | 83 +++++++++++++++++++++++++++++++++++++++- deno/src/connection.js | 19 ++++++---- deno/src/errors.js | 2 +- deno/src/types.js | 6 +-- deno/tests/bootstrap.js | 4 +- deno/tests/index.js | 83 +++++++++++++++++++++++++++++++++++++++- src/connection.js | 13 +++++-- tests/index.js | 84 ++++++++++++++++++++++++++++++++++++++++- 12 files changed, 292 
insertions(+), 33 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index 62be51ed..bf488d21 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -312,7 +312,12 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl break } - handle(incoming.slice(0, length + 1)) + try { + handle(incoming.slice(0, length + 1)) + } catch (e) { + query && (query.cursorFn || query.describeFirst) && write(Sync) + errored(e) + } incoming = incoming.slice(length + 1) remaining = 0 incomings = null @@ -365,9 +370,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function queryError(query, err) { - if (err.query) - return - err.stack += query.origin.replace(/.*\n/, '\n') Object.defineProperties(err, { query: { value: query.string, enumerable: options.debug }, @@ -399,6 +401,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function closed(hadError) { + incoming = Buffer.alloc(0) + remaining = 0 + incomings = null clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) @@ -839,7 +844,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function EmptyQueryResponse() { - + /* noop */ } /* c8 ignore next 3 */ @@ -854,12 +859,12 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function UnknownMessage(x) { - console.error('Unknown message', x) + console.error('Postgres.js : Unknown Message:', x[0]) // eslint-disable-line } /* c8 ignore next 3 */ function UnknownAuth(x, type) { - console.error('Unknown auth', type) + console.error('Postgres.js : Unknown Auth:', type) // eslint-disable-line } /* Messages */ diff --git a/cjs/src/errors.js b/cjs/src/errors.js index 0de66340..e6e8b83a 100644 --- a/cjs/src/errors.js +++ b/cjs/src/errors.js @@ -4,7 +4,7 @@ class PostgresError extends Error { 
this.name = this.constructor.name Object.assign(this, x) } -};module.exports.PostgresError = PostgresError +};module.exports.PostgresError = PostgresError // eslint-disable-line const Errors = module.exports.Errors = { connection, diff --git a/cjs/src/types.js b/cjs/src/types.js index fa70d8d8..aaae12d0 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -53,7 +53,7 @@ class Identifier extends NotTagged { super() this.value = escapeIdentifier(value) } -};module.exports.Identifier = Identifier +};module.exports.Identifier = Identifier // eslint-disable-line class Parameter extends NotTagged { constructor(value, type, array) { @@ -62,7 +62,7 @@ class Parameter extends NotTagged { this.type = type this.array = array } -};module.exports.Parameter = Parameter +};module.exports.Parameter = Parameter // eslint-disable-line class Builder extends NotTagged { constructor(first, rest) { @@ -78,7 +78,7 @@ class Builder extends NotTagged { return keyword.fn(this.first, this.rest, parameters, types, transform) } -};module.exports.Builder = Builder +};module.exports.Builder = Builder // eslint-disable-line module.exports.handleValue = handleValue;function handleValue(x, parameters, types) { if (Array.isArray(x)) diff --git a/cjs/tests/bootstrap.js b/cjs/tests/bootstrap.js index d7c0dc44..15295975 100644 --- a/cjs/tests/bootstrap.js +++ b/cjs/tests/bootstrap.js @@ -19,9 +19,9 @@ module.exports.exec = exec;function exec(cmd, args) { throw stderr } -async function execAsync(cmd, args) { +async function execAsync(cmd, args) { // eslint-disable-line let stderr = '' - const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) // eslint-disable-line cp.stderr.on('data', x => stderr += x) await new Promise(x => cp.on('exit', x)) if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 6ac8be3f..74c801bf 100644 
--- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -9,7 +9,6 @@ const net = require('net') const fs = require('fs') const crypto = require('crypto') -/** @type {import('../types')} */ const postgres = require('../src/index.js') const delay = ms => new Promise(r => setTimeout(r, ms)) @@ -1747,3 +1746,85 @@ t('Large object', async() => { await lo.close() ] }) + +t('Catches type serialize errors', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: x => x, + serialize: () => { throw new Error('watSerialize') } + } + } + }) + + return [ + 'watSerialize', + (await sql`select ${ 'wat' }`.catch(e => e.message)) + ] +}) + +t('Catches type parse errors', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: () => { throw new Error('watParse') }, + serialize: x => x + } + } + }) + + return [ + 'watParse', + (await sql`select 'wat'`.catch(e => e.message)) + ] +}) + +t('Catches type serialize errors in transactions', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: x => x, + serialize: () => { throw new Error('watSerialize') } + } + } + }) + + return [ + 'watSerialize', + (await sql.begin(sql => ( + sql`select 1`, + sql`select ${ 'wat' }` + )).catch(e => e.message)) + ] +}) + +t('Catches type parse errors in transactions', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: () => { throw new Error('watParse') }, + serialize: x => x + } + } + }) + + return [ + 'watParse', + (await sql.begin(sql => ( + sql`select 1`, + sql`select 'wat'` + )).catch(e => e.message)) + ] +}) diff --git a/deno/src/connection.js b/deno/src/connection.js index 61a2bc15..78d33b32 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -315,7 +315,12 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl break } - handle(incoming.slice(0, length + 
1)) + try { + handle(incoming.slice(0, length + 1)) + } catch (e) { + query && (query.cursorFn || query.describeFirst) && write(Sync) + errored(e) + } incoming = incoming.slice(length + 1) remaining = 0 incomings = null @@ -368,9 +373,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function queryError(query, err) { - if (err.query) - return - err.stack += query.origin.replace(/.*\n/, '\n') Object.defineProperties(err, { query: { value: query.string, enumerable: options.debug }, @@ -402,6 +404,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function closed(hadError) { + incoming = Buffer.alloc(0) + remaining = 0 + incomings = null clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) @@ -842,7 +847,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function EmptyQueryResponse() { - + /* noop */ } /* c8 ignore next 3 */ @@ -857,12 +862,12 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function UnknownMessage(x) { - console.error('Unknown message', x) + console.error('Postgres.js : Unknown Message:', x[0]) // eslint-disable-line } /* c8 ignore next 3 */ function UnknownAuth(x, type) { - console.error('Unknown auth', type) + console.error('Postgres.js : Unknown Auth:', type) // eslint-disable-line } /* Messages */ diff --git a/deno/src/errors.js b/deno/src/errors.js index ed12202d..1402b850 100644 --- a/deno/src/errors.js +++ b/deno/src/errors.js @@ -4,7 +4,7 @@ export class PostgresError extends Error { this.name = this.constructor.name Object.assign(this, x) } -}; +}; // eslint-disable-line export const Errors = { connection, diff --git a/deno/src/types.js b/deno/src/types.js index 778ff580..0806170c 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -54,7 +54,7 @@ export class Identifier extends NotTagged { 
super() this.value = escapeIdentifier(value) } -}; +}; // eslint-disable-line export class Parameter extends NotTagged { constructor(value, type, array) { @@ -63,7 +63,7 @@ export class Parameter extends NotTagged { this.type = type this.array = array } -}; +}; // eslint-disable-line export class Builder extends NotTagged { constructor(first, rest) { @@ -79,7 +79,7 @@ export class Builder extends NotTagged { return keyword.fn(this.first, this.rest, parameters, types, transform) } -}; +}; // eslint-disable-line export function handleValue(x, parameters, types) { if (Array.isArray(x)) diff --git a/deno/tests/bootstrap.js b/deno/tests/bootstrap.js index 210a14f3..d606238a 100644 --- a/deno/tests/bootstrap.js +++ b/deno/tests/bootstrap.js @@ -19,9 +19,9 @@ function ignore(cmd, args) { throw stderr } -export async function exec(cmd, args) { +export async function exec(cmd, args) { // eslint-disable-line let stderr = '' - const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) + const cp = await spawn(cmd, args, { stdio: 'pipe', encoding: 'utf8' }) // eslint-disable-line cp.stderr.on('data', x => stderr += x) await new Promise(x => cp.on('exit', x)) if (stderr && !stderr.includes('already exists') && !stderr.includes('does not exist')) diff --git a/deno/tests/index.js b/deno/tests/index.js index 6cdd01ca..b33d4db7 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -10,7 +10,6 @@ import { net } from '../polyfills.js' import fs from 'https://deno.land/std@0.120.0/node/fs.ts' import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts' -/** @type {import('../types')} */ import postgres from '../src/index.js' const delay = ms => new Promise(r => setTimeout(r, ms)) @@ -1748,3 +1747,85 @@ nt('Large object', async() => { await lo.close() ] }) + +t('Catches type serialize errors', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: x => x, + serialize: () => { throw new Error('watSerialize') } + 
} + } + }) + + return [ + 'watSerialize', + (await sql`select ${ 'wat' }`.catch(e => e.message)) + ] +}) + +t('Catches type parse errors', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: () => { throw new Error('watParse') }, + serialize: x => x + } + } + }) + + return [ + 'watParse', + (await sql`select 'wat'`.catch(e => e.message)) + ] +}) + +t('Catches type serialize errors in transactions', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: x => x, + serialize: () => { throw new Error('watSerialize') } + } + } + }) + + return [ + 'watSerialize', + (await sql.begin(sql => ( + sql`select 1`, + sql`select ${ 'wat' }` + )).catch(e => e.message)) + ] +}) + +t('Catches type parse errors in transactions', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: () => { throw new Error('watParse') }, + serialize: x => x + } + } + }) + + return [ + 'watParse', + (await sql.begin(sql => ( + sql`select 1`, + sql`select 'wat'` + )).catch(e => e.message)) + ] +}) diff --git a/src/connection.js b/src/connection.js index 6f62c1eb..b11b371c 100644 --- a/src/connection.js +++ b/src/connection.js @@ -312,7 +312,12 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl break } - handle(incoming.slice(0, length + 1)) + try { + handle(incoming.slice(0, length + 1)) + } catch (e) { + query && (query.cursorFn || query.describeFirst) && write(Sync) + errored(e) + } incoming = incoming.slice(length + 1) remaining = 0 incomings = null @@ -365,9 +370,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function queryError(query, err) { - if (err.query) - return - err.stack += query.origin.replace(/.*\n/, '\n') Object.defineProperties(err, { query: { value: query.string, enumerable: options.debug }, @@ -399,6 +401,9 @@ function Connection(options, { onopen = noop, 
onend = noop, ondrain = noop, oncl } function closed(hadError) { + incoming = Buffer.alloc(0) + remaining = 0 + incomings = null clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) diff --git a/tests/index.js b/tests/index.js index 41cbe1a2..8406a657 100644 --- a/tests/index.js +++ b/tests/index.js @@ -9,7 +9,7 @@ import net from 'net' import fs from 'fs' import crypto from 'crypto' -import postgres from '..' +import postgres from '../src/index.js' const delay = ms => new Promise(r => setTimeout(r, ms)) const rel = x => new URL(x, import.meta.url) @@ -1746,3 +1746,85 @@ t('Large object', async() => { await lo.close() ] }) + +t('Catches type serialize errors', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: x => x, + serialize: () => { throw new Error('watSerialize') } + } + } + }) + + return [ + 'watSerialize', + (await sql`select ${ 'wat' }`.catch(e => e.message)) + ] +}) + +t('Catches type parse errors', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: () => { throw new Error('watParse') }, + serialize: x => x + } + } + }) + + return [ + 'watParse', + (await sql`select 'wat'`.catch(e => e.message)) + ] +}) + +t('Catches type serialize errors in transactions', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: x => x, + serialize: () => { throw new Error('watSerialize') } + } + } + }) + + return [ + 'watSerialize', + (await sql.begin(sql => ( + sql`select 1`, + sql`select ${ 'wat' }` + )).catch(e => e.message)) + ] +}) + +t('Catches type parse errors in transactions', async() => { + const sql = postgres({ + idle_timeout, + types: { + text: { + from: 25, + to: 25, + parse: () => { throw new Error('watParse') }, + serialize: x => x + } + } + }) + + return [ + 'watParse', + (await sql.begin(sql => ( + sql`select 1`, + sql`select 'wat'` + 
)).catch(e => e.message)) + ] +}) From 8da46a3242ef80825278e6247e1716b9bb3af3d6 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Thu, 13 Jan 2022 23:59:41 +0100 Subject: [PATCH 06/51] Fix input arrays as strings --- cjs/src/types.js | 3 +++ cjs/tests/index.js | 4 ++++ deno/src/types.js | 3 +++ deno/tests/index.js | 4 ++++ src/types.js | 3 +++ tests/index.js | 4 ++++ 6 files changed, 21 insertions(+) diff --git a/cjs/src/types.js b/cjs/src/types.js index aaae12d0..687c19eb 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -216,6 +216,9 @@ function arrayEscape(x) { } const arraySerializer = module.exports.arraySerializer = function arraySerializer(xs, serializer) { + if (Array.isArray(xs) === false) + return xs + if (!xs.length) return '{}' diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 74c801bf..105e1619 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -124,6 +124,10 @@ t('Empty array', async() => [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)] ) +t('String array', async() => + ['123', (await sql`select ${ '{1,2,3}' }::int[] as x`)[0].x.join('')] +) + t('Array of Integer', async() => ['3', (await sql`select ${ sql.array([1, 2, 3]) } as x`)[0].x[2]] ) diff --git a/deno/src/types.js b/deno/src/types.js index 0806170c..9a07ff1c 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -217,6 +217,9 @@ function arrayEscape(x) { } export const arraySerializer = function arraySerializer(xs, serializer) { + if (Array.isArray(xs) === false) + return xs + if (!xs.length) return '{}' diff --git a/deno/tests/index.js b/deno/tests/index.js index b33d4db7..da7e44b6 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -125,6 +125,10 @@ t('Empty array', async() => [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)] ) +t('String array', async() => + ['123', (await sql`select ${ '{1,2,3}' }::int[] as x`)[0].x.join('')] +) + t('Array of Integer', async() => ['3', (await sql`select ${ 
sql.array([1, 2, 3]) } as x`)[0].x[2]] ) diff --git a/src/types.js b/src/types.js index e49180e9..0d5d730e 100644 --- a/src/types.js +++ b/src/types.js @@ -216,6 +216,9 @@ function arrayEscape(x) { } export const arraySerializer = function arraySerializer(xs, serializer) { + if (Array.isArray(xs) === false) + return xs + if (!xs.length) return '{}' diff --git a/tests/index.js b/tests/index.js index 8406a657..1b38c318 100644 --- a/tests/index.js +++ b/tests/index.js @@ -124,6 +124,10 @@ t('Empty array', async() => [true, Array.isArray((await sql`select ${ sql.array([], 1009) } as x`)[0].x)] ) +t('String array', async() => + ['123', (await sql`select ${ '{1,2,3}' }::int[] as x`)[0].x.join('')] +) + t('Array of Integer', async() => ['3', (await sql`select ${ sql.array([1, 2, 3]) } as x`)[0].x[2]] ) From 3962d955353a07e88b3db03fc927a2d13243009e Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Fri, 14 Jan 2022 23:01:27 +0100 Subject: [PATCH 07/51] Don't end reserved connections prematurely --- cjs/src/connection.js | 28 +++++++++++++++++----------- cjs/src/index.js | 12 +++--------- cjs/tests/index.js | 15 +++++++++++++++ deno/src/connection.js | 28 +++++++++++++++++----------- deno/src/index.js | 12 +++--------- deno/tests/index.js | 15 +++++++++++++++ src/connection.js | 28 +++++++++++++++++----------- src/index.js | 12 +++--------- tests/index.js | 15 +++++++++++++++ 9 files changed, 105 insertions(+), 60 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index bf488d21..2ed5c924 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -91,6 +91,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl , rows = 0 , serverSignature = null , nextWriteTimer = null + , terminated = false , incomings = null , results = null , initial = null @@ -143,6 +144,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function execute(q) { + if (terminated) + return 
q.reject(Errors.connection('CONNECTION_DESTROYED', options)) + if (q.cancelled) return @@ -325,6 +329,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function connect() { + terminated = false backendParameters = {} connectTimer.start() socket.on('connect', ssl ? secure : connected) @@ -370,14 +375,13 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function queryError(query, err) { - err.stack += query.origin.replace(/.*\n/, '\n') - Object.defineProperties(err, { + query.reject(Object.create(err, { + stack: { value: err.stack + query.origin.replace(/.*\n/, '\n'), enumerable: options.debug }, query: { value: query.string, enumerable: options.debug }, parameters: { value: query.parameters, enumerable: options.debug }, args: { value: query.args, enumerable: options.debug }, types: { value: query.statement && query.statement.types, enumerable: options.debug } - }) - query.reject(err) + })) } function end() { @@ -390,6 +394,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function terminate() { + terminated = true if (query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) @@ -499,7 +504,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } } - function ReadyForQuery() { + function ReadyForQuery(x) { query && query.options.simple && query.resolve(results || result) query = results = null result = new Result() @@ -524,13 +529,14 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled) Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject) - if (query) // Consider opening if able and sent.length < 50 - return - - ending - ? 
terminate() - : onopen(connection) + if (query) + return // Consider opening if able and sent.length < 50 + connection.reserved && x[5] !== 73 + ? connection.reserved() + : ending + ? terminate() + : onopen(connection) } function CommandComplete(x) { diff --git a/cjs/src/index.js b/cjs/src/index.js index 12bf4930..e4809d0a 100644 --- a/cjs/src/index.js +++ b/cjs/src/index.js @@ -234,7 +234,9 @@ function Postgres(a, b) { function onexecute(c) { queues[c.state].remove(c) c.state = 'reserved' - c.reserved = () => queries.length && c.execute(queries.shift()) + c.reserved = () => queries.length + ? c.execute(queries.shift()) + : c.state = 'reserved' reserved.push(c) connection = c } @@ -393,14 +395,6 @@ function Postgres(a, b) { function onopen(c) { queues[c.state].remove(c) - - if (c.reserved) { - c.state = 'reserved' - c.reserved() - reserved.push(c) - return - } - if (queries.length === 0) return (c.state = 'open', open.push(c)) diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 105e1619..085b841b 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -1832,3 +1832,18 @@ t('Catches type parse errors in transactions', async() => { )).catch(e => e.message)) ] }) + +t('Prevent premature end of connection in transaction', async() => { + const sql = postgres({ max_lifetime: 0.1, idle_timeout }) + const result = await sql.begin(async sql => { + await sql`select 1` + await delay(200) + return 'yay' + }) + + + return [ + 'yay', + result + ] +}) diff --git a/deno/src/connection.js b/deno/src/connection.js index 78d33b32..e158f659 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -94,6 +94,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl , rows = 0 , serverSignature = null , nextWriteTimer = null + , terminated = false , incomings = null , results = null , initial = null @@ -146,6 +147,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function execute(q) { + if 
(terminated) + return q.reject(Errors.connection('CONNECTION_DESTROYED', options)) + if (q.cancelled) return @@ -328,6 +332,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function connect() { + terminated = false backendParameters = {} connectTimer.start() socket.on('connect', ssl ? secure : connected) @@ -373,14 +378,13 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function queryError(query, err) { - err.stack += query.origin.replace(/.*\n/, '\n') - Object.defineProperties(err, { + query.reject(Object.create(err, { + stack: { value: err.stack + query.origin.replace(/.*\n/, '\n'), enumerable: options.debug }, query: { value: query.string, enumerable: options.debug }, parameters: { value: query.parameters, enumerable: options.debug }, args: { value: query.args, enumerable: options.debug }, types: { value: query.statement && query.statement.types, enumerable: options.debug } - }) - query.reject(err) + })) } function end() { @@ -393,6 +397,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function terminate() { + terminated = true if (query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) @@ -502,7 +507,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } } - function ReadyForQuery() { + function ReadyForQuery(x) { query && query.options.simple && query.resolve(results || result) query = results = null result = new Result() @@ -527,13 +532,14 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled) Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject) - if (query) // Consider opening if able and sent.length < 50 - return - - ending - ? 
terminate() - : onopen(connection) + if (query) + return // Consider opening if able and sent.length < 50 + connection.reserved && x[5] !== 73 + ? connection.reserved() + : ending + ? terminate() + : onopen(connection) } function CommandComplete(x) { diff --git a/deno/src/index.js b/deno/src/index.js index 6b0b31ef..aa340d43 100644 --- a/deno/src/index.js +++ b/deno/src/index.js @@ -235,7 +235,9 @@ function Postgres(a, b) { function onexecute(c) { queues[c.state].remove(c) c.state = 'reserved' - c.reserved = () => queries.length && c.execute(queries.shift()) + c.reserved = () => queries.length + ? c.execute(queries.shift()) + : c.state = 'reserved' reserved.push(c) connection = c } @@ -394,14 +396,6 @@ function Postgres(a, b) { function onopen(c) { queues[c.state].remove(c) - - if (c.reserved) { - c.state = 'reserved' - c.reserved() - reserved.push(c) - return - } - if (queries.length === 0) return (c.state = 'open', open.push(c)) diff --git a/deno/tests/index.js b/deno/tests/index.js index da7e44b6..0a8d5701 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -1833,3 +1833,18 @@ t('Catches type parse errors in transactions', async() => { )).catch(e => e.message)) ] }) + +t('Prevent premature end of connection in transaction', async() => { + const sql = postgres({ max_lifetime: 0.1, idle_timeout }) + const result = await sql.begin(async sql => { + await sql`select 1` + await delay(200) + return 'yay' + }) + + + return [ + 'yay', + result + ] +}) diff --git a/src/connection.js b/src/connection.js index b11b371c..a593d3e5 100644 --- a/src/connection.js +++ b/src/connection.js @@ -91,6 +91,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl , rows = 0 , serverSignature = null , nextWriteTimer = null + , terminated = false , incomings = null , results = null , initial = null @@ -143,6 +144,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function execute(q) { + if (terminated) + return 
q.reject(Errors.connection('CONNECTION_DESTROYED', options)) + if (q.cancelled) return @@ -325,6 +329,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function connect() { + terminated = false backendParameters = {} connectTimer.start() socket.on('connect', ssl ? secure : connected) @@ -370,14 +375,13 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function queryError(query, err) { - err.stack += query.origin.replace(/.*\n/, '\n') - Object.defineProperties(err, { + query.reject(Object.create(err, { + stack: { value: err.stack + query.origin.replace(/.*\n/, '\n'), enumerable: options.debug }, query: { value: query.string, enumerable: options.debug }, parameters: { value: query.parameters, enumerable: options.debug }, args: { value: query.args, enumerable: options.debug }, types: { value: query.statement && query.statement.types, enumerable: options.debug } - }) - query.reject(err) + })) } function end() { @@ -390,6 +394,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function terminate() { + terminated = true if (query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) @@ -499,7 +504,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } } - function ReadyForQuery() { + function ReadyForQuery(x) { query && query.options.simple && query.resolve(results || result) query = results = null result = new Result() @@ -524,13 +529,14 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl while (sent.length && (query = sent.shift()) && (query.active = true) && query.cancelled) Connection(options, {}).cancel(query.state, query.cancelled.resolve, query.cancelled.reject) - if (query) // Consider opening if able and sent.length < 50 - return - - ending - ? 
terminate() - : onopen(connection) + if (query) + return // Consider opening if able and sent.length < 50 + connection.reserved && x[5] !== 73 + ? connection.reserved() + : ending + ? terminate() + : onopen(connection) } function CommandComplete(x) { diff --git a/src/index.js b/src/index.js index 2f63f7e0..bc71d483 100644 --- a/src/index.js +++ b/src/index.js @@ -234,7 +234,9 @@ function Postgres(a, b) { function onexecute(c) { queues[c.state].remove(c) c.state = 'reserved' - c.reserved = () => queries.length && c.execute(queries.shift()) + c.reserved = () => queries.length + ? c.execute(queries.shift()) + : c.state = 'reserved' reserved.push(c) connection = c } @@ -393,14 +395,6 @@ function Postgres(a, b) { function onopen(c) { queues[c.state].remove(c) - - if (c.reserved) { - c.state = 'reserved' - c.reserved() - reserved.push(c) - return - } - if (queries.length === 0) return (c.state = 'open', open.push(c)) diff --git a/tests/index.js b/tests/index.js index 1b38c318..e1d6ba6b 100644 --- a/tests/index.js +++ b/tests/index.js @@ -1832,3 +1832,18 @@ t('Catches type parse errors in transactions', async() => { )).catch(e => e.message)) ] }) + +t('Prevent premature end of connection in transaction', async() => { + const sql = postgres({ max_lifetime: 0.1, idle_timeout }) + const result = await sql.begin(async sql => { + await sql`select 1` + await delay(200) + return 'yay' + }) + + + return [ + 'yay', + result + ] +}) From d2a7f08e35b6a2eb1a2ea377bd8695606bc55054 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 19 Jan 2022 17:07:32 +0100 Subject: [PATCH 08/51] Add deno entry point --- mod.js | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 mod.js diff --git a/mod.js b/mod.js new file mode 100644 index 00000000..29769a16 --- /dev/null +++ b/mod.js @@ -0,0 +1,2 @@ +// Deno entry +export { default } from './deno/src/index.js' From 2e0bc0df2ed034bdcf61c523d921cf5a00df14b2 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 19 Jan 2022 17:14:34 
+0100 Subject: [PATCH 09/51] Hint deno types --- mod.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mod.js b/mod.js index 29769a16..7c9e3bcd 100644 --- a/mod.js +++ b/mod.js @@ -1,2 +1,2 @@ -// Deno entry +// @deno-types="./types/index.d.ts" export { default } from './deno/src/index.js' From 1701bac6fa2e0eaa4b108155da3eb65817acd095 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 19 Jan 2022 17:21:39 +0100 Subject: [PATCH 10/51] Fix path for deno mod.js --- mod.js => deno/mod.js | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename mod.js => deno/mod.js (100%) diff --git a/mod.js b/deno/mod.js similarity index 100% rename from mod.js rename to deno/mod.js From 4db7f95c0ea9a50b178fc60dbc53051b5545355e Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 25 Jan 2022 13:08:07 +0100 Subject: [PATCH 11/51] Add jsonb helper --- src/index.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/index.js b/src/index.js index bc71d483..aac4ef17 100644 --- a/src/index.js +++ b/src/index.js @@ -81,6 +81,7 @@ function Postgres(a, b) { }, {}), unsafe, array, + jsonb, json, file }) @@ -311,6 +312,10 @@ function Postgres(a, b) { }) } + function jsonb(x) { + return new Parameter(x, 3802) + } + function json(x) { return new Parameter(x, 114) } From 02948a26fcf2ec58cb66215b53a8fc3ea3fad674 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 25 Jan 2022 13:09:21 +0100 Subject: [PATCH 12/51] Remove support for implicit array parameter expansion --- src/types.js | 3 --- tests/index.js | 4 ++-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/types.js b/src/types.js index 0d5d730e..e1f631c6 100644 --- a/src/types.js +++ b/src/types.js @@ -81,9 +81,6 @@ export class Builder extends NotTagged { }; // eslint-disable-line export function handleValue(x, parameters, types) { - if (Array.isArray(x)) - return x.map(x => handleValue(x, parameters, types)).join(',') - const value = x instanceof Parameter ? 
x.value : x if (value === undefined) throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) diff --git a/tests/index.js b/tests/index.js index e1d6ba6b..db63c320 100644 --- a/tests/index.js +++ b/tests/index.js @@ -874,7 +874,7 @@ t('dynamic insert pluck', async() => { t('array insert', async() => { await sql`create table test (a int, b int)` - return [2, (await sql`insert into test (a, b) values (${ [1, 2] }) returning *`)[0].b, await sql`drop table test`] + return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`] }) t('parameters in()', async() => { @@ -882,7 +882,7 @@ t('parameters in()', async() => { with rows as ( select * from (values (1), (2), (3), (4)) as x(a) ) - select * from rows where a in (${ [3, 4] }) + select * from rows where a in ${ sql([3, 4]) } `).count] }) From dc4cca502f2f09a766303f081bc23e3218c477c5 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 25 Jan 2022 14:27:40 +0100 Subject: [PATCH 13/51] Revert to v2 sql.json functionality --- src/index.js | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/index.js b/src/index.js index aac4ef17..3b485012 100644 --- a/src/index.js +++ b/src/index.js @@ -81,7 +81,6 @@ function Postgres(a, b) { }, {}), unsafe, array, - jsonb, json, file }) @@ -312,12 +311,8 @@ function Postgres(a, b) { }) } - function jsonb(x) { - return new Parameter(x, 3802) - } - function json(x) { - return new Parameter(x, 114) + return new Parameter(x, 3802) } function array(x, type) { From fcf76c8d19d711c6214d09e7fab20b0567caa057 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 26 Jan 2022 14:48:54 +0100 Subject: [PATCH 14/51] Reconnect logical replication connnection --- src/connection.js | 2 +- src/subscribe.js | 30 ++++++++++++++++++++++++++---- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/src/connection.js b/src/connection.js index a593d3e5..c7853e92 100644 --- 
a/src/connection.js +++ b/src/connection.js @@ -815,7 +815,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function CopyBothResponse() { - stream = new Stream.Readable({ + stream = new Stream.Duplex({ read() { socket.resume() }, /* c8 ignore next 11 */ write(chunk, encoding, callback) { diff --git a/src/subscribe.js b/src/subscribe.js index da238d7f..b81c7c3a 100644 --- a/src/subscribe.js +++ b/src/subscribe.js @@ -7,14 +7,26 @@ export default function Subscribe(postgres, options) { event = parseEvent(event) options.max = 1 + options.onclose = onclose options.connection = { ...options.connection, replication: 'database' } + let stream + , ended = false + const sql = postgres(options) + , slot = 'postgresjs_' + Math.random().toString(36).slice(2) + , end = sql.end + + sql.end = async() => { + ended = true + stream && (await new Promise(r => (stream.once('end', r), stream.end()))) + return end() + } - !connection && (subscribe.sql = sql, connection = init(sql, options.publications)) + !connection && (subscribe.sql = sql, connection = init(sql, slot, options.publications)) const fns = listeners.has(event) ? 
listeners.get(event).add(fn) @@ -25,14 +37,18 @@ export default function Subscribe(postgres, options) { fns.size === 0 && listeners.delete(event) } - return connection.then(() => ({ unsubscribe })) + return connection.then(x => (stream = x, { unsubscribe })) + + async function onclose() { + stream = null + !ended && (stream = await init(sql, slot, options.publications)) + } } - async function init(sql, publications = 'alltables') { + async function init(sql, slot, publications = 'alltables') { if (!publications) throw new Error('Missing publication names') - const slot = 'postgresjs_' + Math.random().toString(36).slice(2) const [x] = await sql.unsafe( `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT` ) @@ -48,6 +64,12 @@ export default function Subscribe(postgres, options) { } stream.on('data', data) + stream.on('error', (error) => { + console.error('Logical Replication Error - Reconnecting', error) + sql.end() + }) + + return stream function data(x) { if (x[0] === 0x77) From e82a0cafaf8e8fd0d65e1811b816fecf8be4cb78 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 26 Jan 2022 14:49:15 +0100 Subject: [PATCH 15/51] Properly call errors and clean up streams --- src/connection.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/connection.js b/src/connection.js index c7853e92..75700dd7 100644 --- a/src/connection.js +++ b/src/connection.js @@ -370,6 +370,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function errored(err) { + stream && (stream.destroy(err), stream = null) query && queryError(query, err) initial && (queryError(initial, err), initial = null) } @@ -395,7 +396,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function terminate() { terminated = true - if (query || initial || sent.length) + if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) 
clearImmediate(nextWriteTimer) @@ -839,6 +840,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function CopyDone() { stream.push(null) + stream = null } function NoticeResponse(x) { From db759fe62557274e26dc0bc5c7cbdf473c18f5bc Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 26 Jan 2022 14:51:35 +0100 Subject: [PATCH 16/51] Build deno and cjs --- cjs/src/connection.js | 6 ++++-- cjs/src/index.js | 2 +- cjs/src/subscribe.js | 30 ++++++++++++++++++++++++++---- cjs/src/types.js | 3 --- cjs/tests/index.js | 4 ++-- deno/src/connection.js | 6 ++++-- deno/src/index.js | 2 +- deno/src/subscribe.js | 30 ++++++++++++++++++++++++++---- deno/src/types.js | 3 --- deno/tests/index.js | 4 ++-- 10 files changed, 66 insertions(+), 24 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index 2ed5c924..0b170770 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -370,6 +370,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function errored(err) { + stream && (stream.destroy(err), stream = null) query && queryError(query, err) initial && (queryError(initial, err), initial = null) } @@ -395,7 +396,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function terminate() { terminated = true - if (query || initial || sent.length) + if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) clearImmediate(nextWriteTimer) @@ -815,7 +816,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function CopyBothResponse() { - stream = new Stream.Readable({ + stream = new Stream.Duplex({ read() { socket.resume() }, /* c8 ignore next 11 */ write(chunk, encoding, callback) { @@ -839,6 +840,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function CopyDone() { stream.push(null) + stream = null } function 
NoticeResponse(x) { diff --git a/cjs/src/index.js b/cjs/src/index.js index e4809d0a..b720ed56 100644 --- a/cjs/src/index.js +++ b/cjs/src/index.js @@ -312,7 +312,7 @@ function Postgres(a, b) { } function json(x) { - return new Parameter(x, 114) + return new Parameter(x, 3802) } function array(x, type) { diff --git a/cjs/src/subscribe.js b/cjs/src/subscribe.js index 9ec5aafd..a0f9dba7 100644 --- a/cjs/src/subscribe.js +++ b/cjs/src/subscribe.js @@ -7,14 +7,26 @@ module.exports = Subscribe;function Subscribe(postgres, options) { event = parseEvent(event) options.max = 1 + options.onclose = onclose options.connection = { ...options.connection, replication: 'database' } + let stream + , ended = false + const sql = postgres(options) + , slot = 'postgresjs_' + Math.random().toString(36).slice(2) + , end = sql.end + + sql.end = async() => { + ended = true + stream && (await new Promise(r => (stream.once('end', r), stream.end()))) + return end() + } - !connection && (subscribe.sql = sql, connection = init(sql, options.publications)) + !connection && (subscribe.sql = sql, connection = init(sql, slot, options.publications)) const fns = listeners.has(event) ? 
listeners.get(event).add(fn) @@ -25,14 +37,18 @@ module.exports = Subscribe;function Subscribe(postgres, options) { fns.size === 0 && listeners.delete(event) } - return connection.then(() => ({ unsubscribe })) + return connection.then(x => (stream = x, { unsubscribe })) + + async function onclose() { + stream = null + !ended && (stream = await init(sql, slot, options.publications)) + } } - async function init(sql, publications = 'alltables') { + async function init(sql, slot, publications = 'alltables') { if (!publications) throw new Error('Missing publication names') - const slot = 'postgresjs_' + Math.random().toString(36).slice(2) const [x] = await sql.unsafe( `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT` ) @@ -48,6 +64,12 @@ module.exports = Subscribe;function Subscribe(postgres, options) { } stream.on('data', data) + stream.on('error', (error) => { + console.error('Logical Replication Error - Reconnecting', error) + sql.end() + }) + + return stream function data(x) { if (x[0] === 0x77) diff --git a/cjs/src/types.js b/cjs/src/types.js index 687c19eb..615b5cec 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -81,9 +81,6 @@ class Builder extends NotTagged { };module.exports.Builder = Builder // eslint-disable-line module.exports.handleValue = handleValue;function handleValue(x, parameters, types) { - if (Array.isArray(x)) - return x.map(x => handleValue(x, parameters, types)).join(',') - const value = x instanceof Parameter ? 
x.value : x if (value === undefined) throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 085b841b..372e70de 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -874,7 +874,7 @@ t('dynamic insert pluck', async() => { t('array insert', async() => { await sql`create table test (a int, b int)` - return [2, (await sql`insert into test (a, b) values (${ [1, 2] }) returning *`)[0].b, await sql`drop table test`] + return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`] }) t('parameters in()', async() => { @@ -882,7 +882,7 @@ t('parameters in()', async() => { with rows as ( select * from (values (1), (2), (3), (4)) as x(a) ) - select * from rows where a in (${ [3, 4] }) + select * from rows where a in ${ sql([3, 4]) } `).count] }) diff --git a/deno/src/connection.js b/deno/src/connection.js index e158f659..438c3f09 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -373,6 +373,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl } function errored(err) { + stream && (stream.destroy(err), stream = null) query && queryError(query, err) initial && (queryError(initial, err), initial = null) } @@ -398,7 +399,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function terminate() { terminated = true - if (query || initial || sent.length) + if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) clearImmediate(nextWriteTimer) @@ -818,7 +819,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl /* c8 ignore next 3 */ function CopyBothResponse() { - stream = new Stream.Readable({ + stream = new Stream.Duplex({ read() { socket.resume() }, /* c8 ignore next 11 */ write(chunk, encoding, callback) { @@ -842,6 +843,7 @@ function Connection(options, { 
onopen = noop, onend = noop, ondrain = noop, oncl function CopyDone() { stream.push(null) + stream = null } function NoticeResponse(x) { diff --git a/deno/src/index.js b/deno/src/index.js index aa340d43..601b6acf 100644 --- a/deno/src/index.js +++ b/deno/src/index.js @@ -313,7 +313,7 @@ function Postgres(a, b) { } function json(x) { - return new Parameter(x, 114) + return new Parameter(x, 3802) } function array(x, type) { diff --git a/deno/src/subscribe.js b/deno/src/subscribe.js index e54414ab..8b949767 100644 --- a/deno/src/subscribe.js +++ b/deno/src/subscribe.js @@ -8,14 +8,26 @@ export default function Subscribe(postgres, options) { event = parseEvent(event) options.max = 1 + options.onclose = onclose options.connection = { ...options.connection, replication: 'database' } + let stream + , ended = false + const sql = postgres(options) + , slot = 'postgresjs_' + Math.random().toString(36).slice(2) + , end = sql.end + + sql.end = async() => { + ended = true + stream && (await new Promise(r => (stream.once('end', r), stream.end()))) + return end() + } - !connection && (subscribe.sql = sql, connection = init(sql, options.publications)) + !connection && (subscribe.sql = sql, connection = init(sql, slot, options.publications)) const fns = listeners.has(event) ? 
listeners.get(event).add(fn) @@ -26,14 +38,18 @@ export default function Subscribe(postgres, options) { fns.size === 0 && listeners.delete(event) } - return connection.then(() => ({ unsubscribe })) + return connection.then(x => (stream = x, { unsubscribe })) + + async function onclose() { + stream = null + !ended && (stream = await init(sql, slot, options.publications)) + } } - async function init(sql, publications = 'alltables') { + async function init(sql, slot, publications = 'alltables') { if (!publications) throw new Error('Missing publication names') - const slot = 'postgresjs_' + Math.random().toString(36).slice(2) const [x] = await sql.unsafe( `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT` ) @@ -49,6 +65,12 @@ export default function Subscribe(postgres, options) { } stream.on('data', data) + stream.on('error', (error) => { + console.error('Logical Replication Error - Reconnecting', error) + sql.end() + }) + + return stream function data(x) { if (x[0] === 0x77) diff --git a/deno/src/types.js b/deno/src/types.js index 9a07ff1c..ca104014 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -82,9 +82,6 @@ export class Builder extends NotTagged { }; // eslint-disable-line export function handleValue(x, parameters, types) { - if (Array.isArray(x)) - return x.map(x => handleValue(x, parameters, types)).join(',') - const value = x instanceof Parameter ? 
x.value : x if (value === undefined) throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) diff --git a/deno/tests/index.js b/deno/tests/index.js index 0a8d5701..780ac918 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -875,7 +875,7 @@ t('dynamic insert pluck', async() => { t('array insert', async() => { await sql`create table test (a int, b int)` - return [2, (await sql`insert into test (a, b) values (${ [1, 2] }) returning *`)[0].b, await sql`drop table test`] + return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`] }) t('parameters in()', async() => { @@ -883,7 +883,7 @@ t('parameters in()', async() => { with rows as ( select * from (values (1), (2), (3), (4)) as x(a) ) - select * from rows where a in (${ [3, 4] }) + select * from rows where a in ${ sql([3, 4]) } `).count] }) From 1248ca77513bac847fcbb3aa12902f8ed43d07a3 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 2 Feb 2022 13:37:56 +0100 Subject: [PATCH 17/51] Re-add float8 to number --- src/types.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/types.js b/src/types.js index e1f631c6..6e9ad166 100644 --- a/src/types.js +++ b/src/types.js @@ -9,7 +9,7 @@ export const types = { }, number: { to: 0, - from: [21, 23, 26, 700], + from: [21, 23, 26, 700, 701], serialize: x => '' + x, parse: x => +x }, From 445b5fa29ff683d561be7a71bdad8a4f4d94529e Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sat, 5 Feb 2022 21:55:33 +0100 Subject: [PATCH 18/51] Fix wrong dynamic sql concat --- src/connection.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/connection.js b/src/connection.js index 75700dd7..7978f68c 100644 --- a/src/connection.js +++ b/src/connection.js @@ -245,7 +245,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function fragment(string, q, parameters, types) { q.fragment = true - return 
stringify(q, string + q.strings[0], q.args[0], parameters, types) + return stringify(q, q.strings[0], q.args[0], parameters, types) } function write(x, fn) { @@ -533,7 +533,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (query) return // Consider opening if able and sent.length < 50 - connection.reserved && x[5] !== 73 + connection.reserved && x[5] !== 73 // I ? connection.reserved() : ending ? terminate() From ece705a0f47d4967d0d098fa15ddcfc58e31cadd Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sat, 5 Feb 2022 22:21:51 +0100 Subject: [PATCH 19/51] Update cjs and deno --- cjs/src/connection.js | 4 ++-- cjs/src/types.js | 2 +- deno/src/connection.js | 4 ++-- deno/src/types.js | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index 0b170770..9ca23433 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -245,7 +245,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function fragment(string, q, parameters, types) { q.fragment = true - return stringify(q, string + q.strings[0], q.args[0], parameters, types) + return stringify(q, q.strings[0], q.args[0], parameters, types) } function write(x, fn) { @@ -533,7 +533,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (query) return // Consider opening if able and sent.length < 50 - connection.reserved && x[5] !== 73 + connection.reserved && x[5] !== 73 // I ? connection.reserved() : ending ? 
terminate() diff --git a/cjs/src/types.js b/cjs/src/types.js index 615b5cec..25293c12 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -9,7 +9,7 @@ const types = module.exports.types = { }, number: { to: 0, - from: [21, 23, 26, 700], + from: [21, 23, 26, 700, 701], serialize: x => '' + x, parse: x => +x }, diff --git a/deno/src/connection.js b/deno/src/connection.js index 438c3f09..9c0576a6 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -248,7 +248,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function fragment(string, q, parameters, types) { q.fragment = true - return stringify(q, string + q.strings[0], q.args[0], parameters, types) + return stringify(q, q.strings[0], q.args[0], parameters, types) } function write(x, fn) { @@ -536,7 +536,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (query) return // Consider opening if able and sent.length < 50 - connection.reserved && x[5] !== 73 + connection.reserved && x[5] !== 73 // I ? connection.reserved() : ending ? 
terminate() diff --git a/deno/src/types.js b/deno/src/types.js index ca104014..0886859c 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -10,7 +10,7 @@ export const types = { }, number: { to: 0, - from: [21, 23, 26, 700], + from: [21, 23, 26, 700, 701], serialize: x => '' + x, parse: x => +x }, From 81c5df6ce1905b1654056bbe0bf92c088ee5f283 Mon Sep 17 00:00:00 2001 From: dilan-dio4 <54545871+dilan-dio4@users.noreply.github.com> Date: Sun, 6 Feb 2022 15:02:11 -0500 Subject: [PATCH 20/51] Updated README (#264) * Cleaned up some language in README * README update * Moved sections to advanced query methods * cursor to async iterators * sql array removed parenthesis * https://github.com/porsager/postgres/pull/264#issuecomment-1030895092 2 and 3 --- README.md | 833 +++++++++++++++++++++++++++++++----------------------- 1 file changed, 486 insertions(+), 347 deletions(-) diff --git a/README.md b/README.md index 601a277e..55990c8c 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,10 @@ Fastest full PostgreSQL nodejs client -- [🚀 Fastest full featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results) +- [🚀 Fastest full-featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results) - 🚯 1850 LOC - 0 dependencies - 🏷 ES6 Tagged Template Strings at the core - 🏄‍♀️ Simple surface API +- 🖊️ Dynamic query support - 💬 Chat on [Gitter](https://gitter.im/porsager/postgres)
@@ -14,32 +15,66 @@ Good UX with Postgres.js
-**Install** +### Installation ```bash $ npm install postgres ``` -**Use** +### Usage ```js -// db.js const postgres = require('postgres') +// import postgres from 'postgres' const sql = postgres({ ...options }) // will default to the same as psql -module.exports = sql -``` - -```js -// other.js -const sql = require('./db.js') +const insertUser = await sql` + INSERT INTO users ${ + sql({ name: "Serena", age: 35 }) + } RETURNING * +`; +// [{ name: "Serena", age: 35 }] -const users = await sql` +const selectUsers = await sql` select name, age from users ` -// users: [{ name: 'Murray', age: 68 }, { name: 'Walter', age: 78 }] -``` - -## Connection options `postgres([url], [options])` +// [{ name: "Serena", age: 35 }, { name: 'Murray', age: 68 }, ...] +``` + +# Table of Contents + +* [Connection](#connection) +* [Queries](#queries) + * [Select](#select) + * [Insert](#insert) + * [Update](#update) + * [Delete](#delete) +* [Dynamic queries](#dynamic-queries) + * [Building partial queries](#partial-queries) + * [WHERE clause](#dynamic-where-clause) + * [Identifiers](#identifier-and-value-utilities) +* [Advanced query methods](#advanced-query-methods) + * [`forEach`](#foreach) + * [`cursor`](#cursor) + * [`describe`](#describe) + * [`raw`](#raw) + * [`file`](#file) + * [Transactions](#transactions) +* [Custom types](#custom-types) +* [Advanced communication](#advanced-communication) + * [`LISTEN` and `NOTIFY`](#listen-and-notify) + * [Subscribe / Realtime](#subscribe-realtime) +* [Connection options](#connection-options) + * [SSL](#ssl) + * [Multi-host connection](#multi-host-connections-high-availability-ha) + * [Connection timeout](#connection-timeout) + * [Environmental variables](#environmental-variables) +* [Error handling](#error-handling) +* [TypeScript support](#typescript-support) + + +## Connection + +### `postgres([url], [options])` You can use either a `postgres://` url connection string or the options to define your database connection properties. 
Options in the object will override any present in the url. @@ -47,86 +82,27 @@ You can use either a `postgres://` url connection string or the options to defin const sql = postgres('postgres://username:password@host:port/database', { host : '', // Postgres ip address[s] or domain name[s] port : 5432, // Postgres server port[s] - path : '', // unix socket path (usually '/tmp') database : '', // Name of database to connect to username : '', // Username of database user password : '', // Password of database user - ssl : false, // true, prefer, require, tls.connect options - max : 10, // Max number of connections - idle_timeout : 0, // Idle connection timeout in seconds - connect_timeout : 30, // Connect timeout in seconds - no_prepare : false, // No automatic creation of prepared statements - types : [], // Array of custom types, see more below - onnotice : fn, // Defaults to console.log - onparameter : fn, // (key, value) when server param change - debug : fn, // Is called with (connection, query, params) - transform : { - column : fn, // Transforms incoming column names - value : fn, // Transforms incoming row values - row : fn // Transforms entire rows - }, - connection : { - application_name : 'postgres.js', // Default application_name - ... // Other connection parameters - }, - target_session_attrs : null, // Use 'read-write' with multiple hosts to - // ensure only connecting to primary - fetch_types : true, // Automatically fetches types on connect - // on initial connection. + ...and more }) ``` -### SSL -More info for the `ssl` option can be found in the [Node.js docs for tls connect options](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_new_tls_tlssocket_socket_options). +More options can be found in the [Advanced Connection Options section](#advanced-connection-options). 
-Although it is [vulnerable to MITM attacks](https://security.stackexchange.com/a/229297/174913), a common configuration for the `ssl` option for some cloud providers like Heroku is to set `rejectUnauthorized` to `false` (if `NODE_ENV` is `production`): +## Queries -```js -const sql = - process.env.NODE_ENV === 'production' - ? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates" - // https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl - postgres({ ssl: { rejectUnauthorized: false } }) - : postgres(); -``` - -### Multi host connections - High Availability (HA) - -Connection uri strings with multiple hosts works like in [`psql multiple host uris`](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS) - -Connecting to the specified hosts/ports will be tried in order, and on a successfull connection retries will be reset. This ensures that hosts can come up and down seamless to your application. +### ```sql`` -> Promise``` -If you specify `target_session_attrs: 'primary'` or `PGTARGETSESSIONATTRS=primary` Postgres.js will only connect to a the primary host allowing for zero down time failovers. +Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using this advanced form of template literals benefits developers by: -### Auto fetching of array types - -When Postgres.js first connects to the database it automatically fetches array type information. - -If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled. +1. **Enforcing** safe query generation +2. Giving the `sql`` ` function powerful [utility](#insert) and [dynamic parameterization](#dynamic-queries) features. 
-You can disable fetching array types by setting `fetch_types` to `false` when creating an instance. +Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholder `$1, $2, ...`. This is then sent to the database as a parameter to handle escaping & casting. -### Environment Variables for Options - -It is also possible to connect to the database without a connection string or any options. Postgres.js will fall back to the common environment variables used by `psql` as in the table below: - -```js -const sql = postgres() -``` - -| Option | Environment Variables | -| ----------------- | ------------------------ | -| `host` | `PGHOST` | -| `port` | `PGPORT` | -| `database` | `PGDATABASE` | -| `username` | `PGUSERNAME` or `PGUSER` | -| `password` | `PGPASSWORD` | -| `idle_timeout` | `PGIDLE_TIMEOUT` | -| `connect_timeout` | `PGCONNECT_TIMEOUT` | - -## Query ```sql` ` -> Promise``` - -A query will always return a `Promise` which resolves to a results array `[...]{ count, command, columns }`. Destructuring is great to immediately access the first element. +All queries will return a `Result` array, mapping column names to each row. ```js @@ -143,13 +119,16 @@ const [new_user] = await sql` // new_user = { user_id: 1, name: 'Murray', age: 68 } ``` +Please note that queries are executed when `awaited` – or manually by using `.execute`. + #### Query parameters -Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use JS tagged template literals as usual. +Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use JS tagged template literals as usual. **Dynamic and partial queries can be seen in the [next section]()**. 
```js +let searchName = 'Mur' +let searchAge = 60 -let search = 'Mur' const users = await sql` select @@ -157,167 +136,48 @@ const users = await sql` age from users where - name like ${ search + '%' } + name like ${searchName + '%'} + and age > ${searchAge} ` // users = [{ name: 'Murray', age: 68 }] ``` -> Be careful with quotation marks here. Because Postgres infers the types, you don't need to wrap your interpolated parameters in quotes like `'${name}'`. In fact, this will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter. +> Be careful with quotation marks here. Because Postgres infers column types, you do not need to wrap your interpolated parameters in quotes like `'${name}'`. This will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter. -#### Arrays -Arrays will be handled by replacement parameters too, so `where in` queries are also simple. +### Select ```js +const columns = ['name', 'age'] -const users = await sql` - select - * - from users - where age in (${ [68, 75, 23] }) +sql` + select ${ + sql(columns) + } from users ` +// Is translated into this query: +select "name", "age" from users ``` -### TypeScript support - -`postgres` has TypeScript support. 
You can pass a row list type for your queries in this way: -```ts -interface User { - id: number - name: string -} - -const users = await sql`SELECT * FROM users` -users[0].id // ok => number -users[1].name // ok => string -users[0].invalid // fails: `invalid` does not exists on `User` -``` - -However, be sure to check the array length to avoid accessing properties of `undefined` rows: -```ts -const users = await sql`SELECT * FROM users WHERE id = ${id}` -if (!users.length) - throw new Error('Not found') -return users[0] -``` - -You can also prefer destructuring when you only care about a fixed number of rows. -In this case, we recommand you to prefer using tuples to handle `undefined` properly: -```ts -const [user]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` -if (!user) // => User | undefined - throw new Error('Not found') -return user // => User - -// NOTE: -const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]` -const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // don't fail : `second: User | undefined` -``` - -We do our best to type all the public API, however types are not always updated when features are added ou changed. Feel free to open an issue if you have trouble with types. - -## forEach ```sql` `.forEach(fn) -> Promise``` - -If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. -```js - -await sql` - select created_at, name from events -`.forEach(row => { - // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' } -}) - -// No more rows - -``` - -## Cursor ```sql` `.cursor([rows = 1], fn) -> Promise``` - -Use cursors if you need to throttle the amount of rows being returned from a query. New results won't be requested until the promise / async callback function has resolved. 
- -```js - -await sql` - select * from generate_series(1,4) as x -`.cursor(async ([row]) => { - // row = { x: 1 } - await http.request('https://example.com/wat', { row }) -}) - -// No more rows - -``` - -A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument. That is usefull if you can do work with the rows in parallel like in this example: - -```js - -await sql` - select * from generate_series(1,1000) as x -`.cursor(10, async rows => { - // rows = [{ x: 1 }, { x: 2 }, ... ] - await Promise.all(rows.map(row => - http.request('https://example.com/wat', { row }) - )) -}) - -``` - -If an error is thrown inside the callback function no more rows will be requested and the promise will reject with the thrown error. - -You can also stop receiving any more rows early by returning an end token `sql.END` from the callback function. - ```js +let resultOne = await sql` + select user_id, name from users +` +// [{ user_id: 0, name: "Serena" }, { user_id: 1, name: "Murray" }, { user_id: 2, name: "Lysander" }, ...] -await sql` - select * from generate_series(1,1000) as x -`.cursor(row => { - return Math.random() > 0.9 && sql.END -}) +resultOne.unshift() +let resultTwo = await sql` + select user_id from users where user_id IN ${resultOne.map(row => row.user_id)} +` +// [{ user_id: 1, name: 'Murray' }, { user_id: 2, name: "Lysander" }, ...] ``` -## Raw ```sql``.raw()``` - -Using `.raw()` will return rows as an array with `Buffer` values for each column, instead of objects. - -This can be useful to receive identical named columns, or for specific performance / transformation reasons. The column definitions are still included on the result array with access to parsers for each column. - -## Listen and notify - -When you call listen, a dedicated connection will automatically be made to ensure that you receive notifications in real time. 
This connection will be used for any further calls to listen. Listen returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active. +### Insert ```js - -await sql.listen('news', payload => { - const json = JSON.parse(payload) - console.log(json.this) // logs 'is' -}) - -``` - -Notify can be done as usual in sql, or by using the `sql.notify` method. -```js - -sql.notify('news', JSON.stringify({ no: 'this', is: 'news' })) - -``` - -## Tagged template function ``` sql`` ``` -[Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) are not just ordinary template literal strings. They allow the function to handle any parameters within before interpolation. This means that they can be used to enforce a safe way of writing queries, which is what Postgres.js does. Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholders `$1, $2, ...` and then sent to the database as a parameter to let it handle any need for escaping / casting. - -This also means you cannot write dynamic queries or concat queries together by simple string manipulation. To enable dynamic queries in a safe way, the `sql` function doubles as a regular function which escapes any value properly. It also includes overloads for common cases of inserting, selecting, updating and querying. - -## Dynamic query helpers - `sql()` inside tagged template - -Postgres.js has a safe, ergonomic way to aid you in writing queries. This makes it easier to write dynamic `insert`, `select` and `update` queries, and pass `where` parameters. 
- -#### Insert - -```js - const user = { name: 'Murray', age: 68 @@ -329,39 +189,47 @@ sql` } ` -// Is translated into this query: +// Is translated to: insert into users ("name", "age") values ($1, $2) - ``` -You can leave out the column names and simply do `sql(user)` if you want to get all fields from the object as columns, but be careful not to allow users to supply columns you don't want. +**You can omit column names and simply execute `sql(user)` to get all the fields from the object as columns**. Be careful to not allow users to supply columns that you do not want to be inserted. #### Multiple inserts in one query If you need to insert multiple rows at the same time it's also much faster to do it with a single `insert`. Simply pass an array of objects to `sql()`. ```js +const users = [ + { + name: 'Murray', + age: 68, + garbage: 'ignore' + }, + { + name: 'Walter', + age: 78 + } +] -const users = [{ - name: 'Murray', - age: 68, - garbage: 'ignore' -}, { - name: 'Walter', - age: 78 -}] +sql`insert into users ${sql(users, 'name', 'age')}` -sql` - insert into users ${ - sql(users, 'name', 'age') - } -` +// Is translated to: +insert into users ("name", "age") values ($1, $2), ($3, $4) -// Is translated into this query: +// Omitting column names + +users[0] = { + name: 'Serena', + age: 35, +} + +sql`insert into users ${sql(users)}` + +// Is translated to: insert into users ("name", "age") values ($1, $2), ($3, $4) ``` -#### Update - +### Update This is also useful for update queries ```js @@ -374,149 +242,222 @@ sql` update users set ${ sql(user, 'name') } where - id = ${ user.id } + user_id = ${user.id} ` -// Is translated into this query: -update users set "name" = $1 where id = $2 +// Is translated to: +update users set "name" = $1 where user_id = $2 ``` -#### Select +### Delete ```js -const columns = ['name', 'age'] +const user = { + id: 1, + name: 'Muray' +} -sql` - select ${ - sql(columns) - } from users -` +sql`delete from users where user_id = ${user.id}` 
-// Is translated into this query: -select "name", "age" from users +// Is translated to: +delete from users where user_id = $1 ``` -#### Dynamic table name +## Dynamic queries + +Postgres.js features a powerful dynamic query parser for conditionally appending/omitting query fragments. + +This works by nestings a ` sql`` ` call within another ` sql`` ` call. + +#### Partial queries ```js +let savedQuery = () => sql`and age > 50` -const table = 'users' +let isQueryingForAge = true sql` - select id from ${ sql(table) } + select + * + from users + where + name is not null + ${isQueryingForAge ? + savedQuery() + : + sql`` + } ` - -// Is translated into this query: -select id from "users" ``` -#### Arrays `sql.array(Array)` +#### Dynamic where clause +```js +sql` + select + * + from users ${id ? + sql`where user_id = ${ id }` + : + sql`` + } +` -PostgreSQL has a native array type which is similar to js arrays, but only allows the same type and shape for nested items. This method automatically infers the item type and serializes js arrays into PostgreSQL arrays. +// Is translated to: +select * from users +// Or +select * from users where user_id = $1 +``` +#### Dynamic filters ```js +let ageFilter = 50; -const types = sql` - insert into types ( - integers, - strings, - dates, - buffers, - multi - ) values ( - ${ sql.array([1,2,3,4,5]) }, - ${ sql.array(['Hello', 'Postgres']) }, - ${ sql.array([new Date(), new Date(), new Date()]) }, - ${ sql.array([Buffer.from('Hello'), Buffer.from('Postgres')]) }, - ${ sql.array([[[1,2],[3,4]][[5,6],[7,8]]]) }, - ) +sql` + select + * + from users + where + age > ${ageFilter} + ${id ? + sql`and user_id = ${id}` + : + sql`` + } ` +// Is translated to: +select * from users where age > $1 +// Or +select * from users where age > $1 and user_id = $2 ``` -#### JSON `sql.json(object)` +### Identifier and value utilities + +#### Arrays +Arrays will be handled by replacement parameters too, so `where in` queries are also simple. 
```js +const users = await sql` + select + * + from users + where age in ${sql([68, 75, 23])} +` +``` -const body = { hello: 'postgres' } +#### SQL functions -const [{ json }] = await sql` - insert into json ( - body - ) values ( - ${ sql.json(body) } - ) - returning body +```js +let now = true + +sql` + update users set updated_at = ${ now ? sql`now()` : someDate } ` +``` + +#### Table names + +```js +const table = 'users' -// json = { hello: 'postgres' } +sql` + select id from ${ sql(table) } +` ``` -## File query `sql.file(path, [args], [options]) -> Promise` +## Advanced query methods -Using an `.sql` file for a query. The contents will be cached in memory so that the file is only read once. +### forEach +#### ```sql``.forEach(fn) -> Promise``` +If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. ```js -sql.file(path.join(__dirname, 'query.sql'), [], { - cache: true // Default true - disable for single shot queries or memory reasons +await sql` + select created_at, name from events +`.forEach(row => { + // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' } }) +// No more rows ``` -## Subscribe / Realtime +### Cursor +#### ```sql``.cursor([rows = 1], fn) -> Promise``` -Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to realtime updates of `insert`, `update` and `delete` operations. +Use cursors if you need to throttle the amount of rows being returned from a query. New results won't be requested until the promise / async callback function has resolved. -> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser. 
+```js +for await (const [row] of sql`select * from generate_series(1,4) as x`.cursor()) { + // row = { x: 1 } + await http.request('https://example.com/wat', { row }) +} -### Quick start +// All rows iterated +``` -#### Create a publication (eg. in migration) -```sql -CREATE PUBLICATION alltables FOR ALL TABLES +A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as an argument of `.cursor`: + +```js +for await (const rows of sql`select * from generate_series(1,1000) as x`.cursor(10)) { + // rows = [{ x: 1 }, { x: 2 }, ... ] + await Promise.all(rows.map(row => + http.request('https://example.com/wat', { row }) + )) +} ``` -#### Subscribe to updates +If an error is thrown inside the callback function no more rows will be requested and the promise will reject with the thrown error. + +You can also stop receiving any more rows early by returning an end token `sql.END` from the callback function. + ```js -const sql = postgres({ publications: 'alltables' }) -const { unsubscribe } = await sql.subscribe('insert:events', row => - // tell about new event row over eg. websockets or do something else -) +await sql` + select * from generate_series(1,1000) as x +`.cursor(row => { + return Math.random() > 0.9 && sql.END +}) + ``` -### Subscribe pattern +### describe +#### ```sql``.describe([rows = 1], fn) -> Promise``` -You can subscribe to specific operations, tables or even rows with primary keys. +Rather than executing a given query, `.describe` will return information utilized in the query process. This information can include the query identifier, column types, etc. -### `operation` `:` `schema` `.` `table` `=` `primary_key` +This is useful for debugging and analyzing your Postgres queries. 
Furthermore, **`.describe` will give you access to the final generated query string that would be executed.** -**`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*` +### Raw +#### ```sql``.raw()``` -**`schema`** defaults to `public.` +Using `.raw()` will return rows as an array with `Buffer` values for each column, instead of objects. -**`table`** is a specific table name and defaults to `*` +This can be useful to receive identically named columns, or for specific performance/transformation reasons. The column definitions are still included on the result array, plus access to parsers for each column. -**`primary_key`** can be used to only subscribe to specific rows +### File +#### `sql.file(path, [args], [options]) -> Promise` -#### Examples +Using a `.sql` file for a query. + +The contents will be cached in memory so that the file is only read once. ```js -sql.subscribe('*', () => /* everything */ ) -sql.subscribe('insert', () => /* all inserts */ ) -sql.subscribe('*:users', () => /* all operations on the public.users table */ ) -sql.subscribe('delete:users', () => /* all deletes on the public.users table */ ) -sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ ) -``` -## Transactions +sql.file(path.join(__dirname, 'query.sql'), [], { + cache: true // Default true - disable for single shot queries or memory reasons +}) +``` + +### Transactions #### BEGIN / COMMIT `sql.begin(fn) -> Promise` -Calling begin with a function will return a Promise which resolves with the returned value from the function. The function provides a single argument which is `sql` with a context of the newly created transaction. `BEGIN` is automatically called, and if the Promise fails `ROLLBACK` will be called. If it succeeds `COMMIT` will be called. +Calling `.begin` with a function will return a Promise. This will resolve with the returned value from the function. 
The function provides a single argument which is `sql` with a context of the newly created transaction. + +`BEGIN` is automatically called, and if the Promise fails `ROLLBACK` will be called. If it succeeds `COMMIT` will be called. ```js @@ -542,7 +483,6 @@ const [user, account] = await sql.begin(async sql => { ``` - #### SAVEPOINT `sql.savepoint([name], fn) -> Promise` ```js @@ -581,6 +521,19 @@ sql.begin(async sql => { Do note that you can often achieve the same result using [`WITH` queries (Common Table Expressions)](https://www.postgresql.org/docs/current/queries-with.html) instead of using transactions. +
+sql.unsafe - Advanced unsafe use cases + +### Unsafe queries `sql.unsafe(query, [args], [options]) -> promise` + +If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. + +```js + +sql.unsafe('select ' + danger + ' from users where id = ' + dragons) + +``` +
## Custom Types @@ -589,7 +542,6 @@ You can add ergonomic support for custom types, or simply pass an object with a Adding Query helpers is the recommended approach which can be done like this: ```js - const sql = postgres({ types: { rect: { @@ -624,6 +576,76 @@ const [custom] = sql` ``` +## Advanced communication + +### Listen and notify + +When you call `.listen`, a dedicated connection will be created to ensure that you receive notifications in real-time. This connection will be used for any further calls to `.listen`. + +`.listen` returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active. + +```js + +await sql.listen('news', payload => { + const json = JSON.parse(payload) + console.log(json.this) // logs 'is' +}) + +``` + +Notify can be done as usual in sql, or by using the `sql.notify` method. +```js + +sql.notify('news', JSON.stringify({ no: 'this', is: 'news' })) + +``` + +### Subscribe / Realtime + +Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to real-time updates of `insert`, `update` and `delete` operations. + +> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser. + +#### Quick start + +##### Create a publication (eg. in migration) +```sql +CREATE PUBLICATION alltables FOR ALL TABLES +``` + +##### Subscribe to updates +```js +const sql = postgres({ publications: 'alltables' }) + +const { unsubscribe } = await sql.subscribe('insert:events', row => + // tell about new event row over eg. websockets or do something else +) +``` + +#### Subscribe pattern + +You can subscribe to specific operations, tables, or even rows with primary keys. 
+ +##### `operation` `:` `schema` `.` `table` `=` `primary_key` + +**`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*` + +**`schema`** defaults to `public.` + +**`table`** is a specific table name and defaults to `*` + +**`primary_key`** can be used to only subscribe to specific rows + +#### Examples + +```js +sql.subscribe('*', () => /* everything */ ) +sql.subscribe('insert', () => /* all inserts */ ) +sql.subscribe('*:users', () => /* all operations on the public.users table */ ) +sql.subscribe('delete:users', () => /* all deletes on the public.users table */ ) +sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ ) +``` + ## Teardown / Cleanup To ensure proper teardown and cleanup on server restarts use `sql.end({ timeout: 0 })` before `process.exit()`. @@ -647,7 +669,7 @@ prexit(async () => { `Number` in javascript is only able to represent 253-1 safely which means that types in PostgreSQLs like `bigint` and `numeric` won't fit into `Number`. -Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string. +Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately, it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string. If you want to use `BigInt` you can add this custom type: @@ -659,9 +681,74 @@ const sql = postgres({ }) ``` -There is currently no way to handle `numeric / decimal` in a native way in Javascript, so these and similar will be returned as `string`. 
You can also handle types like these using [custom types](#types) if you want to. +There is currently no guaranteed way to handle `numeric / decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way this case is to use [custom types](#custom-types). + + +## Connection options + +### All Postgres options + +```js +const sql = postgres('postgres://username:password@host:port/database', { + host : '', // Postgres ip address[s] or domain name[s] + port : 5432, // Postgres server port[s] + path : '', // unix socket path (usually '/tmp') + database : '', // Name of database to connect to + username : '', // Username of database user + password : '', // Password of database user + ssl : false, // true, prefer, require, tls.connect options + max : 10, // Max number of connections + max_lifetime : null, // Max lifetime in seconds (more info below) + idle_timeout : 0, // Idle connection timeout in seconds + connect_timeout : 30, // Connect timeout in seconds + no_prepare : false, // No automatic creation of prepared statements + types : [], // Array of custom types, see more below + onnotice : fn, // Defaults to console.log + onparameter : fn, // (key, value) when server param change + debug : fn, // Is called with (connection, query, params) + transform : { + column : fn, // Transforms incoming column names + value : fn, // Transforms incoming row values + row : fn // Transforms entire rows + }, + connection : { + application_name : 'postgres.js', // Default application_name + ... // Other connection parameters + }, + target_session_attrs : null, // Use 'read-write' with multiple hosts to + // ensure only connecting to primary + fetch_types : true, // Automatically fetches types on connect + // on initial connection. +}) +``` + +Note that `max_lifetime = 60 * (30 + Math.random() * 30)` by default. 
This resolves to an interval between 45 and 90 minutes to optimize for the benefits of prepared statements **and** working nicely with Linux's OOM killer. + +### SSL + +Although [vulnerable to MITM attacks](https://security.stackexchange.com/a/229297/174913), a common configuration for the `ssl` option for some cloud providers is to set `rejectUnauthorized` to `false` (if `NODE_ENV` is `production`): + +```js +const sql = + process.env.NODE_ENV === 'production' + ? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates" + // https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl + postgres({ ssl: { rejectUnauthorized: false } }) + : postgres(); +``` + +For more information regarding `ssl` with `postgres`, check out the [Node.js documentation for tls](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_new_tls_tlssocket_socket_options). + + +### Multi-host connections - High Availability (HA) + +Multiple connection strings can be passed to `postgres()` in the form of `postgres('postgres://localhost:5432,localhost:5433', ...)`. This works the same as native the `psql` command. Read more at [multiple host uris](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS) -## The Connection Pool +Connections will be attempted in order of the specified hosts/ports. On a successful connection, all retries will be reset. This ensures that hosts can come up and down seamlessly. + +If you specify `target_session_attrs: 'primary'` or `PGTARGETSESSIONATTRS=primary` Postgres.js will only connect to the primary host, allowing for zero downtime failovers. + +### The Connection Pool Connections are created lazily once a query is created. This means that simply doing const `sql = postgres(...)` won't have any effect other than instantiating a new `sql` instance. 
@@ -673,42 +760,56 @@ Any query which was already sent over the wire will be rejected if the connectio There are no guarantees about queries executing in order unless using a transaction with `sql.begin()` or setting `max: 1`. Of course doing a series of queries, one awaiting the other will work as expected, but that's just due to the nature of js async/promise handling, so it's not necessary for this library to be concerned with ordering. -### Idle timeout +### Connection timeout By default, connections will not close until `.end()` is called. However, it may be useful to have them close automatically when: -- there is no activity for some period of time -- if using Postgres.js in Lamdas / Serverless environments -- if using Postgres.js with a database service that automatically closes the connection after some time (see [`ECONNRESET` issue](https://github.com/porsager/postgres/issues/179)) +- re-instantiating multiple ` sql`` ` instances +- using Postgres.js in a Serverless environment (Lambda, etc.) +- using Postgres.js with a database service that automatically closes connections after some time (see [`ECONNRESET` issue](https://github.com/porsager/postgres/issues/179)) -This can be done using the `idle_timeout` option to specify the amount of seconds to wait before automatically closing an idle connection. +This can be done using the `idle_timeout` or `max_lifetime` options. These configuration options specify the number of seconds to wait before automatically closing an idle connection and the maximum time a connection can exist, respectively. 
-For example, to close idle connections after 2 seconds: +For example, to close a connection that has either been idle for 2 seconds or exists for 30 seconds: ```js const sql = postgres({ - idle_timeout: 2 + idle_timeout: 2, + max_lifetime: 30 }) ``` -## Prepared statements +### Auto fetching of array types -Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `no_prepare` option. For instance — this is useful when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93). +Postgres.js will automatically fetch table/array-type information when it first connects to a database. -
sql.unsafe - Advanced unsafe use cases +If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled. -### Unsafe queries `sql.unsafe(query, [args], [options]) -> promise` +You can disable this feature by setting `fetch_types` to `false`. -If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. +### Environmental variables + +It is also possible to connect to the database without a connection string or any options. Postgres.js will fall back to the common environment variables used by `psql` as in the table below: ```js +const sql = postgres() +``` -sql.unsafe('select ' + danger + ' from users where id = ' + dragons) +| Option | Environment Variables | +| ----------------- | ------------------------ | +| `host` | `PGHOST` | +| `port` | `PGPORT` | +| `database` | `PGDATABASE` | +| `username` | `PGUSERNAME` or `PGUSER` | +| `password` | `PGPASSWORD` | +| `idle_timeout` | `PGIDLE_TIMEOUT` | +| `connect_timeout` | `PGCONNECT_TIMEOUT` | -``` -
+### Prepared statements -## Errors +Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `no_prepare` option. For instance — this is useful when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93). + +## Error handling Errors are all thrown to related queries and never globally. Errors coming from PostgreSQL itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection. @@ -736,7 +837,7 @@ The postgres protocol doesn't allow more than 65534 (16bit) parameters. If you r ##### SASL_SIGNATURE_MISMATCH > Message type X not supported -When using SASL authentication the server responds with a signature at the end of the authentication flow which needs to match the one on the client. This is to avoid [man in the middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack). If you receive this error the connection was cancelled because the server did not reply with the expected signature. +When using SASL authentication the server responds with a signature at the end of the authentication flow which needs to match the one on the client. This is to avoid [man-in-the-middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack). If you receive this error the connection was canceled because the server did not reply with the expected signature. ##### NOT_TAGGED_CALL > Query not called as a tagged template literal @@ -751,12 +852,12 @@ Postgres supports many different authentication types. This one is not supported ##### CONNECTION_CLOSED > write CONNECTION_CLOSED host:port -This error is thrown if the connection was closed without an error. 
This should not happen during normal operation, so please create an issue if this was unexpected. +This error is thrown if the connection was closed without an error. This should not happen during normal operations, so please create an issue if this was unexpected. ##### CONNECTION_ENDED > write CONNECTION_ENDED host:port -This error is thrown if the user has called [`sql.end()`](#sql_end) and performed a query afterwards. +This error is thrown if the user has called [`sql.end()`](#sql_end) and performed a query afterward. ##### CONNECTION_DESTROYED > write CONNECTION_DESTROYED host:port @@ -766,11 +867,49 @@ This error is thrown for any queries that were pending when the timeout to [`sql ##### CONNECTION_CONNECT_TIMEOUT > write CONNECTION_CONNECT_TIMEOUT host:port -This error is thrown if the startup phase of the connection (tcp, protocol negotiation and auth) took more than the default 30 seconds or what was specified using `connect_timeout` or `PGCONNECT_TIMEOUT`. +This error is thrown if the startup phase of the connection (tcp, protocol negotiation, and auth) took more than the default 30 seconds or what was specified using `connect_timeout` or `PGCONNECT_TIMEOUT`. + +## TypeScript support + +`postgres` has TypeScript support. You can pass a row list type for your queries in this way: +```ts +interface User { + id: number + name: string +} + +const users = await sql`SELECT * FROM users` +users[0].id // ok => number +users[1].name // ok => string +users[0].invalid // fails: `invalid` does not exists on `User` +``` + +However, be sure to check the array length to avoid accessing properties of `undefined` rows: +```ts +const users = await sql`SELECT * FROM users WHERE id = ${id}` +if (!users.length) + throw new Error('Not found') +return users[0] +``` + +You can also prefer destructuring when you only care about a fixed number of rows. +In this case, we recommand you to prefer using tuples to handle `undefined` properly: +```ts +const [user]: [User?] 
= await sql`SELECT * FROM users WHERE id = ${id}` +if (!user) // => User | undefined + throw new Error('Not found') +return user // => User + +// NOTE: +const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]` +const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // don't fail : `second: User | undefined` +``` + +We do our best to type all the public API, however types are not always updated when features are added ou changed. Feel free to open an issue if you have trouble with types. ## Migration tools -Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that supports Postgres.js for migrations: +Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that support Postgres.js for migrations: - https://github.com/porsager/postgres-shift - https://github.com/lukeed/ley @@ -781,4 +920,4 @@ A really big thank you to [@JAForbes](https://twitter.com/jmsfbs) who introduced Thanks to [@ACXgit](https://twitter.com/andreacoiutti) for initial tests and dogfooding. -Also thanks to [Ryan Dahl](http://github.com/ry) for letting me have the `postgres` npm package name. +Also thanks to [Ryan Dahl](http://github.com/ry) for letting me have the `postgres` npm package name. 
\ No newline at end of file From 08e3d46720ab858d317e9da6f284155ed9396b9d Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 6 Feb 2022 21:16:02 +0100 Subject: [PATCH 21/51] Fix dynamic helper regex (should be case insensitive) --- cjs/src/types.js | 4 ++-- deno/src/types.js | 4 ++-- src/types.js | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cjs/src/types.js b/cjs/src/types.js index 25293c12..7c24c90b 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -74,7 +74,7 @@ class Builder extends NotTagged { build(before, parameters, types, transform) { const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop() if (keyword.i === -1) - throw new Error('WTF') + throw new Error('Could not infer helper mode') return keyword.fn(this.first, this.rest, parameters, types, transform) } @@ -153,7 +153,7 @@ const builders = Object.entries({ ).join(',') + ')values' + valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns) } -}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x), fn])) +}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn])) function notTagged() { throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) diff --git a/deno/src/types.js b/deno/src/types.js index 0886859c..0b560166 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -75,7 +75,7 @@ export class Builder extends NotTagged { build(before, parameters, types, transform) { const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop() if (keyword.i === -1) - throw new Error('WTF') + throw new Error('Could not infer helper mode') return keyword.fn(this.first, this.rest, parameters, types, transform) } @@ -154,7 +154,7 @@ const builders = Object.entries({ ).join(',') + ')values' + valuesBuilder(Array.isArray(first) ? 
first : [first], parameters, types, transform, columns) } -}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x), fn])) +}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn])) function notTagged() { throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) diff --git a/src/types.js b/src/types.js index 6e9ad166..4e784d20 100644 --- a/src/types.js +++ b/src/types.js @@ -74,7 +74,7 @@ export class Builder extends NotTagged { build(before, parameters, types, transform) { const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop() if (keyword.i === -1) - throw new Error('WTF') + throw new Error('Could not infer helper mode') return keyword.fn(this.first, this.rest, parameters, types, transform) } @@ -153,7 +153,7 @@ const builders = Object.entries({ ).join(',') + ')values' + valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, transform, columns) } -}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x), fn])) +}).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn])) function notTagged() { throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) From 0035d5428618569a2b4814307f6076f5f289ca9b Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 6 Feb 2022 21:18:43 +0100 Subject: [PATCH 22/51] Please eslint --- tests/index.js | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/tests/index.js b/tests/index.js index db63c320..55a963f4 100644 --- a/tests/index.js +++ b/tests/index.js @@ -3,8 +3,6 @@ import { exec } from './bootstrap.js' import { t, nt, ot } from './test.js' // eslint-disable-line -import cp from 'child_process' -import path from 'path' import net from 'net' import fs from 'fs' import crypto from 'crypto' @@ -661,8 +659,6 @@ t('listen reconnects after connection error', { timeout: 3 }, async() => { const sql = postgres() , xs = [] - const a = (await 
sql`show data_directory`)[0].data_directory - const { state: { pid } } = await sql.listen('test', x => xs.push(x)) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` @@ -937,8 +933,8 @@ t('dynamic values single row', async() => { }) t('dynamic values multi row', async() => { - const [_, { b }] = await sql` - select * from (values ${ sql([['a', 'b', 'c'],['a', 'b', 'c']]) }) AS x(a, b, c) + const [, { b }] = await sql` + select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) AS x(a, b, c) ` return ['b', b] @@ -1105,12 +1101,12 @@ t('Cursor error works', async() => [ t('Multiple Cursors', { timeout: 2 }, async() => { const result = [] - const xs = await sql.begin(async sql => [ - await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async ([row]) => { + await sql.begin(async sql => [ + await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async([row]) => { result.push(row.x) await new Promise(r => setTimeout(r, 200)) }), - await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async ([row]) => { + await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async([row]) => { result.push(row.x) await new Promise(r => setTimeout(r, 100)) }) @@ -1651,9 +1647,7 @@ t('subscribe', { timeout: 2 }, async() => { t('Execute works', async() => { const result = await new Promise((resolve) => { - const sql = postgres({ ...options, fetch_types: false, debug (id, query) { - resolve(query) - }}) + const sql = postgres({ ...options, fetch_types: false, debug:(id, query) => resolve(query) }) sql`select 1`.execute() }) From 28f756979bf6d570d7b2ec6696bbb755e4d44dd1 Mon Sep 17 00:00:00 2001 From: dilan-dio4 <54545871+dilan-dio4@users.noreply.github.com> Date: Mon, 7 Feb 2022 12:43:50 -0500 Subject: [PATCH 23/51] Dead link in Readme TOC (#265) * Dead link in Readme TOC * Fixed last header point on README from wrapping --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/README.md b/README.md index 55990c8c..60147f50 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -Fastest full PostgreSQL nodejs client +Fastest full PostgreSQL nodejs client - [🚀 Fastest full-featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results) - 🚯 1850 LOC - 0 dependencies @@ -65,7 +65,7 @@ const selectUsers = await sql` * [Subscribe / Realtime](#subscribe-realtime) * [Connection options](#connection-options) * [SSL](#ssl) - * [Multi-host connection](#multi-host-connections-high-availability-ha) + * [Multi-host connection](#multi-host-connections---high-availability-ha) * [Connection timeout](#connection-timeout) * [Environmental variables](#environmental-variables) * [Error handling](#error-handling) @@ -920,4 +920,4 @@ A really big thank you to [@JAForbes](https://twitter.com/jmsfbs) who introduced Thanks to [@ACXgit](https://twitter.com/andreacoiutti) for initial tests and dogfooding. -Also thanks to [Ryan Dahl](http://github.com/ry) for letting me have the `postgres` npm package name. \ No newline at end of file +Also thanks to [Ryan Dahl](http://github.com/ry) for letting me have the `postgres` npm package name. 
From e29a8c3a8dac083f704198ede31f1949d041501a Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 8 Feb 2022 22:17:01 +0100 Subject: [PATCH 24/51] Add sql.typed --- README.md | 10 ++++++---- src/index.js | 15 +++++++++++---- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 60147f50..3d19614d 100644 --- a/README.md +++ b/README.md @@ -72,7 +72,7 @@ const selectUsers = await sql` * [TypeScript support](#typescript-support) -## Connection +## Connection ### `postgres([url], [options])` @@ -537,7 +537,9 @@ sql.unsafe('select ' + danger + ' from users where id = ' + dragons) ## Custom Types -You can add ergonomic support for custom types, or simply pass an object with a `{ type, value }` signature that contains the Postgres `oid` for the type and the correctly serialized value. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_ +You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where type is the PostgreSQL `oid` for the type and the correctly serialized value. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_ + +Using custom types is necessary if using parameters with [`DO` queries](https://www.postgresql.org/docs/9.0/sql-do.html). If not doing this, PostgreSQL will throw a `could not determine data type of parameter $1` error. 
Adding Query helpers is the recommended approach which can be done like this: @@ -560,14 +562,14 @@ const sql = postgres({ } }) -// Now you can use sql.types.rect() as specified above +// Now you can use sql.typed.rect() as specified above const [custom] = sql` insert into rectangles ( name, rect ) values ( 'wat', - ${ sql.types.rect({ x: 13, y: 37, width: 42, height: 80 }) } + ${ sql.typed.rect({ x: 13, y: 37, width: 42, height: 80 }) } ) returning * ` diff --git a/src/index.js b/src/index.js index 3b485012..7c3ef8b5 100644 --- a/src/index.js +++ b/src/index.js @@ -74,11 +74,14 @@ function Postgres(a, b) { function Sql(handler, instant) { handler.debug = options.debug + Object.entries(options.types).reduce((acc, [name, type]) => { + acc[name] = (x) => new Parameter(x, type.to) + return acc + }, typed) + Object.assign(sql, { - types: Object.entries(options.types).reduce((acc, [name, type]) => { - acc[name] = (x) => new Parameter(x, type.to) - return acc - }, {}), + types: typed, + typed, unsafe, array, json, @@ -87,6 +90,10 @@ function Postgres(a, b) { return sql + function typed(value, type) { + return new Parameter(value, type) + } + function sql(strings, ...args) { const query = strings && Array.isArray(strings.raw) ? 
new Query(strings, args, handler, cancel) From f48b2d5f7de504b38277229dd4ba42bccadd1079 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 8 Feb 2022 22:17:09 +0100 Subject: [PATCH 25/51] Update logo size --- postgresjs.svg | 71 +++++++++++--------------------------------------- 1 file changed, 15 insertions(+), 56 deletions(-) diff --git a/postgresjs.svg b/postgresjs.svg index cf59775b..9786e84a 100644 --- a/postgresjs.svg +++ b/postgresjs.svg @@ -1,57 +1,16 @@ - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + From a5fd8c7dcb003fee9553ebe1cf940a09158f0dbb Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 8 Feb 2022 23:39:50 +0100 Subject: [PATCH 26/51] Fix logo size + dark mode --- README.md | 2 +- postgresjs.svg | 31 ++++++++++++++++--------------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 3d19614d..5f23243c 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -Fastest full PostgreSQL nodejs client +Fastest full PostgreSQL nodejs client - [🚀 Fastest full-featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results) - 🚯 1850 LOC - 0 dependencies diff --git a/postgresjs.svg b/postgresjs.svg index 9786e84a..5e1fd021 100644 --- a/postgresjs.svg +++ b/postgresjs.svg @@ -1,16 +1,17 @@ - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + From 6381782991a6a056378b2b481d6d1f16ee4856e2 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 9 Feb 2022 13:08:46 +0100 Subject: [PATCH 27/51] Fix early break or return from async iterator cursor --- src/query.js | 10 ++++++++-- tests/index.js | 12 ++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/src/query.js b/src/query.js index 816fd184..c7c67478 100644 --- a/src/query.js +++ b/src/query.js @@ -1,3 +1,5 @@ +import { CLOSE } from './types.js' + const originCache = new Map() export default class Query extends Promise { @@ -71,8 +73,8 @@ export default class Query 
extends Promise { next: () => { prev && prev() const promise = new Promise((resolve, reject) => { - this.cursorFn = x => { - resolve({ value: x, done: false }) + this.cursorFn = value => { + resolve({ value, done: false }) return new Promise(r => prev = r) } this.resolve = () => (this.active = false, resolve({ done: true })) @@ -80,6 +82,10 @@ export default class Query extends Promise { }) this.execute() return promise + }, + return() { + prev && prev(CLOSE) + return { done: true } } }) } diff --git a/tests/index.js b/tests/index.js index 55a963f4..5e24a504 100644 --- a/tests/index.js +++ b/tests/index.js @@ -1126,6 +1126,18 @@ t('Cursor as async iterator', async() => { return ['1a1b2a2b', order.join('')] }) +t('Cursor as async iterator with break', async() => { + const order = [] + for await (const xs of sql`select generate_series(1,2) as x;`.cursor()) { + order.push(xs[0].x + 'a') + await delay(10) + order.push(xs[0].x + 'b') + break + } + + return ['1a1b', order.join('')] +}) + t('Transform row', async() => { const sql = postgres({ ...options, From 154e28d6e4cfc7f0e2a22d741509e861f1ee0ec5 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Fri, 18 Feb 2022 23:14:54 +0000 Subject: [PATCH 28/51] Fix async iterator cursor with rest result --- src/query.js | 3 +++ tests/index.js | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/src/query.js b/src/query.js index c7c67478..cc3514b3 100644 --- a/src/query.js +++ b/src/query.js @@ -71,6 +71,9 @@ export default class Query extends Promise { return { [Symbol.asyncIterator]: () => ({ next: () => { + if (this.executed && !this.active) + return { done: true } + prev && prev() const promise = new Promise((resolve, reject) => { this.cursorFn = value => { diff --git a/tests/index.js b/tests/index.js index 5e24a504..e9c54dde 100644 --- a/tests/index.js +++ b/tests/index.js @@ -1138,6 +1138,39 @@ t('Cursor as async iterator with break', async() => { return ['1a1b', order.join('')] }) 
+t('Async Iterator Unsafe cursor works', async() => { + const order = [] + for await (const [x] of sql.unsafe('select 1 as x union select 2 as x').cursor()) { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + } + return ['1a1b2a2b', order.join('')] +}) + +t('Async Iterator Cursor custom n works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(10)) + order.push(x.length) + + return ['10,10', order.join(',')] +}) + +t('Async Iterator Cursor custom with rest n works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(11)) + order.push(x.length) + + return ['11,9', order.join(',')] +}) + +t('Async Iterator Cursor custom with less results than batch size works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(21)) + order.push(x.length) + return ['20', order.join(',')] +}) + t('Transform row', async() => { const sql = postgres({ ...options, From ae1e90804b2a49410f1ae7e251f6d6aead4918fc Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 23 Feb 2022 16:07:46 +0000 Subject: [PATCH 29/51] Fix cjs - fixes #269 --- src/connection.js | 4 ++-- src/errors.js | 2 +- src/index.js | 5 ++--- src/query.js | 5 ++--- src/types.js | 10 ++++------ transpile.cjs | 4 ++-- 6 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/connection.js b/src/connection.js index 7978f68c..723703ce 100644 --- a/src/connection.js +++ b/src/connection.js @@ -3,11 +3,11 @@ import tls from 'tls' import crypto from 'crypto' import Stream from 'stream' -import { Identifier, Builder, handleValue, arrayParser, arraySerializer, CLOSE } from './types.js' +import { Identifier, Builder, handleValue, arrayParser, arraySerializer } from './types.js' import { Errors } from './errors.js' import Result from './result.js' import Queue from './queue.js' -import Query from './query.js' +import { Query, CLOSE } from 
'./query.js' import b from './bytes.js' export default Connection diff --git a/src/errors.js b/src/errors.js index 1402b850..f83c9f39 100644 --- a/src/errors.js +++ b/src/errors.js @@ -4,7 +4,7 @@ export class PostgresError extends Error { this.name = this.constructor.name Object.assign(this, x) } -}; // eslint-disable-line +} export const Errors = { connection, diff --git a/src/index.js b/src/index.js index 7c3ef8b5..05a68c46 100644 --- a/src/index.js +++ b/src/index.js @@ -13,12 +13,11 @@ import { toKebab, fromPascal, fromCamel, - fromKebab, - CLOSE + fromKebab } from './types.js' import Connection from './connection.js' -import Query from './query.js' +import { Query, CLOSE } from './query.js' import Queue from './queue.js' import { Errors, PostgresError } from './errors.js' import Subscribe from './subscribe.js' diff --git a/src/query.js b/src/query.js index cc3514b3..36b0748c 100644 --- a/src/query.js +++ b/src/query.js @@ -1,8 +1,7 @@ -import { CLOSE } from './types.js' - const originCache = new Map() -export default class Query extends Promise { +export const CLOSE = {} +export class Query extends Promise { constructor(strings, args, handler, canceller, options = {}) { let resolve , reject diff --git a/src/types.js b/src/types.js index 4e784d20..682afe0d 100644 --- a/src/types.js +++ b/src/types.js @@ -1,4 +1,4 @@ -import Query from './query.js' +import { Query } from './query.js' import { Errors } from './errors.js' export const types = { @@ -53,7 +53,7 @@ export class Identifier extends NotTagged { super() this.value = escapeIdentifier(value) } -}; // eslint-disable-line +} export class Parameter extends NotTagged { constructor(value, type, array) { @@ -62,7 +62,7 @@ export class Parameter extends NotTagged { this.type = type this.array = array } -}; // eslint-disable-line +} export class Builder extends NotTagged { constructor(first, rest) { @@ -78,7 +78,7 @@ export class Builder extends NotTagged { return keyword.fn(this.first, this.rest, parameters, 
types, transform) } -}; // eslint-disable-line +} export function handleValue(x, parameters, types) { const value = x instanceof Parameter ? x.value : x @@ -95,8 +95,6 @@ export function handleValue(x, parameters, types) { )) } -export const CLOSE = {} - const defaultHandlers = typeHandlers(types) function valuesBuilder(first, parameters, types, transform, columns) { diff --git a/transpile.cjs b/transpile.cjs index 1ee35626..3cf80805 100644 --- a/transpile.cjs +++ b/transpile.cjs @@ -31,7 +31,7 @@ fs.writeFileSync(path.join(root, 'package.json'), JSON.stringify({ type: 'common function transpile(x) { return x.replace(/export default function ([^(]+)/, 'module.exports = $1;function $1') - .replace(/export class ([^ ]+) ([^;]+?);/g, 'class $1 $2;module.exports.$1 = $1') + .replace(/export class ([a-z0-9_$]+)/gi, 'const $1 = module.exports.$1 = class $1') .replace(/export default /, 'module.exports = ') .replace(/export {/g, 'module.exports = {') .replace(/export const ([a-z0-9_$]+)/gi, 'const $1 = module.exports.$1') @@ -39,5 +39,5 @@ function transpile(x) { .replace(/import {([^{}]*?)} from (['"].*?['"])/gi, 'const {$1} = require($2)') .replace(/import (.*?) 
from (['"].*?['"])/gi, 'const $1 = require($2)') .replace(/import (['"].*?['"])/gi, 'require($1)') - .replace('new URL(x, import.meta.url)', 'path.join(__dirname, x)') + .replace('new URL(x, import.meta.url)', 'require("path").join(__dirname, x)') } From f4a926c31fd9c4aa137dda549a5d1f88ca5b7c5c Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 23 Feb 2022 16:09:02 +0000 Subject: [PATCH 30/51] Add rebuilds --- cjs/src/connection.js | 4 +-- cjs/src/errors.js | 4 +-- cjs/src/index.js | 20 ++++++++----- cjs/src/query.js | 14 +++++++-- cjs/src/types.js | 16 +++++------ cjs/tests/index.js | 65 +++++++++++++++++++++++++++++++++--------- deno/src/connection.js | 4 +-- deno/src/errors.js | 2 +- deno/src/index.js | 20 ++++++++----- deno/src/query.js | 14 +++++++-- deno/src/types.js | 10 +++---- deno/tests/index.js | 63 ++++++++++++++++++++++++++++++++-------- 12 files changed, 169 insertions(+), 67 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index 9ca23433..12393a4e 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -3,11 +3,11 @@ const tls = require('tls') const crypto = require('crypto') const Stream = require('stream') -const { Identifier, Builder, handleValue, arrayParser, arraySerializer, CLOSE } = require('./types.js') +const { Identifier, Builder, handleValue, arrayParser, arraySerializer } = require('./types.js') const { Errors } = require('./errors.js') const Result = require('./result.js') const Queue = require('./queue.js') -const Query = require('./query.js') +const { Query, CLOSE } = require('./query.js') const b = require('./bytes.js') module.exports = Connection diff --git a/cjs/src/errors.js b/cjs/src/errors.js index e6e8b83a..027e8b75 100644 --- a/cjs/src/errors.js +++ b/cjs/src/errors.js @@ -1,10 +1,10 @@ -class PostgresError extends Error { +const PostgresError = module.exports.PostgresError = class PostgresError extends Error { constructor(x) { super(x.message) this.name = this.constructor.name 
Object.assign(this, x) } -};module.exports.PostgresError = PostgresError // eslint-disable-line +} const Errors = module.exports.Errors = { connection, diff --git a/cjs/src/index.js b/cjs/src/index.js index b720ed56..97e2c34a 100644 --- a/cjs/src/index.js +++ b/cjs/src/index.js @@ -13,12 +13,11 @@ const { toKebab, fromPascal, fromCamel, - fromKebab, - CLOSE + fromKebab } = require('./types.js') const Connection = require('./connection.js') -const Query = require('./query.js') +const { Query, CLOSE } = require('./query.js') const Queue = require('./queue.js') const { Errors, PostgresError } = require('./errors.js') const Subscribe = require('./subscribe.js') @@ -74,11 +73,14 @@ function Postgres(a, b) { function Sql(handler, instant) { handler.debug = options.debug + Object.entries(options.types).reduce((acc, [name, type]) => { + acc[name] = (x) => new Parameter(x, type.to) + return acc + }, typed) + Object.assign(sql, { - types: Object.entries(options.types).reduce((acc, [name, type]) => { - acc[name] = (x) => new Parameter(x, type.to) - return acc - }, {}), + types: typed, + typed, unsafe, array, json, @@ -87,6 +89,10 @@ function Postgres(a, b) { return sql + function typed(value, type) { + return new Parameter(value, type) + } + function sql(strings, ...args) { const query = strings && Array.isArray(strings.raw) ? 
new Query(strings, args, handler, cancel) diff --git a/cjs/src/query.js b/cjs/src/query.js index 748e0718..8b61808d 100644 --- a/cjs/src/query.js +++ b/cjs/src/query.js @@ -1,6 +1,7 @@ const originCache = new Map() -module.exports = class Query extends Promise { +const CLOSE = module.exports.CLOSE = {} +const Query = module.exports.Query = class Query extends Promise { constructor(strings, args, handler, canceller, options = {}) { let resolve , reject @@ -69,10 +70,13 @@ module.exports = class Query extends Promise { return { [Symbol.asyncIterator]: () => ({ next: () => { + if (this.executed && !this.active) + return { done: true } + prev && prev() const promise = new Promise((resolve, reject) => { - this.cursorFn = x => { - resolve({ value: x, done: false }) + this.cursorFn = value => { + resolve({ value, done: false }) return new Promise(r => prev = r) } this.resolve = () => (this.active = false, resolve({ done: true })) @@ -80,6 +84,10 @@ module.exports = class Query extends Promise { }) this.execute() return promise + }, + return() { + prev && prev(CLOSE) + return { done: true } } }) } diff --git a/cjs/src/types.js b/cjs/src/types.js index 7c24c90b..c8c367f0 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -1,4 +1,4 @@ -const Query = require('./query.js') +const { Query } = require('./query.js') const { Errors } = require('./errors.js') const types = module.exports.types = { @@ -48,23 +48,23 @@ const BigInt = module.exports.BigInt = { class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }} -class Identifier extends NotTagged { +const Identifier = module.exports.Identifier = class Identifier extends NotTagged { constructor(value) { super() this.value = escapeIdentifier(value) } -};module.exports.Identifier = Identifier // eslint-disable-line +} -class Parameter extends NotTagged { +const Parameter = module.exports.Parameter = class Parameter extends NotTagged { constructor(value, type, array) { super() this.value = 
value this.type = type this.array = array } -};module.exports.Parameter = Parameter // eslint-disable-line +} -class Builder extends NotTagged { +const Builder = module.exports.Builder = class Builder extends NotTagged { constructor(first, rest) { super() this.first = first @@ -78,7 +78,7 @@ class Builder extends NotTagged { return keyword.fn(this.first, this.rest, parameters, types, transform) } -};module.exports.Builder = Builder // eslint-disable-line +} module.exports.handleValue = handleValue;function handleValue(x, parameters, types) { const value = x instanceof Parameter ? x.value : x @@ -95,8 +95,6 @@ module.exports.handleValue = handleValue;function handleValue(x, parameters, typ )) } -const CLOSE = module.exports.CLOSE = {} - const defaultHandlers = typeHandlers(types) function valuesBuilder(first, parameters, types, transform, columns) { diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 372e70de..7ea99fa3 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -3,8 +3,6 @@ const { exec } = require('./bootstrap.js') const { t, nt, ot } = require('./test.js') // eslint-disable-line -const cp = require('child_process') -const path = require('path') const net = require('net') const fs = require('fs') const crypto = require('crypto') @@ -12,7 +10,7 @@ const crypto = require('crypto') const postgres = require('../src/index.js') const delay = ms => new Promise(r => setTimeout(r, ms)) -const rel = x => path.join(__dirname, x) +const rel = x => require("path").join(__dirname, x) const idle_timeout = 1 const login = { @@ -661,8 +659,6 @@ t('listen reconnects after connection error', { timeout: 3 }, async() => { const sql = postgres() , xs = [] - const a = (await sql`show data_directory`)[0].data_directory - const { state: { pid } } = await sql.listen('test', x => xs.push(x)) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` @@ -937,8 +933,8 @@ t('dynamic values single row', async() => { }) t('dynamic values multi 
row', async() => { - const [_, { b }] = await sql` - select * from (values ${ sql([['a', 'b', 'c'],['a', 'b', 'c']]) }) AS x(a, b, c) + const [, { b }] = await sql` + select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) AS x(a, b, c) ` return ['b', b] @@ -1105,12 +1101,12 @@ t('Cursor error works', async() => [ t('Multiple Cursors', { timeout: 2 }, async() => { const result = [] - const xs = await sql.begin(async sql => [ - await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async ([row]) => { + await sql.begin(async sql => [ + await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async([row]) => { result.push(row.x) await new Promise(r => setTimeout(r, 200)) }), - await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async ([row]) => { + await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async([row]) => { result.push(row.x) await new Promise(r => setTimeout(r, 100)) }) @@ -1130,6 +1126,51 @@ t('Cursor as async iterator', async() => { return ['1a1b2a2b', order.join('')] }) +t('Cursor as async iterator with break', async() => { + const order = [] + for await (const xs of sql`select generate_series(1,2) as x;`.cursor()) { + order.push(xs[0].x + 'a') + await delay(10) + order.push(xs[0].x + 'b') + break + } + + return ['1a1b', order.join('')] +}) + +t('Async Iterator Unsafe cursor works', async() => { + const order = [] + for await (const [x] of sql.unsafe('select 1 as x union select 2 as x').cursor()) { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + } + return ['1a1b2a2b', order.join('')] +}) + +t('Async Iterator Cursor custom n works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(10)) + order.push(x.length) + + return ['10,10', order.join(',')] +}) + +t('Async Iterator Cursor custom with rest n works', async() => { + const order = [] + for await (const x of sql`select * from 
generate_series(1,20)`.cursor(11)) + order.push(x.length) + + return ['11,9', order.join(',')] +}) + +t('Async Iterator Cursor custom with less results than batch size works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(21)) + order.push(x.length) + return ['20', order.join(',')] +}) + t('Transform row', async() => { const sql = postgres({ ...options, @@ -1651,9 +1692,7 @@ t('subscribe', { timeout: 2 }, async() => { t('Execute works', async() => { const result = await new Promise((resolve) => { - const sql = postgres({ ...options, fetch_types: false, debug (id, query) { - resolve(query) - }}) + const sql = postgres({ ...options, fetch_types: false, debug:(id, query) => resolve(query) }) sql`select 1`.execute() }) diff --git a/deno/src/connection.js b/deno/src/connection.js index 9c0576a6..c517dcc4 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -6,11 +6,11 @@ import { tls } from '../polyfills.js' import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts' import Stream from 'https://deno.land/std@0.120.0/node/stream.ts' -import { Identifier, Builder, handleValue, arrayParser, arraySerializer, CLOSE } from './types.js' +import { Identifier, Builder, handleValue, arrayParser, arraySerializer } from './types.js' import { Errors } from './errors.js' import Result from './result.js' import Queue from './queue.js' -import Query from './query.js' +import { Query, CLOSE } from './query.js' import b from './bytes.js' export default Connection diff --git a/deno/src/errors.js b/deno/src/errors.js index 1402b850..f83c9f39 100644 --- a/deno/src/errors.js +++ b/deno/src/errors.js @@ -4,7 +4,7 @@ export class PostgresError extends Error { this.name = this.constructor.name Object.assign(this, x) } -}; // eslint-disable-line +} export const Errors = { connection, diff --git a/deno/src/index.js b/deno/src/index.js index 601b6acf..941642d7 100644 --- a/deno/src/index.js +++ b/deno/src/index.js @@ 
-14,12 +14,11 @@ import { toKebab, fromPascal, fromCamel, - fromKebab, - CLOSE + fromKebab } from './types.js' import Connection from './connection.js' -import Query from './query.js' +import { Query, CLOSE } from './query.js' import Queue from './queue.js' import { Errors, PostgresError } from './errors.js' import Subscribe from './subscribe.js' @@ -75,11 +74,14 @@ function Postgres(a, b) { function Sql(handler, instant) { handler.debug = options.debug + Object.entries(options.types).reduce((acc, [name, type]) => { + acc[name] = (x) => new Parameter(x, type.to) + return acc + }, typed) + Object.assign(sql, { - types: Object.entries(options.types).reduce((acc, [name, type]) => { - acc[name] = (x) => new Parameter(x, type.to) - return acc - }, {}), + types: typed, + typed, unsafe, array, json, @@ -88,6 +90,10 @@ function Postgres(a, b) { return sql + function typed(value, type) { + return new Parameter(value, type) + } + function sql(strings, ...args) { const query = strings && Array.isArray(strings.raw) ? 
new Query(strings, args, handler, cancel) diff --git a/deno/src/query.js b/deno/src/query.js index 816fd184..36b0748c 100644 --- a/deno/src/query.js +++ b/deno/src/query.js @@ -1,6 +1,7 @@ const originCache = new Map() -export default class Query extends Promise { +export const CLOSE = {} +export class Query extends Promise { constructor(strings, args, handler, canceller, options = {}) { let resolve , reject @@ -69,10 +70,13 @@ export default class Query extends Promise { return { [Symbol.asyncIterator]: () => ({ next: () => { + if (this.executed && !this.active) + return { done: true } + prev && prev() const promise = new Promise((resolve, reject) => { - this.cursorFn = x => { - resolve({ value: x, done: false }) + this.cursorFn = value => { + resolve({ value, done: false }) return new Promise(r => prev = r) } this.resolve = () => (this.active = false, resolve({ done: true })) @@ -80,6 +84,10 @@ export default class Query extends Promise { }) this.execute() return promise + }, + return() { + prev && prev(CLOSE) + return { done: true } } }) } diff --git a/deno/src/types.js b/deno/src/types.js index 0b560166..bd4dea1b 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -1,5 +1,5 @@ import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' -import Query from './query.js' +import { Query } from './query.js' import { Errors } from './errors.js' export const types = { @@ -54,7 +54,7 @@ export class Identifier extends NotTagged { super() this.value = escapeIdentifier(value) } -}; // eslint-disable-line +} export class Parameter extends NotTagged { constructor(value, type, array) { @@ -63,7 +63,7 @@ export class Parameter extends NotTagged { this.type = type this.array = array } -}; // eslint-disable-line +} export class Builder extends NotTagged { constructor(first, rest) { @@ -79,7 +79,7 @@ export class Builder extends NotTagged { return keyword.fn(this.first, this.rest, parameters, types, transform) } -}; // eslint-disable-line +} export function 
handleValue(x, parameters, types) { const value = x instanceof Parameter ? x.value : x @@ -96,8 +96,6 @@ export function handleValue(x, parameters, types) { )) } -export const CLOSE = {} - const defaultHandlers = typeHandlers(types) function valuesBuilder(first, parameters, types, transform, columns) { diff --git a/deno/tests/index.js b/deno/tests/index.js index 780ac918..5125f6af 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -4,8 +4,6 @@ import { Buffer } from 'https://deno.land/std@0.120.0/node/buffer.ts' import { exec } from './bootstrap.js' import { t, nt, ot } from './test.js' // eslint-disable-line -import cp from 'https://deno.land/std@0.120.0/node/child_process.ts' -import path from 'https://deno.land/std@0.120.0/node/path.ts' import { net } from '../polyfills.js' import fs from 'https://deno.land/std@0.120.0/node/fs.ts' import crypto from 'https://deno.land/std@0.120.0/node/crypto.ts' @@ -662,8 +660,6 @@ t('listen reconnects after connection error', { timeout: 3 }, async() => { const sql = postgres() , xs = [] - const a = (await sql`show data_directory`)[0].data_directory - const { state: { pid } } = await sql.listen('test', x => xs.push(x)) await sql.notify('test', 'a') await sql`select pg_terminate_backend(${ pid }::int)` @@ -938,8 +934,8 @@ t('dynamic values single row', async() => { }) t('dynamic values multi row', async() => { - const [_, { b }] = await sql` - select * from (values ${ sql([['a', 'b', 'c'],['a', 'b', 'c']]) }) AS x(a, b, c) + const [, { b }] = await sql` + select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) AS x(a, b, c) ` return ['b', b] @@ -1106,12 +1102,12 @@ t('Cursor error works', async() => [ t('Multiple Cursors', { timeout: 2 }, async() => { const result = [] - const xs = await sql.begin(async sql => [ - await sql`select 1 as cursor, x from generate_series(1,4) as x`.cursor(async ([row]) => { + await sql.begin(async sql => [ + await sql`select 1 as cursor, x from generate_series(1,4) as 
x`.cursor(async([row]) => { result.push(row.x) await new Promise(r => setTimeout(r, 200)) }), - await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async ([row]) => { + await sql`select 2 as cursor, x from generate_series(101,104) as x`.cursor(async([row]) => { result.push(row.x) await new Promise(r => setTimeout(r, 100)) }) @@ -1131,6 +1127,51 @@ t('Cursor as async iterator', async() => { return ['1a1b2a2b', order.join('')] }) +t('Cursor as async iterator with break', async() => { + const order = [] + for await (const xs of sql`select generate_series(1,2) as x;`.cursor()) { + order.push(xs[0].x + 'a') + await delay(10) + order.push(xs[0].x + 'b') + break + } + + return ['1a1b', order.join('')] +}) + +t('Async Iterator Unsafe cursor works', async() => { + const order = [] + for await (const [x] of sql.unsafe('select 1 as x union select 2 as x').cursor()) { + order.push(x.x + 'a') + await delay(100) + order.push(x.x + 'b') + } + return ['1a1b2a2b', order.join('')] +}) + +t('Async Iterator Cursor custom n works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(10)) + order.push(x.length) + + return ['10,10', order.join(',')] +}) + +t('Async Iterator Cursor custom with rest n works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(11)) + order.push(x.length) + + return ['11,9', order.join(',')] +}) + +t('Async Iterator Cursor custom with less results than batch size works', async() => { + const order = [] + for await (const x of sql`select * from generate_series(1,20)`.cursor(21)) + order.push(x.length) + return ['20', order.join(',')] +}) + t('Transform row', async() => { const sql = postgres({ ...options, @@ -1652,9 +1693,7 @@ t('subscribe', { timeout: 2 }, async() => { t('Execute works', async() => { const result = await new Promise((resolve) => { - const sql = postgres({ ...options, fetch_types: false, debug (id, query) { - 
resolve(query) - }}) + const sql = postgres({ ...options, fetch_types: false, debug:(id, query) => resolve(query) }) sql`select 1`.execute() }) From 1e09976704433b618318e7701254339516ca6ac3 Mon Sep 17 00:00:00 2001 From: dilan-dio4 <54545871+dilan-dio4@users.noreply.github.com> Date: Wed, 2 Mar 2022 11:08:40 -0500 Subject: [PATCH 31/51] Removed PathOrFileDescriptor type of types.d.ts which is undefined before around Node 16.3 (#268) --- types/index.d.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/types/index.d.ts b/types/index.d.ts index f5719589..92ee9e2e 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -573,8 +573,8 @@ declare namespace postgres { begin(options: string, cb: (sql: TransactionSql) => T | Promise): Promise>; array(value: T, type?: number): ArrayParameter; - file(path: import('node:fs').PathOrFileDescriptor, options?: { cache?: boolean }): PendingQuery>; - file(path: import('node:fs').PathOrFileDescriptor, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery>; + file(path: string | Buffer | URL | number, options?: { cache?: boolean }): PendingQuery>; + file(path: string | Buffer | URL | number, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery>; json(value: any): Parameter; } From c548c94f6ae90df7c316294fff79bfe093ccdb32 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Fri, 4 Mar 2022 21:34:43 +0100 Subject: [PATCH 32/51] Fix where in ${ sql(...) 
} helper --- cjs/src/types.js | 15 ++++++++------- cjs/tests/index.js | 12 +++++++++++- deno/src/types.js | 15 ++++++++------- deno/tests/index.js | 12 +++++++++++- src/types.js | 15 ++++++++------- tests/index.js | 12 +++++++++++- 6 files changed, 57 insertions(+), 24 deletions(-) diff --git a/cjs/src/types.js b/cjs/src/types.js index c8c367f0..86ef5cce 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -111,8 +111,15 @@ function valuesBuilder(first, parameters, types, transform, columns) { ).join(',') } +function values(first, rest, parameters, types, transform) { + const multi = Array.isArray(first[0]) + const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) + return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) +} + const builders = Object.entries({ - valuesBuilder, + values, + in: values, update(first, rest, parameters, types, transform) { return (rest.length ? rest.flat() : Object.keys(first)).map(x => @@ -138,12 +145,6 @@ const builders = Object.entries({ }).join(',') }, - values(first, rest, parameters, types, transform) { - const multi = Array.isArray(first[0]) - const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) - return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) - }, - insert(first, rest, parameters, types, transform) { const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? 
first[0] : first) return '(' + columns.map(x => diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 7ea99fa3..0f8ca9b4 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -873,7 +873,17 @@ t('array insert', async() => { return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`] }) -t('parameters in()', async() => { +t('where parameters in()', async() => { + await sql`create table test (x text)` + await sql`insert into test values ('a')` + return [ + (await sql`select * from test where x in ${ sql(['a', 'b', 'c']) }`)[0].x, + 'a', + await sql`drop table test` + ] +}) + +t('where parameters in() values before', async() => { return [2, (await sql` with rows as ( select * from (values (1), (2), (3), (4)) as x(a) diff --git a/deno/src/types.js b/deno/src/types.js index bd4dea1b..5b6fca4b 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -112,8 +112,15 @@ function valuesBuilder(first, parameters, types, transform, columns) { ).join(',') } +function values(first, rest, parameters, types, transform) { + const multi = Array.isArray(first[0]) + const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) + return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) +} + const builders = Object.entries({ - valuesBuilder, + values, + in: values, update(first, rest, parameters, types, transform) { return (rest.length ? rest.flat() : Object.keys(first)).map(x => @@ -139,12 +146,6 @@ const builders = Object.entries({ }).join(',') }, - values(first, rest, parameters, types, transform) { - const multi = Array.isArray(first[0]) - const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) - return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) - }, - insert(first, rest, parameters, types, transform) { const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? 
first[0] : first) return '(' + columns.map(x => diff --git a/deno/tests/index.js b/deno/tests/index.js index 5125f6af..e6dd4b51 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -874,7 +874,17 @@ t('array insert', async() => { return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`] }) -t('parameters in()', async() => { +t('where parameters in()', async() => { + await sql`create table test (x text)` + await sql`insert into test values ('a')` + return [ + (await sql`select * from test where x in ${ sql(['a', 'b', 'c']) }`)[0].x, + 'a', + await sql`drop table test` + ] +}) + +t('where parameters in() values before', async() => { return [2, (await sql` with rows as ( select * from (values (1), (2), (3), (4)) as x(a) diff --git a/src/types.js b/src/types.js index 682afe0d..ea04ef3d 100644 --- a/src/types.js +++ b/src/types.js @@ -111,8 +111,15 @@ function valuesBuilder(first, parameters, types, transform, columns) { ).join(',') } +function values(first, rest, parameters, types, transform) { + const multi = Array.isArray(first[0]) + const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) + return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) +} + const builders = Object.entries({ - valuesBuilder, + values, + in: values, update(first, rest, parameters, types, transform) { return (rest.length ? rest.flat() : Object.keys(first)).map(x => @@ -138,12 +145,6 @@ const builders = Object.entries({ }).join(',') }, - values(first, rest, parameters, types, transform) { - const multi = Array.isArray(first[0]) - const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first) - return valuesBuilder(multi ? first : [first], parameters, types, transform, columns) - }, - insert(first, rest, parameters, types, transform) { const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? 
first[0] : first) return '(' + columns.map(x => diff --git a/tests/index.js b/tests/index.js index e9c54dde..e889c37e 100644 --- a/tests/index.js +++ b/tests/index.js @@ -873,7 +873,17 @@ t('array insert', async() => { return [2, (await sql`insert into test (a, b) values ${ sql([1, 2]) } returning *`)[0].b, await sql`drop table test`] }) -t('parameters in()', async() => { +t('where parameters in()', async() => { + await sql`create table test (x text)` + await sql`insert into test values ('a')` + return [ + (await sql`select * from test where x in ${ sql(['a', 'b', 'c']) }`)[0].x, + 'a', + await sql`drop table test` + ] +}) + +t('where parameters in() values before', async() => { return [2, (await sql` with rows as ( select * from (values (1), (2), (3), (4)) as x(a) From 5d90a32b8426af4802bc0fe324ddaa1d7964589d Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 8 Mar 2022 22:38:04 +0100 Subject: [PATCH 33/51] Fix keep alive not working after reconnecting --- src/connection.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/connection.js b/src/connection.js index 723703ce..13e013b0 100644 --- a/src/connection.js +++ b/src/connection.js @@ -127,7 +127,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function createSocket() { const x = net.Socket() - x.setKeepAlive(true, 1000 * keep_alive) x.on('error', error) x.on('close', closed) x.on('drain', drain) @@ -353,6 +352,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl statementCount = 1 lifeTimer.start() socket.on('data', data) + socket.setKeepAlive(true, 1000 * keep_alive) const s = StartupMessage() write(s) } catch (err) { From 8a1e3ee41cc53699855bd9297541d327ef64c1a7 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 13 Mar 2022 18:04:30 +0100 Subject: [PATCH 34/51] Throw correct error when retrying in transactions - fixes #272 --- src/connection.js | 12 +++++++----- tests/index.js | 10 ++++++++++ tests/test.js 
| 4 ++-- 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/connection.js b/src/connection.js index 13e013b0..6939a7bd 100644 --- a/src/connection.js +++ b/src/connection.js @@ -749,14 +749,16 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function ErrorResponse(x) { query && (query.cursorFn || query.describeFirst) && write(Sync) const error = Errors.postgres(parseError(x)) - query && !query.retried && retryRoutines.has(error.routine) - ? retry(query) - : errored(error) + query && query.retried + ? errored(query.retried) + : query && retryRoutines.has(error.routine) + ? retry(query, error) + : errored(error) } - function retry(q) { + function retry(q, error) { delete statements[q.signature] - q.retried = true + q.retried = error execute(q) } diff --git a/tests/index.js b/tests/index.js index e889c37e..a0881a37 100644 --- a/tests/index.js +++ b/tests/index.js @@ -1456,6 +1456,16 @@ t('Recreate prepared statements on transformAssignedExpr error', async() => { ] }) +t('Throws correct error when retrying in transactions', async() => { + await sql`create table test(x int)` + const error = await sql.begin(sql => sql`insert into test (x) values (${ false })`).catch(e => e) + return [ + error.code, + '42804', + sql`drop table test` + ] +}) + t('Recreate prepared statements on RevalidateCachedQuery error', async() => { const select = () => sql`select name from test` await sql`create table test (name text)` diff --git a/tests/test.js b/tests/test.js index d184cdd8..09da8abc 100644 --- a/tests/test.js +++ b/tests/test.js @@ -33,7 +33,7 @@ async function test(o, name, options, fn) { ? 
(ignored++, ignore) : fn() ])) - .then((x) => { + .then(async x => { clearTimeout(fn.timer) if (x === ignore) return @@ -41,7 +41,7 @@ async function test(o, name, options, fn) { if (!Array.isArray(x)) throw new Error('Test should return result array') - const [expected, got] = x + const [expected, got] = await Promise.all(x) if (expected !== got) { failed = true throw new Error(util.inspect(expected) + ' != ' + util.inspect(got)) From 0aacdb3231e3b725276f71613d74d9cb6daefc18 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 13 Mar 2022 20:55:48 +0100 Subject: [PATCH 35/51] First get stack when origin is accessed - fixes #273 --- src/query.js | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/query.js b/src/query.js index 36b0748c..513c044a 100644 --- a/src/query.js +++ b/src/query.js @@ -1,4 +1,6 @@ const originCache = new Map() + , originStackCache = new Map() + , originError = Symbol('OriginError') export const CLOSE = {} export class Query extends Promise { @@ -29,7 +31,17 @@ export class Query extends Promise { this.executed = false this.signature = '' - this.origin = handler.debug ? new Error().stack : cachedError(this.strings) + this[originError] = handler.debug || !this.tagged + ? new Error() + : cachedError(this.strings) + } + + get origin() { + return this.handler.debug || !this.tagged + ? this[originError].stack + : originStackCache.has(this.strings) + ? 
originStackCache.get(this.strings) + : originStackCache.set(this.strings, this[originError].stack).get(this.strings) } static get [Symbol.species]() { @@ -143,7 +155,7 @@ function cachedError(xs) { const x = Error.stackTraceLimit Error.stackTraceLimit = 4 - originCache.set(xs, new Error().stack) + originCache.set(xs, new Error()) Error.stackTraceLimit = x return originCache.get(xs) } From 3a8efe728057ca4c19437795af79bdbfd60baf4f Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Tue, 15 Mar 2022 13:01:52 +0100 Subject: [PATCH 36/51] Fix wrong connection reserved state if initial begin query - fixes #274 --- cjs/src/connection.js | 20 ++++++++++++-------- cjs/src/query.js | 16 ++++++++++++++-- cjs/tests/index.js | 18 +++++++++++++++++- cjs/tests/test.js | 4 ++-- deno/src/connection.js | 20 ++++++++++++-------- deno/src/query.js | 16 ++++++++++++++-- deno/tests/index.js | 18 +++++++++++++++++- deno/tests/test.js | 4 ++-- src/connection.js | 6 ++++-- tests/index.js | 8 +++++++- 10 files changed, 101 insertions(+), 29 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index 12393a4e..3c6eef61 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -127,7 +127,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function createSocket() { const x = net.Socket() - x.setKeepAlive(true, 1000 * keep_alive) x.on('error', error) x.on('close', closed) x.on('drain', drain) @@ -353,6 +352,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl statementCount = 1 lifeTimer.start() socket.on('data', data) + socket.setKeepAlive(true, 1000 * keep_alive) const s = StartupMessage() write(s) } catch (err) { @@ -533,8 +533,10 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (query) return // Consider opening if able and sent.length < 50 - connection.reserved && x[5] !== 73 // I - ? connection.reserved() + connection.reserved + ? x[5] === 73 + ? 
ending && terminate() + : connection.reserved() // I : ending ? terminate() : onopen(connection) @@ -749,14 +751,16 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function ErrorResponse(x) { query && (query.cursorFn || query.describeFirst) && write(Sync) const error = Errors.postgres(parseError(x)) - query && !query.retried && retryRoutines.has(error.routine) - ? retry(query) - : errored(error) + query && query.retried + ? errored(query.retried) + : query && retryRoutines.has(error.routine) + ? retry(query, error) + : errored(error) } - function retry(q) { + function retry(q, error) { delete statements[q.signature] - q.retried = true + q.retried = error execute(q) } diff --git a/cjs/src/query.js b/cjs/src/query.js index 8b61808d..56643a40 100644 --- a/cjs/src/query.js +++ b/cjs/src/query.js @@ -1,4 +1,6 @@ const originCache = new Map() + , originStackCache = new Map() + , originError = Symbol('OriginError') const CLOSE = module.exports.CLOSE = {} const Query = module.exports.Query = class Query extends Promise { @@ -29,7 +31,17 @@ const Query = module.exports.Query = class Query extends Promise { this.executed = false this.signature = '' - this.origin = handler.debug ? new Error().stack : cachedError(this.strings) + this[originError] = handler.debug || !this.tagged + ? new Error() + : cachedError(this.strings) + } + + get origin() { + return this.handler.debug || !this.tagged + ? this[originError].stack + : originStackCache.has(this.strings) + ? 
originStackCache.get(this.strings) + : originStackCache.set(this.strings, this[originError].stack).get(this.strings) } static get [Symbol.species]() { @@ -143,7 +155,7 @@ function cachedError(xs) { const x = Error.stackTraceLimit Error.stackTraceLimit = 4 - originCache.set(xs, new Error().stack) + originCache.set(xs, new Error()) Error.stackTraceLimit = x return originCache.get(xs) } diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 0f8ca9b4..59d1c634 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -253,6 +253,12 @@ t('Parallel transactions', async() => { ])).map(x => x.count).join(''), await sql`drop table test`] }) +t('Many transactions at beginning of connection', async() => { + const sql = postgres(options) + const xs = await Promise.all(Array.from({ length: 100 }, () => sql.begin(sql => sql`select 1`))) + return [100, xs.length] +}) + t('Transactions array', async() => { await sql`create table test (a int)` @@ -1456,6 +1462,16 @@ t('Recreate prepared statements on transformAssignedExpr error', async() => { ] }) +t('Throws correct error when retrying in transactions', async() => { + await sql`create table test(x int)` + const error = await sql.begin(sql => sql`insert into test (x) values (${ false })`).catch(e => e) + return [ + error.code, + '42804', + sql`drop table test` + ] +}) + t('Recreate prepared statements on RevalidateCachedQuery error', async() => { const select = () => sql`select name from test` await sql`create table test (name text)` @@ -1595,7 +1611,6 @@ t('Copy write as first works', async() => { ] }) - t('Copy from file works', async() => { await sql`create table test (x int, y int, z int)` await new Promise(async r => fs @@ -1887,6 +1902,7 @@ t('Prevent premature end of connection in transaction', async() => { const result = await sql.begin(async sql => { await sql`select 1` await delay(200) + await sql`select 1` return 'yay' }) diff --git a/cjs/tests/test.js b/cjs/tests/test.js index b282e871..a6a83922 100644 --- 
a/cjs/tests/test.js +++ b/cjs/tests/test.js @@ -33,7 +33,7 @@ async function test(o, name, options, fn) { ? (ignored++, ignore) : fn() ])) - .then((x) => { + .then(async x => { clearTimeout(fn.timer) if (x === ignore) return @@ -41,7 +41,7 @@ async function test(o, name, options, fn) { if (!Array.isArray(x)) throw new Error('Test should return result array') - const [expected, got] = x + const [expected, got] = await Promise.all(x) if (expected !== got) { failed = true throw new Error(util.inspect(expected) + ' != ' + util.inspect(got)) diff --git a/deno/src/connection.js b/deno/src/connection.js index c517dcc4..809ff302 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -130,7 +130,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function createSocket() { const x = net.Socket() - x x.on('error', error) x.on('close', closed) x.on('drain', drain) @@ -356,6 +355,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl statementCount = 1 lifeTimer.start() socket.on('data', data) + socket const s = StartupMessage() write(s) } catch (err) { @@ -536,8 +536,10 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (query) return // Consider opening if able and sent.length < 50 - connection.reserved && x[5] !== 73 // I - ? connection.reserved() + connection.reserved + ? x[5] === 73 + ? ending && terminate() + : connection.reserved() // I : ending ? terminate() : onopen(connection) @@ -752,14 +754,16 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function ErrorResponse(x) { query && (query.cursorFn || query.describeFirst) && write(Sync) const error = Errors.postgres(parseError(x)) - query && !query.retried && retryRoutines.has(error.routine) - ? retry(query) - : errored(error) + query && query.retried + ? errored(query.retried) + : query && retryRoutines.has(error.routine) + ? 
retry(query, error) + : errored(error) } - function retry(q) { + function retry(q, error) { delete statements[q.signature] - q.retried = true + q.retried = error execute(q) } diff --git a/deno/src/query.js b/deno/src/query.js index 36b0748c..513c044a 100644 --- a/deno/src/query.js +++ b/deno/src/query.js @@ -1,4 +1,6 @@ const originCache = new Map() + , originStackCache = new Map() + , originError = Symbol('OriginError') export const CLOSE = {} export class Query extends Promise { @@ -29,7 +31,17 @@ export class Query extends Promise { this.executed = false this.signature = '' - this.origin = handler.debug ? new Error().stack : cachedError(this.strings) + this[originError] = handler.debug || !this.tagged + ? new Error() + : cachedError(this.strings) + } + + get origin() { + return this.handler.debug || !this.tagged + ? this[originError].stack + : originStackCache.has(this.strings) + ? originStackCache.get(this.strings) + : originStackCache.set(this.strings, this[originError].stack).get(this.strings) } static get [Symbol.species]() { @@ -143,7 +155,7 @@ function cachedError(xs) { const x = Error.stackTraceLimit Error.stackTraceLimit = 4 - originCache.set(xs, new Error().stack) + originCache.set(xs, new Error()) Error.stackTraceLimit = x return originCache.get(xs) } diff --git a/deno/tests/index.js b/deno/tests/index.js index e6dd4b51..468219eb 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -254,6 +254,12 @@ t('Parallel transactions', async() => { ])).map(x => x.count).join(''), await sql`drop table test`] }) +t('Many transactions at beginning of connection', async() => { + const sql = postgres(options) + const xs = await Promise.all(Array.from({ length: 100 }, () => sql.begin(sql => sql`select 1`))) + return [100, xs.length] +}) + t('Transactions array', async() => { await sql`create table test (a int)` @@ -1457,6 +1463,16 @@ t('Recreate prepared statements on transformAssignedExpr error', async() => { ] }) +t('Throws correct error when retrying in 
transactions', async() => { + await sql`create table test(x int)` + const error = await sql.begin(sql => sql`insert into test (x) values (${ false })`).catch(e => e) + return [ + error.code, + '42804', + sql`drop table test` + ] +}) + t('Recreate prepared statements on RevalidateCachedQuery error', async() => { const select = () => sql`select name from test` await sql`create table test (name text)` @@ -1596,7 +1612,6 @@ t('Copy write as first works', async() => { ] }) - nt('Copy from file works', async() => { await sql`create table test (x int, y int, z int)` await new Promise(async r => fs @@ -1888,6 +1903,7 @@ t('Prevent premature end of connection in transaction', async() => { const result = await sql.begin(async sql => { await sql`select 1` await delay(200) + await sql`select 1` return 'yay' }) diff --git a/deno/tests/test.js b/deno/tests/test.js index 4b8eca74..2e36de60 100644 --- a/deno/tests/test.js +++ b/deno/tests/test.js @@ -34,7 +34,7 @@ async function test(o, name, options, fn) { ? (ignored++, ignore) : fn() ])) - .then((x) => { + .then(async x => { clearTimeout(fn.timer) if (x === ignore) return @@ -42,7 +42,7 @@ async function test(o, name, options, fn) { if (!Array.isArray(x)) throw new Error('Test should return result array') - const [expected, got] = x + const [expected, got] = await Promise.all(x) if (expected !== got) { failed = true throw new Error(util.inspect(expected) + ' != ' + util.inspect(got)) diff --git a/src/connection.js b/src/connection.js index 6939a7bd..c9e2d4f3 100644 --- a/src/connection.js +++ b/src/connection.js @@ -533,8 +533,10 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (query) return // Consider opening if able and sent.length < 50 - connection.reserved && x[5] !== 73 // I - ? connection.reserved() + connection.reserved + ? x[5] === 73 + ? ending && terminate() + : connection.reserved() // I : ending ? 
terminate() : onopen(connection) diff --git a/tests/index.js b/tests/index.js index a0881a37..1cdd1496 100644 --- a/tests/index.js +++ b/tests/index.js @@ -253,6 +253,12 @@ t('Parallel transactions', async() => { ])).map(x => x.count).join(''), await sql`drop table test`] }) +t('Many transactions at beginning of connection', async() => { + const sql = postgres(options) + const xs = await Promise.all(Array.from({ length: 100 }, () => sql.begin(sql => sql`select 1`))) + return [100, xs.length] +}) + t('Transactions array', async() => { await sql`create table test (a int)` @@ -1605,7 +1611,6 @@ t('Copy write as first works', async() => { ] }) - t('Copy from file works', async() => { await sql`create table test (x int, y int, z int)` await new Promise(async r => fs @@ -1897,6 +1902,7 @@ t('Prevent premature end of connection in transaction', async() => { const result = await sql.begin(async sql => { await sql`select 1` await delay(200) + await sql`select 1` return 'yay' }) From 7e0521fac623483ae55c207416554bfb12652ce9 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 20:04:20 +0100 Subject: [PATCH 37/51] Simplify generic error call --- src/connection.js | 7 ++----- src/errors.js | 4 ++-- src/index.js | 2 +- src/types.js | 4 ++-- 4 files changed, 7 insertions(+), 10 deletions(-) diff --git a/src/connection.js b/src/connection.js index c9e2d4f3..6812843f 100644 --- a/src/connection.js +++ b/src/connection.js @@ -169,7 +169,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function toBuffer(q) { if (q.parameters.length >= 65534) - throw Errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) + throw Errors.generic('MAX_PARAMETERS_EXCEEDED', 'Max number of parameters (65534) exceeded') return q.options.simple ? 
b().Q().str(q.strings[0] + b.N).end() @@ -679,10 +679,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature) return /* c8 ignore next 5 */ - errored(Errors.generic({ - message: 'The server did not return the correct signature', - code: 'SASL_SIGNATURE_MISMATCH' - })) + errored(Errors.generic('SASL_SIGNATURE_MISMATCH', 'The server did not return the correct signature')) socket.destroy() } diff --git a/src/errors.js b/src/errors.js index f83c9f39..0ff83c42 100644 --- a/src/errors.js +++ b/src/errors.js @@ -33,8 +33,8 @@ function postgres(x) { return error } -function generic(x) { - const error = Object.assign(new Error(x.message), x) +function generic(code, message) { + const error = Object.assign(new Error(code + ': ' + message), { code }) Error.captureStackTrace(error, generic) return error } diff --git a/src/index.js b/src/index.js index 05a68c46..5d0e7cf0 100644 --- a/src/index.js +++ b/src/index.js @@ -359,7 +359,7 @@ function Postgres(a, b) { : ( queries.remove(query), query.cancelled = true, - query.reject(Errors.generic({ code: '57014', message: 'canceling statement due to user request' })), + query.reject(Errors.generic('57014', 'canceling statement due to user request')), resolve() ) }) diff --git a/src/types.js b/src/types.js index ea04ef3d..c806acb6 100644 --- a/src/types.js +++ b/src/types.js @@ -83,7 +83,7 @@ export class Builder extends NotTagged { export function handleValue(x, parameters, types) { const value = x instanceof Parameter ? 
x.value : x if (value === undefined) - throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) + throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed') return '$' + (types.push( x instanceof Parameter @@ -155,7 +155,7 @@ const builders = Object.entries({ }).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn])) function notTagged() { - throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) + throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal') } export const serializers = defaultHandlers.serializers From 66a9b3efffbae2537219684bcd2ec2035241a737 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 20:05:04 +0100 Subject: [PATCH 38/51] Throw on unsafe transactions --- README.md | 5 +++++ src/connection.js | 4 ++++ tests/index.js | 9 +++++++++ 3 files changed, 18 insertions(+) diff --git a/README.md b/README.md index 5f23243c..2e63d749 100644 --- a/README.md +++ b/README.md @@ -821,6 +821,11 @@ Query errors will also contain the `query` string and the `parameters` which are There are also the following errors specifically for this library. +##### UNSAFE_TRANSACTION +> Only use sql.begin or max: 1 + +To ensure statements in a transaction runs on the same connection (which is required for them to run inside the transaction), you must use [`sql.begin(...)`](#Transactions) or only allow a single connection in options (`max: 1`). 
+ ##### UNDEFINED_VALUE > Undefined values are not allowed diff --git a/src/connection.js b/src/connection.js index 6812843f..c52a8e40 100644 --- a/src/connection.js +++ b/src/connection.js @@ -51,6 +51,7 @@ const errorFields = { function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) { const { ssl, + max, user, host, port, @@ -557,6 +558,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl final && (final(), final = null) + if (result.command === 'BEGIN' && max !== 1 && !connection.reserved) + return errored(Errors.generic('UNSAFE_TRANSACTION', 'Only use sql.begin or max: 1')) + if (query.options.simple) return diff --git a/tests/index.js b/tests/index.js index 1cdd1496..17ebdf9e 100644 --- a/tests/index.js +++ b/tests/index.js @@ -160,6 +160,15 @@ t('null for int', async() => { return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`] }) +t('Throws on illegal transactions', async() => { + const sql = postgres({ ...options, max: 2, fetch_types: false }) + const error = await sql`begin`.catch(e => e) + return [ + error.code, + 'UNSAFE_TRANSACTION' + ] +}) + t('Transaction throws', async() => { await sql`create table test (a int)` return ['22P02', await sql.begin(async sql => { From 58ecc88f95a25aecaaa739296f11ff3598165d0c Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 21:04:46 +0100 Subject: [PATCH 39/51] Documentation improvements --- README.md | 362 +++++++++++++++++++++++++++------------------------ package.json | 8 +- 2 files changed, 196 insertions(+), 174 deletions(-) diff --git a/README.md b/README.md index 2e63d749..24b7528a 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,6 @@ Fastest full PostgreSQL nodejs client - [🚀 Fastest full-featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results) -- 🚯 1850 LOC - 0 dependencies - 🏷 ES6 Tagged Template Strings at the core - 🏄‍♀️ Simple surface API - 
🖊️ Dynamic query support @@ -21,23 +20,39 @@ $ npm install postgres ``` ### Usage +Create your `sql` database instance ```js -const postgres = require('postgres') -// import postgres from 'postgres' +// db.js +import postgres from 'postgres' -const sql = postgres({ ...options }) // will default to the same as psql +const sql = postgres({ /* options */ }) // will use psql environment variables -const insertUser = await sql` - INSERT INTO users ${ - sql({ name: "Serena", age: 35 }) - } RETURNING * -`; -// [{ name: "Serena", age: 35 }] +export default sql +``` + +Simply import for use elsewhere +```js +// other.js +import sql from './db.js' -const selectUsers = await sql` - select name, age from users +const users = await sql` + select + name, + age + from users + where age > ${ 65 } ` -// [{ name: "Serena", age: 35 }, { name: 'Murray', age: 68 }, ...] +// Result [{ name: "Walter", age: 80 }, { name: 'Murray', age: 68 }, ...] + +const [user] = await sql` + insert into users + (name, age) + values + (${ 'Ludwig' }, ${ 92 }) + returning name, age +` +// Result [{ name: "Murray", age: 68 }] + ``` # Table of Contents @@ -58,6 +73,7 @@ const selectUsers = await sql` * [`describe`](#describe) * [`raw`](#raw) * [`file`](#file) + * [`cancel`](#canceling-queries-in-progress) * [Transactions](#transactions) * [Custom types](#custom-types) * [Advanced communication](#advanced-communication) @@ -76,7 +92,7 @@ const selectUsers = await sql` ### `postgres([url], [options])` -You can use either a `postgres://` url connection string or the options to define your database connection properties. Options in the object will override any present in the url. +You can use either a `postgres://` url connection string or the options to define your database connection properties. Options in the object will override any present in the url. Options will fall back to the same environment variables as psql. 
```js const sql = postgres('postgres://username:password@host:port/database', { @@ -93,9 +109,9 @@ More options can be found in the [Advanced Connection Options section](#advanced ## Queries -### ```sql`` -> Promise``` +### ```sql`` -> Promise -> Result[]``` -Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using this advanced form of template literals benefits developers by: +Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using tagged template literals benefits developers by: 1. **Enforcing** safe query generation 2. Giving the `sql`` ` function powerful [utility](#insert) and [dynamic parameterization](#dynamic-queries) features. @@ -119,16 +135,15 @@ const [new_user] = await sql` // new_user = { user_id: 1, name: 'Murray', age: 68 } ``` -Please note that queries are executed when `awaited` – or manually by using `.execute`. +Please note that queries are executed when `awaited` – or manually by using `.execute()`. #### Query parameters -Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use JS tagged template literals as usual. **Dynamic and partial queries can be seen in the [next section]()**. +Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use tagged template literals as usual. **Dynamic and partial queries can be seen in the [next section]()**. 
```js -let searchName = 'Mur' -let searchAge = 60 - +const name = 'Mur' + , age = 60 const users = await sql` select @@ -136,46 +151,30 @@ const users = await sql` age from users where - name like ${searchName + '%'} - and age > ${searchAge} + name like ${ name + '%' } + and age > ${ age } ` - // users = [{ name: 'Murray', age: 68 }] - ``` > Be careful with quotation marks here. Because Postgres infers column types, you do not need to wrap your interpolated parameters in quotes like `'${name}'`. This will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter. -### Select +### Dynamic column selection ```js const columns = ['name', 'age'] sql` - select ${ - sql(columns) - } from users + select + ${ sql(columns) } + from users ` -// Is translated into this query: +// Which results in: select "name", "age" from users ``` -```js -let resultOne = await sql` - select user_id, name from users -` -// [{ user_id: 0, name: "Serena" }, { user_id: 1, name: "Murray" }, { user_id: 2, name: "Lysander" }, ...] - -resultOne.unshift() - -let resultTwo = await sql` - select user_id from users where user_id IN ${resultOne.map(row => row.user_id)} -` -// [{ user_id: 1, name: 'Murray' }, { user_id: 2, name: "Lysander" }, ...] -``` - -### Insert +### Dynamic insert ```js const user = { @@ -189,7 +188,7 @@ sql` } ` -// Is translated to: +// Which results in: insert into users ("name", "age") values ($1, $2) ``` @@ -199,208 +198,184 @@ insert into users ("name", "age") values ($1, $2) If you need to insert multiple rows at the same time it's also much faster to do it with a single `insert`. Simply pass an array of objects to `sql()`. 
```js -const users = [ - { - name: 'Murray', - age: 68, - garbage: 'ignore' - }, - { - name: 'Walter', - age: 78 - } -] +const users = [{ + name: 'Murray', + age: 68, + garbage: 'ignore' +}, +{ + name: 'Walter', + age: 80 +}] -sql`insert into users ${sql(users, 'name', 'age')}` +sql`insert into users ${ sql(users, 'name', 'age') }` // Is translated to: insert into users ("name", "age") values ($1, $2), ($3, $4) -// Omitting column names +// You can also omit column names which will use object keys as columns +sql`insert into users ${ sql(users) }` -users[0] = { - name: 'Serena', - age: 35, -} - -sql`insert into users ${sql(users)}` - -// Is translated to: +// Which results in: insert into users ("name", "age") values ($1, $2), ($3, $4) ``` -### Update +### Dynamic Updates This is also useful for update queries ```js - const user = { id: 1, - name: 'Muray' + name: 'Murray' } sql` update users set ${ sql(user, 'name') - } where - user_id = ${user.id} + } + where user_id = ${ user.id } ` -// Is translated to: +// Which results in: update users set "name" = $1 where user_id = $2 ``` -### Delete +### Dynamic delete ```js const user = { id: 1, - name: 'Muray' + name: 'Murray' } -sql`delete from users where user_id = ${user.id}` +sql`delete from users where user_id = ${ user.id }` -// Is translated to: +// Which results in: delete from users where user_id = $1 ``` ## Dynamic queries -Postgres.js features a powerful dynamic query parser for conditionally appending/omitting query fragments. - -This works by nestings a ` sql`` ` call within another ` sql`` ` call. +Postgres.js features a simple dynamic query builder by conditionally appending/omitting query fragments. +It works by nesting ` sql`` ` fragments within other ` sql`` ` calls or fragments. This allows you to build dynamic queries safely without risking sql injections through usual string concatenation. 
#### Partial queries - ```js -let savedQuery = () => sql`and age > 50` +const olderThan = x => sql`and age > ${ x }` -let isQueryingForAge = true +const filterAge = true sql` select * from users - where - name is not null - ${isQueryingForAge ? - savedQuery() - : - sql`` - } -` -``` - -#### Dynamic where clause -```js -sql` - select - * - from users ${id ? - sql`where user_id = ${ id }` - : - sql`` + where name is not null ${ + filterAge + ? olderThan(50) + : sql`` } ` - -// Is translated to: -select * from users +// Which results in: +select * from users where name is not null // Or -select * from users where user_id = $1 +select * from users where name is not null and age > 50 ``` #### Dynamic filters ```js -let ageFilter = 50; - sql` - select - * - from users - where - age > ${ageFilter} - ${id ? - sql`and user_id = ${id}` - : - sql`` + select + * + from users ${ + id + ? sql`where user_id = ${ id }` + : sql`` } ` -// Is translated to: -select * from users where age > $1 +// Which results in: +select * from users // Or -select * from users where age > $1 and user_id = $2 +select * from users where user_id = $1 ``` ### Identifier and value utilities -#### Arrays -Arrays will be handled by replacement parameters too, so `where in` queries are also simple. - +#### Where ` in ` +Value lists can also be created dynamically, making `where in` queries simple too. ```js const users = await sql` select * from users - where age in ${sql([68, 75, 23])} + where age in ${ sql([68, 75, 23]) } ` ``` #### SQL functions - +Using keywords or calling functions dynamically is also possible by using ``` sql`` ``` fragments. ```js -let now = true +const date = null sql` - update users set updated_at = ${ now ? 
sql`now()` : someDate } + update users set updated_at = ${ date || sql`now()` } ` + +// Which results in: +update users set updated_at = now() ``` #### Table names - +Dynamic identifiers like table names and column names is also supported like so: ```js const table = 'users' sql` select id from ${ sql(table) } ` + +// Which results in: +select id from "users" ``` ## Advanced query methods -### forEach -#### ```sql``.forEach(fn) -> Promise``` +### .cursor() +#### ```sql``.cursor([rows = 1], [fn]) -> Promise``` -If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. -```js +Use cursors if you need to throttle the amount of rows being returned from a query. You can use a cursor either as an [async iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) or with a callback function. For a callback function new results won't be requested until the promise / async callback function has resolved. +##### callback function +```js await sql` - select created_at, name from events -`.forEach(row => { - // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' } -}) - -// No more rows + select + * + from generate_series(1,4) as x +`.cursor(async([row]) => { + // row = { x: 1 } + await http.request('https://example.com/wat', { row }) +} ``` -### Cursor -#### ```sql``.cursor([rows = 1], fn) -> Promise``` - -Use cursors if you need to throttle the amount of rows being returned from a query. New results won't be requested until the promise / async callback function has resolved. 
- +##### for await...of ```js -for await (const [row] of sql`select * from generate_series(1,4) as x`.cursor()) { +// for await...of +const cursor = sql`select * from generate_series(1,4) as x`.cursor() + +for await (const [row] of cursor) { // row = { x: 1 } await http.request('https://example.com/wat', { row }) } - -// All rows iterated ``` -A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as an argument of `.cursor`: +A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument to `.cursor`: ```js -for await (const rows of sql`select * from generate_series(1,1000) as x`.cursor(10)) { +await sql` + select + * + from generate_series(1,1000) as x +`.cursor(10, async rows => { // rows = [{ x: 1 }, { x: 2 }, ... ] await Promise.all(rows.map(row => http.request('https://example.com/wat', { row }) @@ -408,9 +383,9 @@ for await (const rows of sql`select * from generate_series(1,1000) as x`.cursor( } ``` -If an error is thrown inside the callback function no more rows will be requested and the promise will reject with the thrown error. +If an error is thrown inside the callback function no more rows will be requested and the outer promise will reject with the thrown error. -You can also stop receiving any more rows early by returning an end token `sql.END` from the callback function. +You can close the cursor early either by calling `break` in the `for await...of` loop, or by returning the token `sql.CLOSE` from the callback function. ```js @@ -422,6 +397,21 @@ await sql` ``` +### .forEach() +#### ```sql``.forEach(fn) -> Promise``` + +If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. 
+```js
+
+await sql`
+  select created_at, name from events
+`.forEach(row => {
+  // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' }
+})
+
+// No more rows
+```
+
 ### describe
 #### ```sql``.describe([rows = 1], fn) -> Promise```
 
@@ -451,13 +441,25 @@ sql.file(path.join(__dirname, 'query.sql'), [], {
 
 ```
 
-### Transactions
+## Canceling Queries in Progress
+
+Postgres.js supports, [canceling queries in progress](https://www.postgresql.org/docs/7.1/protocol-protocol.html#AEN39000). It works by opening a new connection with a protocol level startup message to cancel the current query running on a specific connection. That means there is no guarantee that the query will be canceled, and due to the possible race conditions it might even result in canceling another query. This is fine for long running queries, but in the case of high load and fast queries it might be better to simply ignore results instead of canceling.
+
+```js
+
+const query = sql`select pg_sleep(100)`.execute()
+setTimeout(() => query.cancel(), 100)
+const result = await query
+
+```
+
+## Transactions
 
-#### BEGIN / COMMIT `sql.begin(fn) -> Promise`
+#### BEGIN / COMMIT `sql.begin([options = ''], fn) -> Promise`
 
-Calling `.begin` with a function will return a Promise. This will resolve with the returned value from the function. The function provides a single argument which is `sql` with a context of the newly created transaction.
+Use `sql.begin` to start a new transaction. Postgres.js will reserve a connection for the transaction and supply a scoped `sql` instance for all transaction uses in the callback function. `sql.begin` will resolve with the returned value from the callback function.
 
-`BEGIN` is automatically called, and if the Promise fails `ROLLBACK` will be called. If it succeeds `COMMIT` will be called.
+`BEGIN` is automatically sent with the optional options, and if anything fails `ROLLBACK` will be called so the connection can be released and execution can continue. 
```js @@ -466,7 +468,7 @@ const [user, account] = await sql.begin(async sql => { insert into users ( name ) values ( - 'Alice' + 'Murray' ) ` @@ -483,16 +485,28 @@ const [user, account] = await sql.begin(async sql => { ``` +It's also possible to pipeline the requests in a transaction if needed by returning an array with queries from the callback function like this: + +```js + +const result = await sql.begin(sql => [ + sql`update ...`, + sql`update ...`, + sql`insert ...` +]) + +``` + #### SAVEPOINT `sql.savepoint([name], fn) -> Promise` ```js -sql.begin(async sql => { +sql.begin('read write', async sql => { const [user] = await sql` insert into users ( name ) values ( - 'Alice' + 'Murray' ) ` @@ -521,10 +535,12 @@ sql.begin(async sql => { Do note that you can often achieve the same result using [`WITH` queries (Common Table Expressions)](https://www.postgresql.org/docs/current/queries-with.html) instead of using transactions. +## Unsafe raw string queries +
-sql.unsafe - Advanced unsafe use cases +Advanced unsafe use cases -### Unsafe queries `sql.unsafe(query, [args], [options]) -> promise` +### `sql.unsafe(query, [args], [options]) -> promise` If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. @@ -537,11 +553,9 @@ sql.unsafe('select ' + danger + ' from users where id = ' + dragons) ## Custom Types -You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where type is the PostgreSQL `oid` for the type and the correctly serialized value. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_ - -Using custom types is necessary if using parameters with [`DO` queries](https://www.postgresql.org/docs/9.0/sql-do.html). If not doing this, PostgreSQL will throw a `could not determine data type of parameter $1` error. +You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where type is the PostgreSQL `oid` for the type and the correctly serialized string. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_ -Adding Query helpers is the recommended approach which can be done like this: +Adding Query helpers is the cleanest approach which can be done like this: ```js const sql = postgres({ @@ -619,7 +633,7 @@ CREATE PUBLICATION alltables FOR ALL TABLES ```js const sql = postgres({ publications: 'alltables' }) -const { unsubscribe } = await sql.subscribe('insert:events', row => +const { unsubscribe } = await sql.subscribe('insert:events', (row, { command, relation, key, old }) => // tell about new event row over eg. websockets or do something else ) ``` @@ -650,9 +664,9 @@ sql.subscribe('update:users=1', () => /* all updates on the users row with a p ## Teardown / Cleanup -To ensure proper teardown and cleanup on server restarts use `sql.end({ timeout: 0 })` before `process.exit()`. 
+To ensure proper teardown and cleanup on server restarts use `await sql.end()` before `process.exit()`. -Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a timeout is provided any pending queries will be rejected once the timeout is reached and the connections will be destroyed. +Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a `{ timeout }` option is provided any pending queries will be rejected once the timeout (in seconds) is reached and the connections will be destroyed. #### Sample shutdown using [Prexit](http://npmjs.com/prexit) @@ -683,7 +697,7 @@ const sql = postgres({ }) ``` -There is currently no guaranteed way to handle `numeric / decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way this case is to use [custom types](#custom-types). +There is currently no guaranteed way to handle `numeric / decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way in this case is to use [custom types](#custom-types). ## Connection options @@ -736,7 +750,7 @@ const sql = ? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates" // https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl postgres({ ssl: { rejectUnauthorized: false } }) - : postgres(); + : postgres() ``` For more information regarding `ssl` with `postgres`, check out the [Node.js documentation for tls](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_new_tls_tlssocket_socket_options). 
@@ -762,6 +776,8 @@ Any query which was already sent over the wire will be rejected if the connectio There are no guarantees about queries executing in order unless using a transaction with `sql.begin()` or setting `max: 1`. Of course doing a series of queries, one awaiting the other will work as expected, but that's just due to the nature of js async/promise handling, so it's not necessary for this library to be concerned with ordering. +Since this library automatically creates prepared statements, it also has a default max lifetime for connections to prevent memory bloat on the database itself. This is a random interval for each connection between 45 and 90 minutes. This allows multiple connections to come up and down seamlessly without user interference. + ### Connection timeout By default, connections will not close until `.end()` is called. However, it may be useful to have them close automatically when: @@ -772,12 +788,12 @@ By default, connections will not close until `.end()` is called. However, it may This can be done using the `idle_timeout` or `max_lifetime` options. These configuration options specify the number of seconds to wait before automatically closing an idle connection and the maximum time a connection can exist, respectively. -For example, to close a connection that has either been idle for 2 seconds or exists for 30 seconds: +For example, to close a connection that has either been idle for 20 seconds or existed for more than 30 minutes: ```js const sql = postgres({ - idle_timeout: 2, - max_lifetime: 30 + idle_timeout: 20, + max_lifetime: 60 * 30 }) ``` @@ -813,11 +829,11 @@ Prepared statements will automatically be created for any queries where it can b ## Error handling -Errors are all thrown to related queries and never globally. 
Errors coming from PostgreSQL itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection. +Errors are all thrown to related queries and never globally. Errors coming from database itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection. Query errors will contain a stored error with the origin of the query to aid in tracing errors. -Query errors will also contain the `query` string and the `parameters` which are not enumerable to avoid accidentally leaking confidential information in logs. To log these it is required to specifically access `error.query` and `error.parameters`. +Query errors will also contain the `query` string and the `parameters`. These are not enumerable to avoid accidentally leaking confidential information in logs. To log these it is required to specifically access `error.query` and `error.parameters`, or set `debug: true` in options. There are also the following errors specifically for this library. 
diff --git a/package.json b/package.json index c10c0705..c98f5358 100644 --- a/package.json +++ b/package.json @@ -28,9 +28,15 @@ "/src", "/types" ], - "author": "Rasmus Porsager ", + "author": "Rasmus Porsager (https://www.porsager.com)", + "funding": { + "type" : "individual", + "url" : "https://github.com/sponsors/porsager" + }, "license": "Unlicense", "repository": "porsager/postgres", + "homepage": "https://github.com/porsager/postgres", + "bugs": "https://github.com/porsager/postgres/issues", "keywords": [ "driver", "postgresql", From 47d0982ad4075fb512a632acfeb483c968ce0cbb Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 21:57:27 +0100 Subject: [PATCH 40/51] Documentation improvements --- README.md | 181 ++++++++++++++++++++----------------------------- src/index.js | 2 +- tests/index.js | 4 +- 3 files changed, 76 insertions(+), 111 deletions(-) diff --git a/README.md b/README.md index 24b7528a..03030c5c 100644 --- a/README.md +++ b/README.md @@ -32,61 +32,36 @@ export default sql Simply import for use elsewhere ```js -// other.js +// users.js import sql from './db.js' -const users = await sql` - select - name, - age - from users - where age > ${ 65 } -` -// Result [{ name: "Walter", age: 80 }, { name: 'Murray', age: 68 }, ...] 
- -const [user] = await sql` - insert into users - (name, age) - values - (${ 'Ludwig' }, ${ 92 }) - returning name, age -` -// Result [{ name: "Murray", age: 68 }] - -``` - -# Table of Contents - -* [Connection](#connection) -* [Queries](#queries) - * [Select](#select) - * [Insert](#insert) - * [Update](#update) - * [Delete](#delete) -* [Dynamic queries](#dynamic-queries) - * [Building partial queries](#partial-queries) - * [WHERE clause](#dynamic-where-clause) - * [Identifiers](#identifier-and-value-utilities) -* [Advanced query methods](#advanced-query-methods) - * [`forEach`](#foreach) - * [`cursor`](#cursor) - * [`describe`](#describe) - * [`raw`](#raw) - * [`file`](#file) - * [`cancel`](#canceling-queries-in-progress) - * [Transactions](#transactions) -* [Custom types](#custom-types) -* [Advanced communication](#advanced-communication) - * [`LISTEN` and `NOTIFY`](#listen-and-notify) - * [Subscribe / Realtime](#subscribe-realtime) -* [Connection options](#connection-options) - * [SSL](#ssl) - * [Multi-host connection](#multi-host-connections---high-availability-ha) - * [Connection timeout](#connection-timeout) - * [Environmental variables](#environmental-variables) -* [Error handling](#error-handling) -* [TypeScript support](#typescript-support) +async function getUsersOver(age) { + const users = await sql` + select + name, + age + from users + where age > ${ age } + ` + // users = Result [{ name: "Walter", age: 80 }, { name: 'Murray', age: 68 }, ...] 
+ return users +} + + +async function insertUser({ name, age }) { + const users = sql` + insert into users + (name, age) + values + (${ name }, ${ age }) + returning name, age + ` + // users = Result [{ name: "Murray", age: 68 }] + return users +} + +``` ## Connection @@ -109,20 +84,20 @@ More options can be found in the [Advanced Connection Options section](#advanced ## Queries -### ```sql`` -> Promise -> Result[]``` +### ```await sql`...` -> Result[]``` Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using tagged template literals benefits developers by: 1. **Enforcing** safe query generation 2. Giving the `sql`` ` function powerful [utility](#insert) and [dynamic parameterization](#dynamic-queries) features. -Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholder `$1, $2, ...`. This is then sent to the database as a parameter to handle escaping & casting. +Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholder `$1, $2, ...`. The parameters are then sent separately to the database which handles escaping & casting. -All queries will return a `Result` array, mapping column names to each row. +All queries will return a `Result` array, with objects mapping column names to each row. ```js -const [new_user] = await sql` +const xs = await sql` insert into users ( name, age ) values ( @@ -132,14 +107,14 @@ const [new_user] = await sql` returning * ` -// new_user = { user_id: 1, name: 'Murray', age: 68 } +// xs = [{ user_id: 1, name: 'Murray', age: 68 }] ``` -Please note that queries are executed when `awaited` – or manually by using `.execute()`. +> Please note that queries are first executed when `awaited` – or manually by using `.execute()`. 
-#### Query parameters
+### Query parameters
 
-Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use tagged template literals as usual. **Dynamic and partial queries can be seen in the [next section]()**.
+Parameters are automatically extracted and handled by the database so that SQL injection isn't possible. No special handling is necessary, simply use tagged template literals as usual. **Dynamic queries and query building can be seen in the [next section](#building-queries)**.
 
 ```js
 const name = 'Mur'
@@ -174,7 +149,7 @@ sql`
 select "name", "age" from users
 ```
 
-### Dynamic insert
+### Dynamic inserts
 
 ```js
 const user = {
@@ -192,7 +167,7 @@ sql`
 insert into users ("name", "age") values ($1, $2)
 ```
 
-**You can omit column names and simply execute `sql(user)` to get all the fields from the object as columns**. Be careful to not allow users to supply columns that you do not want to be inserted.
+**You can omit column names and simply execute `sql(user)` to get all the fields from the object as columns**. Be careful not to allow users to supply columns that you do not want to be inserted.
 
 #### Multiple inserts in one query
 If you need to insert multiple rows at the same time it's also much faster to do it with a single `insert`. Simply pass an array of objects to `sql()`. 
@@ -213,53 +188,58 @@ sql`insert into users ${ sql(users, 'name', 'age') }`
 
 // Is translated to:
 insert into users ("name", "age") values ($1, $2), ($3, $4)
 
-// You can also omit column names which will use object keys as columns
+// Here you can also omit column names which will use object keys as columns
 sql`insert into users ${ sql(users) }`
 
 // Which results in:
 insert into users ("name", "age") values ($1, $2), ($3, $4)
 ```
 
-### Dynamic Updates
+### Dynamic columns in updates
 
 This is also useful for update queries
 ```js
 const user = {
   id: 1,
-  name: 'Murray'
+  name: 'Murray',
+  age: 68
 }
 
 sql`
   update users set ${
-    sql(user, 'name')
+    sql(user, 'name', 'age')
   } where
     user_id = ${ user.id }
 `
 
 // Which results in:
-update users set "name" = $1 where user_id = $2
+update users set "name" = $1, "age" = $2 where user_id = $3
 ```
 
-### Dynamic delete
-
+### Dynamic values and `where in`
+Value lists can also be created dynamically, making `where in` queries simple too.
 ```js
+const users = await sql`
+  select
+    *
+  from users
+  where age in ${ sql([68, 75, 23]) }
+`
+```
 
-const user = {
-  id: 1,
-  name: 'Murray'
-}
-
-sql`delete from users where user_id = ${ user.id }`
-
-// Which results in:
-delete from users where user_id = $1
+or
+```js
+const [{ a, b, c }] = await sql`
+  select
+    *
+  from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c)
 ```
 
-## Dynamic queries
+## Building queries
 
 Postgres.js features a simple dynamic query builder by conditionally appending/omitting query fragments. It works by nesting ` sql`` ` fragments within other ` sql`` ` calls or fragments. This allows you to build dynamic queries safely without risking sql injections through usual string concatenation. 
-#### Partial queries +### Partial queries ```js const olderThan = x => sql`and age > ${ x }` @@ -281,7 +261,7 @@ select * from users where name is not null select * from users where name is not null and age > 50 ``` -#### Dynamic filters +### Dynamic filters ```js sql` select @@ -299,20 +279,7 @@ select * from users select * from users where user_id = $1 ``` -### Identifier and value utilities - -#### Where ` in ` -Value lists can also be created dynamically, making `where in` queries simple too. -```js -const users = await sql` - select - * - from users - where age in ${ sql([68, 75, 23]) } -` -``` - -#### SQL functions +### SQL functions Using keywords or calling functions dynamically is also possible by using ``` sql`` ``` fragments. ```js const date = null @@ -325,22 +292,24 @@ sql` update users set updated_at = now() ``` -#### Table names +### Table names Dynamic identifiers like table names and column names is also supported like so: ```js const table = 'users' + , column = 'id' sql` - select id from ${ sql(table) } + select ${ sql(column) } from ${ sql(table) } ` // Which results in: -select id from "users" +select "id" from "users" ``` ## Advanced query methods ### .cursor() + #### ```sql``.cursor([rows = 1], [fn]) -> Promise``` Use cursors if you need to throttle the amount of rows being returned from a query. You can use a cursor either as an [async iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) or with a callback function. For a callback function new results won't be requested until the promise / async callback function has resolved. 
@@ -368,7 +337,6 @@ for await (const [row] of cursor) { } ``` - A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument to `.cursor`: ```js await sql` @@ -398,7 +366,8 @@ await sql` ``` ### .forEach() -#### ```sql``.forEach(fn) -> Promise``` + +#### ```await sql``.forEach(fn)``` If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. ```js @@ -413,7 +382,7 @@ await sql` ``` ### describe -#### ```sql``.describe([rows = 1], fn) -> Promise``` +#### ```await sql``.describe([rows = 1], fn) -> Result[]``` Rather than executing a given query, `.describe` will return information utilized in the query process. This information can include the query identifier, column types, etc. @@ -427,17 +396,13 @@ Using `.raw()` will return rows as an array with `Buffer` values for each column This can be useful to receive identically named columns, or for specific performance/transformation reasons. The column definitions are still included on the result array, plus access to parsers for each column. ### File -#### `sql.file(path, [args], [options]) -> Promise` - -Using a `.sql` file for a query. +#### `await sql.file(path, [args], [options]) -> Result[]` -The contents will be cached in memory so that the file is only read once. 
+Using a `.sql` file for a query is also supported with optional parameters to use if the file includes `$1, $2, etc` ```js -sql.file(path.join(__dirname, 'query.sql'), [], { - cache: true // Default true - disable for single shot queries or memory reasons -}) +const result = await sql.file('query.sql', ['Murray', 68]) ``` diff --git a/src/index.js b/src/index.js index 5d0e7cf0..61664e69 100644 --- a/src/index.js +++ b/src/index.js @@ -114,7 +114,7 @@ function Postgres(a, b) { return query } - function file(path, args = [], options = { cache: true }) { + function file(path, args = [], options = {}) { arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) const query = new Query([], args, (query) => { fs.readFile(path, 'utf8', (err, string) => { diff --git a/tests/index.js b/tests/index.js index 17ebdf9e..3451fbd6 100644 --- a/tests/index.js +++ b/tests/index.js @@ -951,7 +951,7 @@ t('dynamic select args', async() => { t('dynamic values single row', async() => { const [{ b }] = await sql` - select * from (values ${ sql(['a', 'b', 'c']) }) AS x(a, b, c) + select * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c) ` return ['b', b] @@ -959,7 +959,7 @@ t('dynamic values single row', async() => { t('dynamic values multi row', async() => { const [, { b }] = await sql` - select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) AS x(a, b, c) + select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) as x(a, b, c) ` return ['b', b] From 4e957480b0faf805ed0d7ffa9d8f0471ce83ad22 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 22:29:44 +0100 Subject: [PATCH 41/51] Smaller documentation fixes --- README.md | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 03030c5c..1da3da06 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ Fastest full PostgreSQL nodejs client -- [🚀 Fastest full-featured PostgreSQL node 
client](https://github.com/porsager/postgres-benchmarks#results) +- [🚀 Fastest full-featured node & deno client](https://github.com/porsager/postgres-benchmarks#results) - 🏷 ES6 Tagged Template Strings at the core - 🏄‍♀️ Simple surface API - 🖊️ Dynamic query support -- 💬 Chat on [Gitter](https://gitter.im/porsager/postgres) +- 💬 Chat and help on [Gitter](https://gitter.im/porsager/postgres)
@@ -80,7 +80,7 @@ const sql = postgres('postgres://username:password@host:port/database', { }) ``` -More options can be found in the [Advanced Connection Options section](#advanced-connection-options). +More options can be found in the [Advanced Connection Options section](#connection-options). ## Queries @@ -89,7 +89,7 @@ More options can be found in the [Advanced Connection Options section](#advanced Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using tagged template literals benefits developers by: 1. **Enforcing** safe query generation -2. Giving the `sql`` ` function powerful [utility](#insert) and [dynamic parameterization](#dynamic-queries) features. +2. Giving the `sql`` ` function powerful [utility](#dynamic-inserts) and [query building](#building-queries) features. Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholder `$1, $2, ...`. The parameters are then sent separately to the database which handles escaping & casting. @@ -310,7 +310,7 @@ select "id" from "users" ### .cursor() -#### ```sql``.cursor([rows = 1], [fn]) -> Promise``` +#### ```await sql``.cursor([rows = 1], [fn])``` Use cursors if you need to throttle the amount of rows being returned from a query. You can use a cursor either as an [async iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) or with a callback function. For a callback function new results won't be requested until the promise / async callback function has resolved. @@ -420,7 +420,7 @@ const result = await query ## Transactions -#### BEGIN / COMMIT `sql.begin([options = ''], fn) -> Promise` +#### BEGIN / COMMIT `await sql.begin([options = ''], fn) -> fn()` Use `sql.begin` to start a new transaction. 
Postgres.js will reserve a connection for the transaction and supply a scoped `sql` instance for all transaction uses in the callback function. `sql.begin` will resolve with the returned value from the callback function. @@ -462,7 +462,7 @@ const result = await sql.begin(sql => [ ``` -#### SAVEPOINT `sql.savepoint([name], fn) -> Promise` +#### SAVEPOINT `await sql.savepoint([name], fn) -> fn()` ```js @@ -505,7 +505,7 @@ Do note that you can often achieve the same result using [`WITH` queries (Common
Advanced unsafe use cases -### `sql.unsafe(query, [args], [options]) -> promise` +### `await sql.unsafe(query, [args], [options]) -> Result[]` If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. @@ -633,7 +633,7 @@ To ensure proper teardown and cleanup on server restarts use `await sql.end()` b Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a `{ timeout }` option is provided any pending queries will be rejected once the timeout (in seconds) is reached and the connections will be destroyed. -#### Sample shutdown using [Prexit](http://npmjs.com/prexit) +#### Sample shutdown using [Prexit](https://github.com/porsager/prexit) ```js @@ -718,7 +718,7 @@ const sql = : postgres() ``` -For more information regarding `ssl` with `postgres`, check out the [Node.js documentation for tls](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_new_tls_tlssocket_socket_options). +For more information regarding `ssl` with `postgres`, check out the [Node.js documentation for tls](https://nodejs.org/dist/latest-v16.x/docs/api/tls.html#new-tlstlssocketsocket-options). ### Multi-host connections - High Availability (HA) @@ -805,7 +805,7 @@ There are also the following errors specifically for this library. ##### UNSAFE_TRANSACTION > Only use sql.begin or max: 1 -To ensure statements in a transaction runs on the same connection (which is required for them to run inside the transaction), you must use [`sql.begin(...)`](#Transactions) or only allow a single connection in options (`max: 1`). +To ensure statements in a transaction runs on the same connection (which is required for them to run inside the transaction), you must use [`sql.begin(...)`](#transactions) or only allow a single connection in options (`max: 1`). 
##### UNDEFINED_VALUE > Undefined values are not allowed @@ -845,12 +845,12 @@ This error is thrown if the connection was closed without an error. This should ##### CONNECTION_ENDED > write CONNECTION_ENDED host:port -This error is thrown if the user has called [`sql.end()`](#sql_end) and performed a query afterward. +This error is thrown if the user has called [`sql.end()`](#teardown--cleanup) and performed a query afterward. ##### CONNECTION_DESTROYED > write CONNECTION_DESTROYED host:port -This error is thrown for any queries that were pending when the timeout to [`sql.end({ timeout: X })`](#sql_destroy) was reached. +This error is thrown for any queries that were pending when the timeout to [`sql.end({ timeout: X })`](#teardown--cleanup) was reached. ##### CONNECTION_CONNECT_TIMEOUT > write CONNECTION_CONNECT_TIMEOUT host:port From 75c376dab6a0a5282e156174654c091b9960a66c Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 22:50:23 +0100 Subject: [PATCH 42/51] Reorder and readd toc --- README.md | 193 +++++++++++++++++++++++++++++------------------------- 1 file changed, 104 insertions(+), 89 deletions(-) diff --git a/README.md b/README.md index 1da3da06..d068053f 100644 --- a/README.md +++ b/README.md @@ -63,6 +63,23 @@ async function insertUser({ name, age }) { ``` +## Table of Contents + +* [Connection](#connection) +* [Queries](#queries) +* [Building queries](#building-queries) +* [Advanced query methods](#advanced-query-methods) +* [Transactions](#transactions) +* [Listen & notify](#listen--notify) +* [Realtime subscribe](#realtime-subscribe) +* [Numbers, bigint, numeric](#numbers-bigint-numeric) +* [Connection details](#connection-details) +* [Custom Types](#custom-types) +* [Teardown / Cleanup](#teardown--cleanup) +* [Error handling](#error-handling) +* [TypeScript support](#typescript-support) + + ## Connection ### `postgres([url], [options])` @@ -80,7 +97,7 @@ const sql = postgres('postgres://username:password@host:port/database', { 
}) ``` -More options can be found in the [Advanced Connection Options section](#connection-options). +More options can be found in the [Connection details section](#connection-details). ## Queries @@ -406,7 +423,7 @@ const result = await sql.file('query.sql', ['Murray', 68]) ``` -## Canceling Queries in Progress +### Canceling Queries in Progress Postgres.js supports, [canceling queries in progress](https://www.postgresql.org/docs/7.1/protocol-protocol.html#AEN39000). It works by opening a new connection with a protocol level startup message to cancel the current query running on a specific connection. That means there is no guarantee that the query will be canceled, and due to the possible race conditions it might even result in canceling another query. This is fine for long running queries, but in the case of high load and fast queries it might be better to simply ignore results instead of canceling. @@ -418,6 +435,22 @@ const result = await query ``` +### Unsafe raw string queries + +
+Advanced unsafe use cases + +### `await sql.unsafe(query, [args], [options]) -> Result[]` + +If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. + +```js + +sql.unsafe('select ' + danger + ' from users where id = ' + dragons) + +``` +
+ ## Transactions #### BEGIN / COMMIT `await sql.begin([options = ''], fn) -> fn()` @@ -500,66 +533,7 @@ sql.begin('read write', async sql => { Do note that you can often achieve the same result using [`WITH` queries (Common Table Expressions)](https://www.postgresql.org/docs/current/queries-with.html) instead of using transactions. -## Unsafe raw string queries - -
-Advanced unsafe use cases - -### `await sql.unsafe(query, [args], [options]) -> Result[]` - -If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. - -```js - -sql.unsafe('select ' + danger + ' from users where id = ' + dragons) - -``` -
- -## Custom Types - -You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where type is the PostgreSQL `oid` for the type and the correctly serialized string. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_ - -Adding Query helpers is the cleanest approach which can be done like this: - -```js -const sql = postgres({ - types: { - rect: { - // The pg_types oid to pass to the db along with the serialized value. - to : 1337, - - // An array of pg_types oids to handle when parsing values coming from the db. - from : [1337], - - //Function that transform values before sending them to the db. - serialize : ({ x, y, width, height }) => [x, y, width, height], - - // Function that transforms values coming from the db. - parse : ([x, y, width, height]) => { x, y, width, height } - } - } -}) - -// Now you can use sql.typed.rect() as specified above -const [custom] = sql` - insert into rectangles ( - name, - rect - ) values ( - 'wat', - ${ sql.typed.rect({ x: 13, y: 37, width: 42, height: 80 }) } - ) - returning * -` - -// custom = { name: 'wat', rect: { x: 13, y: 37, width: 42, height: 80 } } - -``` - -## Advanced communication - -### Listen and notify +## Listen & notify When you call `.listen`, a dedicated connection will be created to ensure that you receive notifications in real-time. This connection will be used for any further calls to `.listen`. @@ -581,20 +555,20 @@ sql.notify('news', JSON.stringify({ no: 'this', is: 'news' })) ``` -### Subscribe / Realtime +## Realtime subscribe Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to real-time updates of `insert`, `update` and `delete` operations. 
> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser. -#### Quick start +### Quick start -##### Create a publication (eg. in migration) +#### Create a publication (eg. in migration) ```sql CREATE PUBLICATION alltables FOR ALL TABLES ``` -##### Subscribe to updates +#### Subscribe to updates ```js const sql = postgres({ publications: 'alltables' }) @@ -603,11 +577,11 @@ const { unsubscribe } = await sql.subscribe('insert:events', (row, { command, re ) ``` -#### Subscribe pattern +### Subscribe pattern You can subscribe to specific operations, tables, or even rows with primary keys. -##### `operation` `:` `schema` `.` `table` `=` `primary_key` +#### `operation` `:` `schema` `.` `table` `=` `primary_key` **`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*` @@ -617,7 +591,7 @@ You can subscribe to specific operations, tables, or even rows with primary keys **`primary_key`** can be used to only subscribe to specific rows -#### Examples +### Examples ```js sql.subscribe('*', () => /* everything */ ) @@ -627,25 +601,6 @@ sql.subscribe('delete:users', () => /* all deletes on the public.users table sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ ) ``` -## Teardown / Cleanup - -To ensure proper teardown and cleanup on server restarts use `await sql.end()` before `process.exit()`. - -Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a `{ timeout }` option is provided any pending queries will be rejected once the timeout (in seconds) is reached and the connections will be destroyed. 
- -#### Sample shutdown using [Prexit](https://github.com/porsager/prexit) - -```js - -import prexit from 'prexit' - -prexit(async () => { - await sql.end({ timeout: 5 }) - await new Promise(r => server.close(r)) -}) - -``` - ## Numbers, bigint, numeric `Number` in javascript is only able to represent 253-1 safely which means that types in PostgreSQLs like `bigint` and `numeric` won't fit into `Number`. @@ -665,7 +620,7 @@ const sql = postgres({ There is currently no guaranteed way to handle `numeric / decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way in this case is to use [custom types](#custom-types). -## Connection options +## Connection details ### All Postgres options @@ -792,6 +747,66 @@ const sql = postgres() Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `no_prepare` option. For instance — this is useful when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93). +## Custom Types + +You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where type is the PostgreSQL `oid` for the type and the correctly serialized string. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_ + +Adding Query helpers is the cleanest approach which can be done like this: + +```js +const sql = postgres({ + types: { + rect: { + // The pg_types oid to pass to the db along with the serialized value. + to : 1337, + + // An array of pg_types oids to handle when parsing values coming from the db. + from : [1337], + + //Function that transform values before sending them to the db. + serialize : ({ x, y, width, height }) => [x, y, width, height], + + // Function that transforms values coming from the db. 
+ parse : ([x, y, width, height]) => { x, y, width, height } + } + } +}) + +// Now you can use sql.typed.rect() as specified above +const [custom] = sql` + insert into rectangles ( + name, + rect + ) values ( + 'wat', + ${ sql.typed.rect({ x: 13, y: 37, width: 42, height: 80 }) } + ) + returning * +` + +// custom = { name: 'wat', rect: { x: 13, y: 37, width: 42, height: 80 } } + +``` + +## Teardown / Cleanup + +To ensure proper teardown and cleanup on server restarts use `await sql.end()` before `process.exit()`. + +Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a `{ timeout }` option is provided any pending queries will be rejected once the timeout (in seconds) is reached and the connections will be destroyed. + +#### Sample shutdown using [Prexit](https://github.com/porsager/prexit) + +```js + +import prexit from 'prexit' + +prexit(async () => { + await sql.end({ timeout: 5 }) + await new Promise(r => server.close(r)) +}) + +``` + ## Error handling Errors are all thrown to related queries and never globally. Errors coming from database itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection. 
From f049db42be2984ae5f38d8a359c770f498485ccb Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 23:01:47 +0100 Subject: [PATCH 43/51] Build deno + cjs --- cjs/src/connection.js | 11 ++++++----- cjs/src/errors.js | 4 ++-- cjs/src/index.js | 4 ++-- cjs/src/types.js | 4 ++-- cjs/tests/index.js | 13 +++++++++++-- deno/src/connection.js | 11 ++++++----- deno/src/errors.js | 4 ++-- deno/src/index.js | 4 ++-- deno/src/types.js | 4 ++-- deno/tests/index.js | 13 +++++++++++-- 10 files changed, 46 insertions(+), 26 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index 3c6eef61..de4eab19 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -51,6 +51,7 @@ const errorFields = { function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) { const { ssl, + max, user, host, port, @@ -169,7 +170,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function toBuffer(q) { if (q.parameters.length >= 65534) - throw Errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) + throw Errors.generic('MAX_PARAMETERS_EXCEEDED', 'Max number of parameters (65534) exceeded') return q.options.simple ? 
b().Q().str(q.strings[0] + b.N).end() @@ -557,6 +558,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl final && (final(), final = null) + if (result.command === 'BEGIN' && max !== 1 && !connection.reserved) + return errored(Errors.generic('UNSAFE_TRANSACTION', 'Only use sql.begin or max: 1')) + if (query.options.simple) return @@ -679,10 +683,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature) return /* c8 ignore next 5 */ - errored(Errors.generic({ - message: 'The server did not return the correct signature', - code: 'SASL_SIGNATURE_MISMATCH' - })) + errored(Errors.generic('SASL_SIGNATURE_MISMATCH', 'The server did not return the correct signature')) socket.destroy() } diff --git a/cjs/src/errors.js b/cjs/src/errors.js index 027e8b75..ef66149a 100644 --- a/cjs/src/errors.js +++ b/cjs/src/errors.js @@ -33,8 +33,8 @@ function postgres(x) { return error } -function generic(x) { - const error = Object.assign(new Error(x.message), x) +function generic(code, message) { + const error = Object.assign(new Error(code + ': ' + message), { code }) Error.captureStackTrace(error, generic) return error } diff --git a/cjs/src/index.js b/cjs/src/index.js index 97e2c34a..21ef995a 100644 --- a/cjs/src/index.js +++ b/cjs/src/index.js @@ -114,7 +114,7 @@ function Postgres(a, b) { return query } - function file(path, args = [], options = { cache: true }) { + function file(path, args = [], options = {}) { arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) const query = new Query([], args, (query) => { fs.readFile(path, 'utf8', (err, string) => { @@ -359,7 +359,7 @@ function Postgres(a, b) { : ( queries.remove(query), query.cancelled = true, - query.reject(Errors.generic({ code: '57014', message: 'canceling statement due to user request' })), + query.reject(Errors.generic('57014', 'canceling statement due to user 
request')), resolve() ) }) diff --git a/cjs/src/types.js b/cjs/src/types.js index 86ef5cce..42657874 100644 --- a/cjs/src/types.js +++ b/cjs/src/types.js @@ -83,7 +83,7 @@ const Builder = module.exports.Builder = class Builder extends NotTagged { module.exports.handleValue = handleValue;function handleValue(x, parameters, types) { const value = x instanceof Parameter ? x.value : x if (value === undefined) - throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) + throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed') return '$' + (types.push( x instanceof Parameter @@ -155,7 +155,7 @@ const builders = Object.entries({ }).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn])) function notTagged() { - throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) + throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal') } const serializers = module.exports.serializers = defaultHandlers.serializers diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 59d1c634..56d2617a 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -160,6 +160,15 @@ t('null for int', async() => { return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`] }) +t('Throws on illegal transactions', async() => { + const sql = postgres({ ...options, max: 2, fetch_types: false }) + const error = await sql`begin`.catch(e => e) + return [ + error.code, + 'UNSAFE_TRANSACTION' + ] +}) + t('Transaction throws', async() => { await sql`create table test (a int)` return ['22P02', await sql.begin(async sql => { @@ -942,7 +951,7 @@ t('dynamic select args', async() => { t('dynamic values single row', async() => { const [{ b }] = await sql` - select * from (values ${ sql(['a', 'b', 'c']) }) AS x(a, b, c) + select * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c) ` return ['b', b] @@ -950,7 +959,7 @@ t('dynamic values 
single row', async() => { t('dynamic values multi row', async() => { const [, { b }] = await sql` - select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) AS x(a, b, c) + select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) as x(a, b, c) ` return ['b', b] diff --git a/deno/src/connection.js b/deno/src/connection.js index 809ff302..4b72b308 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -54,6 +54,7 @@ const errorFields = { function Connection(options, { onopen = noop, onend = noop, ondrain = noop, onclose = noop } = {}) { const { ssl, + max, user, host, port, @@ -172,7 +173,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl function toBuffer(q) { if (q.parameters.length >= 65534) - throw Errors.generic({ message: 'Max number of parameters (65534) exceeded', code: 'MAX_PARAMETERS_EXCEEDED' }) + throw Errors.generic('MAX_PARAMETERS_EXCEEDED', 'Max number of parameters (65534) exceeded') return q.options.simple ? 
b().Q().str(q.strings[0] + b.N).end() @@ -560,6 +561,9 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl final && (final(), final = null) + if (result.command === 'BEGIN' && max !== 1 && !connection.reserved) + return errored(Errors.generic('UNSAFE_TRANSACTION', 'Only use sql.begin or max: 1')) + if (query.options.simple) return @@ -682,10 +686,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (x.toString('utf8', 9).split(b.N, 1)[0].slice(2) === serverSignature) return /* c8 ignore next 5 */ - errored(Errors.generic({ - message: 'The server did not return the correct signature', - code: 'SASL_SIGNATURE_MISMATCH' - })) + errored(Errors.generic('SASL_SIGNATURE_MISMATCH', 'The server did not return the correct signature')) socket.destroy() } diff --git a/deno/src/errors.js b/deno/src/errors.js index f83c9f39..0ff83c42 100644 --- a/deno/src/errors.js +++ b/deno/src/errors.js @@ -33,8 +33,8 @@ function postgres(x) { return error } -function generic(x) { - const error = Object.assign(new Error(x.message), x) +function generic(code, message) { + const error = Object.assign(new Error(code + ': ' + message), { code }) Error.captureStackTrace(error, generic) return error } diff --git a/deno/src/index.js b/deno/src/index.js index 941642d7..7a16b029 100644 --- a/deno/src/index.js +++ b/deno/src/index.js @@ -115,7 +115,7 @@ function Postgres(a, b) { return query } - function file(path, args = [], options = { cache: true }) { + function file(path, args = [], options = {}) { arguments.length === 2 && !Array.isArray(args) && (options = args, args = []) const query = new Query([], args, (query) => { fs.readFile(path, 'utf8', (err, string) => { @@ -360,7 +360,7 @@ function Postgres(a, b) { : ( queries.remove(query), query.cancelled = true, - query.reject(Errors.generic({ code: '57014', message: 'canceling statement due to user request' })), + query.reject(Errors.generic('57014', 'canceling statement due to user 
request')), resolve() ) }) diff --git a/deno/src/types.js b/deno/src/types.js index 5b6fca4b..a3dabd10 100644 --- a/deno/src/types.js +++ b/deno/src/types.js @@ -84,7 +84,7 @@ export class Builder extends NotTagged { export function handleValue(x, parameters, types) { const value = x instanceof Parameter ? x.value : x if (value === undefined) - throw Errors.generic({ code: 'UNDEFINED_VALUE', message: 'Undefined values are not allowed' }) + throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed') return '$' + (types.push( x instanceof Parameter @@ -156,7 +156,7 @@ const builders = Object.entries({ }).map(([x, fn]) => ([new RegExp('(^|[\\s(])' + x, 'i'), fn])) function notTagged() { - throw Errors.generic({ message: 'Query not called as a tagged template literal', code: 'NOT_TAGGED_CALL' }) + throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal') } export const serializers = defaultHandlers.serializers diff --git a/deno/tests/index.js b/deno/tests/index.js index 468219eb..713b077b 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -161,6 +161,15 @@ t('null for int', async() => { return [1, (await sql`insert into test values(${ null })`).count, await sql`drop table test`] }) +t('Throws on illegal transactions', async() => { + const sql = postgres({ ...options, max: 2, fetch_types: false }) + const error = await sql`begin`.catch(e => e) + return [ + error.code, + 'UNSAFE_TRANSACTION' + ] +}) + t('Transaction throws', async() => { await sql`create table test (a int)` return ['22P02', await sql.begin(async sql => { @@ -943,7 +952,7 @@ t('dynamic select args', async() => { t('dynamic values single row', async() => { const [{ b }] = await sql` - select * from (values ${ sql(['a', 'b', 'c']) }) AS x(a, b, c) + select * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c) ` return ['b', b] @@ -951,7 +960,7 @@ t('dynamic values single row', async() => { t('dynamic values multi row', async() => { const [, { b }] 
= await sql` - select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) AS x(a, b, c) + select * from (values ${ sql([['a', 'b', 'c'], ['a', 'b', 'c']]) }) as x(a, b, c) ` return ['b', b] From 14e2993ae2d57fb87b5ce8ba5b692bea01544cb0 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 23:16:03 +0100 Subject: [PATCH 44/51] Fix npm files --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index c98f5358..3de35fcb 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,8 @@ "prepublishOnly": "npm run lint" }, "files": [ - "/cjs", + "/cjs/src", + "/cjs/package.json", "/src", "/types" ], From 00dd39ced3f28926e2a8d3b2293ac7f39b3056e9 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Sun, 20 Mar 2022 23:19:42 +0100 Subject: [PATCH 45/51] 3.0.0-rc.1 --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 3de35fcb..52a2f28e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "postgres", - "version": "2.0.0-beta.11", + "version": "3.0.0-rc.1", "description": "Fastest full featured PostgreSQL client for Node.js", "type": "module", "module": "src/index.js", @@ -31,8 +31,8 @@ ], "author": "Rasmus Porsager (https://www.porsager.com)", "funding": { - "type" : "individual", - "url" : "https://github.com/sponsors/porsager" + "type": "individual", + "url": "https://github.com/sponsors/porsager" }, "license": "Unlicense", "repository": "porsager/postgres", From fc1fac50d7b87adc9b0216a0c6298f9ccb265e85 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Mon, 21 Mar 2022 20:55:00 +0100 Subject: [PATCH 46/51] Mark connection as closed immediately on terminate --- cjs/src/connection.js | 1 + deno/src/connection.js | 1 + src/connection.js | 1 + 3 files changed, 3 insertions(+) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index de4eab19..d58597f6 100644 --- a/cjs/src/connection.js +++ 
b/cjs/src/connection.js @@ -400,6 +400,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) + onclose(connection) clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) diff --git a/deno/src/connection.js b/deno/src/connection.js index 4b72b308..faf10245 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -403,6 +403,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) + onclose(connection) clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) diff --git a/src/connection.js b/src/connection.js index c52a8e40..7d9f2ca1 100644 --- a/src/connection.js +++ b/src/connection.js @@ -400,6 +400,7 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) + onclose(connection) clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) From 2549e55eb388200b97d155810bebab81a9c4cac7 Mon Sep 17 00:00:00 2001 From: s13k Date: Mon, 21 Mar 2022 20:25:37 +0000 Subject: [PATCH 47/51] fixed docs typos + minor cleanup (#280) * fixed docs typos + minor cleanup * image alignment fix Co-authored-by: s13k --- README.md | 109 +++++++++++++++++++++--------------------------------- 1 file changed, 43 insertions(+), 66 deletions(-) diff --git a/README.md b/README.md index d068053f..6dd9463e 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -Fastest full PostgreSQL nodejs client +Fastest full PostgreSQL nodejs client - [🚀 Fastest full-featured node & deno 
client](https://github.com/porsager/postgres-benchmarks#results) - 🏷 ES6 Tagged Template Strings at the core @@ -11,7 +11,7 @@ ## Getting started
-Good UX with Postgres.js +Good UX with Postgres.js
### Installation @@ -37,9 +37,9 @@ import sql from './db.js' async function getUsersOver(age) { const users = await sql` - select + select name, - age + age from users where age > ${ age } ` @@ -50,17 +50,15 @@ async function getUsersOver(age) { async function insertUser({ name, age }) { const users = sql` - insert into users - (name, age) - values + insert into users + (name, age) + values (${ name }, ${ age }) returning name, age ` // users = Result [{ name: "Murray", age: 68 }] return users } - - ``` ## Table of Contents @@ -106,14 +104,13 @@ More options can be found in the [Connection details section](#connection-detail Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using tagged template literals benefits developers by: 1. **Enforcing** safe query generation -2. Giving the `sql`` ` function powerful [utility](#dynamic-inserts) and [query building](#building-queries) features. +2. Giving the ` sql`` ` function powerful [utility](#dynamic-inserts) and [query building](#building-queries) features. Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholder `$1, $2, ...`. The parameters are then sent separately to the database which handles escaping & casting. All queries will return a `Result` array, with objects mapping column names to each row. 
```js - const xs = await sql` insert into users ( name, age @@ -157,8 +154,8 @@ const users = await sql` const columns = ['name', 'age'] sql` - select - ${ sql(columns) } + select + ${ sql(columns) } from users ` @@ -194,7 +191,7 @@ const users = [{ name: 'Murray', age: 68, garbage: 'ignore' -}, +}, { name: 'Walter', age: 80 @@ -213,7 +210,7 @@ insert into users ("name", "age") values ($1, $2), ($3, $4) ``` ### Dynamic columns in updates -This is also useful for update queries +This is also useful for update queries ```js const user = { id: 1, @@ -224,7 +221,7 @@ const user = { sql` update users set ${ sql(user, 'name', 'age') - } + } where user_id = ${ user.id } ` @@ -232,7 +229,7 @@ sql` update users set "name" = $1, "age" = $2 where user_id = $3 ``` -### Dyanmic values and `where in` +### Dynamic values and `where in` Value lists can also be created dynamically, making `where in` queries simple too. ```js const users = await sql` @@ -243,12 +240,13 @@ const users = await sql` ` ``` -or +or ```js const [{ a, b, c }] => await sql` - select - * + select + * from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c) +` ``` ## Building queries @@ -265,7 +263,7 @@ const filterAge = true sql` select * - from users + from users where name is not null ${ filterAge ? olderThan(50) @@ -280,11 +278,11 @@ select * from users where name is not null and age > 50 ### Dynamic filters ```js -sql` +sql` select * from users ${ - id + id ? 
sql`where user_id = ${ id }` : sql`` } @@ -301,7 +299,7 @@ Using keywords or calling functions dynamically is also possible by using ``` sq ```js const date = null -sql` +sql` update users set updated_at = ${ date || sql`now()` } ` @@ -334,8 +332,8 @@ Use cursors if you need to throttle the amount of rows being returned from a que ##### callback function ```js await sql` - select - * + select + * from generate_series(1,4) as x `.cursor(async([row]) => { // row = { x: 1 } @@ -357,8 +355,8 @@ for await (const [row] of cursor) { A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument to `.cursor`: ```js await sql` - select - * + select + * from generate_series(1,1000) as x `.cursor(10, async rows => { // rows = [{ x: 1 }, { x: 2 }, ... ] @@ -373,13 +371,11 @@ If an error is thrown inside the callback function no more rows will be requeste You can close the cursor early either by calling `break` in the `for await...of` loop, or by returning the token `sql.CLOSE` from the callback function. ```js - await sql` select * from generate_series(1,1000) as x `.cursor(row => { - return Math.random() > 0.9 && sql.END + return Math.random() > 0.9 && sql.CLOSE // or sql.END }) - ``` ### .forEach() @@ -388,7 +384,6 @@ await sql` If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows. ```js - await sql` select created_at, name from events `.forEach(row => { @@ -398,7 +393,7 @@ await sql` // No more rows ``` -### describe +### describe #### ```await sql``.describe([rows = 1], fn) -> Result[]``` Rather than executing a given query, `.describe` will return information utilized in the query process. This information can include the query identifier, column types, etc. 
@@ -418,9 +413,7 @@ This can be useful to receive identically named columns, or for specific perform Using a `.sql` file for a query is also supported with optional parameters to use if the file includes `$1, $2, etc` ```js - const result = await sql.file('query.sql', ['Murray', 68]) - ``` ### Canceling Queries in Progress @@ -428,11 +421,9 @@ const result = await sql.file('query.sql', ['Murray', 68]) Postgres.js supports, [canceling queries in progress](https://www.postgresql.org/docs/7.1/protocol-protocol.html#AEN39000). It works by opening a new connection with a protocol level startup message to cancel the current query running on a specific connection. That means there is no guarantee that the query will be canceled, and due to the possible race conditions it might even result in canceling another query. This is fine for long running queries, but in the case of high load and fast queries it might be better to simply ignore results instead of canceling. ```js - const query = sql`select pg_sleep 100`.execute() setTimeout(() => query.cancel(), 100) const result = await query - ``` ### Unsafe raw string queries @@ -442,12 +433,10 @@ const result = await query ### `await sql.unsafe(query, [args], [options]) -> Result[]` -If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful. +If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to SQL injection if you're not careful. ```js - sql.unsafe('select ' + danger + ' from users where id = ' + dragons) - ```
@@ -460,7 +449,6 @@ Use `sql.begin` to start a new transaction. Postgres.js will reserve a connectio `BEGIN` is automatically sent with the optional options, and if anything fails `ROLLBACK` will be called so the connection can be released and execution can continue. ```js - const [user, account] = await sql.begin(async sql => { const [user] = await sql` insert into users ( @@ -480,25 +468,21 @@ const [user, account] = await sql.begin(async sql => { return [user, account] }) - ``` It's also possible to pipeline the requests in a transaction if needed by returning an array with queries from the callback function like this: ```js - const result = await sql.begin(sql => [ sql`update ...`, sql`update ...`, sql`insert ...` ]) - ``` #### SAVEPOINT `await sql.savepoint([name], fn) -> fn()` ```js - sql.begin('read write', async sql => { const [user] = await sql` insert into users ( @@ -508,7 +492,7 @@ sql.begin('read write', async sql => { ) ` - const [account] = (await sql.savepoint(sql => + const [account] = (await sql.savepoint(sql => sql` insert into accounts ( user_id @@ -528,7 +512,6 @@ sql.begin('read write', async sql => { .catch(() => { // not so good - ROLLBACK was called }) - ``` Do note that you can often achieve the same result using [`WITH` queries (Common Table Expressions)](https://www.postgresql.org/docs/current/queries-with.html) instead of using transactions. @@ -540,19 +523,15 @@ When you call `.listen`, a dedicated connection will be created to ensure that y `.listen` returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active. ```js - await sql.listen('news', payload => { const json = JSON.parse(payload) console.log(json.this) // logs 'is' }) - ``` -Notify can be done as usual in sql, or by using the `sql.notify` method. +Notify can be done as usual in SQL, or by using the `sql.notify` method. 
```js - sql.notify('news', JSON.stringify({ no: 'this', is: 'news' })) - ``` ## Realtime subscribe @@ -585,7 +564,7 @@ You can subscribe to specific operations, tables, or even rows with primary keys **`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*` -**`schema`** defaults to `public.` +**`schema`** defaults to `public` **`table`** is a specific table name and defaults to `*` @@ -605,7 +584,7 @@ sql.subscribe('update:users=1', () => /* all updates on the users row with a p `Number` in javascript is only able to represent 253-1 safely which means that types in PostgreSQLs like `bigint` and `numeric` won't fit into `Number`. -Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately, it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string. +Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately, it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string. If you want to use `BigInt` you can add this custom type: @@ -617,7 +596,7 @@ const sql = postgres({ }) ``` -There is currently no guaranteed way to handle `numeric / decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way in this case is to use [custom types](#custom-types). +There is currently no guaranteed way to handle `numeric` / `decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way in this case is to use [custom types](#custom-types). 
## Connection details @@ -626,7 +605,7 @@ There is currently no guaranteed way to handle `numeric / decimal` types in nati ```js const sql = postgres('postgres://username:password@host:port/database', { - host : '', // Postgres ip address[s] or domain name[s] + host : '', // Postgres ip address[es] or domain name[s] port : 5432, // Postgres server port[s] path : '', // unix socket path (usually '/tmp') database : '', // Name of database to connect to @@ -651,7 +630,7 @@ const sql = postgres('postgres://username:password@host:port/database', { application_name : 'postgres.js', // Default application_name ... // Other connection parameters }, - target_session_attrs : null, // Use 'read-write' with multiple hosts to + target_session_attrs : null, // Use 'read-write' with multiple hosts to // ensure only connecting to primary fetch_types : true, // Automatically fetches types on connect // on initial connection. @@ -678,7 +657,7 @@ For more information regarding `ssl` with `postgres`, check out the [Node.js doc ### Multi-host connections - High Availability (HA) -Multiple connection strings can be passed to `postgres()` in the form of `postgres('postgres://localhost:5432,localhost:5433', ...)`. This works the same as native the `psql` command. Read more at [multiple host uris](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS) +Multiple connection strings can be passed to `postgres()` in the form of `postgres('postgres://localhost:5432,localhost:5433', ...)`. This works the same as native the `psql` command. Read more at [multiple host URIs](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS). Connections will be attempted in order of the specified hosts/ports. On a successful connection, all retries will be reset. This ensures that hosts can come up and down seamlessly. 
@@ -686,9 +665,9 @@ If you specify `target_session_attrs: 'primary'` or `PGTARGETSESSIONATTRS=primar ### The Connection Pool -Connections are created lazily once a query is created. This means that simply doing const `sql = postgres(...)` won't have any effect other than instantiating a new `sql` instance. +Connections are created lazily once a query is created. This means that simply doing const `sql = postgres(...)` won't have any effect other than instantiating a new `sql` instance. -> No connection will be made until a query is made. +> No connection will be made until a query is made. This means that we get a much simpler story for error handling and reconnections. Queries will be sent over the wire immediately on the next available connection in the pool. Connections are automatically taken out of the pool if you start a transaction using `sql.begin()`, and automatically returned to the pool once your transaction is done. @@ -719,9 +698,9 @@ const sql = postgres({ ### Auto fetching of array types -Postgres.js will automatically fetch table/array-type information when it first connects to a database. +Postgres.js will automatically fetch table/array-type information when it first connects to a database. -If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled. +If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled. You can disable this feature by setting `fetch_types` to `false`. @@ -797,14 +776,12 @@ Calling `sql.end()` will reject new queries and return a Promise which resolves #### Sample shutdown using [Prexit](https://github.com/porsager/prexit) ```js - import prexit from 'prexit' prexit(async () => { await sql.end({ timeout: 5 }) await new Promise(r => server.close(r)) }) - ``` ## Error handling @@ -896,7 +873,7 @@ return users[0] ``` You can also prefer destructuring when you only care about a fixed number of rows. 
-In this case, we recommand you to prefer using tuples to handle `undefined` properly: +In this case, we recommend you to prefer using tuples to handle `undefined` properly: ```ts const [user]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` if (!user) // => User | undefined @@ -908,7 +885,7 @@ const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // don't fail : `second: User | undefined` ``` -We do our best to type all the public API, however types are not always updated when features are added ou changed. Feel free to open an issue if you have trouble with types. +We do our best to type all the public API, however types are not always updated when features are added or changed. Feel free to open an issue if you have trouble with types. ## Migration tools From 6e615ae324f87f429c6e42cadb65cab03264f15a Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Mon, 21 Mar 2022 23:48:14 +0100 Subject: [PATCH 48/51] Revert "Mark connection as closed immediately on terminate" This reverts commit fc1fac50d7b87adc9b0216a0c6298f9ccb265e85. 
--- cjs/src/connection.js | 1 - deno/src/connection.js | 1 - src/connection.js | 1 - 3 files changed, 3 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index d58597f6..de4eab19 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -400,7 +400,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) - onclose(connection) clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) diff --git a/deno/src/connection.js b/deno/src/connection.js index faf10245..4b72b308 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -403,7 +403,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) - onclose(connection) clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) diff --git a/src/connection.js b/src/connection.js index 7d9f2ca1..c52a8e40 100644 --- a/src/connection.js +++ b/src/connection.js @@ -400,7 +400,6 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl if (stream || query || initial || sent.length) error(Errors.connection('CONNECTION_DESTROYED', options)) - onclose(connection) clearImmediate(nextWriteTimer) socket.removeListener('data', data) socket.removeListener('connect', connected) From 315407edd2a343b59ad595bc4362277b1648df2b Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 23 Mar 2022 21:15:23 +0100 Subject: [PATCH 49/51] Fix race condition with transactions on end --- cjs/src/connection.js | 8 +++++--- cjs/src/index.js | 5 ----- cjs/tests/index.js | 13 +++++++++++++ deno/src/connection.js | 8 +++++--- deno/src/index.js | 5 ----- deno/tests/index.js | 13 +++++++++++++ src/connection.js | 8 +++++--- 
src/index.js | 5 ----- tests/index.js | 13 +++++++++++++ 9 files changed, 54 insertions(+), 24 deletions(-) diff --git a/cjs/src/connection.js b/cjs/src/connection.js index de4eab19..14760caf 100644 --- a/cjs/src/connection.js +++ b/cjs/src/connection.js @@ -535,9 +535,11 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl return // Consider opening if able and sent.length < 50 connection.reserved - ? x[5] === 73 - ? ending && terminate() - : connection.reserved() // I + ? x[5] === 73 // I + ? ending + ? terminate() + : (connection.reserved = null, onopen(connection)) + : connection.reserved() : ending ? terminate() : onopen(connection) diff --git a/cjs/src/index.js b/cjs/src/index.js index 21ef995a..816b2678 100644 --- a/cjs/src/index.js +++ b/cjs/src/index.js @@ -193,11 +193,6 @@ function Postgres(a, b) { return await scope(connection, fn) } catch (error) { throw error - } finally { - if (connection) { - connection.reserved = null - onopen(connection) - } } async function scope(c, fn, name) { diff --git a/cjs/tests/index.js b/cjs/tests/index.js index 56d2617a..85508809 100644 --- a/cjs/tests/index.js +++ b/cjs/tests/index.js @@ -1921,3 +1921,16 @@ t('Prevent premature end of connection in transaction', async() => { result ] }) + +t('Ensure reconnect after max_lifetime with transactions', { timeout: 5000 }, async() => { + const sql = postgres({ + max_lifetime: 0.01, + idle_timeout, + max: 1 + }) + + let x = 0 + while (x++ < 10) await sql.begin(sql => sql`select 1 as x`) + + return [true, true] +}) diff --git a/deno/src/connection.js b/deno/src/connection.js index 4b72b308..b2ff5b9a 100644 --- a/deno/src/connection.js +++ b/deno/src/connection.js @@ -538,9 +538,11 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl return // Consider opening if able and sent.length < 50 connection.reserved - ? x[5] === 73 - ? ending && terminate() - : connection.reserved() // I + ? x[5] === 73 // I + ? ending + ? 
terminate() + : (connection.reserved = null, onopen(connection)) + : connection.reserved() : ending ? terminate() : onopen(connection) diff --git a/deno/src/index.js b/deno/src/index.js index 7a16b029..82cdeb59 100644 --- a/deno/src/index.js +++ b/deno/src/index.js @@ -194,11 +194,6 @@ function Postgres(a, b) { return await scope(connection, fn) } catch (error) { throw error - } finally { - if (connection) { - connection.reserved = null - onopen(connection) - } } async function scope(c, fn, name) { diff --git a/deno/tests/index.js b/deno/tests/index.js index 713b077b..5a4ea5c6 100644 --- a/deno/tests/index.js +++ b/deno/tests/index.js @@ -1922,3 +1922,16 @@ t('Prevent premature end of connection in transaction', async() => { result ] }) + +t('Ensure reconnect after max_lifetime with transactions', { timeout: 5000 }, async() => { + const sql = postgres({ + max_lifetime: 0.01, + idle_timeout, + max: 1 + }) + + let x = 0 + while (x++ < 10) await sql.begin(sql => sql`select 1 as x`) + + return [true, true] +}) diff --git a/src/connection.js b/src/connection.js index c52a8e40..c6dcc2e9 100644 --- a/src/connection.js +++ b/src/connection.js @@ -535,9 +535,11 @@ function Connection(options, { onopen = noop, onend = noop, ondrain = noop, oncl return // Consider opening if able and sent.length < 50 connection.reserved - ? x[5] === 73 - ? ending && terminate() - : connection.reserved() // I + ? x[5] === 73 // I + ? ending + ? terminate() + : (connection.reserved = null, onopen(connection)) + : connection.reserved() : ending ? 
terminate() : onopen(connection) diff --git a/src/index.js b/src/index.js index 61664e69..691a2c97 100644 --- a/src/index.js +++ b/src/index.js @@ -193,11 +193,6 @@ function Postgres(a, b) { return await scope(connection, fn) } catch (error) { throw error - } finally { - if (connection) { - connection.reserved = null - onopen(connection) - } } async function scope(c, fn, name) { diff --git a/tests/index.js b/tests/index.js index 3451fbd6..876f85ec 100644 --- a/tests/index.js +++ b/tests/index.js @@ -1921,3 +1921,16 @@ t('Prevent premature end of connection in transaction', async() => { result ] }) + +t('Ensure reconnect after max_lifetime with transactions', { timeout: 5000 }, async() => { + const sql = postgres({ + max_lifetime: 0.01, + idle_timeout, + max: 1 + }) + + let x = 0 + while (x++ < 10) await sql.begin(sql => sql`select 1 as x`) + + return [true, true] +}) From 5076f604dcca528a592ab39e5dfceb7313f40a25 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Wed, 23 Mar 2022 22:30:35 +0100 Subject: [PATCH 50/51] 3.0.0-rc.2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 52a2f28e..2d323201 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "postgres", - "version": "3.0.0-rc.1", + "version": "3.0.0-rc.2", "description": "Fastest full featured PostgreSQL client for Node.js", "type": "module", "module": "src/index.js", From 06b27cd46e911e3b1ea0100f462efeed89742a93 Mon Sep 17 00:00:00 2001 From: Rasmus Porsager Date: Thu, 24 Mar 2022 16:54:55 +0100 Subject: [PATCH 51/51] Fix unlicense --- LICENSE => UNLICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename LICENSE => UNLICENSE (94%) diff --git a/LICENSE b/UNLICENSE similarity index 94% rename from LICENSE rename to UNLICENSE index 68a49daa..efb98088 100644 --- a/LICENSE +++ b/UNLICENSE @@ -21,4 +21,4 @@ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH 
THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -For more information, please refer to +For more information, please refer to