Package Exports
- @pgkit/client
- @pgkit/client/dist/index.js
This package does not declare an exports field, so the exports above have been automatically detected and optimized by JSPM instead. If any package subpath is missing, it is recommended to post an issue to the original package (@pgkit/client) to support the "exports" field. If that is not possible, create a JSPM override to customize the exports field for this package.
Readme
@pgkit/client
A strongly-typed postgres client for node.js

Contents
API
sql.array:
const result = await client.any(sql`
select *
from usage_test
where name = any(${sql.array(['one', 'two'], 'text')})
`)
expect(result).toEqual([
{id: 1, name: 'one'},
{id: 2, name: 'two'},
])

sql.identifier:
const result = await client.oneFirst(sql`
select count(1)
from ${sql.identifier(['public', 'usage_test'])}
`)
expect(Number(result)).toEqual(3)

sql.unnest:
const entries = [
{id: 1, name: 'one'},
{id: 2, name: 'two'},
{id: 3, name: 'three'},
{id: 4, name: 'four'},
]
const result = await client.any(sql`
insert into usage_test(id, name)
select *
from ${sql.unnest(
entries.map(({id, name}) => [id, name]),
['int4', 'text'],
)}
returning *
`)
expect(result).toEqual([
{id: 1, name: 'one'},
{id: 2, name: 'two'},
{id: 3, name: 'three'},
{id: 4, name: 'four'},
])

sql.join:
const [result] = await client.any(sql`
update usage_test
set ${sql.join([sql`name = 'one hundred'`, sql`id = 100`], sql`, `)}
where id = 1
returning *
`)
expect(result).toEqual({id: 100, name: 'one hundred'})

sql.fragment:
const condition = sql.fragment`id = 1`
const result = await client.one(sql`select * from usage_test where ${condition}`)
expect(result).toEqual({id: 1, name: 'one'})

sql.interval:
const result = await client.oneFirst(sql`
select '2000-01-01T12:00:00Z'::timestamptz + ${sql.interval({
days: 1,
hours: 1,
})} as ts
`)
expect(result).toBeInstanceOf(Date)
expect(result).toMatchInlineSnapshot(`2000-01-02T13:00:00.000Z`)
const interval = await client.oneFirst(sql`select ${sql.interval({days: 1})}`)
expect(interval).toMatchInlineSnapshot(`"1 day"`)

sql.binary:
const result = await client.oneFirst(sql`
select ${sql.binary(Buffer.from('hello'))} as b
`)
expect(result).toMatchInlineSnapshot(`"\\x68656c6c6f"`)

sql.json:
await client.query(sql`
drop table if exists jsonb_test;
create table jsonb_test (id int, data jsonb);
`)
const insert = await client.one(sql`
insert into jsonb_test values (1, ${sql.json({foo: 'bar'})})
returning *
`)
expect(insert).toEqual({data: {foo: 'bar'}, id: 1})
const insert3 = await client.one(sql`
insert into jsonb_test values (1, ${JSON.stringify({foo: 'bar'})})
returning *
`)
expect(insert3).toEqual(insert)

sql.jsonb:
const insert2 = await client.one(sql`
insert into jsonb_test values (1, ${sql.jsonb({foo: 'bar'})})
returning *
`)
expect(insert2).toEqual(insert)

sql.literalValue:
const result = await client.transaction(async tx => {
await tx.query(sql`set local search_path to ${sql.literalValue('abc')}`)
return tx.one(sql`show search_path`)
})
expect(result).toEqual({search_path: 'abc'})
const result2 = await client.one(sql`show search_path`)
expect(result2).toEqual({search_path: '"$user", public'})

sub-transactions:
const result = await client.transaction(async t1 => {
const count1 = await t1.oneFirst(sql`select count(1) from usage_test where id > 3`)
const count2 = await t1.transaction(async t2 => {
await t2.query(sql`insert into usage_test(id, name) values (5, 'five')`)
return t2.oneFirst(sql`select count(1) from usage_test where id > 3`)
})
return {count1, count2}
})
expect(result).toEqual({count1: 0, count2: 1})

transaction savepoints:
let error: Error | undefined
await client.transaction(async t1 => {
await t1.query(sql`insert into usage_test(id, name) values (10, 'ten')`)
await t1
.transaction(async t2 => {
await t2.query(sql`insert into usage_test(id, name) values (11, 'eleven')`)
throw new Error(`Uh-oh`)
})
.catch(e => {
error = e as Error
})
})
expect(error).toBeInstanceOf(Error)
expect(error).toMatchInlineSnapshot(`[Error: Uh-oh]`)
const newRecords = await client.any(sql`select * from usage_test where id >= 10`)
expect(newRecords).toEqual([{id: 10, name: 'ten'}])

sql.type:
const Fooish = z.object({foo: z.number()})
await expect(client.one(sql.type(Fooish)`select 1 as foo`)).resolves.toMatchInlineSnapshot(`
{
"foo": 1,
}
`)
await expect(client.one(sql.type(Fooish)`select 'hello' as foo`)).rejects.toMatchInlineSnapshot(`
[Error: [Query select_c2b3cb1]: [
{
"code": "invalid_type",
"expected": "number",
"received": "string",
"path": [
"foo"
],
"message": "Expected number, received string"
}
]]
`)

sql.typeAlias:
const sql = createSqlTag({
typeAliases: {
foo: z.object({
foo: z.string(),
}),
},
})
const result = await client.one(sql.typeAlias('foo')`select 'hi' as foo`)
expectTypeOf(result).toEqualTypeOf<{foo: string}>()
expect(result).toEqual({foo: 'hi'})
await expect(client.one(sql.typeAlias('foo')`select 123 as foo`)).rejects.toMatchInlineSnapshot(`
[Error: [Query select_1534c96]: [
{
"code": "invalid_type",
"expected": "string",
"received": "number",
"path": [
"foo"
],
"message": "Expected string, received number"
}
]]
`)

Get started
npm install @pgkit/client

import {sql, createClient} from '@pgkit/client'
const client = createClient('postgres://postgres:postgres@localhost:5432/postgres')
export const getProfile = async (id: string) => {
const profile = await client.one(sql`select * from profile where id = ${id}`)
return {
name: profile.name,
}
}
export const updateProfileName = (id: string, name: string) => {
await client.transaction(async tx => {
const profile = await tx.one(sql`
update profile set name = ${name} where id = ${id} returning *
`)
await tx.query(sql`
insert into some_other_table (foo) values (${profile.foo})
`)
})
}

Types
The companion library @pgkit/typegen will add typescript types to your queries. This offers a pretty unique developer experience. You get the type-safety of an ORM, but without any of the tradeoffs: no vendor lock-in, no having to learn how to use the ORM rather than PostgreSQL, no non-performant queries, no limitations on the queries you can run.
Check out the typegen package for more details, but essentially it will analyse your SQL queries, and map PostgreSQL types to TypeScript, to transform code like this:
const profiles = await client.any(sql`select * from profile`)

Into this:
const profiles = await client.any(sql<queries.Profile>`select * from profile`)
declare namespace queries {
// Generated by @pgkit/typegen
export interface Profile {
id: string
name: string | null
}
}

Alternatively, you can manually define the type for a query:
const profiles = await client.any(sql<{id: string; name: string}>`select * from profile`)
// 👆 will have type `Array<{id: string; name: string}>`

Zod
If you like, you can use zod to parse query results:
const Profile = z.object({id: z.string(), name: z.string()})
const profiles = await client.any(sql.type(Profile)`select * from profile`)

This will use zod to validate each row returned by your query.
Note that zod is not a dependency of this library, nor even a peer dependency. In fact, you could use a different library entirely, as long as you provide a "type" which has a parse method:
import * as v from 'valibot'
const ProfileSchema = v.object({id: v.string(), name: v.string()})
const Profile = {
parse: (input: unknown) => v.parse(ProfileSchema, input),
}
const profiles = await client.any(sql.type(Profile)`select * from profile`)

Recipes
Inserting many rows with sql.unnest:
// Pass an array of rows to be inserted. There's only one variable in the generated SQL per column
await client.query(sql`
insert into recipes_test(id, name)
select *
from ${sql.unnest(
[
[1, 'one'],
[2, 'two'],
[3, 'three'],
],
['int4', 'text'],
)}
`)
expect(sqlProduced).toMatchInlineSnapshot(`
[
{
"sql": "\\n insert into recipes_test(id, name)\\n select *\\n from unnest($1::int4[], $2::text[])\\n ",
"values": [
[
1,
2,
3
],
[
"one",
"two",
"three"
]
]
}
]
`)

Query logging:
// Simplistic way of logging query times. For more accurate results, use process.hrtime()
const log = vi.fn()
const client = createClient('postgresql://postgres:postgres@localhost:5432/postgres', {
wrapQueryFn: queryFn => {
return async query => {
const start = Date.now()
const result = await queryFn(query)
const end = Date.now()
log({start, end, took: end - start, query, result})
return result
}
},
})
await client.query(sql`select * from recipes_test`)
expect(log.mock.calls[0][0]).toMatchInlineSnapshot(
{
start: expect.any(Number),
end: expect.any(Number),
took: expect.any(Number),
},
`
{
"start": {
"inverse": false
},
"end": {
"inverse": false
},
"took": {
"inverse": false
},
"query": {
"name": "select-recipes_test_8d7ce25",
"sql": "select * from recipes_test",
"token": "sql",
"values": []
},
"result": {
"rows": [
{
"id": 1,
"name": "one"
},
{
"id": 2,
"name": "two"
},
{
"id": 3,
"name": "three"
}
]
}
}
`,
)

query timeouts:
const shortTimeoutMs = 20
const impatient = createClient(client.connectionString() + '?shortTimeout', {
pgpOptions: {
connect: ({client}) => {
client.connectionParameters.query_timeout = shortTimeoutMs
},
},
})
const patient = createClient(client.connectionString() + '?longTimeout', {
pgpOptions: {
connect: ({client}) => {
client.connectionParameters.query_timeout = shortTimeoutMs * 3
},
},
})
const sleepSeconds = (shortTimeoutMs * 2) / 1000
await expect(impatient.one(sql`select pg_sleep(${sleepSeconds})`)).rejects.toThrowErrorMatchingInlineSnapshot(
`
{
"cause": {
"query": {
"name": "select_9dcc021",
"sql": "select pg_sleep($1)",
"token": "sql",
"values": [
0.04
]
},
"error": {
"query": "select pg_sleep(0.04)"
}
}
}
`,
)
await expect(patient.one(sql`select pg_sleep(${sleepSeconds})`)).resolves.toMatchObject({
pg_sleep: '',
})

switchable clients:
const shortTimeoutMs = 20
const impatientClient = createClient(client.connectionString() + '?shortTimeout', {
pgpOptions: {
connect: ({client}) => {
client.connectionParameters.query_timeout = shortTimeoutMs
},
},
})
const patientClient = createClient(client.connectionString() + '?longTimeout', {
pgpOptions: {
connect: ({client}) => {
client.connectionParameters.query_timeout = shortTimeoutMs * 3
},
},
})
const appClient = createClient(client.connectionString(), {
wrapQueryFn: queryFn => {
return async query => {
let clientToUse = patientClient
try {
// use https://www.npmjs.com/package/pgsql-ast-parser - note that this is just an example, you may want to do something like route
// readonly queries to a readonly connection, and others to a readwrite connection.
const parsed = pgSqlAstParser.parse(query.sql)
if (parsed.every(statement => statement.type === 'select')) {
clientToUse = impatientClient
}
} catch {}
return clientToUse.query(query)
}
},
})
const sleepSeconds = (shortTimeoutMs * 2) / 1000
await expect(
appClient.one(sql`
select pg_sleep(${sleepSeconds})
`),
).rejects.toThrowErrorMatchingInlineSnapshot(`
{
"cause": {
"query": {
"name": "select_6289211",
"sql": "\\n select pg_sleep($1)\\n ",
"token": "sql",
"values": [
0.04
]
},
"error": {
"query": "\\n select pg_sleep(0.04)\\n "
}
}
}
`)
await expect(
appClient.one(sql`
with delay as (
select pg_sleep(${sleepSeconds})
)
insert into recipes_test (id, name)
values (10, 'ten')
returning *
`),
).resolves.toMatchObject({
id: 10,
name: 'ten',
})

mocking:
const fakeDb = pgMem.newDb() // https://www.npmjs.com/package/pg-mem
const client = createClient('postgresql://', {
wrapQueryFn: () => {
return async query => {
// not a great way to do pass to pg-mem, in search of a better one: https://github.com/oguimbal/pg-mem/issues/384
let statement = pgSqlAstParser.parse(query.sql)
statement = JSON.parse(JSON.stringify(statement), (key, value) => {
if (value?.type === 'parameter' && typeof value?.name === 'string') {
const literalValue = query.values[Number(value.name.slice(1)) - 1]
return {type: 'string', value: literalValue}
}
return value
})
console.dir({statement}, {depth: 100})
return fakeDb.public.query(statement)
}
},
})
await client.query(sql`create table recipes_test(id int, name text)`)
const insert = await client.one(sql`insert into recipes_test(id, name) values (${10}, 'ten') returning *`)
expect(insert).toMatchObject({id: 10, name: 'ten'})
const select = await client.any(sql`select name from recipes_test`)
expect(select).toMatchObject([{name: 'ten'}])

Comparison with slonik
- The API is inspired by Slonik, or rather what Slonik used to be/I wish it still were. The "driver" for the client is pg-promise. But the query API and
sql tag design is from Slonik. So, mostly, you can use this as a drop-in replacement for slonik.
Generally, usage of a client (or pool, to use the slonik term), should be identical. Initialization is likely different. Some differences which would likely require code changes if migrating from slonik:
Most slonik initialization options are not carried over. I haven't come across any abstractions invented by slonik which don't have simpler implementations in the underlying layer or in pg-promise. Specifically:
type parsers: just use
pg.types.setTypeParser. Some helper functions to achieve parity with slonik, and this library's recommendations, are available, but they're trivial and you can just as easily implement them yourself.

interceptors: Instead of interceptors, which require book-keeping in order to do things as simple as tracking query timings, there's an option to wrap the core
query function this library calls. The wrapped function will be called for all query methods. For the other slonik interceptors, you can use pg-promise events.

custom errors: when a query produces an error, this library will throw an error with the corresponding message, along with a tag for the query which caused it. Slonik wraps each error type with a custom class. From a few years working with slonik, the re-thrown errors tend to make the useful information in the underlying error harder to find (less visible in Sentry, etc.). The purpose of the wrapper errors is to protect against potentially changing underlying errors, but there are dozens of breaking changes in Slonik every year, so pgkit opts to rely on the design and language chosen by PostgreSQL instead.
Added features/improvements
sql
Interestingly, slonik removed the ability to use the sql tag directly, when Gajus decided he wanted to start using zod parsers. There were many attempts to point out other use-cases and options, but to no avail.
In slonik, you need to use sql.unsafe, which is untyped. Note that the recommendation is never to use it, and there's been some indication that even this will go away, but there are many examples of its usage in the slonik readme:
const profile = await client.one(sql.unsafe`select * from profile`)
// 👆 has type `any` 👆 no generic typearg supported

In @pgkit/client:
const profile = await client.one(sql<Profile>`select * from profile`)
// 👆 has type `Profile`

sql.raw
Slonik doesn't let you do this, but there are certain cases when you need to run a SQL query directly. Note that, as the name implies, this is somewhat dangerous. Make sure you trust the query you pass into this.
const query = 'select 1 as foo'
await client.query(sql.raw(query))

(P.S., even though slonik claims to not allow this - it doesn't actually have a way to stop you. Here's how in practice people achieve the same in slonik - it's more confusing to look at, but lacks the "raw" label despite being equally dangerous, which is why pgkit opts to support it):
const query = 'select 1 as foo'
const result = await pool.one({
parser: z.any(),
sql: query,
type: 'SLONIK_TOKEN_QUERY',
values: [],
})

Non-readonly output types
This one is my fault, really. I was the one who ported slonik to TypeScript, and I carried over some FlowType readonlys. Unfortunately, slonik's output types are marked readonly, which means they're quite awkward to work with. For example, you can't pass them to a normal utility function which hasn't marked its inputs as readonly (even if it doesn't mutate the array). For example:
const groupBy = <T>(list: T[], fn: (item: T) => string) => {
// ...groupBy implementation
}
const profiles = await client.any(sql.type(Profile)`select * from profile`)
const byEmailHost = groupBy(profiles, p => p.email.split('@')[1])
// 👆 type error: `profiles` is readonly, but groupBy accepts a regular array.

It's fixable by making sure all utility functions take readonly inputs, but this is a pain, and sometimes even leads to unnecessary calls to .slice(), or dangerous casting, in practice.
Errors
Errors from the underlying driver are wrapped but the message is not changed. A prefix corresponding to the query name is added to the message.
For errors based on the number of rows returned (for one, oneFirst, many, manyFirst etc.) the query and result are added to the cause property.
Here's what some sample errors look like
one error:
await expect(pool.one(sql`select * from test_errors where id > 1`)).rejects.toMatchInlineSnapshot(
`
{
"message": "[Query select-test_errors_36f5f64]: Expected one row",
"cause": {
"query": {
"name": "select-test_errors_36f5f64",
"sql": "select * from test_errors where id > 1",
"token": "sql",
"values": []
},
"result": {
"rows": [
{
"id": 2,
"name": "two"
},
{
"id": 3,
"name": "three"
}
]
}
}
}
`,
)

maybeOne error:
await expect(pool.maybeOne(sql`select * from test_errors where id > 1`)).rejects.toMatchInlineSnapshot(`
{
"message": "[Query select-test_errors_36f5f64]: Expected at most one row",
"cause": {
"query": {
"name": "select-test_errors_36f5f64",
"sql": "select * from test_errors where id > 1",
"token": "sql",
"values": []
},
"result": {
"rows": [
{
"id": 2,
"name": "two"
},
{
"id": 3,
"name": "three"
}
]
}
}
}
`)

many error:
await expect(pool.many(sql`select * from test_errors where id > 100`)).rejects.toMatchInlineSnapshot(`
{
"message": "[Query select-test_errors_34cad85]: Expected at least one row",
"cause": {
"query": {
"name": "select-test_errors_34cad85",
"sql": "select * from test_errors where id > 100",
"token": "sql",
"values": []
},
"result": {
"rows": []
}
}
}
`)

syntax error:
await expect(pool.query(sql`select * frooom test_errors`)).rejects.toMatchInlineSnapshot(`
{
"message": "[Query select_fb83277]: syntax error at or near \\"frooom\\"",
"pg_code": "42601",
"pg_code_name": "syntax_error",
"cause": {
"query": {
"name": "select_fb83277",
"sql": "select * frooom test_errors",
"token": "sql",
"values": []
},
"error": {
"length": 95,
"name": "error",
"severity": "ERROR",
"code": "42601",
"position": "10",
"file": "scan.l",
"line": "1145",
"routine": "scanner_yyerror",
"query": "select * frooom test_errors"
}
}
}
`)
const err: Error = await pool.query(sql`select * frooom test_errors`).catch(e => e)
expect(err.stack).toMatchInlineSnapshot(`
Error: [Query select_fb83277]: syntax error at or near "frooom"
at Object.query (<repo>/packages/client/src/client.ts:<line>:<col>)
at <repo>/packages/client/test/errors.test.ts:<line>:<col>
`)
expect((err as QueryError).cause?.error?.stack).toMatchInlineSnapshot(`
error: syntax error at or near "frooom"
at Parser.parseErrorMessage (<repo>/node_modules/.pnpm/pg-protocol@1.6.0/node_modules/pg-protocol/src/parser.ts:<line>:<col>)
at Parser.handlePacket (<repo>/node_modules/.pnpm/pg-protocol@1.6.0/node_modules/pg-protocol/src/parser.ts:<line>:<col>)
at Parser.parse (<repo>/node_modules/.pnpm/pg-protocol@1.6.0/node_modules/pg-protocol/src/parser.ts:<line>:<col>)
at Socket.<anonymous> (<repo>/node_modules/.pnpm/pg-protocol@1.6.0/node_modules/pg-protocol/src/index.ts:<line>:<col>)
`)

Ecosystem
@pgkit/client is the basis for these libraries:
- @pgkit/admin - A zero-config admin UI for running queries against PostgreSQL database, with autocomplete for tables, columns, views, functions etc.
- @pgkit/migra - A port of @djrobstep's python migra library.
- @pgkit/migrator - A cli migration tool for postgres, using pgkit.
- @pgkit/schemainspect - A port of @djrobstep's python schemainspect library.
- @pgkit/typegen - A library that uses pgkit to generate typescript interfaces based on your sql
Note that @pgkit/migra and @pgkit/schemainspect are pure ports of their Python equivalents. They are fantastically useful, and hopefully more and more can be built on top of them in the future.
👽 Future
Some features that will be added to @pgkit/client at some point, that may or may not be in slonik too:
- an analogue of the QueryFile concept in pg-promise. This will be integrated with
@pgkit/typegen to enable an API something like await client.any(sql.file('my-file.sql'))
- an analogue of the PreparedStatement concept in pg-promise.
- support for specifying the types of parameters in SQL queries as well as results. For example,
sql`select * from profile where id = ${x} and name = ${y}` - we can add type-safety to ensure that x is a number, and y is a string (say)
- first-class support for query naming. Read Show Your Query You Love It By Naming It from the fantastic HoneyComb blog for some context on this.
- a
pgkit mono-package, which exposes this client, as well as the above packages, as a single dependency. TBD on how to keep package size manageable - or whether that's important given this will almost always be used in a server environment
- equivalents to the "Community interceptors" in the slonik docs - mostly as an implementation guide of how they can be achieved, and to verify that there's no functional loss from not having interceptors.