From 408bebd9900f0b41558076c85317ed5ebe8c9488 Mon Sep 17 00:00:00 2001 From: Brian C Date: Fri, 5 Apr 2024 16:48:44 -0500 Subject: [PATCH] Fix import syntax for commonJS in documentation (#3191) --- docs/pages/announcements.mdx | 3 ++- docs/pages/apis/client.mdx | 15 ++++++++++----- docs/pages/apis/cursor.mdx | 6 ++++-- docs/pages/apis/pool.mdx | 18 ++++++++++++------ docs/pages/apis/result.mdx | 3 ++- docs/pages/features/connecting.mdx | 15 ++++++++++----- docs/pages/features/native.mdx | 3 ++- docs/pages/features/pooling.mdx | 9 ++++++--- docs/pages/features/ssl.mdx | 3 ++- docs/pages/features/transactions.mdx | 5 +++-- docs/pages/guides/async-express.md | 5 +++-- docs/pages/guides/project-structure.md | 11 +++++++---- docs/pages/guides/upgrading.md | 17 +++++++++-------- docs/pages/index.mdx | 9 ++++++--- 14 files changed, 78 insertions(+), 44 deletions(-) diff --git a/docs/pages/announcements.mdx b/docs/pages/announcements.mdx index 2bc55e00a..87929731e 100644 --- a/docs/pages/announcements.mdx +++ b/docs/pages/announcements.mdx @@ -116,7 +116,8 @@ pg@7.1.2 To demonstrate the issue & see if you are vunerable execute the following in node: ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() client.connect() diff --git a/docs/pages/apis/client.mdx b/docs/pages/apis/client.mdx index 081f2954a..56b596783 100644 --- a/docs/pages/apis/client.mdx +++ b/docs/pages/apis/client.mdx @@ -29,7 +29,8 @@ type Config = { example to create a client with specific connection information: ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client({ host: 'my.database-server.com', @@ -43,7 +44,8 @@ const client = new Client({ ## client.connect ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() await client.connect() @@ -82,7 +84,8 @@ client.query(text: string, values?: any[]) => Promise **Plain text query** ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() await client.connect() @@ -96,7 +99,8 @@ await client.end() **Parameterized query** ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() await client.connect() @@ -134,7 +138,8 @@ await client.end() If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass it's PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work. ```js -import { Query } from 'pg' +import pg from 'pg' +const { Query } = pg const query = new Query('select $1::text as name', ['brianc']) const result = client.query(query) diff --git a/docs/pages/apis/cursor.mdx b/docs/pages/apis/cursor.mdx index eadde4bfc..7728520c6 100644 --- a/docs/pages/apis/cursor.mdx +++ b/docs/pages/apis/cursor.mdx @@ -18,7 +18,8 @@ $ npm install pg pg-cursor Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method. 
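If your project is plain CommonJS and can't use `import` syntax at all, the same destructuring works with `require` — a rough sketch of the equivalent setup (connection details are omitted here, so the usual environment variables are assumed):

```js
// CommonJS equivalent of the import form used throughout these docs (a sketch)
const pg = require('pg')
const { Pool } = pg
const Cursor = require('pg-cursor')

const pool = new Pool()
```

The ESM form used in the example below is the one this patch updates.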
```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg import Cursor from 'pg-cursor' const pool = new Pool() @@ -57,7 +58,8 @@ If the cursor has read to the end of the result sets all subsequent calls to cur Here is an example of reading to the end of a cursor: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg import Cursor from 'pg-cursor' const pool = new Pool() diff --git a/docs/pages/apis/pool.mdx b/docs/pages/apis/pool.mdx index 3cf32b6c4..d3975c1d8 100644 --- a/docs/pages/apis/pool.mdx +++ b/docs/pages/apis/pool.mdx @@ -48,7 +48,8 @@ type Config = { example to create a new pool with configuration: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool({ host: 'localhost', @@ -68,7 +69,8 @@ pool.query(text: string, values?: any[]) => Promise ``` ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -100,7 +102,8 @@ Acquires a client from the pool. - If the pool is 'full' and all clients are currently checked out will wait in a FIFO queue until a client becomes available by it being released back to the pool. ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -118,7 +121,8 @@ Client instances returned from `pool.connect` will have a `release` method which The `release` method on an acquired client returns it back to the pool. If you pass a truthy value in the `destroy` parameter, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client. ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -130,7 +134,8 @@ client.release() ``` ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() assert(pool.totalCount === 0) @@ -163,7 +168,8 @@ Calling `pool.end` will drain the pool of all active clients, disconnect them, a ```js // again both promises and callbacks are supported: -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() diff --git a/docs/pages/apis/result.mdx b/docs/pages/apis/result.mdx index 314d50497..8a23e697c 100644 --- a/docs/pages/apis/result.mdx +++ b/docs/pages/apis/result.mdx @@ -18,7 +18,8 @@ Every result will have a rows array. If no rows are returned the array will be e Every result will have a fields array. This array contains the `name` and `dataTypeID` of each field in the result. These fields are ordered in the same order as the columns if you are using `arrayMode` for the query: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() diff --git a/docs/pages/features/connecting.mdx b/docs/pages/features/connecting.mdx index 3745024ca..2708f70e6 100644 --- a/docs/pages/features/connecting.mdx +++ b/docs/pages/features/connecting.mdx @@ -7,7 +7,8 @@ title: Connecting node-postgres uses the same [environment variables](https://www.postgresql.org/docs/9.1/static/libpq-envars.html) as libpq and psql to connect to a PostgreSQL server. Both individual clients & pools will use these environment variables. 
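The variables it reads are the standard libpq ones. For illustration only (these names are what node-postgres looks for; the values are made-up examples):

```js
// libpq-style variables node-postgres reads; the values here are examples,
// and in practice they'd be set in the shell or a .env file, not in code
process.env.PGUSER = 'dbuser'
process.env.PGHOST = 'database.server.com'
process.env.PGPASSWORD = 'secretpassword'
process.env.PGDATABASE = 'mydb'
process.env.PGPORT = '5432'
```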
Here's a tiny program connecting node.js to the PostgreSQL server: ```js -import { Pool, Client } from 'pg' +import pg from 'pg' +const { Pool, Client } = pg // pools will use environment variables // for connection information @@ -54,7 +55,8 @@ PGPORT=5432 node-postgres also supports configuring a pool or client programmatically with connection information. Here's our same script from above modified to use programmatic (hard-coded in this case) values. This can be useful if your application already has a way to manage config values or you don't want to use environment variables. ```js -import { Pool, Client } from 'pg' +import pg from 'pg' +const { Pool, Client } = pg const pool = new Pool({ user: 'dbuser', @@ -84,7 +86,8 @@ await client.end() Many cloud providers include alternative methods for connecting to database instances using short-lived authentication tokens. node-postgres supports dynamic passwords via a callback function, either synchronous or asynchronous. The callback function must resolve to a string. ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg import { RDS } from 'aws-sdk' const signerOptions = { @@ -116,7 +119,8 @@ const pool = new Pool({ Connections to unix sockets can also be made. This can be useful on distros like Ubuntu, where authentication is managed via the socket connection instead of a password. ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg client = new Client({ host: '/cloudsql/myproject:zone:mydb', user: 'username', @@ -130,7 +134,8 @@ client = new Client({ You can initialize both a pool and a client with a connection string URI as well. This is common in environments like Heroku where the database connection string is supplied to your application dyno through an environment variable. Connection string parsing brought to you by [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string). ```js -import { Pool, Client } from 'pg' +import pg from 'pg' +const { Pool, Client } = pg const connectionString = 'postgresql://dbuser:secretpassword@database.server.com:3211/mydb' const pool = new Pool({ diff --git a/docs/pages/features/native.mdx b/docs/pages/features/native.mdx index c6f860119..cdec4ae9b 100644 --- a/docs/pages/features/native.mdx +++ b/docs/pages/features/native.mdx @@ -15,7 +15,8 @@ $ npm install pg pg-native Once `pg-native` is installed instead of requiring a `Client` or `Pool` constructor from `pg` you do the following: ```js -import { native } from 'pg' +import pg from 'pg' +const { native } = pg const { Client, Pool } = native ``` diff --git a/docs/pages/features/pooling.mdx b/docs/pages/features/pooling.mdx index e5e36345c..ebe2844bc 100644 --- a/docs/pages/features/pooling.mdx +++ b/docs/pages/features/pooling.mdx @@ -28,7 +28,8 @@ The client pool allows you to have a reusable pool of clients you can check out, ### Checkout, use, and return ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -60,7 +61,8 @@ client.release() If you don't need a transaction or you just need to run a single query, the pool has a convenience method to run a query on any available client in the pool. This is the preferred way to query with node-postgres if you can as it removes the risk of leaking a client. ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -73,7 +75,8 @@ console.log('user:', res.rows[0]) To shut down a pool call `pool.end()` on the pool. 
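A rough sketch of calling it from a shutdown hook (the `SIGTERM` wiring here is just an example, not something the pool requires):

```js
// stop handing out clients and close the ones we have when asked to shut down
process.on('SIGTERM', async () => {
  await pool.end()
  process.exit(0)
})
```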
This will wait for all checked-out clients to be returned and then shut down all the clients and the pool timers. ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() console.log('starting async query') diff --git a/docs/pages/features/ssl.mdx b/docs/pages/features/ssl.mdx index 95683aca1..2ead9ee47 100644 --- a/docs/pages/features/ssl.mdx +++ b/docs/pages/features/ssl.mdx @@ -22,7 +22,8 @@ const config = { }, } -import { Client, Pool } from 'pg' +import pg from 'pg' +const { Client, Pool } = pg const client = new Client(config) await client.connect() diff --git a/docs/pages/features/transactions.mdx b/docs/pages/features/transactions.mdx index 492cbbe0e..9280d1f40 100644 --- a/docs/pages/features/transactions.mdx +++ b/docs/pages/features/transactions.mdx @@ -16,7 +16,8 @@ To execute a transaction with node-postgres you simply execute `BEGIN / COMMIT / ## Examples ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() const client = await pool.connect() @@ -36,4 +37,4 @@ try { } finally { client.release() } -``` \ No newline at end of file +``` diff --git a/docs/pages/guides/async-express.md b/docs/pages/guides/async-express.md index 982fdc50c..601164524 100644 --- a/docs/pages/guides/async-express.md +++ b/docs/pages/guides/async-express.md @@ -22,11 +22,12 @@ That's the same structure I used in the [project structure](/guides/project-stru My `db/index.js` file usually starts out like this: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() -export const query = (text, params) => pool.query(text, params); +export const query = (text, params) => pool.query(text, params) ``` Then I will install [express-promise-router](https://www.npmjs.com/package/express-promise-router) and use it to define my routes. Here is my `routes/user.js` file: diff --git a/docs/pages/guides/project-structure.md b/docs/pages/guides/project-structure.md index 95d0018d9..e4dafbad0 100644 --- a/docs/pages/guides/project-structure.md +++ b/docs/pages/guides/project-structure.md @@ -27,7 +27,8 @@ The location doesn't really matter - I've found it usually ends up being somewha Typically I'll start out my `db/index.js` file like so: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -54,7 +55,8 @@ app.get('/:id', async (req, res, next) => { Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -74,7 +76,8 @@ _note: I didn't log the query parameters. Depending on your application you migh Now what if we need to check out a client from the pool to run several queries in a row in a transaction? 
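For example, a route or service might want to run something like this against one checked-out client (a rough sketch — `transferFunds`, the `accounts` table, and the import path are made up for illustration, and it relies on the `getClient` helper we're about to add):

```js
// sketch of transactional usage built on the getClient helper defined below
import * as db from '../db/index.js'

export async function transferFunds(fromId, toId, amount) {
  const client = await db.getClient()
  try {
    await client.query('BEGIN')
    await client.query('UPDATE accounts SET balance = balance - $1 WHERE id = $2', [amount, fromId])
    await client.query('UPDATE accounts SET balance = balance + $1 WHERE id = $2', [amount, toId])
    await client.query('COMMIT')
  } catch (err) {
    await client.query('ROLLBACK')
    throw err
  } finally {
    client.release()
  }
}
```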
We can add another method to our `db/index.js` file when we need to do this: ```js -import { Pool } from 'pg' +import pg from 'pg' +const { Pool } = pg const pool = new Pool() @@ -85,7 +88,7 @@ export const query = async (text, params) => { console.log('executed query', { text, duration, rows: res.rowCount }) return res } - + export const getClient = () => { return pool.connect() } diff --git a/docs/pages/guides/upgrading.md b/docs/pages/guides/upgrading.md index e3bd941c8..fe435d7cd 100644 --- a/docs/pages/guides/upgrading.md +++ b/docs/pages/guides/upgrading.md @@ -5,13 +5,13 @@ slug: /guides/upgrading # Upgrading to 8.0 -node-postgres at 8.0 introduces a breaking change to ssl-verified connections. If you connect with ssl and use +node-postgres at 8.0 introduces a breaking change to ssl-verified connections. If you connect with ssl and use ``` const client = new Client({ ssl: true }) ``` -and the server's SSL certificate is self-signed, connections will fail as of node-postgres 8.0. To keep the existing behavior, modify the invocation to +and the server's SSL certificate is self-signed, connections will fail as of node-postgres 8.0. To keep the existing behavior, modify the invocation to ``` const client = new Client({ ssl: { rejectUnauthorized: false } }) @@ -37,7 +37,7 @@ If your application still relies on these they will be _gone_ in `pg@7.0`. In or // old way, deprecated in 6.3.0: // connection using global singleton -pg.connect(function(err, client, done) { +pg.connect(function (err, client, done) { client.query(/* etc, etc */) done() }) @@ -53,7 +53,7 @@ pg.end() var pool = new pg.Pool() // connection using created pool -pool.connect(function(err, client, done) { +pool.connect(function (err, client, done) { client.query(/* etc, etc */) done() }) @@ -102,11 +102,12 @@ If you do **not** pass a callback `client.query` will return an instance of a `P `client.query` has always accepted any object that has a `.submit` method on it. In this scenario the client calls `.submit` on the object, delegating execution responsibility to it. In this situation the client also **returns the instance it was passed**. This is how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work. So, if you need the event emitter functionality on your queries for some reason, it is still possible because `Query` is an instance of `Submittable`: ```js -import { Client, Query } from 'pg' +import pg from 'pg' +const { Client, Query } = pg const query = client.query(new Query('SELECT NOW()')) -query.on('row', row => {}) -query.on('end', res => {}) -query.on('error', res => {}) +query.on('row', (row) => {}) +query.on('end', (res) => {}) +query.on('error', (res) => {}) ``` `Query` is considered a public, documented part of the API of node-postgres and this form will be supported indefinitely. diff --git a/docs/pages/index.mdx b/docs/pages/index.mdx index efdd2fdcc..0330e2c79 100644 --- a/docs/pages/index.mdx +++ b/docs/pages/index.mdx @@ -26,7 +26,8 @@ node-postgres strives to be compatible with all recent LTS versions of node & th The simplest possible way to connect, query, and disconnect is with async/await: ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() await client.connect() @@ -40,7 +41,8 @@ await client.end() For the sake of simplicity, these docs will assume that the methods are successful. In real life use, make sure to properly handle errors thrown in the methods. 
A `try/catch` block is a great way to do so: ```ts -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() await client.connect() @@ -59,7 +61,8 @@ try { If you prefer a callback-style approach to asynchronous programming, all async methods support an optional callback parameter as well: ```js -import { Client } from 'pg' +import pg from 'pg' +const { Client } = pg const client = new Client() client.connect((err) => {