Commit a18467f

wip: add a global result size

1 parent 529fb59 commit a18467f

6 files changed: +76 −77

package.json

Lines changed: 1 addition & 2 deletions
```diff
@@ -36,6 +36,5 @@
     "arrowParens": "always",
     "trailingComma": "es5",
     "singleQuote": true
-  },
-  "packageManager": "[email protected]+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"
+  }
 }
```

packages/pg/README.md

Lines changed: 0 additions & 33 deletions
````diff
@@ -21,45 +21,12 @@ $ npm install pg
 - Pure JavaScript client and native libpq bindings share _the same API_
 - Connection pooling
 - Extensible JS ↔ PostgreSQL data-type coercion
-- Memory safety with configurable result size limits
 - Supported PostgreSQL features
   - Parameterized queries
   - Named statements with query plan caching
   - Async notifications with `LISTEN/NOTIFY`
   - Bulk import & export with `COPY TO/COPY FROM`

-### Memory Safety with Result Size Limits
-
-To prevent out-of-memory errors when dealing with unexpectedly large query results, you can set a maximum result size:
-
-```js
-const { Client, Pool } = require('pg')
-
-// For a single client
-const client = new Client({
-  // other connection options
-  maxResultSize: 50 * 1024 * 1024 // 50MB limit
-})
-
-// Or with a pool
-const pool = new Pool({
-  // other connection options
-  maxResultSize: 10 * 1024 * 1024 // 10MB limit
-})
-
-// If a query result exceeds the limit, it will emit an error with code 'RESULT_SIZE_EXCEEDED'
-client.query('SELECT * FROM large_table').catch(err => {
-  if (err.code === 'RESULT_SIZE_EXCEEDED') {
-    console.error(`Query result exceeded size limit of ${err.maxResultSize} bytes`)
-    // Handle gracefully - perhaps use a cursor or add a LIMIT clause
-  } else {
-    // Handle other errors
-  }
-})
-```
-
-For large datasets, consider using [pg-cursor](https://github.com/brianc/node-postgres/tree/master/packages/pg-cursor) to process rows in batches.
-
 ### Extras

 node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture.
````
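
The removed README section ends by pointing at pg-cursor for batch processing. For reference, a minimal sketch of that pattern using pg-cursor's promise-based read API (the table name and batch size are illustrative):

```js
const { Client } = require('pg')
const Cursor = require('pg-cursor')

async function processInBatches() {
  const client = new Client()
  await client.connect()

  // Stream the result 100 rows at a time instead of buffering it all
  const cursor = client.query(new Cursor('SELECT * FROM large_table'))
  let rows = await cursor.read(100)
  while (rows.length > 0) {
    for (const row of rows) {
      // process each row here
    }
    rows = await cursor.read(100)
  }

  await cursor.close()
  await client.end()
}
```

Because only one batch is buffered at a time, memory stays bounded regardless of the total result size.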

packages/pg/lib/client.js

Lines changed: 6 additions & 6 deletions
```diff
@@ -52,7 +52,7 @@ class Client extends EventEmitter {
       keepAlive: c.keepAlive || false,
       keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0,
       encoding: this.connectionParameters.client_encoding || 'utf8',
-      maxResultSize: c.maxResultSize
+      maxResultSize: c.maxResultSize,
     })
     this.queryQueue = []
     this.binary = c.binary || defaults.binary
@@ -244,7 +244,7 @@ class Client extends EventEmitter {
     }
   }

-  _handleAuthCleartextPassword(_msg) {
+  _handleAuthCleartextPassword(msg) {
     this._checkPgPass(() => {
       this.connection.password(this.password)
     })
@@ -300,7 +300,7 @@ class Client extends EventEmitter {
     this.secretKey = msg.secretKey
   }

-  _handleReadyForQuery(_msg) {
+  _handleReadyForQuery(msg) {
     if (this._connecting) {
       this._connecting = false
       this._connected = true
@@ -377,12 +377,12 @@ class Client extends EventEmitter {
     this.activeQuery.handleDataRow(msg)
   }

-  _handlePortalSuspended(_msg) {
+  _handlePortalSuspended(msg) {
     // delegate portalSuspended to active query
     this.activeQuery.handlePortalSuspended(this.connection)
   }

-  _handleEmptyQuery(_msg) {
+  _handleEmptyQuery(msg) {
     // delegate emptyQuery to active query
     this.activeQuery.handleEmptyQuery(this.connection)
   }
@@ -411,7 +411,7 @@ class Client extends EventEmitter {
     }
   }

-  _handleCopyInResponse(_msg) {
+  _handleCopyInResponse(msg) {
     this.activeQuery.handleCopyInResponse(this.connection)
   }

```
packages/pg/lib/connection.js

Lines changed: 6 additions & 24 deletions
```diff
@@ -133,55 +133,37 @@ class Connection extends EventEmitter {
   _attachListenersWithSizeLimit(stream) {
     parse(stream, (msg) => {
       var eventName = msg.name === 'error' ? 'errorMessage' : msg.name
-
+
       // Only track data row messages for result size
       if (msg.name === 'dataRow') {
         // Approximate size by using message length
-        const msgSize = msg.length || this._getApproximateMessageSize(msg)
+        const msgSize = msg.length || 1024 // Default to 1KB if we don't have length info
         this._currentResultSize += msgSize
-
+
         // Check if we've exceeded the max result size
         if (this._currentResultSize > this._maxResultSize) {
           const error = new Error('Query result size exceeded the configured limit')
           error.code = 'RESULT_SIZE_EXCEEDED'
           error.resultSize = this._currentResultSize
           error.maxResultSize = this._maxResultSize
-          this.emit('error', error)
+          this.emit('errorMessage', error)
           this.end() // Terminate the connection
           return
         }
       }
-
+
       // Reset counter on query completion
       if (msg.name === 'readyForQuery') {
         this._currentResultSize = 0
       }
-
+
       if (this._emitMessage) {
         this.emit('message', msg)
       }
       this.emit(eventName, msg)
     })
   }

-  // Helper method to approximate message size when length is not available
-  _getApproximateMessageSize(msg) {
-    let size = 0
-    if (msg.fields) {
-      // Sum up the sizes of field values
-      msg.fields.forEach(field => {
-        if (field && typeof field === 'string') {
-          size += field.length;
-        } else if (field && typeof field === 'object') {
-          size += JSON.stringify(field).length;
-        } else if (field !== null && field !== undefined) {
-          size += String(field).length;
-        }
-      });
-    }
-    return size > 0 ? size : 1024; // Default to 1KB if we can't determine size
-  }
-
   requestSsl() {
     this.stream.write(serialize.requestSsl())
   }
```
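
Two details of this hunk are easy to miss: when the parser supplies no message length, the counter falls back to a 1KB estimate per row, and the error is now emitted as 'errorMessage' rather than 'error', which routes it through the client's server-error path so the active query rejects instead of the client firing a connection-level 'error' event. A sketch of what that looks like from calling code, assuming the maxResultSize option added by this commit (the query is illustrative):

```js
const { Client } = require('pg')

const client = new Client({ maxResultSize: 1024 }) // 1KB cap, for demonstration

async function main() {
  await client.connect()
  try {
    await client.query("SELECT repeat('x', 1000) FROM generate_series(1, 100)")
  } catch (err) {
    // Delivered via 'errorMessage', so it surfaces as a query rejection
    console.log(err.code) // 'RESULT_SIZE_EXCEEDED'
    console.log(err.resultSize, err.maxResultSize) // bytes counted vs. the cap
  }
  // The limit check already called connection.end(), so a follow-up
  // client.end() can itself fail; swallow errors as the tests below do.
  await client.end().catch(() => {})
}

main()
```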

packages/pg/test/integration/client/max-result-size-tests.js

Lines changed: 2 additions & 12 deletions
```diff
@@ -4,16 +4,6 @@ var assert = require('assert')
 const { Client } = helper.pg
 const suite = new helper.Suite()

-// Just verify the test infrastructure works
-suite.test('sanity check', function (done) {
-  var client = new Client(helper.args)
-  client.connect(assert.success(function () {
-    client.query('SELECT 1 as num', assert.success(function(result) {
-      assert.equal(result.rows.length, 1)
-      client.end(done)
-    }))
-  }))
-})

 // Basic test to check if the _maxResultSize property is passed to Connection
 suite.test('client passes maxResultSize to connection', function (done) {
@@ -82,7 +72,7 @@ suite.testAsync('large result triggers error', async () => {

   // Start the query but don't await it (it will error)
   const queryPromise = client.query('SELECT repeat(\'x\', 1000) as data FROM generate_series(1, 100)')
-    .catch(err => {
+    .catch(() => {
       // We expect this to error out, silence the rejection
       return null
     })
@@ -94,5 +84,5 @@ suite.testAsync('large result triggers error', async () => {
   await queryPromise

   // Clean up
-  await client.end().catch(() => {}) // Ignore errors during cleanup
+  await client.end().catch(() => {})
 })
```
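
The test only exercises the failure path. In application code the same error can drive a fallback; a hypothetical helper (not part of this commit) that retries once with a bounded row count, on a fresh connection since the limit check terminates the first one:

```js
const { Client } = require('pg')

// Hypothetical fallback: run a query and, if it trips maxResultSize,
// retry once with a capped row count on a new connection.
async function queryWithRowCap(makeClient, text, cap = 1000) {
  const client = makeClient()
  await client.connect()
  try {
    return await client.query(text)
  } catch (err) {
    if (err.code !== 'RESULT_SIZE_EXCEEDED') throw err
    const retry = makeClient()
    await retry.connect()
    try {
      return await retry.query(`${text} LIMIT ${cap}`)
    } finally {
      await retry.end()
    }
  } finally {
    await client.end().catch(() => {}) // may already be closed by the limit check
  }
}

// Usage: queryWithRowCap(() => new Client({ maxResultSize: 1024 }), 'SELECT * FROM big_table')
```

Appending LIMIT naively breaks on queries that already end with a clause or semicolon; a cursor-based fallback, as in the pg-cursor sketch above, is the more robust choice.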
Lines changed: 61 additions & 0 deletions
```diff
@@ -0,0 +1,61 @@
+'use strict'
+
+const helper = require('../test-helper')
+const pg = helper.pg
+var assert = require('assert')
+
+
+const suite = new helper.Suite()
+
+// Test that the pool respects the maxResultSize option
+suite.test('pool respects maxResultSize option', (done) => {
+  const pool = new pg.Pool({
+    ...helper.args,
+    maxResultSize: 1024, // very small limit
+  })
+
+  pool.on('error', (err) => {
+    if (err.code === 'RESULT_SIZE_EXCEEDED') {
+      return done()
+    }
+  })
+
+  const largeQuery = `
+    SELECT generate_series(1, 1000) as num,
+           repeat('x', 100) as data
+  `
+
+  pool.query(largeQuery, (err) => {
+    if (!err) {
+      return done(new Error('Expected query to fail with size limit error'))
+    }
+
+    if (err.code !== 'RESULT_SIZE_EXCEEDED') {
+      return done(new Error(`Expected RESULT_SIZE_EXCEEDED error but got: ${err.message} (${err.code})`))
+    }
+  })
+})
+
+suite.test('pool query works with adequate maxResultSize', (done) => {
+  // Create a pool with a much larger limit
+  const pool = new pg.Pool({
+    ...helper.args,
+    maxResultSize: 100 * 1024,
+  })
+
+  // Use a very simple query that returns a single value
+  const simpleQuery = `SELECT 1 as num`
+
+  // This should succeed
+  pool.query(simpleQuery, (err, result) => {
+    if (err) {
+      return done(err)
+    }
+
+    // Verify we got the expected result
+    assert.deepEqual(result.rows, [{ num: 1 }])
+    // Test passed, clean up
+    pool.end(done)
+  })
+})
+
```
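
One pool-specific wrinkle the first test encodes: because the limit check terminates the underlying connection, the failure may surface on the pool's 'error' event as well as in the query callback, so callers should install a pool-level handler. A minimal promise-based sketch under the same assumed option (the query is illustrative):

```js
const { Pool } = require('pg')

const pool = new Pool({ maxResultSize: 10 * 1024 * 1024 }) // 10MB cap

// A terminated connection can also surface here; without a handler,
// an unhandled 'error' event would crash the process.
pool.on('error', (err) => {
  if (err.code !== 'RESULT_SIZE_EXCEEDED') console.error('pool error', err)
})

async function main() {
  try {
    const res = await pool.query('SELECT * FROM large_table')
    console.log(res.rowCount)
  } catch (err) {
    if (err.code !== 'RESULT_SIZE_EXCEEDED') throw err
    console.error(`result exceeded ${err.maxResultSize} bytes`)
  } finally {
    await pool.end()
  }
}

main()
```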
