Skip to content

Commit 18d61e2

Browse files
committed
fix: use query level statement timeout
1 parent 2fa2011 commit 18d61e2

File tree

4 files changed

+50
-32
lines changed

4 files changed

+50
-32
lines changed

package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,8 @@
3030
"test": "run-s db:clean db:run test:run db:clean",
3131
"db:clean": "cd test/db && docker compose down",
3232
"db:run": "cd test/db && docker compose up --detach --wait",
33-
"test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage",
34-
"test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean"
33+
"test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage",
34+
"test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean"
3535
},
3636
"engines": {
3737
"node": ">=20",

src/lib/db.ts

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@ import { parse as parseArray } from 'postgres-array'
44
import { PostgresMetaResult, PoolConfig } from './types.js'
55
import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js'
66

7+
const STATEMENT_TIMEOUT_QUERY_PREFIX = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';`
8+
79
pg.types.setTypeParser(pg.types.builtins.INT8, (x) => {
810
const asNumber = Number(x)
911
if (Number.isSafeInteger(asNumber)) {
@@ -117,7 +119,7 @@ export const init: (config: PoolConfig) => {
117119
// otherwise the query will keep running on the database even if query timeout was reached
118120
// This need to be added at query and not connection level because poolers (pgbouncer) doesn't
119121
// allow to set this parameter at connection time
120-
const sqlWithStatementTimeout = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';\n${sql}`
122+
const sqlWithStatementTimeout = `${STATEMENT_TIMEOUT_QUERY_PREFIX}${sql}`
121123
try {
122124
if (!pool) {
123125
const pool = new pg.Pool(config)
@@ -153,13 +155,17 @@ export const init: (config: PoolConfig) => {
153155
formattedError += '\n'
154156
if (error.position) {
155157
// error.position is 1-based
156-
const position = Number(error.position) - 1
158+
// we also remove the length of our `SET statement_timeout='XXs';` prefix from the position
159+
const position =
160+
Number(error.position) - 1 - STATEMENT_TIMEOUT_QUERY_PREFIX.length
161+
// we set the new error position
162+
error.position = `${position + 1}`
157163

158164
let line = ''
159165
let lineNumber = 0
160166
let lineOffset = 0
161167

162-
const lines = sqlWithStatementTimeout.split('\n')
168+
const lines = sql.split('\n')
163169
let currentOffset = 0
164170
for (let i = 0; i < lines.length; i++) {
165171
if (currentOffset + lines[i].length > position) {

test/server/query-timeout.ts

Lines changed: 28 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -2,32 +2,38 @@ import { expect, test, describe } from 'vitest'
22
import { app } from './utils'
33
import { pgMeta } from '../lib/utils'
44

5+
const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 2
6+
57
describe('test query timeout', () => {
6-
test('query timeout after 3s and connection cleanup', async () => {
7-
const query = `SELECT pg_sleep(10);`
8-
// Execute a query that will sleep for 10 seconds
9-
const res = await app.inject({
10-
method: 'POST',
11-
path: '/query',
12-
payload: {
13-
query,
14-
},
15-
})
8+
test(
9+
`query timeout after ${TIMEOUT}s and connection cleanup`,
10+
async () => {
11+
const query = `SELECT pg_sleep(${TIMEOUT});`
12+
// Execute a query that will sleep for TIMEOUT seconds (longer than the query timeout)
13+
const res = await app.inject({
14+
method: 'POST',
15+
path: '/query',
16+
payload: {
17+
query,
18+
},
19+
})
1620

17-
// Check that we get the proper timeout error response
18-
expect(res.statusCode).toBe(408) // Request Timeout
19-
expect(res.json()).toMatchObject({
20-
error: expect.stringContaining('Query read timeout'),
21-
})
22-
// wait one second for the statement timeout to take effect
23-
await new Promise((resolve) => setTimeout(resolve, 1000))
21+
// Check that we get the proper timeout error response
22+
expect(res.statusCode).toBe(408) // Request Timeout
23+
expect(res.json()).toMatchObject({
24+
error: expect.stringContaining('Query read timeout'),
25+
})
26+
// wait one second for the statement timeout to take effect
27+
await new Promise((resolve) => setTimeout(resolve, 1000))
2428

25-
// Verify that the connection has been cleaned up by checking active connections
26-
const connectionsRes = await pgMeta.query(`
29+
// Verify that the connection has been cleaned up by checking active connections
30+
const connectionsRes = await pgMeta.query(`
2731
SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%';
2832
`)
2933

30-
// Should have no active connections except for our current query
31-
expect(connectionsRes.data).toHaveLength(0)
32-
}, 5000)
34+
// Should have no active connections except for our current query
35+
expect(connectionsRes.data).toHaveLength(0)
36+
},
37+
TIMEOUT * 1000
38+
)
3339
})

test/server/result-size-limit.ts

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -72,23 +72,29 @@ describe('test js parser error max result', () => {
7272
// Create a table with large data for testing
7373
beforeAll(async () => {
7474
// Create a table with a large text column
75-
await pgMeta.query(`
75+
await pgMeta.query(
76+
`
7677
CREATE TABLE very_large_data (
7778
id SERIAL PRIMARY KEY,
7879
data TEXT
7980
);
80-
`)
81+
`,
82+
false
83+
)
8184

8285
// Insert data that will exceed our limit in tests it's set around ~20MB
83-
await pgMeta.query(`
86+
await pgMeta.query(
87+
`
8488
INSERT INTO very_large_data (data)
8589
VALUES (repeat('x', 710 * 1024 * 1024)) -- 700+MB string will raise a JS exception at parse time
86-
`)
90+
`,
91+
false
92+
)
8793
})
8894

8995
afterAll(async () => {
9096
// Clean up the test table
91-
await pgMeta.query('DROP TABLE very_large_data;')
97+
await pgMeta.query('DROP TABLE very_large_data;', false)
9298
})
9399

94100
test(

0 commit comments

Comments
 (0)