Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 53da452

Browse files
committed May 25, 2024
feat: add support for the SDK server
With this change, the gptscript SDK server is fork/exec-ed once and reused, instead of fork/exec-ing gptscript for every command. This improves support for daemons in gptscript. This change also intentionally removes support for browser-based applications.
1 parent 525ae1d commit 53da452

7 files changed

+357
-1099
lines changed
 

‎README.md

+20-15
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@ running `npm install`.
1818

1919
## Usage
2020

21-
To use the module and run gptscripts, you need to first set the OPENAI_API_KEY environment variable to your OpenAI API
22-
key.
21+
To use the module and run gptscripts, you need to first set the `OPENAI_API_KEY` environment variable to your OpenAI API
22+
key. You can also set the `GPTSCRIPT_BIN` environment variable to change the execution of the gptscripts.
2323

2424
To ensure it is working properly, you can run the following command:
2525

@@ -31,11 +31,10 @@ You will see "Hello, World!" in the output of the command.
3131

3232
## Client
3333

34-
There are currently a couple "global" options, and the client helps to manage those. A client without any options is
35-
likely what you want. However, here are the current global options:
36-
37-
- `gptscriptURL`: The URL (including `http(s)://`) of an "SDK server" to use instead of the fork/exec model.
38-
- `gptscriptBin`: The path to a `gptscript` binary to use instead of the bundled one.
34+
The client allows the caller to run gptscript files, tools, and other operations (see below). There are currently no
35+
options for this singleton client, so `await gptscript.Client.init()` is all you need. Although the intention is that a
36+
single client is all you need for the life of your application, you should call `close()` on the client when you are
37+
done.
3938

4039
## Options
4140

@@ -45,7 +44,6 @@ None of the options is required, and the defaults will reduce the number of call
4544
- `disableCache`: Enable or disable caching (default: true)
4645
- `cacheDir`: Specify the cache directory
4746
- `quiet`: No output logging
48-
- `chdir`: Change current working directory
4947
- `subTool`: Use tool of this name, not the first tool
5048
- `workspace`: Directory to use for the workspace, if specified it will not be deleted on exit
5149

@@ -61,9 +59,10 @@ Lists all the available built-in tools.
6159
const gptscript = require('@gptscript-ai/gptscript');
6260

6361
async function listTools() {
64-
const client = new gptscript.Client();
62+
const client = await gptscript.Client.init();
6563
const tools = await client.listTools();
6664
console.log(tools);
65+
client.close()
6766
}
6867
```
6968

@@ -78,12 +77,13 @@ const gptscript = require('@gptscript-ai/gptscript');
7877

7978
async function listModels() {
8079
let models = [];
80+
const client = await gptscript.Client.init();
8181
try {
82-
const client = new gptscript.Client();
8382
models = await client.listModels();
8483
} catch (error) {
8584
console.error(error);
8685
}
86+
client.close()
8787
}
8888
```
8989

@@ -97,12 +97,13 @@ Get the version of the current `gptscript` binary being used for the calls.
9797
const gptscript = require('@gptscript-ai/gptscript');
9898

9999
async function version() {
100+
const client = await gptscript.Client.init();
100101
try {
101-
const client = new gptscript.Client();
102102
console.log(await client.version());
103103
} catch (error) {
104104
console.error(error);
105105
}
106+
client.close()
106107
}
107108
```
108109

@@ -118,13 +119,14 @@ const t = {
118119
instructions: "Who was the president of the united states in 1928?"
119120
};
120121

122+
const client = await gptscript.Client.init();
121123
try {
122-
const client = new gptscript.Client();
123124
const run = client.evaluate(t);
124125
console.log(await run.text());
125126
} catch (error) {
126127
console.error(error);
127128
}
129+
client.close();
128130
```
129131

130132
### run
@@ -140,13 +142,14 @@ const opts = {
140142
};
141143

142144
async function execFile() {
145+
const client = await gptscript.Client.init();
143146
try {
144-
const client = new gptscript.Client();
145147
const run = client.run('./hello.gpt', opts);
146148
console.log(await run.text());
147149
} catch (e) {
148150
console.error(e);
149151
}
152+
client.close();
150153
}
151154
```
152155

@@ -178,8 +181,8 @@ const opts = {
178181
};
179182

180183
async function streamExecFileWithEvents() {
184+
const client = await gptscript.Client.init();
181185
try {
182-
const client = new gptscript.Client();
183186
const run = client.run('./test.gpt', opts);
184187

185188
run.on(gptscript.RunEventType.Event, data => {
@@ -190,6 +193,7 @@ async function streamExecFileWithEvents() {
190193
} catch (e) {
191194
console.error(e);
192195
}
196+
client.close();
193197
}
194198
```
195199

@@ -218,7 +222,7 @@ const t = {
218222
};
219223

220224
async function streamExecFileWithEvents() {
221-
const client = new gptscript.Client();
225+
const client = await gptscript.Client.init();
222226
let run = client.evaluate(t, opts);
223227
try {
224228
// Wait for the initial run to complete.
@@ -238,6 +242,7 @@ async function streamExecFileWithEvents() {
238242
console.error(e);
239243
}
240244

245+
client.close();
241246

242247
// The state here should either be RunState.Finished (on success) or RunState.Error (on error).
243248
console.log(run.state)

‎babel.test.cjs

-18
This file was deleted.

‎package-lock.json

+77-440
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

‎package.json

+2-16
Original file line numberDiff line numberDiff line change
@@ -19,35 +19,25 @@
1919
"test": "jest",
2020
"postinstall": "node scripts/install-binary.js",
2121
"clean": "rm -rf dist",
22-
"build": "tsc && rollup -c"
22+
"build": "tsc"
2323
},
2424
"keywords": [
2525
"gptscript",
2626
"gpt",
2727
"AI"
2828
],
29-
"browser": {
30-
"child_process": false
31-
},
3229
"author": "Bill Maxwell <bill@acorn.io>",
3330
"license": "Apache-2.0",
34-
"peerDependencies": {
35-
"sse.js": "^2.4.1"
36-
},
3731
"dependencies": {
38-
"@babel/template": "^7.24.0",
39-
"@babel/types": "^7.24.5",
32+
"@types/sync-fetch": "^0.4.3",
4033
"adm-zip": "^0.5.10",
41-
"child_process": "^1.0.2",
42-
"net": "^1.0.2",
4334
"node-downloader-helper": "^2.1.9",
4435
"tar": "^6.2.0"
4536
},
4637
"devDependencies": {
4738
"@babel/core": "^7.24.5",
4839
"@babel/preset-env": "^7.24.5",
4940
"@babel/preset-typescript": "^7.24.1",
50-
"@rollup/plugin-typescript": "^11.1.6",
5141
"@swc/cli": "^0.3.9",
5242
"@swc/core": "^1.4.2",
5343
"@types/jest": "^29.5.12",
@@ -57,9 +47,6 @@
5747
"copyfiles": "^2.4.1",
5848
"jest": "^29.7.0",
5949
"npm-run-all": "^4.1.5",
60-
"rollup": "^4.17.2",
61-
"rollup-plugin-commonjs": "^10.1.0",
62-
"rollup-plugin-node-resolve": "^5.2.0",
6350
"ts-jest": "^29.1.2",
6451
"ts-loader": "^9.5.1",
6552
"typescript": "^5.4.5",
@@ -70,7 +57,6 @@
7057
"^.+\\.ts?$": [
7158
"ts-jest",
7259
{
73-
"babelConfig": "babel.test.cjs",
7460
"useESM": true
7561
}
7662
]

‎rollup.config.js

-21
This file was deleted.

‎src/gptscript.ts

+197-494
Large diffs are not rendered by default.

‎tests/gptscript.test.ts

+61-95
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,24 @@
11
import * as gptscript from "../src/gptscript"
22
import path from "path"
33

4-
const client = new gptscript.Client(process.env.GPTSCRIPT_URL, process.env.GPTSCRIPT_BIN)
4+
let client: gptscript.Client
55

66
describe("gptscript module", () => {
7-
beforeAll(() => {
7+
beforeAll(async () => {
88
if (!process.env.OPENAI_API_KEY && !process.env.GPTSCRIPT_URL) {
99
throw new Error("neither OPENAI_API_KEY nor GPTSCRIPT_URL is set")
1010
}
11+
12+
client = await gptscript.Client.init()
13+
})
14+
afterAll(() => {
15+
client.close()
16+
})
17+
18+
test("creating and closing another client should work", async () => {
19+
const other = await gptscript.Client.init()
20+
await other.version()
21+
other.close()
1122
})
1223

1324
test("listTools returns available tools", async () => {
@@ -47,17 +58,13 @@ describe("gptscript module", () => {
4758
disableCache: true,
4859
}
4960

50-
try {
51-
const run = client.evaluate(t as any, opts)
52-
run.on(gptscript.RunEventType.CallProgress, data => {
53-
out += `system: ${(data as any).content}`
54-
})
61+
const run = client.evaluate(t as any, opts)
62+
run.on(gptscript.RunEventType.CallProgress, (data: gptscript.CallFrame) => {
63+
for (let output of data.output) out += `system: ${output.content}`
64+
})
5565

56-
await run.text()
57-
err = run.err
58-
} catch (e) {
59-
console.error(e)
60-
}
66+
await run.text()
67+
err = run.err
6168

6269
expect(out).toContain("Calvin Coolidge")
6370
expect(err).toEqual("")
@@ -71,45 +78,20 @@ describe("gptscript module", () => {
7178
context: [path.join(__dirname, "fixtures", "acorn-labs-context.gpt")]
7279
}
7380

74-
try {
75-
const run = client.evaluate(t as any, {disableCache: true})
76-
out = await run.text()
77-
err = run.err
78-
} catch (e) {
79-
console.error(e)
80-
}
81+
const run = client.evaluate(t as any, {disableCache: true})
82+
out = await run.text()
83+
err = run.err
8184

8285
expect(out).toContain("Acorn Labs")
8386
expect(err).toEqual("")
8487
})
8588

86-
describe("run with test.gpt fixture", () => {
87-
test("should execute test.gpt correctly", async () => {
88-
const testGptPath = path.join(__dirname, "fixtures", "test.gpt")
89-
90-
try {
91-
const result = await client.run(testGptPath).text()
92-
expect(result).toBeDefined()
93-
expect(result).toContain("Calvin Coolidge")
94-
} catch (error) {
95-
console.error(error)
96-
fail("run threw an unexpected error.")
97-
}
98-
})
89+
test("should execute test.gpt correctly", async () => {
90+
const testGptPath = path.join(__dirname, "fixtures", "test.gpt")
9991

100-
test("should execute test.gpt correctly when chdir is set", async () => {
101-
const testGptPath = path.join(__dirname, "fixtures")
102-
103-
try {
104-
// By changing the directory here, we should be able to find the test.gpt file without prepending the path.
105-
const result = await client.run("test.gpt", {chdir: testGptPath}).text()
106-
expect(result).toBeDefined()
107-
expect(result).toContain("Calvin Coolidge")
108-
} catch (error) {
109-
console.error(error)
110-
fail("run threw an unexpected error.")
111-
}
112-
})
92+
const result = await client.run(testGptPath).text()
93+
expect(result).toBeDefined()
94+
expect(result).toContain("Calvin Coolidge")
11395
})
11496

11597
test("run executes and stream a file correctly", async () => {
@@ -120,16 +102,12 @@ describe("gptscript module", () => {
120102
disableCache: true,
121103
}
122104

123-
try {
124-
const run = client.run(testGptPath, opts)
125-
run.on(gptscript.RunEventType.CallProgress, data => {
126-
out += `system: ${(data as any).content}`
127-
})
128-
await run.text()
129-
err = run.err
130-
} catch (e) {
131-
console.error(e)
132-
}
105+
const run = client.run(testGptPath, opts)
106+
run.on(gptscript.RunEventType.CallProgress, data => {
107+
for (let output of data.output) out += `system: ${output.content}`
108+
})
109+
await run.text()
110+
err = run.err
133111

134112
expect(out).toContain("Calvin Coolidge")
135113
expect(err).toEqual("")
@@ -143,16 +121,12 @@ describe("gptscript module", () => {
143121
disableCache: true,
144122
}
145123

146-
try {
147-
const run = client.run(testGptPath, opts)
148-
run.on(gptscript.RunEventType.CallProgress, data => {
149-
out += `system: ${(data as any).content}`
150-
})
151-
await run.text()
152-
err = run.err
153-
} catch (e) {
154-
console.error(e)
155-
}
124+
const run = client.run(testGptPath, opts)
125+
run.on(gptscript.RunEventType.CallProgress, data => {
126+
for (let output of data.output) out += `system: ${output.content}`
127+
})
128+
await run.text()
129+
err = run.err
156130

157131
expect(out).toContain("Hello!")
158132
expect(err).toEqual("")
@@ -315,26 +289,22 @@ describe("gptscript module", () => {
315289
"Alaska Time Zone"
316290
]
317291

318-
try {
319-
await run.text()
320-
for (let i: number = 0; i < inputs.length; i++) {
321-
run = run.nextChat(inputs[i])
322-
err = run.err
323-
324-
if (err) {
325-
break
326-
}
292+
await run.text()
293+
for (let i: number = 0; i < inputs.length; i++) {
294+
run = run.nextChat(inputs[i])
295+
err = run.err
327296

328-
expect(await run.text()).toContain(expectedOutputs[i])
329-
expect(run.state).toEqual(gptscript.RunState.Continue)
297+
if (err) {
298+
break
330299
}
331300

332-
run = run.nextChat("bye")
333-
await run.text()
334-
} catch (e) {
335-
console.error(e)
301+
expect(await run.text()).toContain(expectedOutputs[i])
302+
expect(run.state).toEqual(gptscript.RunState.Continue)
336303
}
337304

305+
run = run.nextChat("bye")
306+
await run.text()
307+
338308
expect(run.state).toEqual(gptscript.RunState.Finished)
339309
expect(err).toEqual("")
340310
}, 60000)
@@ -358,26 +328,22 @@ describe("gptscript module", () => {
358328
"Lake Huron"
359329
]
360330

361-
try {
362-
await run.text()
363-
for (let i: number = 0; i < inputs.length; i++) {
364-
run = run.nextChat(inputs[i])
365-
err = run.err
366-
367-
if (err) {
368-
break
369-
}
331+
await run.text()
332+
for (let i: number = 0; i < inputs.length; i++) {
333+
run = run.nextChat(inputs[i])
334+
err = run.err
370335

371-
expect(await run.text()).toContain(expectedOutputs[i])
372-
expect(run.state).toEqual(gptscript.RunState.Continue)
336+
if (err) {
337+
break
373338
}
374339

375-
run = run.nextChat("bye")
376-
await run.text()
377-
} catch (e) {
378-
console.error(e)
340+
expect(await run.text()).toContain(expectedOutputs[i])
341+
expect(run.state).toEqual(gptscript.RunState.Continue)
379342
}
380343

344+
run = run.nextChat("bye")
345+
await run.text()
346+
381347
expect(run.state).toEqual(gptscript.RunState.Finished)
382348
expect(err).toEqual("")
383349
}, 60000)

0 commit comments

Comments
 (0)
Please sign in to comment.