Skip to content

Commit a0916b9

Browse files
feat: add sys.chat.history
This allows multi-agent flows to recall previous conversations.
1 parent 171bb77 commit a0916b9

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

48 files changed

+333
-575
lines changed

pkg/builtin/builtin.go

+40-4
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,17 @@ import (
2020
"github.com/AlecAivazis/survey/v2"
2121
"github.com/BurntSushi/locker"
2222
"github.com/google/shlex"
23+
"github.com/gptscript-ai/gptscript/pkg/engine"
2324
"github.com/gptscript-ai/gptscript/pkg/types"
2425
"github.com/jaytaylor/html2text"
2526
)
2627

2728
var SafeTools = map[string]struct{}{
28-
"sys.echo": {},
29-
"sys.time.now": {},
30-
"sys.prompt": {},
31-
"sys.chat.finish": {},
29+
"sys.echo": {},
30+
"sys.time.now": {},
31+
"sys.prompt": {},
32+
"sys.chat.finish": {},
33+
"sys.chat.history": {},
3234
}
3335

3436
var tools = map[string]types.Tool{
@@ -182,6 +184,13 @@ var tools = map[string]types.Tool{
182184
},
183185
BuiltinFunc: SysPrompt,
184186
},
187+
"sys.chat.history": {
188+
Parameters: types.Parameters{
189+
Description: "Retrieves the previous chat dialog",
190+
Arguments: types.ObjectSchema(),
191+
},
192+
BuiltinFunc: SysChatHistory,
193+
},
185194
}
186195

187196
func SysProgram() *types.Program {
@@ -567,6 +576,33 @@ func (e *ErrChatFinish) Error() string {
567576
return fmt.Sprintf("CHAT FINISH: %s", e.Message)
568577
}
569578

579+
func SysChatHistory(ctx context.Context, _ []string, _ string) (string, error) {
580+
engineContext, _ := engine.FromContext(ctx)
581+
582+
data, err := json.Marshal(engine.ChatHistory{
583+
History: writeHistory(engineContext),
584+
})
585+
586+
return string(data), err
587+
}
588+
589+
func writeHistory(ctx *engine.Context) (result []engine.ChatHistoryCall) {
590+
if ctx == nil {
591+
return nil
592+
}
593+
if ctx.Parent != nil {
594+
result = append(result, writeHistory(ctx.Parent)...)
595+
}
596+
if ctx.LastReturn != nil && ctx.LastReturn.State != nil {
597+
result = append(result, engine.ChatHistoryCall{
598+
ID: ctx.ID,
599+
Tool: ctx.Tool,
600+
Completion: ctx.LastReturn.State.Completion,
601+
})
602+
}
603+
return
604+
}
605+
570606
func SysChatFinish(ctx context.Context, env []string, input string) (string, error) {
571607
var params struct {
572608
Message string `json:"return,omitempty"`

pkg/engine/cmd.go

+12-4
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import (
2121
"github.com/gptscript-ai/gptscript/pkg/version"
2222
)
2323

24-
func (e *Engine) runCommand(ctx context.Context, tool types.Tool, input string, toolCategory ToolCategory) (cmdOut string, cmdErr error) {
24+
func (e *Engine) runCommand(ctx Context, tool types.Tool, input string, toolCategory ToolCategory) (cmdOut string, cmdErr error) {
2525
id := counter.Next()
2626

2727
defer func() {
@@ -42,10 +42,18 @@ func (e *Engine) runCommand(ctx context.Context, tool types.Tool, input string,
4242
"input": input,
4343
},
4444
}
45-
return tool.BuiltinFunc(ctx, e.Env, input)
45+
return tool.BuiltinFunc(ctx.WrappedContext(), e.Env, input)
4646
}
4747

48-
cmd, stop, err := e.newCommand(ctx, nil, tool, input)
48+
var instructions []string
49+
for _, inputContext := range ctx.InputContext {
50+
instructions = append(instructions, inputContext.Content)
51+
}
52+
var extraEnv = []string{
53+
strings.TrimSpace(fmt.Sprintf("GPTSCRIPT_CONTEXT=%s", strings.Join(instructions, "\n"))),
54+
}
55+
56+
cmd, stop, err := e.newCommand(ctx.Ctx, extraEnv, tool, input)
4957
if err != nil {
5058
return "", err
5159
}
@@ -66,7 +74,7 @@ func (e *Engine) runCommand(ctx context.Context, tool types.Tool, input string,
6674
cmd.Stdout = io.MultiWriter(all, output)
6775

6876
if toolCategory == CredentialToolCategory {
69-
pause := context2.GetPauseFuncFromCtx(ctx)
77+
pause := context2.GetPauseFuncFromCtx(ctx.Ctx)
7078
unpause := pause()
7179
defer unpause()
7280
}

pkg/engine/engine.go

+19-4
Original file line numberDiff line numberDiff line change
@@ -69,9 +69,20 @@ type CallContext struct {
6969

7070
type Context struct {
7171
commonContext
72-
Ctx context.Context
73-
Parent *Context
74-
Program *types.Program
72+
Ctx context.Context
73+
Parent *Context
74+
LastReturn *Return
75+
Program *types.Program
76+
}
77+
78+
type ChatHistory struct {
79+
History []ChatHistoryCall `json:"history,omitempty"`
80+
}
81+
82+
type ChatHistoryCall struct {
83+
ID string `json:"id,omitempty"`
84+
Tool types.Tool `json:"tool,omitempty"`
85+
Completion types.CompletionRequest `json:"completion,omitempty"`
7586
}
7687

7788
type ToolCategory string
@@ -194,7 +205,7 @@ func (e *Engine) Start(ctx Context, input string) (ret *Return, _ error) {
194205
} else if tool.IsEcho() {
195206
return e.runEcho(tool)
196207
}
197-
s, err := e.runCommand(ctx.WrappedContext(), tool, input, ctx.ToolCategory)
208+
s, err := e.runCommand(ctx, tool, input, ctx.ToolCategory)
198209
if err != nil {
199210
return nil, err
200211
}
@@ -233,6 +244,10 @@ func (e *Engine) Start(ctx Context, input string) (ret *Return, _ error) {
233244
input = ""
234245
}
235246

247+
if tool.Chat && input == "{}" {
248+
input = ""
249+
}
250+
236251
if input != "" {
237252
completion.Messages = append(completion.Messages, types.CompletionMessage{
238253
Role: types.CompletionMessageRoleTypeUser,

pkg/runner/runner.go

+4
Original file line numberDiff line numberDiff line change
@@ -711,6 +711,10 @@ func (r *Runner) subCalls(callCtx engine.Context, monitor Monitor, env []string,
711711
resultLock sync.Mutex
712712
)
713713

714+
if state.Continuation != nil {
715+
callCtx.LastReturn = state.Continuation
716+
}
717+
714718
if state.InputContextContinuation != nil {
715719
return state, nil, nil
716720
}

pkg/tests/runner_test.go

+24-60
Original file line numberDiff line numberDiff line change
@@ -227,9 +227,8 @@ func TestSubChat(t *testing.T) {
227227
"state": {
228228
"input": "Hello",
229229
"completion": {
230-
"Model": "gpt-4o",
231-
"InternalSystemPrompt": null,
232-
"Tools": [
230+
"model": "gpt-4o",
231+
"tools": [
233232
{
234233
"function": {
235234
"toolID": "testdata/TestSubChat/test.gpt:chatbot",
@@ -238,7 +237,7 @@ func TestSubChat(t *testing.T) {
238237
}
239238
}
240239
],
241-
"Messages": [
240+
"messages": [
242241
{
243242
"role": "system",
244243
"content": [
@@ -272,12 +271,7 @@ func TestSubChat(t *testing.T) {
272271
],
273272
"usage": {}
274273
}
275-
],
276-
"MaxTokens": 0,
277-
"Temperature": null,
278-
"JSONResponse": false,
279-
"Grammar": "",
280-
"Cache": null
274+
]
281275
},
282276
"pending": {
283277
"call_1": {
@@ -303,10 +297,9 @@ func TestSubChat(t *testing.T) {
303297
"continuation": {
304298
"state": {
305299
"completion": {
306-
"Model": "gpt-4o",
307-
"InternalSystemPrompt": false,
308-
"Tools": null,
309-
"Messages": [
300+
"model": "gpt-4o",
301+
"internalSystemPrompt": false,
302+
"messages": [
310303
{
311304
"role": "system",
312305
"content": [
@@ -325,12 +318,7 @@ func TestSubChat(t *testing.T) {
325318
],
326319
"usage": {}
327320
}
328-
],
329-
"MaxTokens": 0,
330-
"Temperature": null,
331-
"JSONResponse": false,
332-
"Grammar": "",
333-
"Cache": null
321+
]
334322
}
335323
},
336324
"result": "Assistant 1"
@@ -355,9 +343,8 @@ func TestSubChat(t *testing.T) {
355343
"state": {
356344
"input": "Hello",
357345
"completion": {
358-
"Model": "gpt-4o",
359-
"InternalSystemPrompt": null,
360-
"Tools": [
346+
"model": "gpt-4o",
347+
"tools": [
361348
{
362349
"function": {
363350
"toolID": "testdata/TestSubChat/test.gpt:chatbot",
@@ -366,7 +353,7 @@ func TestSubChat(t *testing.T) {
366353
}
367354
}
368355
],
369-
"Messages": [
356+
"messages": [
370357
{
371358
"role": "system",
372359
"content": [
@@ -400,12 +387,7 @@ func TestSubChat(t *testing.T) {
400387
],
401388
"usage": {}
402389
}
403-
],
404-
"MaxTokens": 0,
405-
"Temperature": null,
406-
"JSONResponse": false,
407-
"Grammar": "",
408-
"Cache": null
390+
]
409391
},
410392
"pending": {
411393
"call_1": {
@@ -431,10 +413,9 @@ func TestSubChat(t *testing.T) {
431413
"continuation": {
432414
"state": {
433415
"completion": {
434-
"Model": "gpt-4o",
435-
"InternalSystemPrompt": false,
436-
"Tools": null,
437-
"Messages": [
416+
"model": "gpt-4o",
417+
"internalSystemPrompt": false,
418+
"messages": [
438419
{
439420
"role": "system",
440421
"content": [
@@ -471,12 +452,7 @@ func TestSubChat(t *testing.T) {
471452
],
472453
"usage": {}
473454
}
474-
],
475-
"MaxTokens": 0,
476-
"Temperature": null,
477-
"JSONResponse": false,
478-
"Grammar": "",
479-
"Cache": null
455+
]
480456
}
481457
},
482458
"result": "Assistant 2"
@@ -513,10 +489,9 @@ func TestChat(t *testing.T) {
513489
"state": {
514490
"input": "Hello",
515491
"completion": {
516-
"Model": "gpt-4o",
517-
"InternalSystemPrompt": false,
518-
"Tools": null,
519-
"Messages": [
492+
"model": "gpt-4o",
493+
"internalSystemPrompt": false,
494+
"messages": [
520495
{
521496
"role": "system",
522497
"content": [
@@ -544,12 +519,7 @@ func TestChat(t *testing.T) {
544519
],
545520
"usage": {}
546521
}
547-
],
548-
"MaxTokens": 0,
549-
"Temperature": null,
550-
"JSONResponse": false,
551-
"Grammar": "",
552-
"Cache": null
522+
]
553523
}
554524
},
555525
"result": "Assistant 1"
@@ -570,10 +540,9 @@ func TestChat(t *testing.T) {
570540
"state": {
571541
"input": "Hello",
572542
"completion": {
573-
"Model": "gpt-4o",
574-
"InternalSystemPrompt": false,
575-
"Tools": null,
576-
"Messages": [
543+
"model": "gpt-4o",
544+
"internalSystemPrompt": false,
545+
"messages": [
577546
{
578547
"role": "system",
579548
"content": [
@@ -619,12 +588,7 @@ func TestChat(t *testing.T) {
619588
],
620589
"usage": {}
621590
}
622-
],
623-
"MaxTokens": 0,
624-
"Temperature": null,
625-
"JSONResponse": false,
626-
"Grammar": "",
627-
"Cache": null
591+
]
628592
}
629593
},
630594
"result": "Assistant 2"

pkg/tests/testdata/TestCase/call1.golden

+4-10
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
`{
2-
"Model": "gpt-4o",
3-
"InternalSystemPrompt": null,
4-
"Tools": [
2+
"model": "gpt-4o",
3+
"tools": [
54
{
65
"function": {
76
"toolID": "testdata/TestCase/test.gpt:Bob",
@@ -19,7 +18,7 @@
1918
}
2019
}
2120
],
22-
"Messages": [
21+
"messages": [
2322
{
2423
"role": "system",
2524
"content": [
@@ -29,10 +28,5 @@
2928
],
3029
"usage": {}
3130
}
32-
],
33-
"MaxTokens": 0,
34-
"Temperature": null,
35-
"JSONResponse": false,
36-
"Grammar": "",
37-
"Cache": null
31+
]
3832
}`

0 commit comments

Comments
 (0)