
Commit c38e88f

Merge branch 'main' into remove-azure-builtin
2 parents: 57c3a17 + 664c3fb

File tree: 22 files changed (+279, −110 lines)

Makefile

Lines changed: 0 additions & 3 deletions

@@ -42,8 +42,5 @@ validate: tidy lint
 ci: build
 	./bin/gptscript ./scripts/ci.gpt
 
-gen-docs: build
-	./bin/gptscript ./scripts/gen-docs.gpt
-
 serve-docs:
 	(cd docs && npm i && npm start)

go.mod

Lines changed: 2 additions & 2 deletions

@@ -16,7 +16,7 @@ require (
 	github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
 	github.com/google/uuid v1.6.0
 	github.com/gptscript-ai/chat-completion-client v0.0.0-20240515050533-bdef9f2226a9
-	github.com/gptscript-ai/tui v0.0.0-20240531161317-ce32f93ac844
+	github.com/gptscript-ai/tui v0.0.0-20240604045848-e01b0b7aab9f
 	github.com/hexops/autogold/v2 v2.2.1
 	github.com/hexops/valast v1.4.4
 	github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056
@@ -62,7 +62,7 @@ require (
 	github.com/gookit/color v1.5.4 // indirect
 	github.com/gorilla/css v1.0.0 // indirect
 	github.com/gorilla/websocket v1.5.0 // indirect
-	github.com/gptscript-ai/go-gptscript v0.0.0-20240531155932-57eeae840062 // indirect
+	github.com/gptscript-ai/go-gptscript v0.0.0-20240604030145-39497c0575b3 // indirect
 	github.com/hashicorp/errwrap v1.0.0 // indirect
 	github.com/hashicorp/go-multierror v1.1.1 // indirect
 	github.com/hexops/autogold v1.3.1 // indirect

go.sum

Lines changed: 4 additions & 4 deletions

@@ -172,10 +172,10 @@ github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWm
 github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/gptscript-ai/chat-completion-client v0.0.0-20240515050533-bdef9f2226a9 h1:s6nL/aokB1sJTqVXEjN0zFI5CJa66ubw9g68VTMzEw0=
 github.com/gptscript-ai/chat-completion-client v0.0.0-20240515050533-bdef9f2226a9/go.mod h1:7P/o6/IWa1KqsntVf68hSnLKuu3+xuqm6lYhch1w4jo=
-github.com/gptscript-ai/go-gptscript v0.0.0-20240531155932-57eeae840062 h1:30/1iOONBhw9WeCsoo4U74B+a20lATBFHFiVlpyk5oo=
-github.com/gptscript-ai/go-gptscript v0.0.0-20240531155932-57eeae840062/go.mod h1:h1yYzC0rgB5Kk7lwdba+Xs6cWkuJfLq6sPRna45OVG0=
-github.com/gptscript-ai/tui v0.0.0-20240531161317-ce32f93ac844 h1:c8sS5fCD3TXQZj94GDlQNR0zAw+1nuT3sYSH/Qo96+A=
-github.com/gptscript-ai/tui v0.0.0-20240531161317-ce32f93ac844/go.mod h1:G51xpKSFH0g1hG1s9J8vxj7NL6x43KJxZKM+77rt79s=
+github.com/gptscript-ai/go-gptscript v0.0.0-20240604030145-39497c0575b3 h1:mXLpCzEg4DoOeFZt6w99QFh9n60UwpRGGG0c+aaT+5k=
+github.com/gptscript-ai/go-gptscript v0.0.0-20240604030145-39497c0575b3/go.mod h1:h1yYzC0rgB5Kk7lwdba+Xs6cWkuJfLq6sPRna45OVG0=
+github.com/gptscript-ai/tui v0.0.0-20240604045848-e01b0b7aab9f h1:7dCE0E/y3y3p1BPSQGQ4mtsz5cWWl0FbXfCCDCf57SI=
+github.com/gptscript-ai/tui v0.0.0-20240604045848-e01b0b7aab9f/go.mod h1:ybD/8QfwaMiK2QUSnnTxgzrIaXRgipowU4pW1qxgNJ8=
 github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=

pkg/builtin/builtin.go

Lines changed: 2 additions & 76 deletions

@@ -1,7 +1,6 @@
 package builtin
 
 import (
-	"bytes"
 	"context"
 	"encoding/json"
 	"errors"
@@ -18,9 +17,9 @@ import (
 	"strings"
 	"time"
 
-	"github.com/AlecAivazis/survey/v2"
 	"github.com/BurntSushi/locker"
 	"github.com/gptscript-ai/gptscript/pkg/engine"
+	"github.com/gptscript-ai/gptscript/pkg/prompt"
 	"github.com/gptscript-ai/gptscript/pkg/types"
 	"github.com/jaytaylor/html2text"
 )
@@ -216,7 +215,7 @@ var tools = map[string]types.Tool{
 					"sensitive", "(true or false) Whether the input should be hidden",
 				),
 			},
-			BuiltinFunc: SysPrompt,
+			BuiltinFunc: prompt.SysPrompt,
 		},
 	},
 	"sys.chat.history": {
@@ -772,79 +771,6 @@ func SysDownload(_ context.Context, env []string, input string) (_ string, err e
 	return fmt.Sprintf("Downloaded %s to %s", params.URL, params.Location), nil
 }
 
-func sysPromptHTTP(ctx context.Context, url string, prompt types.Prompt) (_ string, err error) {
-	data, err := json.Marshal(prompt)
-	if err != nil {
-		return "", err
-	}
-
-	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(data))
-	if err != nil {
-		return "", err
-	}
-	req.Header.Set("Content-Type", "application/json")
-
-	resp, err := http.DefaultClient.Do(req)
-	if err != nil {
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	if resp.StatusCode != 200 {
-		return "", fmt.Errorf("invalid status code [%d], expected 200", resp.StatusCode)
-	}
-
-	data, err = io.ReadAll(resp.Body)
-	return string(data), err
-}
-
-func SysPrompt(ctx context.Context, envs []string, input string) (_ string, err error) {
-	var params struct {
-		Message   string `json:"message,omitempty"`
-		Fields    string `json:"fields,omitempty"`
-		Sensitive string `json:"sensitive,omitempty"`
-	}
-	if err := json.Unmarshal([]byte(input), &params); err != nil {
-		return "", err
-	}
-
-	for _, env := range envs {
-		if url, ok := strings.CutPrefix(env, types.PromptURLEnvVar+"="); ok {
-			httpPrompt := types.Prompt{
-				Message:   params.Message,
-				Fields:    strings.Split(params.Fields, ","),
-				Sensitive: params.Sensitive == "true",
-			}
-			return sysPromptHTTP(ctx, url, httpPrompt)
-		}
-	}
-
-	if params.Message != "" {
-		_, _ = fmt.Fprintln(os.Stderr, params.Message)
-	}
-
-	results := map[string]string{}
-	for _, f := range strings.Split(params.Fields, ",") {
-		var value string
-		if params.Sensitive == "true" {
-			err = survey.AskOne(&survey.Password{Message: f}, &value, survey.WithStdio(os.Stdin, os.Stderr, os.Stderr))
-		} else {
-			err = survey.AskOne(&survey.Input{Message: f}, &value, survey.WithStdio(os.Stdin, os.Stderr, os.Stderr))
-		}
-		if err != nil {
-			return "", err
-		}
-		results[f] = value
-	}
-
-	resultsStr, err := json.Marshal(results)
-	if err != nil {
-		return "", err
-	}
-
-	return string(resultsStr), nil
-}
-
 func SysTimeNow(context.Context, []string, string) (string, error) {
 	return time.Now().Format(time.RFC3339), nil
 }
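
Note (not part of the diff): this change moves the sys.prompt implementation out of pkg/builtin into the new pkg/prompt package, and the builtin table now points at prompt.SysPrompt. The removed sysPromptHTTP shows the wire contract used when a prompt URL is present in the environment: the prompt is marshalled to JSON, POSTed to that URL, and the 200 response body (a JSON object of field name to entered value) is returned as-is. Below is a minimal client sketch of that contract. It assumes the new pkg/prompt server keeps the same request/response shape and that types.PromptURLEnvVar resolves to GPTSCRIPT_PROMPT_URL; both are assumptions, not confirmed by this diff.

// Minimal sketch of the prompt-over-HTTP contract shown in the removed
// sysPromptHTTP; the env var name and server behavior are assumptions.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
)

// prompt mirrors the fields of types.Prompt used by the removed code.
type prompt struct {
	Message   string   `json:"message,omitempty"`
	Fields    []string `json:"fields,omitempty"`
	Sensitive bool     `json:"sensitive,omitempty"`
}

func askOverHTTP(url string, p prompt) (string, error) {
	data, err := json.Marshal(p)
	if err != nil {
		return "", err
	}
	resp, err := http.Post(url, "application/json", bytes.NewReader(data))
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return "", fmt.Errorf("invalid status code [%d], expected 200", resp.StatusCode)
	}
	body, err := io.ReadAll(resp.Body)
	return string(body), err // JSON mapping field names to entered values
}

func main() {
	// Assumed env var name; the removed code cut the URL from types.PromptURLEnvVar.
	out, err := askOverHTTP(os.Getenv("GPTSCRIPT_PROMPT_URL"), prompt{
		Message: "Credentials needed",
		Fields:  []string{"token"},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(out)
}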

pkg/cli/gptscript.go

Lines changed: 8 additions & 3 deletions

@@ -70,7 +70,7 @@ type GPTScript struct {
 	ForceSequential bool   `usage:"Force parallel calls to run sequentially"`
 	Workspace       string `usage:"Directory to use for the workspace, if specified it will not be deleted on exit"`
 	UI              bool   `usage:"Launch the UI" local:"true" name:"ui"`
-	TUI             bool   `usage:"Launch the TUI" local:"true" name:"tui"`
+	DisableTUI      bool   `usage:"Don't use chat TUI but instead verbose output" local:"true" name:"disable-tui"`
 
 	readData []byte
 }
@@ -438,9 +438,14 @@ func (r *GPTScript) Run(cmd *cobra.Command, args []string) (retErr error) {
 	}
 
 	if prg.IsChat() || r.ForceChat {
-		if r.TUI {
-			return tui.Run(cmd.Context(), args[0], r.Workspace, strings.Join(args[1:], " "), tui.RunOptions{
+		if !r.DisableTUI && !r.Debug && !r.DebugMessages {
+			return tui.Run(cmd.Context(), args[0], tui.RunOptions{
 				TrustedRepoPrefixes: []string{"github.com/gptscript-ai/context"},
+				DisableCache:        r.DisableCache,
+				Input:               strings.Join(args[1:], " "),
+				CacheDir:            r.CacheDir,
+				SubTool:             r.SubTool,
+				Workspace:           r.Workspace,
 			})
 		}
 		return chat.Start(cmd.Context(), nil, gptScript, func() (types.Program, error) {

pkg/engine/cmd.go

Lines changed: 6 additions & 8 deletions

@@ -14,13 +14,17 @@ import (
 	"strings"
 
 	"github.com/google/shlex"
-	context2 "github.com/gptscript-ai/gptscript/pkg/context"
 	"github.com/gptscript-ai/gptscript/pkg/counter"
 	"github.com/gptscript-ai/gptscript/pkg/env"
 	"github.com/gptscript-ai/gptscript/pkg/types"
 	"github.com/gptscript-ai/gptscript/pkg/version"
 )
 
+var requiredFileExtensions = map[string]string{
+	"powershell.exe": "*.ps1",
+	"powershell":     "*.ps1",
+}
+
 func (e *Engine) runCommand(ctx Context, tool types.Tool, input string, toolCategory ToolCategory) (cmdOut string, cmdErr error) {
 	id := counter.Next()
 
@@ -73,12 +77,6 @@
 	cmd.Stderr = io.MultiWriter(all, os.Stderr)
 	cmd.Stdout = io.MultiWriter(all, output)
 
-	if toolCategory == CredentialToolCategory {
-		pause := context2.GetPauseFuncFromCtx(ctx.Ctx)
-		unpause := pause()
-		defer unpause()
-	}
-
 	if err := cmd.Run(); err != nil {
 		if toolCategory == NoCategory {
 			return fmt.Sprintf("ERROR: got (%v) while running tool, OUTPUT: %s", err, all), nil
@@ -205,7 +203,7 @@ func (e *Engine) newCommand(ctx context.Context, extraEnv []string, tool types.T
 	)
 
 	if strings.TrimSpace(rest) != "" {
-		f, err := os.CreateTemp("", version.ProgramName)
+		f, err := os.CreateTemp("", version.ProgramName+requiredFileExtensions[args[0]])
 		if err != nil {
 			return nil, nil, err
 		}
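
Note (not part of the diff): the new requiredFileExtensions map exists so that tool bodies handed to PowerShell end up in a file with a .ps1 suffix. os.CreateTemp treats a "*" in its pattern as the slot for the random component, so a pattern like "gptscript*.ps1" keeps the extension at the end of the generated name, whereas a plain prefix would leave the file without one. A small self-contained sketch of that stdlib behavior (file names shown in comments are illustrative):

// Sketch of how os.CreateTemp handles the "*.ps1" suffix added via
// requiredFileExtensions; the example names are illustrative only.
package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	// Without "*" in the pattern, the random component is appended,
	// e.g. gptscript123456789 (digits vary per run).
	plain, err := os.CreateTemp("", "gptscript")
	if err != nil {
		log.Fatal(err)
	}
	defer os.Remove(plain.Name())
	defer plain.Close()

	// With "*", the random component replaces the last "*", keeping the
	// extension at the end, e.g. gptscript123456789.ps1 — which is what
	// powershell.exe needs to treat the file as a script.
	scripted, err := os.CreateTemp("", "gptscript"+"*.ps1")
	if err != nil {
		log.Fatal(err)
	}
	defer os.Remove(scripted.Name())
	defer scripted.Close()

	fmt.Println(plain.Name())
	fmt.Println(scripted.Name())
}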

pkg/gptscript/gptscript.go

Lines changed: 18 additions & 2 deletions

@@ -5,15 +5,18 @@ import (
 	"fmt"
 	"os"
 	"path/filepath"
+	"slices"
 
 	"github.com/gptscript-ai/gptscript/pkg/builtin"
 	"github.com/gptscript-ai/gptscript/pkg/cache"
+	context2 "github.com/gptscript-ai/gptscript/pkg/context"
 	"github.com/gptscript-ai/gptscript/pkg/engine"
 	"github.com/gptscript-ai/gptscript/pkg/hash"
 	"github.com/gptscript-ai/gptscript/pkg/llm"
 	"github.com/gptscript-ai/gptscript/pkg/monitor"
 	"github.com/gptscript-ai/gptscript/pkg/mvl"
 	"github.com/gptscript-ai/gptscript/pkg/openai"
+	"github.com/gptscript-ai/gptscript/pkg/prompt"
 	"github.com/gptscript-ai/gptscript/pkg/remote"
 	"github.com/gptscript-ai/gptscript/pkg/repos/runtimes"
 	"github.com/gptscript-ai/gptscript/pkg/runner"
@@ -28,6 +31,8 @@ type GPTScript struct {
 	Cache                  *cache.Client
 	WorkspacePath          string
 	DeleteWorkspaceOnClose bool
+	extraEnv               []string
+	close                  func()
 }
 
 type Options struct {
@@ -96,12 +101,21 @@ func New(opts *Options) (*GPTScript, error) {
 		return nil, err
 	}
 
+	ctx, closeServer := context.WithCancel(context2.AddPauseFuncToCtx(context.Background(), opts.Runner.MonitorFactory.Pause))
+	extraEnv, err := prompt.NewServer(ctx, opts.Env)
+	if err != nil {
+		closeServer()
+		return nil, err
+	}
+
 	return &GPTScript{
 		Registry:               registry,
 		Runner:                 runner,
 		Cache:                  cacheClient,
 		WorkspacePath:          opts.Workspace,
 		DeleteWorkspaceOnClose: opts.Workspace == "",
+		extraEnv:               extraEnv,
+		close:                  closeServer,
 	}, nil
 }
@@ -122,10 +136,10 @@ func (g *GPTScript) getEnv(env []string) ([]string, error) {
 	if err := os.MkdirAll(g.WorkspacePath, 0700); err != nil {
 		return nil, err
 	}
-	return append([]string{
+	return slices.Concat(g.extraEnv, []string{
 		fmt.Sprintf("GPTSCRIPT_WORKSPACE_DIR=%s", g.WorkspacePath),
 		fmt.Sprintf("GPTSCRIPT_WORKSPACE_ID=%s", hash.ID(g.WorkspacePath)),
-	}, env...), nil
+	}, env), nil
 }
 
 func (g *GPTScript) Chat(ctx context.Context, prevState runner.ChatState, prg types.Program, envs []string, input string) (runner.ChatResponse, error) {
@@ -153,6 +167,8 @@ func (g *GPTScript) Close(closeDaemons bool) {
 		}
 	}
 
+	g.close()
+
 	if closeDaemons {
 		engine.CloseDaemons()
 	}
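
Note (not part of the diff): getEnv now builds the environment with slices.Concat instead of append, which allocates a fresh slice and leaves all inputs untouched, and it prepends the prompt server's extraEnv ahead of the workspace variables and the caller's env. A short sketch of that ordering; the variable names and values used for extraEnv are placeholders, since this diff does not show what prompt.NewServer returns:

// Sketch of the slices.Concat ordering used by getEnv (placeholder values).
package main

import (
	"fmt"
	"slices"
)

func main() {
	// Placeholder: the actual variables returned by prompt.NewServer are not shown here.
	extraEnv := []string{"EXAMPLE_PROMPT_SERVER=http://127.0.0.1:12345"}
	workspace := []string{
		"GPTSCRIPT_WORKSPACE_DIR=/tmp/workspace",
		"GPTSCRIPT_WORKSPACE_ID=abc123",
	}
	callerEnv := []string{"PATH=/usr/bin"}

	// slices.Concat allocates a new slice and copies each input in order;
	// none of the inputs can be mutated, unlike append, which may write
	// into the first slice's spare capacity.
	env := slices.Concat(extraEnv, workspace, callerEnv)
	fmt.Println(env)
}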

pkg/loader/openapi.go

Lines changed: 6 additions & 0 deletions

@@ -199,6 +199,12 @@ func getOpenAPITools(t *openapi3.T, defaultHost string) ([]types.Tool, error) {
 			arg.Description = content.Schema.Value.Description
 		}
 
+		// Read Only can not be sent in the request body, so we remove it
+		for key, property := range arg.Properties {
+			if property.Value.ReadOnly {
+				delete(arg.Properties, key)
+			}
+		}
 		// Unfortunately, the request body doesn't contain any good descriptor for it,
 		// so we just use "requestBodyContent" as the name of the arg.
 		tool.Parameters.Arguments.Properties["requestBodyContent"] = &openapi3.SchemaRef{Value: arg}
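
Note (not part of the diff): in OpenAPI, a property marked readOnly may appear in responses but must not be sent in requests, which is why the new loop strips such properties from the request-body argument schema. A tiny self-contained sketch of the same filtering, assuming the usual kin-openapi import path (github.com/getkin/kin-openapi/openapi3) for the openapi3 identifiers used in this file:

// Sketch of the readOnly filtering on a request-body schema; the import
// path is an assumption based on the openapi3 identifiers in the diff.
package main

import (
	"fmt"

	"github.com/getkin/kin-openapi/openapi3"
)

func main() {
	// One writable property and one read-only (server-assigned) property.
	arg := &openapi3.Schema{
		Properties: openapi3.Schemas{
			"name": {Value: &openapi3.Schema{}},
			"id":   {Value: &openapi3.Schema{ReadOnly: true}}, // response-only
		},
	}

	// Same filtering as the added loop: read-only properties cannot be
	// sent in a request body, so they are dropped from the schema.
	for key, property := range arg.Properties {
		if property.Value.ReadOnly {
			delete(arg.Properties, key)
		}
	}

	for key := range arg.Properties {
		fmt.Println(key) // only "name" remains
	}
}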

pkg/monitor/display.go

Lines changed: 10 additions & 1 deletion

@@ -38,6 +38,7 @@ type Console struct {
 	dumpState       string
 	displayProgress bool
 	printMessages   bool
+	callLock        sync.Mutex
 }
 
 var (
@@ -47,6 +48,7 @@ var (
 func (c *Console) Start(_ context.Context, prg *types.Program, _ []string, input string) (runner.Monitor, error) {
 	id := counter.Next()
 	mon := newDisplay(c.dumpState, c.displayProgress, c.printMessages)
+	mon.callLock = &c.callLock
 	mon.dump.ID = fmt.Sprint(id)
 	mon.dump.Program = prg
 	mon.dump.Input = input
@@ -55,13 +57,20 @@ func (c *Console) Start(_ context.Context, prg *types.Program, _ []string, input
 	return mon, nil
 }
 
+func (c *Console) Pause() func() {
+	c.callLock.Lock()
+	return func() {
+		c.callLock.Unlock()
+	}
+}
+
 type display struct {
 	dump          dump
 	printMessages bool
 	livePrinter   *livePrinter
 	dumpState     string
 	callIDMap     map[string]string
-	callLock      sync.Mutex
+	callLock      *sync.Mutex
 	usage         types.Usage
 }
pkg/monitor/fd.go

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,10 @@ func (s *fileFactory) Start(_ context.Context, prg *types.Program, env []string,
7070
return fd, nil
7171
}
7272

73+
func (s *fileFactory) Pause() func() {
74+
return func() {}
75+
}
76+
7377
func (s *fileFactory) close() {
7478
s.lock.Lock()
7579
defer s.lock.Unlock()

pkg/mvl/log.go

Lines changed: 3 additions & 0 deletions

@@ -53,6 +53,9 @@ func (f formatter) Format(entry *logrus.Entry) ([]byte, error) {
 	if i, ok := entry.Data["request"]; ok && i != "" {
 		msg += fmt.Sprintf(" [request=%s]", i)
 	}
+	if i, ok := entry.Data["cached"]; ok && i == true {
+		msg += " [cached]"
+	}
 	if i, ok := entry.Data["response"]; ok && i != "" {
 		msg += fmt.Sprintf(" [response=%s]", i)
 	}

pkg/openai/client.go

Lines changed: 3 additions & 4 deletions

@@ -344,16 +344,15 @@ func (c *Client) Call(ctx context.Context, messageRequest types.CompletionReques
 		result.Role = types.CompletionMessageRoleTypeAssistant
 	}
 
-	var usage types.Usage
-	if !cacheResponse {
-		usage = result.Usage
+	if cacheResponse {
+		result.Usage = types.Usage{}
 	}
 
 	status <- types.CompletionStatus{
 		CompletionID: id,
 		Chunks:       response,
 		Response:     result,
-		Usage:        usage,
+		Usage:        result.Usage,
 		Cached:       cacheResponse,
 	}
