
Commit d0541eb

Merge pull request #12 from thedadams/add-chdir
feat: add chdir option
2 parents: 3ed8f0f + e2dcc90

File tree

3 files changed (+33, -17 lines)

README.md (+4, -2)
@@ -71,11 +71,13 @@ Aside from the list methods there are `exec` and `exec_file` methods that allow

 ### Opts

-You can pass the following options to the exec and exec_file functions:
+You can pass the following options to the exec and exec_file functions. See `gptscript --help` for further information.

 opts= {
     "cache": True(default)|False,
-    "cache-dir": "",
+    "cache-dir": "/path/to/dir",
+    "quiet": True|False(default),
+    "chdir": "/path/to/dir",
 }

 Cache can be set to true or false to enable or disable caching globally or it can be set at the individual tool level. The cache-dir can be set to a directory to use for caching. If not set, the default cache directory will be used.
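
As a usage sketch (not part of this commit), here is how these options might be passed from Python, assuming exec_file is imported from gptscript.command (the module changed below) and a tool file exists at ./fixtures/test.gpt; only "cache" and "chdir" are shown, since "cache" is the one boolean the wrapper special-cases:

from gptscript.command import exec_file

# "cache": False is inverted by the wrapper into --disable-cache=True;
# "chdir": "./fixtures" becomes --chdir=./fixtures, so the relative tool
# path below is resolved inside the fixtures directory.
opts = {
    "cache": False,
    "chdir": "./fixtures",
}

out, err = exec_file("./test.gpt", opts=opts)
print(out)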

gptscript/command.py (+16, -15)
@@ -4,9 +4,11 @@
 from gptscript.exec_utils import exec_cmd, stream_exec_cmd, stream_exec_cmd_with_events
 from gptscript.tool import FreeForm, Tool

-optToArg = {
+opt_to_arg = {
     "cache": "--disable-cache=",
     "cacheDir": "--cache-dir=",
+    "quiet": "--quiet=",
+    "chdir": "--chdir=",
 }


@@ -35,7 +37,7 @@ def list_models():

 def exec(tool, opts={}):
     cmd = _get_command()
-    args = toArgs(opts)
+    args = to_args(opts)
     args.append("-")
     try:
         tool_str = process_tools(tool)
@@ -47,7 +49,7 @@ def exec(tool, opts={}):

 def stream_exec(tool, opts={}):
     cmd = _get_command()
-    args = toArgs(opts)
+    args = to_args(opts)
     args.append("-")
     try:
         tool_str = process_tools(tool)
@@ -59,7 +61,7 @@ def stream_exec(tool, opts={}):

 def stream_exec_with_events(tool, opts={}):
     cmd = _get_command()
-    args = toArgs(opts)
+    args = to_args(opts)
     args.append("-")
     try:
         tool_str = process_tools(tool)
@@ -71,7 +73,7 @@ def stream_exec_with_events(tool, opts={}):

 def exec_file(tool_path, input="", opts={}):
     cmd = _get_command()
-    args = toArgs(opts)
+    args = to_args(opts)

     args.append(tool_path)

@@ -86,7 +88,7 @@ def exec_file(tool_path, input="", opts={}):

 def stream_exec_file(tool_path, input="", opts={}):
     cmd = _get_command()
-    args = toArgs(opts)
+    args = to_args(opts)

     args.append(tool_path)

@@ -102,7 +104,7 @@ def stream_exec_file(tool_path, input="", opts={}):

 def stream_exec_file_with_events(tool_path, input="", opts={}):
     cmd = _get_command()
-    args = toArgs(opts)
+    args = to_args(opts)

     args.append(tool_path)

@@ -116,14 +118,15 @@ def stream_exec_file_with_events(tool_path, input="", opts={}):
         raise e


-def toArgs(opts):
-    args = ["--quiet=false"]
+def to_args(opts):
+    args = []
     for opt, val in opts.items():
-        if optToArg.get(opt):
+        opt_arg = opt_to_arg.get(opt, None)
+        if opt_arg is not None:
             if opt == "cache":
-                args.append(optToArg[opt] + str(not val))
+                args.append(opt_arg + str(not val))
             else:
-                args.append(optToArg[opt] + val)
+                args.append(opt_arg + val)
     return args


@@ -147,9 +150,7 @@ def tool_concat(tools=[]):
     resp = ""
     if len(tools) == 1:
         return str(tools[0])
-    for tool in tools:
+    if tools:
         resp = "\n---\n".join([str(tool) for tool in tools])

-    print(resp)
-
     return resp
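
For context (illustrative, not part of the commit): the reworked to_args no longer injects a hard-coded --quiet=false. It only translates recognized keys, inverting the boolean "cache" value into --disable-cache, appending string values for the other known options, and silently dropping unknown keys. A quick sketch of the mapping, assuming the function is importable from gptscript.command:

from gptscript.command import to_args

print(to_args({"cache": False, "chdir": "/tmp/work"}))
# ['--disable-cache=True', '--chdir=/tmp/work']

print(to_args({"unknown": "ignored"}))
# [] -- keys without an opt_to_arg entry produce no flags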

tests/test_gptscript.py (+13)
@@ -4,6 +4,7 @@
     list_models,
     list_tools,
     exec,
+    exec_file,
     stream_exec,
     stream_exec_with_events,
     stream_exec_file,
@@ -112,6 +113,18 @@ def test_stream_exec_complex_tool(complex_tool):
     ), "Expected a successful response from stream_exec using complex_tool"


+def test_exec_file_with_chdir():
+    # By changing the directory here, we should be able to find the test.gpt file without `./test`
+    out, err = exec_file("./test.gpt", opts={"chdir": "./fixtures"})
+    for line in out:
+        print(line)
+    for line in err:
+        print(line)
+    assert (
+        out is not None and err is not None
+    ), "Expected some output or error from stream_exec_file"
+
+
 # Test streaming execution from a file
 def test_stream_exec_file():
     out, err, wait = stream_exec_file("./fixtures/test.gpt")
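
As a rough comparison (not part of the test file): assuming the tests run from the repository root, the new option should make the following two calls reach the same tool file, because gptscript changes into the --chdir directory before resolving the relative path:

out, err = exec_file("./fixtures/test.gpt")                       # path resolved from the current working directory
out, err = exec_file("./test.gpt", opts={"chdir": "./fixtures"})  # path resolved by gptscript after --chdir=./fixtures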
