Skip to content

Commit d2738d4

Browse files
committed
feat(api): delete messages (#1388)
1 parent 11460b5 commit d2738d4

File tree

22 files changed

+379
-106
lines changed

22 files changed

+379
-106
lines changed

.github/workflows/ci.yml

+21-1
Original file line numberDiff line numberDiff line change
@@ -39,5 +39,25 @@ jobs:
3939
- name: Ensure importable
4040
run: |
4141
rye run python -c 'import openai'
42+
test:
43+
name: test
44+
runs-on: ubuntu-latest
45+
if: github.repository == 'openai/openai-python'
46+
47+
steps:
48+
- uses: actions/checkout@v4
49+
50+
- name: Install Rye
51+
run: |
52+
curl -sSf https://rye-up.com/get | bash
53+
echo "$HOME/.rye/shims" >> $GITHUB_PATH
54+
env:
55+
RYE_VERSION: 0.24.0
56+
RYE_INSTALL_OPTION: '--yes'
57+
58+
- name: Bootstrap
59+
run: ./scripts/bootstrap
60+
61+
- name: Run tests
62+
run: ./scripts/test
4263

43-

.gitignore

+1
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,4 @@ dist
1212
.env
1313
.envrc
1414
codegen.log
15+
Brewfile.lock.json

.stats.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
configured_endpoints: 63
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-0839c14b2b61dad4e830884410cfc3695546682ced009e50583c8bb5c44512d7.yml
1+
configured_endpoints: 64
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-97c9a5f089049dc9eb5cee9475558049003e37e42202cab39e59d75e08b4c613.yml

Brewfile

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
# Homebrew dependencies for local development; installed via `brew bundle`
# (run automatically on macOS by scripts/bootstrap).
brew "rye"

api.md

+1
Original file line numberDiff line numberDiff line change
@@ -392,6 +392,7 @@ Methods:
392392
- <code title="get /threads/{thread_id}/messages/{message_id}">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">retrieve</a>(message_id, \*, thread_id) -> <a href="./src/openai/types/beta/threads/message.py">Message</a></code>
393393
- <code title="post /threads/{thread_id}/messages/{message_id}">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">update</a>(message_id, \*, thread_id, \*\*<a href="src/openai/types/beta/threads/message_update_params.py">params</a>) -> <a href="./src/openai/types/beta/threads/message.py">Message</a></code>
394394
- <code title="get /threads/{thread_id}/messages">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">list</a>(thread_id, \*\*<a href="src/openai/types/beta/threads/message_list_params.py">params</a>) -> <a href="./src/openai/types/beta/threads/message.py">SyncCursorPage[Message]</a></code>
395+
- <code title="delete /threads/{thread_id}/messages/{message_id}">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">delete</a>(message_id, \*, thread_id) -> <a href="./src/openai/types/beta/threads/message_deleted.py">MessageDeleted</a></code>
395396

396397
# Batches
397398

bin/check-env-state.py

-40
This file was deleted.

bin/check-test-server

-50
This file was deleted.

bin/test

-3
This file was deleted.

pyproject.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ format = { chain = [
7474
"fix:ruff",
7575
]}
7676
"format:black" = "black ."
77-
"format:docs" = "python bin/ruffen-docs.py README.md api.md"
77+
"format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md"
7878
"format:ruff" = "ruff format"
7979
"format:isort" = "isort ."
8080

@@ -197,5 +197,6 @@ known-first-party = ["openai", "tests"]
197197

198198
[tool.ruff.per-file-ignores]
199199
"bin/**.py" = ["T201", "T203"]
200+
"scripts/**.py" = ["T201", "T203"]
200201
"tests/**.py" = ["T201", "T203"]
201202
"examples/**.py" = ["T201", "T203"]

scripts/bootstrap

+19
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
#!/usr/bin/env bash
# Bootstrap the development environment: install Homebrew dependencies on
# macOS (if a Brewfile is present) and sync the Python environment via Rye.

set -e

# Always operate from the repository root, regardless of invocation directory.
cd "$(dirname "$0")/.."

# On macOS, install any Homebrew dependencies declared in the Brewfile that
# are not already satisfied.
if [ "$(uname -s)" = "Darwin" ] && [ -f "Brewfile" ]; then
  if ! brew bundle check >/dev/null 2>&1; then
    echo "==> Installing Homebrew dependencies…"
    brew bundle
  fi
fi

echo "==> Installing Python dependencies…"

# experimental uv support makes installations significantly faster
rye config --set-bool behavior.use-uv=true

rye sync

scripts/format

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
#!/usr/bin/env bash
# Format the codebase by delegating to the `format` task defined in
# pyproject.toml (run through Rye).

set -e

# Resolve the repository root relative to this script's own location so the
# script works from any working directory.
repo_root="$(dirname "$0")/.."
cd "$repo_root"

rye run format
8+

scripts/lint

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
#!/usr/bin/env bash
# Lint the codebase by delegating to the `lint` task defined in
# pyproject.toml (run through Rye).

set -e

# Resolve the repository root relative to this script's own location so the
# script works from any working directory.
repo_root="$(dirname "$0")/.."
cd "$repo_root"

rye run lint
8+

scripts/mock

+41
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
#!/usr/bin/env bash
# Start a Prism mock server for the project's OpenAPI spec.
#
# Usage: ./scripts/mock [spec-path-or-url] [--daemon]
#
# With no spec argument, the spec URL is read from .stats.yml. With
# --daemon, the server runs in the background and this script waits until
# it is listening (or fails) before returning.

set -e

cd "$(dirname "$0")/.."

# A leading non-flag argument overrides the spec location.
if [[ -n "$1" && "$1" != '--'* ]]; then
  URL="$1"
  shift
else
  URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)"
fi

# Check if the URL is empty
if [ -z "$URL" ]; then
  echo "Error: No OpenAPI spec path/url provided or found in .stats.yml"
  exit 1
fi

echo "==> Starting mock server with URL ${URL}"

# Run prism mock on the given spec
run_prism() {
  npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock "$@"
}

if [ "$1" != "--daemon" ]; then
  # Foreground: block until the mock server is stopped.
  run_prism "$URL"
else
  # Background: capture output so we can detect startup success/failure.
  run_prism "$URL" &> .prism.log &

  # Wait for server to come online
  echo -n "Waiting for server"
  until grep -q "✖ fatal\|Prism is listening" ".prism.log"; do
    echo -n "."
    sleep 0.1
  done

  if grep -q "✖ fatal" ".prism.log"; then
    cat .prism.log
    exit 1
  fi

  echo
fi

scripts/test

+57
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#!/usr/bin/env bash
# Run the test suite. Unless TEST_API_BASE_URL is set (pointing tests at a
# real server) or a Prism mock server is already listening on port 4010, a
# mock server is started in the background and killed again on exit.

set -e

cd "$(dirname "$0")/.."

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color

function prism_is_running() {
  curl --silent "http://localhost:4010" >/dev/null 2>&1
}

kill_server_on_port() {
  pids=$(lsof -t -i tcp:"$1" || echo "")
  if [ "$pids" != "" ]; then
    # `lsof -t` may report several PIDs (one per line). Feed them through
    # xargs so each PID becomes its own argument — `kill "$pids"` would
    # pass them all as a single (invalid) argument and fail.
    echo "$pids" | xargs kill
    echo "Stopped $pids."
  fi
}

function is_overriding_api_base_url() {
  [ -n "$TEST_API_BASE_URL" ]
}

if ! is_overriding_api_base_url && ! prism_is_running ; then
  # When we exit this script, make sure to kill the background mock server process
  trap 'kill_server_on_port 4010' EXIT

  # Start the dev server
  ./scripts/mock --daemon
fi

if is_overriding_api_base_url ; then
  echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
  echo
elif ! prism_is_running ; then
  echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
  echo -e "running against your OpenAPI spec."
  echo
  echo -e "To run the server, pass in the path or url of your OpenAPI"
  echo -e "spec to the prism command:"
  echo
  echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
  echo

  exit 1
else
  echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
  echo
fi

# Run tests
echo "==> Running tests"
rye run pytest "$@"
File renamed without changes.

src/openai/resources/batches.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def create(
4040
self,
4141
*,
4242
completion_window: Literal["24h"],
43-
endpoint: Literal["/v1/chat/completions"],
43+
endpoint: Literal["/v1/chat/completions", "/v1/embeddings"],
4444
input_file_id: str,
4545
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
4646
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -57,8 +57,8 @@ def create(
5757
completion_window: The time frame within which the batch should be processed. Currently only `24h`
5858
is supported.
5959
60-
endpoint: The endpoint to be used for all requests in the batch. Currently only
61-
`/v1/chat/completions` is supported.
60+
endpoint: The endpoint to be used for all requests in the batch. Currently
61+
`/v1/chat/completions` and `/v1/embeddings` are supported.
6262
6363
input_file_id: The ID of an uploaded file that contains requests for the new batch.
6464
@@ -228,7 +228,7 @@ async def create(
228228
self,
229229
*,
230230
completion_window: Literal["24h"],
231-
endpoint: Literal["/v1/chat/completions"],
231+
endpoint: Literal["/v1/chat/completions", "/v1/embeddings"],
232232
input_file_id: str,
233233
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
234234
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -245,8 +245,8 @@ async def create(
245245
completion_window: The time frame within which the batch should be processed. Currently only `24h`
246246
is supported.
247247
248-
endpoint: The endpoint to be used for all requests in the batch. Currently only
249-
`/v1/chat/completions` is supported.
248+
endpoint: The endpoint to be used for all requests in the batch. Currently
249+
`/v1/chat/completions` and `/v1/embeddings` are supported.
250250
251251
input_file_id: The ID of an uploaded file that contains requests for the new batch.
252252

0 commit comments

Comments
 (0)