feat(api): delete messages (#1388)
stainless-bot committed May 1, 2024
1 parent 11460b5 commit d0597cd
Showing 22 changed files with 379 additions and 106 deletions.
22 changes: 21 additions & 1 deletion .github/workflows/ci.yml
@@ -39,5 +39,25 @@ jobs:
- name: Ensure importable
run: |
rye run python -c 'import openai'
test:
name: test
runs-on: ubuntu-latest
if: github.repository == 'openai/openai-python'

steps:
- uses: actions/checkout@v4

- name: Install Rye
run: |
curl -sSf https://rye-up.com/get | bash
echo "$HOME/.rye/shims" >> $GITHUB_PATH
env:
RYE_VERSION: 0.24.0
RYE_INSTALL_OPTION: '--yes'

- name: Bootstrap
run: ./scripts/bootstrap

- name: Run tests
run: ./scripts/test

1 change: 1 addition & 0 deletions .gitignore
@@ -12,3 +12,4 @@ dist
.env
.envrc
codegen.log
Brewfile.lock.json
4 changes: 2 additions & 2 deletions .stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 63
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-0839c14b2b61dad4e830884410cfc3695546682ced009e50583c8bb5c44512d7.yml
configured_endpoints: 64
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-97c9a5f089049dc9eb5cee9475558049003e37e42202cab39e59d75e08b4c613.yml
2 changes: 2 additions & 0 deletions Brewfile
@@ -0,0 +1,2 @@
brew "rye"

1 change: 1 addition & 0 deletions api.md
@@ -392,6 +392,7 @@ Methods:
- <code title="get /threads/{thread_id}/messages/{message_id}">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">retrieve</a>(message_id, \*, thread_id) -> <a href="./src/openai/types/beta/threads/message.py">Message</a></code>
- <code title="post /threads/{thread_id}/messages/{message_id}">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">update</a>(message_id, \*, thread_id, \*\*<a href="src/openai/types/beta/threads/message_update_params.py">params</a>) -> <a href="./src/openai/types/beta/threads/message.py">Message</a></code>
- <code title="get /threads/{thread_id}/messages">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">list</a>(thread_id, \*\*<a href="src/openai/types/beta/threads/message_list_params.py">params</a>) -> <a href="./src/openai/types/beta/threads/message.py">SyncCursorPage[Message]</a></code>
- <code title="delete /threads/{thread_id}/messages/{message_id}">client.beta.threads.messages.<a href="./src/openai/resources/beta/threads/messages.py">delete</a>(message_id, \*, thread_id) -> <a href="./src/openai/types/beta/threads/message_deleted.py">MessageDeleted</a></code>

# Batches

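The line added above documents the new `delete` method, which maps to `DELETE /threads/{thread_id}/messages/{message_id}` and returns a `MessageDeleted` object. A minimal usage sketch — the thread and message IDs below are placeholders, and the client reads `OPENAI_API_KEY` from the environment:

from openai import OpenAI

client = OpenAI()

# Placeholder IDs for illustration; use a real thread and message from your account.
deleted = client.beta.threads.messages.delete(
    "msg_abc123",
    thread_id="thread_abc123",
)
print(deleted.id, deleted.deleted)  # MessageDeleted exposes `id`, `deleted`, and `object`
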
40 changes: 0 additions & 40 deletions bin/check-env-state.py

This file was deleted.

50 changes: 0 additions & 50 deletions bin/check-test-server

This file was deleted.

3 changes: 0 additions & 3 deletions bin/test

This file was deleted.

3 changes: 2 additions & 1 deletion pyproject.toml
@@ -74,7 +74,7 @@ format = { chain = [
"fix:ruff",
]}
"format:black" = "black ."
"format:docs" = "python bin/ruffen-docs.py README.md api.md"
"format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md"
"format:ruff" = "ruff format"
"format:isort" = "isort ."

@@ -197,5 +197,6 @@ known-first-party = ["openai", "tests"]

[tool.ruff.per-file-ignores]
"bin/**.py" = ["T201", "T203"]
"scripts/**.py" = ["T201", "T203"]
"tests/**.py" = ["T201", "T203"]
"examples/**.py" = ["T201", "T203"]
19 changes: 19 additions & 0 deletions scripts/bootstrap
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

set -e

cd "$(dirname "$0")/.."

if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ]; then
brew bundle check >/dev/null 2>&1 || {
echo "==> Installing Homebrew dependencies…"
brew bundle
}
fi

echo "==> Installing Python dependencies…"

# experimental uv support makes installations significantly faster
rye config --set-bool behavior.use-uv=true

rye sync
8 changes: 8 additions & 0 deletions scripts/format
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

set -e

cd "$(dirname "$0")/.."

rye run format

8 changes: 8 additions & 0 deletions scripts/lint
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

set -e

cd "$(dirname "$0")/.."

rye run lint

41 changes: 41 additions & 0 deletions scripts/mock
@@ -0,0 +1,41 @@
#!/usr/bin/env bash

set -e

cd "$(dirname "$0")/.."

if [[ -n "$1" && "$1" != '--'* ]]; then
URL="$1"
shift
else
URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)"
fi

# Check if the URL is empty
if [ -z "$URL" ]; then
echo "Error: No OpenAPI spec path/url provided or found in .stats.yml"
exit 1
fi

echo "==> Starting mock server with URL ${URL}"

# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock "$URL" &> .prism.log &

# Wait for server to come online
echo -n "Waiting for server"
while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do
echo -n "."
sleep 0.1
done

if grep -q "✖ fatal" ".prism.log"; then
cat .prism.log
exit 1
fi

echo
else
npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock "$URL"
fi
57 changes: 57 additions & 0 deletions scripts/test
@@ -0,0 +1,57 @@
#!/usr/bin/env bash

set -e

cd "$(dirname "$0")/.."

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color

function prism_is_running() {
curl --silent "http://localhost:4010" >/dev/null 2>&1
}

kill_server_on_port() {
pids=$(lsof -t -i tcp:"$1" || echo "")
if [ "$pids" != "" ]; then
kill "$pids"
echo "Stopped $pids."
fi
}

function is_overriding_api_base_url() {
[ -n "$TEST_API_BASE_URL" ]
}

if ! is_overriding_api_base_url && ! prism_is_running ; then
# When we exit this script, make sure to kill the background mock server process
trap 'kill_server_on_port 4010' EXIT

# Start the dev server
./scripts/mock --daemon
fi

if is_overriding_api_base_url ; then
echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
echo
elif ! prism_is_running ; then
echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
echo -e "running against your OpenAPI spec."
echo
echo -e "To run the server, pass in the path or url of your OpenAPI"
echo -e "spec to the prism command:"
echo
echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
echo

exit 1
else
echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
echo
fi

# Run tests
echo "==> Running tests"
rye run pytest "$@"
File renamed without changes.
12 changes: 6 additions & 6 deletions src/openai/resources/batches.py
@@ -40,7 +40,7 @@ def create(
self,
*,
completion_window: Literal["24h"],
endpoint: Literal["/v1/chat/completions"],
endpoint: Literal["/v1/chat/completions", "/v1/embeddings"],
input_file_id: str,
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -57,8 +57,8 @@ def create(
completion_window: The time frame within which the batch should be processed. Currently only `24h`
is supported.
endpoint: The endpoint to be used for all requests in the batch. Currently only
`/v1/chat/completions` is supported.
endpoint: The endpoint to be used for all requests in the batch. Currently
`/v1/chat/completions` and `/v1/embeddings` are supported.
input_file_id: The ID of an uploaded file that contains requests for the new batch.
@@ -228,7 +228,7 @@ async def create(
self,
*,
completion_window: Literal["24h"],
endpoint: Literal["/v1/chat/completions"],
endpoint: Literal["/v1/chat/completions", "/v1/embeddings"],
input_file_id: str,
metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -245,8 +245,8 @@ async def create(
completion_window: The time frame within which the batch should be processed. Currently only `24h`
is supported.
endpoint: The endpoint to be used for all requests in the batch. Currently only
`/v1/chat/completions` is supported.
endpoint: The endpoint to be used for all requests in the batch. Currently
`/v1/chat/completions` and `/v1/embeddings` are supported.
input_file_id: The ID of an uploaded file that contains requests for the new batch.
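The docstring and `Literal` changes above extend `client.batches.create` to accept `/v1/embeddings` in addition to `/v1/chat/completions`. A minimal sketch of creating an embeddings batch, assuming a `.jsonl` file of embedding requests has already been uploaded — the file ID below is a placeholder:

from openai import OpenAI

client = OpenAI()

# "file-abc123" is a placeholder; upload a .jsonl of /v1/embeddings requests first,
# for example with client.files.create(file=open("requests.jsonl", "rb"), purpose="batch").
batch = client.batches.create(
    input_file_id="file-abc123",
    endpoint="/v1/embeddings",  # newly supported alongside "/v1/chat/completions"
    completion_window="24h",
)
print(batch.id, batch.status)
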
