feat: add raw response variants for OpenAI functions
Add _with_raw_response variants for all OpenAI functions. These variants
return the raw API response as JSONB and accept the same arguments as
their regular counterparts, making it easy to access the raw API response
when needed.

New functions:

- ai.openai_list_models_with_raw_response
- ai.openai_embed_with_raw_response
- ai.openai_moderate_with_raw_response
- ai.openai_chat_complete_with_raw_response
alejandrodnm committed Feb 6, 2025
1 parent 3fa323d commit bf33286
Showing 8 changed files with 540 additions and 13 deletions.
11 changes: 11 additions & 0 deletions docs/openai.md
@@ -510,6 +510,17 @@ from
;
```
### Raw response
Each OpenAI function has a variant that returns the raw API response as JSONB.
These variants accept the same arguments as their regular counterparts and are
suffixed with `_with_raw_response`:
- ai.openai_list_models_with_raw_response
- ai.openai_embed_with_raw_response
- ai.openai_moderate_with_raw_response
- ai.openai_chat_complete_with_raw_response
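
For example, the raw JSONB can be inspected with the standard JSONB operators.
The query below is only an illustrative sketch: the model name is an example and
an OpenAI API key is assumed to be already configured.

```sql
select ai.openai_embed_with_raw_response
( 'text-embedding-3-small'
, 'the purple elephant sits on a red mushroom'
) -> 'data' -> 0 -> 'embedding'
;
```

The `model` and `usage` fields of the raw response can be extracted the same way.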
### Undocumented request params
If you want to include additional parameters in a request, you can do so using
29 changes: 29 additions & 0 deletions projects/extension/ai/openai.py
@@ -88,3 +88,32 @@ def embed(
return None
for obj in response.data:
yield obj.index, obj.embedding


def embed_with_raw_response(
plpy,
model: str,
input: str | list[str] | list[int],
api_key: str,
base_url: str | None = None,
dimensions: int | None = None,
user: str | None = None,
extra_headers: str | None = None,
extra_query: str | None = None,
extra_body: str | None = None,
timeout: float | None = None,
) -> str:
client = make_client(plpy, api_key, base_url)

kwargs = create_kwargs(
dimensions=dimensions,
user=user,
extra_headers=str_arg_to_dict(extra_headers),
extra_query=str_arg_to_dict(extra_query),
extra_body=str_arg_to_dict(extra_body),
timeout=timeout,
)
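# with_raw_response wraps the call so the client returns the unparsed HTTP
# response; .text below is the raw JSON body sent back by the API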
response = client.embeddings.with_raw_response.create(
input=input, model=model, **kwargs
)
return response.text
25 changes: 25 additions & 0 deletions projects/extension/justfile
@@ -118,3 +118,28 @@ docker-shell:
# Launches a psql shell in the container
psql-shell:
@docker exec -it -u postgres pgai-ext /bin/bash -c "set -e; if [ -f .env ]; then set -a; source .env; set +a; fi; psql"

# Runs the snapshot-based tests against PG 17 and PG 16 and updates the
# expected files with the generated actual files.
update-snapshots: docker-rm docker-build docker-run
#!/usr/bin/env bash
set -uo pipefail
docker exec pgai-ext just install-py install-sql
docker exec pgai-ext uv run pytest tests/contents/test_contents.py
rm tests/contents/output17.expected
mv tests/contents/output17.actual tests/contents/output17.expected
docker exec pgai-ext uv run pytest tests/privileges/test_privileges.py
for f in tests/privileges/*.actual; do
if [ -f "$f" ]; then
expected="${f%.actual}.expected"
rm -f "$expected"
mv "$f" "$expected"
fi
done
just docker-rm
PG_MAJOR=16 just docker-build docker-run
docker exec pgai-ext just install-py install-sql
docker exec pgai-ext uv run pytest tests/contents/test_contents.py
rm tests/contents/output16.expected
mv tests/contents/output16.actual tests/contents/output16.expected
just docker-rm
266 changes: 266 additions & 0 deletions projects/extension/sql/idempotent/001-openai.sql
@@ -67,6 +67,41 @@ language plpython3u volatile parallel safe security invoker
set search_path to pg_catalog, pg_temp
;


-------------------------------------------------------------------------------
-- openai_list_models_with_raw_response
-- list models supported on the openai platform
-- https://platform.openai.com/docs/api-reference/models/list
create or replace function ai.openai_list_models_with_raw_response
( api_key text default null
, api_key_name text default null
, base_url text default null
, extra_headers jsonb default null
, extra_query jsonb default null
, timeout float8 default null
)
returns jsonb
as $python$
#ADD-PYTHON-LIB-DIR
import ai.openai
import ai.secrets
api_key_resolved = ai.secrets.get_secret(plpy, api_key, api_key_name, ai.openai.DEFAULT_KEY_NAME, SD)
client = ai.openai.make_client(plpy, api_key_resolved, base_url)

kwargs = ai.openai.create_kwargs(
extra_headers=ai.openai.str_arg_to_dict(extra_headers),
extra_query=ai.openai.str_arg_to_dict(extra_query),
timeout=timeout,
)

return client.models.with_raw_response.list(**kwargs).text
$python$
language plpython3u volatile parallel safe security invoker
set search_path to pg_catalog, pg_temp
;
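
-- Illustrative usage (kept in a comment so nothing runs at install time): the
-- raw response is plain jsonb, so fields can be pulled out with the usual
-- operators, e.g.:
--   select ai.openai_list_models_with_raw_response() -> 'data' -> 0 ->> 'id';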

-------------------------------------------------------------------------------
-- openai_embed
-- generate an embedding from a text value
@@ -195,6 +230,126 @@ language plpython3u immutable parallel safe security invoker
set search_path to pg_catalog, pg_temp
;


-------------------------------------------------------------------------------
-- openai_embed_with_raw_response
-- generate an embedding from a text value
-- https://platform.openai.com/docs/api-reference/embeddings/create
create or replace function ai.openai_embed_with_raw_response
( model text
, input_text text
, api_key text default null
, api_key_name text default null
, base_url text default null
, dimensions int default null
, openai_user text default null
, extra_headers jsonb default null
, extra_query jsonb default null
, extra_body jsonb default null
, timeout float8 default null
) returns jsonb
as $python$
#ADD-PYTHON-LIB-DIR
import ai.openai
import ai.secrets
api_key_resolved = ai.secrets.get_secret(plpy, api_key, api_key_name, ai.openai.DEFAULT_KEY_NAME, SD)
return ai.openai.embed_with_raw_response(
plpy,
model,
input_text,
api_key_resolved,
base_url,
dimensions,
openai_user,
extra_headers,
extra_query,
extra_body,
timeout)
$python$
language plpython3u immutable parallel safe security invoker
set search_path to pg_catalog, pg_temp
;
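
-- Illustrative usage (comment only; the model name is an example): the embedding
-- vector sits under data[0].embedding in the raw response, e.g.:
--   select jsonb_array_length
--   ( ai.openai_embed_with_raw_response('text-embedding-3-small', 'hello world')
--     -> 'data' -> 0 -> 'embedding'
--   );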

-------------------------------------------------------------------------------
-- openai_embed_with_raw_response
-- generate embeddings from an array of text values
-- https://platform.openai.com/docs/api-reference/embeddings/create
create or replace function ai.openai_embed_with_raw_response
( model text
, input_texts text[]
, api_key text default null
, api_key_name text default null
, base_url text default null
, dimensions int default null
, openai_user text default null
, extra_headers jsonb default null
, extra_query jsonb default null
, extra_body jsonb default null
, timeout float8 default null
) returns jsonb
as $python$
#ADD-PYTHON-LIB-DIR
import ai.openai
import ai.secrets
api_key_resolved = ai.secrets.get_secret(plpy, api_key, api_key_name, ai.openai.DEFAULT_KEY_NAME, SD)

return ai.openai.embed_with_raw_response(
plpy,
model,
input_texts,
api_key_resolved,
base_url,
dimensions,
openai_user,
extra_headers,
extra_query,
extra_body,
timeout)
$python$
language plpython3u immutable parallel safe security invoker
set search_path to pg_catalog, pg_temp
;

-------------------------------------------------------------------------------
-- openai_embed_with_raw_response
-- generate embeddings from an array of tokens
-- https://platform.openai.com/docs/api-reference/embeddings/create
create or replace function ai.openai_embed_with_raw_response
( model text
, input_tokens int[]
, api_key text default null
, api_key_name text default null
, base_url text default null
, dimensions int default null
, openai_user text default null
, extra_headers jsonb default null
, extra_query jsonb default null
, extra_body jsonb default null
, timeout float8 default null
) returns jsonb
as $python$
#ADD-PYTHON-LIB-DIR
import ai.openai
import ai.secrets
api_key_resolved = ai.secrets.get_secret(plpy, api_key, api_key_name, ai.openai.DEFAULT_KEY_NAME, SD)

return ai.openai.embed_with_raw_response(
plpy,
model,
input_tokens,
api_key_resolved,
base_url,
dimensions,
openai_user,
extra_headers,
extra_query,
extra_body,
timeout)
$python$
language plpython3u immutable parallel safe security invoker
set search_path to pg_catalog, pg_temp
;

-------------------------------------------------------------------------------
-- openai_chat_complete
-- text generation / chat completion
@@ -271,6 +426,81 @@ language plpython3u volatile parallel safe security invoker
set search_path to pg_catalog, pg_temp
;

-------------------------------------------------------------------------------
-- openai_chat_complete_with_raw_response
-- text generation / chat completion
-- https://platform.openai.com/docs/api-reference/chat/create
create or replace function ai.openai_chat_complete_with_raw_response
( model text
, messages jsonb
, api_key text default null
, api_key_name text default null
, base_url text default null
, frequency_penalty float8 default null
, logit_bias jsonb default null
, logprobs boolean default null
, top_logprobs int default null
, max_tokens int default null
, n int default null
, presence_penalty float8 default null
, response_format jsonb default null
, seed int default null
, stop text default null
, temperature float8 default null
, top_p float8 default null
, tools jsonb default null
, tool_choice text default null
, openai_user text default null
, extra_headers jsonb default null
, extra_query jsonb default null
, extra_body jsonb default null
, timeout float8 default null
) returns jsonb
as $python$
#ADD-PYTHON-LIB-DIR
import ai.openai
import ai.secrets
api_key_resolved = ai.secrets.get_secret(plpy, api_key, api_key_name, ai.openai.DEFAULT_KEY_NAME, SD)
client = ai.openai.make_client(plpy, api_key_resolved, base_url)
import json

messages_1 = json.loads(messages)
if not isinstance(messages_1, list):
plpy.error("messages is not an array")

kwargs = ai.openai.create_kwargs(
frequency_penalty=frequency_penalty,
logit_bias=ai.openai.str_arg_to_dict(logit_bias),
logprobs=logprobs,
top_logprobs=top_logprobs,
max_tokens=max_tokens,
n=n,
presence_penalty=presence_penalty,
response_format=ai.openai.str_arg_to_dict(response_format),
seed=seed,
stop=stop,
temperature=temperature,
top_p=top_p,
tools=ai.openai.str_arg_to_dict(tools),
tool_choice=tool_choice if tool_choice in {'auto', 'none', 'required'} else ai.openai.str_arg_to_dict(tool_choice),
user=openai_user,
extra_headers=ai.openai.str_arg_to_dict(extra_headers),
extra_query=ai.openai.str_arg_to_dict(extra_query),
extra_body=ai.openai.str_arg_to_dict(extra_body),
timeout=timeout)

response = client.chat.completions.with_raw_response.create(
model=model,
messages=messages_1,
stream=False,
**kwargs)

return response.text
$python$
language plpython3u volatile parallel safe security invoker
set search_path to pg_catalog, pg_temp
;
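
-- Illustrative usage (comment only; the model name is an example): the assistant
-- reply can be read straight out of the raw response, e.g.:
--   select ai.openai_chat_complete_with_raw_response
--   ( 'gpt-4o'
--   , jsonb_build_array(jsonb_build_object('role', 'user', 'content', 'name one sql database'))
--   ) -> 'choices' -> 0 -> 'message' ->> 'content';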

------------------------------------------------------------------------------------
-- openai_chat_complete_simple
-- simple chat completion that only requires a message and only returns the response
@@ -333,3 +563,39 @@ $python$
language plpython3u immutable parallel safe security invoker
set search_path to pg_catalog, pg_temp
;

-------------------------------------------------------------------------------
-- openai_moderate_with_raw_response
-- classify text as potentially harmful or not
-- https://platform.openai.com/docs/api-reference/moderations/create
create or replace function ai.openai_moderate_with_raw_response
( model text
, input_text text
, api_key text default null
, api_key_name text default null
, base_url text default null
, extra_headers jsonb default null
, extra_query jsonb default null
, extra_body jsonb default null
, timeout float8 default null
) returns jsonb
as $python$
#ADD-PYTHON-LIB-DIR
import ai.openai
import ai.secrets
api_key_resolved = ai.secrets.get_secret(plpy, api_key, api_key_name, ai.openai.DEFAULT_KEY_NAME, SD)
client = ai.openai.make_client(plpy, api_key_resolved, base_url)
kwargs = ai.openai.create_kwargs(
extra_headers=ai.openai.str_arg_to_dict(extra_headers),
extra_query=ai.openai.str_arg_to_dict(extra_query),
extra_body=ai.openai.str_arg_to_dict(extra_body),
timeout=timeout)
moderation = client.moderations.with_raw_response.create(
input=input_text,
model=model,
**kwargs)
return moderation.text
$python$
language plpython3u immutable parallel safe security invoker
set search_path to pg_catalog, pg_temp
;
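
-- Illustrative usage (comment only; the model name is an example): the first
-- moderation result in the raw response can be checked with, e.g.:
--   select ai.openai_moderate_with_raw_response
--   ( 'omni-moderation-latest'
--   , 'I want to harm them.'
--   ) -> 'results' -> 0 ->> 'flagged';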