Skip to content

Commit

Permalink
Add "exclude prompts" flag as optional
Browse files Browse the repository at this point in the history
  • Loading branch information
colin-sentry committed Mar 7, 2024
1 parent 4b33a4e commit 56d2679
Showing 1 changed file with 30 additions and 4 deletions.
34 changes: 30 additions & 4 deletions sentry_sdk/integrations/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,10 @@ def count_tokens(s):
class OpenAIIntegration(Integration):
identifier = "openai"

def __init__(self, exclude_prompts=False):
    # type: (OpenAIIntegration, bool) -> None
    """Configure the OpenAI integration.

    :param exclude_prompts: opt-out flag for recording prompt/response
        content on spans. Note the effective rule elsewhere in this file is
        ``_should_send_default_pii() or not exclude_prompts`` — i.e. message
        data is omitted only when PII sending is disabled *and* this flag
        is True.
    """
    self.exclude_prompts = exclude_prompts

@staticmethod
def setup_once():
# type: () -> None
Expand Down Expand Up @@ -122,6 +126,14 @@ def _wrap_chat_completion_create(f):
@wraps(f)
def new_chat_completion(*args, **kwargs):
# type: (*Any, **Any) -> Any
hub = Hub.current
if not hub:
return f(*args, **kwargs)

Check warning on line 131 in sentry_sdk/integrations/openai.py

View check run for this annotation

Codecov / codecov/patch

sentry_sdk/integrations/openai.py#L131

Added line #L131 was not covered by tests

integration = hub.get_integration(OpenAIIntegration) # type: OpenAIIntegration
if not integration:
return f(*args, **kwargs)

Check warning on line 135 in sentry_sdk/integrations/openai.py

View check run for this annotation

Codecov / codecov/patch

sentry_sdk/integrations/openai.py#L135

Added line #L135 was not covered by tests

if "messages" not in kwargs:
# invalid call (in all versions of openai), let it return error
return f(*args, **kwargs)

Check warning on line 139 in sentry_sdk/integrations/openai.py

View check run for this annotation

Codecov / codecov/patch

sentry_sdk/integrations/openai.py#L139

Added line #L139 was not covered by tests
Expand Down Expand Up @@ -149,13 +161,13 @@ def new_chat_completion(*args, **kwargs):
raise e from None

with capture_internal_exceptions():
if _should_send_default_pii():
if _should_send_default_pii() or not integration.exclude_prompts:
span.set_data("ai.input_messages", messages)
span.set_data("ai.model_id", model)
span.set_data("ai.streaming", streaming)

if hasattr(res, "choices"):
if _should_send_default_pii():
if _should_send_default_pii() or not integration.exclude_prompts:
span.set_data(
"ai.responses", list(map(lambda x: x.message, res.choices))
)
Expand Down Expand Up @@ -186,7 +198,10 @@ def new_iterator():
all_responses = list(
map(lambda chunk: "".join(chunk), data_buf)
)
if _should_send_default_pii():
if (
_should_send_default_pii()
or not integration.exclude_prompts
):
span.set_data("ai.responses", all_responses)
_calculate_chat_completion_usage(
messages, res, span, all_responses
Expand All @@ -208,11 +223,22 @@ def _wrap_embeddings_create(f):
@wraps(f)
def new_embeddings_create(*args, **kwargs):
# type: (*Any, **Any) -> Any

hub = Hub.current
if not hub:
return f(*args, **kwargs)

Check warning on line 229 in sentry_sdk/integrations/openai.py

View check run for this annotation

Codecov / codecov/patch

sentry_sdk/integrations/openai.py#L229

Added line #L229 was not covered by tests

integration = hub.get_integration(OpenAIIntegration) # type: OpenAIIntegration
if not integration:
return f(*args, **kwargs)

Check warning on line 233 in sentry_sdk/integrations/openai.py

View check run for this annotation

Codecov / codecov/patch

sentry_sdk/integrations/openai.py#L233

Added line #L233 was not covered by tests

with sentry_sdk.start_span(
op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
description="OpenAI Embedding Creation",
) as span:
if "input" in kwargs:
if "input" in kwargs and (
_should_send_default_pii() or not integration.exclude_prompts
):
if isinstance(kwargs["input"], str):
span.set_data("ai.input_messages", [kwargs["input"]])
elif (
Expand Down

0 comments on commit 56d2679

Please sign in to comment.