except ImportError:
    raise DidNotEnable("OpenAI not installed")

+RESPONSES_API_ENABLED = True
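+# Toggled to False below when the Responses API import fails (requires openai>=1.66.0).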
try:
    # responses API support was introduced in v1.66.0
-    from openai.resources.responses import Responses
+    from openai.resources.responses import Responses, AsyncResponses
except ImportError:
-    Responses = None
+    RESPONSES_API_ENABLED = False


class OpenAIIntegration(Integration):
@@ -53,16 +54,17 @@ def __init__(self, include_prompts=True, tiktoken_encoding_name=None):
    def setup_once():
        # type: () -> None
        Completions.create = _wrap_chat_completion_create(Completions.create)
-        Embeddings.create = _wrap_embeddings_create(Embeddings.create)
-
-        if Responses is not None:
-            Responses.create = _wrap_responses_create(Responses.create)
-
        AsyncCompletions.create = _wrap_async_chat_completion_create(
            AsyncCompletions.create
        )
+
+        Embeddings.create = _wrap_embeddings_create(Embeddings.create)
        AsyncEmbeddings.create = _wrap_async_embeddings_create(AsyncEmbeddings.create)

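+        # Patch the Responses API endpoints only when the installed openai package provides them.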
+        if RESPONSES_API_ENABLED:
+            Responses.create = _wrap_responses_create(Responses.create)
+            AsyncResponses.create = _wrap_async_responses_create(AsyncResponses.create)
+
    def count_tokens(self, s):
        # type: (OpenAIIntegration, str) -> int
        if self.tiktoken_encoding is not None:
@@ -172,7 +174,7 @@ def _new_chat_completion_common(f, *args, **kwargs):

    span = sentry_sdk.start_span(
        op=consts.OP.GEN_AI_CHAT,
-        name=f"{consts.OP.GEN_AI_CHAT} {model}",
+        name=f"chat {model}",
        origin=OpenAIIntegration.origin,
    )
    span.__enter__()
@@ -183,7 +185,9 @@ def _new_chat_completion_common(f, *args, **kwargs):
    if should_send_default_pii() and integration.include_prompts:
        set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages)

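+    # Record the provider and operation name alongside the requested model.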
+    set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "openai")
    set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MODEL, model)
+    set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "chat")
    set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)

    if hasattr(res, "choices"):
@@ -357,9 +361,13 @@ def _new_embeddings_create_common(f, *args, **kwargs):

    with sentry_sdk.start_span(
        op=consts.OP.GEN_AI_EMBEDDINGS,
-        name=f"{consts.OP.GEN_AI_EMBEDDINGS} {model}",
+        name=f"embeddings {model}",
        origin=OpenAIIntegration.origin,
    ) as span:
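+        # Tag the span with provider, requested model, and operation name before any PII-gated input.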
+        set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "openai")
+        set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MODEL, model)
+        set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "embeddings")
+
        if "input" in kwargs and (
            should_send_default_pii() and integration.include_prompts
        ):
@@ -483,12 +491,14 @@ def _new_responses_create_common(f, *args, **kwargs):

    span = sentry_sdk.start_span(
        op=consts.OP.GEN_AI_RESPONSES,
-        name=f"{consts.OP.GEN_AI_RESPONSES} {model}",
+        name=f"responses {model}",
        origin=OpenAIIntegration.origin,
    )
    span.__enter__()

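+    # Same provider/model/operation tagging as for chat and embeddings spans.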
+    set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "openai")
    set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MODEL, model)
+    set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, "responses")

    if should_send_default_pii() and integration.include_prompts:
        set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, input)
@@ -544,3 +554,37 @@ def _sentry_patched_create_sync(*args, **kwargs):
        return _execute_sync(f, *args, **kwargs)

    return _sentry_patched_create_sync
+
+
+def _wrap_async_responses_create(f):
+    # type: (Any) -> Any
+    async def _execute_async(f, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
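+        # _new_responses_create_common is a generator: it opens the span, yields the
+        # (f, args, kwargs) to call, and records response data when the result is sent back in.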
+        gen = _new_responses_create_common(f, *args, **kwargs)
+
+        try:
+            f, args, kwargs = next(gen)
+        except StopIteration as e:
+            return await e.value
+
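+        # Await the real create call; exceptions are captured and re-raised, and successful
+        # results are sent back into the generator so response data is recorded on the span.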
+        try:
+            try:
+                result = await f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(e)
+                raise e from None
+
+            return gen.send(result)
+        except StopIteration as e:
+            return e.value
+
+    @wraps(f)
+    async def _sentry_patched_create_async(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
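+        # Fall back to the unpatched call when the OpenAI integration is not enabled.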
+        integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+        if integration is None:
+            return await f(*args, **kwargs)
+
+        return await _execute_async(f, *args, **kwargs)
+
+    return _sentry_patched_create_async
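
For reference, a minimal setup that exercises the patched endpoints might look like the sketch below (not part of this change; the DSN and model name are placeholders):

    import sentry_sdk
    from sentry_sdk.integrations.openai import OpenAIIntegration
    from openai import OpenAI

    sentry_sdk.init(
        dsn="...",
        traces_sample_rate=1.0,
        send_default_pii=True,  # required for request messages/input to be attached to spans
        integrations=[OpenAIIntegration(include_prompts=True)],
    )

    client = OpenAI()
    client.responses.create(model="gpt-4o-mini", input="Hello")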