
Commit e0b6a06

feat: Support function call requests/responses (#56)
1 parent c872a2c commit e0b6a06

19 files changed: +334 -178 lines changed
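
For context, a minimal sketch of the OpenAI-style exchange this commit targets (model, tool, and argument values are illustrative, not taken from the repository): the client declares a tool in the request, and the bridge must now be able to answer with a function/tool call instead of plain text.

# Illustrative request and response fragments in OpenAI chat-completions format.
request = {
    "model": "gpt-4o-mini",
    "messages": [{"role": "user", "content": "What is the weather in Paris?"}],
    "tools": [{
        "type": "function",
        "function": {
            "name": "get_weather",
            "parameters": {"type": "object", "properties": {"city": {"type": "string"}}},
        },
    }],
    "tool_choice": "auto",
}
response_choice = {
    "index": 0,
    "message": {
        "role": "assistant",
        "content": None,
        "tool_calls": [{
            "id": "call_1",
            "type": "function",
            "function": {"name": "get_weather", "arguments": '{"city": "Paris"}'},
        }],
    },
    "finish_reason": "tool_calls",
}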

langchain_openai_api_bridge/chat_completion/chat_completion_chunk_choice_adapter.py

Lines changed: 20 additions & 16 deletions

@@ -4,33 +4,37 @@
 from langchain_openai_api_bridge.chat_completion.chat_completion_chunk_object_factory import (
     create_chat_completion_chunk_object,
 )
-from langchain_openai_api_bridge.chat_completion.content_adapter import (
-    to_string_content,
-)
-from langchain_openai_api_bridge.core.types.openai import (
-    OpenAIChatCompletionChunkChoice,
-    OpenAIChatCompletionChunkObject,
-    OpenAIChatMessage,
-)
+from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, Choice, ChoiceDelta, ChoiceDeltaFunctionCall


 def to_openai_chat_message(
     event: StreamEvent,
     role: str = "assistant",
-) -> OpenAIChatMessage:
-    content = event["data"]["chunk"].content
-    return OpenAIChatMessage(content=to_string_content(content), role=role)
+) -> ChoiceDelta:
+    if getattr(event["data"]["chunk"], "tool_call_chunks", None):
+        function_call = ChoiceDeltaFunctionCall(
+            name=event["data"]["chunk"].tool_call_chunks[0]["name"],
+            arguments=event["data"]["chunk"].tool_call_chunks[0]["args"],
+        )
+    else:
+        function_call = None
+
+    return ChoiceDelta(
+        content=event["data"]["chunk"].content,
+        role=role,
+        function_call=function_call,
+    )


 def to_openai_chat_completion_chunk_choice(
     event: StreamEvent,
     index: int = 0,
-    role: str = "assistant",
+    role: Optional[str] = None,
     finish_reason: Optional[str] = None,
-) -> OpenAIChatCompletionChunkChoice:
+) -> Choice:
     message = to_openai_chat_message(event, role)

-    return OpenAIChatCompletionChunkChoice(
+    return Choice(
         index=index,
         delta=message,
         finish_reason=finish_reason,
@@ -42,9 +46,9 @@ def to_openai_chat_completion_chunk_object(
     id: str = "",
     model: str = "",
     system_fingerprint: Optional[str] = None,
-    role: str = "assistant",
+    role: Optional[str] = None,
     finish_reason: Optional[str] = None,
-) -> OpenAIChatCompletionChunkObject:
+) -> ChatCompletionChunk:

     choice1 = to_openai_chat_completion_chunk_choice(
         event, index=0, role=role, finish_reason=finish_reason
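
The new to_openai_chat_message surfaces the first LangChain tool_call_chunk of a streamed message as a legacy function_call delta. A minimal sketch of that mapping (the tool name and partial arguments are made up), driving the adapter with a hand-built astream_events-style event:

# Requires langchain-core, openai>=1.x, and this package installed.
from langchain_core.messages import AIMessageChunk
from langchain_openai_api_bridge.chat_completion.chat_completion_chunk_choice_adapter import (
    to_openai_chat_message,
)

chunk = AIMessageChunk(
    content="",
    tool_call_chunks=[
        {"name": "get_weather", "args": '{"city": "Par', "id": "call_1", "index": 0}
    ],
)
delta = to_openai_chat_message({"data": {"chunk": chunk}})
print(delta.function_call)  # ChoiceDeltaFunctionCall with name "get_weather" and the partial JSON arguments
print(delta.role)           # assistant
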
langchain_openai_api_bridge/chat_completion/chat_completion_chunk_object_factory.py

Lines changed: 15 additions & 12 deletions

@@ -1,19 +1,16 @@
 import time
-from typing import Dict, List, Optional
+from typing import List, Literal, Optional

-from langchain_openai_api_bridge.core.types.openai import (
-    OpenAIChatCompletionChunkChoice,
-    OpenAIChatCompletionChunkObject,
-)
+from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, Choice


 def create_chat_completion_chunk_object(
     id: str,
     model: str,
     system_fingerprint: Optional[str],
-    choices: List[OpenAIChatCompletionChunkChoice] = [],
-) -> OpenAIChatCompletionChunkObject:
-    return OpenAIChatCompletionChunkObject(
+    choices: List[Choice] = [],
+) -> ChatCompletionChunk:
+    return ChatCompletionChunk(
         id=id,
         object="chat.completion.chunk",
         created=int(time.time()),
@@ -25,18 +22,24 @@ def create_chat_completion_chunk_object(

 def create_final_chat_completion_chunk_choice(
     index: int,
-) -> OpenAIChatCompletionChunkChoice:
-    return OpenAIChatCompletionChunkChoice(index=index, delta={}, finish_reason="stop")
+    finish_reason: Literal["stop", "tool_calls"],
+) -> Choice:
+    return Choice(
+        index=index,
+        delta={},
+        finish_reason=finish_reason,
+    )


 def create_final_chat_completion_chunk_object(
     id: str,
     model: str = "",
     system_fingerprint: Optional[str] = None,
-) -> Dict:
+    finish_reason: Literal["stop", "tool_calls"] = "stop",
+) -> ChatCompletionChunk:
     return create_chat_completion_chunk_object(
         id=id,
         model=model,
         system_fingerprint=system_fingerprint,
-        choices=[create_final_chat_completion_chunk_choice(index=0)],
+        choices=[create_final_chat_completion_chunk_choice(index=0, finish_reason=finish_reason)],
     )
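
A short usage sketch of the reworked terminating chunk (id and model values are illustrative): the finish_reason chosen upstream, "tool_calls" when the stream ended on a function call, is now forwarded into the closing chunk.

from langchain_openai_api_bridge.chat_completion.chat_completion_chunk_object_factory import (
    create_final_chat_completion_chunk_object,
)

# Build the closing chunk that ends a function-call stream.
final_chunk = create_final_chat_completion_chunk_object(
    id="chatcmpl-123",
    model="gpt-4o-mini",
    finish_reason="tool_calls",
)
print(final_chunk.choices[0].finish_reason)  # tool_calls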

langchain_openai_api_bridge/chat_completion/chat_completion_compatible_api.py

Lines changed: 8 additions & 8 deletions

@@ -7,8 +7,8 @@
 from langchain_openai_api_bridge.chat_completion.langchain_stream_adapter import (
     LangchainStreamAdapter,
 )
-from langchain_openai_api_bridge.core.types.openai import OpenAIChatMessage
 from langchain_openai_api_bridge.core.utils.pydantic_async_iterator import ato_dict
+from openai.types.chat import ChatCompletionMessage


 class ChatCompletionCompatibleAPI:
@@ -39,7 +39,7 @@ def __init__(
         self.agent = agent
         self.event_adapter = event_adapter

-    async def astream(self, messages: List[OpenAIChatMessage]) -> AsyncIterator[dict]:
+    async def astream(self, messages: List[ChatCompletionMessage]) -> AsyncIterator[dict]:
         async with self.agent as runnable:
             input = self.__to_input(runnable, messages)
             astream_event = runnable.astream_events(
@@ -51,7 +51,7 @@ async def astream(self, messages: List[OpenAIChatMessage]) -> AsyncIterator[dict
         ):
             yield it

-    async def ainvoke(self, messages: List[OpenAIChatMessage]) -> dict:
+    async def ainvoke(self, messages: List[ChatCompletionMessage]) -> dict:
         async with self.agent as runnable:
             input = self.__to_input(runnable, messages)
             result = await runnable.ainvoke(
@@ -60,16 +60,16 @@ async def ainvoke(self, messages: List[OpenAIChatMessage]) -> dict:

         return self.invoke_adapter.to_chat_completion_object(result).model_dump()

-    def __to_input(self, runnable: Runnable, messages: List[OpenAIChatMessage]):
+    def __to_input(self, runnable: Runnable, messages: List[ChatCompletionMessage]):
         if isinstance(runnable, CompiledStateGraph):
             return self.__to_react_agent_input(messages)
         else:
             return self.__to_chat_model_input(messages)

-    def __to_react_agent_input(self, messages: List[OpenAIChatMessage]):
+    def __to_react_agent_input(self, messages: List[ChatCompletionMessage]):
         return {
-            "messages": [message.model_dump() for message in messages],
+            "messages": [message for message in messages],
         }

-    def __to_chat_model_input(self, messages: List[OpenAIChatMessage]):
-        return [message.model_dump() for message in messages]
+    def __to_chat_model_input(self, messages: List[ChatCompletionMessage]):
+        return [message for message in messages]
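
With function-call support, a follow-up request can carry the assistant's earlier tool call and the tool's result as ordinary OpenAI-format messages; __to_react_agent_input and __to_chat_model_input now forward those dicts to the agent unchanged. A hedged sketch of such a message list (all values are made up):

# Illustrative OpenAI-format request messages passed through the bridge as-is.
messages = [
    {"role": "user", "content": "What is the weather in Paris?"},
    {
        "role": "assistant",
        "content": None,
        "tool_calls": [{
            "id": "call_1",
            "type": "function",
            "function": {"name": "get_weather", "arguments": '{"city": "Paris"}'},
        }],
    },
    {"role": "tool", "tool_call_id": "call_1", "content": "18 degrees C and sunny"},
]
react_agent_input = {"messages": messages}  # the shape __to_react_agent_input builds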

langchain_openai_api_bridge/chat_completion/chat_completion_object_factory.py

Lines changed: 8 additions & 12 deletions

@@ -1,30 +1,26 @@
 import time
 from typing import List, Optional

-from langchain_openai_api_bridge.core.types.openai import (
-    OpenAIChatCompletionChoice,
-    OpenAIChatCompletionObject,
-    OpenAIChatCompletionUsage,
-)
+from openai.types.chat.chat_completion import ChatCompletion, Choice, CompletionUsage


 class ChatCompletionObjectFactory:
     def create(
         id: str,
         model: str,
-        choices: List[OpenAIChatCompletionChoice] = [],
+        choices: List[Choice] = [],
         usage: Optional[
-            OpenAIChatCompletionUsage
-        ] = OpenAIChatCompletionUsage.default(),
+            CompletionUsage
+        ] = CompletionUsage(completion_tokens=-1, prompt_tokens=-1, total_tokens=-1),
         object: str = "chat.completion",
         system_fingerprint: str = "",
         created: int = None,
-    ) -> OpenAIChatCompletionObject:
-        return OpenAIChatCompletionObject(
+    ) -> ChatCompletion:
+        return ChatCompletion(
            id=id,
-            object=object,
-            created=created if created is not None else int(time.time()),
            model=model,
+            created=created or int(time.time()),
+            object=object,
            system_fingerprint=system_fingerprint,
            choices=choices,
            usage=usage,
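
A usage sketch of the factory after the switch to the SDK models (id, model, and message content are illustrative): when no token counts are available, usage falls back to a CompletionUsage filled with -1 sentinels.

from langchain_openai_api_bridge.chat_completion.chat_completion_object_factory import (
    ChatCompletionObjectFactory,
)
from openai.types.chat.chat_completion import ChatCompletionMessage, Choice

completion = ChatCompletionObjectFactory.create(
    id="chatcmpl-123",
    model="gpt-4o-mini",
    choices=[
        Choice(
            index=0,
            message=ChatCompletionMessage(role="assistant", content="Hello!"),
            finish_reason="stop",
        )
    ],
)
print(completion.usage)  # completion_tokens=-1 prompt_tokens=-1 total_tokens=-1
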
Lines changed: 21 additions & 34 deletions

@@ -1,56 +1,43 @@
+import time
+from langchain_core.messages import BaseMessage
+from langchain_openai.chat_models.base import _convert_message_to_dict
+from openai.types.chat.chat_completion import ChatCompletion, Choice, ChatCompletionMessage
+
 from langchain_openai_api_bridge.chat_completion.chat_completion_object_factory import (
     ChatCompletionObjectFactory,
 )
-from langchain_openai_api_bridge.chat_completion.content_adapter import (
-    to_string_content,
-)
-from langchain_openai_api_bridge.core.role_adapter import to_openai_role
-from langchain_openai_api_bridge.core.types.openai import (
-    OpenAIChatCompletionChoice,
-    OpenAIChatCompletionObject,
-    OpenAIChatMessage,
-)
-from langchain_core.messages import AIMessage
+from langchain_core.runnables.utils import Output


 class LangchainInvokeAdapter:
     def __init__(self, llm_model: str, system_fingerprint: str = ""):
         self.llm_model = llm_model
         self.system_fingerprint = system_fingerprint

-    def to_chat_completion_object(self, invoke_result) -> OpenAIChatCompletionObject:
-        message = self.__create_openai_chat_message(invoke_result)
-        id = self.__get_id(invoke_result)
+    def to_chat_completion_object(self, invoke_result: Output) -> ChatCompletion:
+        invoke_message = invoke_result if isinstance(invoke_result, BaseMessage) else invoke_result["messages"][-1]
+        message = self.__create_openai_chat_message(invoke_message)
+        id = self.__get_id(invoke_message)

         return ChatCompletionObjectFactory.create(
             id=id,
             model=self.llm_model,
+            created=int(time.time()),
+            object="chat.completion",
             system_fingerprint=self.system_fingerprint,
             choices=[
-                OpenAIChatCompletionChoice(
+                Choice(
                     index=0,
                     message=message,
-                    finish_reason="stop",
+                    finish_reason="tool_calls" if "tool_calls" in message else "stop",
                 )
-            ],
+            ]
         )

-    def __get_id(self, invoke_result):
-        if isinstance(invoke_result, AIMessage):
-            return invoke_result.id
+    def __create_openai_chat_message(self, message: BaseMessage) -> ChatCompletionMessage:
+        message = _convert_message_to_dict(message)
+        message["role"] = "assistant"
+        return message

-        last_message = invoke_result["messages"][-1]
-        return last_message.id
-
-    def __create_openai_chat_message(self, invoke_result) -> OpenAIChatMessage:
-        if isinstance(invoke_result, AIMessage):
-            return OpenAIChatMessage(
-                role=to_openai_role(invoke_result.type),
-                content=to_string_content(content=invoke_result.content),
-            )
-
-        last_message = invoke_result["messages"][-1]
-        return OpenAIChatMessage(
-            role=to_openai_role(last_message.type),
-            content=to_string_content(content=last_message.content),
-        )
+    def __get_id(self, message: BaseMessage):
+        return message.id or ""
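
A minimal sketch of the new invoke path (model name and message values are made up; assumes the LangchainInvokeAdapter class above is in scope): an AIMessage carrying tool_calls comes back as a chat.completion whose finish_reason is "tool_calls".

from langchain_core.messages import AIMessage

adapter = LangchainInvokeAdapter(llm_model="gpt-4o-mini")
completion = adapter.to_chat_completion_object(
    AIMessage(
        content="",
        id="run-1",
        tool_calls=[{"name": "get_weather", "args": {"city": "Paris"}, "id": "call_1"}],
    )
)
print(completion.choices[0].finish_reason)  # tool_calls
print(completion.id)                        # run-1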

langchain_openai_api_bridge/chat_completion/langchain_stream_adapter.py

Lines changed: 13 additions & 6 deletions

@@ -8,9 +8,7 @@
 from langchain_openai_api_bridge.chat_completion.chat_completion_chunk_object_factory import (
     create_final_chat_completion_chunk_object,
 )
-from langchain_openai_api_bridge.core.types.openai import (
-    OpenAIChatCompletionChunkObject,
-)
+from openai.types.chat.chat_completion_chunk import ChatCompletionChunk


 class LangchainStreamAdapter:
@@ -23,22 +21,31 @@ async def ato_chat_completion_chunk_stream(
         astream_event: AsyncIterator[StreamEvent],
         id: str = "",
         event_adapter=lambda event: None,
-    ) -> AsyncIterator[OpenAIChatCompletionChunkObject]:
+    ) -> AsyncIterator[ChatCompletionChunk]:
         if id == "":
             id = str(uuid.uuid4())
+
+        is_function_call_prev = is_function_call = False
+        role = "assistant"
         async for event in astream_event:
             custom_event = event_adapter(event)
             event_to_process = custom_event if custom_event is not None else event
             kind = event_to_process["event"]
             if kind == "on_chat_model_stream" or custom_event is not None:
-                yield to_openai_chat_completion_chunk_object(
+                chat_completion_chunk = to_openai_chat_completion_chunk_object(
                     event=event_to_process,
                     id=id,
                     model=self.llm_model,
                     system_fingerprint=self.system_fingerprint,
+                    role=role,
                 )
+                role = None
+                yield chat_completion_chunk
+                is_function_call = is_function_call or any(choice.delta.function_call for choice in chat_completion_chunk.choices)
+            elif kind == "on_chat_model_end":
+                is_function_call_prev, is_function_call = is_function_call, False

         stop_chunk = create_final_chat_completion_chunk_object(
-            id=id, model=self.llm_model
+            id=id, model=self.llm_model, finish_reason="tool_calls" if is_function_call_prev else "stop"
         )
         yield stop_chunk
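
Put together, the stream now looks roughly like the sequence below (an illustrative sketch, not captured output): the role is only set on the first delta, function-call fragments stream as function_call deltas, and the closing chunk's finish_reason reflects whether the model ended on a function call.

# Simplified choices from successive chunks of one streamed function call.
streamed_choices = [
    {"index": 0, "delta": {"role": "assistant", "content": "",
                           "function_call": {"name": "get_weather", "arguments": ""}},
     "finish_reason": None},
    {"index": 0, "delta": {"function_call": {"arguments": '{"city": "Paris"}'}},
     "finish_reason": None},
    {"index": 0, "delta": {}, "finish_reason": "tool_calls"},
]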

langchain_openai_api_bridge/core/create_agent_dto.py

Lines changed: 3 additions & 0 deletions

@@ -1,5 +1,6 @@
 from typing import Optional
 from pydantic import BaseModel
+from openai.types.chat import ChatCompletionToolChoiceOptionParam, ChatCompletionToolParam


 class CreateAgentDto(BaseModel):
@@ -9,3 +10,5 @@ class CreateAgentDto(BaseModel):
     max_tokens: Optional[int] = None
     assistant_id: Optional[str] = ""
     thread_id: Optional[str] = ""
+    tools: list[ChatCompletionToolParam] = []
+    tool_choice: ChatCompletionToolChoiceOptionParam = "none"
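
An illustrative value for the new tools field (the weather tool is made up), shaped as the OpenAI SDK's ChatCompletionToolParam; a request would typically pair it with tool_choice="auto":

from openai.types.chat import ChatCompletionToolParam

weather_tool: ChatCompletionToolParam = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}
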
langchain_openai_api_bridge/core/types/openai/__init__.py

Lines changed: 0 additions & 12 deletions

@@ -1,19 +1,7 @@
-from .message import OpenAIChatMessage
 from .chat_completion import (
     OpenAIChatCompletionRequest,
-    OpenAIChatCompletionUsage,
-    OpenAIChatCompletionChoice,
-    OpenAIChatCompletionObject,
-    OpenAIChatCompletionChunkChoice,
-    OpenAIChatCompletionChunkObject,
 )

 __all__ = [
-    "OpenAIChatMessage",
     "OpenAIChatCompletionRequest",
-    "OpenAIChatCompletionUsage",
-    "OpenAIChatCompletionChoice",
-    "OpenAIChatCompletionObject",
-    "OpenAIChatCompletionChunkChoice",
-    "OpenAIChatCompletionChunkObject",
 ]
