Skip to content

Commit 1cfb88b

Browse files
committed
test: Add chat completion function call functional test
1 parent d104dd0 commit 1cfb88b

File tree

2 files changed

+192
-0
lines changed

2 files changed

+192
-0
lines changed
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
from fastapi import FastAPI
2+
from fastapi.middleware.cors import CORSMiddleware
3+
from dotenv import load_dotenv, find_dotenv
4+
import uvicorn
5+
6+
from langchain_openai_api_bridge.core.create_agent_dto import CreateAgentDto
7+
from langchain_openai_api_bridge.fastapi.langchain_openai_api_bridge_fastapi import (
8+
LangchainOpenaiApiBridgeFastAPI,
9+
)
10+
from langchain_openai import ChatOpenAI
11+
12+
# Load environment variables (e.g. OPENAI_API_KEY) from a .env file if one exists.
_ = load_dotenv(find_dotenv())


app = FastAPI(
    title="Langchain Agent OpenAI API Bridge",
    version="1.0",
    description="OpenAI API exposing langchain agent",
)

# NOTE(review): fully-open CORS (wildcard origins/methods/headers together with
# allow_credentials) is acceptable for this functional-test server only — do not
# copy this configuration into production code.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
)
29+
30+
31+
def create_agent(dto: CreateAgentDto):
    """Build a tool-bound ChatOpenAI runnable from the incoming request DTO.

    Called by the bridge once per chat-completion request; the DTO carries the
    model name, sampling settings, API key and OpenAI-format tool definitions.
    """
    # Explicit None check: the original `dto.temperature or 0.7` would silently
    # replace a legitimate temperature of 0 (fully deterministic sampling)
    # with the 0.7 default, because 0 is falsy.
    temperature = dto.temperature if dto.temperature is not None else 0.7
    llm = ChatOpenAI(
        temperature=temperature,
        model=dto.model,
        max_tokens=dto.max_tokens,
        api_key=dto.api_key,
    )
    # Attach the request's tool definitions so the model can emit tool calls.
    return llm.bind_tools(dto.tools)
39+
40+
41+
# Mount OpenAI-compatible chat-completion routes under
# /my-custom-path/openai/v1/... — the custom prefix is what the functional
# tests point the OpenAI SDK's base_url at.
bridge = LangchainOpenaiApiBridgeFastAPI(app=app, agent_factory_provider=create_agent)
bridge.bind_openai_chat_completion(prefix="/my-custom-path")

if __name__ == "__main__":
    # Run standalone for manual testing; the test suite imports `app` directly.
    uvicorn.run(app, host="localhost")
Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,147 @@
1+
import json
2+
import pytest
3+
from openai import OpenAI
4+
from openai.lib.streaming.chat import ChatCompletionStreamState
5+
from fastapi.testclient import TestClient
6+
from server_openai_function_call import app
7+
8+
9+
# In-process ASGI test client; handed to the OpenAI SDK as its HTTP transport
# below, so requests never leave the test process.
test_api = TestClient(app)
10+
11+
12+
@pytest.fixture
def openai_client():
    """OpenAI SDK client whose HTTP transport is the in-process test client."""
    client = OpenAI(
        base_url="http://testserver/my-custom-path/openai/v1",
        http_client=test_api,
    )
    return client
18+
19+
20+
def test_chat_completion_function_call_weather(openai_client: OpenAI):
    """A forced tool choice must finish with a parseable get_weather call."""
    weather_function = {
        "name": "get_weather",
        "description": "Get current temperature for a given location.",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "City and country e.g. Bogotá, Colombia",
                }
            },
            "required": ["location"],
            "additionalProperties": False,
        },
        "strict": True,
    }

    response = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {
                "role": "user",
                "content": "What is the weather like in London today?",
            }
        ],
        tools=[{"type": "function", "function": weather_function}],
        tool_choice={"type": "function", "function": {"name": "get_weather"}},
    )

    choice = response.choices[0]
    assert choice.finish_reason == "tool_calls"

    tool_call = choice.message.tool_calls[0]
    assert tool_call.function.name == "get_weather"

    parsed_args = json.loads(tool_call.function.arguments)
    assert "london" in parsed_args["location"].lower()
60+
61+
62+
def test_chat_completion_function_call_weather_stream(openai_client: OpenAI):
    """Streamed chunks, once accumulated, must contain the same tool call."""
    tools = [{
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get current temperature for a given location.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City and country e.g. Bogotá, Colombia",
                    }
                },
                "required": ["location"],
                "additionalProperties": False,
            },
            "strict": True,
        },
    }]

    chunks = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {
                "role": "user",
                "content": "What is the weather like in London today?",
            }
        ],
        tools=tools,
        tool_choice={"type": "function", "function": {"name": "get_weather"}},
        stream=True,
    )

    # Re-assemble the streamed deltas into a complete ChatCompletion.
    state = ChatCompletionStreamState()
    for chunk in chunks:
        state.handle_chunk(chunk)

    chat_completion = state.get_final_completion()

    assert chat_completion.choices[0].finish_reason == "tool_calls"
    message = chat_completion.choices[0].message
    assert message.role == "assistant"
    # Inspect `tool_calls`, matching the non-streaming test above: with the
    # tools API the accumulated completion reports calls there, while the
    # legacy `function_call` field (asserted previously) remains None.
    assert message.tool_calls[0].function.name == "get_weather"

    args = json.loads(message.tool_calls[0].function.arguments)
    assert "london" in args["location"].lower()
110+
111+
112+
def test_chat_completion_function_call_not_called(openai_client: OpenAI):
    """With tool_choice="none" the model must answer normally (finish: stop)."""
    weather_tool = {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get current temperature for a given location.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City and country e.g. Bogotá, Colombia",
                    }
                },
                "required": ["location"],
                "additionalProperties": False,
            },
            "strict": True,
        },
    }

    chat_completion = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Hello!"}],
        tools=[weather_tool],
        tool_choice="none",
    )

    assert chat_completion.choices[0].finish_reason == "stop"

0 commit comments

Comments
 (0)