
Commit d05946b

Merge pull request #469 from tisnik/lcore-466-llama-stack-version-in-info-endpoint
LCORE-466: Llama Stack version in /info endpoint
2 parents 8bd3479 + 08a4934 commit d05946b

File tree

6 files changed: +158, −21 lines


docs/openapi.json

Lines changed: 26 additions & 6 deletions

@@ -63,7 +63,15 @@
         }
       },
       "name": "Service name",
-      "version": "Service version"
+      "service_version": "Service version",
+      "llama_stack_version": "Llama Stack version"
+    },
+    "500": {
+      "description": "Internal Server Error",
+      "detail": {
+        "response": "Unable to connect to Llama Stack",
+        "cause": "Connection error."
+      }
     }
   }
 }
@@ -1606,28 +1614,40 @@
             "Lightspeed Stack"
           ]
         },
-        "version": {
+        "service_version": {
           "type": "string",
-          "title": "Version",
+          "title": "Service Version",
           "description": "Service version",
           "examples": [
             "0.1.0",
             "0.2.0",
             "1.0.0"
           ]
+        },
+        "llama_stack_version": {
+          "type": "string",
+          "title": "Llama Stack Version",
+          "description": "Llama Stack version",
+          "examples": [
+            "0.2.1",
+            "0.2.2",
+            "0.2.18"
+          ]
         }
       },
       "type": "object",
       "required": [
         "name",
-        "version"
+        "service_version",
+        "llama_stack_version"
       ],
       "title": "InfoResponse",
-      "description": "Model representing a response to an info request.\n\nAttributes:\n name: Service name.\n version: Service version.\n\nExample:\n ```python\n info_response = InfoResponse(\n name=\"Lightspeed Stack\",\n version=\"1.0.0\",\n )\n ```",
+      "description": "Model representing a response to an info request.\n\nAttributes:\n name: Service name.\n service_version: Service version.\n llama_stack_version: Llama Stack version.\n\nExample:\n ```python\n info_response = InfoResponse(\n name=\"Lightspeed Stack\",\n service_version=\"1.0.0\",\n llama_stack_version=\"0.2.18\",\n )\n ```",
       "examples": [
         {
+          "llama_stack_version": "1.0.0",
          "name": "Lightspeed Stack",
-          "version": "1.0.0"
+          "service_version": "1.0.0"
         }
       ]
     },
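
For quick reference, a minimal client-side sketch of what the updated contract implies. The base URL and the `/v1/info` path are assumptions for illustration (only the response shapes come from the schema above): a 200 response now carries `service_version` and `llama_stack_version`, while a Llama Stack connection failure surfaces as a 500 whose detail has `response` and `cause` fields.

```python
# Minimal sketch, not the project's client code; base URL and path are assumed.
import requests

resp = requests.get("http://localhost:8080/v1/info", timeout=10)
if resp.status_code == 200:
    info = resp.json()
    # e.g. {"name": "Lightspeed Stack", "service_version": "1.0.0",
    #       "llama_stack_version": "0.2.18"}
    print(info["name"], info["service_version"], info["llama_stack_version"])
elif resp.status_code == 500:
    detail = resp.json()["detail"]
    # e.g. {"response": "Unable to connect to Llama Stack", "cause": "Connection error."}
    print(f'{detail["response"]}: {detail["cause"]}')
```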

docs/openapi.md

Lines changed: 7 additions & 3 deletions

@@ -48,6 +48,7 @@ Returns:
 | Status Code | Description | Component |
 |-------------|-------------|-----------|
 | 200 | Successful Response | [InfoResponse](#inforesponse) |
+| 500 | Internal Server Error | |
 ## GET `/v1/models`
 
 > **Models Endpoint Handler**
@@ -882,21 +883,24 @@ Model representing a response to an info request.
 
 Attributes:
     name: Service name.
-    version: Service version.
+    service_version: Service version.
+    llama_stack_version: Llama Stack version.
 
 Example:
     ```python
     info_response = InfoResponse(
         name="Lightspeed Stack",
-        version="1.0.0",
+        service_version="1.0.0",
+        llama_stack_version="0.2.18",
     )
     ```
 
 
 | Field | Type | Description |
 |-------|------|-------------|
 | name | string | Service name |
-| version | string | Service version |
+| service_version | string | Service version |
+| llama_stack_version | string | Llama Stack version |
 
 
 ## JsonPathOperator

docs/output.md

Lines changed: 7 additions & 3 deletions

@@ -48,6 +48,7 @@ Returns:
 | Status Code | Description | Component |
 |-------------|-------------|-----------|
 | 200 | Successful Response | [InfoResponse](#inforesponse) |
+| 500 | Internal Server Error | |
 ## GET `/v1/models`
 
 > **Models Endpoint Handler**
@@ -872,21 +873,24 @@ Model representing a response to an info request.
 
 Attributes:
     name: Service name.
-    version: Service version.
+    service_version: Service version.
+    llama_stack_version: Llama Stack version.
 
 Example:
     ```python
     info_response = InfoResponse(
         name="Lightspeed Stack",
-        version="1.0.0",
+        service_version="1.0.0",
+        llama_stack_version="0.2.18",
    )
     ```
 
 
 | Field | Type | Description |
 |-------|------|-------------|
 | name | string | Service name |
-| version | string | Service version |
+| service_version | string | Service version |
+| llama_stack_version | string | Llama Stack version |
 
 
 ## JsonPathOperator

src/app/endpoints/info.py

Lines changed: 32 additions & 3 deletions

@@ -3,13 +3,15 @@
 import logging
 from typing import Annotated, Any
 
-from fastapi import APIRouter, Request
+from fastapi import APIRouter, HTTPException, Request, status
 from fastapi import Depends
+from llama_stack_client import APIConnectionError
 
 from auth.interface import AuthTuple
 from auth import get_auth_dependency
 from authorization.middleware import authorize
 from configuration import configuration
+from client import AsyncLlamaStackClientHolder
 from models.config import Action
 from models.responses import InfoResponse
 from version import __version__
@@ -23,7 +25,14 @@
 get_info_responses: dict[int | str, dict[str, Any]] = {
     200: {
         "name": "Service name",
-        "version": "Service version",
+        "service_version": "Service version",
+        "llama_stack_version": "Llama Stack version",
+    },
+    500: {
+        "detail": {
+            "response": "Unable to connect to Llama Stack",
+            "cause": "Connection error.",
+        }
     },
 }
 
@@ -49,4 +58,24 @@ async def info_endpoint_handler(
     # Nothing interesting in the request
     _ = request
 
-    return InfoResponse(name=configuration.configuration.name, version=__version__)
+    try:
+        # try to get Llama Stack client
+        client = AsyncLlamaStackClientHolder().get_client()
+        # retrieve version
+        llama_stack_version_object = await client.inspect.version()
+        llama_stack_version = llama_stack_version_object.version
+        return InfoResponse(
+            name=configuration.configuration.name,
+            service_version=__version__,
+            llama_stack_version=llama_stack_version,
+        )
+    # connection to Llama Stack server
+    except APIConnectionError as e:
+        logger.error("Unable to connect to Llama Stack: %s", e)
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail={
+                "response": "Unable to connect to Llama Stack",
+                "cause": str(e),
+            },
+        ) from e
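
The handler relies on `client.inspect.version()` from `llama_stack_client`. As a standalone sketch of that same lookup outside the service, with an illustrative base URL that is not part of this change:

```python
# Standalone sketch of the version lookup the handler performs; the base URL
# is illustrative only.
import asyncio

from llama_stack_client import APIConnectionError, AsyncLlamaStackClient


async def main() -> None:
    client = AsyncLlamaStackClient(base_url="http://localhost:8321")
    try:
        version_info = await client.inspect.version()
        print("Llama Stack version:", version_info.version)
    except APIConnectionError as err:
        print("Unable to connect to Llama Stack:", err)


asyncio.run(main())
```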

src/models/responses.py

Lines changed: 12 additions & 4 deletions

@@ -84,13 +84,15 @@ class InfoResponse(BaseModel):
 
     Attributes:
         name: Service name.
-        version: Service version.
+        service_version: Service version.
+        llama_stack_version: Llama Stack version.
 
     Example:
         ```python
         info_response = InfoResponse(
             name="Lightspeed Stack",
-            version="1.0.0",
+            service_version="1.0.0",
+            llama_stack_version="0.2.18",
         )
         ```
     """
@@ -100,18 +102,24 @@ class InfoResponse(BaseModel):
         examples=["Lightspeed Stack"],
     )
 
-    version: str = Field(
+    service_version: str = Field(
         description="Service version",
         examples=["0.1.0", "0.2.0", "1.0.0"],
     )
 
+    llama_stack_version: str = Field(
+        description="Llama Stack version",
+        examples=["0.2.1", "0.2.2", "0.2.18"],
+    )
+
     # provides examples for /docs endpoint
     model_config = {
         "json_schema_extra": {
             "examples": [
                 {
                     "name": "Lightspeed Stack",
-                    "version": "1.0.0",
+                    "service_version": "1.0.0",
+                    "llama_stack_version": "1.0.0",
                 }
             ]
         }
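
A minimal sketch of how the updated model serializes, mirroring the docstring example above and assuming Pydantic v2 (which the `model_config` dict style suggests):

```python
# Mirrors the docstring example; assumes Pydantic v2 for model_dump_json().
from models.responses import InfoResponse

info_response = InfoResponse(
    name="Lightspeed Stack",
    service_version="1.0.0",
    llama_stack_version="0.2.18",
)
print(info_response.model_dump_json(indent=2))
# {
#   "name": "Lightspeed Stack",
#   "service_version": "1.0.0",
#   "llama_stack_version": "0.2.18"
# }
```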

tests/unit/app/endpoints/test_info.py

Lines changed: 74 additions & 2 deletions

@@ -1,7 +1,10 @@
 """Unit tests for the /info REST API endpoint."""
 
 import pytest
-from fastapi import Request
+from fastapi import Request, HTTPException, status
+
+from llama_stack_client import APIConnectionError
+from llama_stack_client.types import VersionInfo
 
 from app.endpoints.info import info_endpoint_handler
 from configuration import AppConfig
@@ -11,6 +14,9 @@
 @pytest.mark.asyncio
 async def test_info_endpoint(mocker):
     """Test the info endpoint handler."""
+    mock_authorization_resolvers(mocker)
+
+    # configuration for tests
     config_dict = {
         "name": "foo",
         "service": {
@@ -36,6 +42,14 @@ async def test_info_endpoint(mocker):
     cfg = AppConfig()
     cfg.init_from_dict(config_dict)
 
+    # Mock the LlamaStack client
+    mock_client = mocker.AsyncMock()
+    mock_client.inspect.version.return_value = VersionInfo(version="0.1.2")
+    mock_lsc = mocker.patch("client.AsyncLlamaStackClientHolder.get_client")
+    mock_lsc.return_value = mock_client
+    mock_config = mocker.Mock()
+    mocker.patch("app.endpoints.models.configuration", mock_config)
+
     # Mock configuration
     mocker.patch("configuration.configuration", cfg)
 
@@ -50,4 +64,62 @@ async def test_info_endpoint(mocker):
     response = await info_endpoint_handler(auth=auth, request=request)
     assert response is not None
     assert response.name is not None
-    assert response.version is not None
+    assert response.service_version is not None
+    assert response.llama_stack_version == "0.1.2"
+
+
+@pytest.mark.asyncio
+async def test_info_endpoint_connection_error(mocker):
+    """Test the info endpoint handler."""
+    mock_authorization_resolvers(mocker)
+
+    # configuration for tests
+    config_dict = {
+        "name": "foo",
+        "service": {
+            "host": "localhost",
+            "port": 8080,
+            "auth_enabled": False,
+            "workers": 1,
+            "color_log": True,
+            "access_log": True,
+        },
+        "llama_stack": {
+            "api_key": "xyzzy",
+            "url": "http://x.y.com:1234",
+            "use_as_library_client": False,
+        },
+        "user_data_collection": {
+            "feedback_enabled": False,
+        },
+        "customization": None,
+        "authorization": {"access_rules": []},
+        "authentication": {"module": "noop"},
+    }
+    cfg = AppConfig()
+    cfg.init_from_dict(config_dict)
+
+    # Mock the LlamaStack client
+    mock_client = mocker.AsyncMock()
+    mock_client.inspect.version.side_effect = APIConnectionError(request=None)
+    mock_lsc = mocker.patch("client.AsyncLlamaStackClientHolder.get_client")
+    mock_lsc.return_value = mock_client
+    mock_config = mocker.Mock()
+    mocker.patch("app.endpoints.models.configuration", mock_config)
+
+    # Mock configuration
+    mocker.patch("configuration.configuration", cfg)
+
+    mock_authorization_resolvers(mocker)
+
+    request = Request(
+        scope={
+            "type": "http",
+        }
+    )
+    auth = ("test_user", "token", {})
+
+    with pytest.raises(HTTPException) as e:
+        await info_endpoint_handler(auth=auth, request=request)
+    assert e.value.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
+    assert e.detail["response"] == "Unable to connect to Llama Stack"
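
The unit tests above assert on the raised `HTTPException` object directly; over HTTP, FastAPI serializes that exception into a JSON body under a `detail` key. A hedged sketch with a throwaway FastAPI app (not the real Lightspeed Stack application) shows the wire format that the 500 entry in `get_info_responses` documents:

```python
# Toy app, only to show how the HTTPException detail is serialized over HTTP;
# it is not the real Lightspeed Stack application.
from fastapi import FastAPI, HTTPException, status
from fastapi.testclient import TestClient

app = FastAPI()


@app.get("/info")
def failing_info() -> None:
    # Mirrors the handler's error path when Llama Stack is unreachable.
    raise HTTPException(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        detail={
            "response": "Unable to connect to Llama Stack",
            "cause": "Connection error.",
        },
    )


client = TestClient(app)
response = client.get("/info")
assert response.status_code == 500
assert response.json()["detail"]["response"] == "Unable to connect to Llama Stack"
```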
