@@ -106,10 +106,145 @@ async def aconnect_httpx_sse(
 
 
 class ChatBaichuan(BaseChatModel):
-    """Baichuan chat models API by Baichuan Intelligent Technology.
+    """Baichuan chat model integration.
+
+    Setup:
+        To use, you should have the environment variable ``BAICHUAN_API_KEY`` set with
+        your API KEY.
+
+        .. code-block:: bash
+
+            export BAICHUAN_API_KEY="your-api-key"
+
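+        Alternatively (a minimal sketch, assuming the key is only read from the
+        process environment when the model is constructed), the variable can be
+        set from Python before instantiation:
+
+        .. code-block:: python
+
+            import os
+
+            os.environ["BAICHUAN_API_KEY"] = "your-api-key"
+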
+    Key init args — completion params:
+        model: Optional[str]
+            Name of Baichuan model to use.
+        max_tokens: Optional[int]
+            Max number of tokens to generate.
+        streaming: Optional[bool]
+            Whether to stream the results or not.
+        temperature: Optional[float]
+            Sampling temperature.
+        top_p: Optional[float]
+            Total probability mass of tokens to consider at each step (nucleus sampling).
+        top_k: Optional[int]
+            Number of highest-probability tokens to sample from at each step.
+
+    Key init args — client params:
+        api_key: Optional[str]
+            Baichuan API key. If not passed in, will be read from env var BAICHUAN_API_KEY.
+        base_url: Optional[str]
+            Base URL for API requests.
+
+    See full list of supported init args and their descriptions in the params section.
+
+    Instantiate:
+        .. code-block:: python
+
+            from langchain_community.chat_models import ChatBaichuan
+
+            chat = ChatBaichuan(
+                api_key=api_key,
+                model='Baichuan4',
+                # temperature=...,
+                # other params...
+            )
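+
+        A fuller sketch using the completion params listed above; the values are
+        illustrative assumptions, not recommended defaults:
+
+        .. code-block:: python
+
+            chat = ChatBaichuan(
+                api_key=api_key,
+                model='Baichuan4',
+                temperature=0.3,
+                top_p=0.85,
+                top_k=5,
+                max_tokens=1024,
+                streaming=False,
+            )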
 
-    For more information, see https://platform.baichuan-ai.com/docs/api
-    """
+    Invoke:
+        .. code-block:: python
+
+            # System prompt (Chinese): "You are a professional translator; translate
+            # the user's Chinese into English."  Human message: "I like programming."
+            messages = [
+                ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
+                ("human", "我喜欢编程。"),
+            ]
+            chat.invoke(messages)
+
+        .. code-block:: python
+
+            AIMessage(
+                content='I enjoy programming.',
+                response_metadata={
+                    'token_usage': {
+                        'prompt_tokens': 93,
+                        'completion_tokens': 5,
+                        'total_tokens': 98
+                    },
+                    'model': 'Baichuan4'
+                },
+                id='run-944ff552-6a93-44cf-a861-4e4d849746f9-0'
+            )
+
+    Stream:
+        .. code-block:: python
+
+            for chunk in chat.stream(messages):
+                print(chunk)
+
+        .. code-block:: python
+
+            content='I' id='run-f99fcd6f-dd31-46d5-be8f-0b6a22bf77d8'
+            content=' enjoy programming.' id='run-f99fcd6f-dd31-46d5-be8f-0b6a22bf77d8'
+
+        .. code-block:: python
+
+            stream = chat.stream(messages)
+            full = next(stream)
+            for chunk in stream:
+                full += chunk
+            full
+
+        .. code-block:: python
+
+            AIMessageChunk(
+                content='I like programming.',
+                id='run-74689970-dc31-461d-b729-3b6aa93508d2'
+            )
+
+    Async:
+        .. code-block:: python
+
+            await chat.ainvoke(messages)
+
+            # stream
+            # async for chunk in chat.astream(messages):
+            #     print(chunk)
+
+            # batch
+            # await chat.abatch([messages])
+
+        .. code-block:: python
+
+            AIMessage(
+                content='I enjoy programming.',
+                response_metadata={
+                    'token_usage': {
+                        'prompt_tokens': 93,
+                        'completion_tokens': 5,
+                        'total_tokens': 98
+                    },
+                    'model': 'Baichuan4'
+                },
+                id='run-952509ed-9154-4ff9-b187-e616d7ddfbba-0'
+            )
+
+    Response metadata:
+        .. code-block:: python
+
+            ai_msg = chat.invoke(messages)
+            ai_msg.response_metadata
+
+        .. code-block:: python
+
+            {
+                'token_usage': {
+                    'prompt_tokens': 93,
+                    'completion_tokens': 5,
+                    'total_tokens': 98
+                },
+                'model': 'Baichuan4'
+            }
+
+    """  # noqa: E501
 
     @property
     def lc_secrets(self) -> Dict[str, str]: