Skip to content

Commit 60c0b87

Browse files
committed
add llm__achat to all (but one) supported providers in llm__chat
1 parent 842d99f commit 60c0b87

File tree

28 files changed

+1649
-2
lines changed

28 files changed

+1649
-2
lines changed

edenai_apis/apis/amazon/amazon_llm_api.py

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,3 +88,84 @@ def llm__chat(
8888
**kwargs,
8989
)
9090
return response
91+
92+
async def llm__achat(
    self,
    # NOTE: was `messages: List = []` — mutable default replaced by a None
    # sentinel; an empty list is substituted at call time instead.
    messages: Optional[List] = None,
    model: Optional[str] = None,
    # Optional OpenAI params: see https://platform.openai.com/docs/api-reference/chat/create
    timeout: Optional[Union[float, str, httpx.Timeout]] = None,
    temperature: Optional[float] = None,
    top_p: Optional[float] = None,
    n: Optional[int] = None,
    stream: Optional[bool] = None,
    stream_options: Optional[dict] = None,
    stop: Optional[str] = None,
    # was `Optional[any]` — lowercase `any` is the builtin function, not a type
    stop_sequences: Optional[Union[str, list]] = None,
    max_tokens: Optional[int] = None,
    presence_penalty: Optional[float] = None,
    frequency_penalty: Optional[float] = None,
    logit_bias: Optional[dict] = None,
    modalities: Optional[List[Literal["text", "audio", "image"]]] = None,
    audio: Optional[Dict] = None,
    # openai v1.0+ new params
    response_format: Optional[
        Union[dict, Type[BaseModel]]
    ] = None,  # Structured outputs
    seed: Optional[int] = None,
    tools: Optional[List] = None,
    tool_choice: Optional[Union[str, dict]] = None,
    logprobs: Optional[bool] = None,
    top_logprobs: Optional[int] = None,
    parallel_tool_calls: Optional[bool] = None,
    deployment_id=None,
    extra_headers: Optional[dict] = None,
    # soon to be deprecated params by OpenAI -> This should be replaced by tools
    functions: Optional[List] = None,
    function_call: Optional[str] = None,
    base_url: Optional[str] = None,
    api_version: Optional[str] = None,
    api_key: Optional[str] = None,
    model_list: Optional[list] = None,  # pass in a list of api_base,keys, etc.
    drop_invalid_params: bool = True,  # If true, all the invalid parameters will be ignored (dropped) before sending to the model
    user: str | None = None,
    # Optional parameters
    **kwargs,
) -> ChatDataClass:
    """Async chat completion: forward every parameter to the underlying
    LLM client's ``acompletion`` and return its result unchanged.

    This is the async counterpart of ``llm__chat``; it performs no
    provider-specific transformation — it is a pure pass-through wrapper
    around ``self.llm_client.acompletion``. The return annotation is the
    project's ``ChatDataClass``; the actual value is whatever the client
    yields (presumably already normalized by the client — TODO confirm).
    """
    response = await self.llm_client.acompletion(
        # substitute the sentinel so the client always receives a list
        messages=messages if messages is not None else [],
        model=model,
        timeout=timeout,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        stream_options=stream_options,
        stop=stop,
        stop_sequences=stop_sequences,
        max_tokens=max_tokens,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        logit_bias=logit_bias,
        response_format=response_format,
        seed=seed,
        tools=tools,
        tool_choice=tool_choice,
        logprobs=logprobs,
        top_logprobs=top_logprobs,
        parallel_tool_calls=parallel_tool_calls,
        deployment_id=deployment_id,
        extra_headers=extra_headers,
        functions=functions,
        function_call=function_call,
        base_url=base_url,
        api_version=api_version,
        api_key=api_key,
        model_list=model_list,
        drop_invalid_params=drop_invalid_params,
        user=user,
        modalities=modalities,
        audio=audio,
        **kwargs,
    )
    return response
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
{
2+
"id": "chatcmpl-B71qqu4Y7m1ZVuF5YGqxz7LXzgf0Y",
3+
"created": 1741015112,
4+
"model": "model",
5+
"object": "chat.completion",
6+
"system_fingerprint": "fp_7fcd609668",
7+
"choices": [
8+
{
9+
"finish_reason": "stop",
10+
"index": 0,
11+
"message": {
12+
"content": "Arrr, matey! What ye be seein in this here image is a grand pathway, made of wooden planks, weavin' its way through a lush and green landscape. The verdant grass sways in the gentle breeze, and the sky above be a brilliant blue, decorated with fluffy white clouds. Ye can spot trees and bushes on either side, makin' it a perfect setting for a stroll amongst nature. A peaceful place for a pirate at heart, aye!",
13+
"role": "assistant",
14+
"tool_calls": null,
15+
"function_call": null
16+
}
17+
}
18+
],
19+
"provider_time": 3692885792,
20+
"edenai_time": null,
21+
"usage": {
22+
"completion_tokens": 99,
23+
"prompt_tokens": 1170,
24+
"total_tokens": 1269,
25+
"completion_tokens_details": {
26+
"accepted_prediction_tokens": 0,
27+
"audio_tokens": 0,
28+
"reasoning_tokens": 0,
29+
"rejected_prediction_tokens": 0,
30+
"text_tokens": 99
31+
},
32+
"prompt_tokens_details": {
33+
"audio_tokens": 0,
34+
"cached_tokens": 1024,
35+
"text_tokens": null,
36+
"image_tokens": null
37+
}
38+
},
39+
"service_tier": "default",
40+
"cost": 0.0002349
41+
}

edenai_apis/apis/anthropic/anthropic_api.py

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -210,3 +210,84 @@ def llm__chat(
210210
**kwargs,
211211
)
212212
return response
213+
214+
async def llm__achat(
    self,
    # NOTE: was `messages: List = []` — mutable default replaced by a None
    # sentinel; an empty list is substituted at call time instead.
    messages: Optional[List] = None,
    model: Optional[str] = None,
    # Optional OpenAI params: see https://platform.openai.com/docs/api-reference/chat/create
    timeout: Optional[Union[float, str, httpx.Timeout]] = None,
    temperature: Optional[float] = None,
    top_p: Optional[float] = None,
    n: Optional[int] = None,
    stream: Optional[bool] = None,
    stream_options: Optional[dict] = None,
    stop: Optional[str] = None,
    # was `Optional[any]` — lowercase `any` is the builtin function, not a type
    stop_sequences: Optional[Union[str, list]] = None,
    max_tokens: Optional[int] = None,
    presence_penalty: Optional[float] = None,
    frequency_penalty: Optional[float] = None,
    logit_bias: Optional[dict] = None,
    modalities: Optional[List[Literal["text", "audio", "image"]]] = None,
    audio: Optional[Dict] = None,
    # openai v1.0+ new params
    response_format: Optional[
        Union[dict, Type[BaseModel]]
    ] = None,  # Structured outputs
    seed: Optional[int] = None,
    tools: Optional[List] = None,
    tool_choice: Optional[Union[str, dict]] = None,
    logprobs: Optional[bool] = None,
    top_logprobs: Optional[int] = None,
    parallel_tool_calls: Optional[bool] = None,
    deployment_id=None,
    extra_headers: Optional[dict] = None,
    # soon to be deprecated params by OpenAI -> This should be replaced by tools
    functions: Optional[List] = None,
    function_call: Optional[str] = None,
    base_url: Optional[str] = None,
    api_version: Optional[str] = None,
    api_key: Optional[str] = None,
    model_list: Optional[list] = None,  # pass in a list of api_base,keys, etc.
    drop_invalid_params: bool = True,  # If true, all the invalid parameters will be ignored (dropped) before sending to the model
    user: str | None = None,
    # Optional parameters
    **kwargs,
) -> ChatDataClass:
    """Async chat completion: forward every parameter to the underlying
    LLM client's ``acompletion`` and return its result unchanged.

    This is the async counterpart of ``llm__chat``; it performs no
    provider-specific transformation — it is a pure pass-through wrapper
    around ``self.llm_client.acompletion``. The return annotation is the
    project's ``ChatDataClass``; the actual value is whatever the client
    yields (presumably already normalized by the client — TODO confirm).
    """
    response = await self.llm_client.acompletion(
        # substitute the sentinel so the client always receives a list
        messages=messages if messages is not None else [],
        model=model,
        timeout=timeout,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        stream_options=stream_options,
        stop=stop,
        stop_sequences=stop_sequences,
        max_tokens=max_tokens,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        logit_bias=logit_bias,
        response_format=response_format,
        seed=seed,
        tools=tools,
        tool_choice=tool_choice,
        logprobs=logprobs,
        top_logprobs=top_logprobs,
        parallel_tool_calls=parallel_tool_calls,
        deployment_id=deployment_id,
        extra_headers=extra_headers,
        functions=functions,
        function_call=function_call,
        base_url=base_url,
        api_version=api_version,
        api_key=api_key,
        model_list=model_list,
        drop_invalid_params=drop_invalid_params,
        user=user,
        modalities=modalities,
        audio=audio,
        **kwargs,
    )
    return response
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
{
2+
"id": "chatcmpl-B71qqu4Y7m1ZVuF5YGqxz7LXzgf0Y",
3+
"created": 1741015112,
4+
"model": "model",
5+
"object": "chat.completion",
6+
"system_fingerprint": "fp_7fcd609668",
7+
"choices": [
8+
{
9+
"finish_reason": "stop",
10+
"index": 0,
11+
"message": {
12+
"content": "Arrr, matey! What ye be seein in this here image is a grand pathway, made of wooden planks, weavin' its way through a lush and green landscape. The verdant grass sways in the gentle breeze, and the sky above be a brilliant blue, decorated with fluffy white clouds. Ye can spot trees and bushes on either side, makin' it a perfect setting for a stroll amongst nature. A peaceful place for a pirate at heart, aye!",
13+
"role": "assistant",
14+
"tool_calls": null,
15+
"function_call": null
16+
}
17+
}
18+
],
19+
"provider_time": 3692885792,
20+
"edenai_time": null,
21+
"usage": {
22+
"completion_tokens": 99,
23+
"prompt_tokens": 1170,
24+
"total_tokens": 1269,
25+
"completion_tokens_details": {
26+
"accepted_prediction_tokens": 0,
27+
"audio_tokens": 0,
28+
"reasoning_tokens": 0,
29+
"rejected_prediction_tokens": 0,
30+
"text_tokens": 99
31+
},
32+
"prompt_tokens_details": {
33+
"audio_tokens": 0,
34+
"cached_tokens": 1024,
35+
"text_tokens": null,
36+
"image_tokens": null
37+
}
38+
},
39+
"service_tier": "default",
40+
"cost": 0.0002349
41+
}

edenai_apis/apis/deepseek/deepseek_api.py

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,3 +136,84 @@ def llm__chat(
136136
**kwargs,
137137
)
138138
return response
139+
140+
async def llm__achat(
    self,
    # NOTE: was `messages: List = []` — mutable default replaced by a None
    # sentinel; an empty list is substituted at call time instead.
    messages: Optional[List] = None,
    model: Optional[str] = None,
    # Optional OpenAI params: see https://platform.openai.com/docs/api-reference/chat/create
    timeout: Optional[Union[float, str, httpx.Timeout]] = None,
    temperature: Optional[float] = None,
    top_p: Optional[float] = None,
    n: Optional[int] = None,
    stream: Optional[bool] = None,
    stream_options: Optional[dict] = None,
    stop: Optional[str] = None,
    # was `Optional[any]` — lowercase `any` is the builtin function, not a type
    stop_sequences: Optional[Union[str, list]] = None,
    max_tokens: Optional[int] = None,
    presence_penalty: Optional[float] = None,
    frequency_penalty: Optional[float] = None,
    logit_bias: Optional[dict] = None,
    modalities: Optional[List[Literal["text", "audio", "image"]]] = None,
    audio: Optional[Dict] = None,
    # openai v1.0+ new params
    response_format: Optional[
        Union[dict, Type[BaseModel]]
    ] = None,  # Structured outputs
    seed: Optional[int] = None,
    tools: Optional[List] = None,
    tool_choice: Optional[Union[str, dict]] = None,
    logprobs: Optional[bool] = None,
    top_logprobs: Optional[int] = None,
    parallel_tool_calls: Optional[bool] = None,
    deployment_id=None,
    extra_headers: Optional[dict] = None,
    # soon to be deprecated params by OpenAI -> This should be replaced by tools
    functions: Optional[List] = None,
    function_call: Optional[str] = None,
    base_url: Optional[str] = None,
    api_version: Optional[str] = None,
    api_key: Optional[str] = None,
    model_list: Optional[list] = None,  # pass in a list of api_base,keys, etc.
    drop_invalid_params: bool = True,  # If true, all the invalid parameters will be ignored (dropped) before sending to the model
    user: str | None = None,
    # Optional parameters
    **kwargs,
) -> ChatDataClass:
    """Async chat completion: forward every parameter to the underlying
    LLM client's ``acompletion`` and return its result unchanged.

    This is the async counterpart of ``llm__chat``; it performs no
    provider-specific transformation — it is a pure pass-through wrapper
    around ``self.llm_client.acompletion``. The return annotation is the
    project's ``ChatDataClass``; the actual value is whatever the client
    yields (presumably already normalized by the client — TODO confirm).
    """
    response = await self.llm_client.acompletion(
        # substitute the sentinel so the client always receives a list
        messages=messages if messages is not None else [],
        # was `model=f"{model}"`, which turns a None model into the literal
        # string "None"; pass the value through unchanged instead
        model=model,
        timeout=timeout,
        temperature=temperature,
        top_p=top_p,
        n=n,
        stream=stream,
        stream_options=stream_options,
        stop=stop,
        stop_sequences=stop_sequences,
        max_tokens=max_tokens,
        presence_penalty=presence_penalty,
        frequency_penalty=frequency_penalty,
        logit_bias=logit_bias,
        response_format=response_format,
        seed=seed,
        tools=tools,
        tool_choice=tool_choice,
        logprobs=logprobs,
        top_logprobs=top_logprobs,
        parallel_tool_calls=parallel_tool_calls,
        deployment_id=deployment_id,
        extra_headers=extra_headers,
        functions=functions,
        function_call=function_call,
        base_url=base_url,
        api_version=api_version,
        api_key=api_key,
        model_list=model_list,
        drop_invalid_params=drop_invalid_params,
        user=user,
        modalities=modalities,
        audio=audio,
        **kwargs,
    )
    return response
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
{
2+
"id": "chatcmpl-B71qqu4Y7m1ZVuF5YGqxz7LXzgf0Y",
3+
"created": 1741015112,
4+
"model": "model",
5+
"object": "chat.completion",
6+
"system_fingerprint": "fp_7fcd609668",
7+
"choices": [
8+
{
9+
"finish_reason": "stop",
10+
"index": 0,
11+
"message": {
12+
"content": "Arrr, matey! What ye be seein in this here image is a grand pathway, made of wooden planks, weavin' its way through a lush and green landscape. The verdant grass sways in the gentle breeze, and the sky above be a brilliant blue, decorated with fluffy white clouds. Ye can spot trees and bushes on either side, makin' it a perfect setting for a stroll amongst nature. A peaceful place for a pirate at heart, aye!",
13+
"role": "assistant",
14+
"tool_calls": null,
15+
"function_call": null
16+
}
17+
}
18+
],
19+
"provider_time": 3692885792,
20+
"edenai_time": null,
21+
"usage": {
22+
"completion_tokens": 99,
23+
"prompt_tokens": 1170,
24+
"total_tokens": 1269,
25+
"completion_tokens_details": {
26+
"accepted_prediction_tokens": 0,
27+
"audio_tokens": 0,
28+
"reasoning_tokens": 0,
29+
"rejected_prediction_tokens": 0,
30+
"text_tokens": 99
31+
},
32+
"prompt_tokens_details": {
33+
"audio_tokens": 0,
34+
"cached_tokens": 1024,
35+
"text_tokens": null,
36+
"image_tokens": null
37+
}
38+
},
39+
"service_tier": "default",
40+
"cost": 0.0002349
41+
}

edenai_apis/apis/google/outputs/llm/achat_output.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
11
{
22
"id": "chatcmpl-B71qqu4Y7m1ZVuF5YGqxz7LXzgf0Y",
33
"created": 1741015112,
4-
"model": "gpt-4o-mini-2024-07-18",
4+
"model": "model",
55
"object": "chat.completion",
66
"system_fingerprint": "fp_7fcd609668",
77
"choices": [
88
{
99
"finish_reason": "stop",
1010
"index": 0,
1111
"message": {
12-
"content": "Arrr, matey! What ye be seein in this here image is a grand pathway, made of wooden planks, weavin' its way through a lush and green landscape. The verdant grass sways in the gentle breeze, and the sky above be a brilliant blue, decorated with fluffy white clouds. Ye can spot trees and bushes on either side, makin' it a perfect setting for a stroll amongst nature. A peaceful place for a pirate at heart, aye!",
12+
"content": "Arrr, matey! What ye be seein in this here image is a grand pathway, made of wooden planks, weavin' its way through a lush and green landscape. The verdant grass sways in the gentle breeze, and the sky above be a brilliant blue, decorated with fluffy white clouds. Ye can spot trees and bushes on either side, makin' it a perfect setting for a stroll amongst nature. A peaceful place for a pirate at heart, aye!",
1313
"role": "assistant",
1414
"tool_calls": null,
1515
"function_call": null

0 commit comments

Comments
 (0)