# Adapted from
# https://github.com/openai/openai-python/blob/a52463c93215a09f9a142e25c975935523d15c10/src/openai/resources/chat/completions/completions.py#L238

def create(
    messages: Iterable[ChatCompletionMessageParam],
    model: Union[str, ChatModel],
    audio: Optional[ChatCompletionAudioParam] | NotGiven = NOT_GIVEN,
    frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN,
    function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
    functions: Iterable[completion_create_params.Function] | NotGiven = NOT_GIVEN,
    logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN,
    logprobs: Optional[bool] | NotGiven = NOT_GIVEN,
    max_completion_tokens: Optional[int] | NotGiven = NOT_GIVEN,
    max_tokens: Optional[int] | NotGiven = NOT_GIVEN,
    metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
    modalities: Optional[List[Literal['text', 'audio']]] | NotGiven = NOT_GIVEN,
    n: Optional[int] | NotGiven = NOT_GIVEN,
    parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
    prediction: Optional[ChatCompletionPredictionContentParam] | NotGiven = NOT_GIVEN,
    presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
    prompt_cache_key: str | NotGiven = NOT_GIVEN,
    reasoning_effort: Optional[ReasoningEffort] | NotGiven = NOT_GIVEN,
    response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
    safety_identifier: str | NotGiven = NOT_GIVEN,
    seed: Optional[int] | NotGiven = NOT_GIVEN,
    service_tier: Optional[Literal['auto', 'default', 'flex', 'scale', 'priority']] | NotGiven = NOT_GIVEN,
    stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN,
    store: Optional[bool] | NotGiven = NOT_GIVEN,
    stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
    stream_options: Optional[ChatCompletionStreamOptionsParam] | NotGiven = NOT_GIVEN,
    temperature: Optional[float] | NotGiven = NOT_GIVEN,
    tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
    tools: Iterable[ChatCompletionToolUnionParam] | NotGiven = NOT_GIVEN,
    top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
    top_p: Optional[float] | NotGiven = NOT_GIVEN,
    user: str | NotGiven = NOT_GIVEN,
    verbosity: Optional[Literal['low', 'medium', 'high']] | NotGiven = NOT_GIVEN,
    web_search_options: completion_create_params.WebSearchOptions | NotGiven = NOT_GIVEN,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ChatCompletion:
    """
    Create a model response for the given chat conversation.

    The docstring of the 3rd input arg, `audio`, has a comment "# noqa: LN002".
    For version 0.7.2 or older, this would trigger a false positive DOC105,
    because there would be SyntaxError when parsing the whole type hint
    (Optional[ChatCompletionAudioParam] | NotGiven, default=NOT_GIVEN # noqa: LN002)

    In version 0.7.3 or later, this bug is fixed.

    Parameters
    ----------
    messages : Iterable[ChatCompletionMessageParam]
        Placeholder text
    model : Union[str, ChatModel]
        Placeholder text
    audio : Optional[ChatCompletionAudioParam] | NotGiven, default=NOT_GIVEN # noqa: LN002
        Placeholder text
    frequency_penalty : Optional[float] | NotGiven, default=NOT_GIVEN
        Placeholder text
    function_call : completion_create_params.FunctionCall | NotGiven, default=NOT_GIVEN
        Placeholder text
    functions : Iterable[completion_create_params.Function] | NotGiven, default=NOT_GIVEN
        Placeholder text
    logit_bias : Optional[Dict[str, int]] | NotGiven, default=NOT_GIVEN
        Placeholder text
    logprobs : Optional[bool] | NotGiven, default=NOT_GIVEN
        Placeholder text
    max_completion_tokens : Optional[int] | NotGiven, default=NOT_GIVEN
        Placeholder text
    max_tokens : Optional[int] | NotGiven, default=NOT_GIVEN
        Placeholder text
    metadata : Optional[Metadata] | NotGiven, default=NOT_GIVEN
        Placeholder text
    modalities : Optional[List[Literal["text", "audio"]]] | NotGiven, default=NOT_GIVEN
        Placeholder text
    n : Optional[int] | NotGiven, default=NOT_GIVEN
        Placeholder text
    parallel_tool_calls : bool | NotGiven, default=NOT_GIVEN
        Placeholder text
    prediction : Optional[ChatCompletionPredictionContentParam] | NotGiven, default=NOT_GIVEN
        Placeholder text
    presence_penalty : Optional[float] | NotGiven, default=NOT_GIVEN
        Placeholder text
    prompt_cache_key : str | NotGiven, default=NOT_GIVEN
        Placeholder text
    reasoning_effort : Optional[ReasoningEffort] | NotGiven, default=NOT_GIVEN
        Placeholder text
    response_format : completion_create_params.ResponseFormat | NotGiven, default=NOT_GIVEN
        Placeholder text
    safety_identifier : str | NotGiven, default=NOT_GIVEN
        Placeholder text
    seed : Optional[int] | NotGiven, default=NOT_GIVEN
        Placeholder text
    service_tier : Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven, default=NOT_GIVEN
        Placeholder text
    stop : Union[Optional[str], SequenceNotStr[str], None] | NotGiven, default=NOT_GIVEN
        Placeholder text
    store : Optional[bool] | NotGiven, default=NOT_GIVEN
        Placeholder text
    stream : Optional[Literal[False]] | NotGiven, default=NOT_GIVEN
        Placeholder text
    stream_options : Optional[ChatCompletionStreamOptionsParam] | NotGiven, default=NOT_GIVEN
        Placeholder text
    temperature : Optional[float] | NotGiven, default=NOT_GIVEN
        Placeholder text
    tool_choice : ChatCompletionToolChoiceOptionParam | NotGiven, default=NOT_GIVEN
        Placeholder text
    tools : Iterable[ChatCompletionToolUnionParam] | NotGiven, default=NOT_GIVEN
        Placeholder text
    top_logprobs : Optional[int] | NotGiven, default=NOT_GIVEN
        Placeholder text
    top_p : Optional[float] | NotGiven, default=NOT_GIVEN
        Placeholder text
    user : str | NotGiven, default=NOT_GIVEN
        Placeholder text
    verbosity : Optional[Literal["low", "medium", "high"]] | NotGiven, default=NOT_GIVEN
        Placeholder text
    web_search_options : completion_create_params.WebSearchOptions | NotGiven, default=NOT_GIVEN
        Placeholder text
    extra_headers : Headers | None, default=None
        Placeholder text
    extra_query : Query | None, default=None
        Placeholder text
    extra_body : Body | None, default=None
        Placeholder text
    timeout : float | httpx.Timeout | None | NotGiven, default=NOT_GIVEN
        Placeholder text

    Returns
    -------
    ChatCompletion
        The chat completion response
    """
    pass