# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=useless-super-delegation
import datetime
from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload
from .. import _model_base
from .._model_base import rest_discriminator, rest_field
from ._enums import ChatRole
if TYPE_CHECKING:
from .. import models as _models
class ContentItem(_model_base.Model):
    """An abstract representation of a structured content item within a chat message.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ImageContentItem, AudioContentItem, TextContentItem

    :ivar type: The discriminated object type. Required. Default value is None.
    :vartype type: str
    """

    # Registry mapping each discriminator value to its subclass; populated by
    # _model_base when a subclass is declared with `discriminator="..."`.
    __mapping__: Dict[str, _model_base.Model] = {}
    type: str = rest_discriminator(name="type")
    """The discriminated object type. Required. Default value is None."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class AudioContentItem(ContentItem, discriminator="input_audio"):
    """A structured chat content item containing an audio content.

    :ivar type: The discriminated object type: always 'input_audio' for this type. Required.
     Default value is "input_audio".
    :vartype type: str
    :ivar input_audio: The details of the input audio. Required.
    :vartype input_audio: ~azure.ai.inference.models.InputAudio
    """

    # Narrows the inherited discriminator to this subclass's fixed value.
    type: Literal["input_audio"] = rest_discriminator(name="type")  # type: ignore
    """The discriminated object type: always 'input_audio' for this type. Required. Default value is
    \"input_audio\"."""
    input_audio: "_models.InputAudio" = rest_field()
    """The details of the input audio. Required."""

    @overload
    def __init__(
        self,
        *,
        input_audio: "_models.InputAudio",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The discriminator is fixed here; callers never pass `type` themselves.
        super().__init__(*args, type="input_audio", **kwargs)
class ChatChoice(_model_base.Model):
    """The representation of a single prompt completion as part of an overall chat completions
    request.
    Generally, ``n`` choices are generated per provided prompt with a default value of 1.
    Token limits and other settings may limit the number of choices generated.

    :ivar index: The ordered index associated with this chat completions choice. Required.
    :vartype index: int
    :ivar finish_reason: The reason that this chat completions choice completed its generation.
     Required. Known values are: "stop", "length", "content_filter", and "tool_calls".
    :vartype finish_reason: str or ~azure.ai.inference.models.CompletionsFinishReason
    :ivar message: The chat message for a given chat completions prompt. Required.
    :vartype message: ~azure.ai.inference.models.ChatResponseMessage
    """

    index: int = rest_field()
    """The ordered index associated with this chat completions choice. Required."""
    finish_reason: Union[str, "_models.CompletionsFinishReason"] = rest_field()
    """The reason that this chat completions choice completed its generation. Required. Known values
    are: \"stop\", \"length\", \"content_filter\", and \"tool_calls\"."""
    message: "_models.ChatResponseMessage" = rest_field()
    """The chat message for a given chat completions prompt. Required."""

    @overload
    def __init__(
        self,
        *,
        index: int,
        finish_reason: Union[str, "_models.CompletionsFinishReason"],
        message: "_models.ChatResponseMessage",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class ChatCompletions(_model_base.Model):
    """Representation of the response data from a chat completions request.
    Completions support a wide variety of tasks and generate text that continues from or
    "completes" provided prompt data.

    :ivar id: A unique identifier associated with this chat completions response. Required.
    :vartype id: str
    :ivar created: The first timestamp associated with generation activity for this completions
     response, represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan
     1970. Required.
    :vartype created: ~datetime.datetime
    :ivar model: The model used for the chat completion. Required.
    :vartype model: str
    :ivar choices: The collection of completions choices associated with this completions response.
     Generally, ``n`` choices are generated per provided prompt with a default value of 1.
     Token limits and other settings may limit the number of choices generated. Required.
    :vartype choices: list[~azure.ai.inference.models.ChatChoice]
    :ivar usage: Usage information for tokens processed and generated as part of this completions
     operation. Required.
    :vartype usage: ~azure.ai.inference.models.CompletionsUsage
    """

    id: str = rest_field()
    """A unique identifier associated with this chat completions response. Required."""
    # Wire value is an integer Unix timestamp; exposed as datetime.datetime.
    created: datetime.datetime = rest_field(format="unix-timestamp")
    """The first timestamp associated with generation activity for this completions response,
    represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. Required."""
    model: str = rest_field()
    """The model used for the chat completion. Required."""
    choices: List["_models.ChatChoice"] = rest_field()
    """The collection of completions choices associated with this completions response.
    Generally, ``n`` choices are generated per provided prompt with a default value of 1.
    Token limits and other settings may limit the number of choices generated. Required."""
    usage: "_models.CompletionsUsage" = rest_field()
    """Usage information for tokens processed and generated as part of this completions operation.
    Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        created: datetime.datetime,
        model: str,
        choices: List["_models.ChatChoice"],
        usage: "_models.CompletionsUsage",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class ChatCompletionsResponseFormat(_model_base.Model):
    """Represents the format that the model must output. Use this to enable JSON mode instead of the
    default text mode.
    Note that to enable JSON mode, some AI models may also require you to instruct the model to
    produce JSON via a system or user message.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ChatCompletionsResponseFormatJsonObject, ChatCompletionsResponseFormatJsonSchema,
    ChatCompletionsResponseFormatText

    :ivar type: The response format type to use for chat completions. Required. Default value is
     None.
    :vartype type: str
    """

    # Registry mapping each discriminator value to its subclass; populated by
    # _model_base when a subclass is declared with `discriminator="..."`.
    __mapping__: Dict[str, _model_base.Model] = {}
    type: str = rest_discriminator(name="type")
    """The response format type to use for chat completions. Required. Default value is None."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class ChatCompletionsResponseFormatJsonObject(ChatCompletionsResponseFormat, discriminator="json_object"):
    """A response format for Chat Completions that restricts responses to emitting valid JSON objects.
    Note that to enable JSON mode, some AI models may also require you to instruct the model to
    produce JSON via a system or user message.

    :ivar type: Response format type: always 'json_object' for this object. Required. Default value
     is "json_object".
    :vartype type: str
    """

    # Narrows the inherited discriminator to this subclass's fixed value.
    type: Literal["json_object"] = rest_discriminator(name="type")  # type: ignore
    """Response format type: always 'json_object' for this object. Required. Default value is
    \"json_object\"."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The discriminator is fixed here; callers never pass `type` themselves.
        super().__init__(*args, type="json_object", **kwargs)
class ChatCompletionsResponseFormatJsonSchema(ChatCompletionsResponseFormat, discriminator="json_schema"):
    """A response format for Chat Completions that restricts responses to emitting valid JSON objects,
    with a JSON schema specified by the caller.

    :ivar type: The type of response format being defined: ``json_schema``. Required. Default value
     is "json_schema".
    :vartype type: str
    :ivar json_schema: The definition of the required JSON schema in the response, and associated
     metadata. Required.
    :vartype json_schema: ~azure.ai.inference.models.JsonSchemaFormat
    """

    # Narrows the inherited discriminator to this subclass's fixed value.
    type: Literal["json_schema"] = rest_discriminator(name="type")  # type: ignore
    """The type of response format being defined: ``json_schema``. Required. Default value is
    \"json_schema\"."""
    json_schema: "_models.JsonSchemaFormat" = rest_field()
    """The definition of the required JSON schema in the response, and associated metadata. Required."""

    @overload
    def __init__(
        self,
        *,
        json_schema: "_models.JsonSchemaFormat",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The discriminator is fixed here; callers never pass `type` themselves.
        super().__init__(*args, type="json_schema", **kwargs)
class ChatCompletionsResponseFormatText(ChatCompletionsResponseFormat, discriminator="text"):
    """A response format for Chat Completions that emits text responses. This is the default response
    format.

    :ivar type: Response format type: always 'text' for this object. Required. Default value is
     "text".
    :vartype type: str
    """

    # Narrows the inherited discriminator to this subclass's fixed value.
    type: Literal["text"] = rest_discriminator(name="type")  # type: ignore
    """Response format type: always 'text' for this object. Required. Default value is \"text\"."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The discriminator is fixed here; callers never pass `type` themselves.
        super().__init__(*args, type="text", **kwargs)
class ChatRequestMessage(_model_base.Model):
    """An abstract representation of a chat message as provided in a request.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ChatRequestAssistantMessage, ChatRequestDeveloperMessage, ChatRequestSystemMessage,
    ChatRequestToolMessage, ChatRequestUserMessage

    :ivar role: The chat role associated with this message. Required. Known values are: "system",
     "user", "assistant", "tool", and "developer".
    :vartype role: str or ~azure.ai.inference.models.ChatRole
    """

    # Registry mapping each discriminator value ("role") to its subclass; populated
    # by _model_base when a subclass is declared with `discriminator="..."`.
    __mapping__: Dict[str, _model_base.Model] = {}
    role: str = rest_discriminator(name="role")
    """The chat role associated with this message. Required. Known values are: \"system\", \"user\",
    \"assistant\", \"tool\", and \"developer\"."""

    @overload
    def __init__(
        self,
        *,
        role: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class ChatRequestAssistantMessage(ChatRequestMessage, discriminator="assistant"):
    """A request chat message representing response or action from the assistant.

    :ivar role: The chat role associated with this message, which is always 'assistant' for
     assistant messages. Required. The role that provides responses to system-instructed,
     user-prompted input.
    :vartype role: str or ~azure.ai.inference.models.ASSISTANT
    :ivar content: The content of the message.
    :vartype content: str
    :ivar tool_calls: The tool calls that must be resolved and have their outputs appended to
     subsequent input messages for the chat completions request to resolve as configured.
    :vartype tool_calls: list[~azure.ai.inference.models.ChatCompletionsToolCall]
    """

    # Narrows the inherited discriminator to this subclass's fixed role.
    role: Literal[ChatRole.ASSISTANT] = rest_discriminator(name="role")  # type: ignore
    """The chat role associated with this message, which is always 'assistant' for assistant messages.
    Required. The role that provides responses to system-instructed, user-prompted input."""
    content: Optional[str] = rest_field()
    """The content of the message."""
    tool_calls: Optional[List["_models.ChatCompletionsToolCall"]] = rest_field()
    """The tool calls that must be resolved and have their outputs appended to subsequent input
    messages for the chat
    completions request to resolve as configured."""

    @overload
    def __init__(
        self,
        *,
        content: Optional[str] = None,
        tool_calls: Optional[List["_models.ChatCompletionsToolCall"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The role discriminator is fixed here; callers never pass `role` themselves.
        super().__init__(*args, role=ChatRole.ASSISTANT, **kwargs)
class ChatRequestDeveloperMessage(ChatRequestMessage, discriminator="developer"):
    """A request chat message containing system instructions that influence how the model will
    generate a chat completions response. Some AI models support a developer message instead
    of a system message.

    :ivar role: The chat role associated with this message, which is always 'developer' for
     developer messages. Required. The role that instructs or sets the behavior of the assistant.
     Some AI models support this role instead of the 'system' role.
    :vartype role: str or ~azure.ai.inference.models.DEVELOPER
    :ivar content: The contents of the developer message. Required.
    :vartype content: str
    """

    # Narrows the inherited discriminator to this subclass's fixed role.
    role: Literal[ChatRole.DEVELOPER] = rest_discriminator(name="role")  # type: ignore
    """The chat role associated with this message, which is always 'developer' for developer messages.
    Required. The role that instructs or sets the behavior of the assistant. Some AI models support
    this role instead of the 'system' role."""
    content: str = rest_field()
    """The contents of the developer message. Required."""

    @overload
    def __init__(
        self,
        *,
        content: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The role discriminator is fixed here; callers never pass `role` themselves.
        super().__init__(*args, role=ChatRole.DEVELOPER, **kwargs)
class ChatRequestSystemMessage(ChatRequestMessage, discriminator="system"):
    """A request chat message containing system instructions that influence how the model will
    generate a chat completions response.

    :ivar role: The chat role associated with this message, which is always 'system' for system
     messages. Required. The role that instructs or sets the behavior of the assistant.
    :vartype role: str or ~azure.ai.inference.models.SYSTEM
    :ivar content: The contents of the system message. Required.
    :vartype content: str
    """

    # Narrows the inherited discriminator to this subclass's fixed role.
    role: Literal[ChatRole.SYSTEM] = rest_discriminator(name="role")  # type: ignore
    """The chat role associated with this message, which is always 'system' for system messages.
    Required. The role that instructs or sets the behavior of the assistant."""
    content: str = rest_field()
    """The contents of the system message. Required."""

    @overload
    def __init__(
        self,
        *,
        content: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The role discriminator is fixed here; callers never pass `role` themselves.
        super().__init__(*args, role=ChatRole.SYSTEM, **kwargs)
class ChatRequestToolMessage(ChatRequestMessage, discriminator="tool"):
    """A request chat message representing requested output from a configured tool.

    :ivar role: The chat role associated with this message, which is always 'tool' for tool
     messages. Required. The role that represents extension tool activity within a chat completions
     operation.
    :vartype role: str or ~azure.ai.inference.models.TOOL
    :ivar content: The content of the message.
    :vartype content: str
    :ivar tool_call_id: The ID of the tool call resolved by the provided content. Required.
    :vartype tool_call_id: str
    """

    # Narrows the inherited discriminator to this subclass's fixed role.
    role: Literal[ChatRole.TOOL] = rest_discriminator(name="role")  # type: ignore
    """The chat role associated with this message, which is always 'tool' for tool messages. Required.
    The role that represents extension tool activity within a chat completions operation."""
    content: Optional[str] = rest_field()
    """The content of the message."""
    tool_call_id: str = rest_field()
    """The ID of the tool call resolved by the provided content. Required."""

    @overload
    def __init__(
        self,
        *,
        tool_call_id: str,
        content: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The role discriminator is fixed here; callers never pass `role` themselves.
        super().__init__(*args, role=ChatRole.TOOL, **kwargs)
class ChatRequestUserMessage(ChatRequestMessage, discriminator="user"):
    """A request chat message representing user input to the assistant.

    :ivar role: The chat role associated with this message, which is always 'user' for user
     messages. Required. The role that provides input for chat completions.
    :vartype role: str or ~azure.ai.inference.models.USER
    :ivar content: The contents of the user message, with available input types varying by selected
     model. Required. Is either a str type or a [ContentItem] type.
    :vartype content: str or list[~azure.ai.inference.models.ContentItem]
    """

    # Narrows the inherited discriminator to this subclass's fixed role.
    role: Literal[ChatRole.USER] = rest_discriminator(name="role")  # type: ignore
    """The chat role associated with this message, which is always 'user' for user messages. Required.
    The role that provides input for chat completions."""
    content: Union["str", List["_models.ContentItem"]] = rest_field()
    """The contents of the user message, with available input types varying by selected model.
    Required. Is either a str type or a [ContentItem] type."""

    @overload
    def __init__(
        self,
        *,
        content: Union[str, List["_models.ContentItem"]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The role discriminator is fixed here; callers never pass `role` themselves.
        super().__init__(*args, role=ChatRole.USER, **kwargs)
class ChatResponseMessage(_model_base.Model):
    """A representation of a chat message as received in a response.

    :ivar role: The chat role associated with the message. Required. Known values are: "system",
     "user", "assistant", "tool", and "developer".
    :vartype role: str or ~azure.ai.inference.models.ChatRole
    :ivar content: The content of the message. Required.
    :vartype content: str
    :ivar tool_calls: The tool calls that must be resolved and have their outputs appended to
     subsequent input messages for the chat completions request to resolve as configured.
    :vartype tool_calls: list[~azure.ai.inference.models.ChatCompletionsToolCall]
    """

    role: Union[str, "_models.ChatRole"] = rest_field()
    """The chat role associated with the message. Required. Known values are: \"system\", \"user\",
    \"assistant\", \"tool\", and \"developer\"."""
    content: str = rest_field()
    """The content of the message. Required."""
    tool_calls: Optional[List["_models.ChatCompletionsToolCall"]] = rest_field()
    """The tool calls that must be resolved and have their outputs appended to subsequent input
    messages for the chat
    completions request to resolve as configured."""

    @overload
    def __init__(
        self,
        *,
        role: Union[str, "_models.ChatRole"],
        content: str,
        tool_calls: Optional[List["_models.ChatCompletionsToolCall"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class CompletionsUsage(_model_base.Model):
    """Representation of the token counts processed for a completions request.
    Counts consider all tokens across prompts, choices, choice alternates, best_of generations, and
    other consumers.

    :ivar completion_tokens: The number of tokens generated across all completions emissions.
     Required.
    :vartype completion_tokens: int
    :ivar prompt_tokens: The number of tokens in the provided prompts for the completions request.
     Required.
    :vartype prompt_tokens: int
    :ivar total_tokens: The total number of tokens processed for the completions request and
     response. Required.
    :vartype total_tokens: int
    """

    completion_tokens: int = rest_field()
    """The number of tokens generated across all completions emissions. Required."""
    prompt_tokens: int = rest_field()
    """The number of tokens in the provided prompts for the completions request. Required."""
    total_tokens: int = rest_field()
    """The total number of tokens processed for the completions request and response. Required."""

    @overload
    def __init__(
        self,
        *,
        completion_tokens: int,
        prompt_tokens: int,
        total_tokens: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class EmbeddingItem(_model_base.Model):
    """Representation of a single embeddings relatedness comparison.

    :ivar embedding: List of embedding values for the input prompt. These represent a measurement
     of the vector-based relatedness of the provided input. Or a base64 encoded string of the
     embedding vector. Required. Is either a str type or a [float] type.
    :vartype embedding: str or list[float]
    :ivar index: Index of the prompt to which the EmbeddingItem corresponds. Required.
    :vartype index: int
    """

    embedding: Union["str", List[float]] = rest_field()
    """List of embedding values for the input prompt. These represent a measurement of the
    vector-based relatedness of the provided input. Or a base64 encoded string of the embedding
    vector. Required. Is either a str type or a [float] type."""
    index: int = rest_field()
    """Index of the prompt to which the EmbeddingItem corresponds. Required."""

    @overload
    def __init__(
        self,
        *,
        embedding: Union[str, List[float]],
        index: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class EmbeddingsResult(_model_base.Model):
    """Representation of the response data from an embeddings request.
    Embeddings measure the relatedness of text strings and are commonly used for search,
    clustering, recommendations, and other similar scenarios.

    :ivar id: Unique identifier for the embeddings result. Required.
    :vartype id: str
    :ivar data: Embedding values for the prompts submitted in the request. Required.
    :vartype data: list[~azure.ai.inference.models.EmbeddingItem]
    :ivar usage: Usage counts for tokens input using the embeddings API. Required.
    :vartype usage: ~azure.ai.inference.models.EmbeddingsUsage
    :ivar model: The model ID used to generate this result. Required.
    :vartype model: str
    """

    id: str = rest_field()
    """Unique identifier for the embeddings result. Required."""
    data: List["_models.EmbeddingItem"] = rest_field()
    """Embedding values for the prompts submitted in the request. Required."""
    usage: "_models.EmbeddingsUsage" = rest_field()
    """Usage counts for tokens input using the embeddings API. Required."""
    model: str = rest_field()
    """The model ID used to generate this result. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        data: List["_models.EmbeddingItem"],
        usage: "_models.EmbeddingsUsage",
        model: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class EmbeddingsUsage(_model_base.Model):
    """Measurement of the amount of tokens used in this request and response.

    :ivar prompt_tokens: Number of tokens in the request. Required.
    :vartype prompt_tokens: int
    :ivar total_tokens: Total number of tokens transacted in this request/response. Should equal
     the number of tokens in the request. Required.
    :vartype total_tokens: int
    """

    prompt_tokens: int = rest_field()
    """Number of tokens in the request. Required."""
    total_tokens: int = rest_field()
    """Total number of tokens transacted in this request/response. Should equal the
    number of tokens in the request. Required."""

    @overload
    def __init__(
        self,
        *,
        prompt_tokens: int,
        total_tokens: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class FunctionCall(_model_base.Model):
    """The name and arguments of a function that should be called, as generated by the model.

    :ivar name: The name of the function to call. Required.
    :vartype name: str
    :ivar arguments: The arguments to call the function with, as generated by the model in JSON
     format.
     Note that the model does not always generate valid JSON, and may hallucinate parameters
     not defined by your function schema. Validate the arguments in your code before calling
     your function. Required.
    :vartype arguments: str
    """

    name: str = rest_field()
    """The name of the function to call. Required."""
    # NOTE: `arguments` is a raw JSON string, not a parsed mapping; callers must
    # parse and validate it themselves (see docstring below).
    arguments: str = rest_field()
    """The arguments to call the function with, as generated by the model in JSON format.
    Note that the model does not always generate valid JSON, and may hallucinate parameters
    not defined by your function schema. Validate the arguments in your code before calling
    your function. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        arguments: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class FunctionDefinition(_model_base.Model):
    """The definition of a caller-specified function that chat completions may invoke in response to
    matching user input.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does. The model will use this description
     when selecting the function and interpreting its parameters.
    :vartype description: str
    :ivar parameters: The parameters the function accepts, described as a JSON Schema object.
    :vartype parameters: any
    """

    name: str = rest_field()
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field()
    """A description of what the function does. The model will use this description when selecting the
    function and
    interpreting its parameters."""
    parameters: Optional[Any] = rest_field()
    """The parameters the function accepts, described as a JSON Schema object."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: Optional[str] = None,
        parameters: Optional[Any] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class ImageContentItem(ContentItem, discriminator="image_url"):
    """A structured chat content item containing an image reference.

    :ivar type: The discriminated object type: always 'image_url' for this type. Required. Default
     value is "image_url".
    :vartype type: str
    :ivar image_url: An internet location, which must be accessible to the model, from which the
     image may be retrieved. Required.
    :vartype image_url: ~azure.ai.inference.models.ImageUrl
    """

    # Narrows the inherited discriminator to this subclass's fixed value.
    type: Literal["image_url"] = rest_discriminator(name="type")  # type: ignore
    """The discriminated object type: always 'image_url' for this type. Required. Default value is
    \"image_url\"."""
    image_url: "_models.ImageUrl" = rest_field()
    """An internet location, which must be accessible to the model, from which the image may be
    retrieved. Required."""

    @overload
    def __init__(
        self,
        *,
        image_url: "_models.ImageUrl",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The discriminator is fixed here; callers never pass `type` themselves.
        super().__init__(*args, type="image_url", **kwargs)
class ImageEmbeddingInput(_model_base.Model):
    """Represents an image with optional text.

    :ivar image: The input image encoded in base64 string as a data URL. Example:
     ``data:image/{format};base64,{data}``. Required.
    :vartype image: str
    :ivar text: Optional. The text input to feed into the model (like DINO, CLIP).
     Returns a 422 error if the model doesn't support the value or parameter.
    :vartype text: str
    """

    image: str = rest_field()
    """The input image encoded in base64 string as a data URL. Example:
    ``data:image/{format};base64,{data}``. Required."""
    text: Optional[str] = rest_field()
    """Optional. The text input to feed into the model (like DINO, CLIP).
    Returns a 422 error if the model doesn't support the value or parameter."""

    @overload
    def __init__(
        self,
        *,
        image: str,
        text: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class ImageUrl(_model_base.Model):
    """An internet location from which the model may retrieve an image.

    :ivar url: The URL of the image. Required.
    :vartype url: str
    :ivar detail: The evaluation quality setting to use, which controls relative prioritization of
     speed, token consumption, and accuracy. Known values are: "auto", "low", and "high".
    :vartype detail: str or ~azure.ai.inference.models.ImageDetailLevel
    """

    url: str = rest_field()
    """The URL of the image. Required."""
    detail: Optional[Union[str, "_models.ImageDetailLevel"]] = rest_field()
    """The evaluation quality setting to use, which controls relative prioritization of speed, token
    consumption, and
    accuracy. Known values are: \"auto\", \"low\", and \"high\"."""

    @overload
    def __init__(
        self,
        *,
        url: str,
        detail: Optional[Union[str, "_models.ImageDetailLevel"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword fields or a single raw-JSON mapping (see overloads).
        super().__init__(*args, **kwargs)
class InputAudio(_model_base.Model):
    """The details of an audio chat message content part.

    :ivar data: Base64 encoded audio data. Required.
    :vartype data: str
    :ivar format: The audio format of the audio content. Required. Known values are: "wav" and
     "mp3".
    :vartype format: str or ~azure.ai.inference.models.AudioContentFormat
    """

    # Wire fields mapped to the REST payload by the model base class.
    data: str = rest_field()
    """Base64 encoded audio data. Required."""
    format: Union[str, "_models.AudioContentFormat"] = rest_field()
    """The audio format of the audio content. Required. Known values are: \"wav\" and \"mp3\"."""

    # Overload: keyword-argument construction (both fields required).
    @overload
    def __init__(
        self,
        *,
        data: str,
        format: Union[str, "_models.AudioContentFormat"],
    ) -> None: ...

    # Overload: construction from a raw JSON-like mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Single real implementation: defers all handling to the model base class.
        super().__init__(*args, **kwargs)
[docs]
class ModelInfo(_model_base.Model):
    """Represents some basic information about the AI model.

    :ivar model_name: The name of the AI model. For example: ``Phi21``. Required.
    :vartype model_name: str
    :ivar model_type: The type of the AI model; a unique identifier for the profile. Required.
     Known values are: "embeddings", "image_generation", "text_generation", "image_embeddings",
     "audio_generation", and "chat_completion".
    :vartype model_type: str or ~azure.ai.inference.models.ModelType
    :ivar model_provider_name: The model provider name. For example: ``Microsoft Research``.
     Required.
    :vartype model_provider_name: str
    """

    # Wire fields mapped to the REST payload by the model base class.
    model_name: str = rest_field()
    """The name of the AI model. For example: ``Phi21``. Required."""
    model_type: Union[str, "_models.ModelType"] = rest_field()
    """The type of the AI model; a unique identifier for the profile. Required. Known values are:
    \"embeddings\", \"image_generation\", \"text_generation\", \"image_embeddings\",
    \"audio_generation\", and \"chat_completion\"."""
    model_provider_name: str = rest_field()
    """The model provider name. For example: ``Microsoft Research``. Required."""

    # Overload: keyword-argument construction (all fields required).
    @overload
    def __init__(
        self,
        *,
        model_name: str,
        model_type: Union[str, "_models.ModelType"],
        model_provider_name: str,
    ) -> None: ...

    # Overload: construction from a raw JSON-like mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Single real implementation: defers all handling to the model base class.
        super().__init__(*args, **kwargs)
[docs]
class StreamingChatChoiceUpdate(_model_base.Model):
    """Represents an update to a single prompt completion when the service is streaming updates
    using Server Sent Events (SSE).

    Generally, ``n`` choices are generated per provided prompt with a default value of 1.
    Token limits and other settings may limit the number of choices generated.

    :ivar index: The ordered index associated with this chat completions choice. Required.
    :vartype index: int
    :ivar finish_reason: The reason that this chat completions choice completed its generation.
     Required. Known values are: "stop", "length", "content_filter", and "tool_calls".
    :vartype finish_reason: str or ~azure.ai.inference.models.CompletionsFinishReason
    :ivar delta: An update to the chat message for a given chat completions prompt. Required.
    :vartype delta: ~azure.ai.inference.models.StreamingChatResponseMessageUpdate
    """

    # Wire fields mapped to the REST payload by the model base class.
    index: int = rest_field()
    """The ordered index associated with this chat completions choice. Required."""
    finish_reason: Union[str, "_models.CompletionsFinishReason"] = rest_field()
    """The reason that this chat completions choice completed its generation. Required. Known
    values are: \"stop\", \"length\", \"content_filter\", and \"tool_calls\"."""
    delta: "_models.StreamingChatResponseMessageUpdate" = rest_field()
    """An update to the chat message for a given chat completions prompt. Required."""

    # Overload: keyword-argument construction (all fields required).
    @overload
    def __init__(
        self,
        *,
        index: int,
        finish_reason: Union[str, "_models.CompletionsFinishReason"],
        delta: "_models.StreamingChatResponseMessageUpdate",
    ) -> None: ...

    # Overload: construction from a raw JSON-like mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Single real implementation: defers all handling to the model base class.
        super().__init__(*args, **kwargs)
[docs]
class StreamingChatCompletionsUpdate(_model_base.Model):
    """Represents a response update to a chat completions request, when the service is streaming
    updates using Server Sent Events (SSE).

    Completions support a wide variety of tasks and generate text that continues from or
    "completes" provided prompt data.

    :ivar id: A unique identifier associated with this chat completions response. Required.
    :vartype id: str
    :ivar created: The first timestamp associated with generation activity for this completions
     response, represented as seconds since the beginning of the Unix epoch of 00:00 on
     1 Jan 1970. Required.
    :vartype created: ~datetime.datetime
    :ivar model: The model used for the chat completion. Required.
    :vartype model: str
    :ivar choices: An update to the collection of completion choices associated with this
     completions response.
     Generally, ``n`` choices are generated per provided prompt with a default value of 1.
     Token limits and other settings may limit the number of choices generated. Required.
    :vartype choices: list[~azure.ai.inference.models.StreamingChatChoiceUpdate]
    :ivar usage: Usage information for tokens processed and generated as part of this completions
     operation.
    :vartype usage: ~azure.ai.inference.models.CompletionsUsage
    """

    # Wire fields mapped to the REST payload by the model base class.
    id: str = rest_field()
    """A unique identifier associated with this chat completions response. Required."""
    # format="unix-timestamp": the wire value is an integer epoch-seconds
    # timestamp, surfaced here as a datetime.
    created: datetime.datetime = rest_field(format="unix-timestamp")
    """The first timestamp associated with generation activity for this completions response,
    represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. Required."""
    model: str = rest_field()
    """The model used for the chat completion. Required."""
    choices: List["_models.StreamingChatChoiceUpdate"] = rest_field()
    """An update to the collection of completion choices associated with this completions response.
    Generally, ``n`` choices are generated per provided prompt with a default value of 1.
    Token limits and other settings may limit the number of choices generated. Required."""
    usage: Optional["_models.CompletionsUsage"] = rest_field()
    """Usage information for tokens processed and generated as part of this completions operation."""

    # Overload: keyword-argument construction; only ``usage`` is optional.
    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        created: datetime.datetime,
        model: str,
        choices: List["_models.StreamingChatChoiceUpdate"],
        usage: Optional["_models.CompletionsUsage"] = None,
    ) -> None: ...

    # Overload: construction from a raw JSON-like mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Single real implementation: defers all handling to the model base class.
        super().__init__(*args, **kwargs)
[docs]
class StreamingChatResponseMessageUpdate(_model_base.Model):
    """A representation of a chat message update as received in a streaming response.

    :ivar role: The chat role associated with the message. If present, should always be
     'assistant'. Known values are: "system", "user", "assistant", "tool", and "developer".
    :vartype role: str or ~azure.ai.inference.models.ChatRole
    :ivar content: The content of the message.
    :vartype content: str
    :ivar tool_calls: The tool calls that must be resolved and have their outputs appended to
     subsequent input messages for the chat completions request to resolve as configured.
    :vartype tool_calls: list[~azure.ai.inference.models.StreamingChatResponseToolCallUpdate]
    """

    # Wire fields mapped to the REST payload by the model base class.
    # All fields are Optional: a streaming delta may carry any subset of them.
    role: Optional[Union[str, "_models.ChatRole"]] = rest_field()
    """The chat role associated with the message. If present, should always be 'assistant'. Known
    values are: \"system\", \"user\", \"assistant\", \"tool\", and \"developer\"."""
    content: Optional[str] = rest_field()
    """The content of the message."""
    tool_calls: Optional[List["_models.StreamingChatResponseToolCallUpdate"]] = rest_field()
    """The tool calls that must be resolved and have their outputs appended to subsequent input
    messages for the chat completions request to resolve as configured."""

    # Overload: keyword-argument construction (every field optional).
    @overload
    def __init__(
        self,
        *,
        role: Optional[Union[str, "_models.ChatRole"]] = None,
        content: Optional[str] = None,
        tool_calls: Optional[List["_models.StreamingChatResponseToolCallUpdate"]] = None,
    ) -> None: ...

    # Overload: construction from a raw JSON-like mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Single real implementation: defers all handling to the model base class.
        super().__init__(*args, **kwargs)
[docs]
class TextContentItem(ContentItem, discriminator="text"):
    """A structured chat content item containing plain text.

    :ivar type: The discriminated object type: always 'text' for this type. Required. Default
     value is "text".
    :vartype type: str
    :ivar text: The content of the message. Required.
    :vartype text: str
    """

    # Discriminator field: narrows the base class's ``type: str`` to the literal
    # "text" so polymorphic deserialization selects this subtype.
    type: Literal["text"] = rest_discriminator(name="type")  # type: ignore
    """The discriminated object type: always 'text' for this type. Required. Default value is
    \"text\"."""
    text: str = rest_field()
    """The content of the message. Required."""

    # Overload: keyword-argument construction (``type`` is fixed, not accepted here).
    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    # Overload: construction from a raw JSON-like mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pin the discriminator value for this subtype before deferring to the base class.
        super().__init__(*args, type="text", **kwargs)