diff --git a/sdk/agentserver/azure-ai-agentserver-responses/Makefile b/sdk/agentserver/azure-ai-agentserver-responses/Makefile
new file mode 100644
index 000000000000..977fc20dfedd
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/Makefile
@@ -0,0 +1,152 @@
+# Python TypeSpec code generation tooling. Targets: generate-models, clean, install-typespec-deps.
+# OUTPUT_DIR must match the checked-in generated package: azure/ai/agentserver/responses/models/_generated.
+
+OUTPUT_DIR ?= azure/ai/agentserver/responses/models/_generated
+TYPESPEC_DIR ?= type_spec
+OPENAPI_SPEC ?= type_spec/TempTypeSpecFiles/Foundry/openapi3/v1/microsoft-foundry-openapi3.yaml
+VALIDATORS_OUTPUT ?= $(OUTPUT_DIR)/_validators.py
+ROOT_SCHEMAS ?= CreateResponse
+LOCAL_TYPESPEC_PACKAGES := @typespec/compiler @typespec/http @typespec/openapi @typespec/openapi3 @typespec/versioning @typespec/events @typespec/sse @azure-tools/typespec-python @azure-tools/typespec-azure-core @azure-tools/typespec-client-generator-core @azure-tools/openai-typespec
+TEMP_OUTPUT_DIR := $(OUTPUT_DIR)/.tmp_codegen
+MODEL_PACKAGE_DIR := $(TEMP_OUTPUT_DIR)/azure/ai/responses/server/sdk/models
+MODEL_SHIMS_DIR := scripts/generated_shims
+
+.PHONY: generate-models generate-validators generate-contracts clean install-typespec-deps
+
+ifeq ($(OS),Windows_NT)
+SHELL := cmd
+.SHELLFLAGS := /c
+endif
+
+# --------------------------------------------------------------------------
+# generate-validators: Generate JSON payload validators from OpenAPI
+# --------------------------------------------------------------------------
+ifeq ($(OS),Windows_NT)
+generate-validators:
+ @where python >NUL 2>NUL || (echo Error: python is required and was not found on PATH. 1>&2 && exit /b 1)
+ @if not exist "$(OPENAPI_SPEC)" (echo Error: OpenAPI spec not found at $(OPENAPI_SPEC). 1>&2 && exit /b 1)
+ @echo Generating payload validators from $(OPENAPI_SPEC)...
+ python scripts/generate_validators.py --input "$(OPENAPI_SPEC)" --output "$(VALIDATORS_OUTPUT)" --root-schemas "$(ROOT_SCHEMAS)"
+ @echo Generated validators at $(VALIDATORS_OUTPUT)
+else
+generate-validators:
+ @command -v python >/dev/null 2>&1 || { \
+ echo "Error: python is required and was not found on PATH." >&2; \
+ exit 1; \
+ }
+ @test -f "$(OPENAPI_SPEC)" || { \
+ echo "Error: OpenAPI spec not found at $(OPENAPI_SPEC)." >&2; \
+ exit 1; \
+ }
+ @echo "Generating payload validators from $(OPENAPI_SPEC)..."
+ python scripts/generate_validators.py --input "$(OPENAPI_SPEC)" --output "$(VALIDATORS_OUTPUT)" --root-schemas "$(ROOT_SCHEMAS)"
+ @echo "Generated validators at $(VALIDATORS_OUTPUT)"
+endif
+
+# --------------------------------------------------------------------------
+# generate-contracts: Generate models + validators artifacts
+# --------------------------------------------------------------------------
+generate-contracts: generate-models generate-validators
+
+TYPESPEC_OUTPUT_DIR := {cwd}/../$(TEMP_OUTPUT_DIR)
+
+# --------------------------------------------------------------------------
+# generate-models: Compile TypeSpec definitions into Python model classes
+# --------------------------------------------------------------------------
+ifeq ($(OS),Windows_NT)
+generate-models:
+ @where tsp-client >NUL 2>NUL || (echo Error: tsp-client is not installed. 1>&2 && echo Run 'make install-typespec-deps' to install it. 1>&2 && exit /b 1)
+ @where npm >NUL 2>NUL || (echo Error: npm is required. Install Node.js ^(v18+^) from https://nodejs.org/ 1>&2 && exit /b 1)
+ @echo Syncing upstream TypeSpec sources...
+ cd /d $(TYPESPEC_DIR) && tsp-client sync
+ @echo Installing local TypeSpec compiler dependencies...
+ npm install --prefix $(TYPESPEC_DIR) --no-save $(LOCAL_TYPESPEC_PACKAGES)
+ @echo Generating Python models...
+ @if exist "$(OUTPUT_DIR)" rmdir /s /q "$(OUTPUT_DIR)"
+ cd /d $(TYPESPEC_DIR) && npx tsp compile . --emit @azure-tools/typespec-python --option "@azure-tools/typespec-python.emitter-output-dir=$(TYPESPEC_OUTPUT_DIR)"
+ @if not exist "$(MODEL_PACKAGE_DIR)" (echo Error: generated model package was not found. 1>&2 && exit /b 1)
+ @if not exist "$(OUTPUT_DIR)\sdk" mkdir "$(OUTPUT_DIR)\sdk"
+ @xcopy /E /I /Y "$(MODEL_PACKAGE_DIR)" "$(OUTPUT_DIR)\sdk\models" >NUL
+ @if exist "$(OUTPUT_DIR)\sdk\models\aio" rmdir /s /q "$(OUTPUT_DIR)\sdk\models\aio"
+ @if exist "$(OUTPUT_DIR)\sdk\models\operations" rmdir /s /q "$(OUTPUT_DIR)\sdk\models\operations"
+ @if exist "$(OUTPUT_DIR)\sdk\models\_client.py" del /q "$(OUTPUT_DIR)\sdk\models\_client.py"
+ @if exist "$(OUTPUT_DIR)\sdk\models\_configuration.py" del /q "$(OUTPUT_DIR)\sdk\models\_configuration.py"
+ @if exist "$(OUTPUT_DIR)\sdk\models\_version.py" del /q "$(OUTPUT_DIR)\sdk\models\_version.py"
+ @copy /Y "$(MODEL_SHIMS_DIR)\sdk_models__init__.py" "$(OUTPUT_DIR)\sdk\models\__init__.py" >NUL
+ @copy /Y "$(MODEL_SHIMS_DIR)\__init__.py" "$(OUTPUT_DIR)\__init__.py" >NUL
+ @copy /Y "$(MODEL_SHIMS_DIR)\_enums.py" "$(OUTPUT_DIR)\_enums.py" >NUL
+ @copy /Y "$(MODEL_SHIMS_DIR)\_models.py" "$(OUTPUT_DIR)\_models.py" >NUL
+ @copy /Y "$(MODEL_SHIMS_DIR)\_patch.py" "$(OUTPUT_DIR)\_patch.py" >NUL
+ @if exist "$(TEMP_OUTPUT_DIR)" rmdir /s /q "$(TEMP_OUTPUT_DIR)"
+else
+generate-models:
+ @command -v tsp-client >/dev/null 2>&1 || { \
+ echo "Error: tsp-client is not installed." >&2; \
+ echo "Run 'make install-typespec-deps' to install it." >&2; \
+ exit 1; \
+ }
+ @command -v npm >/dev/null 2>&1 || { \
+ echo "Error: npm is required. Install Node.js (v18+) from https://nodejs.org/" >&2; \
+ exit 1; \
+ }
+ @echo "Syncing upstream TypeSpec sources..."
+ cd $(TYPESPEC_DIR) && tsp-client sync
+ @echo "Installing local TypeSpec compiler dependencies..."
+ npm install --prefix $(TYPESPEC_DIR) --no-save $(LOCAL_TYPESPEC_PACKAGES)
+ @echo "Generating Python models..."
+ rm -rf $(OUTPUT_DIR)
+ cd $(TYPESPEC_DIR) && npx tsp compile . --emit @azure-tools/typespec-python --option "@azure-tools/typespec-python.emitter-output-dir=$(TYPESPEC_OUTPUT_DIR)"
+ @test -d $(MODEL_PACKAGE_DIR) || { \
+ echo "Error: generated model package was not found." >&2; \
+ exit 1; \
+ }
+ mkdir -p $(OUTPUT_DIR)/sdk
+ cp -R $(MODEL_PACKAGE_DIR) $(OUTPUT_DIR)/sdk/models
+ rm -rf $(OUTPUT_DIR)/sdk/models/aio
+ rm -rf $(OUTPUT_DIR)/sdk/models/operations
+ rm -f $(OUTPUT_DIR)/sdk/models/_client.py
+ rm -f $(OUTPUT_DIR)/sdk/models/_configuration.py
+ rm -f $(OUTPUT_DIR)/sdk/models/_version.py
+ cp $(MODEL_SHIMS_DIR)/sdk_models__init__.py $(OUTPUT_DIR)/sdk/models/__init__.py
+ cp $(MODEL_SHIMS_DIR)/__init__.py $(OUTPUT_DIR)/__init__.py
+ cp $(MODEL_SHIMS_DIR)/_enums.py $(OUTPUT_DIR)/_enums.py
+ cp $(MODEL_SHIMS_DIR)/_models.py $(OUTPUT_DIR)/_models.py
+ cp $(MODEL_SHIMS_DIR)/_patch.py $(OUTPUT_DIR)/_patch.py
+ rm -rf $(TEMP_OUTPUT_DIR)
+endif
+
+# --------------------------------------------------------------------------
+# clean: Remove all previously generated Python model files
+# --------------------------------------------------------------------------
+ifeq ($(OS),Windows_NT)
+clean:
+ @if exist "$(OUTPUT_DIR)" rmdir /s /q "$(OUTPUT_DIR)"
+else
+clean:
+ rm -rf $(OUTPUT_DIR)
+endif
+
+# --------------------------------------------------------------------------
+# install-typespec-deps: Install tsp-client CLI and sync TypeSpec sources
+# --------------------------------------------------------------------------
+ifeq ($(OS),Windows_NT)
+install-typespec-deps:
+ @where node >NUL 2>NUL || (echo Error: Node.js ^(v18+^) is required. Install from https://nodejs.org/ 1>&2 && exit /b 1)
+ @where npm >NUL 2>NUL || (echo Error: npm is required. Install Node.js ^(v18+^) from https://nodejs.org/ 1>&2 && exit /b 1)
+ npm install -g @azure-tools/typespec-client-generator-cli
+ npm install --prefix $(TYPESPEC_DIR) --no-save $(LOCAL_TYPESPEC_PACKAGES)
+ cd /d $(TYPESPEC_DIR) && tsp-client sync
+else
+install-typespec-deps:
+ @command -v node >/dev/null 2>&1 || { \
+ echo "Error: Node.js (v18+) is required. Install from https://nodejs.org/" >&2; \
+ exit 1; \
+ }
+ @command -v npm >/dev/null 2>&1 || { \
+ echo "Error: npm is required. Install Node.js (v18+) from https://nodejs.org/" >&2; \
+ exit 1; \
+ }
+ npm install -g @azure-tools/typespec-client-generator-cli
+ npm install --prefix $(TYPESPEC_DIR) --no-save $(LOCAL_TYPESPEC_PACKAGES)
+ cd $(TYPESPEC_DIR) && tsp-client sync
+endif
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_handlers.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_handlers.py
new file mode 100644
index 000000000000..b499bd22cf0b
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_handlers.py
@@ -0,0 +1,115 @@
+"""Handler and context contracts for user-defined response execution."""
+
+from __future__ import annotations
+
+import asyncio
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from typing import Any, AsyncIterable, Awaitable, Callable, Mapping, Protocol, Sequence, runtime_checkable
+
+from ._generated import CreateResponse, OutputItem, ResponseStreamEvent
+from .models import ResponseModeFlags
+
+OutputItemsLoader = Callable[[], Awaitable[Sequence[OutputItem]]]
+RawBodyType = Mapping[str, Any] | Sequence[Any] | str | int | float | bool | None
+
+
+@runtime_checkable
+class ResponseContext(Protocol):
+ """Runtime context exposed to response handlers.
+
+ This mirrors the referenced .NET ``IResponseContext`` shape:
+ - response identifier
+ - shutdown signal flag
+ - raw body access
+ - async input/history resolution
+ """
+
+ @property
+ def response_id(self) -> str:
+ """Get the unique response identifier."""
+
+ @property
+ def is_shutdown_requested(self) -> bool:
+ """Get whether shutdown has been requested by the host."""
+
+ @is_shutdown_requested.setter
+ def is_shutdown_requested(self, value: bool) -> None:
+ """Set whether shutdown has been requested by the host."""
+
+ @property
+ def raw_body(self) -> RawBodyType:
+ """Get the raw request body payload for extension field access."""
+
+ async def get_input_items(self) -> Sequence[OutputItem]:
+ """Resolve and return request input items."""
+
+ async def get_history(self) -> Sequence[OutputItem]:
+ """Resolve and return conversation history items."""
+
+
+@dataclass(slots=True)
+class RuntimeResponseContext(ResponseContext):
+ """Default runtime context implementation used by hosting orchestration."""
+
+ response_id: str
+ mode_flags: ResponseModeFlags
+ raw_body: RawBodyType = None
+ created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+ _is_shutdown_requested: bool = False
+ _input_items_loader: OutputItemsLoader | None = None
+ _history_loader: OutputItemsLoader | None = None
+ _input_items_cache: Sequence[OutputItem] | None = None
+ _history_cache: Sequence[OutputItem] | None = None
+
+ @property
+ def is_shutdown_requested(self) -> bool:
+ """Get whether shutdown has been requested by the host."""
+ return self._is_shutdown_requested
+
+ @is_shutdown_requested.setter
+ def is_shutdown_requested(self, value: bool) -> None:
+ """Set whether shutdown has been requested by the host."""
+ self._is_shutdown_requested = value
+
+ async def get_input_items(self) -> Sequence[OutputItem]:
+ """Resolve and cache request input items."""
+ if self._input_items_cache is not None:
+ return self._input_items_cache
+
+ if self._input_items_loader is None:
+ self._input_items_cache = ()
+ return self._input_items_cache
+
+ loaded = await self._input_items_loader()
+ self._input_items_cache = tuple(loaded)
+ return self._input_items_cache
+
+ async def get_history(self) -> Sequence[OutputItem]:
+ """Resolve and cache conversation history items."""
+ if self._history_cache is not None:
+ return self._history_cache
+
+ if self._history_loader is None:
+ self._history_cache = ()
+ return self._history_cache
+
+ loaded = await self._history_loader()
+ self._history_cache = tuple(loaded)
+ return self._history_cache
+
+
+@runtime_checkable
+class ResponseHandler(Protocol):
+ """Primary async handler contract consumed by route orchestration.
+
+ Mirrors the referenced .NET ``IResponseHandler`` single-method design.
+ """
+
+ def create_async(
+ self,
+ request: CreateResponse,
+ context: ResponseContext,
+ cancellation_signal: asyncio.Event,
+ ) -> AsyncIterable[ResponseStreamEvent]:
+ """Yield the full response event stream for one create request."""
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_hosting.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_hosting.py
new file mode 100644
index 000000000000..8c6aca1c5209
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_hosting.py
@@ -0,0 +1,29 @@
+"""Starlette hosting integration for the Responses server package."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from ._options import ResponsesServerOptions
+
+if TYPE_CHECKING:
+ from starlette.applications import Starlette
+
+ from ._handlers import ResponseHandler
+
+
+def map_responses_server(
+ app: "Starlette",
+ handler: "ResponseHandler",
+ *,
+ prefix: str = "",
+ options: ResponsesServerOptions | None = None,
+) -> None:
+ """Register Responses API routes on a Starlette application.
+
+ :param app: Starlette application instance to configure.
+ :param handler: User-provided response handler implementation.
+ :param prefix: Optional route prefix.
+ :param options: Optional server runtime options.
+ """
+ raise NotImplementedError("Route registration will be implemented in Phase 2/3.")
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_models.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_models.py
new file mode 100644
index 000000000000..e6e79fd7ee76
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_models.py
@@ -0,0 +1,24 @@
+"""Compatibility module for runtime response server models.
+
+Canonical non-generated type definitions now live in ``azure.ai.responses.server.models``.
+"""
+
+from .models.runtime import (
+ ResponseExecution,
+ ResponseModeFlags,
+ ResponseSession,
+ ResponseStatus,
+ StreamEventRecord,
+ StreamReplayState,
+ TerminalResponseStatus,
+)
+
+__all__ = [
+ "ResponseExecution",
+ "ResponseModeFlags",
+ "ResponseSession",
+ "ResponseStatus",
+ "StreamEventRecord",
+ "StreamReplayState",
+ "TerminalResponseStatus",
+]
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_observability.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_observability.py
new file mode 100644
index 000000000000..f79a978e7f0e
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_observability.py
@@ -0,0 +1,16 @@
+"""Observability and identity header helpers."""
+
+from __future__ import annotations
+
+
+def build_platform_server_header(sdk_name: str, version: str, runtime: str, extra: str | None = None) -> str:
+ """Build the platform server identity header value.
+
+ :param sdk_name: SDK package name.
+ :param version: SDK package version.
+ :param runtime: Runtime marker, such as python/3.10.
+ :param extra: Optional additional identity suffix.
+ :returns: Formatted identity header value.
+ """
+ base_value = f"{sdk_name}/{version} ({runtime})"
+ return f"{base_value} {extra}".strip() if extra else base_value
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_options.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_options.py
new file mode 100644
index 000000000000..8173b2760d31
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_options.py
@@ -0,0 +1,44 @@
+"""Typed options for configuring the Responses server runtime."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+
+@dataclass(slots=True)
+class ResponsesServerOptions:
+ """Configuration values for hosting and runtime behavior.
+
+ This shape mirrors the .NET `ResponsesServerOptions` surface:
+ - SSE keep-alive is disabled by default.
+ - `default_model` is optional.
+ - `default_fetch_history_count` defaults to 100.
+ - `additional_server_identity` is optional.
+ """
+
+ default_fetch_history_count_value: int = 100
+ additional_server_identity: str | None = None
+ default_model: str | None = None
+ default_fetch_history_count: int = default_fetch_history_count_value
+ sse_keep_alive_interval_seconds: int | None = None
+
+ def __post_init__(self) -> None:
+ """Validate and normalize option values."""
+ if self.additional_server_identity is not None:
+ normalized = self.additional_server_identity.strip()
+ self.additional_server_identity = normalized or None
+
+ if self.default_model is not None:
+ normalized_model = self.default_model.strip()
+ self.default_model = normalized_model or None
+
+ if self.sse_keep_alive_interval_seconds is not None and self.sse_keep_alive_interval_seconds <= 0:
+ raise ValueError("sse_keep_alive_interval_seconds must be > 0 when set")
+
+ if self.default_fetch_history_count <= 0:
+ raise ValueError("default_fetch_history_count must be > 0")
+
+ @property
+ def sse_keep_alive_enabled(self) -> bool:
+ """Return whether periodic SSE keep-alive comments are enabled."""
+ return self.sse_keep_alive_interval_seconds is not None
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_sse.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_sse.py
new file mode 100644
index 000000000000..292988253093
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_sse.py
@@ -0,0 +1,14 @@
+"""Server-sent events helpers for Responses streaming."""
+
+from __future__ import annotations
+
+from .models._generated import ResponseStreamEvent
+
+
+def encode_sse_event(event: ResponseStreamEvent) -> str:
+ """Encode a response stream event into SSE wire format.
+
+ :param event: Generated response stream event model.
+ :returns: Encoded SSE payload string.
+ """
+ raise NotImplementedError("SSE encoding will be implemented in Phase 2.")
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_validation.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_validation.py
new file mode 100644
index 000000000000..3eca33781865
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/_validation.py
@@ -0,0 +1,144 @@
+"""Validation utilities for request and response models."""
+
+from __future__ import annotations
+
+from typing import Any, Mapping
+
+from ._options import ResponsesServerOptions
+from .models.errors import RequestValidationError
+
+try:
+ from .models._generated import ApiErrorResponse, CreateResponse, Error
+except Exception: # pragma: no cover - allows isolated unit testing when generated deps are unavailable.
+ class _GeneratedUnavailable:
+ def __init__(self, *_args: Any, **_kwargs: Any) -> None:
+ raise ModuleNotFoundError(
+ "generated contract models are unavailable; run generation to restore runtime dependencies"
+ )
+
+ ApiErrorResponse = _GeneratedUnavailable # type: ignore[assignment]
+ CreateResponse = _GeneratedUnavailable # type: ignore[assignment]
+ Error = _GeneratedUnavailable # type: ignore[assignment]
+
+try:
+ from .models._generated import _validators as _generated_validators
+except Exception: # pragma: no cover - optional until validator generation is integrated in all environments.
+ _generated_validators = None
+
+
+def parse_create_response(payload: Mapping[str, Any]) -> CreateResponse:
+ """Parse incoming JSON payload into the generated ``CreateResponse`` model.
+
+ :param payload: Raw request payload mapping.
+ :returns: Parsed generated create response model.
+ :raises RequestValidationError: If payload is not an object or cannot be parsed.
+ """
+ if not isinstance(payload, Mapping):
+ raise RequestValidationError("request body must be a JSON object", code="invalid_json")
+
+ validator = getattr(_generated_validators, "validate_CreateResponse", None) if _generated_validators else None
+ if callable(validator):
+ validation_errors = validator(payload)
+ if validation_errors:
+ raise RequestValidationError(
+ "request body failed schema validation",
+ code="invalid_request",
+ debug_info={"errors": validation_errors},
+ )
+
+ try:
+ return CreateResponse(payload)
+ except Exception as exc: # pragma: no cover - generated model raises implementation-specific errors.
+ raise RequestValidationError(
+ "request body failed schema validation",
+ code="invalid_request",
+ debug_info={"exception_type": type(exc).__name__, "detail": str(exc)},
+ ) from exc
+
+
+def normalize_create_response(
+ request: CreateResponse,
+ options: ResponsesServerOptions | None,
+) -> CreateResponse:
+ """Apply server-side defaults to a parsed create request model."""
+ if (request.model is None or (isinstance(request.model, str) and not request.model.strip())) and options:
+ request.model = options.default_model
+
+ if isinstance(request.model, str):
+ request.model = request.model.strip() or None
+
+ return request
+
+
+def validate_create_response(request: CreateResponse) -> None:
+ """Validate create request semantics not enforced by generated model typing.
+
+ :raises RequestValidationError: If semantic preconditions are violated.
+ """
+ store_enabled = True if request.store is None else bool(request.store)
+
+ if request.background and not store_enabled:
+ raise RequestValidationError(
+ "background=true requires store=true",
+ code="invalid_mode",
+ param="store",
+ )
+
+ if request.stream_options is not None and request.stream is not True:
+ raise RequestValidationError(
+ "stream_options requires stream=true",
+ code="invalid_mode",
+ param="stream_options",
+ )
+
+ if request.model is None:
+ raise RequestValidationError(
+ "model is required",
+ code="missing_required",
+ param="model",
+ )
+
+
+def parse_and_validate_create_response(
+ payload: Mapping[str, Any],
+ *,
+ options: ResponsesServerOptions | None = None,
+) -> CreateResponse:
+ """Parse, normalize, and validate a create request using generated models."""
+ request = parse_create_response(payload)
+ request = normalize_create_response(request, options)
+ validate_create_response(request)
+ return request
+
+
+def build_api_error_response(
+ message: str,
+ *,
+ code: str,
+ param: str | None = None,
+ error_type: str = "invalid_request_error",
+ debug_info: dict[str, Any] | None = None,
+) -> ApiErrorResponse:
+ """Build a generated ``ApiErrorResponse`` envelope for client-visible failures."""
+ return ApiErrorResponse(
+ error=Error(
+ code=code,
+ message=message,
+ param=param,
+ type=error_type,
+ debug_info=debug_info,
+ )
+ )
+
+
+def to_api_error_response(error: Exception) -> ApiErrorResponse:
+ """Map a Python exception to a generated API error envelope."""
+ if isinstance(error, RequestValidationError):
+ return error.to_api_error_response()
+
+ return build_api_error_response(
+ message="internal server error",
+ code="internal_error",
+ error_type="server_error",
+ debug_info={"exception_type": type(error).__name__},
+ )
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/__init__.py
new file mode 100644
index 000000000000..ca1ef50af391
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/__init__.py
@@ -0,0 +1,32 @@
+"""Canonical non-generated model types for the response server."""
+
+from .errors import RequestValidationError
+try:
+ from .runtime import (
+ ResponseExecution,
+ ResponseModeFlags,
+ ResponseSession,
+ ResponseStatus,
+ StreamEventRecord,
+ StreamReplayState,
+ TerminalResponseStatus,
+ )
+except Exception: # pragma: no cover - allows importing lightweight model errors in isolated test envs.
+ pass
+
+__all__ = [
+ "RequestValidationError",
+]
+
+if "ResponseExecution" in globals():
+ __all__.extend(
+ [
+ "ResponseExecution",
+ "ResponseModeFlags",
+ "ResponseSession",
+ "ResponseStatus",
+ "StreamEventRecord",
+ "StreamReplayState",
+ "TerminalResponseStatus",
+ ]
+ )
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/__init__.py
new file mode 100644
index 000000000000..013008e395b4
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility re-exports for generated models preserved under sdk/models."""
+
+from .sdk.models.models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_enums.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_enums.py
new file mode 100644
index 000000000000..ffeb0d1362db
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_enums.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated enum symbols."""
+
+from .sdk.models.models._enums import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_models.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_models.py
new file mode 100644
index 000000000000..8c6878d69796
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_models.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated model symbols."""
+
+from .sdk.models.models._models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_patch.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_patch.py
new file mode 100644
index 000000000000..3d222c31c566
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_patch.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated patch helpers."""
+
+from .sdk.models.models._patch import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_validators.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_validators.py
new file mode 100644
index 000000000000..b2dfc33c9c4a
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/_validators.py
@@ -0,0 +1,666 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+
+from __future__ import annotations
+
+from typing import Any
+
+try:
+ from . import _enums as _generated_enums
+except Exception:
+ _generated_enums = None
+
+def _append_error(errors: list[dict[str, str]], path: str, message: str) -> None:
+ errors.append({'path': path, 'message': message})
+
+def _type_label(value: Any) -> str:
+ if value is None:
+ return 'null'
+ if isinstance(value, bool):
+ return 'boolean'
+ if isinstance(value, int):
+ return 'integer'
+ if isinstance(value, float):
+ return 'number'
+ if isinstance(value, str):
+ return 'string'
+ if isinstance(value, dict):
+ return 'object'
+ if isinstance(value, list):
+ return 'array'
+ return type(value).__name__
+
+def _is_type(value: Any, expected: str) -> bool:
+ if expected == 'string':
+ return isinstance(value, str)
+ if expected == 'integer':
+ return isinstance(value, int) and not isinstance(value, bool)
+ if expected == 'number':
+ return (isinstance(value, int) and not isinstance(value, bool)) or isinstance(value, float)
+ if expected == 'boolean':
+ return isinstance(value, bool)
+ if expected == 'object':
+ return isinstance(value, dict)
+ if expected == 'array':
+ return isinstance(value, list)
+ return True
+
def _append_type_mismatch(errors: list[dict[str, str]], path: str, expected: str, value: Any) -> None:
    """Record a type-mismatch finding describing the expected vs. actual type."""
    message = f"Expected {expected}, got {_type_label(value)}"
    _append_error(errors, path, message)
+
def _enum_values(enum_name: str) -> tuple[tuple[str, ...] | None, str | None]:
    """Resolve the allowed string values of a generated enum by name.

    Returns ``(values, None)`` on success, or ``(None, reason)`` when the
    generated enums module or the named enum cannot be used.  Enum member
    values are coerced to ``str`` so membership checks compare strings.
    """
    if _generated_enums is None:
        return None, f'enum type _enums.{enum_name} is unavailable'
    enum_cls = getattr(_generated_enums, enum_name, None)
    if enum_cls is None:
        return None, f'enum type _enums.{enum_name} is not defined'
    try:
        values = tuple(str(member.value) for member in enum_cls)
    except Exception:
        return None, f'enum type _enums.{enum_name} failed to load values'
    return values, None
+
def _validate_CreateResponse(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    """Validate a CreateResponse payload object, appending findings to *errors*.

    Each known property present in *value* is dispatched to a per-property
    validator; properties not listed here are ignored (open model).  Several
    structurally identical properties deliberately share one generated
    validator (e.g. nullable-integer, nullable-string) — the shared names
    below are generator dedup, not copy/paste mistakes.
    """
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'agent_reference' in value:
        _validate_CreateResponse_agent_reference(value['agent_reference'], f"{path}.agent_reference", errors)
    if 'background' in value:
        _validate_CreateResponse_background(value['background'], f"{path}.background", errors)
    if 'context_management' in value:
        _validate_CreateResponse_context_management(value['context_management'], f"{path}.context_management", errors)
    if 'conversation' in value:
        _validate_CreateResponse_conversation(value['conversation'], f"{path}.conversation", errors)
    if 'include' in value:
        _validate_CreateResponse_include(value['include'], f"{path}.include", errors)
    if 'input' in value:
        _validate_CreateResponse_input(value['input'], f"{path}.input", errors)
    if 'instructions' in value:
        _validate_CreateResponse_instructions(value['instructions'], f"{path}.instructions", errors)
    if 'max_output_tokens' in value:
        _validate_CreateResponse_max_output_tokens(value['max_output_tokens'], f"{path}.max_output_tokens", errors)
    if 'max_tool_calls' in value:
        # shares the nullable-integer validator with max_output_tokens
        _validate_CreateResponse_max_output_tokens(value['max_tool_calls'], f"{path}.max_tool_calls", errors)
    if 'metadata' in value:
        _validate_CreateResponse_metadata(value['metadata'], f"{path}.metadata", errors)
    if 'model' in value:
        _validate_CreateResponse_model(value['model'], f"{path}.model", errors)
    if 'parallel_tool_calls' in value:
        _validate_CreateResponse_parallel_tool_calls(value['parallel_tool_calls'], f"{path}.parallel_tool_calls", errors)
    if 'previous_response_id' in value:
        # shares the nullable-string validator with instructions
        _validate_CreateResponse_instructions(value['previous_response_id'], f"{path}.previous_response_id", errors)
    if 'prompt' in value:
        _validate_CreateResponse_prompt(value['prompt'], f"{path}.prompt", errors)
    if 'prompt_cache_key' in value:
        _validate_CreateResponse_prompt_cache_key(value['prompt_cache_key'], f"{path}.prompt_cache_key", errors)
    if 'prompt_cache_retention' in value:
        _validate_CreateResponse_prompt_cache_retention(value['prompt_cache_retention'], f"{path}.prompt_cache_retention", errors)
    if 'reasoning' in value:
        _validate_CreateResponse_reasoning(value['reasoning'], f"{path}.reasoning", errors)
    if 'safety_identifier' in value:
        _validate_CreateResponse_safety_identifier(value['safety_identifier'], f"{path}.safety_identifier", errors)
    if 'service_tier' in value:
        _validate_CreateResponse_service_tier(value['service_tier'], f"{path}.service_tier", errors)
    if 'store' in value:
        # shares the nullable-boolean validator with parallel_tool_calls
        _validate_CreateResponse_parallel_tool_calls(value['store'], f"{path}.store", errors)
    if 'stream' in value:
        # shares the nullable-boolean validator with background
        _validate_CreateResponse_background(value['stream'], f"{path}.stream", errors)
    if 'stream_options' in value:
        _validate_CreateResponse_stream_options(value['stream_options'], f"{path}.stream_options", errors)
    if 'structured_inputs' in value:
        _validate_CreateResponse_structured_inputs(value['structured_inputs'], f"{path}.structured_inputs", errors)
    if 'temperature' in value:
        _validate_CreateResponse_temperature(value['temperature'], f"{path}.temperature", errors)
    if 'text' in value:
        _validate_CreateResponse_text(value['text'], f"{path}.text", errors)
    if 'tool_choice' in value:
        _validate_CreateResponse_tool_choice(value['tool_choice'], f"{path}.tool_choice", errors)
    if 'tools' in value:
        _validate_CreateResponse_tools(value['tools'], f"{path}.tools", errors)
    if 'top_logprobs' in value:
        # shares the nullable-integer validator with max_output_tokens
        _validate_CreateResponse_max_output_tokens(value['top_logprobs'], f"{path}.top_logprobs", errors)
    if 'top_p' in value:
        # shares the nullable-number validator with temperature
        _validate_CreateResponse_temperature(value['top_p'], f"{path}.top_p", errors)
    if 'truncation' in value:
        _validate_CreateResponse_truncation(value['truncation'], f"{path}.truncation", errors)
    if 'user' in value:
        _validate_CreateResponse_user(value['user'], f"{path}.user", errors)
+
# ---------------------------------------------------------------------------
# Per-property validators for CreateResponse (generated).
# Shape conventions: "nullable X" validators accept None and otherwise
# type-check; literal-value validators check enum membership, then type
# (both errors can fire for a non-string value — generator behavior).
# ---------------------------------------------------------------------------

def _validate_CreateResponse_agent_reference(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Schema places no constraints on agent_reference; accept any value.
    return

def _validate_CreateResponse_background(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable boolean.
    if value is None:
        return
    if not _is_type(value, 'boolean'):
        _append_type_mismatch(errors, path, 'boolean', value)
        return

def _validate_CreateResponse_context_management(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable array of ContextManagementParam items.
    if value is None:
        return
    if not _is_type(value, 'array'):
        _append_type_mismatch(errors, path, 'array', value)
        return
    for _idx, _item in enumerate(value):
        _validate_CreateResponse_context_management_item(_item, f"{path}[{_idx}]", errors)

def _validate_CreateResponse_conversation(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Only nullability is enforced; any non-None value passes here.
    if value is None:
        return

def _validate_CreateResponse_include(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable array of IncludeEnum values.
    if value is None:
        return
    if not _is_type(value, 'array'):
        _append_type_mismatch(errors, path, 'array', value)
        return
    for _idx, _item in enumerate(value):
        _validate_CreateResponse_include_item(_item, f"{path}[{_idx}]", errors)

def _validate_CreateResponse_input(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Delegates to the shared OpenAI.InputParam union validator.
    _validate_OpenAI_InputParam(value, path, errors)

def _validate_CreateResponse_instructions(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable string (also reused for previous_response_id and Prompt.version).
    if value is None:
        return
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
        return

def _validate_CreateResponse_max_output_tokens(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable integer (also reused for max_tool_calls, top_logprobs, compact_threshold).
    if value is None:
        return
    if not _is_type(value, 'integer'):
        _append_type_mismatch(errors, path, 'integer', value)
        return

def _validate_CreateResponse_metadata(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable object; member values are unconstrained here.
    if value is None:
        return
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return

def _validate_CreateResponse_model(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Non-nullable string.
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
        return

def _validate_CreateResponse_parallel_tool_calls(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable boolean (also reused for store).
    if value is None:
        return
    if not _is_type(value, 'boolean'):
        _append_type_mismatch(errors, path, 'boolean', value)
        return

def _validate_CreateResponse_prompt(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_Prompt(value, path, errors)

def _validate_CreateResponse_prompt_cache_key(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
        return

def _validate_CreateResponse_prompt_cache_retention(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable literal union: 'in-memory' | '24h'.
    # NOTE(review): a non-string value triggers both the membership error and
    # the type error — matches the generator's pattern throughout this file.
    if value is None:
        return
    _allowed_values = ('in-memory', '24h')
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_CreateResponse_reasoning(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable object; inner reasoning options are not validated here.
    if value is None:
        return
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return

def _validate_CreateResponse_safety_identifier(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
        return

def _validate_CreateResponse_service_tier(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_ServiceTier(value, path, errors)

def _validate_CreateResponse_stream_options(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if value is None:
        return
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return

def _validate_CreateResponse_structured_inputs(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Object with only additional (free-form) properties: no named keys are
    # declared, so the `_key not in ()` guard is vacuously true for every key.
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    for _key, _item in value.items():
        if _key not in ():
            _validate_CreateResponse_structured_inputs_additional_property(_item, f"{path}.{_key}", errors)

def _validate_CreateResponse_temperature(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable number (also reused for top_p).
    if value is None:
        return
    if not _is_type(value, 'number'):
        _append_type_mismatch(errors, path, 'number', value)
        return

def _validate_CreateResponse_text(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_ResponseTextParam(value, path, errors)

def _validate_CreateResponse_tool_choice(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # anyOf union: a string must match ToolChoiceOptions, an object must match
    # ToolChoiceParam; branch errors are discarded and replaced by one summary.
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_ToolChoiceOptions(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'object'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_ToolChoiceParam(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected one of: OpenAI.ToolChoiceOptions, OpenAI.ToolChoiceParam; got {_type_label(value)}")
    return

def _validate_CreateResponse_tools(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_ToolsArray(value, path, errors)

def _validate_CreateResponse_truncation(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable literal union: 'auto' | 'disabled'.
    if value is None:
        return
    _allowed_values = ('auto', 'disabled')
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_CreateResponse_user(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_CreateResponse_context_management_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_ContextManagementParam(value, path, errors)

def _validate_CreateResponse_include_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_IncludeEnum(value, path, errors)
+
# ---------------------------------------------------------------------------
# Shared OpenAI.* schema validators (generated).
# ---------------------------------------------------------------------------

def _validate_OpenAI_InputParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # anyOf union: plain string, or array of InputItem.
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'array'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_array(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected one of: string, array; got {_type_label(value)}")
    return

def _validate_OpenAI_Prompt(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Object with required 'id'; 'version' reuses the nullable-string validator.
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'id' not in value:
        _append_error(errors, f"{path}.id", "Required property 'id' is missing")
    if 'id' in value:
        _validate_OpenAI_Prompt_id(value['id'], f"{path}.id", errors)
    if 'variables' in value:
        _validate_OpenAI_Prompt_variables(value['variables'], f"{path}.variables", errors)
    if 'version' in value:
        _validate_CreateResponse_instructions(value['version'], f"{path}.version", errors)

def _validate_OpenAI_ServiceTier(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable enum resolved from the generated _enums module at runtime.
    if value is None:
        return
    _allowed_values, _enum_error = _enum_values('ServiceTier')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_CreateResponse_structured_inputs_additional_property(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Additional properties are unconstrained; accept any value.
    return

def _validate_OpenAI_ResponseTextParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'format' in value:
        _validate_OpenAI_ResponseTextParam_format(value['format'], f"{path}.format", errors)
    if 'verbosity' in value:
        _validate_OpenAI_ResponseTextParam_verbosity(value['verbosity'], f"{path}.verbosity", errors)

def _validate_OpenAI_ToolChoiceOptions(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _allowed_values, _enum_error = _enum_values('ToolChoiceOptions')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_ToolChoiceParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Discriminated object: 'type' is required and must be a string.
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'type' not in value:
        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
    if 'type' in value:
        _validate_OpenAI_ToolChoiceParam_type(value['type'], f"{path}.type", errors)
    _disc_value = value.get('type')
    if not isinstance(_disc_value, str):
        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
    return

def _validate_OpenAI_ToolsArray(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'array'):
        _append_type_mismatch(errors, path, 'array', value)
        return
    for _idx, _item in enumerate(value):
        _validate_OpenAI_ToolsArray_item(_item, f"{path}[{_idx}]", errors)

def _validate_OpenAI_ContextManagementParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Object with required 'type'; compact_threshold reuses the nullable-int check.
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'type' not in value:
        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
    if 'compact_threshold' in value:
        _validate_CreateResponse_max_output_tokens(value['compact_threshold'], f"{path}.compact_threshold", errors)
    if 'type' in value:
        _validate_OpenAI_ContextManagementParam_type(value['type'], f"{path}.type", errors)

def _validate_OpenAI_IncludeEnum(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # anyOf union of plain string and the IncludeEnum values.
    # NOTE(review): the first branch accepts ANY string, so the enum-membership
    # branch below appears unreachable for string input — presumably the spec
    # models this as an open enum; confirm against the TypeSpec source.
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_IncludeEnum_2(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected IncludeEnum to be a string value, got {_type_label(value)}")
    return

def _validate_OpenAI_InputParam_string(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_InputParam_array(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'array'):
        _append_type_mismatch(errors, path, 'array', value)
        return
    for _idx, _item in enumerate(value):
        _validate_OpenAI_InputParam_array_item(_item, f"{path}[{_idx}]", errors)

def _validate_OpenAI_Prompt_id(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_Prompt_variables(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if value is None:
        return
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return

def _validate_OpenAI_ResponseTextParam_format(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_TextResponseFormatConfiguration(value, path, errors)

def _validate_OpenAI_ResponseTextParam_verbosity(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_Verbosity(value, path, errors)

def _validate_OpenAI_ToolChoiceParam_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_ToolChoiceParamType(value, path, errors)

def _validate_OpenAI_ToolsArray_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_Tool(value, path, errors)

def _validate_OpenAI_ContextManagementParam_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_IncludeEnum_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _allowed_values, _enum_error = _enum_values('IncludeEnum')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_InputParam_array_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_InputItem(value, path, errors)
+
# ---------------------------------------------------------------------------
# Discriminated-object and open-enum validators (generated).
# The "...Type" union validators accept any string via the first branch, so
# their "_2" enum branches are effectively advisory — presumably modeling
# open/extensible enums; confirm against the TypeSpec source.
# ---------------------------------------------------------------------------

def _validate_OpenAI_TextResponseFormatConfiguration(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Discriminated object: 'type' required and must be a string.
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'type' not in value:
        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
    if 'type' in value:
        _validate_OpenAI_TextResponseFormatConfiguration_type(value['type'], f"{path}.type", errors)
    _disc_value = value.get('type')
    if not isinstance(_disc_value, str):
        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
    return

def _validate_OpenAI_Verbosity(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    # Nullable enum resolved from the generated _enums module.
    if value is None:
        return
    _allowed_values, _enum_error = _enum_values('Verbosity')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_ToolChoiceParamType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_ToolChoiceParamType_2(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected ToolChoiceParamType to be a string value, got {_type_label(value)}")
    return

def _validate_OpenAI_Tool(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'type' not in value:
        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
    if 'type' in value:
        _validate_OpenAI_Tool_type(value['type'], f"{path}.type", errors)
    _disc_value = value.get('type')
    if not isinstance(_disc_value, str):
        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
    return

def _validate_OpenAI_InputItem(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    if not _is_type(value, 'object'):
        _append_type_mismatch(errors, path, 'object', value)
        return
    if 'type' not in value:
        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
    if 'type' in value:
        _validate_OpenAI_InputItem_type(value['type'], f"{path}.type", errors)
    _disc_value = value.get('type')
    if not isinstance(_disc_value, str):
        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
    return

def _validate_OpenAI_TextResponseFormatConfiguration_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_TextResponseFormatConfigurationType(value, path, errors)

def _validate_OpenAI_ToolChoiceParamType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _allowed_values, _enum_error = _enum_values('ToolChoiceParamType')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_Tool_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_ToolType(value, path, errors)

def _validate_OpenAI_InputItem_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _validate_OpenAI_InputItemType(value, path, errors)

def _validate_OpenAI_TextResponseFormatConfigurationType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_TextResponseFormatConfigurationType_2(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected TextResponseFormatConfigurationType to be a string value, got {_type_label(value)}")
    return

def _validate_OpenAI_ToolType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_ToolType_2(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected ToolType to be a string value, got {_type_label(value)}")
    return

def _validate_OpenAI_InputItemType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _matched_union = False
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_0: list[dict[str, str]] = []
        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
        if not _branch_errors_0:
            _matched_union = True
    if not _matched_union and _is_type(value, 'string'):
        _branch_errors_1: list[dict[str, str]] = []
        _validate_OpenAI_InputItemType_2(value, path, _branch_errors_1)
        if not _branch_errors_1:
            _matched_union = True
    if not _matched_union:
        _append_error(errors, path, f"Expected InputItemType to be a string value, got {_type_label(value)}")
    return

def _validate_OpenAI_TextResponseFormatConfigurationType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _allowed_values, _enum_error = _enum_values('TextResponseFormatConfigurationType')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_ToolType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _allowed_values, _enum_error = _enum_values('ToolType')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return

def _validate_OpenAI_InputItemType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
    _allowed_values, _enum_error = _enum_values('InputItemType')
    if _enum_error is not None:
        _append_error(errors, path, _enum_error)
        return
    if _allowed_values is None:
        return
    if value not in _allowed_values:
        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
    if not _is_type(value, 'string'):
        _append_type_mismatch(errors, path, 'string', value)
    return
+
# Names of the root schemas this module can validate.
ROOT_SCHEMAS = ['CreateResponse']

class CreateResponseValidator:
    """Generated validator for the root schema."""

    @staticmethod
    def validate(payload: Any) -> list[dict[str, str]]:
        # Collect every finding from the root down; an empty list means valid.
        errors: list[dict[str, str]] = []
        _validate_CreateResponse(payload, '$', errors)
        return errors

def validate_CreateResponse(payload: Any) -> list[dict[str, str]]:
    """Module-level convenience wrapper around :class:`CreateResponseValidator`."""
    return CreateResponseValidator.validate(payload)
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/__init__.py
new file mode 100644
index 000000000000..784a3edcc881
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Model-only generated package surface."""
+
+from .models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_patch.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_patch.py
new file mode 100644
index 000000000000..87676c65a8f0
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_patch.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+
+
__all__: list[str] = []  # Add all objects you want publicly available to users at this package level


def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
    # Intentionally a no-op: the codegen pipeline imports and calls this hook
    # even when no customizations are defined.
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_types.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_types.py
new file mode 100644
index 000000000000..c99439ce635a
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_types.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+# Shared type aliases for the generated models.  Model names are written as
+# string forward references and only imported under TYPE_CHECKING, so this
+# module performs no runtime import of the models package.
+from typing import Any, TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+    from . import models as _models
+# Vector-store filter: a single comparison or a boolean combination of them.
+Filters = Union["_models.ComparisonFilter", "_models.CompoundFilter"]
+# Tool call output: structured (dict/list) or plain text.
+ToolCallOutputContent = Union[dict[str, Any], str, list[Any]]
+# Request input: free text or an explicit list of items.
+InputParam = Union[str, list["_models.Item"]]
+# Conversation reference: an id string or the full parameter model.
+ConversationParam = Union[str, "_models.ConversationParam_2"]
+# Every server-sent event type a streaming create-response call can emit.
+CreateResponseStreamingResponse = Union[
+    "_models.ResponseAudioDeltaEvent",
+    "_models.ResponseAudioTranscriptDeltaEvent",
+    "_models.ResponseCodeInterpreterCallCodeDeltaEvent",
+    "_models.ResponseCodeInterpreterCallInProgressEvent",
+    "_models.ResponseCodeInterpreterCallInterpretingEvent",
+    "_models.ResponseContentPartAddedEvent",
+    "_models.ResponseCreatedEvent",
+    "_models.ResponseErrorEvent",
+    "_models.ResponseFileSearchCallInProgressEvent",
+    "_models.ResponseFileSearchCallSearchingEvent",
+    "_models.ResponseFunctionCallArgumentsDeltaEvent",
+    "_models.ResponseInProgressEvent",
+    "_models.ResponseFailedEvent",
+    "_models.ResponseIncompleteEvent",
+    "_models.ResponseOutputItemAddedEvent",
+    "_models.ResponseReasoningSummaryPartAddedEvent",
+    "_models.ResponseReasoningSummaryTextDeltaEvent",
+    "_models.ResponseReasoningTextDeltaEvent",
+    "_models.ResponseRefusalDeltaEvent",
+    "_models.ResponseTextDeltaEvent",
+    "_models.ResponseWebSearchCallInProgressEvent",
+    "_models.ResponseWebSearchCallSearchingEvent",
+    "_models.ResponseImageGenCallGeneratingEvent",
+    "_models.ResponseImageGenCallInProgressEvent",
+    "_models.ResponseImageGenCallPartialImageEvent",
+    "_models.ResponseMCPCallArgumentsDeltaEvent",
+    "_models.ResponseMCPCallFailedEvent",
+    "_models.ResponseMCPCallInProgressEvent",
+    "_models.ResponseMCPListToolsFailedEvent",
+    "_models.ResponseMCPListToolsInProgressEvent",
+    "_models.ResponseOutputTextAnnotationAddedEvent",
+    "_models.ResponseQueuedEvent",
+    "_models.ResponseCustomToolCallInputDeltaEvent",
+    "_models.ResponseAudioDoneEvent",
+    "_models.ResponseAudioTranscriptDoneEvent",
+    "_models.ResponseCodeInterpreterCallCodeDoneEvent",
+    "_models.ResponseCodeInterpreterCallCompletedEvent",
+    "_models.ResponseCompletedEvent",
+    "_models.ResponseContentPartDoneEvent",
+    "_models.ResponseFileSearchCallCompletedEvent",
+    "_models.ResponseFunctionCallArgumentsDoneEvent",
+    "_models.ResponseOutputItemDoneEvent",
+    "_models.ResponseReasoningSummaryPartDoneEvent",
+    "_models.ResponseReasoningSummaryTextDoneEvent",
+    "_models.ResponseReasoningTextDoneEvent",
+    "_models.ResponseRefusalDoneEvent",
+    "_models.ResponseTextDoneEvent",
+    "_models.ResponseWebSearchCallCompletedEvent",
+    "_models.ResponseImageGenCallCompletedEvent",
+    "_models.ResponseMCPCallArgumentsDoneEvent",
+    "_models.ResponseMCPCallCompletedEvent",
+    "_models.ResponseMCPListToolsCompletedEvent",
+    "_models.ResponseCustomToolCallInputDoneEvent",
+]
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/__init__.py
new file mode 100644
index 000000000000..8026245c2abc
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/__init__.py
@@ -0,0 +1,6 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/model_base.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/model_base.py
new file mode 100644
index 000000000000..a75a22adbb97
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/model_base.py
@@ -0,0 +1,1368 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access, broad-except
+
+import copy
+import calendar
+import decimal
+import functools
+import sys
+import logging
+import base64
+import re
+import typing
+import enum
+import email.utils
+from datetime import datetime, date, time, timedelta, timezone
+from json import JSONEncoder
+import xml.etree.ElementTree as ET
+from collections.abc import MutableMapping
+from typing_extensions import Self
+import isodate
+from azure.core.exceptions import DeserializationError
+from azure.core import CaseInsensitiveEnumMeta
+from azure.core.pipeline import PipelineResponse
+from azure.core.serialization import _Null
+from azure.core.rest import HttpResponse
+
+_LOGGER = logging.getLogger(__name__)
+
+# Names this helper module exposes publicly.
+__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
+
+TZ_UTC = timezone.utc  # canonical UTC tzinfo used throughout (de)serialization
+_T = typing.TypeVar("_T")
+_NONE_TYPE = type(None)
+
+
+def _timedelta_as_isostr(td: timedelta) -> str:
+ """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
+
+ Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+ :param timedelta td: The timedelta to convert
+ :rtype: str
+ :return: ISO8601 version of this timedelta
+ """
+
+ # Split seconds to larger units
+ seconds = td.total_seconds()
+ minutes, seconds = divmod(seconds, 60)
+ hours, minutes = divmod(minutes, 60)
+ days, hours = divmod(hours, 24)
+
+ days, hours, minutes = list(map(int, (days, hours, minutes)))
+ seconds = round(seconds, 6)
+
+ # Build date
+ date_str = ""
+ if days:
+ date_str = "%sD" % days
+
+ if hours or minutes or seconds:
+ # Build time
+ time_str = "T"
+
+ # Hours
+ bigger_exists = date_str or hours
+ if bigger_exists:
+ time_str += "{:02}H".format(hours)
+
+ # Minutes
+ bigger_exists = bigger_exists or minutes
+ if bigger_exists:
+ time_str += "{:02}M".format(minutes)
+
+ # Seconds
+ try:
+ if seconds.is_integer():
+ seconds_string = "{:02}".format(int(seconds))
+ else:
+ # 9 chars long w/ leading 0, 6 digits after decimal
+ seconds_string = "%09.6f" % seconds
+ # Remove trailing zeros
+ seconds_string = seconds_string.rstrip("0")
+ except AttributeError: # int.is_integer() raises
+ seconds_string = "{:02}".format(seconds)
+
+ time_str += "{}S".format(seconds_string)
+ else:
+ time_str = ""
+
+ return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+ encoded = base64.b64encode(o).decode()
+ if format == "base64url":
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
+ return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+ if hasattr(o, "year") and hasattr(o, "hour"):
+ if format == "rfc7231":
+ return email.utils.format_datetime(o, usegmt=True)
+ if format == "unix-timestamp":
+ return int(calendar.timegm(o.utctimetuple()))
+
+ # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set)
+ if not o.tzinfo:
+ iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+ else:
+ iso_formatted = o.astimezone(TZ_UTC).isoformat()
+ # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+ return iso_formatted.replace("+00:00", "Z")
+ # Next try datetime.date or datetime.time
+ return o.isoformat()
+
+
+def _is_readonly(p):
+ try:
+ return p._visibility == ["read"]
+ except AttributeError:
+ return False
+
+
+class SdkJSONEncoder(JSONEncoder):
+    """A JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        # When True, properties whose visibility is exactly ["read"] are dropped.
+        self.exclude_readonly = exclude_readonly
+        # Wire format applied to bytes/datetime values (e.g. "base64url", "unix-timestamp").
+        self.format = format
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        # Generated models serialize as their underlying data dict.
+        if _is_model(o):
+            if self.exclude_readonly:
+                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+                return {k: v for k, v in o.items() if k not in readonly_props}
+            return dict(o.items())
+        # Defer to the stdlib first; on TypeError probe the SDK-specific types.
+        try:
+            return super(SdkJSONEncoder, self).default(o)
+        except TypeError:
+            if isinstance(o, _Null):
+                return None
+            if isinstance(o, decimal.Decimal):
+                return float(o)
+            if isinstance(o, (bytes, bytearray)):
+                return _serialize_bytes(o, self.format)
+            try:
+                # First try datetime.datetime
+                return _serialize_datetime(o, self.format)
+            except AttributeError:
+                pass
+            # Last, try datetime.timedelta
+            try:
+                return _timedelta_as_isostr(o)
+            except AttributeError:
+                # This will be raised when it hits value.total_seconds in the method above
+                pass
+            # Nothing matched: let the stdlib raise its usual TypeError.
+            return super(SdkJSONEncoder, self).default(o)
+
+
+# Lenient sanity check for RFC 3339-ish datetimes; real parsing is left to isodate.
+# NOTE(review): "[\d{2}]" is a character class (digits plus '{', '2', '}'), not a
+# two-digit group — presumably "\d{2}" was intended. The looser pattern still
+# accepts all valid offsets, so it is kept as generated.
+_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+# RFC 7231 IMF-fixdate, e.g. "Mon, 02 Jan 2024 03:04:05 GMT".
+_VALID_RFC7231 = re.compile(
+    r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
+    r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
+)
+
+# Wire-format name -> delimiter used to join/split array-encoded string values.
+_ARRAY_ENCODE_MAPPING = {
+    "pipeDelimited": "|",
+    "spaceDelimited": " ",
+    "commaDelimited": ",",
+    "newlineDelimited": "\n",
+}
+
+
+def _deserialize_array_encoded(delimit: str, attr):
+ if isinstance(attr, str):
+ if attr == "":
+ return []
+ return attr.split(delimit)
+ return attr
+
+
+def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
+    """Deserialize ISO-8601 formatted string into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # i'm already deserialized
+        return attr
+    # Normalize case so lowercase 't'/'z' separators match the pattern.
+    attr = attr.upper()
+    match = _VALID_DATE.match(attr)
+    if not match:
+        raise ValueError("Invalid datetime string: " + attr)
+
+    # Truncate fractional seconds to at most 6 digits (microsecond precision).
+    # NOTE(review): str.replace swaps the FIRST occurrence of the digit run,
+    # assumed to be the fraction itself — verify for pathological inputs.
+    check_decimal = attr.split(".")
+    if len(check_decimal) > 1:
+        decimal_str = ""
+        for digit in check_decimal[1]:
+            if digit.isdigit():
+                decimal_str += digit
+            else:
+                break
+        if len(decimal_str) > 6:
+            attr = attr.replace(decimal_str, decimal_str[0:6])
+
+    date_obj = isodate.parse_datetime(attr)
+    # Reject years outside 1..9999 once normalized to UTC.
+    test_utc = date_obj.utctimetuple()
+    if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+        raise OverflowError("Hit max or min date")
+    return date_obj  # type: ignore[no-any-return]
+
+
+def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
+    """Deserialize RFC7231 formatted string into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # i'm already deserialized
+        return attr
+    # Cheap shape check (e.g. "Mon, 02 Jan 2024 03:04:05 GMT") before parsing.
+    match = _VALID_RFC7231.match(attr)
+    if not match:
+        raise ValueError("Invalid datetime string: " + attr)
+
+    return email.utils.parsedate_to_datetime(attr)
+
+
+def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
+ """Deserialize unix timestamp into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: ~datetime.datetime
+ :returns: The datetime object from that input
+ """
+ if isinstance(attr, datetime):
+ # i'm already deserialized
+ return attr
+ return datetime.fromtimestamp(attr, TZ_UTC)
+
+
+def _deserialize_date(attr: typing.Union[str, date]) -> date:
+    """Deserialize ISO-8601 formatted string into Date object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: date
+    :returns: The date object from that input
+    """
+    # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+    if isinstance(attr, date):
+        return attr
+    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)  # type: ignore
+
+
+def _deserialize_time(attr: typing.Union[str, time]) -> time:
+    """Deserialize ISO-8601 formatted string into time object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: datetime.time
+    :returns: The time object from that input
+    """
+    if isinstance(attr, time):
+        # Already deserialized.
+        return attr
+    return isodate.parse_time(attr)  # type: ignore[no-any-return]
+
+
+def _deserialize_bytes(attr):
+ if isinstance(attr, (bytes, bytearray)):
+ return attr
+ return bytes(base64.b64decode(attr))
+
+
+def _deserialize_bytes_base64(attr):
+ if isinstance(attr, (bytes, bytearray)):
+ return attr
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
+ encoded = attr.replace("-", "+").replace("_", "/")
+ return bytes(base64.b64decode(encoded))
+
+
+def _deserialize_duration(attr):
+    """Parse an ISO-8601 duration string via isodate; timedeltas pass through."""
+    if isinstance(attr, timedelta):
+        return attr
+    return isodate.parse_duration(attr)
+
+
+def _deserialize_decimal(attr):
+ if isinstance(attr, decimal.Decimal):
+ return attr
+ return decimal.Decimal(str(attr))
+
+
+def _deserialize_int_as_str(attr):
+ if isinstance(attr, int):
+ return attr
+ return int(attr)
+
+
+# Default deserializer per Python target type, used when the rest field
+# declares no explicit wire format.
+_DESERIALIZE_MAPPING = {
+    datetime: _deserialize_datetime,
+    date: _deserialize_date,
+    time: _deserialize_time,
+    bytes: _deserialize_bytes,
+    bytearray: _deserialize_bytes,
+    timedelta: _deserialize_duration,
+    typing.Any: lambda x: x,  # Any: identity, no conversion
+    decimal.Decimal: _deserialize_decimal,
+}
+
+# Deserializer per explicit wire-format tag; takes precedence over the
+# type-based mapping above (see get_deserializer).
+_DESERIALIZE_MAPPING_WITHFORMAT = {
+    "rfc3339": _deserialize_datetime,
+    "rfc7231": _deserialize_datetime_rfc7231,
+    "unix-timestamp": _deserialize_datetime_unix_timestamp,
+    "base64": _deserialize_bytes,
+    "base64url": _deserialize_bytes_base64,
+}
+
+
+def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
+    """Pick the deserializer for an annotation, honoring the rest field's format.
+
+    Precedence: int-encoded-as-str, array-encoded strings, explicit format tag,
+    then the annotation-based default mapping (None if nothing matches).
+    """
+    if annotation is int and rf and rf._format == "str":
+        return _deserialize_int_as_str
+    if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING:
+        return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format])
+    if rf and rf._format:
+        return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
+    return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore
+
+
+def _get_type_alias_type(module_name: str, alias_name: str):
+ types = {
+ k: v
+ for k, v in sys.modules[module_name].__dict__.items()
+ if isinstance(v, typing._GenericAlias) # type: ignore
+ }
+ if alias_name not in types:
+ return alias_name
+ return types[alias_name]
+
+
+def _get_model(module_name: str, model_name: str):
+    """Resolve *model_name* to a class from *module_name* or its parent module.
+
+    Returns the name unchanged (a string) when no matching class is found.
+    """
+    models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
+    # Also merge the parent module's namespace — presumably where generated
+    # models are re-exported; TODO(review) confirm.
+    module_end = module_name.rsplit(".", 1)[0]
+    models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
+    if isinstance(model_name, str):
+        # Dotted references (e.g. "_models.Foo") resolve by their last segment.
+        model_name = model_name.split(".")[-1]
+    if model_name not in models:
+        return model_name
+    return models[model_name]
+
+
+# Sentinel meaning "no value supplied", distinguishable from an explicit None.
+_UNSET = object()
+
+
+class _MyMutableMapping(MutableMapping[str, typing.Any]):
+    """Dict-backed mutable mapping that Model builds on.
+
+    Every operation delegates to the underlying ``self._data`` dict.  Item
+    access additionally cooperates with per-key ``_deserialized_<key>`` flag
+    attributes (set elsewhere, presumably by rest-field descriptors — not
+    shown in this module section) so values handed out through ``[]`` are in
+    serialized (wire) form.
+    """
+
+    def __init__(self, data: dict[str, typing.Any]) -> None:
+        self._data = data
+
+    def __contains__(self, key: typing.Any) -> bool:
+        return key in self._data
+
+    def __getitem__(self, key: str) -> typing.Any:
+        # If this key has been deserialized (for mutable types), we need to handle serialization
+        if hasattr(self, "_attr_to_rest_field"):
+            cache_attr = f"_deserialized_{key}"
+            if hasattr(self, cache_attr):
+                rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
+                if rf:
+                    value = self._data.get(key)
+                    if isinstance(value, (dict, list, set)):
+                        # For mutable types, serialize and return
+                        # But also update _data with serialized form and clear flag
+                        # so mutations via this returned value affect _data
+                        serialized = _serialize(value, rf._format)
+                        # If serialized form is same type (no transformation needed),
+                        # return _data directly so mutations work
+                        if isinstance(serialized, type(value)) and serialized == value:
+                            return self._data.get(key)
+                        # Otherwise return serialized copy and clear flag
+                        try:
+                            object.__delattr__(self, cache_attr)
+                        except AttributeError:
+                            pass
+                        # Store serialized form back
+                        self._data[key] = serialized
+                        return serialized
+        return self._data.__getitem__(key)
+
+    def __setitem__(self, key: str, value: typing.Any) -> None:
+        # Clear any cached deserialized value when setting through dictionary access
+        cache_attr = f"_deserialized_{key}"
+        try:
+            object.__delattr__(self, cache_attr)
+        except AttributeError:
+            pass
+        self._data.__setitem__(key, value)
+
+    def __delitem__(self, key: str) -> None:
+        self._data.__delitem__(key)
+
+    def __iter__(self) -> typing.Iterator[typing.Any]:
+        return self._data.__iter__()
+
+    def __len__(self) -> int:
+        return self._data.__len__()
+
+    def __ne__(self, other: typing.Any) -> bool:
+        return not self.__eq__(other)
+
+    def keys(self) -> typing.KeysView[str]:
+        """
+        :returns: a set-like object providing a view on D's keys
+        :rtype: ~typing.KeysView
+        """
+        return self._data.keys()
+
+    def values(self) -> typing.ValuesView[typing.Any]:
+        """
+        :returns: an object providing a view on D's values
+        :rtype: ~typing.ValuesView
+        """
+        return self._data.values()
+
+    def items(self) -> typing.ItemsView[str, typing.Any]:
+        """
+        :returns: set-like object providing a view on D's items
+        :rtype: ~typing.ItemsView
+        """
+        return self._data.items()
+
+    def get(self, key: str, default: typing.Any = None) -> typing.Any:
+        """
+        Get the value for key if key is in the dictionary, else default.
+        :param str key: The key to look up.
+        :param any default: The value to return if key is not in the dictionary. Defaults to None
+        :returns: D[k] if k in D, else d.
+        :rtype: any
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    @typing.overload
+    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ
+
+    @typing.overload
+    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs
+
+    @typing.overload
+    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Removes specified key and return the corresponding value.
+        :param str key: The key to pop.
+        :param any default: The value to return if key is not in the dictionary
+        :returns: The value corresponding to the key.
+        :rtype: any
+        :raises KeyError: If key is not found and default is not given.
+        """
+        # _UNSET (not None) marks "no default", so KeyError propagates correctly.
+        if default is _UNSET:
+            return self._data.pop(key)
+        return self._data.pop(key, default)
+
+    def popitem(self) -> tuple[str, typing.Any]:
+        """
+        Removes and returns some (key, value) pair
+        :returns: The (key, value) pair.
+        :rtype: tuple
+        :raises KeyError: if D is empty.
+        """
+        return self._data.popitem()
+
+    def clear(self) -> None:
+        """
+        Remove all items from D.
+        """
+        self._data.clear()
+
+    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
+        """
+        Updates D from mapping/iterable E and F.
+        :param any args: Either a mapping object or an iterable of key-value pairs.
+        """
+        self._data.update(*args, **kwargs)
+
+    @typing.overload
+    def setdefault(self, key: str, default: None = None) -> None: ...
+
+    @typing.overload
+    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Same as calling D.get(k, d), and setting D[k]=d if k not found
+        :param str key: The key to look up.
+        :param any default: The value to set if key is not in the dictionary
+        :returns: D[k] if k in D, else d.
+        :rtype: any
+        """
+        if default is _UNSET:
+            return self._data.setdefault(key)
+        return self._data.setdefault(key, default)
+
+    def __eq__(self, other: typing.Any) -> bool:
+        if isinstance(other, _MyMutableMapping):
+            return self._data == other._data
+        # Fall back to coercing the other operand through this mapping type.
+        try:
+            other_model = self.__class__(other)
+        except Exception:
+            return False
+        return self._data == other_model._data
+
+    def __repr__(self) -> str:
+        return str(self._data)
+
+
+def _is_model(obj: typing.Any) -> bool:
+ return getattr(obj, "_is_model", False)
+
+
+def _serialize(o, format: typing.Optional[str] = None):  # pylint: disable=too-many-return-statements
+    """Recursively convert a Python value into its JSON/wire representation.
+
+    ``format`` threads through containers and controls bytes/datetime/int
+    encodings (e.g. "base64url", "unix-timestamp", "str").
+    """
+    if isinstance(o, list):
+        # Array-encoded formats collapse a list of strings into one delimited string.
+        if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o):
+            return _ARRAY_ENCODE_MAPPING[format].join(o)
+        return [_serialize(x, format) for x in o]
+    if isinstance(o, dict):
+        return {k: _serialize(v, format) for k, v in o.items()}
+    if isinstance(o, set):
+        return {_serialize(x, format) for x in o}
+    if isinstance(o, tuple):
+        return tuple(_serialize(x, format) for x in o)
+    if isinstance(o, (bytes, bytearray)):
+        return _serialize_bytes(o, format)
+    if isinstance(o, decimal.Decimal):
+        return float(o)
+    # Enum is checked before int: IntEnum members are also ints and must
+    # serialize by value.
+    if isinstance(o, enum.Enum):
+        return o.value
+    if isinstance(o, int):
+        if format == "str":
+            return str(o)
+        return o
+    try:
+        # First try datetime.datetime
+        return _serialize_datetime(o, format)
+    except AttributeError:
+        pass
+    # Last, try datetime.timedelta
+    try:
+        return _timedelta_as_isostr(o)
+    except AttributeError:
+        # This will be raised when it hits value.total_seconds in the method above
+        pass
+    # Anything else is passed through untouched.
+    return o
+
+
+def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]:
+ try:
+ return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
+ except StopIteration:
+ return None
+
+
+def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
+    """Prepare a user-supplied value for storage in a model's _data dict."""
+    if not rf:
+        # Unknown key (additional property): plain serialization.
+        return _serialize(value, None)
+    if rf._is_multipart_file_input:
+        # File inputs are stored untouched.
+        return value
+    if rf._is_model:
+        return _deserialize(rf._type, value)
+    if isinstance(value, ET.Element):
+        # XML payloads are deserialized first, then re-serialized below.
+        value = _deserialize(rf._type, value)
+    return _serialize(value, rf._format)
+
+
+class Model(_MyMutableMapping):
+    """Base for generated models: wire-named data dict plus attribute descriptors.
+
+    State lives in ``self._data`` keyed by REST (wire) names; the class-level
+    ``_attr_to_rest_field`` map (built lazily, once per subclass, in ``__new__``)
+    ties Python attribute names to their rest-field descriptor objects.
+    """
+
+    _is_model = True
+    # label whether current class's _attr_to_rest_field has been calculated
+    # could not see _attr_to_rest_field directly because subclass inherits it from parent class
+    _calculated: set[str] = set()
+
+    def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
+        class_name = self.__class__.__name__
+        if len(args) > 1:
+            raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given")
+        # Seed with declared defaults (keyed by wire name), then overlay input.
+        dict_to_pass = {
+            rest_field._rest_name: rest_field._default
+            for rest_field in self._attr_to_rest_field.values()
+            if rest_field._default is not _UNSET
+        }
+        if args:  # pylint: disable=too-many-nested-blocks
+            if isinstance(args[0], ET.Element):
+                existed_attr_keys = []
+                model_meta = getattr(self, "_xml", {})
+
+                for rf in self._attr_to_rest_field.values():
+                    prop_meta = getattr(rf, "_xml", {})
+                    xml_name = prop_meta.get("name", rf._rest_name)
+                    xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
+                    if xml_ns:
+                        xml_name = "{" + xml_ns + "}" + xml_name
+
+                    # attribute
+                    if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
+                        existed_attr_keys.append(xml_name)
+                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
+                        continue
+
+                    # unwrapped element is array
+                    if prop_meta.get("unwrapped", False):
+                        # unwrapped array could either use prop items meta/prop meta
+                        if prop_meta.get("itemsName"):
+                            xml_name = prop_meta.get("itemsName")
+                            xml_ns = prop_meta.get("itemNs")
+                            if xml_ns:
+                                xml_name = "{" + xml_ns + "}" + xml_name
+                        items = args[0].findall(xml_name)  # pyright: ignore
+                        if len(items) > 0:
+                            existed_attr_keys.append(xml_name)
+                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
+                        elif not rf._is_optional:
+                            # Required arrays default to empty rather than missing.
+                            existed_attr_keys.append(xml_name)
+                            dict_to_pass[rf._rest_name] = []
+                        continue
+
+                    # text element is primitive type
+                    if prop_meta.get("text", False):
+                        if args[0].text is not None:
+                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
+                        continue
+
+                    # wrapped element could be normal property or array, it should only have one element
+                    item = args[0].find(xml_name)
+                    if item is not None:
+                        existed_attr_keys.append(xml_name)
+                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)
+
+                # rest thing is additional properties
+                for e in args[0]:
+                    if e.tag not in existed_attr_keys:
+                        dict_to_pass[e.tag] = _convert_element(e)
+            else:
+                # Mapping input: route each entry through its rest field (if any).
+                dict_to_pass.update(
+                    {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
+                )
+        else:
+            non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
+            if non_attr_kwargs:
+                # actual type errors only throw the first wrong keyword arg they see, so following that.
+                raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'")
+            # None-valued kwargs are dropped rather than stored.
+            dict_to_pass.update(
+                {
+                    self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v)
+                    for k, v in kwargs.items()
+                    if v is not None
+                }
+            )
+        super().__init__(dict_to_pass)
+
+    def copy(self) -> "Model":
+        # NOTE(review): builds a plain Model from self.__dict__ (which holds
+        # {"_data": ...}) rather than copying self._data into type(self) —
+        # looks suspect but is kept as generated; confirm before changing.
+        return Model(self.__dict__)
+
+    def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
+        # Build the attr -> rest-field map once per concrete class.
+        if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
+            # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
+            # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
+            mros = cls.__mro__[:-9][::-1]  # ignore parents, and reverse the mro order
+            attr_to_rest_field: dict[str, _RestField] = {  # map attribute name to rest_field property
+                k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
+            }
+            annotations = {
+                k: v
+                for mro_class in mros
+                if hasattr(mro_class, "__annotations__")
+                for k, v in mro_class.__annotations__.items()
+            }
+            for attr, rf in attr_to_rest_field.items():
+                rf._module = cls.__module__
+                if not rf._type:
+                    rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
+                if not rf._rest_name_input:
+                    rf._rest_name_input = attr
+            cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
+            cls._backcompat_attr_to_rest_field: dict[str, _RestField] = {
+                Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf
+                for attr, rf in cls._attr_to_rest_field.items()
+            }
+            cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
+
+        return super().__new__(cls)
+
+    def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
+        # Register discriminated subtypes on every polymorphic base exposing __mapping__.
+        for base in cls.__bases__:
+            if hasattr(base, "__mapping__"):
+                base.__mapping__[discriminator or cls.__name__] = cls  # type: ignore
+
+    @classmethod
+    def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str:
+        """Prefer the original TypeSpec name for an attribute when one is recorded."""
+        rest_field_obj = attr_to_rest_field.get(attr_name)  # pylint: disable=protected-access
+        if rest_field_obj is None:
+            return attr_name
+        original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None)  # pylint: disable=protected-access
+        if original_tsp_name:
+            return original_tsp_name
+        return attr_name
+
+    @classmethod
+    def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
+        """First discriminator field of this class not already consumed by a parent."""
+        for v in cls.__dict__.values():
+            if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
+                return v
+        return None
+
+    @classmethod
+    def _deserialize(cls, data, exist_discriminators):
+        """Deserialize *data*, dispatching to the discriminated subtype when one matches."""
+        if not hasattr(cls, "__mapping__"):
+            return cls(data)
+        discriminator = cls._get_discriminator(exist_discriminators)
+        if discriminator is None:
+            return cls(data)
+        exist_discriminators.append(discriminator._rest_name)
+        if isinstance(data, ET.Element):
+            model_meta = getattr(cls, "_xml", {})
+            prop_meta = getattr(discriminator, "_xml", {})
+            xml_name = prop_meta.get("name", discriminator._rest_name)
+            xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
+            if xml_ns:
+                xml_name = "{" + xml_ns + "}" + xml_name
+
+            # The discriminator may be an XML attribute or a child element's text.
+            if data.get(xml_name) is not None:
+                discriminator_value = data.get(xml_name)
+            else:
+                discriminator_value = data.find(xml_name).text  # pyright: ignore
+        else:
+            discriminator_value = data.get(discriminator._rest_name)
+        # Unknown discriminator values fall back to this class itself.
+        mapped_cls = cls.__mapping__.get(discriminator_value, cls)  # pyright: ignore # pylint: disable=no-member
+        return mapped_cls._deserialize(data, exist_discriminators)
+
+    def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]:
+        """Return a dict that can be turned into json using json.dump.
+
+        :keyword bool exclude_readonly: Whether to remove the readonly properties.
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+
+        result = {}
+        readonly_props = []
+        if exclude_readonly:
+            readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
+        for k, v in self.items():
+            if exclude_readonly and k in readonly_props:  # pyright: ignore
+                continue
+            is_multipart_file_input = False
+            try:
+                is_multipart_file_input = next(
+                    rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
+                )._is_multipart_file_input
+            except StopIteration:
+                pass
+            # Multipart file values must stay as-is (file-like objects etc.).
+            result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
+        return result
+
+    @staticmethod
+    def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
+        """Recursively convert nested models/containers for as_dict."""
+        if v is None or isinstance(v, _Null):
+            return None
+        if isinstance(v, (list, tuple, set)):
+            return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
+        if isinstance(v, dict):
+            return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
+        return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v
+
+
+def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj):
+    """Deserialize *obj* into a model, unless it already is one."""
+    if _is_model(obj):
+        return obj
+    return _deserialize(model_deserializer, obj)
+
+
+def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
+    """Apply the deserializer only when *obj* is not None (Optional[...] support)."""
+    if obj is None:
+        return obj
+    return _deserialize_with_callable(if_obj_deserializer, obj)
+
+
+def _deserialize_with_union(deserializers, obj):
+    """Try each union-member deserializer in order; first success wins.
+
+    :raises DeserializationError: if no member deserializer accepts *obj*.
+    """
+    for deserializer in deserializers:
+        try:
+            return _deserialize(deserializer, obj)
+        except DeserializationError:
+            pass
+    raise DeserializationError()
+
+
+def _deserialize_dict(
+    value_deserializer: typing.Optional[typing.Callable],
+    module: typing.Optional[str],
+    obj: dict[typing.Any, typing.Any],
+):
+    """Deserialize every value of a mapping (XML elements become tag->child dicts)."""
+    if obj is None:
+        return obj
+    if isinstance(obj, ET.Element):
+        obj = {child.tag: child for child in obj}
+    return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()}
+
+
+def _deserialize_multiple_sequence(
+    entry_deserializers: list[typing.Optional[typing.Callable]],
+    module: typing.Optional[str],
+    obj,
+):
+    """Deserialize a heterogeneous (tuple-like) sequence, pairing each entry
+    positionally with its own deserializer."""
+    if obj is None:
+        return obj
+    return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))
+
+
+def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool:
+    """Detect the nested functools.partial shape produced for array-encoded
+    string fields (see get_deserializer)."""
+    return (
+        isinstance(deserializer, functools.partial)
+        and isinstance(deserializer.args[0], functools.partial)
+        and deserializer.args[0].func == _deserialize_array_encoded  # pylint: disable=comparison-with-callable
+    )
+
+
+def _deserialize_sequence(
+    deserializer: typing.Optional[typing.Callable],
+    module: typing.Optional[str],
+    obj,
+):
+    """Deserialize each entry of a sequence, preserving the container type.
+
+    A raw string may itself be a delimiter-encoded array; in that case the
+    array-encoded deserializer splits it first.
+    """
+    if obj is None:
+        return obj
+    if isinstance(obj, ET.Element):
+        obj = list(obj)
+
+    # encoded string may be deserialized to sequence
+    if isinstance(obj, str) and isinstance(deserializer, functools.partial):
+        # for list[str]
+        if _is_array_encoded_deserializer(deserializer):
+            return deserializer(obj)
+
+        # for list[Union[...]]
+        if isinstance(deserializer.args[0], list):
+            for sub_deserializer in deserializer.args[0]:
+                if _is_array_encoded_deserializer(sub_deserializer):
+                    return sub_deserializer(obj)
+
+    return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
+
+
+def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]:
+ return sorted(
+ types,
+ key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
+ )
+
+
def _get_deserialize_callable_from_annotation(  # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches
    annotation: typing.Any,
    module: typing.Optional[str],
    rf: typing.Optional["_RestField"] = None,
) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
    """Compile a type annotation into a deserializer callable.

    Walks the annotation (model, Literal, Optional, Union, dict, sequence,
    primitive) and returns a callable converting wire-format values into that
    type. As a side effect, flags like ``_is_model``/``_is_optional`` are set on
    the owning rest field. The order of the branches below matters: Optional must
    be unwrapped before Union, and dict/sequence detection happens last.

    :param annotation: The annotation: a class, typing construct, string alias, or forward ref.
    :param module: Module name used to resolve string aliases and forward references.
    :param rf: The rest field being configured, if any.
    :returns: A deserializer callable, or None when no conversion is needed
        (missing annotation or Literal).
    """
    if not annotation:
        return None

    # is it a type alias?
    if isinstance(annotation, str):
        if module is not None:
            annotation = _get_type_alias_type(module, annotation)

    # is it a forward ref / in quotes?
    if isinstance(annotation, (str, typing.ForwardRef)):
        try:
            model_name = annotation.__forward_arg__  # type: ignore
        except AttributeError:
            model_name = annotation
        if module is not None:
            annotation = _get_model(module, model_name)  # type: ignore

    try:
        if module and _is_model(annotation):
            if rf:
                rf._is_model = True

            return functools.partial(_deserialize_model, annotation)  # pyright: ignore
    except Exception:
        pass

    # is it a literal?
    try:
        if annotation.__origin__ is typing.Literal:  # pyright: ignore
            # Literal values are already wire-format; no conversion needed.
            return None
    except AttributeError:
        pass

    # is it optional?
    try:
        if any(a is _NONE_TYPE for a in annotation.__args__):  # pyright: ignore
            if rf:
                rf._is_optional = True
            if len(annotation.__args__) <= 2:  # pyright: ignore
                # Optional[X]: deserialize X, letting None pass through.
                if_obj_deserializer = _get_deserialize_callable_from_annotation(
                    next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf  # pyright: ignore
                )

                return functools.partial(_deserialize_with_optional, if_obj_deserializer)
            # the type is Optional[Union[...]], we need to remove the None type from the Union
            annotation_copy = copy.copy(annotation)
            annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE]  # pyright: ignore
            return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
    except AttributeError:
        pass

    # is it union?
    if getattr(annotation, "__origin__", None) is typing.Union:
        # initial ordering is we make `string` the last deserialization option, because it is often them most generic
        deserializers = [
            _get_deserialize_callable_from_annotation(arg, module, rf)
            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
        ]

        return functools.partial(_deserialize_with_union, deserializers)

    try:
        # Generic typing containers expose __name__ (3.10+) or _name.
        annotation_name = (
            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
        )
        if annotation_name.lower() == "dict":
            value_deserializer = _get_deserialize_callable_from_annotation(
                annotation.__args__[1], module, rf  # pyright: ignore
            )

            return functools.partial(
                _deserialize_dict,
                value_deserializer,
                module,
            )
    except (AttributeError, IndexError):
        pass
    try:
        annotation_name = (
            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
        )
        if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
            if len(annotation.__args__) > 1:  # pyright: ignore
                # Heterogeneous sequence (e.g. tuple[int, str]): one deserializer per slot.
                entry_deserializers = [
                    _get_deserialize_callable_from_annotation(dt, module, rf)
                    for dt in annotation.__args__  # pyright: ignore
                ]
                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
            deserializer = _get_deserialize_callable_from_annotation(
                annotation.__args__[0], module, rf  # pyright: ignore
            )

            return functools.partial(_deserialize_sequence, deserializer, module)
    except (TypeError, IndexError, AttributeError, SyntaxError):
        pass

    def _deserialize_default(
        deserializer,
        obj,
    ):
        # Best-effort fallback: on any conversion failure, return the raw value.
        if obj is None:
            return obj
        try:
            return _deserialize_with_callable(deserializer, obj)
        except Exception:
            pass
        return obj

    # Prefer a registered deserializer for the annotation (e.g. datetime formats);
    # otherwise treat the annotation itself as the conversion callable.
    if get_deserializer(annotation, rf):
        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))

    return functools.partial(_deserialize_default, annotation)
+
+
def _deserialize_with_callable(
    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
    value: typing.Any,
):  # pylint: disable=too-many-return-statements
    """Apply a single deserializer to a raw value, with XML and enum handling.

    :param deserializer: Target type or deserializer callable; None means pass-through.
    :param value: The raw value (JSON scalar/container or XML element).
    :returns: The deserialized value, or None for null sentinels.
    :raises DeserializationError: wrapping any error raised during conversion.
    """
    try:
        if value is None or isinstance(value, _Null):
            return None
        if isinstance(value, ET.Element):
            # XML leaves carry their payload in .text: empty text maps to ""
            # for str, but to None for the other primitive types.
            if deserializer is str:
                return value.text or ""
            if deserializer is int:
                return int(value.text) if value.text else None
            if deserializer is float:
                return float(value.text) if value.text else None
            if deserializer is bool:
                return value.text == "true" if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING.values():
                return deserializer(value.text) if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values():
                return deserializer(value.text) if value.text else None
        if deserializer is None:
            return value
        if deserializer in [int, float, bool]:
            return deserializer(value)
        if isinstance(deserializer, CaseInsensitiveEnumMeta):
            try:
                return deserializer(value.text if isinstance(value, ET.Element) else value)
            except ValueError:
                # for unknown value, return raw value
                return value.text if isinstance(value, ET.Element) else value
        if isinstance(deserializer, type) and issubclass(deserializer, Model):
            return deserializer._deserialize(value, [])
        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
    except Exception as e:
        raise DeserializationError() from e
+
+
def _deserialize(
    deserializer: typing.Any,
    value: typing.Any,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Deserialize a raw value (or a pipeline response) into the target type.

    :param deserializer: Annotation or prepared deserializer for the target type.
    :param value: The raw value; a PipelineResponse is unwrapped via ``.json()`` first.
    :param module: Module name used to resolve string/forward-ref annotations.
    :param rf: Optional rest field carrying metadata (e.g. encoding format).
    :param format: Wire-format hint; builds a synthetic rest field when ``rf`` is absent.
    :returns: The deserialized value.
    """
    if isinstance(value, PipelineResponse):
        value = value.http_response.json()
    if rf is None and format:
        rf = _RestField(format=format)
    # A functools.partial is an already-compiled deserializer; anything else is a
    # raw annotation that still needs compiling.
    if not isinstance(deserializer, functools.partial):
        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
    return _deserialize_with_callable(deserializer, value)
+
+
def _failsafe_deserialize(
    deserializer: typing.Any,
    response: HttpResponse,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Deserialize an HTTP response's JSON body, returning None instead of raising.

    Intended for error-response models, where a malformed body must not mask the
    original service error.

    :param deserializer: Annotation or prepared deserializer for the target type.
    :param response: The HTTP response whose JSON body is deserialized.
    :param module: Module name used to resolve string/forward-ref annotations.
    :param rf: Optional rest field carrying metadata.
    :param format: Wire-format hint.
    :returns: The deserialized value, or None on any failure.
    """
    try:
        return _deserialize(deserializer, response.json(), module, rf, format)
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
        return None
+
+
def _failsafe_deserialize_xml(
    deserializer: typing.Any,
    response: HttpResponse,
) -> typing.Any:
    """Deserialize an HTTP response's XML body, returning None instead of raising.

    :param deserializer: Annotation or prepared deserializer for the target type.
    :param response: The HTTP response whose text body is parsed as XML.
    :returns: The deserialized value, or None on any failure.
    """
    try:
        return _deserialize_xml(deserializer, response.text())
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
        return None
+
+
# pylint: disable=too-many-instance-attributes
class _RestField:
    """Descriptor mapping a Python attribute to a REST wire property.

    Stores the wire name, type annotation, visibility, and encoding metadata,
    and lazily deserializes values out of the owning Model's ``_data`` on
    attribute access.
    """

    def __init__(
        self,
        *,
        name: typing.Optional[str] = None,
        type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
        is_discriminator: bool = False,
        visibility: typing.Optional[list[str]] = None,
        default: typing.Any = _UNSET,
        format: typing.Optional[str] = None,
        is_multipart_file_input: bool = False,
        xml: typing.Optional[dict[str, typing.Any]] = None,
        original_tsp_name: typing.Optional[str] = None,
    ):
        self._type = type
        # Wire name; may be filled in later (e.g. from the attribute name), hence "input".
        self._rest_name_input = name
        self._module: typing.Optional[str] = None
        self._is_discriminator = is_discriminator
        self._visibility = visibility
        # Set as side effects while compiling the type annotation.
        self._is_model = False
        self._is_optional = False
        self._default = default
        self._format = format
        self._is_multipart_file_input = is_multipart_file_input
        self._xml = xml if xml is not None else {}
        self._original_tsp_name = original_tsp_name

    @property
    def _class_type(self) -> typing.Any:
        """The concrete class bound by this field's compiled deserializer, if any."""
        result = getattr(self._type, "args", [None])[0]
        # type may be wrapped by nested functools.partial so we need to check for that
        if isinstance(result, functools.partial):
            return getattr(result, "args", [None])[0]
        return result

    @property
    def _rest_name(self) -> str:
        """The wire (REST) name of this field.

        :raises ValueError: if the name was never assigned.
        """
        if self._rest_name_input is None:
            raise ValueError("Rest name was never set")
        return self._rest_name_input

    def __get__(self, obj: Model, type=None):  # pylint: disable=redefined-builtin
        """Read the attribute: fetch the raw value from ``obj._data`` and deserialize it.

        Deserialized mutable values (dict/list/set) are written back into _data so
        in-place mutations by the caller are reflected on the wire.
        """
        # by this point, type and rest_name will have a value bc we default
        # them in __new__ of the Model class
        # Use _data.get() directly to avoid triggering __getitem__ which clears the cache
        item = obj._data.get(self._rest_name)
        if item is None:
            return item
        if self._is_model:
            return item

        # For mutable types, we want mutations to directly affect _data
        # Check if we've already deserialized this value
        cache_attr = f"_deserialized_{self._rest_name}"
        if hasattr(obj, cache_attr):
            # Return the value from _data directly (it's been deserialized in place)
            return obj._data.get(self._rest_name)

        deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)

        # For mutable types, store the deserialized value back in _data
        # so mutations directly affect _data
        if isinstance(deserialized, (dict, list, set)):
            obj._data[self._rest_name] = deserialized
            object.__setattr__(obj, cache_attr, True)  # Mark as deserialized
            return deserialized

        return deserialized

    def __set__(self, obj: Model, value) -> None:
        """Write the attribute: serialize the value into ``obj._data`` under the wire name.

        Setting None removes the entry entirely rather than storing a null.
        """
        # Clear the cached deserialized object when setting a new value
        cache_attr = f"_deserialized_{self._rest_name}"
        if hasattr(obj, cache_attr):
            object.__delattr__(obj, cache_attr)

        if value is None:
            # we want to wipe out entries if users set attr to None
            try:
                obj.__delitem__(self._rest_name)
            except KeyError:
                pass
            return
        if self._is_model:
            if not _is_model(value):
                value = _deserialize(self._type, value)
            obj.__setitem__(self._rest_name, value)
            return
        obj.__setitem__(self._rest_name, _serialize(value, self._format))

    def _get_deserialize_callable_from_annotation(
        self, annotation: typing.Any
    ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
        """Compile an annotation into a deserializer, using this field's module context."""
        return _get_deserialize_callable_from_annotation(annotation, self._module, self)
+
+
def rest_field(
    *,
    name: typing.Optional[str] = None,
    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
    visibility: typing.Optional[list[str]] = None,
    default: typing.Any = _UNSET,
    format: typing.Optional[str] = None,
    is_multipart_file_input: bool = False,
    xml: typing.Optional[dict[str, typing.Any]] = None,
    original_tsp_name: typing.Optional[str] = None,
) -> typing.Any:
    """Declare a REST field descriptor on a Model.

    :keyword name: Wire (REST) name, when it differs from the attribute name.
    :keyword type: Type annotation/deserializer for the field's value.
    :keyword visibility: Visibility modifiers for the field.
    :keyword default: Default value; ``_UNSET`` means no default.
    :keyword format: Wire-encoding format hint.
    :keyword is_multipart_file_input: Whether the field is a multipart file part.
    :keyword xml: XML serialization metadata.
    :keyword original_tsp_name: Original TypeSpec property name.
    :returns: The configured ``_RestField`` descriptor.
    """
    return _RestField(
        name=name,
        type=type,
        visibility=visibility,
        default=default,
        format=format,
        is_multipart_file_input=is_multipart_file_input,
        xml=xml,
        original_tsp_name=original_tsp_name,
    )
+
+
def rest_discriminator(
    *,
    name: typing.Optional[str] = None,
    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
    visibility: typing.Optional[list[str]] = None,
    xml: typing.Optional[dict[str, typing.Any]] = None,
) -> typing.Any:
    """Declare the discriminator field used for polymorphic model dispatch.

    Same as :func:`rest_field`, but marks the field as the type discriminator.

    :keyword name: Wire (REST) name of the discriminator property.
    :keyword type: Type annotation/deserializer for the discriminator value.
    :keyword visibility: Visibility modifiers for the field.
    :keyword xml: XML serialization metadata.
    :returns: The configured ``_RestField`` descriptor.
    """
    return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)
+
+
def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
    """Serialize a model to XML.

    :param Model model: The model to serialize.
    :param bool exclude_readonly: Whether to exclude readonly properties.
    :returns: The XML representation of the model.
    :rtype: str
    """
    # encoding="unicode" makes ET.tostring return str instead of bytes.
    return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode")  # type: ignore
+
+
def _get_element(
    o: typing.Any,
    exclude_readonly: bool = False,
    parent_meta: typing.Optional[dict[str, typing.Any]] = None,
    wrapped_element: typing.Optional[ET.Element] = None,
) -> typing.Union[ET.Element, list[ET.Element]]:
    """Convert a value into ElementTree element(s) for XML serialization.

    :param o: The value to serialize: a model, list, dict, or primitive.
    :param bool exclude_readonly: Whether readonly model properties are skipped.
    :param parent_meta: XML metadata inherited from the enclosing property
        (name/prefix/ns and items* overrides), used for list entries and primitives.
    :param wrapped_element: Pre-created element to populate instead of creating a
        wrapper (used when a model is itself a property's element).
    :returns: A single element for models/primitives, or a list of elements for
        list/dict values.
    :raises ValueError: if a primitive value arrives with no metadata to name its element.
    """
    if _is_model(o):
        model_meta = getattr(o, "_xml", {})

        # if prop is a model, then use the prop element directly, else generate a wrapper of model
        if wrapped_element is None:
            wrapped_element = _create_xml_element(
                model_meta.get("name", o.__class__.__name__),
                model_meta.get("prefix"),
                model_meta.get("ns"),
            )

        readonly_props = []
        if exclude_readonly:
            readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]

        for k, v in o.items():
            # do not serialize readonly properties
            if exclude_readonly and k in readonly_props:
                continue

            prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
            if prop_rest_field:
                prop_meta = getattr(prop_rest_field, "_xml").copy()
                # use the wire name as xml name if no specific name is set
                if prop_meta.get("name") is None:
                    prop_meta["name"] = k
            else:
                # additional properties will not have rest field, use the wire name as xml name
                prop_meta = {"name": k}

            # if no ns for prop, use model's
            if prop_meta.get("ns") is None and model_meta.get("ns"):
                prop_meta["ns"] = model_meta.get("ns")
                prop_meta["prefix"] = model_meta.get("prefix")

            if prop_meta.get("unwrapped", False):
                # unwrapped could only set on array
                wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
            elif prop_meta.get("text", False):
                # text could only set on primitive type
                wrapped_element.text = _get_primitive_type_value(v)
            elif prop_meta.get("attribute", False):
                xml_name = prop_meta.get("name", k)
                if prop_meta.get("ns"):
                    ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns"))  # pyright: ignore
                    xml_name = "{" + prop_meta.get("ns") + "}" + xml_name  # pyright: ignore
                # attribute should be primitive type
                wrapped_element.set(xml_name, _get_primitive_type_value(v))
            else:
                # other wrapped prop element
                wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta))
        return wrapped_element
    if isinstance(o, list):
        return [_get_element(x, exclude_readonly, parent_meta) for x in o]  # type: ignore
    if isinstance(o, dict):
        result = []
        for k, v in o.items():
            result.append(
                _get_wrapped_element(
                    v,
                    exclude_readonly,
                    {
                        "name": k,
                        "ns": parent_meta.get("ns") if parent_meta else None,
                        "prefix": parent_meta.get("prefix") if parent_meta else None,
                    },
                )
            )
        return result

    # primitive case need to create element based on parent_meta
    if parent_meta:
        return _get_wrapped_element(
            o,
            exclude_readonly,
            {
                "name": parent_meta.get("itemsName", parent_meta.get("name")),
                "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")),
                "ns": parent_meta.get("itemsNs", parent_meta.get("ns")),
            },
        )

    # Bug fix: `"..." + o` raised TypeError for non-str values, masking the
    # intended ValueError; convert explicitly so the error always surfaces.
    raise ValueError("Could not serialize value into xml: " + str(o))
+
+
def _get_wrapped_element(
    v: typing.Any,
    exclude_readonly: bool,
    meta: typing.Optional[dict[str, typing.Any]],
) -> ET.Element:
    """Create the element for a property and fill it from the property's value.

    :param v: The property value: model, dict/list, or primitive.
    :param bool exclude_readonly: Whether readonly model properties are skipped.
    :param meta: XML metadata (name/prefix/ns) for the wrapper element.
    :returns: The populated element.
    :rtype: xml.etree.ElementTree.Element
    """
    wrapped_element = _create_xml_element(
        meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None
    )
    if isinstance(v, (dict, list)):
        # Containers produce a list of child elements appended under the wrapper.
        wrapped_element.extend(_get_element(v, exclude_readonly, meta))
    elif _is_model(v):
        # Models populate the wrapper element in place.
        _get_element(v, exclude_readonly, meta, wrapped_element)
    else:
        wrapped_element.text = _get_primitive_type_value(v)
    return wrapped_element  # type: ignore[no-any-return]
+
+
+def _get_primitive_type_value(v) -> str:
+ if v is True:
+ return "true"
+ if v is False:
+ return "false"
+ if isinstance(v, _Null):
+ return ""
+ return str(v)
+
+
+def _create_xml_element(
+ tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None
+) -> ET.Element:
+ if prefix and ns:
+ ET.register_namespace(prefix, ns)
+ if ns:
+ return ET.Element("{" + ns + "}" + tag)
+ return ET.Element(tag)
+
+
def _deserialize_xml(
    deserializer: typing.Any,
    value: str,
) -> typing.Any:
    """Parse an XML document string and deserialize its root element.

    :param deserializer: Annotation or prepared deserializer for the target type.
    :param str value: The XML document text.
    :returns: The deserialized value.
    """
    element = ET.fromstring(value)  # nosec
    return _deserialize(deserializer, element)
+
+
+def _convert_element(e: ET.Element):
+ # dict case
+ if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
+ dict_result: dict[str, typing.Any] = {}
+ for child in e:
+ if dict_result.get(child.tag) is not None:
+ if isinstance(dict_result[child.tag], list):
+ dict_result[child.tag].append(_convert_element(child))
+ else:
+ dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
+ else:
+ dict_result[child.tag] = _convert_element(child)
+ dict_result.update(e.attrib)
+ return dict_result
+ # array case
+ if len(e) > 0:
+ array_result: list[typing.Any] = []
+ for child in e:
+ array_result.append(_convert_element(child))
+ return array_result
+ # primitive case
+ return e.text
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/serialization.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/serialization.py
new file mode 100644
index 000000000000..81ec1de5922b
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/_utils/serialization.py
@@ -0,0 +1,2041 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ MutableMapping,
+)
+
+try:
+ from urllib import quote # type: ignore
+except ImportError:
+ from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate # type: ignore
+from typing_extensions import Self
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+JSON = MutableMapping[str, Any]
+
+
class RawDeserializer:
    """Decode raw HTTP payloads (bytes/str/stream) into Python objects by content type."""

    # Accept "text" because we're open minded people...
    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")

    # Name used in context
    CONTEXT_NAME = "deserialized_data"

    @classmethod
    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
        """Decode data according to content-type.

        Accept a stream of data as well, but will be load at once in memory for now.

        If no content-type, will return the string version (not bytes, not stream)

        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
        :type data: str or bytes or IO
        :param str content_type: The content type.
        :return: The deserialized data.
        :rtype: object
        """
        if hasattr(data, "read"):
            # Assume a stream
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            data_as_str = data.decode(encoding="utf-8-sig")
        else:
            # Explain to mypy the correct type.
            data_as_str = cast(str, data)

        # Remove Byte Order Mark if present in string
        data_as_str = data_as_str.lstrip(_BOM)

        if content_type is None:
            return data

        if cls.JSON_REGEXP.match(content_type):
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
        elif "xml" in (content_type or []):
            try:

                try:
                    if isinstance(data, unicode):  # type: ignore
                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
                except NameError:
                    pass

                return ET.fromstring(data_as_str)  # nosec
            except ET.ParseError as err:
                # It might be because the server has an issue, and returned JSON with
                # content-type XML....
                # So let's try a JSON load, and if it's still broken
                # let's flow the initial exception
                def _json_attemp(data):
                    try:
                        return True, json.loads(data)
                    except ValueError:
                        return False, None  # Don't care about this one

                success, json_result = _json_attemp(data)
                if success:
                    return json_result
                # If i'm here, it's not JSON, it's not XML, let's scream
                # and raise the last context in this block (the XML exception)
                # The function hack is because Py2.7 messes up with exception
                # context otherwise.
                _LOGGER.critical("Wasn't XML not JSON, failing")
                raise DeserializationError("XML is invalid") from err
        elif content_type.startswith("text/"):
            return data_as_str
        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))

    @classmethod
    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
        """Deserialize from HTTP response.

        Use bytes and headers to NOT use any requests/aiohttp or whatever
        specific implementation.
        Headers will tested for "content-type"

        :param bytes body_bytes: The body of the response.
        :param dict headers: The headers of the response.
        :returns: The deserialized data.
        :rtype: object
        """
        # Try to use content-type from headers if available
        content_type = None
        if "content-type" in headers:
            # Strip parameters (e.g. "; charset=utf-8") and normalize case.
            content_type = headers["content-type"].split(";")[0].strip().lower()
        # Ouch, this server did not declare what it sent...
        # Let's guess it's JSON...
        # Also, since Autorest was considering that an empty body was a valid JSON,
        # need that test as well....
        else:
            content_type = "application/json"

        if body_bytes:
            return cls.deserialize_from_text(body_bytes, content_type)
        return None
+
+
_LOGGER = logging.getLogger(__name__)

# Python 2 compatibility: `long` exists only there; on Python 3 plain int is unbounded.
try:
    _long_type = long  # type: ignore
except NameError:
    _long_type = int

# Canonical UTC tzinfo used throughout date/time (de)serialization.
TZ_UTC = datetime.timezone.utc
+_FLATTEN = re.compile(r"(? None:
+ self.additional_properties: Optional[dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
+ if k not in self._attribute_map:
+ _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+ elif k in self._validation and self._validation[k].get("readonly", False):
+ _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+ else:
+ setattr(self, k, kwargs[k])
+
    def __eq__(self, other: Any) -> bool:
        """Compare objects by comparing all attributes.

        :param object other: The object to compare
        :returns: True if objects are equal
        :rtype: bool
        """
        # Only instances of the exact same class compare equal; attribute dicts decide.
        if isinstance(other, self.__class__):
            return self.__dict__ == other.__dict__
        return False
+
    def __ne__(self, other: Any) -> bool:
        """Compare objects by comparing all attributes.

        :param object other: The object to compare
        :returns: True if objects are not equal
        :rtype: bool
        """
        return not self.__eq__(other)
+
    def __str__(self) -> str:
        """Return the string form of the instance's attribute dictionary.

        :rtype: str
        """
        return str(self.__dict__)
+
    @classmethod
    def enable_additional_properties_sending(cls) -> None:
        """Opt this model class into serializing its additional_properties bag."""
        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
    @classmethod
    def is_xml_model(cls) -> bool:
        """Whether this model class carries XML serialization metadata (``_xml_map``).

        :returns: True if the class defines _xml_map.
        :rtype: bool
        """
        try:
            cls._xml_map  # type: ignore
        except AttributeError:
            return False
        return True
+
    @classmethod
    def _create_xml_node(cls):
        """Create XML node.

        :returns: The XML node
        :rtype: xml.etree.ElementTree.Element
        """
        try:
            xml_map = cls._xml_map  # type: ignore
        except AttributeError:
            # No XML metadata: fall back to the class name as the tag.
            xml_map = {}

        return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
    def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
        """Return the JSON that would be sent to server from this model.

        This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.

        If you want XML serialization, you can pass the kwargs is_xml=True.

        :param bool keep_readonly: If you want to serialize the readonly attributes
        :returns: A dict JSON compatible object
        :rtype: dict
        """
        # A fresh Serializer is built per call from this model's module's classes.
        serializer = Serializer(self._infer_class_models())
        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
            self, keep_readonly=keep_readonly, **kwargs
        )
+
    def as_dict(
        self,
        keep_readonly: bool = True,
        key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer,
        **kwargs: Any
    ) -> JSON:
        """Return a dict that can be serialized using json.dump.

        Advanced usage might optionally use a callback as parameter:

        .. code::python

            def my_key_transformer(key, attr_desc, value):
                return key

        Key is the attribute name used in Python. Attr_desc
        is a dict of metadata. Currently contains 'type' with the
        msrest type and 'key' with the RestAPI encoded key.
        Value is the current value in this object.

        The string returned will be used to serialize the key.
        If the return type is a list, this is considered hierarchical
        result dict.

        See the three examples in this file:

        - attribute_transformer
        - full_restapi_key_transformer
        - last_restapi_key_transformer

        If you want XML serialization, you can pass the kwargs is_xml=True.

        :param bool keep_readonly: If you want to serialize the readonly attributes
        :param function key_transformer: A key transformer function.
        :returns: A dict JSON compatible object
        :rtype: dict
        """
        serializer = Serializer(self._infer_class_models())
        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
            self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
        )
+
    @classmethod
    def _infer_class_models(cls):
        """Collect the model classes visible from this class's parent package module.

        Falls back to just this class when the module layout is not the
        Autorest-generated one (e.g. in tests).

        :returns: Mapping of class name to class.
        :rtype: dict
        """
        try:
            # Generated models live one package level up (models/__init__.py).
            str_models = cls.__module__.rsplit(".", 1)[0]
            models = sys.modules[str_models]
            client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
            if cls.__name__ not in client_models:
                raise ValueError("Not Autorest generated code")
        except Exception:  # pylint: disable=broad-exception-caught
            # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
            client_models = {cls.__name__: cls}
        return client_models
+
    @classmethod
    def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
        """Parse a str using the RestAPI syntax and return a model.

        :param str data: A str using RestAPI structure. JSON by default.
        :param str content_type: JSON by default, set application/xml if XML.
        :returns: An instance of this model
        :raises DeserializationError: if something went wrong
        :rtype: Self
        """
        deserializer = Deserializer(cls._infer_class_models())
        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
    @classmethod
    def from_dict(
        cls,
        data: Any,
        key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None,
        content_type: Optional[str] = None,
    ) -> Self:
        """Parse a dict using given key extractor return a model.

        By default consider key
        extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
        and last_rest_key_case_insensitive_extractor)

        :param dict data: A dict using RestAPI structure
        :param function key_extractors: A key extractor function.
        :param str content_type: JSON by default, set application/xml if XML.
        :returns: An instance of this model
        :raises DeserializationError: if something went wrong
        :rtype: Self
        """
        deserializer = Deserializer(cls._infer_class_models())
        # Case-insensitive extractors make from_dict lenient about key casing.
        deserializer.key_extractors = (  # type: ignore
            [  # type: ignore
                attribute_key_case_insensitive_extractor,
                rest_key_case_insensitive_extractor,
                last_rest_key_case_insensitive_extractor,
            ]
            if key_extractors is None
            else key_extractors
        )
        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
    @classmethod
    def _flatten_subtype(cls, key, objects):
        """Merge this class's ``_subtype_map`` for ``key`` with those of all registered subtypes.

        :param str key: The discriminator attribute name.
        :param dict objects: Mapping of class name to class, used to resolve subtype names.
        :returns: Flattened mapping of discriminator value to class name.
        :rtype: dict
        """
        # Only a class's OWN _subtype_map counts; inherited ones are ignored.
        if "_subtype_map" not in cls.__dict__:
            return {}
        result = dict(cls._subtype_map[key])
        for valuetype in cls._subtype_map[key].values():
            result |= objects[valuetype]._flatten_subtype(key, objects)  # pylint: disable=protected-access
        return result
+
    @classmethod
    def _classify(cls, response, objects):
        """Check the class _subtype_map for any child classes.
        We want to ignore any inherited _subtype_maps.

        :param dict response: The initial data
        :param dict objects: The class objects
        :returns: The class to be used
        :rtype: class
        """
        for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
            subtype_value = None

            if not isinstance(response, ET.Element):
                # Look the discriminator up under its wire name first, then its Python name.
                rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
                subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
            else:
                subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
            if subtype_value:
                # Try to match base class. Can be class name only
                # (bug to fix in Autorest to support x-ms-discriminator-name)
                if cls.__name__ == subtype_value:
                    return cls
                flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
                try:
                    return objects[flatten_mapping_type[subtype_value]]  # type: ignore
                except KeyError:
                    _LOGGER.warning(
                        "Subtype value %s has no mapping, use base class %s.",
                        subtype_value,
                        cls.__name__,
                    )
                    break
            else:
                _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
                break
        # Unknown or missing discriminator: fall back to this (base) class.
        return cls
+
    @classmethod
    def _get_rest_key_parts(cls, attr_key):
        """Get the RestAPI key of this attr, split it and decode part
        :param str attr_key: Attribute key must be in attribute_map.
        :returns: A list of RestAPI part
        :rtype: list
        """
        # _FLATTEN splits on unescaped dots; escaped dots are decoded afterwards.
        rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
        return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
+
+def _decode_attribute_map_key(key):
+ """This decode a key in an _attribute_map to the actual key we want to look at
+ inside the received data.
+
+ :param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
+ """
+ return key.replace("\\.", ".")
+
+
class Serializer:  # pylint: disable=too-many-public-methods
    """Request object model serializer."""

    # Python primitive type -> msrest type-name used in _attribute_map entries.
    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}

    # XML spells booleans lowercase ("true"/"false"), unlike Python's str(bool).
    _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
    # RFC 1123 weekday/month abbreviations (locale-independent, index-keyed).
    days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
    months = {
        1: "Jan",
        2: "Feb",
        3: "Mar",
        4: "Apr",
        5: "May",
        6: "Jun",
        7: "Jul",
        8: "Aug",
        9: "Sep",
        10: "Oct",
        11: "Nov",
        12: "Dec",
    }
    # Constraint name -> predicate returning True when the value VIOLATES the constraint.
    validation = {
        "min_length": lambda x, y: len(x) < y,
        "max_length": lambda x, y: len(x) > y,
        "minimum": lambda x, y: x < y,
        "maximum": lambda x, y: x > y,
        "minimum_ex": lambda x, y: x <= y,
        "maximum_ex": lambda x, y: x >= y,
        "min_items": lambda x, y: len(x) < y,
        "max_items": lambda x, y: len(x) > y,
        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
        "unique": lambda x, y: len(x) != len(set(x)),
        "multiple": lambda x, y: x % y != 0,
    }
+
    def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
        """Initialize the serializer.

        :param classes: Mapping of model class name to class, used to resolve
            cross-model references during serialization.
        """
        # msrest type-name -> serialization routine. "[]" and "{}" dispatch
        # list/dict element types; "object" handles untyped values.
        self.serialize_type = {
            "iso-8601": Serializer.serialize_iso,
            "rfc-1123": Serializer.serialize_rfc,
            "unix-time": Serializer.serialize_unix,
            "duration": Serializer.serialize_duration,
            "date": Serializer.serialize_date,
            "time": Serializer.serialize_time,
            "decimal": Serializer.serialize_decimal,
            "long": Serializer.serialize_long,
            "bytearray": Serializer.serialize_bytearray,
            "base64": Serializer.serialize_base64,
            "object": self.serialize_object,
            "[]": self.serialize_iter,
            "{}": self.serialize_dict,
        }
        self.dependencies: dict[str, type] = dict(classes) if classes else {}
        self.key_transformer = full_restapi_key_transformer
        self.client_side_validation = True
+
    def _serialize(  # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
        self, target_obj, data_type=None, **kwargs
    ):
        """Serialize data into a string according to type.

        When ``data_type`` is omitted, the object is serialized as a model
        (via its ``_attribute_map``) or, failing that, as a basic type.

        :param object target_obj: The data to be serialized.
        :param str data_type: The type to be serialized from.
        :rtype: str, dict
        :raises SerializationError: if serialization fails.
        :returns: The serialized data.
        """
        key_transformer = kwargs.get("key_transformer", self.key_transformer)
        keep_readonly = kwargs.get("keep_readonly", False)
        if target_obj is None:
            return None

        attr_name = None
        class_name = target_obj.__class__.__name__

        # Explicit type wins: delegate straight to the typed path.
        if data_type:
            return self.serialize_data(target_obj, data_type, **kwargs)

        # Not a model: fall back to basic-type serialization when possible.
        if not hasattr(target_obj, "_attribute_map"):
            data_type = type(target_obj).__name__
            if data_type in self.basic_types.values():
                return self.serialize_data(target_obj, data_type, **kwargs)

        # Force "is_xml" kwargs if we detect a XML model
        try:
            is_xml_model_serialization = kwargs["is_xml"]
        except KeyError:
            is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())

        serialized = {}
        if is_xml_model_serialization:
            serialized = target_obj._create_xml_node()  # pylint: disable=protected-access
        try:
            attributes = target_obj._attribute_map  # pylint: disable=protected-access
            for attr, attr_desc in attributes.items():
                attr_name = attr
                # Read-only attributes are skipped unless explicitly kept.
                if not keep_readonly and target_obj._validation.get(  # pylint: disable=protected-access
                    attr_name, {}
                ).get("readonly", False):
                    continue

                # Empty "key" marks the additional-properties bag: merged
                # directly into the output (dict |= needs Python 3.9+).
                if attr_name == "additional_properties" and attr_desc["key"] == "":
                    if target_obj.additional_properties is not None:
                        serialized |= target_obj.additional_properties
                    continue
                try:

                    orig_attr = getattr(target_obj, attr)
                    if is_xml_model_serialization:
                        pass  # Don't provide "transformer" for XML for now. Keep "orig_attr"
                    else:  # JSON
                        keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
                        keys = keys if isinstance(keys, list) else [keys]

                    kwargs["serialization_ctxt"] = attr_desc
                    new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)

                    if is_xml_model_serialization:
                        xml_desc = attr_desc.get("xml", {})
                        xml_name = xml_desc.get("name", attr_desc["key"])
                        xml_prefix = xml_desc.get("prefix", None)
                        xml_ns = xml_desc.get("ns", None)
                        if xml_desc.get("attr", False):
                            if xml_ns:
                                ET.register_namespace(xml_prefix, xml_ns)
                                xml_name = "{{{}}}{}".format(xml_ns, xml_name)
                            serialized.set(xml_name, new_attr)  # type: ignore
                            continue
                        if xml_desc.get("text", False):
                            serialized.text = new_attr  # type: ignore
                            continue
                        if isinstance(new_attr, list):
                            serialized.extend(new_attr)  # type: ignore
                        elif isinstance(new_attr, ET.Element):
                            # If the down XML has no XML/Name,
                            # we MUST replace the tag with the local tag. But keeping the namespaces.
                            if "name" not in getattr(orig_attr, "_xml_map", {}):
                                splitted_tag = new_attr.tag.split("}")
                                if len(splitted_tag) == 2:  # Namespace
                                    new_attr.tag = "}".join([splitted_tag[0], xml_name])
                                else:
                                    new_attr.tag = xml_name
                            serialized.append(new_attr)  # type: ignore
                        else:  # That's a basic type
                            # Integrate namespace if necessary
                            local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
                            local_node.text = str(new_attr)
                            serialized.append(local_node)  # type: ignore
                    else:  # JSON
                        # Rebuild the flattened JSON path as nested dicts,
                        # innermost value first...
                        for k in reversed(keys):  # type: ignore
                            new_attr = {k: new_attr}

                        # ...then merge the nested dicts into "serialized",
                        # descending level by level so existing siblings survive.
                        _new_attr = new_attr
                        _serialized = serialized
                        for k in keys:  # type: ignore
                            if k not in _serialized:
                                _serialized.update(_new_attr)  # type: ignore
                            _new_attr = _new_attr[k]  # type: ignore
                            _serialized = _serialized[k]
                    except ValueError as err:
                        if isinstance(err, SerializationError):
                            raise
                except ValueError as err:
                    # Plain ValueError: the attribute is silently skipped;
                    # only SerializationError subclasses are re-raised.
                    if isinstance(err, SerializationError):
                        raise

        except (AttributeError, KeyError, TypeError) as err:
            msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
            raise SerializationError(msg) from err
        return serialized
+
    def body(self, data, data_type, **kwargs):
        """Serialize data intended for a request body.

        If the type is a known model, the data is first round-tripped through
        a permissive Deserializer so dict-shaped input is rebuilt as a model
        before serialization.

        :param object data: The data to be serialized.
        :param str data_type: The type to be serialized from.
        :rtype: dict
        :raises SerializationError: if serialization fails.
        :raises ValueError: if data is None
        :returns: The serialized request body
        """

        # Just in case this is a dict
        internal_data_type_str = data_type.strip("[]{}")
        internal_data_type = self.dependencies.get(internal_data_type_str, None)
        try:
            is_xml_model_serialization = kwargs["is_xml"]
        except KeyError:
            if internal_data_type and issubclass(internal_data_type, Model):
                is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
            else:
                is_xml_model_serialization = False
        # NOTE(review): internal_data_type is a class, so isinstance(..., Enum)
        # is always False — looks like issubclass was intended; verify before changing.
        if internal_data_type and not isinstance(internal_data_type, Enum):
            try:
                deserializer = Deserializer(self.dependencies)
                # Since it's on serialization, it's almost sure that format is not JSON REST
                # We're not able to deal with additional properties for now.
                deserializer.additional_properties_detection = False
                if is_xml_model_serialization:
                    deserializer.key_extractors = [  # type: ignore
                        attribute_key_case_insensitive_extractor,
                    ]
                else:
                    deserializer.key_extractors = [
                        rest_key_case_insensitive_extractor,
                        attribute_key_case_insensitive_extractor,
                        last_rest_key_case_insensitive_extractor,
                    ]
                data = deserializer._deserialize(data_type, data)  # pylint: disable=protected-access
            except DeserializationError as err:
                raise SerializationError("Unable to build a model: " + str(err)) from err

        return self._serialize(data, data_type, **kwargs)
+
+ def url(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL path.
+
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :returns: The serialized URL path
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ """
+ try:
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
+
+ def query(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL query.
+
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str, list
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized query parameter
+ """
+ try:
+ # Treat the list aside, since we don't want to encode the div separator
+ if data_type.startswith("["):
+ internal_data_type = data_type[1:-1]
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
+
+ # Not a list, regular serialization
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
+
+ def header(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a request header.
+
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized header
+ """
+ try:
+ if data_type in ["[str]"]:
+ data = ["" if d is None else d for d in data]
+
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
+
    def serialize_data(self, data, data_type, **kwargs):
        """Serialize generic data according to supplied data type.

        Dispatch order: basic types, then the serialize_type table, then enum
        classes, then container tokens ("[...]"/"{...}"), finally model
        serialization via _serialize.

        :param object data: The data to be serialized.
        :param str data_type: The type to be serialized from.
        :raises AttributeError: if required data is None.
        :raises ValueError: if data is None
        :raises SerializationError: if serialization fails.
        :returns: The serialized data.
        :rtype: str, int, float, bool, dict, list
        """
        if data is None:
            raise ValueError("No value for given attribute")

        try:
            # CoreNull is the sentinel for an explicit JSON null.
            if data is CoreNull:
                return None
            if data_type in self.basic_types.values():
                return self.serialize_basic(data, data_type, **kwargs)

            if data_type in self.serialize_type:
                return self.serialize_type[data_type](data, **kwargs)

            # If dependencies is empty, try with current data class
            # It has to be a subclass of Enum anyway
            enum_type = self.dependencies.get(data_type, cast(type, data.__class__))
            if issubclass(enum_type, Enum):
                return Serializer.serialize_enum(data, enum_obj=enum_type)

            # First+last character identify container types: "[]" or "{}".
            iter_type = data_type[0] + data_type[-1]
            if iter_type in self.serialize_type:
                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)

        except (ValueError, TypeError) as err:
            msg = "Unable to serialize value: {!r} as type: {!r}."
            raise SerializationError(msg.format(data, data_type)) from err
        # No dispatch matched: fall through to model serialization.
        return self._serialize(data, **kwargs)
+
+ @classmethod
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
+ custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+ if custom_serializer:
+ return custom_serializer
+ if kwargs.get("is_xml", False):
+ return cls._xml_basic_types_serializers.get(data_type)
+
+ @classmethod
+ def serialize_basic(cls, data, data_type, **kwargs):
+ """Serialize basic builting data type.
+ Serializes objects to str, int, float or bool.
+
+ Possible kwargs:
+ - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+ - is_xml bool : If set, use xml_basic_types_serializers
+
+ :param obj data: Object to be serialized.
+ :param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
+ :raises TypeError: raise if data_type is not one of str, int, float, bool.
+ """
+ custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+ if custom_serializer:
+ return custom_serializer(data)
+ if data_type == "str":
+ return cls.serialize_unicode(data)
+ if data_type == "int":
+ return int(data)
+ if data_type == "float":
+ return float(data)
+ if data_type == "bool":
+ return bool(data)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
+
+ @classmethod
+ def serialize_unicode(cls, data):
+ """Special handling for serializing unicode strings in Py2.
+ Encode to UTF-8 if unicode, otherwise handle as a str.
+
+ :param str data: Object to be serialized.
+ :rtype: str
+ :return: serialized object
+ """
+ try: # If I received an enum, return its value
+ return data.value
+ except AttributeError:
+ pass
+
+ try:
+ if isinstance(data, unicode): # type: ignore
+ # Don't change it, JSON and XML ElementTree are totally able
+ # to serialize correctly u'' strings
+ return data
+ except NameError:
+ return str(data)
+ return str(data)
+
    def serialize_iter(self, data, iter_type, div=None, **kwargs):
        """Serialize iterable.

        Supported kwargs:
        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
          serialization_ctxt['type'] should be same as data_type.
        - is_xml bool : If set, serialize as XML

        :param list data: Object to be serialized.
        :param str iter_type: Type of object in the iterable.
        :param str div: If set, this str will be used to combine the elements
         in the iterable into a combined string. Default is None (keep a list).
        :rtype: list, str
        :return: serialized iterable
        """
        if isinstance(data, str):
            raise SerializationError("Refuse str type as a valid iter type.")

        serialization_ctxt = kwargs.get("serialization_ctxt", {})
        is_xml = kwargs.get("is_xml", False)

        serialized = []
        for d in data:
            try:
                serialized.append(self.serialize_data(d, iter_type, **kwargs))
            except ValueError as err:
                # Plain ValueError: element becomes None; only
                # SerializationError subclasses propagate.
                if isinstance(err, SerializationError):
                    raise
                serialized.append(None)

        if kwargs.get("do_quote", False):
            serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]

        if div:
            serialized = ["" if s is None else str(s) for s in serialized]
            serialized = div.join(serialized)

        if "xml" in serialization_ctxt or is_xml:
            # XML serialization is more complicated
            xml_desc = serialization_ctxt.get("xml", {})
            xml_name = xml_desc.get("name")
            if not xml_name:
                xml_name = serialization_ctxt["key"]

            # Create a wrap node if necessary (use the fact that Element and list have "append")
            is_wrapped = xml_desc.get("wrapped", False)
            node_name = xml_desc.get("itemsName", xml_name)
            if is_wrapped:
                final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
            else:
                final_result = []
            # All list elements to "local_node"
            for el in serialized:
                if isinstance(el, ET.Element):
                    el_node = el
                else:
                    el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
                    if el is not None:  # Otherwise it writes "None" :-p
                        el_node.text = str(el)
                final_result.append(el_node)
            return final_result
        return serialized
+
    def serialize_dict(self, attr, dict_type, **kwargs):
        """Serialize a dictionary of objects.

        :param dict attr: Object to be serialized.
        :param str dict_type: Type of object in the dictionary.
        :rtype: dict
        :return: serialized dictionary
        """
        serialization_ctxt = kwargs.get("serialization_ctxt", {})
        serialized = {}
        for key, value in attr.items():
            try:
                serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
            except ValueError as err:
                # Plain ValueError: value becomes None; only
                # SerializationError subclasses propagate.
                if isinstance(err, SerializationError):
                    raise
                serialized[self.serialize_unicode(key)] = None

        if "xml" in serialization_ctxt:
            # XML serialization is more complicated
            xml_desc = serialization_ctxt["xml"]
            xml_name = xml_desc["name"]

            # Each dict entry becomes a child element named after its key.
            final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
            for key, value in serialized.items():
                ET.SubElement(final_result, key).text = value
            return final_result

        return serialized
+
    def serialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return-statements
        """Serialize a generic object.
        This will be handled as a dictionary. If object passed in is not
        a basic type (str, int, float, dict, list) it will simply be
        cast to str.

        :param dict attr: Object to be serialized.
        :rtype: dict or str
        :return: serialized object
        """
        if attr is None:
            return None
        # XML nodes pass through untouched.
        if isinstance(attr, ET.Element):
            return attr
        obj_type = type(attr)
        if obj_type in self.basic_types:
            return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
        if obj_type is _long_type:
            return self.serialize_long(attr)
        if obj_type is str:
            return self.serialize_unicode(attr)
        if obj_type is datetime.datetime:
            return self.serialize_iso(attr)
        if obj_type is datetime.date:
            return self.serialize_date(attr)
        if obj_type is datetime.time:
            return self.serialize_time(attr)
        if obj_type is datetime.timedelta:
            return self.serialize_duration(attr)
        if obj_type is decimal.Decimal:
            return self.serialize_decimal(attr)

        # If it's a model or I know this dependency, serialize as a Model
        if obj_type in self.dependencies.values() or isinstance(attr, Model):
            return self._serialize(attr)

        # Plain containers recurse; elements that fail with ValueError
        # become None (dict) or are dropped (list).
        if obj_type == dict:
            serialized = {}
            for key, value in attr.items():
                try:
                    serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
                except ValueError:
                    serialized[self.serialize_unicode(key)] = None
            return serialized

        if obj_type == list:
            serialized = []
            for obj in attr:
                try:
                    serialized.append(self.serialize_object(obj, **kwargs))
                except ValueError:
                    pass
            return serialized
        # Unknown type: last-resort string conversion.
        return str(attr)
+
+ @staticmethod
+ def serialize_enum(attr, enum_obj=None):
+ try:
+ result = attr.value
+ except AttributeError:
+ result = attr
+ try:
+ enum_obj(result) # type: ignore
+ return result
+ except ValueError as exc:
+ for enum_value in enum_obj: # type: ignore
+ if enum_value.value.lower() == str(attr).lower():
+ return enum_value.value
+ error = "{!r} is not valid value for enum {!r}"
+ raise SerializationError(error.format(attr, enum_obj)) from exc
+
+ @staticmethod
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize bytearray into base-64 string.
+
+ :param str attr: Object to be serialized.
+ :rtype: str
+ :return: serialized base64
+ """
+ return b64encode(attr).decode()
+
+ @staticmethod
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize str into base-64 string.
+
+ :param str attr: Object to be serialized.
+ :rtype: str
+ :return: serialized base64
+ """
+ encoded = b64encode(attr).decode("ascii")
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
+
+ @staticmethod
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Decimal object to float.
+
+ :param decimal attr: Object to be serialized.
+ :rtype: float
+ :return: serialized decimal
+ """
+ return float(attr)
+
    @staticmethod
    def serialize_long(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize long (Py2) or int (Py3).

        :param int attr: Object to be serialized.
        :rtype: int/long
        :return: serialized long
        """
        # _long_type is `long` on Python 2 and `int` on Python 3.
        return _long_type(attr)
+
+ @staticmethod
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Date object into ISO-8601 formatted string.
+
+ :param Date attr: Object to be serialized.
+ :rtype: str
+ :return: serialized date
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_date(attr)
+ t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
+ return t
+
+ @staticmethod
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Time object into ISO-8601 formatted string.
+
+ :param datetime.time attr: Object to be serialized.
+ :rtype: str
+ :return: serialized time
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_time(attr)
+ t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+ if attr.microsecond:
+ t += ".{:02}".format(attr.microsecond)
+ return t
+
    @staticmethod
    def serialize_duration(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize TimeDelta object into ISO-8601 formatted string.

        :param TimeDelta attr: Object to be serialized.
        :rtype: str
        :return: serialized duration
        """
        # Strings are parsed first so they re-serialize in canonical ISO form.
        if isinstance(attr, str):
            attr = isodate.parse_duration(attr)
        return isodate.duration_isoformat(attr)
+
+ @staticmethod
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into RFC-1123 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises TypeError: if format invalid.
+ :return: serialized rfc
+ """
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+ return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+ Serializer.days[utc.tm_wday],
+ utc.tm_mday,
+ Serializer.months[utc.tm_mon],
+ utc.tm_year,
+ utc.tm_hour,
+ utc.tm_min,
+ utc.tm_sec,
+ )
+
    @staticmethod
    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize Datetime object into ISO-8601 formatted string.

        :param Datetime attr: Object to be serialized.
        :rtype: str
        :raises SerializationError: if format invalid.
        :return: serialized iso
        """
        if isinstance(attr, str):
            attr = isodate.parse_datetime(attr)
        try:
            if not attr.tzinfo:
                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
            utc = attr.utctimetuple()
            if utc.tm_year > 9999 or utc.tm_year < 1:
                raise OverflowError("Hit max or min date")

            # Pad microseconds to 6 digits, drop trailing zeros, then keep at
            # least 3 digits — zero microseconds still produce "000", so a
            # fractional part is always emitted.
            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
            if microseconds:
                microseconds = "." + microseconds
            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
            )
            return date + microseconds + "Z"
        except (ValueError, OverflowError) as err:
            msg = "Unable to serialize datetime object."
            raise SerializationError(msg) from err
        except AttributeError as err:
            msg = "ISO-8601 object must be valid Datetime object."
            raise TypeError(msg) from err
+
+ @staticmethod
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: int
+ :raises SerializationError: if format invalid
+ :return: serialied unix
+ """
+ if isinstance(attr, int):
+ return attr
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ return int(calendar.timegm(attr.utctimetuple()))
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract a value from ``data`` by following the (possibly flattened)
    JSON path stored in ``attr_desc["key"]``.

    :param str attr: The attribute name (unused; the key path drives extraction)
    :param dict attr_desc: The attribute description, containing the "key" path
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted value, or None if any step of the path is None
    """
    key = attr_desc["key"]
    working_data = data

    while "." in key:
        # Need the cast, as for some reasons "split" is typed as list[str | Any]
        dict_keys = cast(list[str], _FLATTEN.split(key))
        if len(dict_keys) == 1:
            key = _decode_attribute_map_key(dict_keys[0])
            break
        working_key = _decode_attribute_map_key(dict_keys[0])
        # NOTE(review): falls back to the original top-level `data` (not {})
        # when an intermediate key is missing — surprising but long-standing;
        # confirm intent before changing.
        working_data = working_data.get(working_key, data)
        if working_data is None:
            # If at any point while following flatten JSON path see None, it means
            # that all properties under are None as well
            return None
        key = ".".join(dict_keys[1:])

    return working_data.get(key)
+
+
def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
    attr, attr_desc, data
):
    """Case-insensitive variant of ``rest_key_extractor``: follow the
    flattened JSON path in ``attr_desc["key"]``, matching each segment
    without regard to case.

    :param str attr: The attribute name (unused; the key path drives extraction)
    :param dict attr_desc: The attribute description, containing the "key" path
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted value, or None when the path dead-ends
    """
    key = attr_desc["key"]
    working_data = data

    while "." in key:
        dict_keys = _FLATTEN.split(key)
        if len(dict_keys) == 1:
            key = _decode_attribute_map_key(dict_keys[0])
            break
        working_key = _decode_attribute_map_key(dict_keys[0])
        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
        if working_data is None:
            # If at any point while following flatten JSON path see None, it means
            # that all properties under are None as well
            return None
        key = ".".join(dict_keys[1:])

    # Falsy working_data (e.g. {}) yields an implicit None return.
    if working_data:
        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract the attribute in "data" based on the last part of the JSON path key.

    :param str attr: The attribute to extract
    :param dict attr_desc: The attribute description
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted attribute
    """
    path_parts = _FLATTEN.split(attr_desc["key"])
    return attribute_key_extractor(path_parts[-1], None, data)
+
+
def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract the attribute in "data" based on the last part of the JSON path key.

    This is the case insensitive version of "last_rest_key_extractor"
    :param str attr: The attribute to extract
    :param dict attr_desc: The attribute description
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted attribute
    """
    path_parts = _FLATTEN.split(attr_desc["key"])
    return attribute_key_case_insensitive_extractor(path_parts[-1], None, data)
+
+
def attribute_key_extractor(attr, _, data):
    """Extract a value from ``data`` keyed directly by the attribute name.

    :param str attr: The attribute name to look up.
    :param _: Unused attribute description (kept for the extractor signature).
    :param dict data: The mapping to extract from.
    :return: The value, or None when the key is absent.
    """
    return data[attr] if attr in data else None
+
+
def attribute_key_case_insensitive_extractor(attr, _, data):
    """Extract a value from ``data``, matching the attribute name without
    regard to case. The first key (in iteration order) whose lowercase form
    equals the lowercase attribute name wins.

    :param str attr: The attribute name to look up.
    :param _: Unused attribute description (kept for the extractor signature).
    :param dict data: The mapping to extract from.
    :return: The value, or None when no key matches.
    """
    wanted = attr.lower()
    matching_key = next((key for key in data if key.lower() == wanted), None)
    return data.get(matching_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+ """Given an internal type XML description, extract correct XML name with namespace.
+
+ :param dict internal_type: An model type
+ :rtype: tuple
+ :returns: A tuple XML name + namespace dict
+ """
+ internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+ xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+ xml_ns = internal_type_xml_map.get("ns", None)
+ if xml_ns:
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ return xml_name
+
+
def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
    """Extract an attribute value from an XML ElementTree node, driven by the
    "xml" metadata in ``attr_desc`` (attribute vs. text vs. child elements,
    wrapped arrays, namespaces).

    :param str attr: The attribute name (unused; the description drives extraction)
    :param dict attr_desc: The attribute description, including "xml" metadata
    :param data: The source node; anything that is not an ET.Element yields None
    :return: The extracted value, or None
    """
    if isinstance(data, dict):
        return None

    # Test if this model is XML ready first
    if not isinstance(data, ET.Element):
        return None

    xml_desc = attr_desc.get("xml", {})
    xml_name = xml_desc.get("name", attr_desc["key"])

    # Look for a children
    is_iter_type = attr_desc["type"].startswith("[")
    is_wrapped = xml_desc.get("wrapped", False)
    internal_type = attr_desc.get("internalType", None)
    internal_type_xml_map = getattr(internal_type, "_xml_map", {})

    # Integrate namespace if necessary
    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
    if xml_ns:
        xml_name = "{{{}}}{}".format(xml_ns, xml_name)

    # If it's an attribute, that's simple
    if xml_desc.get("attr", False):
        return data.get(xml_name)

    # If it's x-ms-text, that's simple too
    if xml_desc.get("text", False):
        return data.text

    # Scenario where I take the local name:
    # - Wrapped node
    # - Internal type is an enum (considered basic types)
    # - Internal type has no XML/Name node
    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
        children = data.findall(xml_name)
    # If internal type has a local name and it's not a list, I use that name
    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
        xml_name = _extract_name_from_internal_type(internal_type)
        children = data.findall(xml_name)
    # That's an array
    else:
        if internal_type:  # Complex type, ignore itemsName and use the complex type name
            items_name = _extract_name_from_internal_type(internal_type)
        else:
            items_name = xml_desc.get("itemsName", xml_name)
        children = data.findall(items_name)

    if len(children) == 0:
        if is_iter_type:
            if is_wrapped:
                return None  # is_wrapped no node, we want None
            return []  # not wrapped, assume empty list
        return None  # Assume it's not there, maybe an optional node.

    # If is_iter_type and not wrapped, return all found children
    if is_iter_type:
        if not is_wrapped:
            return children
        # Iter and wrapped, should have found one node only (the wrap one)
        # NOTE(review): the message below reads "not wrapped" but this branch
        # handles the wrapped case — wording looks inverted; verify upstream.
        if len(children) != 1:
            raise DeserializationError(
                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
                    xml_name
                )
            )
        return list(children[0])  # Might be empty list and that's ok.

    # Here it's not a itertype, we should have found one element only or empty
    if len(children) > 1:
        raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
    return children[0]
+
+
+class Deserializer:
+ """Response object model deserializer.
+
+ :param dict classes: Class type dictionary for deserializing complex types.
+ :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+ """
+
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
    def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
        """Create a deserializer.

        :param classes: Mapping of model names to model classes, used to
            resolve complex (model) types during deserialization. Optional.
        """
        # Dispatch table: msrest type token -> deserializer callable.
        # "[]" and "{}" are the tokens for list and dict container types.
        self.deserialize_type = {
            "iso-8601": Deserializer.deserialize_iso,
            "rfc-1123": Deserializer.deserialize_rfc,
            "unix-time": Deserializer.deserialize_unix,
            "duration": Deserializer.deserialize_duration,
            "date": Deserializer.deserialize_date,
            "time": Deserializer.deserialize_time,
            "decimal": Deserializer.deserialize_decimal,
            "long": Deserializer.deserialize_long,
            "bytearray": Deserializer.deserialize_bytearray,
            "base64": Deserializer.deserialize_base64,
            "object": self.deserialize_object,
            "[]": self.deserialize_iter,
            "{}": self.deserialize_dict,
        }
        # Types that, when already instantiated, skip re-deserialization.
        self.deserialize_expected_types = {
            "duration": (isodate.Duration, datetime.timedelta),
            "iso-8601": (datetime.datetime),
        }
        self.dependencies: dict[str, type] = dict(classes) if classes else {}
        self.key_extractors = [rest_key_extractor, xml_key_extractor]
        # Additional properties only works if the "rest_key_extractor" is used to
        # extract the keys. Making it to work whatever the key extractor is too much
        # complicated, with no real scenario for now.
        # So adding a flag to disable additional properties detection. This flag should be
        # used if your expect the deserialization to NOT come from a JSON REST syntax.
        # Otherwise, result are unexpected
        self.additional_properties_detection = True
+
+ def __call__(self, target_obj, response_data, content_type=None):
+ """Call the deserializer to process a REST response.
+
+ :param str target_obj: Target data type to deserialize to.
+ :param requests.Response response_data: REST response object.
+ :param str content_type: Swagger "produces" if available.
+ :raises DeserializationError: if deserialization fails.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ data = self._unpack_content(response_data, content_type)
+ return self._deserialize(target_obj, data)
+
    def _deserialize(self, target_obj, data):  # pylint: disable=inconsistent-return-statements
        """Call the deserializer on a model.

        Data needs to be already deserialized as JSON or XML ElementTree

        :param str target_obj: Target data type to deserialize to.
        :param object data: Object to deserialize.
        :raises DeserializationError: if deserialization fails.
        :return: Deserialized object.
        :rtype: object
        """
        # This is already a model, go recursive just in case
        if hasattr(data, "_attribute_map"):
            constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
            try:
                for attr, mapconfig in data._attribute_map.items():  # pylint: disable=protected-access
                    if attr in constants:
                        continue
                    value = getattr(data, attr)
                    if value is None:
                        continue
                    local_type = mapconfig["type"]
                    internal_data_type = local_type.strip("[]{}")
                    # NOTE(review): internal_data_type is a *string* here, so
                    # isinstance(..., Enum) is always False — the enum check is
                    # a no-op; verify intent before changing.
                    if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
                        continue
                    setattr(data, attr, self._deserialize(local_type, value))
                return data
            except AttributeError:
                # Silently returns None when the model lacks expected attrs.
                return

        response, class_name = self._classify_target(target_obj, data)

        if isinstance(response, str):
            return self.deserialize_data(data, response)
        if isinstance(response, type) and issubclass(response, Enum):
            return self.deserialize_enum(data, response)

        if data is None or data is CoreNull:
            return data
        try:
            attributes = response._attribute_map  # type: ignore # pylint: disable=protected-access
            d_attrs = {}
            for attr, attr_desc in attributes.items():
                # Check empty string. If it's not empty, someone has a real "additionalProperties"...
                if attr == "additional_properties" and attr_desc["key"] == "":
                    continue
                raw_value = None
                # Enhance attr_desc with some dynamic data
                attr_desc = attr_desc.copy()  # Do a copy, do not change the real one
                internal_data_type = attr_desc["type"].strip("[]{}")
                if internal_data_type in self.dependencies:
                    attr_desc["internalType"] = self.dependencies[internal_data_type]

                # First extractor (in order) that yields a value wins;
                # later conflicting extractions are logged and ignored.
                for key_extractor in self.key_extractors:
                    found_value = key_extractor(attr, attr_desc, data)
                    if found_value is not None:
                        if raw_value is not None and raw_value != found_value:
                            msg = (
                                "Ignoring extracted value '%s' from %s for key '%s'"
                                " (duplicate extraction, follow extractors order)"
                            )
                            _LOGGER.warning(msg, found_value, key_extractor, attr)
                            continue
                        raw_value = found_value

                value = self.deserialize_data(raw_value, attr_desc["type"])
                d_attrs[attr] = value
        except (AttributeError, TypeError, KeyError) as err:
            msg = "Unable to deserialize to object: " + class_name  # type: ignore
            raise DeserializationError(msg) from err
        additional_properties = self._build_additional_properties(attributes, data)
        return self._instantiate_model(response, d_attrs, additional_properties)
+
+ def _build_additional_properties(self, attribute_map, data):
+ """Collect payload keys that are not declared in the attribute map.
+
+ :param dict attribute_map: The model's attribute map.
+ :param dict/ET.Element data: The raw payload.
+ :return: Dict of unknown key/value pairs, or None when detection is disabled
+ or the model declares a real "additionalProperties" attribute.
+ :rtype: dict or None
+ """
+ if not self.additional_properties_detection:
+ return None
+ if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
+ # Check empty string. If it's not empty, someone has a real "additionalProperties"
+ return None
+ if isinstance(data, ET.Element):
+ # Flatten XML to a {tag: text} dict so the set arithmetic below works uniformly.
+ data = {el.tag: el.text for el in data}
+
+ # Known keys are the first segment of each (possibly flattened) attribute key.
+ known_keys = {
+ _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
+ for desc in attribute_map.values()
+ if desc["key"] != ""
+ }
+ present_keys = set(data.keys())
+ missing_keys = present_keys - known_keys
+ return {key: data[key] for key in missing_keys}
+
+ def _classify_target(self, target, data):
+ """Check to see whether the deserialization target object can
+ be classified into a subclass.
+ Once classification has been determined, initialize object.
+
+ :param str target: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
+ """
+ if target is None:
+ return None, None
+
+ if isinstance(target, str):
+ try:
+ target = self.dependencies[target]
+ except KeyError:
+ # Unknown model name: fall back to treating it as a plain data-type string.
+ return target, target
+
+ try:
+ # presumably resolves a polymorphic subclass via a discriminator; only
+ # Model types define _classify — verify against the Model base class.
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
+ except AttributeError:
+ pass # Target is not a Model, no classify
+ return target, target.__class__.__name__ # type: ignore
+
+ def failsafe_deserialize(self, target_obj, data, content_type=None):
+ """Ignores any errors encountered in deserialization,
+ and falls back to not deserializing the object. Recommended
+ for use in error deserialization, as we want to return the
+ HttpResponseError to users, and not have them deal with
+ a deserialization error.
+
+ :param str target_obj: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ try:
+ return self(target_obj, data, content_type=content_type)
+ except: # pylint: disable=bare-except
+ # Bare except is deliberate: failsafe means best effort, never raise.
+ _LOGGER.debug(
+ "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+ )
+ return None
+
+ @staticmethod
+ def _unpack_content(raw_data, content_type=None):
+ """Extract the correct structure for deserialization.
+
+ If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+ if we can't, raise. Your Pipeline should have a RawDeserializer.
+
+ If not a pipeline response and raw_data is bytes or string, use content-type
+ to decode it. If no content-type, try JSON.
+
+ If raw_data is something else, bypass all logic and return it directly.
+
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
+ :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+ :raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
+ """
+ # All response types below are detected by duck typing to avoid hard imports.
+ # Assume this is enough to detect a Pipeline Response without importing it
+ context = getattr(raw_data, "context", {})
+ if context:
+ if RawDeserializer.CONTEXT_NAME in context:
+ return context[RawDeserializer.CONTEXT_NAME]
+ raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+ # Assume this is enough to recognize universal_http.ClientResponse without importing it
+ if hasattr(raw_data, "body"):
+ # NOTE(review): text is invoked as a method here, but read as a property
+ # in the requests branch below — matches the two response shapes; verify.
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+ # Assume this enough to recognize requests.Response without importing it.
+ if hasattr(raw_data, "_content_consumed"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+ return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
+ return raw_data
+
+ def _instantiate_model(self, response, attrs, additional_properties=None):
+ """Instantiate a response model passing in deserialized args.
+
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
+ """
+ if callable(response):
+ subtype = getattr(response, "_subtype_map", {})
+ try:
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
+ # Readonly, constant, and subtype-discriminator attrs are excluded from
+ # the constructor call...
+ kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
+ response_obj = response(**kwargs)
+ # ...then readonly values are assigned directly, bypassing the constructor.
+ for attr in readonly:
+ setattr(response_obj, attr, attrs.get(attr))
+ if additional_properties:
+ response_obj.additional_properties = additional_properties # type: ignore
+ return response_obj
+ except TypeError as err:
+ msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
+ raise DeserializationError(msg) from err
+ else:
+ # Not callable: assume an existing instance and populate it in place.
+ try:
+ for attr, value in attrs.items():
+ setattr(response, attr, value)
+ return response
+ except Exception as exp:
+ msg = "Unable to populate response model. "
+ msg += "Type: {}, Error: {}".format(type(response), exp)
+ raise DeserializationError(msg) from exp
+
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
+ """Process data for deserialization according to data type.
+
+ Dispatch order: basic types, registered deserializers, iterable/dict
+ type syntax ("[...]"/"{...}"), then model or enum classes resolved
+ from self.dependencies.
+
+ :param str data: The response string to be deserialized.
+ :param str data_type: The type to deserialize to.
+ :raises DeserializationError: if deserialization fails.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ if data is None:
+ return data
+
+ try:
+ if not data_type:
+ return data
+ if data_type in self.basic_types.values():
+ return self.deserialize_basic(data, data_type)
+ if data_type in self.deserialize_type:
+ # Already the expected Python type (e.g. a datetime passed through): no-op.
+ if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
+ return data
+
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
+ if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
+ return None
+ data_val = self.deserialize_type[data_type](data)
+ return data_val
+
+ # "[elementType]" / "{valueType}": first+last char pick the iter/dict deserializer.
+ iter_type = data_type[0] + data_type[-1]
+ if iter_type in self.deserialize_type:
+ return self.deserialize_type[iter_type](data, data_type[1:-1])
+
+ # Note: a KeyError here (unknown model name) is NOT caught below and propagates.
+ obj_type = self.dependencies[data_type]
+ if issubclass(obj_type, Enum):
+ if isinstance(data, ET.Element):
+ data = data.text
+ return self.deserialize_enum(data, obj_type)
+
+ except (ValueError, TypeError, AttributeError) as err:
+ msg = "Unable to deserialize response data."
+ msg += " Data: {}, {}".format(data, data_type)
+ raise DeserializationError(msg) from err
+ # Only reached when obj_type was resolved above and is a non-enum model class.
+ return self._deserialize(obj_type, data)
+
+ def deserialize_iter(self, attr, iter_type):
+ """Deserialize an iterable.
+
+ :param list attr: Iterable to be deserialized.
+ :param str iter_type: The type of object in the iterable.
+ :raises DeserializationError: if attr is not list/set-like.
+ :return: Deserialized iterable.
+ :rtype: list
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element): # If I receive an element here, get the children
+ attr = list(attr)
+ if not isinstance(attr, (list, set)):
+ raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
+ # Each element is deserialized recursively with the declared element type.
+ return [self.deserialize_data(a, iter_type) for a in attr]
+
+ def deserialize_dict(self, attr, dict_type):
+ """Deserialize a dictionary.
+
+ :param dict/list attr: Dictionary to be deserialized. Also accepts
+ a list of key, value pairs.
+ :param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
+ :rtype: dict
+ """
+ if isinstance(attr, list):
+ # Swagger-style pair list: [{"key": ..., "value": ...}, ...].
+ return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+ if isinstance(attr, ET.Element):
+ # Transform value into {"Key": "value"}
+ attr = {el.tag: el.text for el in attr}
+ return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
+ """Deserialize a generic object.
+ This will be handled as a dictionary.
+
+ :param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
+ :rtype: dict
+ :raises TypeError: if non-builtin datatype encountered.
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element):
+ # Do no recurse on XML, just return the tree as-is
+ return attr
+ if isinstance(attr, str):
+ return self.deserialize_basic(attr, "str")
+ obj_type = type(attr)
+ if obj_type in self.basic_types:
+ return self.deserialize_basic(attr, self.basic_types[obj_type])
+ if obj_type is _long_type:
+ return self.deserialize_long(attr)
+
+ # Containers are deserialized recursively, element by element.
+ if obj_type == dict:
+ deserialized = {}
+ for key, value in attr.items():
+ try:
+ deserialized[key] = self.deserialize_object(value, **kwargs)
+ except ValueError:
+ # Undeserializable value: keep the key, null the value.
+ deserialized[key] = None
+ return deserialized
+
+ if obj_type == list:
+ deserialized = []
+ for obj in attr:
+ try:
+ deserialized.append(self.deserialize_object(obj, **kwargs))
+ except ValueError:
+ # Undeserializable element: dropped (asymmetric with the dict case above).
+ pass
+ return deserialized
+
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
+
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
+ """Deserialize basic builtin data type from string.
+ Will attempt to convert to str, int, float and bool.
+ This function will also accept '1', '0', 'true' and 'false' as
+ valid bool values.
+
+ :param str attr: response string to be deserialized.
+ :param str data_type: deserialization data type.
+ :return: Deserialized basic type.
+ :rtype: str, int, float or bool
+ :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool.
+ """
+ # If we're here, data is supposed to be a basic type.
+ # If it's still an XML node, take the text
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ # NOTE(review): a literal False or 0 also satisfies `not attr` and so
+ # returns None below instead of False/0 — matches upstream msrest; confirm intended.
+ if not attr:
+ if data_type == "str":
+ # None or '', node is empty string.
+ return ""
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
+
+ if data_type == "bool":
+ if attr in [True, False, 1, 0]:
+ return bool(attr)
+ if isinstance(attr, str):
+ if attr.lower() in ["true", "1"]:
+ return True
+ if attr.lower() in ["false", "0"]:
+ return False
+ raise TypeError("Invalid boolean value: {}".format(attr))
+
+ if data_type == "str":
+ return self.deserialize_unicode(attr)
+ if data_type == "int":
+ return int(attr)
+ if data_type == "float":
+ return float(attr)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
+
+ @staticmethod
+ def deserialize_unicode(data):
+ """Preserve unicode objects in Python 2, otherwise return data
+ as a string.
+
+ :param str data: response string to be deserialized.
+ :return: Deserialized string.
+ :rtype: str or unicode
+ """
+ # We might be here because we have an enum modeled as string,
+ # and we try to deserialize a partial dict with enum inside
+ if isinstance(data, Enum):
+ return data
+
+ # Consider this is real string
+ try:
+ if isinstance(data, unicode): # type: ignore
+ return data
+ except NameError:
+ # Python 3: the `unicode` builtin does not exist.
+ return str(data)
+ return str(data)
+
+ @staticmethod
+ def deserialize_enum(data, enum_obj):
+ """Deserialize string into enum object.
+
+ If the string is not a valid enum value it will be returned as-is
+ and a warning will be logged.
+
+ :param str data: Response string to be deserialized. If this value is
+ None or invalid it will be returned as-is.
+ :param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
+ :rtype: Enum
+ """
+ if isinstance(data, enum_obj) or data is None:
+ return data
+ if isinstance(data, Enum):
+ data = data.value
+ if isinstance(data, int):
+ # Workaround. We might consider remove it in the future.
+ # An int is treated as a positional index into the enum's member list.
+ try:
+ return list(enum_obj.__members__.values())[data]
+ except IndexError as exc:
+ error = "{!r} is not a valid index for enum {!r}"
+ raise DeserializationError(error.format(data, enum_obj)) from exc
+ try:
+ return enum_obj(str(data))
+ except ValueError:
+ # Exact match failed: retry case-insensitively against each member value.
+ for enum_value in enum_obj:
+ if enum_value.value.lower() == str(data).lower():
+ return enum_value
+ # We don't fail anymore for unknown value, we deserialize as a string
+ _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+ return Deserializer.deserialize_unicode(data)
+
+ @staticmethod
+ def deserialize_bytearray(attr):
+ """Deserialize string into bytearray.
+
+ Input is expected to be standard base64-encoded.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
+ :rtype: bytearray
+ :raises TypeError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return bytearray(b64decode(attr)) # type: ignore
+
+ @staticmethod
+ def deserialize_base64(attr):
+ """Deserialize base64 encoded string into string.
+
+ Accepts URL-safe base64 ("-"/"_") with or without padding.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
+ :rtype: bytearray
+ :raises TypeError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ # Restore "=" padding to a multiple of 4, then map URL-safe chars to standard base64.
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
+ encoded = attr.replace("-", "+").replace("_", "/")
+ return b64decode(encoded)
+
+ @staticmethod
+ def deserialize_decimal(attr):
+ """Deserialize string into Decimal object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized decimal
+ :raises DeserializationError: if string format invalid.
+ :rtype: decimal
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ # str() first: Decimal(float) would otherwise capture binary-float noise.
+ return decimal.Decimal(str(attr)) # type: ignore
+ except decimal.DecimalException as err:
+ msg = "Invalid decimal {}".format(attr)
+ raise DeserializationError(msg) from err
+
+ @staticmethod
+ def deserialize_long(attr):
+ """Deserialize string into long (Py2) or int (Py3).
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized int
+ :rtype: long or int
+ :raises ValueError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ # _long_type is `long` on Python 2 and `int` on Python 3.
+ return _long_type(attr) # type: ignore
+
+ @staticmethod
+ def deserialize_duration(attr):
+ """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized duration
+ :rtype: TimeDelta
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ duration = isodate.parse_duration(attr)
+ except (ValueError, OverflowError, AttributeError) as err:
+ # AttributeError covers non-string input (e.g. None from an empty XML node).
+ msg = "Cannot deserialize duration object."
+ raise DeserializationError(msg) from err
+ return duration
+
+ @staticmethod
+ def deserialize_date(attr):
+ """Deserialize ISO-8601 formatted string into Date object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized date
+ :rtype: Date
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ # [^\W\d_] matches any letter: dates may contain only digits and separators.
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+ @staticmethod
+ def deserialize_time(attr):
+ """Deserialize ISO-8601 formatted string into time object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized time
+ :rtype: datetime.time
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ # [^\W\d_] matches any letter: only digits and separators are accepted.
+ # NOTE(review): the error message says "Date" although this parses a time.
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ return isodate.parse_time(attr)
+
+ @staticmethod
+ def deserialize_rfc(attr):
+ """Deserialize RFC-1123 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
+ :rtype: Datetime
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ parsed_date = email.utils.parsedate_tz(attr) # type: ignore
+ # parsedate_tz returns the UTC offset in seconds at index 9 (or None),
+ # hence the /60 conversion to minutes for timedelta.
+ date_obj = datetime.datetime(
+ *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+ )
+ if not date_obj.tzinfo:
+ date_obj = date_obj.astimezone(tz=TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to rfc datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
+
+ @staticmethod
+ def deserialize_iso(attr):
+ """Deserialize ISO-8601 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
+ :rtype: Datetime
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ attr = attr.upper() # type: ignore
+ match = Deserializer.valid_date.match(attr)
+ if not match:
+ raise ValueError("Invalid datetime string: " + attr)
+
+ # Truncate fractional seconds to 6 digits: datetime only supports microseconds.
+ check_decimal = attr.split(".")
+ if len(check_decimal) > 1:
+ decimal_str = ""
+ for digit in check_decimal[1]:
+ if digit.isdigit():
+ decimal_str += digit
+ else:
+ break
+ if len(decimal_str) > 6:
+ attr = attr.replace(decimal_str, decimal_str[0:6])
+
+ date_obj = isodate.parse_datetime(attr)
+ test_utc = date_obj.utctimetuple()
+ # Reject years outside datetime's representable range after UTC conversion.
+ if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
+
+ @staticmethod
+ def deserialize_unix(attr):
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param int attr: Object to be serialized.
+ :return: Deserialized datetime
+ :rtype: Datetime
+ :raises DeserializationError: if format invalid
+ """
+ # NOTE(review): the docstring says "Serialize", but this deserializes an
+ # epoch timestamp (seconds) into a UTC datetime.
+ if isinstance(attr, ET.Element):
+ attr = int(attr.text) # type: ignore
+ try:
+ attr = int(attr)
+ date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to unix datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/__init__.py
new file mode 100644
index 000000000000..c0391d76526d
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/__init__.py
@@ -0,0 +1,872 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models import ( # type: ignore
+ A2APreviewTool,
+ A2AToolCall,
+ A2AToolCallOutput,
+ AISearchIndexResource,
+ AgentId,
+ AgentReference,
+ Annotation,
+ ApiErrorResponse,
+ ApplyPatchCreateFileOperation,
+ ApplyPatchCreateFileOperationParam,
+ ApplyPatchDeleteFileOperation,
+ ApplyPatchDeleteFileOperationParam,
+ ApplyPatchFileOperation,
+ ApplyPatchOperationParam,
+ ApplyPatchToolCallItemParam,
+ ApplyPatchToolCallOutputItemParam,
+ ApplyPatchToolParam,
+ ApplyPatchUpdateFileOperation,
+ ApplyPatchUpdateFileOperationParam,
+ ApproximateLocation,
+ AutoCodeInterpreterToolParam,
+ AzureAISearchTool,
+ AzureAISearchToolCall,
+ AzureAISearchToolCallOutput,
+ AzureAISearchToolResource,
+ AzureFunctionBinding,
+ AzureFunctionDefinition,
+ AzureFunctionDefinitionFunction,
+ AzureFunctionStorageQueue,
+ AzureFunctionTool,
+ AzureFunctionToolCall,
+ AzureFunctionToolCallOutput,
+ BingCustomSearchConfiguration,
+ BingCustomSearchPreviewTool,
+ BingCustomSearchToolCall,
+ BingCustomSearchToolCallOutput,
+ BingCustomSearchToolParameters,
+ BingGroundingSearchConfiguration,
+ BingGroundingSearchToolParameters,
+ BingGroundingTool,
+ BingGroundingToolCall,
+ BingGroundingToolCallOutput,
+ BrowserAutomationPreviewTool,
+ BrowserAutomationToolCall,
+ BrowserAutomationToolCallOutput,
+ BrowserAutomationToolConnectionParameters,
+ BrowserAutomationToolParameters,
+ CaptureStructuredOutputsTool,
+ ChatSummaryMemoryItem,
+ ClickParam,
+ CodeInterpreterOutputImage,
+ CodeInterpreterOutputLogs,
+ CodeInterpreterTool,
+ CompactResource,
+ CompactionSummaryItemParam,
+ ComparisonFilter,
+ CompoundFilter,
+ ComputerAction,
+ ComputerCallOutputItemParam,
+ ComputerCallSafetyCheckParam,
+ ComputerScreenshotContent,
+ ComputerScreenshotImage,
+ ComputerUsePreviewTool,
+ ContainerAutoParam,
+ ContainerFileCitationBody,
+ ContainerNetworkPolicyAllowlistParam,
+ ContainerNetworkPolicyDisabledParam,
+ ContainerNetworkPolicyDomainSecretParam,
+ ContainerNetworkPolicyParam,
+ ContainerReferenceResource,
+ ContainerSkill,
+ ContextManagementParam,
+ ConversationParam_2,
+ ConversationReference,
+ CoordParam,
+ CreateResponse,
+ CreatedBy,
+ CustomGrammarFormatParam,
+ CustomTextFormatParam,
+ CustomToolParam,
+ CustomToolParamFormat,
+ DeleteResponseResult,
+ DoubleClickAction,
+ DragParam,
+ Error,
+ FabricDataAgentToolCall,
+ FabricDataAgentToolCallOutput,
+ FabricDataAgentToolParameters,
+ FileCitationBody,
+ FilePath,
+ FileSearchTool,
+ FileSearchToolCallResults,
+ FunctionAndCustomToolCallOutput,
+ FunctionAndCustomToolCallOutputInputFileContent,
+ FunctionAndCustomToolCallOutputInputImageContent,
+ FunctionAndCustomToolCallOutputInputTextContent,
+ FunctionCallOutputItemParam,
+ FunctionShellAction,
+ FunctionShellActionParam,
+ FunctionShellCallEnvironment,
+ FunctionShellCallItemParam,
+ FunctionShellCallItemParamEnvironment,
+ FunctionShellCallItemParamEnvironmentContainerReferenceParam,
+ FunctionShellCallItemParamEnvironmentLocalEnvironmentParam,
+ FunctionShellCallOutputContent,
+ FunctionShellCallOutputContentParam,
+ FunctionShellCallOutputExitOutcome,
+ FunctionShellCallOutputExitOutcomeParam,
+ FunctionShellCallOutputItemParam,
+ FunctionShellCallOutputOutcome,
+ FunctionShellCallOutputOutcomeParam,
+ FunctionShellCallOutputTimeoutOutcome,
+ FunctionShellCallOutputTimeoutOutcomeParam,
+ FunctionShellToolParam,
+ FunctionShellToolParamEnvironment,
+ FunctionShellToolParamEnvironmentContainerReferenceParam,
+ FunctionShellToolParamEnvironmentLocalEnvironmentParam,
+ FunctionTool,
+ HybridSearchOptions,
+ ImageGenTool,
+ ImageGenToolInputImageMask,
+ InlineSkillParam,
+ InlineSkillSourceParam,
+ InputFileContent,
+ InputFileContentParam,
+ InputImageContent,
+ InputImageContentParamAutoParam,
+ InputTextContent,
+ InputTextContentParam,
+ Item,
+ ItemCodeInterpreterToolCall,
+ ItemComputerToolCall,
+ ItemCustomToolCall,
+ ItemCustomToolCallOutput,
+ ItemField,
+ ItemFieldApplyPatchToolCall,
+ ItemFieldApplyPatchToolCallOutput,
+ ItemFieldCodeInterpreterToolCall,
+ ItemFieldCompactionBody,
+ ItemFieldComputerToolCall,
+ ItemFieldComputerToolCallOutput,
+ ItemFieldCustomToolCall,
+ ItemFieldCustomToolCallOutput,
+ ItemFieldFileSearchToolCall,
+ ItemFieldFunctionShellCall,
+ ItemFieldFunctionShellCallOutput,
+ ItemFieldFunctionToolCall,
+ ItemFieldFunctionToolCallOutput,
+ ItemFieldImageGenToolCall,
+ ItemFieldLocalShellToolCall,
+ ItemFieldLocalShellToolCallOutput,
+ ItemFieldMcpApprovalRequest,
+ ItemFieldMcpApprovalResponseResource,
+ ItemFieldMcpListTools,
+ ItemFieldMcpToolCall,
+ ItemFieldMessage,
+ ItemFieldReasoningItem,
+ ItemFieldWebSearchToolCall,
+ ItemFileSearchToolCall,
+ ItemFunctionToolCall,
+ ItemImageGenToolCall,
+ ItemLocalShellToolCall,
+ ItemLocalShellToolCallOutput,
+ ItemMcpApprovalRequest,
+ ItemMcpListTools,
+ ItemMcpToolCall,
+ ItemMessage,
+ ItemOutputMessage,
+ ItemReasoningItem,
+ ItemReferenceParam,
+ ItemWebSearchToolCall,
+ KeyPressAction,
+ LocalEnvironmentResource,
+ LocalShellExecAction,
+ LocalShellToolParam,
+ LocalSkillParam,
+ LogProb,
+ MCPApprovalResponse,
+ MCPListToolsTool,
+ MCPListToolsToolAnnotations,
+ MCPListToolsToolInputSchema,
+ MCPTool,
+ MCPToolFilter,
+ MCPToolRequireApproval,
+ MemoryItem,
+ MemorySearchItem,
+ MemorySearchOptions,
+ MemorySearchPreviewTool,
+ MemorySearchTool,
+ MemorySearchToolCallItemParam,
+ MemorySearchToolCallItemResource,
+ MessageContent,
+ MessageContentInputFileContent,
+ MessageContentInputImageContent,
+ MessageContentInputTextContent,
+ MessageContentOutputTextContent,
+ MessageContentReasoningTextContent,
+ MessageContentRefusalContent,
+ Metadata,
+ MicrosoftFabricPreviewTool,
+ MoveParam,
+ OAuthConsentRequestOutputItem,
+ OpenApiAnonymousAuthDetails,
+ OpenApiAuthDetails,
+ OpenApiFunctionDefinition,
+ OpenApiFunctionDefinitionFunction,
+ OpenApiManagedAuthDetails,
+ OpenApiManagedSecurityScheme,
+ OpenApiProjectConnectionAuthDetails,
+ OpenApiProjectConnectionSecurityScheme,
+ OpenApiTool,
+ OpenApiToolCall,
+ OpenApiToolCallOutput,
+ OutputContent,
+ OutputContentOutputTextContent,
+ OutputContentReasoningTextContent,
+ OutputContentRefusalContent,
+ OutputItem,
+ OutputItemApplyPatchToolCall,
+ OutputItemApplyPatchToolCallOutput,
+ OutputItemCodeInterpreterToolCall,
+ OutputItemCompactionBody,
+ OutputItemComputerToolCall,
+ OutputItemComputerToolCallOutput,
+ OutputItemCustomToolCall,
+ OutputItemCustomToolCallOutput,
+ OutputItemFileSearchToolCall,
+ OutputItemFunctionShellCall,
+ OutputItemFunctionShellCallOutput,
+ OutputItemFunctionToolCall,
+ OutputItemFunctionToolCallOutput,
+ OutputItemImageGenToolCall,
+ OutputItemLocalShellToolCall,
+ OutputItemLocalShellToolCallOutput,
+ OutputItemMcpApprovalRequest,
+ OutputItemMcpApprovalResponseResource,
+ OutputItemMcpListTools,
+ OutputItemMcpToolCall,
+ OutputItemMessage,
+ OutputItemOutputMessage,
+ OutputItemReasoningItem,
+ OutputItemWebSearchToolCall,
+ OutputMessageContent,
+ OutputMessageContentOutputTextContent,
+ OutputMessageContentRefusalContent,
+ Prompt,
+ RankingOptions,
+ RealtimeMCPError,
+ RealtimeMCPHTTPError,
+ RealtimeMCPProtocolError,
+ RealtimeMCPToolExecutionError,
+ Reasoning,
+ ReasoningTextContent,
+ Response,
+ ResponseAudioDeltaEvent,
+ ResponseAudioDoneEvent,
+ ResponseAudioTranscriptDeltaEvent,
+ ResponseAudioTranscriptDoneEvent,
+ ResponseCodeInterpreterCallCodeDeltaEvent,
+ ResponseCodeInterpreterCallCodeDoneEvent,
+ ResponseCodeInterpreterCallCompletedEvent,
+ ResponseCodeInterpreterCallInProgressEvent,
+ ResponseCodeInterpreterCallInterpretingEvent,
+ ResponseCompletedEvent,
+ ResponseContentPartAddedEvent,
+ ResponseContentPartDoneEvent,
+ ResponseCreatedEvent,
+ ResponseCustomToolCallInputDeltaEvent,
+ ResponseCustomToolCallInputDoneEvent,
+ ResponseError,
+ ResponseErrorEvent,
+ ResponseFailedEvent,
+ ResponseFileSearchCallCompletedEvent,
+ ResponseFileSearchCallInProgressEvent,
+ ResponseFileSearchCallSearchingEvent,
+ ResponseFormatJsonSchemaSchema,
+ ResponseFunctionCallArgumentsDeltaEvent,
+ ResponseFunctionCallArgumentsDoneEvent,
+ ResponseImageGenCallCompletedEvent,
+ ResponseImageGenCallGeneratingEvent,
+ ResponseImageGenCallInProgressEvent,
+ ResponseImageGenCallPartialImageEvent,
+ ResponseInProgressEvent,
+ ResponseIncompleteDetails,
+ ResponseIncompleteEvent,
+ ResponseLogProb,
+ ResponseLogProbTopLogprobs,
+ ResponseMCPCallArgumentsDeltaEvent,
+ ResponseMCPCallArgumentsDoneEvent,
+ ResponseMCPCallCompletedEvent,
+ ResponseMCPCallFailedEvent,
+ ResponseMCPCallInProgressEvent,
+ ResponseMCPListToolsCompletedEvent,
+ ResponseMCPListToolsFailedEvent,
+ ResponseMCPListToolsInProgressEvent,
+ ResponseOutputItemAddedEvent,
+ ResponseOutputItemDoneEvent,
+ ResponseOutputTextAnnotationAddedEvent,
+ ResponsePromptVariables,
+ ResponseQueuedEvent,
+ ResponseReasoningSummaryPartAddedEvent,
+ ResponseReasoningSummaryPartAddedEventPart,
+ ResponseReasoningSummaryPartDoneEvent,
+ ResponseReasoningSummaryPartDoneEventPart,
+ ResponseReasoningSummaryTextDeltaEvent,
+ ResponseReasoningSummaryTextDoneEvent,
+ ResponseReasoningTextDeltaEvent,
+ ResponseReasoningTextDoneEvent,
+ ResponseRefusalDeltaEvent,
+ ResponseRefusalDoneEvent,
+ ResponseStreamEvent,
+ ResponseStreamOptions,
+ ResponseTextDeltaEvent,
+ ResponseTextDoneEvent,
+ ResponseTextParam,
+ ResponseUsage,
+ ResponseUsageInputTokensDetails,
+ ResponseUsageOutputTokensDetails,
+ ResponseWebSearchCallCompletedEvent,
+ ResponseWebSearchCallInProgressEvent,
+ ResponseWebSearchCallSearchingEvent,
+ ScreenshotParam,
+ ScrollParam,
+ SharepointGroundingToolCall,
+ SharepointGroundingToolCallOutput,
+ SharepointGroundingToolParameters,
+ SharepointPreviewTool,
+ SkillReferenceParam,
+ SpecificApplyPatchParam,
+ SpecificFunctionShellParam,
+ StructuredOutputDefinition,
+ StructuredOutputsOutputItem,
+ SummaryTextContent,
+ TextContent,
+ TextResponseFormatConfiguration,
+ TextResponseFormatConfigurationResponseFormatJsonObject,
+ TextResponseFormatConfigurationResponseFormatText,
+ TextResponseFormatJsonSchema,
+ Tool,
+ ToolChoiceAllowed,
+ ToolChoiceCodeInterpreter,
+ ToolChoiceComputerUsePreview,
+ ToolChoiceCustom,
+ ToolChoiceFileSearch,
+ ToolChoiceFunction,
+ ToolChoiceImageGeneration,
+ ToolChoiceMCP,
+ ToolChoiceParam,
+ ToolChoiceWebSearchPreview,
+ ToolChoiceWebSearchPreview20250311,
+ ToolProjectConnection,
+ TopLogProb,
+ TypeParam,
+ UrlCitationBody,
+ UserProfileMemoryItem,
+ VectorStoreFileAttributes,
+ WaitParam,
+ WebSearchActionFind,
+ WebSearchActionOpenPage,
+ WebSearchActionSearch,
+ WebSearchActionSearchSources,
+ WebSearchApproximateLocation,
+ WebSearchConfiguration,
+ WebSearchPreviewTool,
+ WebSearchTool,
+ WebSearchToolFilters,
+ WorkflowActionOutputItem,
+)
+
+from ._enums import ( # type: ignore
+ AnnotationType,
+ ApplyPatchCallOutputStatus,
+ ApplyPatchCallOutputStatusParam,
+ ApplyPatchCallStatus,
+ ApplyPatchCallStatusParam,
+ ApplyPatchFileOperationType,
+ ApplyPatchOperationParamType,
+ AzureAISearchQueryType,
+ ClickButtonType,
+ ComputerActionType,
+ ComputerEnvironment,
+ ContainerMemoryLimit,
+ ContainerNetworkPolicyParamType,
+ ContainerSkillType,
+ CustomToolParamFormatType,
+ DetailEnum,
+ FunctionAndCustomToolCallOutputType,
+ FunctionCallItemStatus,
+ FunctionShellCallEnvironmentType,
+ FunctionShellCallItemParamEnvironmentType,
+ FunctionShellCallItemStatus,
+ FunctionShellCallOutputOutcomeParamType,
+ FunctionShellCallOutputOutcomeType,
+ FunctionShellToolParamEnvironmentType,
+ GrammarSyntax1,
+ ImageDetail,
+ ImageGenActionEnum,
+ IncludeEnum,
+ InputFidelity,
+ ItemFieldType,
+ ItemType,
+ LocalShellCallOutputStatusEnum,
+ LocalShellCallStatus,
+ MCPToolCallStatus,
+ MemoryItemKind,
+ MessageContentType,
+ MessagePhase,
+ MessageRole,
+ MessageStatus,
+ ModelIdsCompaction,
+ OpenApiAuthType,
+ OutputContentType,
+ OutputItemType,
+ OutputMessageContentType,
+ PageOrder,
+ RankerVersionType,
+ RealtimeMcpErrorType,
+ ResponseErrorCode,
+ ResponseStreamEventType,
+ SearchContextSize,
+ TextResponseFormatConfigurationType,
+ ToolCallStatus,
+ ToolChoiceOptions,
+ ToolChoiceParamType,
+ ToolType,
+)
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "A2APreviewTool",
+ "A2AToolCall",
+ "A2AToolCallOutput",
+ "AISearchIndexResource",
+ "AgentId",
+ "AgentReference",
+ "Annotation",
+ "ApiErrorResponse",
+ "ApplyPatchCreateFileOperation",
+ "ApplyPatchCreateFileOperationParam",
+ "ApplyPatchDeleteFileOperation",
+ "ApplyPatchDeleteFileOperationParam",
+ "ApplyPatchFileOperation",
+ "ApplyPatchOperationParam",
+ "ApplyPatchToolCallItemParam",
+ "ApplyPatchToolCallOutputItemParam",
+ "ApplyPatchToolParam",
+ "ApplyPatchUpdateFileOperation",
+ "ApplyPatchUpdateFileOperationParam",
+ "ApproximateLocation",
+ "AutoCodeInterpreterToolParam",
+ "AzureAISearchTool",
+ "AzureAISearchToolCall",
+ "AzureAISearchToolCallOutput",
+ "AzureAISearchToolResource",
+ "AzureFunctionBinding",
+ "AzureFunctionDefinition",
+ "AzureFunctionDefinitionFunction",
+ "AzureFunctionStorageQueue",
+ "AzureFunctionTool",
+ "AzureFunctionToolCall",
+ "AzureFunctionToolCallOutput",
+ "BingCustomSearchConfiguration",
+ "BingCustomSearchPreviewTool",
+ "BingCustomSearchToolCall",
+ "BingCustomSearchToolCallOutput",
+ "BingCustomSearchToolParameters",
+ "BingGroundingSearchConfiguration",
+ "BingGroundingSearchToolParameters",
+ "BingGroundingTool",
+ "BingGroundingToolCall",
+ "BingGroundingToolCallOutput",
+ "BrowserAutomationPreviewTool",
+ "BrowserAutomationToolCall",
+ "BrowserAutomationToolCallOutput",
+ "BrowserAutomationToolConnectionParameters",
+ "BrowserAutomationToolParameters",
+ "CaptureStructuredOutputsTool",
+ "ChatSummaryMemoryItem",
+ "ClickParam",
+ "CodeInterpreterOutputImage",
+ "CodeInterpreterOutputLogs",
+ "CodeInterpreterTool",
+ "CompactResource",
+ "CompactionSummaryItemParam",
+ "ComparisonFilter",
+ "CompoundFilter",
+ "ComputerAction",
+ "ComputerCallOutputItemParam",
+ "ComputerCallSafetyCheckParam",
+ "ComputerScreenshotContent",
+ "ComputerScreenshotImage",
+ "ComputerUsePreviewTool",
+ "ContainerAutoParam",
+ "ContainerFileCitationBody",
+ "ContainerNetworkPolicyAllowlistParam",
+ "ContainerNetworkPolicyDisabledParam",
+ "ContainerNetworkPolicyDomainSecretParam",
+ "ContainerNetworkPolicyParam",
+ "ContainerReferenceResource",
+ "ContainerSkill",
+ "ContextManagementParam",
+ "ConversationParam_2",
+ "ConversationReference",
+ "CoordParam",
+ "CreateResponse",
+ "CreatedBy",
+ "CustomGrammarFormatParam",
+ "CustomTextFormatParam",
+ "CustomToolParam",
+ "CustomToolParamFormat",
+ "DeleteResponseResult",
+ "DoubleClickAction",
+ "DragParam",
+ "Error",
+ "FabricDataAgentToolCall",
+ "FabricDataAgentToolCallOutput",
+ "FabricDataAgentToolParameters",
+ "FileCitationBody",
+ "FilePath",
+ "FileSearchTool",
+ "FileSearchToolCallResults",
+ "FunctionAndCustomToolCallOutput",
+ "FunctionAndCustomToolCallOutputInputFileContent",
+ "FunctionAndCustomToolCallOutputInputImageContent",
+ "FunctionAndCustomToolCallOutputInputTextContent",
+ "FunctionCallOutputItemParam",
+ "FunctionShellAction",
+ "FunctionShellActionParam",
+ "FunctionShellCallEnvironment",
+ "FunctionShellCallItemParam",
+ "FunctionShellCallItemParamEnvironment",
+ "FunctionShellCallItemParamEnvironmentContainerReferenceParam",
+ "FunctionShellCallItemParamEnvironmentLocalEnvironmentParam",
+ "FunctionShellCallOutputContent",
+ "FunctionShellCallOutputContentParam",
+ "FunctionShellCallOutputExitOutcome",
+ "FunctionShellCallOutputExitOutcomeParam",
+ "FunctionShellCallOutputItemParam",
+ "FunctionShellCallOutputOutcome",
+ "FunctionShellCallOutputOutcomeParam",
+ "FunctionShellCallOutputTimeoutOutcome",
+ "FunctionShellCallOutputTimeoutOutcomeParam",
+ "FunctionShellToolParam",
+ "FunctionShellToolParamEnvironment",
+ "FunctionShellToolParamEnvironmentContainerReferenceParam",
+ "FunctionShellToolParamEnvironmentLocalEnvironmentParam",
+ "FunctionTool",
+ "HybridSearchOptions",
+ "ImageGenTool",
+ "ImageGenToolInputImageMask",
+ "InlineSkillParam",
+ "InlineSkillSourceParam",
+ "InputFileContent",
+ "InputFileContentParam",
+ "InputImageContent",
+ "InputImageContentParamAutoParam",
+ "InputTextContent",
+ "InputTextContentParam",
+ "Item",
+ "ItemCodeInterpreterToolCall",
+ "ItemComputerToolCall",
+ "ItemCustomToolCall",
+ "ItemCustomToolCallOutput",
+ "ItemField",
+ "ItemFieldApplyPatchToolCall",
+ "ItemFieldApplyPatchToolCallOutput",
+ "ItemFieldCodeInterpreterToolCall",
+ "ItemFieldCompactionBody",
+ "ItemFieldComputerToolCall",
+ "ItemFieldComputerToolCallOutput",
+ "ItemFieldCustomToolCall",
+ "ItemFieldCustomToolCallOutput",
+ "ItemFieldFileSearchToolCall",
+ "ItemFieldFunctionShellCall",
+ "ItemFieldFunctionShellCallOutput",
+ "ItemFieldFunctionToolCall",
+ "ItemFieldFunctionToolCallOutput",
+ "ItemFieldImageGenToolCall",
+ "ItemFieldLocalShellToolCall",
+ "ItemFieldLocalShellToolCallOutput",
+ "ItemFieldMcpApprovalRequest",
+ "ItemFieldMcpApprovalResponseResource",
+ "ItemFieldMcpListTools",
+ "ItemFieldMcpToolCall",
+ "ItemFieldMessage",
+ "ItemFieldReasoningItem",
+ "ItemFieldWebSearchToolCall",
+ "ItemFileSearchToolCall",
+ "ItemFunctionToolCall",
+ "ItemImageGenToolCall",
+ "ItemLocalShellToolCall",
+ "ItemLocalShellToolCallOutput",
+ "ItemMcpApprovalRequest",
+ "ItemMcpListTools",
+ "ItemMcpToolCall",
+ "ItemMessage",
+ "ItemOutputMessage",
+ "ItemReasoningItem",
+ "ItemReferenceParam",
+ "ItemWebSearchToolCall",
+ "KeyPressAction",
+ "LocalEnvironmentResource",
+ "LocalShellExecAction",
+ "LocalShellToolParam",
+ "LocalSkillParam",
+ "LogProb",
+ "MCPApprovalResponse",
+ "MCPListToolsTool",
+ "MCPListToolsToolAnnotations",
+ "MCPListToolsToolInputSchema",
+ "MCPTool",
+ "MCPToolFilter",
+ "MCPToolRequireApproval",
+ "MemoryItem",
+ "MemorySearchItem",
+ "MemorySearchOptions",
+ "MemorySearchPreviewTool",
+ "MemorySearchTool",
+ "MemorySearchToolCallItemParam",
+ "MemorySearchToolCallItemResource",
+ "MessageContent",
+ "MessageContentInputFileContent",
+ "MessageContentInputImageContent",
+ "MessageContentInputTextContent",
+ "MessageContentOutputTextContent",
+ "MessageContentReasoningTextContent",
+ "MessageContentRefusalContent",
+ "Metadata",
+ "MicrosoftFabricPreviewTool",
+ "MoveParam",
+ "OAuthConsentRequestOutputItem",
+ "OpenApiAnonymousAuthDetails",
+ "OpenApiAuthDetails",
+ "OpenApiFunctionDefinition",
+ "OpenApiFunctionDefinitionFunction",
+ "OpenApiManagedAuthDetails",
+ "OpenApiManagedSecurityScheme",
+ "OpenApiProjectConnectionAuthDetails",
+ "OpenApiProjectConnectionSecurityScheme",
+ "OpenApiTool",
+ "OpenApiToolCall",
+ "OpenApiToolCallOutput",
+ "OutputContent",
+ "OutputContentOutputTextContent",
+ "OutputContentReasoningTextContent",
+ "OutputContentRefusalContent",
+ "OutputItem",
+ "OutputItemApplyPatchToolCall",
+ "OutputItemApplyPatchToolCallOutput",
+ "OutputItemCodeInterpreterToolCall",
+ "OutputItemCompactionBody",
+ "OutputItemComputerToolCall",
+ "OutputItemComputerToolCallOutput",
+ "OutputItemCustomToolCall",
+ "OutputItemCustomToolCallOutput",
+ "OutputItemFileSearchToolCall",
+ "OutputItemFunctionShellCall",
+ "OutputItemFunctionShellCallOutput",
+ "OutputItemFunctionToolCall",
+ "OutputItemFunctionToolCallOutput",
+ "OutputItemImageGenToolCall",
+ "OutputItemLocalShellToolCall",
+ "OutputItemLocalShellToolCallOutput",
+ "OutputItemMcpApprovalRequest",
+ "OutputItemMcpApprovalResponseResource",
+ "OutputItemMcpListTools",
+ "OutputItemMcpToolCall",
+ "OutputItemMessage",
+ "OutputItemOutputMessage",
+ "OutputItemReasoningItem",
+ "OutputItemWebSearchToolCall",
+ "OutputMessageContent",
+ "OutputMessageContentOutputTextContent",
+ "OutputMessageContentRefusalContent",
+ "Prompt",
+ "RankingOptions",
+ "RealtimeMCPError",
+ "RealtimeMCPHTTPError",
+ "RealtimeMCPProtocolError",
+ "RealtimeMCPToolExecutionError",
+ "Reasoning",
+ "ReasoningTextContent",
+ "Response",
+ "ResponseAudioDeltaEvent",
+ "ResponseAudioDoneEvent",
+ "ResponseAudioTranscriptDeltaEvent",
+ "ResponseAudioTranscriptDoneEvent",
+ "ResponseCodeInterpreterCallCodeDeltaEvent",
+ "ResponseCodeInterpreterCallCodeDoneEvent",
+ "ResponseCodeInterpreterCallCompletedEvent",
+ "ResponseCodeInterpreterCallInProgressEvent",
+ "ResponseCodeInterpreterCallInterpretingEvent",
+ "ResponseCompletedEvent",
+ "ResponseContentPartAddedEvent",
+ "ResponseContentPartDoneEvent",
+ "ResponseCreatedEvent",
+ "ResponseCustomToolCallInputDeltaEvent",
+ "ResponseCustomToolCallInputDoneEvent",
+ "ResponseError",
+ "ResponseErrorEvent",
+ "ResponseFailedEvent",
+ "ResponseFileSearchCallCompletedEvent",
+ "ResponseFileSearchCallInProgressEvent",
+ "ResponseFileSearchCallSearchingEvent",
+ "ResponseFormatJsonSchemaSchema",
+ "ResponseFunctionCallArgumentsDeltaEvent",
+ "ResponseFunctionCallArgumentsDoneEvent",
+ "ResponseImageGenCallCompletedEvent",
+ "ResponseImageGenCallGeneratingEvent",
+ "ResponseImageGenCallInProgressEvent",
+ "ResponseImageGenCallPartialImageEvent",
+ "ResponseInProgressEvent",
+ "ResponseIncompleteDetails",
+ "ResponseIncompleteEvent",
+ "ResponseLogProb",
+ "ResponseLogProbTopLogprobs",
+ "ResponseMCPCallArgumentsDeltaEvent",
+ "ResponseMCPCallArgumentsDoneEvent",
+ "ResponseMCPCallCompletedEvent",
+ "ResponseMCPCallFailedEvent",
+ "ResponseMCPCallInProgressEvent",
+ "ResponseMCPListToolsCompletedEvent",
+ "ResponseMCPListToolsFailedEvent",
+ "ResponseMCPListToolsInProgressEvent",
+ "ResponseOutputItemAddedEvent",
+ "ResponseOutputItemDoneEvent",
+ "ResponseOutputTextAnnotationAddedEvent",
+ "ResponsePromptVariables",
+ "ResponseQueuedEvent",
+ "ResponseReasoningSummaryPartAddedEvent",
+ "ResponseReasoningSummaryPartAddedEventPart",
+ "ResponseReasoningSummaryPartDoneEvent",
+ "ResponseReasoningSummaryPartDoneEventPart",
+ "ResponseReasoningSummaryTextDeltaEvent",
+ "ResponseReasoningSummaryTextDoneEvent",
+ "ResponseReasoningTextDeltaEvent",
+ "ResponseReasoningTextDoneEvent",
+ "ResponseRefusalDeltaEvent",
+ "ResponseRefusalDoneEvent",
+ "ResponseStreamEvent",
+ "ResponseStreamOptions",
+ "ResponseTextDeltaEvent",
+ "ResponseTextDoneEvent",
+ "ResponseTextParam",
+ "ResponseUsage",
+ "ResponseUsageInputTokensDetails",
+ "ResponseUsageOutputTokensDetails",
+ "ResponseWebSearchCallCompletedEvent",
+ "ResponseWebSearchCallInProgressEvent",
+ "ResponseWebSearchCallSearchingEvent",
+ "ScreenshotParam",
+ "ScrollParam",
+ "SharepointGroundingToolCall",
+ "SharepointGroundingToolCallOutput",
+ "SharepointGroundingToolParameters",
+ "SharepointPreviewTool",
+ "SkillReferenceParam",
+ "SpecificApplyPatchParam",
+ "SpecificFunctionShellParam",
+ "StructuredOutputDefinition",
+ "StructuredOutputsOutputItem",
+ "SummaryTextContent",
+ "TextContent",
+ "TextResponseFormatConfiguration",
+ "TextResponseFormatConfigurationResponseFormatJsonObject",
+ "TextResponseFormatConfigurationResponseFormatText",
+ "TextResponseFormatJsonSchema",
+ "Tool",
+ "ToolChoiceAllowed",
+ "ToolChoiceCodeInterpreter",
+ "ToolChoiceComputerUsePreview",
+ "ToolChoiceCustom",
+ "ToolChoiceFileSearch",
+ "ToolChoiceFunction",
+ "ToolChoiceImageGeneration",
+ "ToolChoiceMCP",
+ "ToolChoiceParam",
+ "ToolChoiceWebSearchPreview",
+ "ToolChoiceWebSearchPreview20250311",
+ "ToolProjectConnection",
+ "TopLogProb",
+ "TypeParam",
+ "UrlCitationBody",
+ "UserProfileMemoryItem",
+ "VectorStoreFileAttributes",
+ "WaitParam",
+ "WebSearchActionFind",
+ "WebSearchActionOpenPage",
+ "WebSearchActionSearch",
+ "WebSearchActionSearchSources",
+ "WebSearchApproximateLocation",
+ "WebSearchConfiguration",
+ "WebSearchPreviewTool",
+ "WebSearchTool",
+ "WebSearchToolFilters",
+ "WorkflowActionOutputItem",
+ "AnnotationType",
+ "ApplyPatchCallOutputStatus",
+ "ApplyPatchCallOutputStatusParam",
+ "ApplyPatchCallStatus",
+ "ApplyPatchCallStatusParam",
+ "ApplyPatchFileOperationType",
+ "ApplyPatchOperationParamType",
+ "AzureAISearchQueryType",
+ "ClickButtonType",
+ "ComputerActionType",
+ "ComputerEnvironment",
+ "ContainerMemoryLimit",
+ "ContainerNetworkPolicyParamType",
+ "ContainerSkillType",
+ "CustomToolParamFormatType",
+ "DetailEnum",
+ "FunctionAndCustomToolCallOutputType",
+ "FunctionCallItemStatus",
+ "FunctionShellCallEnvironmentType",
+ "FunctionShellCallItemParamEnvironmentType",
+ "FunctionShellCallItemStatus",
+ "FunctionShellCallOutputOutcomeParamType",
+ "FunctionShellCallOutputOutcomeType",
+ "FunctionShellToolParamEnvironmentType",
+ "GrammarSyntax1",
+ "ImageDetail",
+ "ImageGenActionEnum",
+ "IncludeEnum",
+ "InputFidelity",
+ "ItemFieldType",
+ "ItemType",
+ "LocalShellCallOutputStatusEnum",
+ "LocalShellCallStatus",
+ "MCPToolCallStatus",
+ "MemoryItemKind",
+ "MessageContentType",
+ "MessagePhase",
+ "MessageRole",
+ "MessageStatus",
+ "ModelIdsCompaction",
+ "OpenApiAuthType",
+ "OutputContentType",
+ "OutputItemType",
+ "OutputMessageContentType",
+ "PageOrder",
+ "RankerVersionType",
+ "RealtimeMcpErrorType",
+ "ResponseErrorCode",
+ "ResponseStreamEventType",
+ "SearchContextSize",
+ "TextResponseFormatConfigurationType",
+ "ToolCallStatus",
+ "ToolChoiceOptions",
+ "ToolChoiceParamType",
+ "ToolType",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+_patch_sdk()
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_enums.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_enums.py
new file mode 100644
index 000000000000..0d91e8de8442
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_enums.py
@@ -0,0 +1,1253 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of AnnotationType."""
+
+ FILE_CITATION = "file_citation"
+ """FILE_CITATION."""
+ URL_CITATION = "url_citation"
+ """URL_CITATION."""
+ CONTAINER_FILE_CITATION = "container_file_citation"
+ """CONTAINER_FILE_CITATION."""
+ FILE_PATH = "file_path"
+ """FILE_PATH."""
+
+
+class ApplyPatchCallOutputStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ApplyPatchCallOutputStatus."""
+
+ COMPLETED = "completed"
+ """COMPLETED."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class ApplyPatchCallOutputStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Apply patch call output status."""
+
+ COMPLETED = "completed"
+ """COMPLETED."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class ApplyPatchCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ApplyPatchCallStatus."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+
+
+class ApplyPatchCallStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Apply patch call status."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+
+
+class ApplyPatchFileOperationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ApplyPatchFileOperationType."""
+
+ CREATE_FILE = "create_file"
+ """CREATE_FILE."""
+ DELETE_FILE = "delete_file"
+ """DELETE_FILE."""
+ UPDATE_FILE = "update_file"
+ """UPDATE_FILE."""
+
+
+class ApplyPatchOperationParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ApplyPatchOperationParamType."""
+
+ CREATE_FILE = "create_file"
+ """CREATE_FILE."""
+ DELETE_FILE = "delete_file"
+ """DELETE_FILE."""
+ UPDATE_FILE = "update_file"
+ """UPDATE_FILE."""
+
+
+class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Available query types for Azure AI Search tool."""
+
+ SIMPLE = "simple"
+ """Query type ``simple``."""
+ SEMANTIC = "semantic"
+ """Query type ``semantic``."""
+ VECTOR = "vector"
+ """Query type ``vector``."""
+ VECTOR_SIMPLE_HYBRID = "vector_simple_hybrid"
+ """Query type ``vector_simple_hybrid``."""
+ VECTOR_SEMANTIC_HYBRID = "vector_semantic_hybrid"
+ """Query type ``vector_semantic_hybrid``."""
+
+
+class ClickButtonType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ClickButtonType."""
+
+ LEFT = "left"
+ """LEFT."""
+ RIGHT = "right"
+ """RIGHT."""
+ WHEEL = "wheel"
+ """WHEEL."""
+ BACK = "back"
+ """BACK."""
+ FORWARD = "forward"
+ """FORWARD."""
+
+
+class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ComputerActionType."""
+
+ CLICK = "click"
+ """CLICK."""
+ DOUBLE_CLICK = "double_click"
+ """DOUBLE_CLICK."""
+ DRAG = "drag"
+ """DRAG."""
+ KEYPRESS = "keypress"
+ """KEYPRESS."""
+ MOVE = "move"
+ """MOVE."""
+ SCREENSHOT = "screenshot"
+ """SCREENSHOT."""
+ SCROLL = "scroll"
+ """SCROLL."""
+ TYPE = "type"
+ """TYPE."""
+ WAIT = "wait"
+ """WAIT."""
+
+
+class ComputerEnvironment(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ComputerEnvironment."""
+
+ WINDOWS = "windows"
+ """WINDOWS."""
+ MAC = "mac"
+ """MAC."""
+ LINUX = "linux"
+ """LINUX."""
+ UBUNTU = "ubuntu"
+ """UBUNTU."""
+ BROWSER = "browser"
+ """BROWSER."""
+
+
+class ContainerMemoryLimit(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ContainerMemoryLimit."""
+
+ ENUM_1_G = "1g"
+ """1_G."""
+ ENUM_4_G = "4g"
+ """4_G."""
+ ENUM_16_G = "16g"
+ """16_G."""
+ ENUM_64_G = "64g"
+ """64_G."""
+
+
+class ContainerNetworkPolicyParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ContainerNetworkPolicyParamType."""
+
+ DISABLED = "disabled"
+ """DISABLED."""
+ ALLOWLIST = "allowlist"
+ """ALLOWLIST."""
+
+
+class ContainerSkillType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ContainerSkillType."""
+
+ SKILL_REFERENCE = "skill_reference"
+ """SKILL_REFERENCE."""
+ INLINE = "inline"
+ """INLINE."""
+
+
+class CustomToolParamFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of CustomToolParamFormatType."""
+
+ TEXT = "text"
+ """TEXT."""
+ GRAMMAR = "grammar"
+ """GRAMMAR."""
+
+
+class DetailEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of DetailEnum."""
+
+ LOW = "low"
+ """LOW."""
+ HIGH = "high"
+ """HIGH."""
+ AUTO = "auto"
+ """AUTO."""
+
+
+class FunctionAndCustomToolCallOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of FunctionAndCustomToolCallOutputType."""
+
+ INPUT_TEXT = "input_text"
+ """INPUT_TEXT."""
+ INPUT_IMAGE = "input_image"
+ """INPUT_IMAGE."""
+ INPUT_FILE = "input_file"
+ """INPUT_FILE."""
+
+
+class FunctionCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of FunctionCallItemStatus."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class FunctionShellCallEnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of FunctionShellCallEnvironmentType."""
+
+ LOCAL = "local"
+ """LOCAL."""
+ CONTAINER_REFERENCE = "container_reference"
+ """CONTAINER_REFERENCE."""
+
+
+class FunctionShellCallItemParamEnvironmentType( # pylint: disable=name-too-long
+ str, Enum, metaclass=CaseInsensitiveEnumMeta
+):
+ """Type of FunctionShellCallItemParamEnvironmentType."""
+
+ LOCAL = "local"
+ """LOCAL."""
+ CONTAINER_REFERENCE = "container_reference"
+ """CONTAINER_REFERENCE."""
+
+
+class FunctionShellCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Shell call status."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class FunctionShellCallOutputOutcomeParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of FunctionShellCallOutputOutcomeParamType."""
+
+ TIMEOUT = "timeout"
+ """TIMEOUT."""
+ EXIT = "exit"
+ """EXIT."""
+
+
+class FunctionShellCallOutputOutcomeType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of FunctionShellCallOutputOutcomeType."""
+
+ TIMEOUT = "timeout"
+ """TIMEOUT."""
+ EXIT = "exit"
+ """EXIT."""
+
+
+class FunctionShellToolParamEnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of FunctionShellToolParamEnvironmentType."""
+
+ CONTAINER_AUTO = "container_auto"
+ """CONTAINER_AUTO."""
+ LOCAL = "local"
+ """LOCAL."""
+ CONTAINER_REFERENCE = "container_reference"
+ """CONTAINER_REFERENCE."""
+
+
+class GrammarSyntax1(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of GrammarSyntax1."""
+
+ LARK = "lark"
+ """LARK."""
+ REGEX = "regex"
+ """REGEX."""
+
+
+class ImageDetail(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ImageDetail."""
+
+ LOW = "low"
+ """LOW."""
+ HIGH = "high"
+ """HIGH."""
+ AUTO = "auto"
+ """AUTO."""
+
+
+class ImageGenActionEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ImageGenActionEnum."""
+
+ GENERATE = "generate"
+ """GENERATE."""
+ EDIT = "edit"
+ """EDIT."""
+ AUTO = "auto"
+ """AUTO."""
+
+
+class IncludeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Specify additional output data to include in the model response. Currently supported values are:
+
+    * `web_search_call.action.sources`: Include the sources of the web search tool call.
+    * `web_search_call.results`: Include the results of the web search tool call.
+    * `code_interpreter_call.outputs`: Includes the outputs of python code execution in code interpreter
+      tool call items.
+    * `computer_call_output.output.image_url`: Include image urls from the computer call output.
+    * `file_search_call.results`: Include the search results of the file search tool call.
+    * `memory_search_call.results`: Include the search results of the memory search tool call.
+    * `message.input_image.image_url`: Include image urls from the input message.
+    * `message.output_text.logprobs`: Include logprobs with assistant messages.
+    * `reasoning.encrypted_content`: Includes an encrypted version of reasoning tokens in reasoning item
+    outputs. This enables reasoning items to be used in multi-turn conversations when the Responses API
+    is used statelessly (when `store` is `false`, or the organization is in the zero data retention program).
+ """
+
+ FILE_SEARCH_CALL_RESULTS = "file_search_call.results"
+ """FILE_SEARCH_CALL_RESULTS."""
+ WEB_SEARCH_CALL_RESULTS = "web_search_call.results"
+ """WEB_SEARCH_CALL_RESULTS."""
+ WEB_SEARCH_CALL_ACTION_SOURCES = "web_search_call.action.sources"
+ """WEB_SEARCH_CALL_ACTION_SOURCES."""
+ MESSAGE_INPUT_IMAGE_IMAGE_URL = "message.input_image.image_url"
+ """MESSAGE_INPUT_IMAGE_IMAGE_URL."""
+ COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = "computer_call_output.output.image_url"
+ """COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL."""
+ CODE_INTERPRETER_CALL_OUTPUTS = "code_interpreter_call.outputs"
+ """CODE_INTERPRETER_CALL_OUTPUTS."""
+ REASONING_ENCRYPTED_CONTENT = "reasoning.encrypted_content"
+ """REASONING_ENCRYPTED_CONTENT."""
+ MESSAGE_OUTPUT_TEXT_LOGPROBS = "message.output_text.logprobs"
+ """MESSAGE_OUTPUT_TEXT_LOGPROBS."""
+ MEMORY_SEARCH_CALL_RESULTS = "memory_search_call.results"
+ """MEMORY_SEARCH_CALL_RESULTS."""
+
+
+class InputFidelity(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Controls how much effort the model exerts to match the style and features — especially
+    facial features — of input images. This parameter is supported only for ``gpt-image-1`` and
+    ``gpt-image-1.5`` and later models; it is not supported for ``gpt-image-1-mini``. Accepted
+    values are ``high`` and ``low``; defaults to ``low``.
+ """
+
+ HIGH = "high"
+ """HIGH."""
+ LOW = "low"
+ """LOW."""
+
+
+class ItemFieldType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ItemFieldType."""
+
+ MESSAGE = "message"
+ """MESSAGE."""
+ FUNCTION_CALL = "function_call"
+ """FUNCTION_CALL."""
+ FUNCTION_CALL_OUTPUT = "function_call_output"
+ """FUNCTION_CALL_OUTPUT."""
+ FILE_SEARCH_CALL = "file_search_call"
+ """FILE_SEARCH_CALL."""
+ WEB_SEARCH_CALL = "web_search_call"
+ """WEB_SEARCH_CALL."""
+ IMAGE_GENERATION_CALL = "image_generation_call"
+ """IMAGE_GENERATION_CALL."""
+ COMPUTER_CALL = "computer_call"
+ """COMPUTER_CALL."""
+ COMPUTER_CALL_OUTPUT = "computer_call_output"
+ """COMPUTER_CALL_OUTPUT."""
+ REASONING = "reasoning"
+ """REASONING."""
+ COMPACTION = "compaction"
+ """COMPACTION."""
+ CODE_INTERPRETER_CALL = "code_interpreter_call"
+ """CODE_INTERPRETER_CALL."""
+ LOCAL_SHELL_CALL = "local_shell_call"
+ """LOCAL_SHELL_CALL."""
+ LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output"
+ """LOCAL_SHELL_CALL_OUTPUT."""
+ SHELL_CALL = "shell_call"
+ """SHELL_CALL."""
+ SHELL_CALL_OUTPUT = "shell_call_output"
+ """SHELL_CALL_OUTPUT."""
+ APPLY_PATCH_CALL = "apply_patch_call"
+ """APPLY_PATCH_CALL."""
+ APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output"
+ """APPLY_PATCH_CALL_OUTPUT."""
+ MCP_LIST_TOOLS = "mcp_list_tools"
+ """MCP_LIST_TOOLS."""
+ MCP_APPROVAL_REQUEST = "mcp_approval_request"
+ """MCP_APPROVAL_REQUEST."""
+ MCP_APPROVAL_RESPONSE = "mcp_approval_response"
+ """MCP_APPROVAL_RESPONSE."""
+ MCP_CALL = "mcp_call"
+ """MCP_CALL."""
+ CUSTOM_TOOL_CALL = "custom_tool_call"
+ """CUSTOM_TOOL_CALL."""
+ CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output"
+ """CUSTOM_TOOL_CALL_OUTPUT."""
+
+
+class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of ItemType."""
+
+ MESSAGE = "message"
+ """MESSAGE."""
+ OUTPUT_MESSAGE = "output_message"
+ """OUTPUT_MESSAGE."""
+ FILE_SEARCH_CALL = "file_search_call"
+ """FILE_SEARCH_CALL."""
+ COMPUTER_CALL = "computer_call"
+ """COMPUTER_CALL."""
+ COMPUTER_CALL_OUTPUT = "computer_call_output"
+ """COMPUTER_CALL_OUTPUT."""
+ WEB_SEARCH_CALL = "web_search_call"
+ """WEB_SEARCH_CALL."""
+ FUNCTION_CALL = "function_call"
+ """FUNCTION_CALL."""
+ FUNCTION_CALL_OUTPUT = "function_call_output"
+ """FUNCTION_CALL_OUTPUT."""
+ REASONING = "reasoning"
+ """REASONING."""
+ COMPACTION = "compaction"
+ """COMPACTION."""
+ IMAGE_GENERATION_CALL = "image_generation_call"
+ """IMAGE_GENERATION_CALL."""
+ CODE_INTERPRETER_CALL = "code_interpreter_call"
+ """CODE_INTERPRETER_CALL."""
+ LOCAL_SHELL_CALL = "local_shell_call"
+ """LOCAL_SHELL_CALL."""
+ LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output"
+ """LOCAL_SHELL_CALL_OUTPUT."""
+ SHELL_CALL = "shell_call"
+ """SHELL_CALL."""
+ SHELL_CALL_OUTPUT = "shell_call_output"
+ """SHELL_CALL_OUTPUT."""
+ APPLY_PATCH_CALL = "apply_patch_call"
+ """APPLY_PATCH_CALL."""
+ APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output"
+ """APPLY_PATCH_CALL_OUTPUT."""
+ MCP_LIST_TOOLS = "mcp_list_tools"
+ """MCP_LIST_TOOLS."""
+ MCP_APPROVAL_REQUEST = "mcp_approval_request"
+ """MCP_APPROVAL_REQUEST."""
+ MCP_APPROVAL_RESPONSE = "mcp_approval_response"
+ """MCP_APPROVAL_RESPONSE."""
+ MCP_CALL = "mcp_call"
+ """MCP_CALL."""
+ CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output"
+ """CUSTOM_TOOL_CALL_OUTPUT."""
+ CUSTOM_TOOL_CALL = "custom_tool_call"
+ """CUSTOM_TOOL_CALL."""
+ ITEM_REFERENCE = "item_reference"
+ """ITEM_REFERENCE."""
+ STRUCTURED_OUTPUTS = "structured_outputs"
+ """STRUCTURED_OUTPUTS."""
+ OAUTH_CONSENT_REQUEST = "oauth_consent_request"
+ """OAUTH_CONSENT_REQUEST."""
+ MEMORY_SEARCH_CALL = "memory_search_call"
+ """MEMORY_SEARCH_CALL."""
+ WORKFLOW_ACTION = "workflow_action"
+ """WORKFLOW_ACTION."""
+ A2_A_PREVIEW_CALL = "a2a_preview_call"
+ """A2_A_PREVIEW_CALL."""
+ A2_A_PREVIEW_CALL_OUTPUT = "a2a_preview_call_output"
+ """A2_A_PREVIEW_CALL_OUTPUT."""
+ BING_GROUNDING_CALL = "bing_grounding_call"
+ """BING_GROUNDING_CALL."""
+ BING_GROUNDING_CALL_OUTPUT = "bing_grounding_call_output"
+ """BING_GROUNDING_CALL_OUTPUT."""
+ SHAREPOINT_GROUNDING_PREVIEW_CALL = "sharepoint_grounding_preview_call"
+ """SHAREPOINT_GROUNDING_PREVIEW_CALL."""
+ SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT = "sharepoint_grounding_preview_call_output"
+ """SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT."""
+ AZURE_AI_SEARCH_CALL = "azure_ai_search_call"
+ """AZURE_AI_SEARCH_CALL."""
+ AZURE_AI_SEARCH_CALL_OUTPUT = "azure_ai_search_call_output"
+ """AZURE_AI_SEARCH_CALL_OUTPUT."""
+ BING_CUSTOM_SEARCH_PREVIEW_CALL = "bing_custom_search_preview_call"
+ """BING_CUSTOM_SEARCH_PREVIEW_CALL."""
+ BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT = "bing_custom_search_preview_call_output"
+ """BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT."""
+ OPENAPI_CALL = "openapi_call"
+ """OPENAPI_CALL."""
+ OPENAPI_CALL_OUTPUT = "openapi_call_output"
+ """OPENAPI_CALL_OUTPUT."""
+ BROWSER_AUTOMATION_PREVIEW_CALL = "browser_automation_preview_call"
+ """BROWSER_AUTOMATION_PREVIEW_CALL."""
+ BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT = "browser_automation_preview_call_output"
+ """BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT."""
+ FABRIC_DATAAGENT_PREVIEW_CALL = "fabric_dataagent_preview_call"
+ """FABRIC_DATAAGENT_PREVIEW_CALL."""
+ FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT = "fabric_dataagent_preview_call_output"
+ """FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT."""
+ AZURE_FUNCTION_CALL = "azure_function_call"
+ """AZURE_FUNCTION_CALL."""
+ AZURE_FUNCTION_CALL_OUTPUT = "azure_function_call_output"
+ """AZURE_FUNCTION_CALL_OUTPUT."""
+
+
+class LocalShellCallOutputStatusEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of LocalShellCallOutputStatusEnum."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class LocalShellCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of LocalShellCallStatus."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class MCPToolCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of MCPToolCallStatus."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+ CALLING = "calling"
+ """CALLING."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Memory item kind."""
+
+ USER_PROFILE = "user_profile"
+ """User profile information extracted from conversations."""
+ CHAT_SUMMARY = "chat_summary"
+ """Summary of chat conversations."""
+
+
+class MessageContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the ``type`` field of a message content part
+    (text variants, refusal, image, screenshot, or file)."""
+
+    INPUT_TEXT = "input_text"
+    """INPUT_TEXT."""
+    OUTPUT_TEXT = "output_text"
+    """OUTPUT_TEXT."""
+    TEXT = "text"
+    """TEXT."""
+    SUMMARY_TEXT = "summary_text"
+    """SUMMARY_TEXT."""
+    REASONING_TEXT = "reasoning_text"
+    """REASONING_TEXT."""
+    REFUSAL = "refusal"
+    """REFUSAL."""
+    INPUT_IMAGE = "input_image"
+    """INPUT_IMAGE."""
+    COMPUTER_SCREENSHOT = "computer_screenshot"
+    """COMPUTER_SCREENSHOT."""
+    INPUT_FILE = "input_file"
+    """INPUT_FILE."""
+
+
+class MessagePhase(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Labels an ``assistant`` message as intermediate commentary (``commentary``) or the final
+    answer (``final_answer``). Not used for user messages. For follow-up requests with models
+    like ``gpt-5.3-codex`` and later, preserve and resend the phase on all assistant messages —
+    omitting it can degrade performance.
+    """
+
+    COMMENTARY = "commentary"
+    """COMMENTARY."""
+    FINAL_ANSWER = "final_answer"
+    """FINAL_ANSWER."""
+
+
+class MessageRole(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Role attributed to the author of a message
+    (``user``, ``assistant``, ``system``, ``developer``, ``tool``, etc.)."""
+
+    UNKNOWN = "unknown"
+    """UNKNOWN."""
+    USER = "user"
+    """USER."""
+    ASSISTANT = "assistant"
+    """ASSISTANT."""
+    SYSTEM = "system"
+    """SYSTEM."""
+    CRITIC = "critic"
+    """CRITIC."""
+    DISCRIMINATOR = "discriminator"
+    """DISCRIMINATOR."""
+    DEVELOPER = "developer"
+    """DEVELOPER."""
+    TOOL = "tool"
+    """TOOL."""
+
+
+class MessageStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Lifecycle status of a message item
+    (``in_progress``, ``completed``, or ``incomplete``)."""
+
+    IN_PROGRESS = "in_progress"
+    """IN_PROGRESS."""
+    COMPLETED = "completed"
+    """COMPLETED."""
+    INCOMPLETE = "incomplete"
+    """INCOMPLETE."""
+
+
+class ModelIdsCompaction(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Model ID used to generate the response, like ``gpt-5`` or ``o3``. OpenAI offers a wide range
+    of models with different capabilities, performance characteristics, and price points. Refer to
+    the OpenAI model guide to browse and compare available models.
+    """
+
+    GPT5_2 = "gpt-5.2"
+    """GPT5_2."""
+    GPT5_2_2025_12_11 = "gpt-5.2-2025-12-11"
+    """GPT5_2_2025_12_11."""
+    GPT5_2_CHAT_LATEST = "gpt-5.2-chat-latest"
+    """GPT5_2_CHAT_LATEST."""
+    GPT5_2_PRO = "gpt-5.2-pro"
+    """GPT5_2_PRO."""
+    GPT5_2_PRO2025_12_11 = "gpt-5.2-pro-2025-12-11"
+    """GPT5_2_PRO2025_12_11."""
+    GPT5_1 = "gpt-5.1"
+    """GPT5_1."""
+    GPT5_1_2025_11_13 = "gpt-5.1-2025-11-13"
+    """GPT5_1_2025_11_13."""
+    GPT5_1_CODEX = "gpt-5.1-codex"
+    """GPT5_1_CODEX."""
+    GPT5_1_MINI = "gpt-5.1-mini"
+    """GPT5_1_MINI."""
+    GPT5_1_CHAT_LATEST = "gpt-5.1-chat-latest"
+    """GPT5_1_CHAT_LATEST."""
+    GPT5 = "gpt-5"
+    """GPT5."""
+    GPT5_MINI = "gpt-5-mini"
+    """GPT5_MINI."""
+    GPT5_NANO = "gpt-5-nano"
+    """GPT5_NANO."""
+    GPT5_2025_08_07 = "gpt-5-2025-08-07"
+    """GPT5_2025_08_07."""
+    GPT5_MINI2025_08_07 = "gpt-5-mini-2025-08-07"
+    """GPT5_MINI2025_08_07."""
+    GPT5_NANO2025_08_07 = "gpt-5-nano-2025-08-07"
+    """GPT5_NANO2025_08_07."""
+    GPT5_CHAT_LATEST = "gpt-5-chat-latest"
+    """GPT5_CHAT_LATEST."""
+    GPT4_1 = "gpt-4.1"
+    """GPT4_1."""
+    GPT4_1_MINI = "gpt-4.1-mini"
+    """GPT4_1_MINI."""
+    GPT4_1_NANO = "gpt-4.1-nano"
+    """GPT4_1_NANO."""
+    GPT4_1_2025_04_14 = "gpt-4.1-2025-04-14"
+    """GPT4_1_2025_04_14."""
+    GPT4_1_MINI2025_04_14 = "gpt-4.1-mini-2025-04-14"
+    """GPT4_1_MINI2025_04_14."""
+    GPT4_1_NANO2025_04_14 = "gpt-4.1-nano-2025-04-14"
+    """GPT4_1_NANO2025_04_14."""
+    O4_MINI = "o4-mini"
+    """O4_MINI."""
+    O4_MINI2025_04_16 = "o4-mini-2025-04-16"
+    """O4_MINI2025_04_16."""
+    O3 = "o3"
+    """O3."""
+    O3_2025_04_16 = "o3-2025-04-16"
+    """O3_2025_04_16."""
+    O3_MINI = "o3-mini"
+    """O3_MINI."""
+    O3_MINI2025_01_31 = "o3-mini-2025-01-31"
+    """O3_MINI2025_01_31."""
+    O1 = "o1"
+    """O1."""
+    O1_2024_12_17 = "o1-2024-12-17"
+    """O1_2024_12_17."""
+    O1_PREVIEW = "o1-preview"
+    """O1_PREVIEW."""
+    O1_PREVIEW2024_09_12 = "o1-preview-2024-09-12"
+    """O1_PREVIEW2024_09_12."""
+    O1_MINI = "o1-mini"
+    """O1_MINI."""
+    O1_MINI2024_09_12 = "o1-mini-2024-09-12"
+    """O1_MINI2024_09_12."""
+    GPT4_O = "gpt-4o"
+    """GPT4_O."""
+    GPT4_O2024_11_20 = "gpt-4o-2024-11-20"
+    """GPT4_O2024_11_20."""
+    GPT4_O2024_08_06 = "gpt-4o-2024-08-06"
+    """GPT4_O2024_08_06."""
+    GPT4_O2024_05_13 = "gpt-4o-2024-05-13"
+    """GPT4_O2024_05_13."""
+    GPT4_O_AUDIO_PREVIEW = "gpt-4o-audio-preview"
+    """GPT4_O_AUDIO_PREVIEW."""
+    GPT4_O_AUDIO_PREVIEW2024_10_01 = "gpt-4o-audio-preview-2024-10-01"
+    """GPT4_O_AUDIO_PREVIEW2024_10_01."""
+    GPT4_O_AUDIO_PREVIEW2024_12_17 = "gpt-4o-audio-preview-2024-12-17"
+    """GPT4_O_AUDIO_PREVIEW2024_12_17."""
+    GPT4_O_AUDIO_PREVIEW2025_06_03 = "gpt-4o-audio-preview-2025-06-03"
+    """GPT4_O_AUDIO_PREVIEW2025_06_03."""
+    GPT4_O_MINI_AUDIO_PREVIEW = "gpt-4o-mini-audio-preview"
+    """GPT4_O_MINI_AUDIO_PREVIEW."""
+    GPT4_O_MINI_AUDIO_PREVIEW2024_12_17 = "gpt-4o-mini-audio-preview-2024-12-17"
+    """GPT4_O_MINI_AUDIO_PREVIEW2024_12_17."""
+    GPT4_O_SEARCH_PREVIEW = "gpt-4o-search-preview"
+    """GPT4_O_SEARCH_PREVIEW."""
+    GPT4_O_MINI_SEARCH_PREVIEW = "gpt-4o-mini-search-preview"
+    """GPT4_O_MINI_SEARCH_PREVIEW."""
+    GPT4_O_SEARCH_PREVIEW2025_03_11 = "gpt-4o-search-preview-2025-03-11"
+    """GPT4_O_SEARCH_PREVIEW2025_03_11."""
+    GPT4_O_MINI_SEARCH_PREVIEW2025_03_11 = "gpt-4o-mini-search-preview-2025-03-11"
+    """GPT4_O_MINI_SEARCH_PREVIEW2025_03_11."""
+    CHATGPT4_O_LATEST = "chatgpt-4o-latest"
+    """CHATGPT4_O_LATEST."""
+    CODEX_MINI_LATEST = "codex-mini-latest"
+    """CODEX_MINI_LATEST."""
+    GPT4_O_MINI = "gpt-4o-mini"
+    """GPT4_O_MINI."""
+    GPT4_O_MINI2024_07_18 = "gpt-4o-mini-2024-07-18"
+    """GPT4_O_MINI2024_07_18."""
+    GPT4_TURBO = "gpt-4-turbo"
+    """GPT4_TURBO."""
+    GPT4_TURBO2024_04_09 = "gpt-4-turbo-2024-04-09"
+    """GPT4_TURBO2024_04_09."""
+    GPT4_0125_PREVIEW = "gpt-4-0125-preview"
+    """GPT4_0125_PREVIEW."""
+    GPT4_TURBO_PREVIEW = "gpt-4-turbo-preview"
+    """GPT4_TURBO_PREVIEW."""
+    GPT4_1106_PREVIEW = "gpt-4-1106-preview"
+    """GPT4_1106_PREVIEW."""
+    GPT4_VISION_PREVIEW = "gpt-4-vision-preview"
+    """GPT4_VISION_PREVIEW."""
+    GPT4 = "gpt-4"
+    """GPT4."""
+    GPT4_0314 = "gpt-4-0314"
+    """GPT4_0314."""
+    GPT4_0613 = "gpt-4-0613"
+    """GPT4_0613."""
+    GPT4_32_K = "gpt-4-32k"
+    """GPT4_32_K."""
+    GPT4_32_K0314 = "gpt-4-32k-0314"
+    """GPT4_32_K0314."""
+    GPT4_32_K0613 = "gpt-4-32k-0613"
+    """GPT4_32_K0613."""
+    GPT3_5_TURBO = "gpt-3.5-turbo"
+    """GPT3_5_TURBO."""
+    GPT3_5_TURBO16_K = "gpt-3.5-turbo-16k"
+    """GPT3_5_TURBO16_K."""
+    GPT3_5_TURBO0301 = "gpt-3.5-turbo-0301"
+    """GPT3_5_TURBO0301."""
+    GPT3_5_TURBO0613 = "gpt-3.5-turbo-0613"
+    """GPT3_5_TURBO0613."""
+    GPT3_5_TURBO1106 = "gpt-3.5-turbo-1106"
+    """GPT3_5_TURBO1106."""
+    GPT3_5_TURBO0125 = "gpt-3.5-turbo-0125"
+    """GPT3_5_TURBO0125."""
+    GPT3_5_TURBO16_K0613 = "gpt-3.5-turbo-16k-0613"
+    """GPT3_5_TURBO16_K0613."""
+    O1_PRO = "o1-pro"
+    """O1_PRO."""
+    O1_PRO2025_03_19 = "o1-pro-2025-03-19"
+    """O1_PRO2025_03_19."""
+    O3_PRO = "o3-pro"
+    """O3_PRO."""
+    O3_PRO2025_06_10 = "o3-pro-2025-06-10"
+    """O3_PRO2025_06_10."""
+    O3_DEEP_RESEARCH = "o3-deep-research"
+    """O3_DEEP_RESEARCH."""
+    O3_DEEP_RESEARCH2025_06_26 = "o3-deep-research-2025-06-26"
+    """O3_DEEP_RESEARCH2025_06_26."""
+    O4_MINI_DEEP_RESEARCH = "o4-mini-deep-research"
+    """O4_MINI_DEEP_RESEARCH."""
+    O4_MINI_DEEP_RESEARCH2025_06_26 = "o4-mini-deep-research-2025-06-26"
+    """O4_MINI_DEEP_RESEARCH2025_06_26."""
+    COMPUTER_USE_PREVIEW = "computer-use-preview"
+    """COMPUTER_USE_PREVIEW."""
+    COMPUTER_USE_PREVIEW2025_03_11 = "computer-use-preview-2025-03-11"
+    """COMPUTER_USE_PREVIEW2025_03_11."""
+    GPT5_CODEX = "gpt-5-codex"
+    """GPT5_CODEX."""
+    GPT5_PRO = "gpt-5-pro"
+    """GPT5_PRO."""
+    GPT5_PRO2025_10_06 = "gpt-5-pro-2025-10-06"
+    """GPT5_PRO2025_10_06."""
+    GPT5_1_CODEX_MAX = "gpt-5.1-codex-max"
+    """GPT5_1_CODEX_MAX."""
+
+
+class OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Authentication type for an OpenAPI endpoint. Allowed types are:
+
+    * Anonymous (no authentication required)
+    * Project Connection (requires a ``project_connection_id`` for the endpoint, as set up in AI
+      Foundry)
+    * Managed Identity (requires an ``audience`` for identity-based auth).
+    """
+
+    ANONYMOUS = "anonymous"
+    """ANONYMOUS."""
+    PROJECT_CONNECTION = "project_connection"
+    """PROJECT_CONNECTION."""
+    MANAGED_IDENTITY = "managed_identity"
+    """MANAGED_IDENTITY."""
+
+
+class OutputContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the ``type`` field of an output content part
+    (``output_text``, ``refusal``, or ``reasoning_text``)."""
+
+    OUTPUT_TEXT = "output_text"
+    """OUTPUT_TEXT."""
+    REFUSAL = "refusal"
+    """REFUSAL."""
+    REASONING_TEXT = "reasoning_text"
+    """REASONING_TEXT."""
+
+
+class OutputItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the ``type`` field of a response output item — messages, reasoning,
+    and the various tool call / tool call output item kinds."""
+
+    OUTPUT_MESSAGE = "output_message"
+    """OUTPUT_MESSAGE."""
+    FILE_SEARCH_CALL = "file_search_call"
+    """FILE_SEARCH_CALL."""
+    FUNCTION_CALL = "function_call"
+    """FUNCTION_CALL."""
+    WEB_SEARCH_CALL = "web_search_call"
+    """WEB_SEARCH_CALL."""
+    COMPUTER_CALL = "computer_call"
+    """COMPUTER_CALL."""
+    REASONING = "reasoning"
+    """REASONING."""
+    COMPACTION = "compaction"
+    """COMPACTION."""
+    IMAGE_GENERATION_CALL = "image_generation_call"
+    """IMAGE_GENERATION_CALL."""
+    CODE_INTERPRETER_CALL = "code_interpreter_call"
+    """CODE_INTERPRETER_CALL."""
+    LOCAL_SHELL_CALL = "local_shell_call"
+    """LOCAL_SHELL_CALL."""
+    SHELL_CALL = "shell_call"
+    """SHELL_CALL."""
+    SHELL_CALL_OUTPUT = "shell_call_output"
+    """SHELL_CALL_OUTPUT."""
+    APPLY_PATCH_CALL = "apply_patch_call"
+    """APPLY_PATCH_CALL."""
+    APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output"
+    """APPLY_PATCH_CALL_OUTPUT."""
+    MCP_CALL = "mcp_call"
+    """MCP_CALL."""
+    MCP_LIST_TOOLS = "mcp_list_tools"
+    """MCP_LIST_TOOLS."""
+    MCP_APPROVAL_REQUEST = "mcp_approval_request"
+    """MCP_APPROVAL_REQUEST."""
+    CUSTOM_TOOL_CALL = "custom_tool_call"
+    """CUSTOM_TOOL_CALL."""
+    MESSAGE = "message"
+    """MESSAGE."""
+    COMPUTER_CALL_OUTPUT = "computer_call_output"
+    """COMPUTER_CALL_OUTPUT."""
+    FUNCTION_CALL_OUTPUT = "function_call_output"
+    """FUNCTION_CALL_OUTPUT."""
+    LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output"
+    """LOCAL_SHELL_CALL_OUTPUT."""
+    MCP_APPROVAL_RESPONSE = "mcp_approval_response"
+    """MCP_APPROVAL_RESPONSE."""
+    CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output"
+    """CUSTOM_TOOL_CALL_OUTPUT."""
+    STRUCTURED_OUTPUTS = "structured_outputs"
+    """STRUCTURED_OUTPUTS."""
+    OAUTH_CONSENT_REQUEST = "oauth_consent_request"
+    """OAUTH_CONSENT_REQUEST."""
+    MEMORY_SEARCH_CALL = "memory_search_call"
+    """MEMORY_SEARCH_CALL."""
+    WORKFLOW_ACTION = "workflow_action"
+    """WORKFLOW_ACTION."""
+    A2_A_PREVIEW_CALL = "a2a_preview_call"
+    """A2_A_PREVIEW_CALL."""
+    A2_A_PREVIEW_CALL_OUTPUT = "a2a_preview_call_output"
+    """A2_A_PREVIEW_CALL_OUTPUT."""
+    BING_GROUNDING_CALL = "bing_grounding_call"
+    """BING_GROUNDING_CALL."""
+    BING_GROUNDING_CALL_OUTPUT = "bing_grounding_call_output"
+    """BING_GROUNDING_CALL_OUTPUT."""
+    SHAREPOINT_GROUNDING_PREVIEW_CALL = "sharepoint_grounding_preview_call"
+    """SHAREPOINT_GROUNDING_PREVIEW_CALL."""
+    SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT = "sharepoint_grounding_preview_call_output"
+    """SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT."""
+    AZURE_AI_SEARCH_CALL = "azure_ai_search_call"
+    """AZURE_AI_SEARCH_CALL."""
+    AZURE_AI_SEARCH_CALL_OUTPUT = "azure_ai_search_call_output"
+    """AZURE_AI_SEARCH_CALL_OUTPUT."""
+    BING_CUSTOM_SEARCH_PREVIEW_CALL = "bing_custom_search_preview_call"
+    """BING_CUSTOM_SEARCH_PREVIEW_CALL."""
+    BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT = "bing_custom_search_preview_call_output"
+    """BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT."""
+    OPENAPI_CALL = "openapi_call"
+    """OPENAPI_CALL."""
+    OPENAPI_CALL_OUTPUT = "openapi_call_output"
+    """OPENAPI_CALL_OUTPUT."""
+    BROWSER_AUTOMATION_PREVIEW_CALL = "browser_automation_preview_call"
+    """BROWSER_AUTOMATION_PREVIEW_CALL."""
+    BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT = "browser_automation_preview_call_output"
+    """BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT."""
+    FABRIC_DATAAGENT_PREVIEW_CALL = "fabric_dataagent_preview_call"
+    """FABRIC_DATAAGENT_PREVIEW_CALL."""
+    FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT = "fabric_dataagent_preview_call_output"
+    """FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT."""
+    AZURE_FUNCTION_CALL = "azure_function_call"
+    """AZURE_FUNCTION_CALL."""
+    AZURE_FUNCTION_CALL_OUTPUT = "azure_function_call_output"
+    """AZURE_FUNCTION_CALL_OUTPUT."""
+
+
+class OutputMessageContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the ``type`` field of an output message content part
+    (``output_text`` or ``refusal``)."""
+
+    OUTPUT_TEXT = "output_text"
+    """OUTPUT_TEXT."""
+    REFUSAL = "refusal"
+    """REFUSAL."""
+
+
+class PageOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Sort order for paginated listings (``asc`` or ``desc``)."""
+
+    ASC = "asc"
+    """ASC."""
+    DESC = "desc"
+    """DESC."""
+
+
+class RankerVersionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Ranker version selector (``auto`` or the pinned ``default-2024-11-15``)."""
+
+    AUTO = "auto"
+    """AUTO."""
+    DEFAULT2024_11_15 = "default-2024-11-15"
+    """DEFAULT2024_11_15."""
+
+
+class RealtimeMcpErrorType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Category of an MCP error (protocol, tool execution, or HTTP)."""
+
+    PROTOCOL_ERROR = "protocol_error"
+    """PROTOCOL_ERROR."""
+    TOOL_EXECUTION_ERROR = "tool_execution_error"
+    """TOOL_EXECUTION_ERROR."""
+    HTTP_ERROR = "http_error"
+    """HTTP_ERROR."""
+
+
+class ResponseErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The error code for the response. Covers server/rate-limit/prompt failures and a family of
+    image-input validation errors."""
+
+    SERVER_ERROR = "server_error"
+    """SERVER_ERROR."""
+    RATE_LIMIT_EXCEEDED = "rate_limit_exceeded"
+    """RATE_LIMIT_EXCEEDED."""
+    INVALID_PROMPT = "invalid_prompt"
+    """INVALID_PROMPT."""
+    VECTOR_STORE_TIMEOUT = "vector_store_timeout"
+    """VECTOR_STORE_TIMEOUT."""
+    INVALID_IMAGE = "invalid_image"
+    """INVALID_IMAGE."""
+    INVALID_IMAGE_FORMAT = "invalid_image_format"
+    """INVALID_IMAGE_FORMAT."""
+    INVALID_BASE64_IMAGE = "invalid_base64_image"
+    """INVALID_BASE64_IMAGE."""
+    INVALID_IMAGE_URL = "invalid_image_url"
+    """INVALID_IMAGE_URL."""
+    IMAGE_TOO_LARGE = "image_too_large"
+    """IMAGE_TOO_LARGE."""
+    IMAGE_TOO_SMALL = "image_too_small"
+    """IMAGE_TOO_SMALL."""
+    IMAGE_PARSE_ERROR = "image_parse_error"
+    """IMAGE_PARSE_ERROR."""
+    IMAGE_CONTENT_POLICY_VIOLATION = "image_content_policy_violation"
+    """IMAGE_CONTENT_POLICY_VIOLATION."""
+    INVALID_IMAGE_MODE = "invalid_image_mode"
+    """INVALID_IMAGE_MODE."""
+    IMAGE_FILE_TOO_LARGE = "image_file_too_large"
+    """IMAGE_FILE_TOO_LARGE."""
+    UNSUPPORTED_IMAGE_MEDIA_TYPE = "unsupported_image_media_type"
+    """UNSUPPORTED_IMAGE_MEDIA_TYPE."""
+    EMPTY_IMAGE_FILE = "empty_image_file"
+    """EMPTY_IMAGE_FILE."""
+    FAILED_TO_DOWNLOAD_IMAGE = "failed_to_download_image"
+    """FAILED_TO_DOWNLOAD_IMAGE."""
+    IMAGE_FILE_NOT_FOUND = "image_file_not_found"
+    """IMAGE_FILE_NOT_FOUND."""
+
+
+class ResponseStreamEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Event type identifiers emitted on a streaming (server-sent events) response — response
+    lifecycle events, per-part deltas/done events, and per-tool-call progress events."""
+
+    RESPONSE_AUDIO_DELTA = "response.audio.delta"
+    """RESPONSE_AUDIO_DELTA."""
+    RESPONSE_AUDIO_DONE = "response.audio.done"
+    """RESPONSE_AUDIO_DONE."""
+    RESPONSE_AUDIO_TRANSCRIPT_DELTA = "response.audio.transcript.delta"
+    """RESPONSE_AUDIO_TRANSCRIPT_DELTA."""
+    RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio.transcript.done"
+    """RESPONSE_AUDIO_TRANSCRIPT_DONE."""
+    RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA = "response.code_interpreter_call_code.delta"
+    """RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA."""
+    RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE = "response.code_interpreter_call_code.done"
+    """RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE."""
+    RESPONSE_CODE_INTERPRETER_CALL_COMPLETED = "response.code_interpreter_call.completed"
+    """RESPONSE_CODE_INTERPRETER_CALL_COMPLETED."""
+    RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS = "response.code_interpreter_call.in_progress"
+    """RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS."""
+    RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING = "response.code_interpreter_call.interpreting"
+    """RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING."""
+    RESPONSE_COMPLETED = "response.completed"
+    """RESPONSE_COMPLETED."""
+    RESPONSE_CONTENT_PART_ADDED = "response.content_part.added"
+    """RESPONSE_CONTENT_PART_ADDED."""
+    RESPONSE_CONTENT_PART_DONE = "response.content_part.done"
+    """RESPONSE_CONTENT_PART_DONE."""
+    RESPONSE_CREATED = "response.created"
+    """RESPONSE_CREATED."""
+    ERROR = "error"
+    """ERROR."""
+    RESPONSE_FILE_SEARCH_CALL_COMPLETED = "response.file_search_call.completed"
+    """RESPONSE_FILE_SEARCH_CALL_COMPLETED."""
+    RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS = "response.file_search_call.in_progress"
+    """RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS."""
+    RESPONSE_FILE_SEARCH_CALL_SEARCHING = "response.file_search_call.searching"
+    """RESPONSE_FILE_SEARCH_CALL_SEARCHING."""
+    RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta"
+    """RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA."""
+    RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done"
+    """RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE."""
+    RESPONSE_IN_PROGRESS = "response.in_progress"
+    """RESPONSE_IN_PROGRESS."""
+    RESPONSE_FAILED = "response.failed"
+    """RESPONSE_FAILED."""
+    RESPONSE_INCOMPLETE = "response.incomplete"
+    """RESPONSE_INCOMPLETE."""
+    RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added"
+    """RESPONSE_OUTPUT_ITEM_ADDED."""
+    RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done"
+    """RESPONSE_OUTPUT_ITEM_DONE."""
+    RESPONSE_REASONING_SUMMARY_PART_ADDED = "response.reasoning_summary_part.added"
+    """RESPONSE_REASONING_SUMMARY_PART_ADDED."""
+    RESPONSE_REASONING_SUMMARY_PART_DONE = "response.reasoning_summary_part.done"
+    """RESPONSE_REASONING_SUMMARY_PART_DONE."""
+    RESPONSE_REASONING_SUMMARY_TEXT_DELTA = "response.reasoning_summary_text.delta"
+    """RESPONSE_REASONING_SUMMARY_TEXT_DELTA."""
+    RESPONSE_REASONING_SUMMARY_TEXT_DONE = "response.reasoning_summary_text.done"
+    """RESPONSE_REASONING_SUMMARY_TEXT_DONE."""
+    RESPONSE_REASONING_TEXT_DELTA = "response.reasoning_text.delta"
+    """RESPONSE_REASONING_TEXT_DELTA."""
+    RESPONSE_REASONING_TEXT_DONE = "response.reasoning_text.done"
+    """RESPONSE_REASONING_TEXT_DONE."""
+    RESPONSE_REFUSAL_DELTA = "response.refusal.delta"
+    """RESPONSE_REFUSAL_DELTA."""
+    RESPONSE_REFUSAL_DONE = "response.refusal.done"
+    """RESPONSE_REFUSAL_DONE."""
+    RESPONSE_OUTPUT_TEXT_DELTA = "response.output_text.delta"
+    """RESPONSE_OUTPUT_TEXT_DELTA."""
+    RESPONSE_OUTPUT_TEXT_DONE = "response.output_text.done"
+    """RESPONSE_OUTPUT_TEXT_DONE."""
+    RESPONSE_WEB_SEARCH_CALL_COMPLETED = "response.web_search_call.completed"
+    """RESPONSE_WEB_SEARCH_CALL_COMPLETED."""
+    RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS = "response.web_search_call.in_progress"
+    """RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS."""
+    RESPONSE_WEB_SEARCH_CALL_SEARCHING = "response.web_search_call.searching"
+    """RESPONSE_WEB_SEARCH_CALL_SEARCHING."""
+    RESPONSE_IMAGE_GENERATION_CALL_COMPLETED = "response.image_generation_call.completed"
+    """RESPONSE_IMAGE_GENERATION_CALL_COMPLETED."""
+    RESPONSE_IMAGE_GENERATION_CALL_GENERATING = "response.image_generation_call.generating"
+    """RESPONSE_IMAGE_GENERATION_CALL_GENERATING."""
+    RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS = "response.image_generation_call.in_progress"
+    """RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS."""
+    RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE = "response.image_generation_call.partial_image"
+    """RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE."""
+    RESPONSE_MCP_CALL_ARGUMENTS_DELTA = "response.mcp_call_arguments.delta"
+    """RESPONSE_MCP_CALL_ARGUMENTS_DELTA."""
+    RESPONSE_MCP_CALL_ARGUMENTS_DONE = "response.mcp_call_arguments.done"
+    """RESPONSE_MCP_CALL_ARGUMENTS_DONE."""
+    RESPONSE_MCP_CALL_COMPLETED = "response.mcp_call.completed"
+    """RESPONSE_MCP_CALL_COMPLETED."""
+    RESPONSE_MCP_CALL_FAILED = "response.mcp_call.failed"
+    """RESPONSE_MCP_CALL_FAILED."""
+    RESPONSE_MCP_CALL_IN_PROGRESS = "response.mcp_call.in_progress"
+    """RESPONSE_MCP_CALL_IN_PROGRESS."""
+    RESPONSE_MCP_LIST_TOOLS_COMPLETED = "response.mcp_list_tools.completed"
+    """RESPONSE_MCP_LIST_TOOLS_COMPLETED."""
+    RESPONSE_MCP_LIST_TOOLS_FAILED = "response.mcp_list_tools.failed"
+    """RESPONSE_MCP_LIST_TOOLS_FAILED."""
+    RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS = "response.mcp_list_tools.in_progress"
+    """RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS."""
+    RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED = "response.output_text.annotation.added"
+    """RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED."""
+    RESPONSE_QUEUED = "response.queued"
+    """RESPONSE_QUEUED."""
+    RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA = "response.custom_tool_call_input.delta"
+    """RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA."""
+    RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE = "response.custom_tool_call_input.done"
+    """RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE."""
+
+
+class SearchContextSize(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Amount of search context to use (``low``, ``medium``, or ``high``)."""
+
+    LOW = "low"
+    """LOW."""
+    MEDIUM = "medium"
+    """MEDIUM."""
+    HIGH = "high"
+    """HIGH."""
+
+
+class TextResponseFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the text response format configuration
+    (``text``, ``json_schema``, or ``json_object``)."""
+
+    TEXT = "text"
+    """TEXT."""
+    JSON_SCHEMA = "json_schema"
+    """JSON_SCHEMA."""
+    JSON_OBJECT = "json_object"
+    """JSON_OBJECT."""
+
+
+class ToolCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The status of a tool call
+    (``in_progress``, ``completed``, ``incomplete``, or ``failed``)."""
+
+    IN_PROGRESS = "in_progress"
+    """IN_PROGRESS."""
+    COMPLETED = "completed"
+    """COMPLETED."""
+    INCOMPLETE = "incomplete"
+    """INCOMPLETE."""
+    FAILED = "failed"
+    """FAILED."""
+
+
+class ToolChoiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Tool choice mode (``none``, ``auto``, or ``required``)."""
+
+    NONE = "none"
+    """NONE."""
+    AUTO = "auto"
+    """AUTO."""
+    REQUIRED = "required"
+    """REQUIRED."""
+
+
+class ToolChoiceParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the ``type`` field of a tool choice parameter, naming the specific tool
+    (or allowed-tools set) the model is being directed to use."""
+
+    ALLOWED_TOOLS = "allowed_tools"
+    """ALLOWED_TOOLS."""
+    FUNCTION = "function"
+    """FUNCTION."""
+    MCP = "mcp"
+    """MCP."""
+    CUSTOM = "custom"
+    """CUSTOM."""
+    APPLY_PATCH = "apply_patch"
+    """APPLY_PATCH."""
+    SHELL = "shell"
+    """SHELL."""
+    FILE_SEARCH = "file_search"
+    """FILE_SEARCH."""
+    WEB_SEARCH_PREVIEW = "web_search_preview"
+    """WEB_SEARCH_PREVIEW."""
+    COMPUTER_USE_PREVIEW = "computer_use_preview"
+    """COMPUTER_USE_PREVIEW."""
+    WEB_SEARCH_PREVIEW2025_03_11 = "web_search_preview_2025_03_11"
+    """WEB_SEARCH_PREVIEW2025_03_11."""
+    IMAGE_GENERATION = "image_generation"
+    """IMAGE_GENERATION."""
+    CODE_INTERPRETER = "code_interpreter"
+    """CODE_INTERPRETER."""
+
+
+class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Discriminator for the ``type`` field of a tool definition — built-in, preview, and
+    Azure-specific tool kinds."""
+
+    FUNCTION = "function"
+    """FUNCTION."""
+    FILE_SEARCH = "file_search"
+    """FILE_SEARCH."""
+    COMPUTER_USE_PREVIEW = "computer_use_preview"
+    """COMPUTER_USE_PREVIEW."""
+    WEB_SEARCH = "web_search"
+    """WEB_SEARCH."""
+    MCP = "mcp"
+    """MCP."""
+    CODE_INTERPRETER = "code_interpreter"
+    """CODE_INTERPRETER."""
+    IMAGE_GENERATION = "image_generation"
+    """IMAGE_GENERATION."""
+    LOCAL_SHELL = "local_shell"
+    """LOCAL_SHELL."""
+    SHELL = "shell"
+    """SHELL."""
+    CUSTOM = "custom"
+    """CUSTOM."""
+    WEB_SEARCH_PREVIEW = "web_search_preview"
+    """WEB_SEARCH_PREVIEW."""
+    APPLY_PATCH = "apply_patch"
+    """APPLY_PATCH."""
+    A2_A_PREVIEW = "a2a_preview"
+    """A2_A_PREVIEW."""
+    BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview"
+    """BING_CUSTOM_SEARCH_PREVIEW."""
+    BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview"
+    """BROWSER_AUTOMATION_PREVIEW."""
+    FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview"
+    """FABRIC_DATAAGENT_PREVIEW."""
+    SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview"
+    """SHAREPOINT_GROUNDING_PREVIEW."""
+    MEMORY_SEARCH_PREVIEW = "memory_search_preview"
+    """MEMORY_SEARCH_PREVIEW."""
+    AZURE_AI_SEARCH = "azure_ai_search"
+    """AZURE_AI_SEARCH."""
+    AZURE_FUNCTION = "azure_function"
+    """AZURE_FUNCTION."""
+    BING_GROUNDING = "bing_grounding"
+    """BING_GROUNDING."""
+    CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs"
+    """CAPTURE_STRUCTURED_OUTPUTS."""
+    OPENAPI = "openapi"
+    """OPENAPI."""
+    MEMORY_SEARCH = "memory_search"
+    """MEMORY_SEARCH."""
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_models.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_models.py
new file mode 100644
index 000000000000..3996bd594389
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_models.py
@@ -0,0 +1,17089 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=useless-super-delegation
+
+import datetime
+from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload
+
+from .._utils.model_base import Model as _Model, rest_discriminator, rest_field
+from ._enums import (
+ AnnotationType,
+ ApplyPatchFileOperationType,
+ ApplyPatchOperationParamType,
+ ComputerActionType,
+ ContainerNetworkPolicyParamType,
+ ContainerSkillType,
+ CustomToolParamFormatType,
+ FunctionAndCustomToolCallOutputType,
+ FunctionShellCallEnvironmentType,
+ FunctionShellCallItemParamEnvironmentType,
+ FunctionShellCallOutputOutcomeParamType,
+ FunctionShellCallOutputOutcomeType,
+ FunctionShellToolParamEnvironmentType,
+ ItemFieldType,
+ ItemType,
+ MemoryItemKind,
+ MessageContentType,
+ OpenApiAuthType,
+ OutputContentType,
+ OutputItemType,
+ OutputMessageContentType,
+ RealtimeMcpErrorType,
+ ResponseStreamEventType,
+ TextResponseFormatConfigurationType,
+ ToolChoiceParamType,
+ ToolType,
+)
+
+if TYPE_CHECKING:
+ from .. import _types, models as _models
+
+
+class Tool(_Model):
+    """A tool that can be used to generate a response.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    A2APreviewTool, ApplyPatchToolParam, AzureAISearchTool, AzureFunctionTool,
+    BingCustomSearchPreviewTool, BingGroundingTool, BrowserAutomationPreviewTool,
+    CaptureStructuredOutputsTool, CodeInterpreterTool, ComputerUsePreviewTool, CustomToolParam,
+    MicrosoftFabricPreviewTool, FileSearchTool, FunctionTool, ImageGenTool, LocalShellToolParam,
+    MCPTool, MemorySearchTool, MemorySearchPreviewTool, OpenApiTool, SharepointPreviewTool,
+    FunctionShellToolParam, WebSearchTool, WebSearchPreviewTool
+
+    :ivar type: Required. Known values are: "function", "file_search", "computer_use_preview",
+     "web_search", "mcp", "code_interpreter", "image_generation", "local_shell", "shell", "custom",
+     "web_search_preview", "apply_patch", "a2a_preview", "bing_custom_search_preview",
+     "browser_automation_preview", "fabric_dataagent_preview", "sharepoint_grounding_preview",
+     "memory_search_preview", "azure_ai_search", "azure_function", "bing_grounding",
+     "capture_structured_outputs", "openapi", and "memory_search".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ToolType
+    """
+
+    # Discriminator registry mapping "type" values to subclasses. Presumably
+    # populated by the _Model subclass machinery when a subclass is declared with
+    # discriminator="..." — confirm in .._utils.model_base before relying on it.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"function\", \"file_search\", \"computer_use_preview\",
+     \"web_search\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\",
+     \"shell\", \"custom\", \"web_search_preview\", \"apply_patch\", \"a2a_preview\",
+     \"bing_custom_search_preview\", \"browser_automation_preview\", \"fabric_dataagent_preview\",
+     \"sharepoint_grounding_preview\", \"memory_search_preview\", \"azure_ai_search\",
+     \"azure_function\", \"bing_grounding\", \"capture_structured_outputs\", \"openapi\", and
+     \"memory_search\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class A2APreviewTool(Tool, discriminator="a2a_preview"):
+    """An agent implementing the A2A protocol.
+
+    :ivar type: The type of the tool. Always ``a2a_preview``. Required. A2_A_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.A2_A_PREVIEW
+    :ivar base_url: Base URL of the agent.
+    :vartype base_url: str
+    :ivar agent_card_path: The path to the agent card relative to the ``base_url``. If not
+     provided, defaults to ``/.well-known/agent-card.json``.
+    :vartype agent_card_path: str
+    :ivar project_connection_id: The connection ID in the project for the A2A server. The
+     connection stores authentication and other connection details needed to connect to the A2A
+     server.
+    :vartype project_connection_id: str
+    """
+
+    type: Literal[ToolType.A2_A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the tool. Always ``a2a_preview``. Required. A2_A_PREVIEW."""
+    base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Base URL of the agent."""
+    agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The path to the agent card relative to the ``base_url``. If not provided, defaults to
+     ``/.well-known/agent-card.json``."""
+    project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The connection ID in the project for the A2A server. The connection stores authentication and
+     other connection details needed to connect to the A2A server."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        base_url: Optional[str] = None,
+        agent_card_path: Optional[str] = None,
+        project_connection_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = ToolType.A2_A_PREVIEW  # type: ignore
+
+
+class OutputItem(_Model):
+    """OutputItem.
+
+    Polymorphic base for items emitted in a response's output; ``type`` is the
+    discriminator.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    A2AToolCall, A2AToolCallOutput, OutputItemApplyPatchToolCall,
+    OutputItemApplyPatchToolCallOutput, AzureAISearchToolCall, AzureAISearchToolCallOutput,
+    AzureFunctionToolCall, AzureFunctionToolCallOutput, BingCustomSearchToolCall,
+    BingCustomSearchToolCallOutput, BingGroundingToolCall, BingGroundingToolCallOutput,
+    BrowserAutomationToolCall, BrowserAutomationToolCallOutput, OutputItemCodeInterpreterToolCall,
+    OutputItemCompactionBody, OutputItemComputerToolCall, OutputItemComputerToolCallOutput,
+    OutputItemCustomToolCall, OutputItemCustomToolCallOutput, FabricDataAgentToolCall,
+    FabricDataAgentToolCallOutput, OutputItemFileSearchToolCall, OutputItemFunctionToolCall,
+    OutputItemFunctionToolCallOutput, OutputItemImageGenToolCall, OutputItemLocalShellToolCall,
+    OutputItemLocalShellToolCallOutput, OutputItemMcpApprovalRequest,
+    OutputItemMcpApprovalResponseResource, OutputItemMcpToolCall, OutputItemMcpListTools,
+    MemorySearchToolCallItemResource, OutputItemMessage, OAuthConsentRequestOutputItem,
+    OpenApiToolCall, OpenApiToolCallOutput, OutputItemOutputMessage, OutputItemReasoningItem,
+    SharepointGroundingToolCall, SharepointGroundingToolCallOutput, OutputItemFunctionShellCall,
+    OutputItemFunctionShellCallOutput, StructuredOutputsOutputItem, OutputItemWebSearchToolCall,
+    WorkflowActionOutputItem
+
+    :ivar type: Required. Known values are: "output_message", "file_search_call", "function_call",
+     "web_search_call", "computer_call", "reasoning", "compaction", "image_generation_call",
+     "code_interpreter_call", "local_shell_call", "shell_call", "shell_call_output",
+     "apply_patch_call", "apply_patch_call_output", "mcp_call", "mcp_list_tools",
+     "mcp_approval_request", "custom_tool_call", "message", "computer_call_output",
+     "function_call_output", "local_shell_call_output", "mcp_approval_response",
+     "custom_tool_call_output", "structured_outputs", "oauth_consent_request", "memory_search_call",
+     "workflow_action", "a2a_preview_call", "a2a_preview_call_output", "bing_grounding_call",
+     "bing_grounding_call_output", "sharepoint_grounding_preview_call",
+     "sharepoint_grounding_preview_call_output", "azure_ai_search_call",
+     "azure_ai_search_call_output", "bing_custom_search_preview_call",
+     "bing_custom_search_preview_call_output", "openapi_call", "openapi_call_output",
+     "browser_automation_preview_call", "browser_automation_preview_call_output",
+     "fabric_dataagent_preview_call", "fabric_dataagent_preview_call_output", "azure_function_call",
+     and "azure_function_call_output".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputItemType
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    """
+
+    # Discriminator registry mapping "type" values to subclasses. Presumably
+    # populated by the _Model subclass machinery — confirm in .._utils.model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"output_message\", \"file_search_call\", \"function_call\",
+     \"web_search_call\", \"computer_call\", \"reasoning\", \"compaction\",
+     \"image_generation_call\", \"code_interpreter_call\", \"local_shell_call\", \"shell_call\",
+     \"shell_call_output\", \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_call\",
+     \"mcp_list_tools\", \"mcp_approval_request\", \"custom_tool_call\", \"message\",
+     \"computer_call_output\", \"function_call_output\", \"local_shell_call_output\",
+     \"mcp_approval_response\", \"custom_tool_call_output\", \"structured_outputs\",
+     \"oauth_consent_request\", \"memory_search_call\", \"workflow_action\", \"a2a_preview_call\",
+     \"a2a_preview_call_output\", \"bing_grounding_call\", \"bing_grounding_call_output\",
+     \"sharepoint_grounding_preview_call\", \"sharepoint_grounding_preview_call_output\",
+     \"azure_ai_search_call\", \"azure_ai_search_call_output\", \"bing_custom_search_preview_call\",
+     \"bing_custom_search_preview_call_output\", \"openapi_call\", \"openapi_call_output\",
+     \"browser_automation_preview_call\", \"browser_automation_preview_call_output\",
+     \"fabric_dataagent_preview_call\", \"fabric_dataagent_preview_call_output\",
+     \"azure_function_call\", and \"azure_function_call_output\"."""
+    created_by: Optional[Union["_models.CreatedBy", str]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The information about the creator of the item. Is either a CreatedBy type or a str type."""
+    agent_reference: Optional["_models.AgentReference"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The agent that created the item."""
+    response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response on which the item is created."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class A2AToolCall(OutputItem, discriminator="a2a_preview_call"):
+    """An A2A (Agent-to-Agent) tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. A2_A_PREVIEW_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.A2_A_PREVIEW_CALL
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the A2A agent card being called. Required.
+    :vartype name: str
+    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
+    :vartype arguments: str
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.A2_A_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. A2_A_PREVIEW_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the A2A agent card being called. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the tool. Required."""
+    # Union[str, enum]: presumably the extensible-enum pattern, so unrecognized
+    # service values surface as plain strings — confirm against model_base handling.
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+     \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        arguments: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = OutputItemType.A2_A_PREVIEW_CALL  # type: ignore
+
+
+class A2AToolCallOutput(OutputItem, discriminator="a2a_preview_call_output"):
+    """The output of an A2A (Agent-to-Agent) tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. A2_A_PREVIEW_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.A2_A_PREVIEW_CALL_OUTPUT
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the A2A agent card that was called. Required.
+    :vartype name: str
+    :ivar output: The output from the A2A tool call. Is one of the following types: {str: Any},
+     str, [Any]
+    :vartype output: dict[str, any] or str or list[any]
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.A2_A_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. A2_A_PREVIEW_CALL_OUTPUT."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the A2A agent card that was called. Required."""
+    # Optional: the output may be absent (e.g. while the call is still in progress —
+    # NOTE(review): assumption, confirm against service behavior).
+    output: Optional["_types.ToolCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the A2A tool call. Is one of the following types: {str: Any}, str, [Any]"""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+     \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        output: Optional["_types.ToolCallOutputContent"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = OutputItemType.A2_A_PREVIEW_CALL_OUTPUT  # type: ignore
+
+
+class AgentId(_Model):
+    """AgentId.
+
+    Identifies a specific agent by name and version; ``type`` is a fixed tag.
+
+    :ivar type: Required. Default value is "agent_id".
+    :vartype type: str
+    :ivar name: The name of the agent. Required.
+    :vartype name: str
+    :ivar version: The version identifier of the agent. Required.
+    :vartype version: str
+    """
+
+    type: Literal["agent_id"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required. Default value is \"agent_id\"."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the agent. Required."""
+    version: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The version identifier of the agent. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a constant tag, set unconditionally; callers cannot override it
+        # (it is not exposed in the keyword-only overload above).
+        self.type: Literal["agent_id"] = "agent_id"
+
+
+class AgentReference(_Model):
+    """AgentReference.
+
+    A reference to an agent by name, optionally pinned to a version; ``type`` is a
+    fixed tag.
+
+    :ivar type: Required. Default value is "agent_reference".
+    :vartype type: str
+    :ivar name: The name of the agent. Required.
+    :vartype name: str
+    :ivar version: The version identifier of the agent.
+    :vartype version: str
+    """
+
+    type: Literal["agent_reference"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required. Default value is \"agent_reference\"."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the agent. Required."""
+    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The version identifier of the agent."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a constant tag, set unconditionally; callers cannot override it.
+        self.type: Literal["agent_reference"] = "agent_reference"
+
+
+class AISearchIndexResource(_Model):
+    """A AI Search Index resource.
+
+    :ivar project_connection_id: An index connection ID in an IndexResource attached to this agent.
+    :vartype project_connection_id: str
+    :ivar index_name: The name of an index in an IndexResource attached to this agent.
+    :vartype index_name: str
+    :ivar query_type: Type of query in an AIIndexResource attached to this agent. Known values are:
+     "simple", "semantic", "vector", "vector_simple_hybrid", and "vector_semantic_hybrid".
+    :vartype query_type: str or ~azure.ai.responses.server.sdk.models.models.AzureAISearchQueryType
+    :ivar top_k: Number of documents to retrieve from search and present to the model.
+    :vartype top_k: int
+    :ivar filter: filter string for search resource.
+    :vartype filter: str
+    :ivar index_asset_id: Index asset id for search resource.
+    :vartype index_asset_id: str
+    """
+
+    project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """An index connection ID in an IndexResource attached to this agent."""
+    index_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of an index in an IndexResource attached to this agent."""
+    query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Type of query in an AIIndexResource attached to this agent. Known values are: \"simple\",
+     \"semantic\", \"vector\", \"vector_simple_hybrid\", and \"vector_semantic_hybrid\"."""
+    top_k: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Number of documents to retrieve from search and present to the model."""
+    # NOTE(review): the generator emitted a `Learn more here`_ link with an empty
+    # target here; the dangling markup was removed from the docstrings. Restore the
+    # URL in the TypeSpec doc comment upstream so it survives regeneration.
+    filter: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """filter string for search resource."""
+    index_asset_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Index asset id for search resource."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: Optional[str] = None,
+        index_name: Optional[str] = None,
+        query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = None,
+        top_k: Optional[int] = None,
+        filter: Optional[str] = None,  # pylint: disable=redefined-builtin
+        index_asset_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class Annotation(_Model):
+    """An annotation that applies to a span of output text.
+
+    Polymorphic base; ``type`` is the discriminator.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    ContainerFileCitationBody, FileCitationBody, FilePath, UrlCitationBody
+
+    :ivar type: Required. Known values are: "file_citation", "url_citation",
+     "container_file_citation", and "file_path".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AnnotationType
+    """
+
+    # Discriminator registry mapping "type" values to subclasses. Presumably
+    # populated by the _Model subclass machinery — confirm in .._utils.model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"file_citation\", \"url_citation\", \"container_file_citation\",
+     and \"file_path\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApiErrorResponse(_Model):
+    """Error response for API failures.
+
+    Wraps a single required ``error`` payload (see the Error model for its fields).
+
+    :ivar error: Required.
+    :vartype error: ~azure.ai.responses.server.sdk.models.models.Error
+    """
+
+    error: "_models.Error" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        error: "_models.Error",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApplyPatchFileOperation(_Model):
+    """Apply patch operation.
+
+    Polymorphic base for file operations in an apply-patch call; ``type`` is the
+    discriminator.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    ApplyPatchCreateFileOperation, ApplyPatchDeleteFileOperation, ApplyPatchUpdateFileOperation
+
+    :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchFileOperationType
+    """
+
+    # Discriminator registry mapping "type" values to subclasses. Presumably
+    # populated by the _Model subclass machinery — confirm in .._utils.model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApplyPatchCreateFileOperation(ApplyPatchFileOperation, discriminator="create_file"):
+    """Apply patch create file operation.
+
+    :ivar type: Create a new file with the provided diff. Required. CREATE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CREATE_FILE
+    :ivar path: Path of the file to create. Required.
+    :vartype path: str
+    :ivar diff: Diff to apply. Required.
+    :vartype diff: str
+    """
+
+    type: Literal[ApplyPatchFileOperationType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Create a new file with the provided diff. Required. CREATE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to create. Required."""
+    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Diff to apply. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+        diff: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = ApplyPatchFileOperationType.CREATE_FILE  # type: ignore
+
+
+class ApplyPatchOperationParam(_Model):
+    """Apply patch operation.
+
+    Polymorphic base for apply-patch operation request parameters; ``type`` is the
+    discriminator.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    ApplyPatchCreateFileOperationParam, ApplyPatchDeleteFileOperationParam,
+    ApplyPatchUpdateFileOperationParam
+
+    :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchOperationParamType
+    """
+
+    # Discriminator registry mapping "type" values to subclasses. Presumably
+    # populated by the _Model subclass machinery — confirm in .._utils.model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApplyPatchCreateFileOperationParam(ApplyPatchOperationParam, discriminator="create_file"):
+    """Apply patch create file operation.
+
+    :ivar type: The operation type. Always ``create_file``. Required. CREATE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CREATE_FILE
+    :ivar path: Path of the file to create relative to the workspace root. Required.
+    :vartype path: str
+    :ivar diff: Unified diff content to apply when creating the file. Required.
+    :vartype diff: str
+    """
+
+    type: Literal[ApplyPatchOperationParamType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The operation type. Always ``create_file``. Required. CREATE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to create relative to the workspace root. Required."""
+    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Unified diff content to apply when creating the file. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+        diff: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = ApplyPatchOperationParamType.CREATE_FILE  # type: ignore
+
+
+class ApplyPatchDeleteFileOperation(ApplyPatchFileOperation, discriminator="delete_file"):
+    """Apply patch delete file operation.
+
+    :ivar type: Delete the specified file. Required. DELETE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DELETE_FILE
+    :ivar path: Path of the file to delete. Required.
+    :vartype path: str
+    """
+
+    type: Literal[ApplyPatchFileOperationType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Delete the specified file. Required. DELETE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to delete. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = ApplyPatchFileOperationType.DELETE_FILE  # type: ignore
+
+
+class ApplyPatchDeleteFileOperationParam(ApplyPatchOperationParam, discriminator="delete_file"):
+    """Apply patch delete file operation.
+
+    :ivar type: The operation type. Always ``delete_file``. Required. DELETE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DELETE_FILE
+    :ivar path: Path of the file to delete relative to the workspace root. Required.
+    :vartype path: str
+    """
+
+    type: Literal[ApplyPatchOperationParamType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The operation type. Always ``delete_file``. Required. DELETE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to delete relative to the workspace root. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to its constant value after base-class init.
+        self.type = ApplyPatchOperationParamType.DELETE_FILE  # type: ignore
+
+
class Item(_Model):
    """Content item used to generate a response.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ApplyPatchToolCallItemParam, ApplyPatchToolCallOutputItemParam, ItemCodeInterpreterToolCall,
    CompactionSummaryItemParam, ItemComputerToolCall, ComputerCallOutputItemParam,
    ItemCustomToolCall, ItemCustomToolCallOutput, ItemFileSearchToolCall, ItemFunctionToolCall,
    FunctionCallOutputItemParam, ItemImageGenToolCall, ItemReferenceParam, ItemLocalShellToolCall,
    ItemLocalShellToolCallOutput, ItemMcpApprovalRequest, MCPApprovalResponse, ItemMcpToolCall,
    ItemMcpListTools, MemorySearchToolCallItemParam, ItemMessage, ItemOutputMessage,
    ItemReasoningItem, FunctionShellCallItemParam, FunctionShellCallOutputItemParam,
    ItemWebSearchToolCall

    :ivar type: Required. Known values are: "message", "output_message", "file_search_call",
     "computer_call", "computer_call_output", "web_search_call", "function_call",
     "function_call_output", "reasoning", "compaction", "image_generation_call",
     "code_interpreter_call", "local_shell_call", "local_shell_call_output", "shell_call",
     "shell_call_output", "apply_patch_call", "apply_patch_call_output", "mcp_list_tools",
     "mcp_approval_request", "mcp_approval_response", "mcp_call", "custom_tool_call_output",
     "custom_tool_call", "item_reference", "structured_outputs", "oauth_consent_request",
     "memory_search_call", "workflow_action", "a2a_preview_call", "a2a_preview_call_output",
     "bing_grounding_call", "bing_grounding_call_output", "sharepoint_grounding_preview_call",
     "sharepoint_grounding_preview_call_output", "azure_ai_search_call",
     "azure_ai_search_call_output", "bing_custom_search_preview_call",
     "bing_custom_search_preview_call_output", "openapi_call", "openapi_call_output",
     "browser_automation_preview_call", "browser_automation_preview_call_output",
     "fabric_dataagent_preview_call", "fabric_dataagent_preview_call_output", "azure_function_call",
     and "azure_function_call_output".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ItemType
    """

    # NOTE(review): presumably populated by _Model's discriminator machinery to map
    # "type" values to registered subclasses — confirm against the framework.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"message\", \"output_message\", \"file_search_call\",
    \"computer_call\", \"computer_call_output\", \"web_search_call\", \"function_call\",
    \"function_call_output\", \"reasoning\", \"compaction\", \"image_generation_call\",
    \"code_interpreter_call\", \"local_shell_call\", \"local_shell_call_output\", \"shell_call\",
    \"shell_call_output\", \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\",
    \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"custom_tool_call_output\",
    \"custom_tool_call\", \"item_reference\", \"structured_outputs\", \"oauth_consent_request\",
    \"memory_search_call\", \"workflow_action\", \"a2a_preview_call\", \"a2a_preview_call_output\",
    \"bing_grounding_call\", \"bing_grounding_call_output\", \"sharepoint_grounding_preview_call\",
    \"sharepoint_grounding_preview_call_output\", \"azure_ai_search_call\",
    \"azure_ai_search_call_output\", \"bing_custom_search_preview_call\",
    \"bing_custom_search_preview_call_output\", \"openapi_call\", \"openapi_call_output\",
    \"browser_automation_preview_call\", \"browser_automation_preview_call_output\",
    \"fabric_dataagent_preview_call\", \"fabric_dataagent_preview_call_output\",
    \"azure_function_call\", and \"azure_function_call_output\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Polymorphic base: subclasses pin ``type`` in their own __init__.
        super().__init__(*args, **kwargs)
+
+
class ApplyPatchToolCallItemParam(Item, discriminator="apply_patch_call"):
    """Apply patch tool call.

    :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL
    :ivar id: The unique ID of this item.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``.
     Required. Known values are: "in_progress" and "completed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallStatusParam
    :ivar operation: The specific create, delete, or update instruction for the apply_patch tool
     call. Required.
    :vartype operation: ~azure.ai.responses.server.sdk.models.models.ApplyPatchOperationParam
    """

    type: Literal[ItemType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallStatusParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required.
    Known values are: \"in_progress\" and \"completed\"."""
    operation: "_models.ApplyPatchOperationParam" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The specific create, delete, or update instruction for the apply_patch tool call. Required."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallStatusParam"],
        operation: "_models.ApplyPatchOperationParam",
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ItemType.APPLY_PATCH_CALL # type: ignore
+
+
class ApplyPatchToolCallOutputItemParam(Item, discriminator="apply_patch_call_output"):
    """Apply patch tool call output.

    :ivar type: The type of the item. Always ``apply_patch_call_output``. Required.
     APPLY_PATCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL_OUTPUT
    :ivar id: The unique ID of this item.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call output. One of ``completed`` or
     ``failed``. Required. Known values are: "completed" and "failed".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallOutputStatusParam
    :ivar output: The textual output produced by applying the patch.
    :vartype output: str
    """

    type: Literal[ItemType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallOutputStatusParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required.
    Known values are: \"completed\" and \"failed\"."""
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallOutputStatusParam"],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        output: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ItemType.APPLY_PATCH_CALL_OUTPUT # type: ignore
+
+
class ApplyPatchToolParam(Tool, discriminator="apply_patch"):
    """Apply patch tool.

    :ivar type: The type of the tool. Always ``apply_patch``. Required. APPLY_PATCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH
    """

    type: Literal[ToolType.APPLY_PATCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the tool. Always ``apply_patch``. Required. APPLY_PATCH."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ToolType.APPLY_PATCH # type: ignore
+
+
class ApplyPatchUpdateFileOperation(ApplyPatchFileOperation, discriminator="update_file"):
    """Apply patch update file operation.

    :ivar type: Update an existing file with the provided diff. Required. UPDATE_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.UPDATE_FILE
    :ivar path: Path of the file to update. Required.
    :vartype path: str
    :ivar diff: Diff to apply. Required.
    :vartype diff: str
    """

    type: Literal[ApplyPatchFileOperationType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Update an existing file with the provided diff. Required. UPDATE_FILE."""
    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Path of the file to update. Required."""
    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Diff to apply. Required."""

    @overload
    def __init__(
        self,
        *,
        path: str,
        diff: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ApplyPatchFileOperationType.UPDATE_FILE # type: ignore
+
+
class ApplyPatchUpdateFileOperationParam(ApplyPatchOperationParam, discriminator="update_file"):
    """Apply patch update file operation.

    :ivar type: The operation type. Always ``update_file``. Required. UPDATE_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.UPDATE_FILE
    :ivar path: Path of the file to update relative to the workspace root. Required.
    :vartype path: str
    :ivar diff: Unified diff content to apply to the existing file. Required.
    :vartype diff: str
    """

    type: Literal[ApplyPatchOperationParamType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The operation type. Always ``update_file``. Required. UPDATE_FILE."""
    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Path of the file to update relative to the workspace root. Required."""
    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unified diff content to apply to the existing file. Required."""

    @overload
    def __init__(
        self,
        *,
        path: str,
        diff: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ApplyPatchOperationParamType.UPDATE_FILE # type: ignore
+
+
class ApproximateLocation(_Model):
    """An approximate user location described by coarse geographic fields.

    :ivar type: The type of location approximation. Always ``approximate``. Required. Default value
     is "approximate".
    :vartype type: str
    :ivar country: The country of the approximate location.
    :vartype country: str
    :ivar region: The region of the approximate location.
    :vartype region: str
    :ivar city: The city of the approximate location.
    :vartype city: str
    :ivar timezone: The timezone of the approximate location.
    :vartype timezone: str
    """

    type: Literal["approximate"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of location approximation. Always ``approximate``. Required. Default value is
    \"approximate\"."""
    country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        country: Optional[str] = None,
        region: Optional[str] = None,
        city: Optional[str] = None,
        timezone: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # ``type`` is a fixed literal (not a discriminator); set unconditionally after base init.
        super().__init__(*args, **kwargs)
        self.type: Literal["approximate"] = "approximate"
+
+
class AutoCodeInterpreterToolParam(_Model):
    """Automatic Code Interpreter Tool Parameters.

    :ivar type: Always ``auto``. Required. Default value is "auto".
    :vartype type: str
    :ivar file_ids: An optional list of uploaded files to make available to your code.
    :vartype file_ids: list[str]
    :ivar memory_limit: The container memory limit. Known values are: "1g", "4g", "16g", and "64g".
    :vartype memory_limit: str or ~azure.ai.responses.server.sdk.models.models.ContainerMemoryLimit
    :ivar network_policy: The network policy applied to the container.
    :vartype network_policy:
     ~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyParam
    """

    type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Always ``auto``. Required. Default value is \"auto\"."""
    file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An optional list of uploaded files to make available to your code."""
    memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"1g\", \"4g\", \"16g\", and \"64g\"."""
    network_policy: Optional["_models.ContainerNetworkPolicyParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        file_ids: Optional[list[str]] = None,
        memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = None,
        network_policy: Optional["_models.ContainerNetworkPolicyParam"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # ``type`` is a fixed literal (not a discriminator); set unconditionally after base init.
        super().__init__(*args, **kwargs)
        self.type: Literal["auto"] = "auto"
+
+
class AzureAISearchTool(Tool, discriminator="azure_ai_search"):
    """The input definition information for an Azure AI search tool as used to configure an agent.

    :ivar type: The object type, which is always 'azure_ai_search'. Required. AZURE_AI_SEARCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_AI_SEARCH
    :ivar azure_ai_search: The azure ai search index resource. Required.
    :vartype azure_ai_search:
     ~azure.ai.responses.server.sdk.models.models.AzureAISearchToolResource
    """

    type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'azure_ai_search'. Required. AZURE_AI_SEARCH."""
    azure_ai_search: "_models.AzureAISearchToolResource" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The azure ai search index resource. Required."""

    @overload
    def __init__(
        self,
        *,
        azure_ai_search: "_models.AzureAISearchToolResource",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ToolType.AZURE_AI_SEARCH # type: ignore
+
+
class AzureAISearchToolCall(OutputItem, discriminator="azure_ai_search_call"):
    """An Azure AI Search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_AI_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_AI_SEARCH_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.AZURE_AI_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_AI_SEARCH_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = OutputItemType.AZURE_AI_SEARCH_CALL # type: ignore
+
+
class AzureAISearchToolCallOutput(OutputItem, discriminator="azure_ai_search_call_output"):
    """The output of an Azure AI Search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_AI_SEARCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_AI_SEARCH_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the Azure AI Search tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.AZURE_AI_SEARCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_AI_SEARCH_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Azure AI Search tool call. Is one of the following types: {str: Any}, str,
    [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = OutputItemType.AZURE_AI_SEARCH_CALL_OUTPUT # type: ignore
+
+
class AzureAISearchToolResource(_Model):
    """A set of index resources used by the ``azure_ai_search`` tool.

    :ivar indexes: The indices attached to this agent. There can be a maximum of 1 index resource
     attached to the agent. Required.
    :vartype indexes: list[~azure.ai.responses.server.sdk.models.models.AISearchIndexResource]
    """

    indexes: list["_models.AISearchIndexResource"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The indices attached to this agent. There can be a maximum of 1 index resource attached to the
    agent. Required."""

    @overload
    def __init__(
        self,
        *,
        indexes: list["_models.AISearchIndexResource"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionBinding(_Model):
    """An Azure Function input/output binding backed by a storage queue.

    :ivar type: The type of binding, which is always 'storage_queue'. Required. Default value is
     "storage_queue".
    :vartype type: str
    :ivar storage_queue: Storage queue. Required.
    :vartype storage_queue: ~azure.ai.responses.server.sdk.models.models.AzureFunctionStorageQueue
    """

    type: Literal["storage_queue"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of binding, which is always 'storage_queue'. Required. Default value is
    \"storage_queue\"."""
    storage_queue: "_models.AzureFunctionStorageQueue" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Storage queue. Required."""

    @overload
    def __init__(
        self,
        *,
        storage_queue: "_models.AzureFunctionStorageQueue",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # ``type`` is a fixed literal (not a discriminator); set unconditionally after base init.
        super().__init__(*args, **kwargs)
        self.type: Literal["storage_queue"] = "storage_queue"
+
+
class AzureFunctionDefinition(_Model):
    """The definition of Azure function.

    :ivar function: The definition of azure function and its parameters. Required.
    :vartype function: ~azure.ai.responses.server.sdk.models.models.AzureFunctionDefinitionFunction
    :ivar input_binding: Input storage queue. The queue storage trigger runs a function as messages
     are added to it. Required.
    :vartype input_binding: ~azure.ai.responses.server.sdk.models.models.AzureFunctionBinding
    :ivar output_binding: Output storage queue. The function writes output to this queue when the
     input items are processed. Required.
    :vartype output_binding: ~azure.ai.responses.server.sdk.models.models.AzureFunctionBinding
    """

    function: "_models.AzureFunctionDefinitionFunction" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The definition of azure function and its parameters. Required."""
    input_binding: "_models.AzureFunctionBinding" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Input storage queue. The queue storage trigger runs a function as messages are added to it.
    Required."""
    output_binding: "_models.AzureFunctionBinding" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Output storage queue. The function writes output to this queue when the input items are
    processed. Required."""

    @overload
    def __init__(
        self,
        *,
        function: "_models.AzureFunctionDefinitionFunction",
        input_binding: "_models.AzureFunctionBinding",
        output_binding: "_models.AzureFunctionBinding",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionDefinitionFunction(_Model):
    """The callable function portion of an Azure Function definition.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does, used by the model to choose when
     and how to call the function.
    :vartype description: str
    :ivar parameters: The parameters the functions accepts, described as a JSON Schema object.
     Required.
    :vartype parameters: dict[str, any]
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the function does, used by the model to choose when and how to call the
    function."""
    parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The parameters the functions accepts, described as a JSON Schema object. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        parameters: dict[str, Any],
        description: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionStorageQueue(_Model):
    """The structure for keeping storage queue name and URI.

    :ivar queue_service_endpoint: URI to the Azure Storage Queue service allowing you to manipulate
     a queue. Required.
    :vartype queue_service_endpoint: str
    :ivar queue_name: The name of an Azure function storage queue. Required.
    :vartype queue_name: str
    """

    queue_service_endpoint: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """URI to the Azure Storage Queue service allowing you to manipulate a queue. Required."""
    queue_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of an Azure function storage queue. Required."""

    @overload
    def __init__(
        self,
        *,
        queue_service_endpoint: str,
        queue_name: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionTool(Tool, discriminator="azure_function"):
    """The input definition information for an Azure Function Tool, as used to configure an Agent.

    :ivar type: The object type, which is always 'azure_function'. Required. AZURE_FUNCTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_FUNCTION
    :ivar azure_function: The Azure Function Tool definition. Required.
    :vartype azure_function: ~azure.ai.responses.server.sdk.models.models.AzureFunctionDefinition
    """

    type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'azure_function'. Required. AZURE_FUNCTION."""
    azure_function: "_models.AzureFunctionDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The Azure Function Tool definition. Required."""

    @overload
    def __init__(
        self,
        *,
        azure_function: "_models.AzureFunctionDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = ToolType.AZURE_FUNCTION # type: ignore
+
+
class AzureFunctionToolCall(OutputItem, discriminator="azure_function_call"):
    """An Azure Function tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_FUNCTION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_FUNCTION_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the Azure Function being called. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.AZURE_FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_FUNCTION_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the Azure Function being called. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Force the discriminator to this subtype's fixed value after base initialization.
        super().__init__(*args, **kwargs)
        self.type = OutputItemType.AZURE_FUNCTION_CALL # type: ignore
+
+
+class AzureFunctionToolCallOutput(OutputItem, discriminator="azure_function_call_output"):
+    """The output of an Azure Function tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. AZURE_FUNCTION_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_FUNCTION_CALL_OUTPUT
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the Azure Function that was called. Required.
+    :vartype name: str
+    :ivar output: The output from the Azure Function tool call. Is one of the following types:
+     {str: Any}, str, [Any]
+    :vartype output: dict[str, any] or str or list[any]
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.AZURE_FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. AZURE_FUNCTION_CALL_OUTPUT."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the Azure Function that was called. Required."""
+    output: Optional["_types.ToolCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the Azure Function tool call. Is one of the following types: {str: Any}, str,
+    [Any]"""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        output: Optional["_types.ToolCallOutputContent"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.AZURE_FUNCTION_CALL_OUTPUT # type: ignore
+
+
+class BingCustomSearchConfiguration(_Model):
+    """A bing custom search configuration.
+
+    :ivar project_connection_id: Project connection id for grounding with bing search. Required.
+    :vartype project_connection_id: str
+    :ivar instance_name: Name of the custom configuration instance given to config. Required.
+    :vartype instance_name: str
+    :ivar market: The market where the results come from.
+    :vartype market: str
+    :ivar set_lang: The language to use for user interface strings when calling Bing API.
+    :vartype set_lang: str
+    :ivar count: The number of search results to return in the bing api response.
+    :vartype count: int
+    :ivar freshness: Filter search results by a specific time range. See the Bing Search API
+     ``freshness`` query-parameter documentation for the accepted values.
+    :vartype freshness: str
+    """
+
+    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Project connection id for grounding with bing search. Required."""
+    instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Name of the custom configuration instance given to config. Required."""
+    market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The market where the results come from."""
+    set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The language to use for user interface strings when calling Bing API."""
+    count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The number of search results to return in the bing api response."""
+    freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Filter search results by a specific time range. See the Bing Search API ``freshness``
+    query-parameter documentation for the accepted values."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: str,
+        instance_name: str,
+        market: Optional[str] = None,
+        set_lang: Optional[str] = None,
+        count: Optional[int] = None,
+        freshness: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # No subtype-specific initialization; the base model handles all field assignment.
+        super().__init__(*args, **kwargs)
+
+class BingCustomSearchPreviewTool(Tool, discriminator="bing_custom_search_preview"):
+    """The input definition information for a Bing custom search tool as used to configure an agent.
+
+    :ivar type: The object type, which is always 'bing_custom_search_preview'. Required.
+     BING_CUSTOM_SEARCH_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_CUSTOM_SEARCH_PREVIEW
+    :ivar bing_custom_search_preview: The bing custom search tool parameters. Required.
+    :vartype bing_custom_search_preview:
+     ~azure.ai.responses.server.sdk.models.models.BingCustomSearchToolParameters
+    """
+
+    type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The object type, which is always 'bing_custom_search_preview'. Required.
+    BING_CUSTOM_SEARCH_PREVIEW."""
+    bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The bing custom search tool parameters. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        bing_custom_search_preview: "_models.BingCustomSearchToolParameters",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore
+
+
+class BingCustomSearchToolCall(OutputItem, discriminator="bing_custom_search_preview_call"):
+    """A Bing custom search tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. BING_CUSTOM_SEARCH_PREVIEW_CALL.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.BING_CUSTOM_SEARCH_PREVIEW_CALL
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
+    :vartype arguments: str
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. BING_CUSTOM_SEARCH_PREVIEW_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the tool. Required."""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        arguments: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL # type: ignore
+
+
+class BingCustomSearchToolCallOutput(OutputItem, discriminator="bing_custom_search_preview_call_output"):
+    """The output of a Bing custom search tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar output: The output from the Bing custom search tool call. Is one of the following types:
+     {str: Any}, str, [Any]
+    :vartype output: dict[str, any] or str or list[any]
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    output: Optional["_types.ToolCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the Bing custom search tool call. Is one of the following types: {str: Any},
+    str, [Any]"""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        output: Optional["_types.ToolCallOutputContent"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT # type: ignore
+
+
+class BingCustomSearchToolParameters(_Model):
+    """The bing custom search tool parameters.
+
+    :ivar search_configurations: The search configurations attached to this tool. There can be a
+     maximum of 1 search configuration resource attached to the tool. Required.
+    :vartype search_configurations:
+     list[~azure.ai.responses.server.sdk.models.models.BingCustomSearchConfiguration]
+    """
+
+    search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The search configurations attached to this tool. There can be a maximum of 1 search
+    configuration resource attached to the tool. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        search_configurations: list["_models.BingCustomSearchConfiguration"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # No subtype-specific initialization; the base model handles all field assignment.
+        super().__init__(*args, **kwargs)
+
+
+class BingGroundingSearchConfiguration(_Model):
+    """Search configuration for Bing Grounding.
+
+    :ivar project_connection_id: Project connection id for grounding with bing search. Required.
+    :vartype project_connection_id: str
+    :ivar market: The market where the results come from.
+    :vartype market: str
+    :ivar set_lang: The language to use for user interface strings when calling Bing API.
+    :vartype set_lang: str
+    :ivar count: The number of search results to return in the bing api response.
+    :vartype count: int
+    :ivar freshness: Filter search results by a specific time range. See the Bing Search API
+     ``freshness`` query-parameter documentation for the accepted values.
+    :vartype freshness: str
+    """
+
+    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Project connection id for grounding with bing search. Required."""
+    market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The market where the results come from."""
+    set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The language to use for user interface strings when calling Bing API."""
+    count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The number of search results to return in the bing api response."""
+    freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Filter search results by a specific time range. See the Bing Search API ``freshness``
+    query-parameter documentation for the accepted values."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: str,
+        market: Optional[str] = None,
+        set_lang: Optional[str] = None,
+        count: Optional[int] = None,
+        freshness: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # No subtype-specific initialization; the base model handles all field assignment.
+        super().__init__(*args, **kwargs)
+
+
+class BingGroundingSearchToolParameters(_Model):
+    """The bing grounding search tool parameters.
+
+    :ivar search_configurations: The search configurations attached to this tool. There can be a
+     maximum of 1 search configuration resource attached to the tool. Required.
+    :vartype search_configurations:
+     list[~azure.ai.responses.server.sdk.models.models.BingGroundingSearchConfiguration]
+    """
+
+    search_configurations: list["_models.BingGroundingSearchConfiguration"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The search configurations attached to this tool. There can be a maximum of 1 search
+    configuration resource attached to the tool. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        search_configurations: list["_models.BingGroundingSearchConfiguration"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # No subtype-specific initialization; the base model handles all field assignment.
+        super().__init__(*args, **kwargs)
+
+
+class BingGroundingTool(Tool, discriminator="bing_grounding"):
+    """The input definition information for a bing grounding search tool as used to configure an
+    agent.
+
+    :ivar type: The object type, which is always 'bing_grounding'. Required. BING_GROUNDING.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_GROUNDING
+    :ivar bing_grounding: The bing grounding search tool parameters. Required.
+    :vartype bing_grounding:
+     ~azure.ai.responses.server.sdk.models.models.BingGroundingSearchToolParameters
+    """
+
+    type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The object type, which is always 'bing_grounding'. Required. BING_GROUNDING."""
+    bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The bing grounding search tool parameters. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        bing_grounding: "_models.BingGroundingSearchToolParameters",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = ToolType.BING_GROUNDING # type: ignore
+
+
+class BingGroundingToolCall(OutputItem, discriminator="bing_grounding_call"):
+    """A Bing grounding tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. BING_GROUNDING_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_GROUNDING_CALL
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
+    :vartype arguments: str
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.BING_GROUNDING_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. BING_GROUNDING_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the tool. Required."""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        arguments: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.BING_GROUNDING_CALL # type: ignore
+
+
+class BingGroundingToolCallOutput(OutputItem, discriminator="bing_grounding_call_output"):
+    """The output of a Bing grounding tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. BING_GROUNDING_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_GROUNDING_CALL_OUTPUT
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar output: The output from the Bing grounding tool call. Is one of the following types:
+     {str: Any}, str, [Any]
+    :vartype output: dict[str, any] or str or list[any]
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.BING_GROUNDING_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. BING_GROUNDING_CALL_OUTPUT."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    output: Optional["_types.ToolCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the Bing grounding tool call. Is one of the following types: {str: Any}, str,
+    [Any]"""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        output: Optional["_types.ToolCallOutputContent"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.BING_GROUNDING_CALL_OUTPUT # type: ignore
+
+
+class BrowserAutomationPreviewTool(Tool, discriminator="browser_automation_preview"):
+    """The input definition information for a Browser Automation Tool, as used to configure an Agent.
+
+    :ivar type: The object type, which is always 'browser_automation_preview'. Required.
+     BROWSER_AUTOMATION_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BROWSER_AUTOMATION_PREVIEW
+    :ivar browser_automation_preview: The Browser Automation Tool parameters. Required.
+    :vartype browser_automation_preview:
+     ~azure.ai.responses.server.sdk.models.models.BrowserAutomationToolParameters
+    """
+
+    type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The object type, which is always 'browser_automation_preview'. Required.
+    BROWSER_AUTOMATION_PREVIEW."""
+    browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The Browser Automation Tool parameters. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        browser_automation_preview: "_models.BrowserAutomationToolParameters",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = ToolType.BROWSER_AUTOMATION_PREVIEW # type: ignore
+
+
+class BrowserAutomationToolCall(OutputItem, discriminator="browser_automation_preview_call"):
+    """A browser automation tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. BROWSER_AUTOMATION_PREVIEW_CALL.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.BROWSER_AUTOMATION_PREVIEW_CALL
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
+    :vartype arguments: str
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. BROWSER_AUTOMATION_PREVIEW_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the tool. Required."""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        arguments: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL # type: ignore
+
+
+class BrowserAutomationToolCallOutput(OutputItem, discriminator="browser_automation_preview_call_output"):
+    """The output of a browser automation tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar output: The output from the browser automation tool call. Is one of the following types:
+     {str: Any}, str, [Any]
+    :vartype output: dict[str, any] or str or list[any]
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """Required. BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    output: Optional["_types.ToolCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the browser automation tool call. Is one of the following types: {str: Any},
+    str, [Any]"""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        output: Optional["_types.ToolCallOutputContent"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this subtype's fixed "type" value after base initialization.
+        self.type = OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT # type: ignore
+
+
+class BrowserAutomationToolConnectionParameters(_Model): # pylint: disable=name-too-long
+    """Definition of input parameters for the connection used by the Browser Automation Tool.
+
+    :ivar project_connection_id: The ID of the project connection to your Azure Playwright
+     resource. Required.
+    :vartype project_connection_id: str
+    """
+
+    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the project connection to your Azure Playwright resource. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # No subtype-specific initialization; the base model handles all field assignment.
+        super().__init__(*args, **kwargs)
+
+
+class BrowserAutomationToolParameters(_Model):
+    """Definition of input parameters for the Browser Automation Tool.
+
+    :ivar connection: The project connection parameters associated with the Browser Automation
+     Tool. Required.
+    :vartype connection:
+     ~azure.ai.responses.server.sdk.models.models.BrowserAutomationToolConnectionParameters
+    """
+
+    connection: "_models.BrowserAutomationToolConnectionParameters" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The project connection parameters associated with the Browser Automation Tool. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        connection: "_models.BrowserAutomationToolConnectionParameters",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # No subtype-specific initialization; the base model handles all field assignment.
+        super().__init__(*args, **kwargs)
+
+
class CaptureStructuredOutputsTool(Tool, discriminator="capture_structured_outputs"):
    """A tool for capturing structured outputs.

    :ivar type: The type of the tool. Always ``capture_structured_outputs``. Required.
     CAPTURE_STRUCTURED_OUTPUTS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CAPTURE_STRUCTURED_OUTPUTS
    :ivar outputs: The structured outputs to capture from the model. Required.
    :vartype outputs: ~azure.ai.responses.server.sdk.models.models.StructuredOutputDefinition
    """

    # Discriminator field: narrows the base ``Tool.type`` to this variant's literal value.
    type: Literal[ToolType.CAPTURE_STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the tool. Always ``capture_structured_outputs``. Required.
     CAPTURE_STRUCTURED_OUTPUTS."""
    outputs: "_models.StructuredOutputDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The structured outputs to capture from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        outputs: "_models.StructuredOutputDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = ToolType.CAPTURE_STRUCTURED_OUTPUTS  # type: ignore
+
+
class MemoryItem(_Model):
    """A single memory item stored in the memory store, containing content and metadata.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ChatSummaryMemoryItem, UserProfileMemoryItem

    :ivar memory_id: The unique ID of the memory item. Required.
    :vartype memory_id: str
    :ivar updated_at: The last update time of the memory item. Required.
    :vartype updated_at: ~datetime.datetime
    :ivar scope: The namespace that logically groups and isolates memories, such as a user ID.
     Required.
    :vartype scope: str
    :ivar content: The content of the memory. Required.
    :vartype content: str
    :ivar kind: The kind of the memory item. Required. Known values are: "user_profile" and
     "chat_summary".
    :vartype kind: str or ~azure.ai.responses.server.sdk.models.models.MemoryItemKind
    """

    # Discriminator-value -> subclass registry; presumably populated when subclasses are
    # declared with ``discriminator=...`` (see ChatSummaryMemoryItem below).
    __mapping__: dict[str, _Model] = {}
    memory_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the memory item. Required."""
    # ``format="unix-timestamp"`` means the datetime travels over the wire as a Unix timestamp.
    updated_at: datetime.datetime = rest_field(
        visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
    )
    """The last update time of the memory item. Required."""
    scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The namespace that logically groups and isolates memories, such as a user ID. Required."""
    content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the memory. Required."""
    kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"])
    """The kind of the memory item. Required. Known values are: \"user_profile\" and \"chat_summary\"."""

    @overload
    def __init__(
        self,
        *,
        memory_id: str,
        updated_at: datetime.datetime,
        scope: str,
        content: str,
        kind: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ChatSummaryMemoryItem(MemoryItem, discriminator="chat_summary"):
    """A memory item containing a summary extracted from conversations.

    :ivar memory_id: The unique ID of the memory item. Required.
    :vartype memory_id: str
    :ivar updated_at: The last update time of the memory item. Required.
    :vartype updated_at: ~datetime.datetime
    :ivar scope: The namespace that logically groups and isolates memories, such as a user ID.
     Required.
    :vartype scope: str
    :ivar content: The content of the memory. Required.
    :vartype content: str
    :ivar kind: The kind of the memory item. Required. Summary of chat conversations.
    :vartype kind: str or ~azure.ai.responses.server.sdk.models.models.CHAT_SUMMARY
    """

    # Narrows the base ``MemoryItem.kind`` discriminator to this variant's literal value.
    kind: Literal[MemoryItemKind.CHAT_SUMMARY] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The kind of the memory item. Required. Summary of chat conversations."""

    @overload
    def __init__(
        self,
        *,
        memory_id: str,
        updated_at: datetime.datetime,
        scope: str,
        content: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``kind`` explicitly.
        self.kind = MemoryItemKind.CHAT_SUMMARY  # type: ignore
+
+
class ComputerAction(_Model):
    """ComputerAction.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ClickParam, DoubleClickAction, DragParam, KeyPressAction, MoveParam, ScreenshotParam,
    ScrollParam, TypeParam, WaitParam

    :ivar type: Required. Known values are: "click", "double_click", "drag", "keypress", "move",
     "screenshot", "scroll", "type", and "wait".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ComputerActionType
    """

    # Discriminator-value -> subclass registry; presumably populated when subclasses are
    # declared with ``discriminator=...`` (e.g. ClickParam).
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"click\", \"double_click\", \"drag\", \"keypress\", \"move\",
     \"screenshot\", \"scroll\", \"type\", and \"wait\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ClickParam(ComputerAction, discriminator="click"):
    """Click.

    :ivar type: Specifies the event type. For a click action, this property is always ``click``.
     Required. CLICK.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CLICK
    :ivar button: Indicates which mouse button was pressed during the click. One of ``left``,
     ``right``, ``wheel``, ``back``, or ``forward``. Required. Known values are: "left", "right",
     "wheel", "back", and "forward".
    :vartype button: str or ~azure.ai.responses.server.sdk.models.models.ClickButtonType
    :ivar x: The x-coordinate where the click occurred. Required.
    :vartype x: int
    :ivar y: The y-coordinate where the click occurred. Required.
    :vartype y: int
    """

    # Narrows the base ``ComputerAction.type`` discriminator to the "click" variant.
    type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a click action, this property is always ``click``. Required.
     CLICK."""
    button: Union[str, "_models.ClickButtonType"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Indicates which mouse button was pressed during the click. One of ``left``, ``right``,
     ``wheel``, ``back``, or ``forward``. Required. Known values are: \"left\", \"right\",
     \"wheel\", \"back\", and \"forward\"."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate where the click occurred. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate where the click occurred. Required."""

    @overload
    def __init__(
        self,
        *,
        button: Union[str, "_models.ClickButtonType"],
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = ComputerActionType.CLICK  # type: ignore
+
+
class CodeInterpreterOutputImage(_Model):
    """Code interpreter output image.

    :ivar type: The type of the output. Always ``image``. Required. Default value is "image".
    :vartype type: str
    :ivar url: The URL of the image output from the code interpreter. Required.
    :vartype url: str
    """

    # Constant tag field (not a polymorphic discriminator); always "image",
    # assigned unconditionally in ``__init__``.
    type: Literal["image"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the output. Always ``image``. Required. Default value is \"image\"."""
    url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the image output from the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        url: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is fixed for this model; it is not an accepted keyword argument.
        self.type: Literal["image"] = "image"
+
+
class CodeInterpreterOutputLogs(_Model):
    """Code interpreter output logs.

    :ivar type: The type of the output. Always ``logs``. Required. Default value is "logs".
    :vartype type: str
    :ivar logs: The logs output from the code interpreter. Required.
    :vartype logs: str
    """

    # Constant tag field (not a polymorphic discriminator); always "logs",
    # assigned unconditionally in ``__init__``.
    type: Literal["logs"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the output. Always ``logs``. Required. Default value is \"logs\"."""
    logs: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The logs output from the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        logs: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is fixed for this model; it is not an accepted keyword argument.
        self.type: Literal["logs"] = "logs"
+
+
class CodeInterpreterTool(Tool, discriminator="code_interpreter"):
    """Code interpreter.

    :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required.
     CODE_INTERPRETER.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER
    :ivar container: The code interpreter container. Can be a container ID or an object that
     specifies uploaded file IDs to make available to your code, along with an optional
     ``memory_limit`` setting. If not provided, the service assumes auto. Is either a str type or a
     AutoCodeInterpreterToolParam type.
    :vartype container: str or
     ~azure.ai.responses.server.sdk.models.models.AutoCodeInterpreterToolParam
    """

    # Discriminator field: narrows the base ``Tool.type`` to this variant's literal value.
    type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the code interpreter tool. Always ``code_interpreter``. Required. CODE_INTERPRETER."""
    # Optional union field: a container ID string, or a full parameter object.
    container: Optional[Union[str, "_models.AutoCodeInterpreterToolParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The code interpreter container. Can be a container ID or an object that specifies uploaded file
     IDs to make available to your code, along with an optional ``memory_limit`` setting. If not
     provided, the service assumes auto. Is either a str type or a AutoCodeInterpreterToolParam
     type."""

    @overload
    def __init__(
        self,
        *,
        container: Optional[Union[str, "_models.AutoCodeInterpreterToolParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = ToolType.CODE_INTERPRETER  # type: ignore
+
+
class CompactionSummaryItemParam(Item, discriminator="compaction"):
    """Compaction item.

    :ivar id: The optional identifier of the item.
    :vartype id: str
    :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPACTION
    :ivar encrypted_content: The encrypted content of the compaction summary. Required.
    :vartype encrypted_content: str
    """

    # Optional item identifier (shadows the builtin ``id`` name by design of the REST shape).
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Discriminator field: narrows the base ``Item.type`` to the "compaction" variant.
    type: Literal[ItemType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``compaction``. Required. COMPACTION."""
    encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The encrypted content of the compaction summary. Required."""

    @overload
    def __init__(
        self,
        *,
        encrypted_content: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = ItemType.COMPACTION  # type: ignore
+
+
class CompactResource(_Model):
    """The compacted response object.

    :ivar id: The unique identifier for the compacted response. Required.
    :vartype id: str
    :ivar object: The object type. Always ``response.compaction``. Required. Default value is
     "response.compaction".
    :vartype object: str
    :ivar output: The compacted list of output items. Required.
    :vartype output: list[~azure.ai.responses.server.sdk.models.models.ItemField]
    :ivar created_at: Unix timestamp (in seconds) when the compacted conversation was created.
     Required.
    :vartype created_at: ~datetime.datetime
    :ivar usage: Token accounting for the compaction pass, including cached, reasoning, and total
     tokens. Required.
    :vartype usage: ~azure.ai.responses.server.sdk.models.models.ResponseUsage
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier for the compacted response. Required."""
    # Constant tag field; always "response.compaction", assigned unconditionally in ``__init__``.
    object: Literal["response.compaction"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The object type. Always ``response.compaction``. Required. Default value is
     \"response.compaction\"."""
    output: list["_models.ItemField"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The compacted list of output items. Required."""
    # ``format="unix-timestamp"`` means the datetime travels over the wire as Unix seconds.
    created_at: datetime.datetime = rest_field(
        visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
    )
    """Unix timestamp (in seconds) when the compacted conversation was created. Required."""
    usage: "_models.ResponseUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Token accounting for the compaction pass, including cached, reasoning, and total tokens.
     Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        output: list["_models.ItemField"],
        created_at: datetime.datetime,
        usage: "_models.ResponseUsage",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``object`` is fixed for this model; it is not an accepted keyword argument.
        self.object: Literal["response.compaction"] = "response.compaction"
+
+
class ComparisonFilter(_Model):
    """Comparison Filter.

    :ivar type: Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``,
     ``lte``, ``in``, ``nin``.

     * `eq`: equals
     * `ne`: not equal
     * `gt`: greater than
     * `gte`: greater than or equal
     * `lt`: less than
     * `lte`: less than or equal
     * `in`: in
     * `nin`: not in. Required. Is one of the following types: Literal["eq"], Literal["ne"],
     Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"]
    :vartype type: str or str or str or str or str or str
    :ivar key: The key to compare against the value. Required.
    :vartype key: str
    :ivar value: The value to compare against the attribute key; supports string, number, or
     boolean types. Required. Is one of the following types: str, int, bool, [Union[str, int]]
    :vartype value: str or int or bool or list[str or int]
    """

    # NOTE(review): the docstring lists eight operators (including ``in``/``nin``) but the
    # Literal below only allows six — confirm against the TypeSpec/OpenAPI source whether
    # ``in``/``nin`` should be accepted here, and regenerate if so.
    type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, ``lte``, ``in``,
     ``nin``.

     * `eq`: equals
     * `ne`: not equal
     * `gt`: greater than
     * `gte`: greater than or equal
     * `lt`: less than
     * `lte`: less than or equal
     * `in`: in
     * `nin`: not in. Required. Is one of the following types: Literal[\"eq\"],
     Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], Literal[\"lte\"]"""
    key: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The key to compare against the value. Required."""
    value: Union[str, int, bool, list[Union[str, int]]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The value to compare against the attribute key; supports string, number, or boolean types.
     Required. Is one of the following types: str, int, bool, [Union[str, int]]"""

    @overload
    def __init__(
        self,
        *,
        type: Literal["eq", "ne", "gt", "gte", "lt", "lte"],
        key: str,
        value: Union[str, int, bool, list[Union[str, int]]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CompoundFilter(_Model):
    """Compound Filter.

    :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or
     a Literal["or"] type.
    :vartype type: str or str
    :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or
     ``CompoundFilter``. Required.
    :vartype filters: list[~azure.ai.responses.server.sdk.models.models.ComparisonFilter or any]
    """

    type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Type of operation: ``and`` or ``or``. Required. Is either a Literal[\"and\"] type or a
     Literal[\"or\"] type."""
    # ``Any`` stands in for a nested ``CompoundFilter`` (the field docstring says items may be
    # either), avoiding a self-referential type annotation.
    filters: list[Union["_models.ComparisonFilter", Any]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required."""

    @overload
    def __init__(
        self,
        *,
        type: Literal["and", "or"],
        filters: list[Union["_models.ComparisonFilter", Any]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ComputerCallOutputItemParam(Item, discriminator="computer_call_output"):
    """Computer tool call output.

    :ivar id: The optional identifier of the item.
    :vartype id: str
    :ivar call_id: The ID of the computer tool call that produced the output. Required.
    :vartype call_id: str
    :ivar type: The type of the computer tool call output. Always ``computer_call_output``.
     Required. COMPUTER_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL_OUTPUT
    :ivar output: Required.
    :vartype output: ~azure.ai.responses.server.sdk.models.models.ComputerScreenshotImage
    :ivar acknowledged_safety_checks: Safety checks acknowledged by the caller for this output.
    :vartype acknowledged_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.FunctionCallItemStatus
    """

    # Optional item identifier (shadows the builtin ``id`` name by design of the REST shape).
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the computer tool call that produced the output. Required."""
    # Discriminator field: narrows the base ``Item.type`` to the "computer_call_output" variant.
    type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer tool call output. Always ``computer_call_output``. Required.
     COMPUTER_CALL_OUTPUT."""
    output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: "_models.ComputerScreenshotImage",
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None,
        status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = ItemType.COMPUTER_CALL_OUTPUT  # type: ignore
+
+
class ComputerCallSafetyCheckParam(_Model):
    """A pending safety check for the computer call.

    :ivar id: The ID of the pending safety check. Required.
    :vartype id: str
    :ivar code: The type/code of the pending safety check.
    :vartype code: str
    :ivar message: Details about the pending safety check.
    :vartype message: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the pending safety check. Required."""
    # Optional descriptive fields accompanying the safety check.
    code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        code: Optional[str] = None,
        message: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MessageContent(_Model):
    """A content part that makes up an input or output item.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ComputerScreenshotContent, MessageContentInputFileContent, MessageContentInputImageContent,
    MessageContentInputTextContent, MessageContentOutputTextContent,
    MessageContentReasoningTextContent, MessageContentRefusalContent, SummaryTextContent,
    TextContent

    :ivar type: Required. Known values are: "input_text", "output_text", "text", "summary_text",
     "reasoning_text", "refusal", "input_image", "computer_screenshot", and "input_file".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MessageContentType
    """

    # Discriminator-value -> subclass registry; presumably populated when subclasses are
    # declared with ``discriminator=...`` (e.g. ComputerScreenshotContent).
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"input_text\", \"output_text\", \"text\", \"summary_text\",
     \"reasoning_text\", \"refusal\", \"input_image\", \"computer_screenshot\", and \"input_file\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ComputerScreenshotContent(MessageContent, discriminator="computer_screenshot"):
    """Computer screenshot.

    :ivar type: Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. COMPUTER_SCREENSHOT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_SCREENSHOT
    :ivar image_url: Required.
    :vartype image_url: str
    :ivar file_id: Required.
    :vartype file_id: str
    """

    # Narrows the base ``MessageContent.type`` discriminator to the "computer_screenshot" variant.
    type: Literal[MessageContentType.COMPUTER_SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. COMPUTER_SCREENSHOT."""
    # NOTE(review): unlike ComputerScreenshotImage, both fields here are required — confirm
    # the spec intends both the URL and the file ID to always be present.
    image_url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        image_url: str,
        file_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = MessageContentType.COMPUTER_SCREENSHOT  # type: ignore
+
+
class ComputerScreenshotImage(_Model):
    """A computer screenshot image used with the computer use tool.

    :ivar type: Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. Default value is "computer_screenshot".
    :vartype type: str
    :ivar image_url: The URL of the screenshot image.
    :vartype image_url: str
    :ivar file_id: The identifier of an uploaded file that contains the screenshot.
    :vartype file_id: str
    """

    # Constant tag field (not a polymorphic discriminator); always "computer_screenshot",
    # assigned unconditionally in ``__init__``.
    type: Literal["computer_screenshot"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. Default value is \"computer_screenshot\"."""
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the screenshot image."""
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of an uploaded file that contains the screenshot."""

    @overload
    def __init__(
        self,
        *,
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is fixed for this model; it is not an accepted keyword argument.
        self.type: Literal["computer_screenshot"] = "computer_screenshot"
+
+
class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"):
    """Computer use preview.

    :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required.
     COMPUTER_USE_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_USE_PREVIEW
    :ivar environment: The type of computer environment to control. Required. Known values are:
     "windows", "mac", "linux", "ubuntu", and "browser".
    :vartype environment: str or ~azure.ai.responses.server.sdk.models.models.ComputerEnvironment
    :ivar display_width: The width of the computer display. Required.
    :vartype display_width: int
    :ivar display_height: The height of the computer display. Required.
    :vartype display_height: int
    """

    # Discriminator field: narrows the base ``Tool.type`` to this variant's literal value.
    type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer use tool. Always ``computer_use_preview``. Required.
     COMPUTER_USE_PREVIEW."""
    environment: Union[str, "_models.ComputerEnvironment"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of computer environment to control. Required. Known values are: \"windows\", \"mac\",
     \"linux\", \"ubuntu\", and \"browser\"."""
    display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The width of the computer display. Required."""
    display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The height of the computer display. Required."""

    @overload
    def __init__(
        self,
        *,
        environment: Union[str, "_models.ComputerEnvironment"],
        display_width: int,
        display_height: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value; callers never pass ``type`` explicitly.
        self.type = ToolType.COMPUTER_USE_PREVIEW  # type: ignore
+
+
class FunctionShellToolParamEnvironment(_Model):
    """FunctionShellToolParamEnvironment.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ContainerAutoParam, FunctionShellToolParamEnvironmentContainerReferenceParam,
    FunctionShellToolParamEnvironmentLocalEnvironmentParam

    :ivar type: Required. Known values are: "container_auto", "local", and "container_reference".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellToolParamEnvironmentType
    """

    # Discriminator-value -> subclass registry; presumably populated when subclasses are
    # declared with ``discriminator=...`` (e.g. ContainerAutoParam).
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"container_auto\", \"local\", and \"container_reference\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContainerAutoParam(FunctionShellToolParamEnvironment, discriminator="container_auto"):
    """ContainerAutoParam.

    Environment that asks the service to provision a container automatically for this request.

    :ivar type: Automatically creates a container for this request. Required. CONTAINER_AUTO.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_AUTO
    :ivar file_ids: An optional list of uploaded files to make available to your code.
    :vartype file_ids: list[str]
    :ivar memory_limit: Known values are: "1g", "4g", "16g", and "64g".
    :vartype memory_limit: str or ~azure.ai.responses.server.sdk.models.models.ContainerMemoryLimit
    :ivar skills: An optional list of skills referenced by id or inline data.
    :vartype skills: list[~azure.ai.responses.server.sdk.models.models.ContainerSkill]
    :ivar network_policy: Optional network access policy applied to the auto-created container.
    :vartype network_policy:
     ~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyParam
    """

    type: Literal[FunctionShellToolParamEnvironmentType.CONTAINER_AUTO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Automatically creates a container for this request. Required. CONTAINER_AUTO."""
    file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An optional list of uploaded files to make available to your code."""
    memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"1g\", \"4g\", \"16g\", and \"64g\"."""
    skills: Optional[list["_models.ContainerSkill"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An optional list of skills referenced by id or inline data."""
    # Optional network access policy for the container (see ContainerNetworkPolicyParam sub-classes).
    network_policy: Optional["_models.ContainerNetworkPolicyParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        file_ids: Optional[list[str]] = None,
        memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = None,
        skills: Optional[list["_models.ContainerSkill"]] = None,
        network_policy: Optional["_models.ContainerNetworkPolicyParam"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "container_auto" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = FunctionShellToolParamEnvironmentType.CONTAINER_AUTO  # type: ignore
+
+
class ContainerFileCitationBody(Annotation, discriminator="container_file_citation"):
    """Container file citation.

    Annotation pointing at a character span of the message that cites a container file.

    :ivar type: The type of the container file citation. Always ``container_file_citation``.
     Required. CONTAINER_FILE_CITATION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_FILE_CITATION
    :ivar container_id: The ID of the container file. Required.
    :vartype container_id: str
    :ivar file_id: The ID of the file. Required.
    :vartype file_id: str
    :ivar start_index: The index of the first character of the container file citation in the
     message. Required.
    :vartype start_index: int
    :ivar end_index: The index of the last character of the container file citation in the message.
     Required.
    :vartype end_index: int
    :ivar filename: The filename of the container file cited. Required.
    :vartype filename: str
    """

    type: Literal[AnnotationType.CONTAINER_FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the container file citation. Always ``container_file_citation``. Required.
    CONTAINER_FILE_CITATION."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the container file. Required."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the file. Required."""
    start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the first character of the container file citation in the message. Required."""
    end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the last character of the container file citation in the message. Required."""
    filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The filename of the container file cited. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
        file_id: str,
        start_index: int,
        end_index: int,
        filename: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "container_file_citation" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = AnnotationType.CONTAINER_FILE_CITATION  # type: ignore
+
+
class ContainerNetworkPolicyParam(_Model):
    """Network access policy for the container.

    Abstract discriminated base; concrete policies either disable outbound access or
    restrict it to an allowlist.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ContainerNetworkPolicyAllowlistParam, ContainerNetworkPolicyDisabledParam

    :ivar type: Required. Known values are: "disabled" and "allowlist".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyParamType
    """

    # Discriminator registry: maps a wire "type" value to the concrete sub-class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"disabled\" and \"allowlist\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContainerNetworkPolicyAllowlistParam(ContainerNetworkPolicyParam, discriminator="allowlist"):
    """ContainerNetworkPolicyAllowlistParam.

    Network policy that permits outbound access only to an explicit list of domains.

    :ivar type: Allow outbound network access only to specified domains. Always ``allowlist``.
     Required. ALLOWLIST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ALLOWLIST
    :ivar allowed_domains: A list of allowed domains when type is ``allowlist``. Required.
    :vartype allowed_domains: list[str]
    :ivar domain_secrets: Optional domain-scoped secrets for allowlisted domains.
    :vartype domain_secrets:
     list[~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyDomainSecretParam]
    """

    type: Literal[ContainerNetworkPolicyParamType.ALLOWLIST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Allow outbound network access only to specified domains. Always ``allowlist``. Required.
    ALLOWLIST."""
    allowed_domains: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A list of allowed domains when type is ``allowlist``. Required."""
    # Create-only visibility, unlike the other fields here — presumably so secret values are
    # sent on creation but never echoed back in read responses; confirm against the spec.
    domain_secrets: Optional[list["_models.ContainerNetworkPolicyDomainSecretParam"]] = rest_field(
        visibility=["create"]
    )
    """Optional domain-scoped secrets for allowlisted domains."""

    @overload
    def __init__(
        self,
        *,
        allowed_domains: list[str],
        domain_secrets: Optional[list["_models.ContainerNetworkPolicyDomainSecretParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "allowlist" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = ContainerNetworkPolicyParamType.ALLOWLIST  # type: ignore
+
+
class ContainerNetworkPolicyDisabledParam(ContainerNetworkPolicyParam, discriminator="disabled"):
    """ContainerNetworkPolicyDisabledParam.

    Network policy that disables all outbound network access; carries no fields beyond
    the discriminator.

    :ivar type: Disable outbound network access. Always ``disabled``. Required. DISABLED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DISABLED
    """

    type: Literal[ContainerNetworkPolicyParamType.DISABLED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Disable outbound network access. Always ``disabled``. Required. DISABLED."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "disabled" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = ContainerNetworkPolicyParamType.DISABLED  # type: ignore
+
+
class ContainerNetworkPolicyDomainSecretParam(_Model):
    """ContainerNetworkPolicyDomainSecretParam.

    A named secret value scoped to a single allowlisted domain (see
    ContainerNetworkPolicyAllowlistParam.domain_secrets).

    :ivar domain: The domain associated with the secret. Required.
    :vartype domain: str
    :ivar name: The name of the secret to inject for the domain. Required.
    :vartype name: str
    :ivar value: The secret value to inject for the domain. Required.
    :vartype value: str
    """

    domain: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The domain associated with the secret. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the secret to inject for the domain. Required."""
    value: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The secret value to inject for the domain. Required."""

    @overload
    def __init__(
        self,
        *,
        domain: str,
        name: str,
        value: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallEnvironment(_Model):
    """FunctionShellCallEnvironment.

    Abstract discriminated base for the environment of a function-shell call resource.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ContainerReferenceResource, LocalEnvironmentResource

    :ivar type: Required. Known values are: "local" and "container_reference".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallEnvironmentType
    """

    # Discriminator registry: maps a wire "type" value to the concrete sub-class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"local\" and \"container_reference\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContainerReferenceResource(FunctionShellCallEnvironment, discriminator="container_reference"):
    """Container Reference.

    Environment that points at an existing container by its ID.

    :ivar type: The environment type. Always ``container_reference``. Required.
     CONTAINER_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_REFERENCE
    :ivar container_id: The ID of the referenced container. Required.
    :vartype container_id: str
    """

    type: Literal[FunctionShellCallEnvironmentType.CONTAINER_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The environment type. Always ``container_reference``. Required. CONTAINER_REFERENCE."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the referenced container. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "container_reference" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = FunctionShellCallEnvironmentType.CONTAINER_REFERENCE  # type: ignore
+
+
class ContainerSkill(_Model):
    """ContainerSkill.

    Abstract discriminated base for a skill attached to a container, either referenced by
    id or supplied inline.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    InlineSkillParam, SkillReferenceParam

    :ivar type: Required. Known values are: "skill_reference" and "inline".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ContainerSkillType
    """

    # Discriminator registry: maps a wire "type" value to the concrete sub-class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"skill_reference\" and \"inline\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContextManagementParam(_Model):
    """ContextManagementParam.

    A single context-management configuration entry for a request.

    :ivar type: The context management entry type. Currently only 'compaction' is supported.
     Required.
    :vartype type: str
    :ivar compact_threshold:
    :vartype compact_threshold: int
    """

    type: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The context management entry type. Currently only 'compaction' is supported. Required."""
    # NOTE(review): units are not described in the spec — presumably a token count at which
    # compaction triggers; confirm against the service documentation.
    compact_threshold: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        type: str,
        compact_threshold: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ConversationParam_2(_Model):
    """Conversation object.

    Request-side reference to a conversation by ID. The ``_2`` suffix is a codegen
    disambiguation (a ``ConversationParam`` union type also exists — see
    ``CreateResponse.conversation``); the wire shape is just ``{"id": ...}``.

    :ivar id: The unique ID of the conversation. Required.
    :vartype id: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the conversation. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ConversationReference(_Model):
    """Conversation.

    Response-side reference to the conversation a response was associated with.

    :ivar id: The unique ID of the conversation that this response was associated with. Required.
    :vartype id: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the conversation that this response was associated with. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CoordParam(_Model):
    """Coordinate.

    An integer (x, y) point, e.g. a position on the computer-use display.

    :ivar x: The x-coordinate. Required.
    :vartype x: int
    :ivar y: The y-coordinate. Required.
    :vartype y: int
    """

    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CreatedBy(_Model):
    """CreatedBy.

    Provenance of an item: the agent and/or the response that produced it. Both fields
    are optional.

    :ivar agent: The agent that created the item.
    :vartype agent: ~azure.ai.responses.server.sdk.models.models.AgentId
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    """

    agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The agent that created the item."""
    response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The response on which the item is created."""

    @overload
    def __init__(
        self,
        *,
        agent: Optional["_models.AgentId"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CreateResponse(_Model):
    """CreateResponse.

    Request payload for creating a response. Every field is optional at the type level;
    the service enforces which combinations are required.

    :ivar metadata:
    :vartype metadata: ~azure.ai.responses.server.sdk.models.models.Metadata
    :ivar top_logprobs:
    :vartype top_logprobs: int
    :ivar temperature:
    :vartype temperature: int
    :ivar top_p:
    :vartype top_p: int
    :ivar user: This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
     ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
     end-users. Used to boost cache hit rates by better bucketing similar requests and to help
     OpenAI detect and prevent abuse. `Learn more
     <https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids>`_.
    :vartype user: str
    :ivar safety_identifier: A stable identifier used to help detect users of your application that
     may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies
     each user, with a maximum length of 64 characters. We recommend hashing their username or email
     address, in order to avoid sending us any identifying information. `Learn more
     <https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids>`_.
    :vartype safety_identifier: str
    :ivar prompt_cache_key: Used by OpenAI to cache responses for similar requests to optimize your
     cache hit rates. Replaces the ``user`` field. `Learn more
     <https://platform.openai.com/docs/guides/prompt-caching>`_.
    :vartype prompt_cache_key: str
    :ivar service_tier: Is one of the following types: Literal["auto"], Literal["default"],
     Literal["flex"], Literal["scale"], Literal["priority"]
    :vartype service_tier: str
    :ivar prompt_cache_retention: Is either a Literal["in-memory"] type or a Literal["24h"] type.
    :vartype prompt_cache_retention: str
    :ivar previous_response_id:
    :vartype previous_response_id: str
    :ivar model: The model deployment to use for the creation of this response.
    :vartype model: str
    :ivar reasoning:
    :vartype reasoning: ~azure.ai.responses.server.sdk.models.models.Reasoning
    :ivar background:
    :vartype background: bool
    :ivar max_output_tokens:
    :vartype max_output_tokens: int
    :ivar max_tool_calls:
    :vartype max_tool_calls: int
    :ivar text:
    :vartype text: ~azure.ai.responses.server.sdk.models.models.ResponseTextParam
    :ivar tools:
    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.Tool]
    :ivar tool_choice: Is either a Union[str, "_models.ToolChoiceOptions"] type or a
     ToolChoiceParam type.
    :vartype tool_choice: str or ~azure.ai.responses.server.sdk.models.models.ToolChoiceOptions or
     ~azure.ai.responses.server.sdk.models.models.ToolChoiceParam
    :ivar prompt:
    :vartype prompt: ~azure.ai.responses.server.sdk.models.models.Prompt
    :ivar truncation: Is either a Literal["auto"] type or a Literal["disabled"] type.
    :vartype truncation: str
    :ivar input: Is either a str type or a [Item] type.
    :vartype input: str or list[~azure.ai.responses.server.sdk.models.models.Item]
    :ivar include:
    :vartype include: list[str or ~azure.ai.responses.server.sdk.models.models.IncludeEnum]
    :ivar parallel_tool_calls:
    :vartype parallel_tool_calls: bool
    :ivar store:
    :vartype store: bool
    :ivar instructions:
    :vartype instructions: str
    :ivar stream:
    :vartype stream: bool
    :ivar stream_options:
    :vartype stream_options: ~azure.ai.responses.server.sdk.models.models.ResponseStreamOptions
    :ivar conversation: Is either a str type or a ConversationParam_2 type.
    :vartype conversation: str or ~azure.ai.responses.server.sdk.models.models.ConversationParam_2
    :ivar context_management: Context management configuration for this request.
    :vartype context_management:
     list[~azure.ai.responses.server.sdk.models.models.ContextManagementParam]
    :ivar agent: (Deprecated) Use agent_reference instead. The agent to use for generating the
     response.
    :vartype agent: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar agent_reference: The agent to use for generating the response.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar structured_inputs: The structured inputs to the response that can participate in prompt
     template substitution or tool argument bindings.
    :vartype structured_inputs: dict[str, any]
    :ivar agent_session_id: Optional session identifier for sandbox affinity. Currently only
     relevant for hosted agents. When provided, the request is routed to the same sandbox. When
     omitted, auto-derived from conversation_id/prev_response_id or a new UUID is generated.
    :vartype agent_session_id: str
    """

    metadata: Optional["_models.Metadata"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # NOTE(review): typed ``int`` by the spec/codegen, but sampling temperature is conventionally
    # a float in [0, 2] — confirm against the OpenAPI definition before tightening validation.
    temperature: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # NOTE(review): typed ``int`` by the spec/codegen, but nucleus-sampling top_p is conventionally
    # a float in (0, 1] — confirm against the OpenAPI definition.
    top_p: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
    ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
    end-users. Used to boost cache hit rates by better bucketing similar requests and to help
    OpenAI detect and prevent abuse. `Learn more
    <https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids>`_."""
    safety_identifier: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A stable identifier used to help detect users of your application that may be violating
    OpenAI's usage policies. The IDs should be a string that uniquely identifies each user, with a
    maximum length of 64 characters. We recommend hashing their username or email address, in order
    to avoid sending us any identifying information. `Learn more
    <https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids>`_."""
    prompt_cache_key: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Used by OpenAI to cache responses for similar requests to optimize your cache hit rates.
    Replaces the ``user`` field. `Learn more
    <https://platform.openai.com/docs/guides/prompt-caching>`_."""
    service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is one of the following types: Literal[\"auto\"], Literal[\"default\"], Literal[\"flex\"],
    Literal[\"scale\"], Literal[\"priority\"]"""
    prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a Literal[\"in-memory\"] type or a Literal[\"24h\"] type."""
    previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The model deployment to use for the creation of this response."""
    reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    text: Optional["_models.ResponseTextParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a ToolChoiceParam type."""
    prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    truncation: Optional[Literal["auto", "disabled"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a Literal[\"auto\"] type or a Literal[\"disabled\"] type."""
    input: Optional["_types.InputParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Is either a str type or a [Item] type."""
    include: Optional[list[Union[str, "_models.IncludeEnum"]]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    parallel_tool_calls: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    store: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    stream: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    stream_options: Optional["_models.ResponseStreamOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    conversation: Optional["_types.ConversationParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a str type or a ConversationParam_2 type."""
    context_management: Optional[list["_models.ContextManagementParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Context management configuration for this request."""
    agent: Optional["_models.AgentReference"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """(Deprecated) Use agent_reference instead. The agent to use for generating the response."""
    agent_reference: Optional["_models.AgentReference"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The agent to use for generating the response."""
    structured_inputs: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The structured inputs to the response that can participate in prompt template substitution or
    tool argument bindings."""
    agent_session_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional session identifier for sandbox affinity. Currently only relevant for hosted agents.
    When provided, the request is routed to the same sandbox. When omitted, auto-derived from
    conversation_id/prev_response_id or a new UUID is generated."""

    @overload
    def __init__(  # pylint: disable=too-many-locals
        self,
        *,
        metadata: Optional["_models.Metadata"] = None,
        top_logprobs: Optional[int] = None,
        temperature: Optional[int] = None,
        top_p: Optional[int] = None,
        user: Optional[str] = None,
        safety_identifier: Optional[str] = None,
        prompt_cache_key: Optional[str] = None,
        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = None,
        prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = None,
        previous_response_id: Optional[str] = None,
        model: Optional[str] = None,
        reasoning: Optional["_models.Reasoning"] = None,
        background: Optional[bool] = None,
        max_output_tokens: Optional[int] = None,
        max_tool_calls: Optional[int] = None,
        text: Optional["_models.ResponseTextParam"] = None,
        tools: Optional[list["_models.Tool"]] = None,
        tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = None,
        prompt: Optional["_models.Prompt"] = None,
        truncation: Optional[Literal["auto", "disabled"]] = None,
        input: Optional["_types.InputParam"] = None,
        include: Optional[list[Union[str, "_models.IncludeEnum"]]] = None,
        parallel_tool_calls: Optional[bool] = None,
        store: Optional[bool] = None,
        instructions: Optional[str] = None,
        stream: Optional[bool] = None,
        stream_options: Optional["_models.ResponseStreamOptions"] = None,
        conversation: Optional["_types.ConversationParam"] = None,
        context_management: Optional[list["_models.ContextManagementParam"]] = None,
        agent: Optional["_models.AgentReference"] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        structured_inputs: Optional[dict[str, Any]] = None,
        agent_session_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CustomToolParamFormat(_Model):
    """The input format for the custom tool. Default is unconstrained text.

    Abstract discriminated base; concrete formats are plain text or a grammar.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    CustomGrammarFormatParam, CustomTextFormatParam

    :ivar type: Required. Known values are: "text" and "grammar".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CustomToolParamFormatType
    """

    # Discriminator registry: maps a wire "type" value to the concrete sub-class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"text\" and \"grammar\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Dispatches between keyword construction and raw-mapping construction (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CustomGrammarFormatParam(CustomToolParamFormat, discriminator="grammar"):
    """Grammar format.

    Constrains custom-tool input to a grammar written in ``lark`` or ``regex`` syntax.

    :ivar type: Grammar format. Always ``grammar``. Required. GRAMMAR.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.GRAMMAR
    :ivar syntax: The syntax of the grammar definition. One of ``lark`` or ``regex``. Required.
     Known values are: "lark" and "regex".
    :vartype syntax: str or ~azure.ai.responses.server.sdk.models.models.GrammarSyntax1
    :ivar definition: The grammar definition. Required.
    :vartype definition: str
    """

    type: Literal[CustomToolParamFormatType.GRAMMAR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Grammar format. Always ``grammar``. Required. GRAMMAR."""
    syntax: Union[str, "_models.GrammarSyntax1"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The syntax of the grammar definition. One of ``lark`` or ``regex``. Required. Known values are:
    \"lark\" and \"regex\"."""
    definition: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The grammar definition. Required."""

    @overload
    def __init__(
        self,
        *,
        syntax: Union[str, "_models.GrammarSyntax1"],
        definition: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "grammar" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = CustomToolParamFormatType.GRAMMAR  # type: ignore
+
+
class CustomTextFormatParam(CustomToolParamFormat, discriminator="text"):
    """Text format.

    Unconstrained text input for a custom tool; carries no fields beyond the discriminator.

    :ivar type: Unconstrained text format. Always ``text``. Required. TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TEXT
    """

    type: Literal[CustomToolParamFormatType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Unconstrained text format. Always ``text``. Required. TEXT."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Pins the discriminator to "text" after base initialization.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.type = CustomToolParamFormatType.TEXT  # type: ignore
+
+
class CustomToolParam(Tool, discriminator="custom"):
    """Custom tool.

    :ivar type: The type of the custom tool. Always ``custom``. Required. CUSTOM.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM
    :ivar name: The name of the custom tool, used to identify it in tool calls. Required.
    :vartype name: str
    :ivar description: Optional description of the custom tool, used to provide more context.
    :vartype description: str
    :ivar format: The input format for the custom tool. Default is unconstrained text.
    :vartype format: ~azure.ai.responses.server.sdk.models.models.CustomToolParamFormat
    """

    type: Literal[ToolType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool. Always ``custom``. Required. CUSTOM."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the custom tool, used to identify it in tool calls. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional description of the custom tool, used to provide more context."""
    # "format" shadows the builtin of the same name; that is intentional here,
    # it mirrors the wire property name.
    format: Optional["_models.CustomToolParamFormat"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The input format for the custom tool. Default is unconstrained text."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: Optional[str] = None,
        format: Optional["_models.CustomToolParamFormat"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="custom".
        self.type = ToolType.CUSTOM  # type: ignore
+
+
class DeleteResponseResult(_Model):
    """The result of a delete response operation.

    :ivar id: The operation ID. Required.
    :vartype id: str
    :ivar deleted: Always return true. Required. Default value is True.
    :vartype deleted: bool
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The operation ID. Required."""
    deleted: Literal[True] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Always return true. Required. Default value is True."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # "deleted" is a constant on this model: it is forced to True after
        # base init so it is always present in serialized output and cannot
        # be overridden by the caller (note it is absent from the overloads).
        self.deleted: Literal[True] = True
+
+
class DoubleClickAction(ComputerAction, discriminator="double_click"):
    """DoubleClick.

    :ivar type: Specifies the event type. For a double click action, this property is always set to
     ``double_click``. Required. DOUBLE_CLICK.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DOUBLE_CLICK
    :ivar x: The x-coordinate where the double click occurred. Required.
    :vartype x: int
    :ivar y: The y-coordinate where the double click occurred. Required.
    :vartype y: int
    """

    type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a double click action, this property is always set to
     ``double_click``. Required. DOUBLE_CLICK."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate where the double click occurred. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate where the double click occurred. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="double_click".
        self.type = ComputerActionType.DOUBLE_CLICK  # type: ignore
+
+
class DragParam(ComputerAction, discriminator="drag"):
    """Drag.

    :ivar type: Specifies the event type. For a drag action, this property is always set to
     ``drag``. Required. DRAG.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DRAG
    :ivar path: An array of coordinates representing the path of the drag action. Coordinates will
     appear as an array of objects, eg

     .. code-block::

        [
          { x: 100, y: 200 },
          { x: 200, y: 300 }
        ]. Required.
    :vartype path: list[~azure.ai.responses.server.sdk.models.models.CoordParam]
    """

    type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a drag action, this property is always set to ``drag``. Required.
     DRAG."""
    path: list["_models.CoordParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An array of coordinates representing the path of the drag action. Coordinates will appear as an
     array of objects, eg

     .. code-block::

        [
          { x: 100, y: 200 },
          { x: 200, y: 300 }
        ]. Required."""

    @overload
    def __init__(
        self,
        *,
        path: list["_models.CoordParam"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="drag".
        self.type = ComputerActionType.DRAG  # type: ignore
+
+
class Error(_Model):
    """Error.

    :ivar code: Required.
    :vartype code: str
    :ivar message: Required.
    :vartype message: str
    :ivar param:
    :vartype param: str
    :ivar type:
    :vartype type: str
    :ivar details:
    :vartype details: list[~azure.ai.responses.server.sdk.models.models.Error]
    :ivar additional_info:
    :vartype additional_info: dict[str, any]
    :ivar debug_info:
    :vartype debug_info: dict[str, any]
    """

    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    param: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    type: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # "details" nests further Error instances, allowing an error tree.
    details: Optional[list["_models.Error"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Wire names are camelCase ("additionalInfo"/"debugInfo"); the Python
    # attributes stay snake_case via the rest_field name= mapping.
    additional_info: Optional[dict[str, Any]] = rest_field(
        name="additionalInfo", visibility=["read", "create", "update", "delete", "query"]
    )
    debug_info: Optional[dict[str, Any]] = rest_field(
        name="debugInfo", visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        code: str,
        message: str,
        param: Optional[str] = None,
        type: Optional[str] = None,
        details: Optional[list["_models.Error"]] = None,
        additional_info: Optional[dict[str, Any]] = None,
        debug_info: Optional[dict[str, Any]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword arguments (first overload) or a single raw
        # JSON mapping (second overload); the base model handles both.
        super().__init__(*args, **kwargs)
+
+
class FabricDataAgentToolCall(OutputItem, discriminator="fabric_dataagent_preview_call"):
    """A Fabric data agent tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. FABRIC_DATAAGENT_PREVIEW_CALL.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FABRIC_DATAAGENT_PREVIEW_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. FABRIC_DATAAGENT_PREVIEW_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    # "arguments" is a raw JSON string, not a parsed mapping.
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
     \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="fabric_dataagent_preview_call".
        self.type = OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL  # type: ignore
+
+
class FabricDataAgentToolCallOutput(OutputItem, discriminator="fabric_dataagent_preview_call_output"):
    """The output of a Fabric data agent tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the Fabric data agent tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    # Union alias: ToolCallOutputContent covers dict / str / list payloads.
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Fabric data agent tool call. Is one of the following types: {str: Any},
     str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
     \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="fabric_dataagent_preview_call_output".
        self.type = OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT  # type: ignore
+
+
class FabricDataAgentToolParameters(_Model):
    """The fabric data agent tool parameters.

    :ivar project_connections: The project connections attached to this tool. There can be a
     maximum of 1 connection resource attached to the tool.
    :vartype project_connections:
     list[~azure.ai.responses.server.sdk.models.models.ToolProjectConnection]
    """

    # NOTE(review): the docstring says at most 1 connection is allowed, but no
    # validation is enforced here — presumably the service rejects extras.
    project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The project connections attached to this tool. There can be a maximum of 1 connection resource
     attached to the tool."""

    @overload
    def __init__(
        self,
        *,
        project_connections: Optional[list["_models.ToolProjectConnection"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword arguments (first overload) or a single raw
        # JSON mapping (second overload); the base model handles both.
        super().__init__(*args, **kwargs)
+
+
class FileCitationBody(Annotation, discriminator="file_citation"):
    """File citation.

    :ivar type: The type of the file citation. Always ``file_citation``. Required. FILE_CITATION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_CITATION
    :ivar file_id: The ID of the file. Required.
    :vartype file_id: str
    :ivar index: The index of the file in the list of files. Required.
    :vartype index: int
    :ivar filename: The filename of the file cited. Required.
    :vartype filename: str
    """

    type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file citation. Always ``file_citation``. Required. FILE_CITATION."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the file. Required."""
    index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the file in the list of files. Required."""
    filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The filename of the file cited. Required."""

    @overload
    def __init__(
        self,
        *,
        file_id: str,
        index: int,
        filename: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="file_citation".
        self.type = AnnotationType.FILE_CITATION  # type: ignore
+
+
class FilePath(Annotation, discriminator="file_path"):
    """File path.

    :ivar type: The type of the file path. Always ``file_path``. Required. FILE_PATH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_PATH
    :ivar file_id: The ID of the file. Required.
    :vartype file_id: str
    :ivar index: The index of the file in the list of files. Required.
    :vartype index: int
    """

    type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file path. Always ``file_path``. Required. FILE_PATH."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the file. Required."""
    index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the file in the list of files. Required."""

    @overload
    def __init__(
        self,
        *,
        file_id: str,
        index: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="file_path".
        self.type = AnnotationType.FILE_PATH  # type: ignore
+
+
class FileSearchTool(Tool, discriminator="file_search"):
    """File search.

    :ivar type: The type of the file search tool. Always ``file_search``. Required. FILE_SEARCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH
    :ivar vector_store_ids: The IDs of the vector stores to search. Required.
    :vartype vector_store_ids: list[str]
    :ivar max_num_results: The maximum number of results to return. This number should be between 1
     and 50 inclusive.
    :vartype max_num_results: int
    :ivar ranking_options: Ranking options for search.
    :vartype ranking_options: ~azure.ai.responses.server.sdk.models.models.RankingOptions
    :ivar filters: Is either a ComparisonFilter type or a CompoundFilter type.
    :vartype filters: ~azure.ai.responses.server.sdk.models.models.ComparisonFilter or
     ~azure.ai.responses.server.sdk.models.models.CompoundFilter
    """

    type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file search tool. Always ``file_search``. Required. FILE_SEARCH."""
    vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The IDs of the vector stores to search. Required."""
    # NOTE(review): the 1-50 bound from the docstring is not validated client
    # side — presumably enforced by the service.
    max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The maximum number of results to return. This number should be between 1 and 50 inclusive."""
    ranking_options: Optional["_models.RankingOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Ranking options for search."""
    filters: Optional["_types.Filters"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Is either a ComparisonFilter type or a CompoundFilter type."""

    @overload
    def __init__(
        self,
        *,
        vector_store_ids: list[str],
        max_num_results: Optional[int] = None,
        ranking_options: Optional["_models.RankingOptions"] = None,
        filters: Optional["_types.Filters"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="file_search".
        self.type = ToolType.FILE_SEARCH  # type: ignore
+
+
class FileSearchToolCallResults(_Model):
    """FileSearchToolCallResults.

    :ivar file_id: The ID of the file the result came from.
    :vartype file_id: str
    :ivar text: The matched text content.
    :vartype text: str
    :ivar filename: The filename of the file the result came from.
    :vartype filename: str
    :ivar attributes: User-defined attributes attached to the vector store file.
    :vartype attributes: ~azure.ai.responses.server.sdk.models.models.VectorStoreFileAttributes
    :ivar score: The relevance score of the result.
    :vartype score: float
    """

    # All fields are optional: the service may omit any of them per result.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        text: Optional[str] = None,
        filename: Optional[str] = None,
        attributes: Optional["_models.VectorStoreFileAttributes"] = None,
        score: Optional[float] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Accepts either keyword arguments (first overload) or a single raw
        # JSON mapping (second overload); the base model handles both.
        super().__init__(*args, **kwargs)
+
+
class FunctionAndCustomToolCallOutput(_Model):
    """FunctionAndCustomToolCallOutput.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionAndCustomToolCallOutputInputFileContent,
    FunctionAndCustomToolCallOutputInputImageContent,
    FunctionAndCustomToolCallOutputInputTextContent

    :ivar type: Required. Known values are: "input_text", "input_image", and "input_file".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutputType
    """

    # Discriminator registry: subclasses declared with ``discriminator="..."``
    # register themselves here — presumably consulted by the base model when
    # deserializing polymorphic payloads; confirm against the _Model machinery.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"input_text\", \"input_image\", and \"input_file\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Base polymorphic model: "type" is caller-supplied here; subclasses
        # pin it to their own discriminator value.
        super().__init__(*args, **kwargs)
+
+
class FunctionAndCustomToolCallOutputInputFileContent(
    FunctionAndCustomToolCallOutput, discriminator="input_file"
):  # pylint: disable=name-too-long
    """Input file.

    :ivar type: The type of the input item. Always ``input_file``. Required. INPUT_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_FILE
    :ivar file_id: The ID of an already-uploaded file to reference.
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    """

    type: Literal[FunctionAndCustomToolCallOutputType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the input item. Always ``input_file``. Required. INPUT_FILE."""
    # The file may be supplied by ID, URL, or inline data; all are optional
    # here with no client-side mutual-exclusion check.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the file to be sent to the model."""
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the file to be sent to the model."""
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the file to be sent to the model."""

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_url: Optional[str] = None,
        file_data: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="input_file".
        self.type = FunctionAndCustomToolCallOutputType.INPUT_FILE  # type: ignore
+
+
class FunctionAndCustomToolCallOutputInputImageContent(
    FunctionAndCustomToolCallOutput, discriminator="input_image"
):  # pylint: disable=name-too-long
    """Input image.

    :ivar type: The type of the input item. Always ``input_image``. Required. INPUT_IMAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_IMAGE
    :ivar image_url: The URL of the image to be sent to the model.
    :vartype image_url: str
    :ivar file_id: The ID of an already-uploaded image file to reference.
    :vartype file_id: str
    :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``,
     or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.ImageDetail
    """

    type: Literal[FunctionAndCustomToolCallOutputType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the input item. Always ``input_image``. Required. INPUT_IMAGE."""
    # The image may be supplied by URL or by uploaded-file ID; both optional.
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``.
     Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        detail: Union[str, "_models.ImageDetail"],
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="input_image".
        self.type = FunctionAndCustomToolCallOutputType.INPUT_IMAGE  # type: ignore
+
+
class FunctionAndCustomToolCallOutputInputTextContent(
    FunctionAndCustomToolCallOutput, discriminator="input_text"
):  # pylint: disable=name-too-long
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. INPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_TEXT
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal[FunctionAndCustomToolCallOutputType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the input item. Always ``input_text``. Required. INPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="input_text".
        self.type = FunctionAndCustomToolCallOutputType.INPUT_TEXT  # type: ignore
+
+
class FunctionCallOutputItemParam(Item, discriminator="function_call_output"):
    """Function tool call output.

    :ivar id: The unique ID of this item, if assigned.
    :vartype id: str
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar type: The type of the function tool call output. Always ``function_call_output``.
     Required. FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL_OUTPUT
    :ivar output: Text, image, or file output of the function tool call. Required. Is either a str
     type or a [Union["_models.InputTextContentParam", "_models.InputImageContentParamAutoParam",
     "_models.InputFileContentParam"]] type.
    :vartype output: str or list[~azure.ai.responses.server.sdk.models.models.InputTextContentParam
     or ~azure.ai.responses.server.sdk.models.models.InputImageContentParamAutoParam or
     ~azure.ai.responses.server.sdk.models.models.InputFileContentParam]
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.FunctionCallItemStatus
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call output. Always ``function_call_output``. Required.
     FUNCTION_CALL_OUTPUT."""
    # "output" is either a plain string, or a list of typed content parts
    # (text / image / file).
    output: Union[
        str,
        list[
            Union[
                "_models.InputTextContentParam",
                "_models.InputImageContentParamAutoParam",
                "_models.InputFileContentParam",
            ]
        ],
    ] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Text, image, or file output of the function tool call. Required. Is either a str type or a
     [Union[\"_models.InputTextContentParam\", \"_models.InputImageContentParamAutoParam\",
     \"_models.InputFileContentParam\"]] type."""
    status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[
            str,
            list[
                Union[
                    "_models.InputTextContentParam",
                    "_models.InputImageContentParamAutoParam",
                    "_models.InputFileContentParam",
                ]
            ],
        ],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the discriminator after base init so serialized payloads
        # always carry type="function_call_output".
        self.type = ItemType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class FunctionShellAction(_Model):
    """Shell exec action.

    :ivar commands: Ordered shell commands to run. Required.
    :vartype commands: list[str]
    :ivar timeout_ms: Timeout budget for the execution, in milliseconds. Required.
    :vartype timeout_ms: int
    :ivar max_output_length: Maximum length of captured output. Required.
    :vartype max_output_length: int
    """

    commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    timeout_ms: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        commands: list[str],
        timeout_ms: int,
        max_output_length: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Unlike FunctionShellActionParam (the request-side twin), every
        # field on this response-side model is required.
        super().__init__(*args, **kwargs)
+
+
class FunctionShellActionParam(_Model):
    """Shell action (request-side variant: timeout and output limits are optional).

    :ivar commands: Ordered shell commands for the execution environment to run. Required.
    :vartype commands: list[str]
    :ivar timeout_ms:
    :vartype timeout_ms: int
    :ivar max_output_length:
    :vartype max_output_length: int
    """

    commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Ordered shell commands for the execution environment to run. Required."""
    # timeout_ms / max_output_length are optional here, unlike FunctionShellAction
    # where both are required.
    timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    max_output_length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        commands: list[str],
        timeout_ms: Optional[int] = None,
        max_output_length: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime entry point; accepts either keyword fields or a raw-JSON mapping.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallItemParam(Item, discriminator="shell_call"):
    """Shell tool call.

    :ivar id:
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL
    :ivar action: The shell commands and limits that describe how to run the tool call. Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.FunctionShellActionParam
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemStatus
    :ivar environment:
    :vartype environment:
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemParamEnvironment
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    # Discriminator field: identifies this Item subtype on the wire as "shell_call".
    type: Literal[ItemType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``shell_call``. Required. SHELL_CALL."""
    action: "_models.FunctionShellActionParam" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The shell commands and limits that describe how to run the tool call. Required."""
    status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    environment: Optional["_models.FunctionShellCallItemParamEnvironment"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        action: "_models.FunctionShellActionParam",
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = None,
        environment: Optional["_models.FunctionShellCallItemParamEnvironment"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = ItemType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class FunctionShellCallItemParamEnvironment(_Model):
    """The environment to execute the shell commands in.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionShellCallItemParamEnvironmentContainerReferenceParam,
    FunctionShellCallItemParamEnvironmentLocalEnvironmentParam

    :ivar type: Required. Known values are: "local" and "container_reference".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemParamEnvironmentType
    """

    # Discriminator registry: presumably filled in by the _Model machinery with
    # subclasses keyed by their discriminator value — confirm against the model base.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"local\" and \"container_reference\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallItemParamEnvironmentContainerReferenceParam(
    FunctionShellCallItemParamEnvironment, discriminator="container_reference"
):  # pylint: disable=name-too-long
    """Container-backed shell execution environment (discriminator ``container_reference``).

    :ivar type: References a container created with the /v1/containers endpoint. Required.
     CONTAINER_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_REFERENCE
    :ivar container_id: The ID of the referenced container. Required.
    :vartype container_id: str
    """

    type: Literal[FunctionShellCallItemParamEnvironmentType.CONTAINER_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """References a container created with the /v1/containers endpoint. Required. CONTAINER_REFERENCE."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the referenced container. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellCallItemParamEnvironmentType.CONTAINER_REFERENCE  # type: ignore
+
+
class FunctionShellCallItemParamEnvironmentLocalEnvironmentParam(
    FunctionShellCallItemParamEnvironment, discriminator="local"
):  # pylint: disable=name-too-long
    """Local shell execution environment (discriminator ``local``).

    :ivar type: Use a local computer environment. Required. LOCAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL
    :ivar skills: An optional list of skills.
    :vartype skills: list[~azure.ai.responses.server.sdk.models.models.LocalSkillParam]
    """

    type: Literal[FunctionShellCallItemParamEnvironmentType.LOCAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Use a local computer environment. Required. LOCAL."""
    skills: Optional[list["_models.LocalSkillParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An optional list of skills."""

    @overload
    def __init__(
        self,
        *,
        skills: Optional[list["_models.LocalSkillParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellCallItemParamEnvironmentType.LOCAL  # type: ignore
+
+
class FunctionShellCallOutputContent(_Model):
    """Shell call output content.

    :ivar stdout: The standard output that was captured. Required.
    :vartype stdout: str
    :ivar stderr: The standard error output that was captured. Required.
    :vartype stderr: str
    :ivar outcome: Shell call outcome. Required.
    :vartype outcome: ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcome
    :ivar created_by: The identifier of the actor that created the item.
    :vartype created_by: str
    """

    stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The standard output that was captured. Required."""
    stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The standard error output that was captured. Required."""
    # Polymorphic: deserializes to an exit or timeout outcome subtype via the
    # outcome's "type" discriminator.
    outcome: "_models.FunctionShellCallOutputOutcome" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Shell call outcome. Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of the actor that created the item."""

    @overload
    def __init__(
        self,
        *,
        stdout: str,
        stderr: str,
        outcome: "_models.FunctionShellCallOutputOutcome",
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputContentParam(_Model):
    """Shell output content (request-side variant; no ``created_by`` field).

    :ivar stdout: Captured stdout output for the shell call. Required.
    :vartype stdout: str
    :ivar stderr: Captured stderr output for the shell call. Required.
    :vartype stderr: str
    :ivar outcome: The exit or timeout outcome associated with this shell call. Required.
    :vartype outcome:
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcomeParam
    """

    stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Captured stdout output for the shell call. Required."""
    stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Captured stderr output for the shell call. Required."""
    outcome: "_models.FunctionShellCallOutputOutcomeParam" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The exit or timeout outcome associated with this shell call. Required."""

    @overload
    def __init__(
        self,
        *,
        stdout: str,
        stderr: str,
        outcome: "_models.FunctionShellCallOutputOutcomeParam",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputOutcome(_Model):
    """Shell call outcome.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionShellCallOutputExitOutcome, FunctionShellCallOutputTimeoutOutcome

    :ivar type: Required. Known values are: "timeout" and "exit".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcomeType
    """

    # Discriminator registry: presumably filled in by the _Model machinery with
    # subclasses keyed by their discriminator value — confirm against the model base.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"timeout\" and \"exit\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputExitOutcome(FunctionShellCallOutputOutcome, discriminator="exit"):
    """Shell call exit outcome.

    :ivar type: The outcome type. Always ``exit``. Required. EXIT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.EXIT
    :ivar exit_code: Exit code from the shell process. Required.
    :vartype exit_code: int
    """

    type: Literal[FunctionShellCallOutputOutcomeType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``exit``. Required. EXIT."""
    exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Exit code from the shell process. Required."""

    @overload
    def __init__(
        self,
        *,
        exit_code: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellCallOutputOutcomeType.EXIT  # type: ignore
+
+
class FunctionShellCallOutputOutcomeParam(_Model):
    """Shell call outcome (request-side variant).

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionShellCallOutputExitOutcomeParam, FunctionShellCallOutputTimeoutOutcomeParam

    :ivar type: Required. Known values are: "timeout" and "exit".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcomeParamType
    """

    # Discriminator registry: presumably filled in by the _Model machinery with
    # subclasses keyed by their discriminator value — confirm against the model base.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"timeout\" and \"exit\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputExitOutcomeParam(FunctionShellCallOutputOutcomeParam, discriminator="exit"):
    """Shell call exit outcome.

    :ivar type: The outcome type. Always ``exit``. Required. EXIT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.EXIT
    :ivar exit_code: The exit code returned by the shell process. Required.
    :vartype exit_code: int
    """

    type: Literal[FunctionShellCallOutputOutcomeParamType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``exit``. Required. EXIT."""
    exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The exit code returned by the shell process. Required."""

    @overload
    def __init__(
        self,
        *,
        exit_code: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellCallOutputOutcomeParamType.EXIT  # type: ignore
+
+
class FunctionShellCallOutputItemParam(Item, discriminator="shell_call_output"):
    """Shell tool call output.

    :ivar id:
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar type: The type of the item. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL_OUTPUT
    :ivar output: Captured chunks of stdout and stderr output, along with their associated
     outcomes. Required.
    :vartype output:
     list[~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputContentParam]
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemStatus
    :ivar max_output_length:
    :vartype max_output_length: int
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    # Discriminator field: identifies this Item subtype on the wire as "shell_call_output".
    type: Literal[ItemType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT."""
    output: list["_models.FunctionShellCallOutputContentParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Captured chunks of stdout and stderr output, along with their associated outcomes. Required."""
    status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    max_output_length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: list["_models.FunctionShellCallOutputContentParam"],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = None,
        max_output_length: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = ItemType.SHELL_CALL_OUTPUT  # type: ignore
+
+
class FunctionShellCallOutputTimeoutOutcome(FunctionShellCallOutputOutcome, discriminator="timeout"):
    """Shell call timeout outcome.

    :ivar type: The outcome type. Always ``timeout``. Required. TIMEOUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TIMEOUT
    """

    type: Literal[FunctionShellCallOutputOutcomeType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``timeout``. Required. TIMEOUT."""

    # No extra fields beyond the discriminator, so the keyword overload takes
    # no arguments.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellCallOutputOutcomeType.TIMEOUT  # type: ignore
+
+
class FunctionShellCallOutputTimeoutOutcomeParam(
    FunctionShellCallOutputOutcomeParam, discriminator="timeout"
):  # pylint: disable=name-too-long
    """Shell call timeout outcome.

    :ivar type: The outcome type. Always ``timeout``. Required. TIMEOUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TIMEOUT
    """

    type: Literal[FunctionShellCallOutputOutcomeParamType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``timeout``. Required. TIMEOUT."""

    # No extra fields beyond the discriminator, so the keyword overload takes
    # no arguments.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellCallOutputOutcomeParamType.TIMEOUT  # type: ignore
+
+
class FunctionShellToolParam(Tool, discriminator="shell"):
    """Shell tool.

    :ivar type: The type of the shell tool. Always ``shell``. Required. SHELL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL
    :ivar environment:
    :vartype environment:
     ~azure.ai.responses.server.sdk.models.models.FunctionShellToolParamEnvironment
    """

    type: Literal[ToolType.SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the shell tool. Always ``shell``. Required. SHELL."""
    environment: Optional["_models.FunctionShellToolParamEnvironment"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        environment: Optional["_models.FunctionShellToolParamEnvironment"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = ToolType.SHELL  # type: ignore
+
+
# NOTE(review): the base class FunctionShellToolParamEnvironment is not defined in
# this region — it is expected to exist elsewhere in this module; verify.
class FunctionShellToolParamEnvironmentContainerReferenceParam(
    FunctionShellToolParamEnvironment, discriminator="container_reference"
):  # pylint: disable=name-too-long
    """Container-backed shell tool environment (discriminator ``container_reference``).

    :ivar type: References a container created with the /v1/containers endpoint. Required.
     CONTAINER_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_REFERENCE
    :ivar container_id: The ID of the referenced container. Required.
    :vartype container_id: str
    """

    type: Literal[FunctionShellToolParamEnvironmentType.CONTAINER_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """References a container created with the /v1/containers endpoint. Required. CONTAINER_REFERENCE."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the referenced container. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellToolParamEnvironmentType.CONTAINER_REFERENCE  # type: ignore
+
+
class FunctionShellToolParamEnvironmentLocalEnvironmentParam(
    FunctionShellToolParamEnvironment, discriminator="local"
):  # pylint: disable=name-too-long
    """Local shell tool environment (discriminator ``local``).

    :ivar type: Use a local computer environment. Required. LOCAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL
    :ivar skills: An optional list of skills.
    :vartype skills: list[~azure.ai.responses.server.sdk.models.models.LocalSkillParam]
    """

    type: Literal[FunctionShellToolParamEnvironmentType.LOCAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Use a local computer environment. Required. LOCAL."""
    skills: Optional[list["_models.LocalSkillParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An optional list of skills."""

    @overload
    def __init__(
        self,
        *,
        skills: Optional[list["_models.LocalSkillParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = FunctionShellToolParamEnvironmentType.LOCAL  # type: ignore
+
+
class FunctionTool(Tool, discriminator="function"):
    """Function.

    :ivar type: The type of the function tool. Always ``function``. Required. FUNCTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION
    :ivar name: The name of the function to call. Required.
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar parameters: Required.
    :vartype parameters: dict[str, any]
    :ivar strict: Required.
    :vartype strict: bool
    """

    type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool. Always ``function``. Required. FUNCTION."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to call. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Free-form dict; presumably a JSON Schema describing the function's
    # arguments — TODO confirm against the service spec.
    parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        parameters: dict[str, Any],
        strict: bool,
        description: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = ToolType.FUNCTION  # type: ignore
+
+
class HybridSearchOptions(_Model):
    """Weights used to combine embedding and text scores in reciprocal ranking fusion.

    :ivar embedding_weight: The weight of the embedding in the reciprocal ranking fusion. Required.
    :vartype embedding_weight: int
    :ivar text_weight: The weight of the text in the reciprocal ranking fusion. Required.
    :vartype text_weight: int
    """

    embedding_weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The weight of the embedding in the reciprocal ranking fusion. Required."""
    text_weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The weight of the text in the reciprocal ranking fusion. Required."""

    @overload
    def __init__(
        self,
        *,
        embedding_weight: int,
        text_weight: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ImageGenTool(Tool, discriminator="image_generation"):
    """Image generation tool.

    :ivar type: The type of the image generation tool. Always ``image_generation``. Required.
     IMAGE_GENERATION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION
    :ivar model: Is one of the following types: Literal["gpt-image-1"],
     Literal["gpt-image-1-mini"], Literal["gpt-image-1.5"], str
    :vartype model: str or str or str or str
    :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, or
     ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], Literal["medium"],
     Literal["high"], Literal["auto"]
    :vartype quality: str or str or str or str
    :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``,
     ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types:
     Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"]
    :vartype size: str or str or str or str
    :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or
     ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"],
     Literal["jpeg"]
    :vartype output_format: str or str or str
    :ivar output_compression: Compression level for the output image. Default: 100.
    :vartype output_compression: int
    :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a
     Literal["auto"] type or a Literal["low"] type.
    :vartype moderation: str or str
    :ivar background: Background type for the generated image. One of ``transparent``, ``opaque``,
     or ``auto``. Default: ``auto``. Is one of the following types: Literal["transparent"],
     Literal["opaque"], Literal["auto"]
    :vartype background: str or str or str
    :ivar input_fidelity: Known values are: "high" and "low".
    :vartype input_fidelity: str or ~azure.ai.responses.server.sdk.models.models.InputFidelity
    :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` (string, optional)
     and ``file_id`` (string, optional).
    :vartype input_image_mask:
     ~azure.ai.responses.server.sdk.models.models.ImageGenToolInputImageMask
    :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default
     value) to 3.
    :vartype partial_images: int
    :ivar action: Whether to generate a new image or edit an existing image. Default: ``auto``.
     Known values are: "generate", "edit", and "auto".
    :vartype action: str or ~azure.ai.responses.server.sdk.models.models.ImageGenActionEnum
    """

    type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the image generation tool. Always ``image_generation``. Required. IMAGE_GENERATION."""
    # Open union: known model names as literals, with a trailing `str` arm so
    # unrecognized model names are still accepted.
    model: Optional[Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], Literal["gpt-image-1.5"], str]] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """Is one of the following types: Literal[\"gpt-image-1\"], Literal[\"gpt-image-1-mini\"],
    Literal[\"gpt-image-1.5\"], str"""
    quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The quality of the generated image. One of ``low``, ``medium``, ``high``, or ``auto``. Default:
    ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"medium\"],
    Literal[\"high\"], Literal[\"auto\"]"""
    size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The size of the generated image. One of ``1024x1024``, ``1024x1536``, ``1536x1024``, or
    ``auto``. Default: ``auto``. Is one of the following types: Literal[\"1024x1024\"],
    Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]"""
    output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output format of the generated image. One of ``png``, ``webp``, or ``jpeg``. Default:
    ``png``. Is one of the following types: Literal[\"png\"], Literal[\"webp\"], Literal[\"jpeg\"]"""
    output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Compression level for the output image. Default: 100."""
    moderation: Optional[Literal["auto", "low"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type
    or a Literal[\"low\"] type."""
    background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Background type for the generated image. One of ``transparent``, ``opaque``, or ``auto``.
    Default: ``auto``. Is one of the following types: Literal[\"transparent\"],
    Literal[\"opaque\"], Literal[\"auto\"]"""
    input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"high\" and \"low\"."""
    input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional mask for inpainting. Contains ``image_url`` (string, optional) and ``file_id``
    (string, optional)."""
    partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Number of partial images to generate in streaming mode, from 0 (default value) to 3."""
    action: Optional[Union[str, "_models.ImageGenActionEnum"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Whether to generate a new image or edit an existing image. Default: ``auto``. Known values are:
    \"generate\", \"edit\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        model: Optional[
            Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], Literal["gpt-image-1.5"], str]
        ] = None,
        quality: Optional[Literal["low", "medium", "high", "auto"]] = None,
        size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None,
        output_format: Optional[Literal["png", "webp", "jpeg"]] = None,
        output_compression: Optional[int] = None,
        moderation: Optional[Literal["auto", "low"]] = None,
        background: Optional[Literal["transparent", "opaque", "auto"]] = None,
        input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = None,
        input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None,
        partial_images: Optional[int] = None,
        action: Optional[Union[str, "_models.ImageGenActionEnum"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's fixed value.
        self.type = ToolType.IMAGE_GENERATION  # type: ignore
+
+
class ImageGenToolInputImageMask(_Model):
    """Optional mask for inpainting used by the image generation tool.

    :ivar image_url: Optional mask image URL.
    :vartype image_url: str
    :ivar file_id: Optional ID of an uploaded file holding the mask.
    :vartype file_id: str
    """

    # Both fields are optional: the mask may be supplied by URL or by file ID.
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
class InlineSkillParam(ContainerSkill, discriminator="inline"):
    """An inline skill supplied directly on the request.

    :ivar type: Defines an inline skill for this request. Required. INLINE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INLINE
    :ivar name: The name of the skill. Required.
    :vartype name: str
    :ivar description: The description of the skill. Required.
    :vartype description: str
    :ivar source: Inline skill payload. Required.
    :vartype source: ~azure.ai.responses.server.sdk.models.models.InlineSkillSourceParam
    """

    type: Literal[ContainerSkillType.INLINE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Defines an inline skill for this request. Required. INLINE."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the skill. Required."""
    description: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The description of the skill. Required."""
    source: "_models.InlineSkillSourceParam" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Inline skill payload. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: str,
        source: "_models.InlineSkillSourceParam",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ContainerSkillType.INLINE  # type: ignore
+
class InlineSkillSourceParam(_Model):
    """Inline skill payload.

    :ivar type: The type of the inline skill source. Must be ``base64``. Required. Default value is
     "base64".
    :vartype type: str
    :ivar media_type: The media type of the inline skill payload. Must be ``application/zip``.
     Required. Default value is "application/zip".
    :vartype media_type: str
    :ivar data: Base64-encoded skill zip bundle. Required.
    :vartype data: str
    """

    type: Literal["base64"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the inline skill source. Must be ``base64``. Required. Default value is \"base64\"."""
    media_type: Literal["application/zip"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The media type of the inline skill payload. Must be ``application/zip``. Required. Default
    value is \"application/zip\"."""
    data: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Base64-encoded skill zip bundle. Required."""

    @overload
    def __init__(
        self,
        *,
        data: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant wire-format tags: pinned after base init so callers cannot
        # override them (the keyword overload only accepts ``data``).
        self.type: Literal["base64"] = "base64"
        self.media_type: Literal["application/zip"] = "application/zip"
+
class InputFileContent(_Model):
    """Input file.

    :ivar type: The type of the input item. Always ``input_file``. Required. Default value is
     "input_file".
    :vartype type: str
    :ivar file_id:
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    """

    type: Literal["input_file"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_file``. Required. Default value is \"input_file\"."""
    # The spec provides no description for file_id; presumably an uploaded-file
    # reference — confirm against the service documentation.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the file to be sent to the model."""
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the file to be sent to the model."""
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the file to be sent to the model."""

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_url: Optional[str] = None,
        file_data: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant discriminating tag; not settable by callers.
        self.type: Literal["input_file"] = "input_file"
+
class InputFileContentParam(_Model):
    """Input file.

    :ivar type: The type of the input item. Always ``input_file``. Required. Default value is
     "input_file".
    :vartype type: str
    :ivar file_id:
    :vartype file_id: str
    :ivar filename:
    :vartype filename: str
    :ivar file_data:
    :vartype file_data: str
    :ivar file_url:
    :vartype file_url: str
    """

    type: Literal["input_file"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_file``. Required. Default value is \"input_file\"."""
    # The spec provides no descriptions for these optional fields; their names
    # mirror InputFileContent's file_id/filename/file_data/file_url.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_data: Optional[str] = None,
        file_url: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant discriminating tag; not settable by callers.
        self.type: Literal["input_file"] = "input_file"
+
class InputImageContent(_Model):
    """Input image.

    :ivar type: The type of the input item. Always ``input_image``. Required. Default value is
     "input_image".
    :vartype type: str
    :ivar image_url:
    :vartype image_url: str
    :ivar file_id:
    :vartype file_id: str
    :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``,
     or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.ImageDetail
    """

    type: Literal["input_image"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_image``. Required. Default value is \"input_image\"."""
    # The spec provides no descriptions for image_url/file_id; the image may be
    # referenced by URL or by uploaded-file ID — confirm against the service docs.
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``.
    Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        detail: Union[str, "_models.ImageDetail"],
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant discriminating tag; not settable by callers.
        self.type: Literal["input_image"] = "input_image"
+
class InputImageContentParamAutoParam(_Model):
    """Input image.

    :ivar type: The type of the input item. Always ``input_image``. Required. Default value is
     "input_image".
    :vartype type: str
    :ivar image_url:
    :vartype image_url: str
    :ivar file_id:
    :vartype file_id: str
    :ivar detail: Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.DetailEnum
    """

    type: Literal["input_image"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_image``. Required. Default value is \"input_image\"."""
    # Unlike InputImageContent, ``detail`` is optional here (DetailEnum-typed).
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Optional[Union[str, "_models.DetailEnum"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
        detail: Optional[Union[str, "_models.DetailEnum"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant discriminating tag; not settable by callers.
        self.type: Literal["input_image"] = "input_image"
+
class InputTextContent(_Model):
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. Default value is
     "input_text".
    :vartype type: str
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal["input_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_text``. Required. Default value is \"input_text\"."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant discriminating tag; not settable by callers.
        self.type: Literal["input_text"] = "input_text"
+
class InputTextContentParam(_Model):
    """Input text (request-parameter variant; identical shape to InputTextContent).

    :ivar type: The type of the input item. Always ``input_text``. Required. Default value is
     "input_text".
    :vartype type: str
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal["input_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_text``. Required. Default value is \"input_text\"."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant discriminating tag; not settable by callers.
        self.type: Literal["input_text"] = "input_text"
+
class ItemCodeInterpreterToolCall(Item, discriminator="code_interpreter_call"):
    """Code interpreter tool call.

    :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``.
     Required. CODE_INTERPRETER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER_CALL
    :ivar id: The unique ID of the code interpreter tool call. Required.
    :vartype id: str
    :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``,
     ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"],
     Literal["interpreting"], Literal["failed"]
    :vartype status: str
    :ivar container_id: The ID of the container used to run the code. Required.
    :vartype container_id: str
    :ivar code: Required.
    :vartype code: str
    :ivar outputs: Required.
    :vartype outputs: list[~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputLogs
     or ~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputImage]
    """

    type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.
    CODE_INTERPRETER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the code interpreter tool call. Required."""
    status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``,
    ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"],
    Literal[\"interpreting\"], Literal[\"failed\"]"""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the container used to run the code. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"],
        container_id: str,
        code: str,
        outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ItemType.CODE_INTERPRETER_CALL  # type: ignore
+
class ItemComputerToolCall(Item, discriminator="computer_call"):
    """Computer tool call.

    :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL
    :ivar id: The unique ID of the computer call. Required.
    :vartype id: str
    :ivar call_id: An identifier used when responding to the tool call with output. Required.
    :vartype call_id: str
    :ivar action: Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.ComputerAction
    :ivar pending_safety_checks: The pending safety checks for the computer call. Required.
    :vartype pending_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the computer call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used when responding to the tool call with output. Required."""
    action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The pending safety checks for the computer call. Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.ComputerAction",
        pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"],
        status: Literal["in_progress", "completed", "incomplete"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ItemType.COMPUTER_CALL  # type: ignore
+
class ItemCustomToolCall(Item, discriminator="custom_tool_call"):
    """Custom tool call.

    :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required.
     CUSTOM_TOOL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL
    :ivar id: The unique ID of the custom tool call in the OpenAI platform.
    :vartype id: str
    :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required.
    :vartype call_id: str
    :ivar name: The name of the custom tool being called. Required.
    :vartype name: str
    :ivar input: The input for the custom tool call generated by the model. Required.
    :vartype input: str
    """

    type: Literal[ItemType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used to map this custom tool call to a tool call output. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the custom tool being called. Required."""
    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The input for the custom tool call generated by the model. Required."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        input: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ItemType.CUSTOM_TOOL_CALL  # type: ignore
+
class ItemCustomToolCallOutput(Item, discriminator="custom_tool_call_output"):
    """Custom tool call output.

    :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``.
     Required. CUSTOM_TOOL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL_OUTPUT
    :ivar id: The unique ID of the custom tool call output in the OpenAI platform.
    :vartype id: str
    :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call.
     Required.
    :vartype call_id: str
    :ivar output: The output from the custom tool call generated by your code. Can be a string or
     a list of output content. Required. Is either a str type or a
     list[FunctionAndCustomToolCallOutput] type.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    """

    type: Literal[ItemType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.
    CUSTOM_TOOL_CALL_OUTPUT."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call output in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The call ID, used to map this custom tool call output to a custom tool call. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the custom tool call generated by your code. Can be a string or an list of
    output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ItemType.CUSTOM_TOOL_CALL_OUTPUT  # type: ignore
+
class ItemField(_Model):
    """An item representing a message, tool call, tool output, reasoning, or other response element.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ItemFieldApplyPatchToolCall, ItemFieldApplyPatchToolCallOutput,
    ItemFieldCodeInterpreterToolCall, ItemFieldCompactionBody, ItemFieldComputerToolCall,
    ItemFieldComputerToolCallOutput, ItemFieldCustomToolCall, ItemFieldCustomToolCallOutput,
    ItemFieldFileSearchToolCall, ItemFieldFunctionToolCall, ItemFieldFunctionToolCallOutput,
    ItemFieldImageGenToolCall, ItemFieldLocalShellToolCall, ItemFieldLocalShellToolCallOutput,
    ItemFieldMcpApprovalRequest, ItemFieldMcpApprovalResponseResource, ItemFieldMcpToolCall,
    ItemFieldMcpListTools, ItemFieldMessage, ItemFieldReasoningItem, ItemFieldFunctionShellCall,
    ItemFieldFunctionShellCallOutput, ItemFieldWebSearchToolCall

    :ivar type: Required. Known values are: "message", "function_call", "function_call_output",
     "file_search_call", "web_search_call", "image_generation_call", "computer_call",
     "computer_call_output", "reasoning", "compaction", "code_interpreter_call", "local_shell_call",
     "local_shell_call_output", "shell_call", "shell_call_output", "apply_patch_call",
     "apply_patch_call_output", "mcp_list_tools", "mcp_approval_request", "mcp_approval_response",
     "mcp_call", "custom_tool_call", and "custom_tool_call_output".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ItemFieldType
    """

    # Registry of discriminator value -> concrete subtype, populated as subclasses
    # are declared with ``discriminator=...``.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"message\", \"function_call\", \"function_call_output\",
    \"file_search_call\", \"web_search_call\", \"image_generation_call\", \"computer_call\",
    \"computer_call_output\", \"reasoning\", \"compaction\", \"code_interpreter_call\",
    \"local_shell_call\", \"local_shell_call_output\", \"shell_call\", \"shell_call_output\",
    \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\",
    \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"custom_tool_call\", and
    \"custom_tool_call_output\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Base discriminated type: concrete subclasses pin their own "type" value.
        super().__init__(*args, **kwargs)
+
class ItemFieldApplyPatchToolCall(ItemField, discriminator="apply_patch_call"):
    """Apply patch tool call.

    :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL
    :ivar id: The unique ID of the apply patch tool call. Populated when this item is returned via
     API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``.
     Required. Known values are: "in_progress" and "completed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallStatus
    :ivar operation: Apply patch operation. Required.
    :vartype operation: ~azure.ai.responses.server.sdk.models.models.ApplyPatchFileOperation
    :ivar created_by: The ID of the entity that created this tool call.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call. Populated when this item is returned via API.
    Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required.
    Known values are: \"in_progress\" and \"completed\"."""
    operation: "_models.ApplyPatchFileOperation" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Apply patch operation. Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the entity that created this tool call."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallStatus"],
        operation: "_models.ApplyPatchFileOperation",
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ItemFieldType.APPLY_PATCH_CALL  # type: ignore
+
class ItemFieldApplyPatchToolCallOutput(ItemField, discriminator="apply_patch_call_output"):
    """Apply patch tool call output.

    :ivar type: The type of the item. Always ``apply_patch_call_output``. Required.
     APPLY_PATCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL_OUTPUT
    :ivar id: The unique ID of the apply patch tool call output. Populated when this item is
     returned via API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call output. One of ``completed`` or
     ``failed``. Required. Known values are: "completed" and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallOutputStatus
    :ivar output:
    :vartype output: str
    :ivar created_by: The ID of the entity that created this tool call output.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call output. Populated when this item is returned via
    API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallOutputStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required.
    Known values are: \"completed\" and \"failed\"."""
    # The spec provides no description for ``output``; optional textual result of
    # the patch application — confirm against the service documentation.
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the entity that created this tool call output."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallOutputStatus"],
        output: Optional[str] = None,
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after super().__init__ so the discriminator value wins over any
        # caller-supplied "type" in kwargs or the raw-JSON mapping.
        self.type = ItemFieldType.APPLY_PATCH_CALL_OUTPUT  # type: ignore
+
class ItemFieldCodeInterpreterToolCall(ItemField, discriminator="code_interpreter_call"):
    """Code interpreter tool call.

    :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``.
     Required. CODE_INTERPRETER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER_CALL
    :ivar id: The unique ID of the code interpreter tool call. Required.
    :vartype id: str
    :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``,
     ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"],
     Literal["interpreting"], Literal["failed"]
    :vartype status: str
    :ivar container_id: The ID of the container used to run the code. Required.
    :vartype container_id: str
    :ivar code: The code run by the tool call. Required.
    :vartype code: str
    :ivar outputs: The log or image outputs of the run. Required.
    :vartype outputs: list[~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputLogs
     or ~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputImage]
    """

    type: Literal[ItemFieldType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.
    CODE_INTERPRETER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the code interpreter tool call. Required."""
    status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``,
    ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"],
    Literal[\"interpreting\"], Literal[\"failed\"]"""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the container used to run the code. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"],
        container_id: str,
        code: str,
        outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.CODE_INTERPRETER_CALL  # type: ignore
+
+
class ItemFieldCompactionBody(ItemField, discriminator="compaction"):
    """Compaction item.

    :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPACTION
    :ivar id: The unique ID of the compaction item. Required.
    :vartype id: str
    :ivar encrypted_content: The encrypted content that was produced by compaction. Required.
    :vartype encrypted_content: str
    :ivar created_by: The identifier of the actor that created the item.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``compaction``. Required. COMPACTION."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the compaction item. Required."""
    encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The encrypted content that was produced by compaction. Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of the actor that created the item."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        encrypted_content: str,
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.COMPACTION  # type: ignore
+
+
class ItemFieldComputerToolCall(ItemField, discriminator="computer_call"):
    """Computer tool call.

    :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL
    :ivar id: The unique ID of the computer call. Required.
    :vartype id: str
    :ivar call_id: An identifier used when responding to the tool call with output. Required.
    :vartype call_id: str
    :ivar action: Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.ComputerAction
    :ivar pending_safety_checks: The pending safety checks for the computer call. Required.
    :vartype pending_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[ItemFieldType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the computer call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used when responding to the tool call with output. Required."""
    action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The pending safety checks for the computer call. Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.ComputerAction",
        pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"],
        status: Literal["in_progress", "completed", "incomplete"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.COMPUTER_CALL  # type: ignore
+
+
class ItemFieldComputerToolCallOutput(ItemField, discriminator="computer_call_output"):
    """Computer tool call output.

    :ivar type: The type of the computer tool call output. Always ``computer_call_output``.
     Required. COMPUTER_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL_OUTPUT
    :ivar id: The ID of the computer tool call output. Read-only; assigned by the service.
     Required.
    :vartype id: str
    :ivar call_id: The ID of the computer tool call that produced the output. Required.
    :vartype call_id: str
    :ivar acknowledged_safety_checks: The safety checks reported by the API that have been
     acknowledged by the developer.
    :vartype acknowledged_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar output: Required.
    :vartype output: ~azure.ai.responses.server.sdk.models.models.ComputerScreenshotImage
    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
     ``incomplete``. Populated when input items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[ItemFieldType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer tool call output. Always ``computer_call_output``. Required.
    COMPUTER_CALL_OUTPUT."""
    # Read-only visibility: callers cannot set this ID; the service populates it.
    id: str = rest_field(visibility=["read"])
    """The ID of the computer tool call output. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the computer tool call that produced the output. Required."""
    acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The safety checks reported by the API that have been acknowledged by the developer."""
    output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
    Populated when input items are returned via API. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: "_models.ComputerScreenshotImage",
        acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.COMPUTER_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldCustomToolCall(ItemField, discriminator="custom_tool_call"):
    """Custom tool call.

    :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required.
     CUSTOM_TOOL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL
    :ivar id: The unique ID of the custom tool call in the OpenAI platform.
    :vartype id: str
    :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required.
    :vartype call_id: str
    :ivar name: The name of the custom tool being called. Required.
    :vartype name: str
    :ivar input: The input for the custom tool call generated by the model. Required.
    :vartype input: str
    """

    type: Literal[ItemFieldType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used to map this custom tool call to a tool call output. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the custom tool being called. Required."""
    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The input for the custom tool call generated by the model. Required."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        input: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.CUSTOM_TOOL_CALL  # type: ignore
+
+
class ItemFieldCustomToolCallOutput(ItemField, discriminator="custom_tool_call_output"):
    """Custom tool call output.

    :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``.
     Required. CUSTOM_TOOL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL_OUTPUT
    :ivar id: The unique ID of the custom tool call output in the OpenAI platform.
    :vartype id: str
    :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call.
     Required.
    :vartype call_id: str
    :ivar output: The output from the custom tool call generated by your code. Can be a string or
     a list of output content. Required. Is either a str type or a list of
     FunctionAndCustomToolCallOutput types.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    """

    type: Literal[ItemFieldType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.
    CUSTOM_TOOL_CALL_OUTPUT."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call output in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The call ID, used to map this custom tool call output to a custom tool call. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the custom tool call generated by your code. Can be a string or an list of
    output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.CUSTOM_TOOL_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldFileSearchToolCall(ItemField, discriminator="file_search_call"):
    """File search tool call.

    :ivar id: The unique ID of the file search tool call. Required.
    :vartype id: str
    :ivar type: The type of the file search tool call. Always ``file_search_call``. Required.
     FILE_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH_CALL
    :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``,
     ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types:
     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"],
     Literal["failed"]
    :vartype status: str
    :ivar queries: The queries used to search for files. Required.
    :vartype queries: list[str]
    :ivar results:
    :vartype results: list[~azure.ai.responses.server.sdk.models.models.FileSearchToolCallResults]
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the file search tool call. Required."""
    type: Literal[ItemFieldType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL."""
    status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the file search tool call. One of ``in_progress``, ``searching``, ``incomplete``
    or ``failed``,. Required. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"searching\"], Literal[\"completed\"], Literal[\"incomplete\"], Literal[\"failed\"]"""
    queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The queries used to search for files. Required."""
    # Optional: only present once the search has produced results.
    results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
        queries: list[str],
        results: Optional[list["_models.FileSearchToolCallResults"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.FILE_SEARCH_CALL  # type: ignore
+
+
class ItemFieldFunctionShellCall(ItemField, discriminator="shell_call"):
    """Shell tool call.

    :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL
    :ivar id: The unique ID of the shell tool call. Populated when this item is returned via API.
     Required.
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar action: The shell commands and limits that describe how to run the tool call. Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.FunctionShellAction
    :ivar status: The status of the shell call. One of ``in_progress``, ``completed``, or
     ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.LocalShellCallStatus
    :ivar environment: Required.
    :vartype environment: ~azure.ai.responses.server.sdk.models.models.FunctionShellCallEnvironment
    :ivar created_by: The ID of the entity that created this tool call.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``shell_call``. Required. SHELL_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call. Populated when this item is returned via API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    action: "_models.FunctionShellAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The shell commands and limits that describe how to run the tool call. Required."""
    # Extensible enum: accepts plain strings in addition to LocalShellCallStatus members.
    status: Union[str, "_models.LocalShellCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the shell call. One of ``in_progress``, ``completed``, or ``incomplete``.
    Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    environment: "_models.FunctionShellCallEnvironment" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the entity that created this tool call."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.FunctionShellAction",
        status: Union[str, "_models.LocalShellCallStatus"],
        environment: "_models.FunctionShellCallEnvironment",
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.SHELL_CALL  # type: ignore
+
+
class ItemFieldFunctionShellCallOutput(ItemField, discriminator="shell_call_output"):
    """Shell call output.

    :ivar type: The type of the shell call output. Always ``shell_call_output``. Required.
     SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL_OUTPUT
    :ivar id: The unique ID of the shell call output. Populated when this item is returned via API.
     Required.
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the shell call output. One of ``in_progress``, ``completed``, or
     ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.LocalShellCallOutputStatusEnum
    :ivar output: An array of shell call output contents. Required.
    :vartype output:
     list[~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputContent]
    :ivar max_output_length: Required.
    :vartype max_output_length: int
    :ivar created_by: The identifier of the actor that created the item.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the shell call output. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell call output. Populated when this item is returned via API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    # Extensible enum: accepts plain strings in addition to LocalShellCallOutputStatusEnum members.
    status: Union[str, "_models.LocalShellCallOutputStatusEnum"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the shell call output. One of ``in_progress``, ``completed``, or ``incomplete``.
    Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    output: list["_models.FunctionShellCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An array of shell call output contents. Required."""
    max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of the actor that created the item."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.LocalShellCallOutputStatusEnum"],
        output: list["_models.FunctionShellCallOutputContent"],
        max_output_length: int,
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.SHELL_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldFunctionToolCall(ItemField, discriminator="function_call"):
    """Function tool call.

    :ivar id: The unique ID of the function tool call. Read-only; assigned by the service.
     Required.
    :vartype id: str
    :ivar type: The type of the function tool call. Always ``function_call``. Required.
     FUNCTION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the function to run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments to pass to the function. Required.
    :vartype arguments: str
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    # Read-only visibility: callers cannot set this ID; the service populates it.
    id: str = rest_field(visibility=["read"])
    """The unique ID of the function tool call. Required."""
    type: Literal[ItemFieldType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the function. Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        arguments: str,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.FUNCTION_CALL  # type: ignore
+
+
class ItemFieldFunctionToolCallOutput(ItemField, discriminator="function_call_output"):
    """Function tool call output.

    :ivar id: The unique ID of the function tool call output. Read-only; populated when this item
     is returned via API. Required.
    :vartype id: str
    :ivar type: The type of the function tool call output. Always ``function_call_output``.
     Required. FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL_OUTPUT
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the function call generated by your code. Can be a string or a
     list of output content. Required. Is either a str type or a list of
     FunctionAndCustomToolCallOutput types.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    # Read-only visibility: callers cannot set this ID; the service populates it.
    id: str = rest_field(visibility=["read"])
    """The unique ID of the function tool call output. Populated when this item is returned via API.
    Required."""
    type: Literal[ItemFieldType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call output. Always ``function_call_output``. Required.
    FUNCTION_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the function call generated by your code. Can be a string or an list of output
    content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldImageGenToolCall(ItemField, discriminator="image_generation_call"):
    """Image generation call.

    :ivar type: The type of the image generation call. Always ``image_generation_call``. Required.
     IMAGE_GENERATION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION_CALL
    :ivar id: The unique ID of the image generation call. Required.
    :vartype id: str
    :ivar status: The status of the image generation call. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"]
    :vartype status: str
    :ivar result: Required.
    :vartype result: str
    """

    type: Literal[ItemFieldType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the image generation call. Always ``image_generation_call``. Required.
    IMAGE_GENERATION_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the image generation call. Required."""
    status: Literal["in_progress", "completed", "generating", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the image generation call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]"""
    # NOTE(review): presumably the generated image payload (e.g. base64) — confirm against the spec.
    result: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "generating", "failed"],
        result: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so it is set even when constructed from a raw mapping.
        self.type = ItemFieldType.IMAGE_GENERATION_CALL  # type: ignore
+
+
+class ItemFieldLocalShellToolCall(ItemField, discriminator="local_shell_call"):
+    """Local shell call.
+
+    Discriminated variant of ItemField whose ``type`` is always ``local_shell_call``.
+
+    :ivar type: The type of the local shell call. Always ``local_shell_call``. Required.
+     LOCAL_SHELL_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL
+    :ivar id: The unique ID of the local shell call. Required.
+    :vartype id: str
+    :ivar call_id: The unique ID of the local shell tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar action: The exec action for the local shell call. Required.
+    :vartype action: ~azure.ai.responses.server.sdk.models.models.LocalShellExecAction
+    :ivar status: The status of the local shell call. Required. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    type: Literal[ItemFieldType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the local shell call. Required."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the local shell tool call generated by the model. Required."""
+    action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The exec action for the local shell call. Required."""
+    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the local shell call. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        call_id: str,
+        action: "_models.LocalShellExecAction",
+        status: Literal["in_progress", "completed", "incomplete"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.LOCAL_SHELL_CALL  # type: ignore
+
+
+class ItemFieldLocalShellToolCallOutput(ItemField, discriminator="local_shell_call_output"):
+    """Local shell call output.
+
+    Discriminated variant of ItemField whose ``type`` is always ``local_shell_call_output``.
+
+    :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``.
+     Required. LOCAL_SHELL_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL_OUTPUT
+    :ivar id: The unique ID of the local shell tool call generated by the model. Required.
+    :vartype id: str
+    :ivar output: A JSON string of the output of the local shell tool call. Required.
+    :vartype output: str
+    :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"],
+     Literal["incomplete"]
+    :vartype status: str
+    """
+
+    type: Literal[ItemFieldType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.
+    LOCAL_SHELL_CALL_OUTPUT."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the local shell tool call generated by the model. Required."""
+    output: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the output of the local shell tool call. Required."""
+    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"],
+    Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        output: str,
+        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.LOCAL_SHELL_CALL_OUTPUT  # type: ignore
+
+
+class ItemFieldMcpApprovalRequest(ItemField, discriminator="mcp_approval_request"):
+    """MCP approval request.
+
+    Discriminated variant of ItemField whose ``type`` is always ``mcp_approval_request``.
+    Represents a request from an MCP server to run a tool, pending approval.
+
+    :ivar type: The type of the item. Always ``mcp_approval_request``. Required.
+     MCP_APPROVAL_REQUEST.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_REQUEST
+    :ivar id: The unique ID of the approval request. Required.
+    :vartype id: str
+    :ivar server_label: The label of the MCP server making the request. Required.
+    :vartype server_label: str
+    :ivar name: The name of the tool to run. Required.
+    :vartype name: str
+    :ivar arguments: A JSON string of arguments for the tool. Required.
+    :vartype arguments: str
+    """
+
+    type: Literal[ItemFieldType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the approval request. Required."""
+    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The label of the MCP server making the request. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the tool to run. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of arguments for the tool. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        server_label: str,
+        name: str,
+        arguments: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.MCP_APPROVAL_REQUEST  # type: ignore
+
+
+class ItemFieldMcpApprovalResponseResource(ItemField, discriminator="mcp_approval_response"):
+    """MCP approval response.
+
+    Discriminated variant of ItemField whose ``type`` is always ``mcp_approval_response``.
+    Answers a previously issued MCP approval request (see ``approval_request_id``).
+
+    :ivar type: The type of the item. Always ``mcp_approval_response``. Required.
+     MCP_APPROVAL_RESPONSE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_RESPONSE
+    :ivar id: The unique ID of the approval response. Required.
+    :vartype id: str
+    :ivar approval_request_id: The ID of the approval request being answered. Required.
+    :vartype approval_request_id: str
+    :ivar approve: Whether the request was approved. Required.
+    :vartype approve: bool
+    :ivar reason: Optional reason accompanying the approval decision.
+    :vartype reason: str
+    """
+
+    type: Literal[ItemFieldType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the approval response. Required."""
+    approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the approval request being answered. Required."""
+    approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Whether the request was approved. Required."""
+    reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Optional reason accompanying the approval decision."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        approval_request_id: str,
+        approve: bool,
+        reason: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.MCP_APPROVAL_RESPONSE  # type: ignore
+
+
+class ItemFieldMcpListTools(ItemField, discriminator="mcp_list_tools"):
+    """MCP list tools.
+
+    Discriminated variant of ItemField whose ``type`` is always ``mcp_list_tools``.
+
+    :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_LIST_TOOLS
+    :ivar id: The unique ID of the list. Required.
+    :vartype id: str
+    :ivar server_label: The label of the MCP server. Required.
+    :vartype server_label: str
+    :ivar tools: The tools available on the server. Required.
+    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.MCPListToolsTool]
+    :ivar error: Optional MCP error details.
+    :vartype error: ~azure.ai.responses.server.sdk.models.models.RealtimeMCPError
+    """
+
+    type: Literal[ItemFieldType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the list. Required."""
+    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The label of the MCP server. Required."""
+    tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The tools available on the server. Required."""
+    error: Optional["_models.RealtimeMCPError"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Optional MCP error details."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        server_label: str,
+        tools: list["_models.MCPListToolsTool"],
+        error: Optional["_models.RealtimeMCPError"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.MCP_LIST_TOOLS  # type: ignore
+
+
+class ItemFieldMcpToolCall(ItemField, discriminator="mcp_call"):
+    """MCP tool call.
+
+    Discriminated variant of ItemField whose ``type`` is always ``mcp_call``.
+
+    :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_CALL
+    :ivar id: The unique ID of the tool call. Required.
+    :vartype id: str
+    :ivar server_label: The label of the MCP server running the tool. Required.
+    :vartype server_label: str
+    :ivar name: The name of the tool that was run. Required.
+    :vartype name: str
+    :ivar arguments: A JSON string of the arguments passed to the tool. Required.
+    :vartype arguments: str
+    :ivar output: Optional output of the tool call.
+    :vartype output: str
+    :ivar error: Optional error details as a JSON object.
+    :vartype error: dict[str, any]
+    :ivar status: The status of the tool call. One of ``in_progress``, ``completed``,
+     ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed",
+     "incomplete", "calling", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MCPToolCallStatus
+    :ivar approval_request_id: Optional ID of the associated approval request.
+    :vartype approval_request_id: str
+    """
+
+    type: Literal[ItemFieldType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the item. Always ``mcp_call``. Required. MCP_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call. Required."""
+    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The label of the MCP server running the tool. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the tool that was run. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments passed to the tool. Required."""
+    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Optional output of the tool call."""
+    error: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Optional error details as a JSON object."""
+    status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``,
+    ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\",
+    \"calling\", and \"failed\"."""
+    approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Optional ID of the associated approval request."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        server_label: str,
+        name: str,
+        arguments: str,
+        output: Optional[str] = None,
+        error: Optional[dict[str, Any]] = None,
+        status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None,
+        approval_request_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.MCP_CALL  # type: ignore
+
+
+class ItemFieldMessage(ItemField, discriminator="message"):
+    """Message.
+
+    Discriminated variant of ItemField whose ``type`` is always ``message``.
+
+    :ivar type: The type of the message. Always set to ``message``. Required. MESSAGE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MESSAGE
+    :ivar id: The unique ID of the message. Required.
+    :vartype id: str
+    :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``.
+     Populated when items are returned via API. Required. Known values are: "in_progress",
+     "completed", and "incomplete".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MessageStatus
+    :ivar role: The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``,
+     ``critic``, ``discriminator``, ``developer``, or ``tool``. Required. Known values are:
+     "unknown", "user", "assistant", "system", "critic", "discriminator", "developer", and "tool".
+    :vartype role: str or ~azure.ai.responses.server.sdk.models.models.MessageRole
+    :ivar content: The content of the message. Required.
+    :vartype content: list[~azure.ai.responses.server.sdk.models.models.MessageContent]
+    """
+
+    type: Literal[ItemFieldType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the message. Always set to ``message``. Required. MESSAGE."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the message. Required."""
+    status: Union[str, "_models.MessageStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when
+    items are returned via API. Required. Known values are: \"in_progress\", \"completed\", and
+    \"incomplete\"."""
+    role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``, ``critic``,
+    ``discriminator``, ``developer``, or ``tool``. Required. Known values are: \"unknown\",
+    \"user\", \"assistant\", \"system\", \"critic\", \"discriminator\", \"developer\", and
+    \"tool\"."""
+    content: list["_models.MessageContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The content of the message. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Union[str, "_models.MessageStatus"],
+        role: Union[str, "_models.MessageRole"],
+        content: list["_models.MessageContent"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.MESSAGE  # type: ignore
+
+
+class ItemFieldReasoningItem(ItemField, discriminator="reasoning"):
+    """Reasoning.
+
+    Discriminated variant of ItemField whose ``type`` is always ``reasoning``.
+
+    :ivar type: The type of the object. Always ``reasoning``. Required. REASONING.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING
+    :ivar id: The unique identifier of the reasoning content. Required.
+    :vartype id: str
+    :ivar encrypted_content: Optional encrypted reasoning content.
+    :vartype encrypted_content: str
+    :ivar summary: Reasoning summary content. Required.
+    :vartype summary: list[~azure.ai.responses.server.sdk.models.models.SummaryTextContent]
+    :ivar content: Reasoning text content.
+    :vartype content: list[~azure.ai.responses.server.sdk.models.models.ReasoningTextContent]
+    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+     Populated when items are returned via API. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    type: Literal[ItemFieldType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the object. Always ``reasoning``. Required. REASONING."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the reasoning content. Required."""
+    encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Optional encrypted reasoning content."""
+    summary: list["_models.SummaryTextContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Reasoning summary content. Required."""
+    content: Optional[list["_models.ReasoningTextContent"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Reasoning text content."""
+    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+    Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        summary: list["_models.SummaryTextContent"],
+        encrypted_content: Optional[str] = None,
+        content: Optional[list["_models.ReasoningTextContent"]] = None,
+        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.REASONING  # type: ignore
+
+
+class ItemFieldWebSearchToolCall(ItemField, discriminator="web_search_call"):
+    """Web search tool call.
+
+    Discriminated variant of ItemField whose ``type`` is always ``web_search_call``.
+
+    :ivar id: The unique ID of the web search tool call. Required.
+    :vartype id: str
+    :ivar type: The type of the web search tool call. Always ``web_search_call``. Required.
+     WEB_SEARCH_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_CALL
+    :ivar status: The status of the web search tool call. Required. Is one of the following types:
+     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"]
+    :vartype status: str
+    :ivar action: An object describing the specific action taken in this web search call. Includes
+     details on how the model used the web (search, open_page, find_in_page). Required. Is one of
+     the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind
+    :vartype action: ~azure.ai.responses.server.sdk.models.models.WebSearchActionSearch or
+     ~azure.ai.responses.server.sdk.models.models.WebSearchActionOpenPage or
+     ~azure.ai.responses.server.sdk.models.models.WebSearchActionFind
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the web search tool call. Required."""
+    type: Literal[ItemFieldType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the web search tool call. Always ``web_search_call``. Required. WEB_SEARCH_CALL."""
+    status: Literal["in_progress", "searching", "completed", "failed"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the web search tool call. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]"""
+    action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = (
+        rest_field(visibility=["read", "create", "update", "delete", "query"])
+    )
+    """An object describing the specific action taken in this web search call. Includes details on how
+    the model used the web (search, open_page, find_in_page). Required. Is one of the following
+    types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Literal["in_progress", "searching", "completed", "failed"],
+        action: Union[
+            "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"
+        ],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemFieldType.WEB_SEARCH_CALL  # type: ignore
+
+
+class ItemFileSearchToolCall(Item, discriminator="file_search_call"):
+    """File search tool call.
+
+    Discriminated variant of Item whose ``type`` is always ``file_search_call``.
+
+    :ivar id: The unique ID of the file search tool call. Required.
+    :vartype id: str
+    :ivar type: The type of the file search tool call. Always ``file_search_call``. Required.
+     FILE_SEARCH_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH_CALL
+    :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``,
+     ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types:
+     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"],
+     Literal["failed"]
+    :vartype status: str
+    :ivar queries: The queries used to search for files. Required.
+    :vartype queries: list[str]
+    :ivar results: Optional file search results.
+    :vartype results: list[~azure.ai.responses.server.sdk.models.models.FileSearchToolCallResults]
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the file search tool call. Required."""
+    type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL."""
+    status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the file search tool call. One of ``in_progress``, ``searching``,
+    ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"],
+    Literal[\"incomplete\"], Literal[\"failed\"]"""
+    queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The queries used to search for files. Required."""
+    results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Optional file search results."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
+        queries: list[str],
+        results: Optional[list["_models.FileSearchToolCallResults"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemType.FILE_SEARCH_CALL  # type: ignore
+
+
+class ItemFunctionToolCall(Item, discriminator="function_call"):
+    """Function tool call.
+
+    Discriminated variant of Item whose ``type`` is always ``function_call``.
+
+    :ivar id: The unique ID of the function tool call. Required.
+    :vartype id: str
+    :ivar type: The type of the function tool call. Always ``function_call``. Required.
+     FUNCTION_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL
+    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the function to run. Required.
+    :vartype name: str
+    :ivar arguments: A JSON string of the arguments to pass to the function. Required.
+    :vartype arguments: str
+    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+     Populated when items are returned via API. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    # NOTE(review): unlike sibling models, ``id`` here is read-only (visibility=["read"]) and is
+    # not accepted by the keyword __init__ overload below — presumably server-populated; confirm
+    # against the TypeSpec definition.
+    id: str = rest_field(visibility=["read"])
+    """The unique ID of the function tool call. Required."""
+    type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the function tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the function to run. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the function. Required."""
+    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+    Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        arguments: str,
+        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemType.FUNCTION_CALL  # type: ignore
+
+
+class ItemImageGenToolCall(Item, discriminator="image_generation_call"):
+    """Image generation call.
+
+    Discriminated variant of Item whose ``type`` is always ``image_generation_call``.
+
+    :ivar type: The type of the image generation call. Always ``image_generation_call``. Required.
+     IMAGE_GENERATION_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION_CALL
+    :ivar id: The unique ID of the image generation call. Required.
+    :vartype id: str
+    :ivar status: The status of the image generation call. Required. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"]
+    :vartype status: str
+    :ivar result: The result of the image generation call. Required.
+    :vartype result: str
+    """
+
+    type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the image generation call. Always ``image_generation_call``. Required.
+    IMAGE_GENERATION_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the image generation call. Required."""
+    status: Literal["in_progress", "completed", "generating", "failed"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the image generation call. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]"""
+    result: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The result of the image generation call. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Literal["in_progress", "completed", "generating", "failed"],
+        result: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemType.IMAGE_GENERATION_CALL  # type: ignore
+
+
+class ItemLocalShellToolCall(Item, discriminator="local_shell_call"):
+    """Local shell call.
+
+    Discriminated variant of Item whose ``type`` is always ``local_shell_call``.
+
+    :ivar type: The type of the local shell call. Always ``local_shell_call``. Required.
+     LOCAL_SHELL_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL
+    :ivar id: The unique ID of the local shell call. Required.
+    :vartype id: str
+    :ivar call_id: The unique ID of the local shell tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar action: The exec action for the local shell call. Required.
+    :vartype action: ~azure.ai.responses.server.sdk.models.models.LocalShellExecAction
+    :ivar status: The status of the local shell call. Required. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the local shell call. Required."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the local shell tool call generated by the model. Required."""
+    action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The exec action for the local shell call. Required."""
+    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the local shell call. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        call_id: str,
+        action: "_models.LocalShellExecAction",
+        status: Literal["in_progress", "completed", "incomplete"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances always serialize with the correct ``type``.
+        self.type = ItemType.LOCAL_SHELL_CALL  # type: ignore
+
+
class ItemLocalShellToolCallOutput(Item, discriminator="local_shell_call_output"):
    """Local shell call output.

    :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``.
     Required. LOCAL_SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL_OUTPUT
    :ivar id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype id: str
    :ivar output: A JSON string of the output of the local shell tool call. Required.
    :vartype output: str
    :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"],
     Literal["incomplete"]
    :vartype status: str
    """

    # Discriminator: selected when the payload's "type" is "local_shell_call_output".
    type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.
    LOCAL_SHELL_CALL_OUTPUT."""
    # NOTE(review): description says "tool call" but this is the call *output* item —
    # wording appears inherited from the spec; confirm against the OpenAPI definition.
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    output: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the output of the local shell tool call. Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"],
    Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        output: str,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT  # type: ignore
+
+
class ItemMcpApprovalRequest(Item, discriminator="mcp_approval_request"):
    """MCP approval request.

    :ivar type: The type of the item. Always ``mcp_approval_request``. Required.
     MCP_APPROVAL_REQUEST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_REQUEST
    :ivar id: The unique ID of the approval request. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server making the request. Required.
    :vartype server_label: str
    :ivar name: The name of the tool to run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of arguments for the tool. Required.
    :vartype arguments: str
    """

    # Discriminator: selected when the payload's "type" is "mcp_approval_request".
    # Answered by an MCPApprovalResponse whose approval_request_id references this item's id.
    type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the approval request. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server making the request. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool to run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of arguments for the tool. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.MCP_APPROVAL_REQUEST  # type: ignore
+
+
class ItemMcpListTools(Item, discriminator="mcp_list_tools"):
    """MCP list tools.

    :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_LIST_TOOLS
    :ivar id: The unique ID of the list. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server. Required.
    :vartype server_label: str
    :ivar tools: The tools available on the server. Required.
    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.MCPListToolsTool]
    :ivar error: Error details when listing the server's tools failed, if any.
    :vartype error: ~azure.ai.responses.server.sdk.models.models.RealtimeMCPError
    """

    # Discriminator: selected when the payload's "type" is "mcp_list_tools".
    type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the list. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server. Required."""
    tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The tools available on the server. Required."""
    # Optional: populated only when an error occurred — presumably while listing tools; confirm against spec.
    error: Optional["_models.RealtimeMCPError"] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        tools: list["_models.MCPListToolsTool"],
        error: Optional["_models.RealtimeMCPError"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.MCP_LIST_TOOLS  # type: ignore
+
+
class ItemMcpToolCall(Item, discriminator="mcp_call"):
    """MCP tool call.

    :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_CALL
    :ivar id: The unique ID of the tool call. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server running the tool. Required.
    :vartype server_label: str
    :ivar name: The name of the tool that was run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments passed to the tool. Required.
    :vartype arguments: str
    :ivar output: The output of the tool call, if any.
    :vartype output: str
    :ivar error: Error details when the tool call failed, if any.
    :vartype error: dict[str, any]
    :ivar status: The status of the tool call. One of ``in_progress``, ``completed``,
     ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed",
     "incomplete", "calling", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MCPToolCallStatus
    :ivar approval_request_id: The ID of the approval request associated with this call, if any.
    :vartype approval_request_id: str
    """

    # Discriminator: selected when the payload's "type" is "mcp_call".
    type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_call``. Required. MCP_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server running the tool. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool that was run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments passed to the tool. Required."""
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Optional free-form error payload; structure not constrained by this model.
    error: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``,
    ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\",
    \"calling\", and \"failed\"."""
    # NOTE(review): presumably links this call back to an ItemMcpApprovalRequest.id — confirm against spec.
    approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
        output: Optional[str] = None,
        error: Optional[dict[str, Any]] = None,
        status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None,
        approval_request_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.MCP_CALL  # type: ignore
+
+
class ItemMessage(Item, discriminator="message"):
    """Message.

    :ivar type: The type of the message. Always set to ``message``. Required. MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MESSAGE
    :ivar id: The unique ID of the message. Required.
    :vartype id: str
    :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Known values are: "in_progress",
     "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MessageStatus
    :ivar role: The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``,
     ``critic``, ``discriminator``, ``developer``, or ``tool``. Required. Known values are:
     "unknown", "user", "assistant", "system", "critic", "discriminator", "developer", and "tool".
    :vartype role: str or ~azure.ai.responses.server.sdk.models.models.MessageRole
    :ivar content: The content of the message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.MessageContent]
    """

    # Discriminator: selected when the payload's "type" is "message".
    type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the message. Always set to ``message``. Required. MESSAGE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the message. Required."""
    # Union[str, enum] is the extensible-enum pattern: unknown wire values stay plain strings.
    status: Union[str, "_models.MessageStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when
    items are returned via API. Required. Known values are: \"in_progress\", \"completed\", and
    \"incomplete\"."""
    role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``, ``critic``,
    ``discriminator``, ``developer``, or ``tool``. Required. Known values are: \"unknown\",
    \"user\", \"assistant\", \"system\", \"critic\", \"discriminator\", \"developer\", and
    \"tool\"."""
    content: list["_models.MessageContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the message. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Union[str, "_models.MessageStatus"],
        role: Union[str, "_models.MessageRole"],
        content: list["_models.MessageContent"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.MESSAGE  # type: ignore
+
+
class ItemOutputMessage(Item, discriminator="output_message"):
    """Output message.

    :ivar id: The unique ID of the output message. Required.
    :vartype id: str
    :ivar type: The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_MESSAGE
    :ivar role: The role of the output message. Always ``assistant``. Required. Default value is
     "assistant".
    :vartype role: str
    :ivar content: The content of the output message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.OutputMessageContent]
    :ivar phase: Known values are: "commentary" and "final_answer".
    :vartype phase: str or ~azure.ai.responses.server.sdk.models.models.MessagePhase
    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
     ``incomplete``. Populated when input items are returned via API. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the output message. Required."""
    # Discriminator: selected when the payload's "type" is "output_message".
    # NOTE(review): docstring says "Always ``message``" while the discriminator value is
    # "output_message" — wording likely inherited from the spec; confirm.
    type: Literal[ItemType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE."""
    role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\"."""
    content: list["_models.OutputMessageContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The content of the output message. Required."""
    phase: Optional[Union[str, "_models.MessagePhase"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"commentary\" and \"final_answer\"."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
    Populated when input items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        content: list["_models.OutputMessageContent"],
        status: Literal["in_progress", "completed", "incomplete"],
        phase: Optional[Union[str, "_models.MessagePhase"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator; role is fixed to "assistant" and not constructor-settable.
        self.type = ItemType.OUTPUT_MESSAGE  # type: ignore
        self.role: Literal["assistant"] = "assistant"
+
+
class ItemReasoningItem(Item, discriminator="reasoning"):
    """Reasoning.

    :ivar type: The type of the object. Always ``reasoning``. Required. REASONING.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING
    :ivar id: The unique identifier of the reasoning content. Required.
    :vartype id: str
    :ivar encrypted_content: Encrypted reasoning content, if provided.
    :vartype encrypted_content: str
    :ivar summary: Reasoning summary content. Required.
    :vartype summary: list[~azure.ai.responses.server.sdk.models.models.SummaryTextContent]
    :ivar content: Reasoning text content.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.ReasoningTextContent]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    # Discriminator: selected when the payload's "type" is "reasoning".
    type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the object. Always ``reasoning``. Required. REASONING."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the reasoning content. Required."""
    encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    summary: list["_models.SummaryTextContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Reasoning summary content. Required."""
    content: Optional[list["_models.ReasoningTextContent"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Reasoning text content."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        summary: list["_models.SummaryTextContent"],
        encrypted_content: Optional[str] = None,
        content: Optional[list["_models.ReasoningTextContent"]] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.REASONING  # type: ignore
+
+
class ItemReferenceParam(Item, discriminator="item_reference"):
    """Item reference.

    A lightweight pointer to an existing item by ID, used in place of inlining the item itself.

    :ivar type: The type of item to reference. Always ``item_reference``. Required. ITEM_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ITEM_REFERENCE
    :ivar id: The ID of the item to reference. Required.
    :vartype id: str
    """

    # Discriminator: selected when the payload's "type" is "item_reference".
    type: Literal[ItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of item to reference. Always ``item_reference``. Required. ITEM_REFERENCE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the item to reference. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.ITEM_REFERENCE  # type: ignore
+
+
class ItemWebSearchToolCall(Item, discriminator="web_search_call"):
    """Web search tool call.

    :ivar id: The unique ID of the web search tool call. Required.
    :vartype id: str
    :ivar type: The type of the web search tool call. Always ``web_search_call``. Required.
     WEB_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_CALL
    :ivar status: The status of the web search tool call. Required. Is one of the following types:
     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"]
    :vartype status: str
    :ivar action: An object describing the specific action taken in this web search call. Includes
     details on how the model used the web (search, open_page, find_in_page). Required. Is one of
     the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind
    :vartype action: ~azure.ai.responses.server.sdk.models.models.WebSearchActionSearch or
     ~azure.ai.responses.server.sdk.models.models.WebSearchActionOpenPage or
     ~azure.ai.responses.server.sdk.models.models.WebSearchActionFind
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the web search tool call. Required."""
    # Discriminator: selected when the payload's "type" is "web_search_call".
    type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the web search tool call. Always ``web_search_call``. Required. WEB_SEARCH_CALL."""
    status: Literal["in_progress", "searching", "completed", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the web search tool call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]"""
    # Untagged union of the three web-search action model types.
    action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """An object describing the specific action taken in this web search call. Includes details on how
    the model used the web (search, open_page, find_in_page). Required. Is one of the following
    types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "searching", "completed", "failed"],
        action: Union[
            "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"
        ],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.WEB_SEARCH_CALL  # type: ignore
+
+
class KeyPressAction(ComputerAction, discriminator="keypress"):
    """KeyPress.

    :ivar type: Specifies the event type. For a keypress action, this property is always set to
     ``keypress``. Required. KEYPRESS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.KEYPRESS
    :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an
     array of strings, each representing a key. Required.
    :vartype keys_property: list[str]
    """

    # Discriminator: selected when the payload's "type" is "keypress".
    type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a keypress action, this property is always set to ``keypress``.
    Required. KEYPRESS."""
    # Python name is "keys_property" to avoid shadowing dict-like .keys(); serializes as "keys" on the wire.
    keys_property: list[str] = rest_field(
        name="keys", visibility=["read", "create", "update", "delete", "query"], original_tsp_name="keys"
    )
    """The combination of keys the model is requesting to be pressed. This is an array of strings,
    each representing a key. Required."""

    @overload
    def __init__(
        self,
        *,
        keys_property: list[str],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ComputerActionType.KEYPRESS  # type: ignore
+
+
class LocalEnvironmentResource(FunctionShellCallEnvironment, discriminator="local"):
    """Local Environment.

    Marker subtype with no fields of its own: the discriminator alone identifies
    a shell-call environment of type ``local``.

    :ivar type: The environment type. Always ``local``. Required. LOCAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL
    """

    # Discriminator: selected when the payload's "type" is "local".
    type: Literal[FunctionShellCallEnvironmentType.LOCAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The environment type. Always ``local``. Required. LOCAL."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = FunctionShellCallEnvironmentType.LOCAL  # type: ignore
+
+
class LocalShellExecAction(_Model):
    """Local shell exec action.

    :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is
     "exec".
    :vartype type: str
    :ivar command: The command to run. Required.
    :vartype command: list[str]
    :ivar timeout_ms: Timeout for the command, in milliseconds, if any.
    :vartype timeout_ms: int
    :ivar working_directory: Working directory to run the command in, if any.
    :vartype working_directory: str
    :ivar env: Environment variables to set for the command. Required.
    :vartype env: dict[str, str]
    :ivar user: User to run the command as, if any.
    :vartype user: str
    """

    type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\"."""
    command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The command to run. Required."""
    timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Environment variables to set for the command. Required."""
    user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        command: list[str],
        env: dict[str, str],
        timeout_ms: Optional[int] = None,
        working_directory: Optional[str] = None,
        user: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # "type" is a fixed constant, not constructor-settable; force it after base init.
        self.type: Literal["exec"] = "exec"
+
+
class LocalShellToolParam(Tool, discriminator="local_shell"):
    """Local shell tool.

    Marker subtype with no fields of its own: the discriminator alone identifies
    a tool of type ``local_shell``.

    :ivar type: The type of the local shell tool. Always ``local_shell``. Required. LOCAL_SHELL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL
    """

    # Discriminator: selected when the payload's "type" is "local_shell".
    type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell tool. Always ``local_shell``. Required. LOCAL_SHELL."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ToolType.LOCAL_SHELL  # type: ignore
+
+
class LocalSkillParam(_Model):
    """LocalSkillParam.

    Describes a skill available on the local filesystem: a name, a human-readable
    description, and the directory that contains the skill.

    :ivar name: The name of the skill. Required.
    :vartype name: str
    :ivar description: The description of the skill. Required.
    :vartype description: str
    :ivar path: The path to the directory containing the skill. Required.
    :vartype path: str
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the skill. Required."""
    description: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The description of the skill. Required."""
    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The path to the directory containing the skill. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: str,
        path: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class LogProb(_Model):
    """Log probability.

    A token together with its log probability, the UTF-8 bytes of the token,
    and the most likely alternative tokens at this position.

    :ivar token: The token text. Required.
    :vartype token: str
    :ivar logprob: The log probability of the token. Required.
    :vartype logprob: int
    :ivar bytes: The UTF-8 byte values of the token. Required.
    :vartype bytes: list[int]
    :ivar top_logprobs: The most likely tokens and their log probabilities at this position.
     Required.
    :vartype top_logprobs: list[~azure.ai.responses.server.sdk.models.models.TopLogProb]
    """

    token: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    # NOTE(review): log probabilities are typically floats, but the generated type here is int —
    # confirm against the OpenAPI spec before relying on this annotation for deserialization.
    logprob: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        token: str,
        logprob: int,
        bytes: list[int],
        top_logprobs: list["_models.TopLogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MCPApprovalResponse(Item, discriminator="mcp_approval_response"):
    """MCP approval response.

    Answers a previously issued MCP approval request (see ``ItemMcpApprovalRequest``),
    identified by ``approval_request_id``.

    :ivar type: The type of the item. Always ``mcp_approval_response``. Required.
     MCP_APPROVAL_RESPONSE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_RESPONSE
    :ivar id: The unique ID of this response item, if any.
    :vartype id: str
    :ivar approval_request_id: The ID of the approval request being answered. Required.
    :vartype approval_request_id: str
    :ivar approve: Whether the request was approved. Required.
    :vartype approve: bool
    :ivar reason: An optional reason for the decision.
    :vartype reason: str
    """

    # Discriminator: selected when the payload's "type" is "mcp_approval_response".
    type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the approval request being answered. Required."""
    approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Whether the request was approved. Required."""
    reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        approval_request_id: str,
        approve: bool,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        reason: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct "type" value.
        self.type = ItemType.MCP_APPROVAL_RESPONSE  # type: ignore
+
+
class MCPListToolsTool(_Model):
    """MCP list tools tool.

    A single tool entry as reported by an MCP server's list-tools operation.

    :ivar name: The name of the tool. Required.
    :vartype name: str
    :ivar description: Optional human-readable description of the tool.
    :vartype description: str
    :ivar input_schema: The JSON schema describing the tool's input. Required.
    :vartype input_schema: ~azure.ai.responses.server.sdk.models.models.MCPListToolsToolInputSchema
    :ivar annotations: Optional annotations attached to the tool.
    :vartype annotations: ~azure.ai.responses.server.sdk.models.models.MCPListToolsToolAnnotations
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool. Required."""
    # Optional human-readable description of the tool.
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    input_schema: "_models.MCPListToolsToolInputSchema" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The JSON schema describing the tool's input. Required."""
    # Optional annotations attached to the tool.
    annotations: Optional["_models.MCPListToolsToolAnnotations"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        name: str,
        input_schema: "_models.MCPListToolsToolInputSchema",
        description: Optional[str] = None,
        annotations: Optional["_models.MCPListToolsToolAnnotations"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MCPListToolsToolAnnotations(_Model):
    """Annotations attached to an MCP tool returned by list-tools.

    No properties are modeled here; any annotation values on the wire are presumably
    carried as untyped content by the ``_Model`` base — confirm against the service spec.
    """
+
+
class MCPListToolsToolInputSchema(_Model):
    """The JSON schema describing an MCP tool's input.

    No properties are modeled here; the schema document is presumably carried as
    untyped content by the ``_Model`` base — confirm against the service spec.
    """
+
+
class MCPTool(Tool, discriminator="mcp"):
    """MCP tool.

    :ivar type: The type of the MCP tool. Always ``mcp``. Required. MCP.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP
    :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required.
    :vartype server_label: str
    :ivar server_url: The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be
     provided.
    :vartype server_url: str
    :ivar connector_id: Identifier for service connectors, like those available in ChatGPT. One of
     ``server_url`` or ``connector_id`` must be provided. See the service documentation to learn
     more about service connectors. Currently supported ``connector_id`` values are:

     * Dropbox: `connector_dropbox`
     * Gmail: `connector_gmail`
     * Google Calendar: `connector_googlecalendar`
     * Google Drive: `connector_googledrive`
     * Microsoft Teams: `connector_microsoftteams`
     * Outlook Calendar: `connector_outlookcalendar`
     * Outlook Email: `connector_outlookemail`
     * SharePoint: `connector_sharepoint`. Is one of the following types:
     Literal["connector_dropbox"], Literal["connector_gmail"], Literal["connector_googlecalendar"],
     Literal["connector_googledrive"], Literal["connector_microsoftteams"],
     Literal["connector_outlookcalendar"], Literal["connector_outlookemail"],
     Literal["connector_sharepoint"]
    :vartype connector_id: str or str or str or str or str or str or str or str
    :ivar authorization: An OAuth access token that can be used with a remote MCP server, either
     with a custom MCP server URL or a service connector. Your application must handle the OAuth
     authorization flow and provide the token here.
    :vartype authorization: str
    :ivar server_description: Optional description of the MCP server, used to provide more context.
    :vartype server_description: str
    :ivar headers: Optional headers to send with requests to the MCP server.
    :vartype headers: dict[str, str]
    :ivar allowed_tools: Is either a [str] type or a MCPToolFilter type.
    :vartype allowed_tools: list[str] or ~azure.ai.responses.server.sdk.models.models.MCPToolFilter
    :ivar require_approval: Is one of the following types: MCPToolRequireApproval,
     Literal["always"], Literal["never"]
    :vartype require_approval: ~azure.ai.responses.server.sdk.models.models.MCPToolRequireApproval
     or str or str
    :ivar project_connection_id: The connection ID in the project for the MCP server. The
     connection stores authentication and other connection details needed to connect to the MCP
     server.
    :vartype project_connection_id: str
    """

    type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the MCP tool. Always ``mcp``. Required. MCP."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A label for this MCP server, used to identify it in tool calls. Required."""
    server_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be provided."""
    connector_id: Optional[
        Literal[
            "connector_dropbox",
            "connector_gmail",
            "connector_googlecalendar",
            "connector_googledrive",
            "connector_microsoftteams",
            "connector_outlookcalendar",
            "connector_outlookemail",
            "connector_sharepoint",
        ]
    ] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Identifier for service connectors, like those available in ChatGPT. One of ``server_url`` or
    ``connector_id`` must be provided. See the service documentation to learn more about service
    connectors. Currently supported ``connector_id`` values are:

    * Dropbox: `connector_dropbox`
    * Gmail: `connector_gmail`
    * Google Calendar: `connector_googlecalendar`
    * Google Drive: `connector_googledrive`
    * Microsoft Teams: `connector_microsoftteams`
    * Outlook Calendar: `connector_outlookcalendar`
    * Outlook Email: `connector_outlookemail`
    * SharePoint: `connector_sharepoint`. Is one of the following types:
    Literal[\"connector_dropbox\"], Literal[\"connector_gmail\"],
    Literal[\"connector_googlecalendar\"], Literal[\"connector_googledrive\"],
    Literal[\"connector_microsoftteams\"], Literal[\"connector_outlookcalendar\"],
    Literal[\"connector_outlookemail\"], Literal[\"connector_sharepoint\"]"""
    authorization: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An OAuth access token that can be used with a remote MCP server, either with a custom MCP
    server URL or a service connector. Your application must handle the OAuth authorization flow
    and provide the token here."""
    server_description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional description of the MCP server, used to provide more context."""
    # Optional headers to send with requests to the MCP server.
    # NOTE(review): presumably used for authentication/custom metadata — confirm with the service spec.
    headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    allowed_tools: Optional[Union[list[str], "_models.MCPToolFilter"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a [str] type or a MCPToolFilter type."""
    require_approval: Optional[Union["_models.MCPToolRequireApproval", Literal["always"], Literal["never"]]] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """Is one of the following types: MCPToolRequireApproval, Literal[\"always\"], Literal[\"never\"]"""
    project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The connection ID in the project for the MCP server. The connection stores authentication and
    other connection details needed to connect to the MCP server."""

    @overload
    def __init__(
        self,
        *,
        server_label: str,
        server_url: Optional[str] = None,
        connector_id: Optional[
            Literal[
                "connector_dropbox",
                "connector_gmail",
                "connector_googlecalendar",
                "connector_googledrive",
                "connector_microsoftteams",
                "connector_outlookcalendar",
                "connector_outlookemail",
                "connector_sharepoint",
            ]
        ] = None,
        authorization: Optional[str] = None,
        server_description: Optional[str] = None,
        headers: Optional[dict[str, str]] = None,
        allowed_tools: Optional[Union[list[str], "_models.MCPToolFilter"]] = None,
        require_approval: Optional[Union["_models.MCPToolRequireApproval", Literal["always"], Literal["never"]]] = None,
        project_connection_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct tool type.
        self.type = ToolType.MCP # type: ignore
+
+
class MCPToolFilter(_Model):
    """MCP tool filter.

    Selects a subset of an MCP server's tools, by explicit name list and/or by the
    read-only annotation.

    :ivar tool_names: MCP allowed tools.
    :vartype tool_names: list[str]
    :ivar read_only: Indicates whether or not a tool modifies data or is read-only. If an MCP
     tool is annotated with ``readOnlyHint`` (see the MCP specification's tool annotations),
     it will match this filter.
    :vartype read_only: bool
    """

    tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """MCP allowed tools."""
    read_only: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether or not a tool modifies data or is read-only. If an MCP tool is annotated
    with ``readOnlyHint`` (see the MCP specification's tool annotations), it will match this
    filter."""

    @overload
    def __init__(
        self,
        *,
        tool_names: Optional[list[str]] = None,
        read_only: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MCPToolRequireApproval(_Model):
    """MCPToolRequireApproval.

    Per-tool approval policy for an MCP tool (used as ``MCPTool.require_approval``).

    :ivar always: Filter selecting the tools that always require approval.
    :vartype always: ~azure.ai.responses.server.sdk.models.models.MCPToolFilter
    :ivar never: Filter selecting the tools that never require approval.
    :vartype never: ~azure.ai.responses.server.sdk.models.models.MCPToolFilter
    """

    # Filter selecting the tools that always require approval.
    always: Optional["_models.MCPToolFilter"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Filter selecting the tools that never require approval.
    never: Optional["_models.MCPToolFilter"] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        always: Optional["_models.MCPToolFilter"] = None,
        never: Optional["_models.MCPToolFilter"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MemorySearchItem(_Model):
    """A retrieved memory item from memory search.

    Appears in the ``results`` list of memory search tool call items.

    :ivar memory_item: Retrieved memory item. Required.
    :vartype memory_item: ~azure.ai.responses.server.sdk.models.models.MemoryItem
    """

    memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Retrieved memory item. Required."""

    @overload
    def __init__(
        self,
        *,
        memory_item: "_models.MemoryItem",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MemorySearchOptions(_Model):
    """Memory search options.

    Passed as ``search_options`` on the memory search tools.

    :ivar max_memories: Maximum number of memory items to return.
    :vartype max_memories: int
    """

    max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Maximum number of memory items to return."""

    @overload
    def __init__(
        self,
        *,
        max_memories: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MemorySearchPreviewTool(Tool, discriminator="memory_search_preview"):
    """A tool for integrating memories into the agent.

    Preview variant of the memory search tool (discriminator ``memory_search_preview``).

    :ivar type: The type of the tool. Always ``memory_search_preview``. Required.
     MEMORY_SEARCH_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH_PREVIEW
    :ivar memory_store_name: The name of the memory store to use. Required.
    :vartype memory_store_name: str
    :ivar scope: The namespace used to group and isolate memories, such as a user ID. Limits which
     memories can be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to
     the current signed-in user. Required.
    :vartype scope: str
    :ivar search_options: Options for searching the memory store.
    :vartype search_options: ~azure.ai.responses.server.sdk.models.models.MemorySearchOptions
    :ivar update_delay: Time to wait before updating memories after inactivity (seconds). Default
     300.
    :vartype update_delay: int
    """

    type: Literal[ToolType.MEMORY_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the tool. Always ``memory_search_preview``. Required. MEMORY_SEARCH_PREVIEW."""
    memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the memory store to use. Required."""
    scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The namespace used to group and isolate memories, such as a user ID. Limits which memories can
    be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to the current
    signed-in user. Required."""
    search_options: Optional["_models.MemorySearchOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Options for searching the memory store."""
    update_delay: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Time to wait before updating memories after inactivity (seconds). Default 300."""

    @overload
    def __init__(
        self,
        *,
        memory_store_name: str,
        scope: str,
        search_options: Optional["_models.MemorySearchOptions"] = None,
        update_delay: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct tool type.
        self.type = ToolType.MEMORY_SEARCH_PREVIEW # type: ignore
+
+
class MemorySearchTool(Tool, discriminator="memory_search"):
    """A tool for integrating memories into the agent.

    :ivar type: The type of the tool. Always ``memory_search``. Required. MEMORY_SEARCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH
    :ivar memory_store_name: The name of the memory store to use. Required.
    :vartype memory_store_name: str
    :ivar scope: The namespace used to group and isolate memories, such as a user ID. Limits which
     memories can be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to
     the current signed-in user. Required.
    :vartype scope: str
    :ivar search_options: Options for searching the memory store.
    :vartype search_options: ~azure.ai.responses.server.sdk.models.models.MemorySearchOptions
    :ivar update_delay: Time to wait before updating memories after inactivity (seconds). Default
     300.
    :vartype update_delay: int
    """

    type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the tool. Always ``memory_search``. Required. MEMORY_SEARCH."""
    memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the memory store to use. Required."""
    scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The namespace used to group and isolate memories, such as a user ID. Limits which memories can
    be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to the current
    signed-in user. Required."""
    search_options: Optional["_models.MemorySearchOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Options for searching the memory store."""
    update_delay: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Time to wait before updating memories after inactivity (seconds). Default 300."""

    @overload
    def __init__(
        self,
        *,
        memory_store_name: str,
        scope: str,
        search_options: Optional["_models.MemorySearchOptions"] = None,
        update_delay: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct tool type.
        self.type = ToolType.MEMORY_SEARCH # type: ignore
+
+
class MemorySearchToolCallItemParam(Item, discriminator="memory_search_call"):
    """MemorySearchToolCallItemParam.

    Input (request) form of a memory search tool call item.

    :ivar type: Required. MEMORY_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH_CALL
    :ivar results: The results returned from the memory search.
    :vartype results: list[~azure.ai.responses.server.sdk.models.models.MemorySearchItem]
    """

    type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. MEMORY_SEARCH_CALL."""
    results: Optional[list["_models.MemorySearchItem"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The results returned from the memory search."""

    @overload
    def __init__(
        self,
        *,
        results: Optional[list["_models.MemorySearchItem"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct item type.
        self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore
+
+
class MemorySearchToolCallItemResource(OutputItem, discriminator="memory_search_call"):
    """MemorySearchToolCallItemResource.

    Output (resource) form of a memory search tool call item.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. MEMORY_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH_CALL
    :ivar status: The status of the memory search tool call. One of ``in_progress``, ``searching``,
     ``completed``, ``incomplete``, or ``failed``. Required. Is one of the following types:
     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"],
     Literal["failed"]
    :vartype status: str or str or str or str or str
    :ivar results: The results returned from the memory search.
    :vartype results: list[~azure.ai.responses.server.sdk.models.models.MemorySearchItem]
    """

    type: Literal[OutputItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. MEMORY_SEARCH_CALL."""
    status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the memory search tool call. One of ``in_progress``, ``searching``,
    ``completed``, ``incomplete``, or ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"],
    Literal[\"incomplete\"], Literal[\"failed\"]"""
    results: Optional[list["_models.MemorySearchItem"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The results returned from the memory search."""

    @overload
    def __init__(
        self,
        *,
        status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        results: Optional[list["_models.MemorySearchItem"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct output item type.
        self.type = OutputItemType.MEMORY_SEARCH_CALL # type: ignore
+
+
class MessageContentInputFileContent(MessageContent, discriminator="input_file"):
    """Input file.

    The file may be referenced by ``file_id``, ``file_url``, or supplied inline via
    ``file_data``.

    :ivar type: The type of the input item. Always ``input_file``. Required. INPUT_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_FILE
    :ivar file_id: The ID of a previously uploaded file to be sent to the model.
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    """

    type: Literal[MessageContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the input item. Always ``input_file``. Required. INPUT_FILE."""
    # The ID of a previously uploaded file to be sent to the model.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the file to be sent to the model."""
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the file to be sent to the model."""
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the file to be sent to the model."""

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_url: Optional[str] = None,
        file_data: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct content type.
        self.type = MessageContentType.INPUT_FILE # type: ignore
+
+
class MessageContentInputImageContent(MessageContent, discriminator="input_image"):
    """Input image.

    The image may be referenced by ``image_url`` or by a previously uploaded ``file_id``.

    :ivar image_url: The URL of the image to be sent to the model.
    :vartype image_url: str
    :ivar file_id: The ID of a previously uploaded file containing the image.
    :vartype file_id: str
    :ivar type: The type of the input item. Always ``input_image``. Required. INPUT_IMAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_IMAGE
    :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``,
     or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.ImageDetail
    """

    type: Literal[MessageContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the input item. Always ``input_image``. Required. INPUT_IMAGE."""
    # The URL of the image to be sent to the model.
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The ID of a previously uploaded file containing the image.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``.
    Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        detail: Union[str, "_models.ImageDetail"],
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct content type.
        self.type = MessageContentType.INPUT_IMAGE # type: ignore
+
+
class MessageContentInputTextContent(MessageContent, discriminator="input_text"):
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. INPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_TEXT
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal[MessageContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the input item. Always ``input_text``. Required. INPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct content type.
        self.type = MessageContentType.INPUT_TEXT # type: ignore
+
+
class MessageContentOutputTextContent(MessageContent, discriminator="output_text"):
    """Output text.

    :ivar type: The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_TEXT
    :ivar text: The text output from the model. Required.
    :vartype text: str
    :ivar annotations: The annotations of the text output. Required.
    :vartype annotations: list[~azure.ai.responses.server.sdk.models.models.Annotation]
    :ivar logprobs: Per-token log-probability entries for the output text. Required.
    :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.LogProb]
    """

    type: Literal[MessageContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text output from the model. Required."""
    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The annotations of the text output. Required."""
    # Per-token log-probability entries (list of LogProb) for the output text.
    logprobs: list["_models.LogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
        annotations: list["_models.Annotation"],
        logprobs: list["_models.LogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct content type.
        self.type = MessageContentType.OUTPUT_TEXT # type: ignore
+
+
class MessageContentReasoningTextContent(MessageContent, discriminator="reasoning_text"):
    """Reasoning text.

    :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required.
     REASONING_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING_TEXT
    :ivar text: The reasoning text from the model. Required.
    :vartype text: str
    """

    type: Literal[MessageContentType.REASONING_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the reasoning text. Always ``reasoning_text``. Required. REASONING_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The reasoning text from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct content type.
        self.type = MessageContentType.REASONING_TEXT # type: ignore
+
+
class MessageContentRefusalContent(MessageContent, discriminator="refusal"):
    """Refusal.

    :ivar type: The type of the refusal. Always ``refusal``. Required. REFUSAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REFUSAL
    :ivar refusal: The refusal explanation from the model. Required.
    :vartype refusal: str
    """

    type: Literal[MessageContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the refusal. Always ``refusal``. Required. REFUSAL."""
    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The refusal explanation from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        refusal: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct content type.
        self.type = MessageContentType.REFUSAL # type: ignore
+
+
class Metadata(_Model):
    """Set of 16 key-value pairs that can be attached to an object. This can be useful for storing
    additional information about the object in a structured format, and querying for objects via
    API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are
    strings with a maximum length of 512 characters.

    No properties are declared here; the key/value pairs are presumably carried as untyped
    content by the ``_Model`` base — confirm against the service spec.
    """
+
+
class MicrosoftFabricPreviewTool(Tool, discriminator="fabric_dataagent_preview"):
    """The input definition information for a Microsoft Fabric tool as used to configure an agent.

    :ivar type: The object type, which is always 'fabric_dataagent_preview'. Required.
     FABRIC_DATAAGENT_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FABRIC_DATAAGENT_PREVIEW
    :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required.
    :vartype fabric_dataagent_preview:
     ~azure.ai.responses.server.sdk.models.models.FabricDataAgentToolParameters
    """

    type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'fabric_dataagent_preview'. Required.
    FABRIC_DATAAGENT_PREVIEW."""
    fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The fabric data agent tool parameters. Required."""

    @overload
    def __init__(
        self,
        *,
        fabric_dataagent_preview: "_models.FabricDataAgentToolParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the correct tool type.
        self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore
+
+
class MoveParam(ComputerAction, discriminator="move"):
    """Move.

    Polymorphic ``ComputerAction`` subtype selected by the ``type`` discriminator value ``move``.

    :ivar type: Specifies the event type. For a move action, this property is always set to
     ``move``. Required. MOVE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MOVE
    :ivar x: The x-coordinate to move to. Required.
    :vartype x: int
    :ivar y: The y-coordinate to move to. Required.
    :vartype y: int
    """

    type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Specifies the event type. For a move action, this property is always set to ``move``. Required.
     MOVE."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate to move to. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate to move to. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = ComputerActionType.MOVE # type: ignore
+
+
class OAuthConsentRequestOutputItem(OutputItem, discriminator="oauth_consent_request"):
    """Request from the service for the user to perform OAuth consent.

    Polymorphic ``OutputItem`` subtype selected by the ``type`` discriminator value
    ``oauth_consent_request``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: Required.
    :vartype id: str
    :ivar type: Required. OAUTH_CONSENT_REQUEST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OAUTH_CONSENT_REQUEST
    :ivar consent_link: The link the user can use to perform OAuth consent. Required.
    :vartype consent_link: str
    :ivar server_label: The server label for the OAuth consent request. Required.
    :vartype server_label: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    type: Literal[OutputItemType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. OAUTH_CONSENT_REQUEST."""
    consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The link the user can use to perform OAuth consent. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The server label for the OAuth consent request. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str, # pylint: disable=redefined-builtin
        consent_link: str,
        server_label: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputItemType.OAUTH_CONSENT_REQUEST # type: ignore
+
+
class OpenApiAuthDetails(_Model):
    """Authentication details for OpenApiFunctionDefinition.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails

    :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity.
     Required. Known values are: "anonymous", "project_connection", and "managed_identity".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OpenApiAuthType
    """

    # Registry mapping discriminator values to subtypes; populated by the base
    # model machinery as subclasses declare ``discriminator="..."``.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """The type of authentication, must be anonymous/project_connection/managed_identity. Required.
     Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"):
    """Security details for OpenApi anonymous authentication.

    Polymorphic ``OpenApiAuthDetails`` subtype selected by the ``type`` discriminator value
    ``anonymous``. Carries no fields beyond the discriminator.

    :ivar type: The object type, which is always 'anonymous'. Required. ANONYMOUS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ANONYMOUS
    """

    type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'anonymous'. Required. ANONYMOUS."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OpenApiAuthType.ANONYMOUS # type: ignore
+
+
class OpenApiFunctionDefinition(_Model):
    """The input definition information for an openapi function.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does, used by the model to choose when
     and how to call the function.
    :vartype description: str
    :ivar spec: The openapi function shape, described as a JSON Schema object. Required.
    :vartype spec: dict[str, any]
    :ivar auth: Open API authentication details. Required.
    :vartype auth: ~azure.ai.responses.server.sdk.models.models.OpenApiAuthDetails
    :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults.
    :vartype default_params: list[str]
    :ivar functions: List of function definitions used by OpenApi tool. Read-only (populated by
     the service); it cannot be set through the keyword constructor.
    :vartype functions:
     list[~azure.ai.responses.server.sdk.models.models.OpenApiFunctionDefinitionFunction]
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the function does, used by the model to choose when and how to call the
     function."""
    spec: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The openapi function shape, described as a JSON Schema object. Required."""
    auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Open API authentication details. Required."""
    default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """List of OpenAPI spec parameters that will use user-provided defaults."""
    # NOTE: "read" visibility only — this field is service-populated and is
    # deliberately absent from the keyword __init__ overload below.
    functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field(visibility=["read"])
    """List of function definitions used by OpenApi tool."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        spec: dict[str, Any],
        auth: "_models.OpenApiAuthDetails",
        description: Optional[str] = None,
        default_params: Optional[list[str]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiFunctionDefinitionFunction(_Model):
    """A single function definition used by the OpenApi tool.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does, used by the model to choose when
     and how to call the function.
    :vartype description: str
    :ivar parameters: The parameters the functions accepts, described as a JSON Schema object.
     Required.
    :vartype parameters: dict[str, any]
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the function does, used by the model to choose when and how to call the
     function."""
    parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The parameters the functions accepts, described as a JSON Schema object. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        parameters: dict[str, Any],
        description: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"):
    """Security details for OpenApi managed_identity authentication.

    Polymorphic ``OpenApiAuthDetails`` subtype selected by the ``type`` discriminator value
    ``managed_identity``.

    :ivar type: The object type, which is always 'managed_identity'. Required. MANAGED_IDENTITY.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MANAGED_IDENTITY
    :ivar security_scheme: Connection auth security details. Required.
    :vartype security_scheme:
     ~azure.ai.responses.server.sdk.models.models.OpenApiManagedSecurityScheme
    """

    type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'managed_identity'. Required. MANAGED_IDENTITY."""
    security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Connection auth security details. Required."""

    @overload
    def __init__(
        self,
        *,
        security_scheme: "_models.OpenApiManagedSecurityScheme",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OpenApiAuthType.MANAGED_IDENTITY # type: ignore
+
+
class OpenApiManagedSecurityScheme(_Model):
    """Security scheme for OpenApi managed_identity authentication.

    :ivar audience: Authentication scope for managed_identity auth type. Required.
    :vartype audience: str
    """

    audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Authentication scope for managed_identity auth type. Required."""

    @overload
    def __init__(
        self,
        *,
        audience: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Accepts either the keyword form or a single raw-JSON mapping (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"):
    """Security details for OpenApi project connection authentication.

    Polymorphic ``OpenApiAuthDetails`` subtype selected by the ``type`` discriminator value
    ``project_connection``.

    :ivar type: The object type, which is always 'project_connection'. Required.
     PROJECT_CONNECTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.PROJECT_CONNECTION
    :ivar security_scheme: Project connection auth security details. Required.
    :vartype security_scheme:
     ~azure.ai.responses.server.sdk.models.models.OpenApiProjectConnectionSecurityScheme
    """

    type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'project_connection'. Required. PROJECT_CONNECTION."""
    security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Project connection auth security details. Required."""

    @overload
    def __init__(
        self,
        *,
        security_scheme: "_models.OpenApiProjectConnectionSecurityScheme",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OpenApiAuthType.PROJECT_CONNECTION # type: ignore
+
+
class OpenApiProjectConnectionSecurityScheme(_Model):
    """Security scheme for OpenApi project connection authentication.

    :ivar project_connection_id: Project connection id for Project Connection auth type. Required.
    :vartype project_connection_id: str
    """

    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Project connection id for Project Connection auth type. Required."""

    @overload
    def __init__(
        self,
        *,
        project_connection_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Accepts either the keyword form or a single raw-JSON mapping (see overloads).
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiTool(Tool, discriminator="openapi"):
    """The input definition information for an OpenAPI tool as used to configure an agent.

    Polymorphic ``Tool`` subtype selected by the ``type`` discriminator value ``openapi``.

    :ivar type: The object type, which is always 'openapi'. Required. OPENAPI.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OPENAPI
    :ivar openapi: The openapi function definition. Required.
    :vartype openapi: ~azure.ai.responses.server.sdk.models.models.OpenApiFunctionDefinition
    """

    type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'openapi'. Required. OPENAPI."""
    openapi: "_models.OpenApiFunctionDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The openapi function definition. Required."""

    @overload
    def __init__(
        self,
        *,
        openapi: "_models.OpenApiFunctionDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = ToolType.OPENAPI # type: ignore
+
+
class OpenApiToolCall(OutputItem, discriminator="openapi_call"):
    """An OpenAPI tool call.

    Polymorphic ``OutputItem`` subtype selected by the ``type`` discriminator value
    ``openapi_call``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. OPENAPI_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OPENAPI_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the OpenAPI operation being called. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.OPENAPI_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. OPENAPI_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the OpenAPI operation being called. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
     \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputItemType.OPENAPI_CALL # type: ignore
+
+
class OpenApiToolCallOutput(OutputItem, discriminator="openapi_call_output"):
    """The output of an OpenAPI tool call.

    Polymorphic ``OutputItem`` subtype selected by the ``type`` discriminator value
    ``openapi_call_output``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. OPENAPI_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OPENAPI_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the OpenAPI operation that was called. Required.
    :vartype name: str
    :ivar output: The output from the OpenAPI tool call. Is one of the following types: {str: Any},
     str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.OPENAPI_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. OPENAPI_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the OpenAPI operation that was called. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the OpenAPI tool call. Is one of the following types: {str: Any}, str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
     \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputItemType.OPENAPI_CALL_OUTPUT # type: ignore
+
+
class OutputContent(_Model):
    """Base type for a piece of model output content.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    OutputContentOutputTextContent, OutputContentReasoningTextContent, OutputContentRefusalContent

    :ivar type: Required. Known values are: "output_text", "refusal", and "reasoning_text".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputContentType
    """

    # Registry mapping discriminator values to subtypes; populated by the base
    # model machinery as subclasses declare ``discriminator="..."``.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"output_text\", \"refusal\", and \"reasoning_text\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OutputContentOutputTextContent(OutputContent, discriminator="output_text"):
    """Output text.

    Polymorphic ``OutputContent`` subtype selected by the ``type`` discriminator value
    ``output_text``.

    :ivar type: The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_TEXT
    :ivar text: The text output from the model. Required.
    :vartype text: str
    :ivar annotations: The annotations of the text output. Required.
    :vartype annotations: list[~azure.ai.responses.server.sdk.models.models.Annotation]
    :ivar logprobs: Required.
    :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.LogProb]
    """

    type: Literal[OutputContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text output from the model. Required."""
    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The annotations of the text output. Required."""
    logprobs: list["_models.LogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
        annotations: list["_models.Annotation"],
        logprobs: list["_models.LogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputContentType.OUTPUT_TEXT # type: ignore
+
+
class OutputContentReasoningTextContent(OutputContent, discriminator="reasoning_text"):
    """Reasoning text.

    Polymorphic ``OutputContent`` subtype selected by the ``type`` discriminator value
    ``reasoning_text``.

    :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required.
     REASONING_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING_TEXT
    :ivar text: The reasoning text from the model. Required.
    :vartype text: str
    """

    type: Literal[OutputContentType.REASONING_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the reasoning text. Always ``reasoning_text``. Required. REASONING_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The reasoning text from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputContentType.REASONING_TEXT # type: ignore
+
+
class OutputContentRefusalContent(OutputContent, discriminator="refusal"):
    """Refusal.

    Polymorphic ``OutputContent`` subtype selected by the ``type`` discriminator value
    ``refusal``.

    :ivar type: The type of the refusal. Always ``refusal``. Required. REFUSAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REFUSAL
    :ivar refusal: The refusal explanation from the model. Required.
    :vartype refusal: str
    """

    type: Literal[OutputContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the refusal. Always ``refusal``. Required. REFUSAL."""
    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The refusal explanation from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        refusal: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputContentType.REFUSAL # type: ignore
+
+
class OutputItemApplyPatchToolCall(OutputItem, discriminator="apply_patch_call"):
    """Apply patch tool call.

    Polymorphic ``OutputItem`` subtype selected by the ``type`` discriminator value
    ``apply_patch_call``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL
    :ivar id: The unique ID of the apply patch tool call. Populated when this item is returned via
     API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``.
     Required. Known values are: "in_progress" and "completed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallStatus
    :ivar operation: Apply patch operation. Required.
    :vartype operation: ~azure.ai.responses.server.sdk.models.models.ApplyPatchFileOperation
    """

    type: Literal[OutputItemType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call. Populated when this item is returned via API.
     Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required.
     Known values are: \"in_progress\" and \"completed\"."""
    operation: "_models.ApplyPatchFileOperation" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Apply patch operation. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str, # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallStatus"],
        operation: "_models.ApplyPatchFileOperation",
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputItemType.APPLY_PATCH_CALL # type: ignore
+
+
class OutputItemApplyPatchToolCallOutput(OutputItem, discriminator="apply_patch_call_output"):
    """Apply patch tool call output.

    Polymorphic ``OutputItem`` subtype selected by the ``type`` discriminator value
    ``apply_patch_call_output``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``apply_patch_call_output``. Required.
     APPLY_PATCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL_OUTPUT
    :ivar id: The unique ID of the apply patch tool call output. Populated when this item is
     returned via API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call output. One of ``completed`` or
     ``failed``. Required. Known values are: "completed" and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallOutputStatus
    :ivar output: The output of the apply patch tool call, if any.
    :vartype output: str
    """

    type: Literal[OutputItemType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call output. Populated when this item is returned via
     API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallOutputStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required.
     Known values are: \"completed\" and \"failed\"."""
    # Optional output text; the generated spec provides no description for this field.
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str, # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallOutputStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator after base initialization so it always
        # reflects this subtype, regardless of any caller-supplied value.
        self.type = OutputItemType.APPLY_PATCH_CALL_OUTPUT # type: ignore
+
+
+class OutputItemCodeInterpreterToolCall(OutputItem, discriminator="code_interpreter_call"):
+    """Code interpreter tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``.
+     Required. CODE_INTERPRETER_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER_CALL
+    :ivar id: The unique ID of the code interpreter tool call. Required.
+    :vartype id: str
+    :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``,
+     ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the
+     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"],
+     Literal["interpreting"], Literal["failed"]
+    :vartype status: str
+    :ivar container_id: The ID of the container used to run the code. Required.
+    :vartype container_id: str
+    :ivar code: Required.
+    :vartype code: str
+    :ivar outputs: Required.
+    :vartype outputs: list[~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputLogs
+     or ~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputImage]
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.
+    CODE_INTERPRETER_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the code interpreter tool call. Required."""
+    status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``,
+    ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"],
+    Literal[\"interpreting\"], Literal[\"failed\"]"""
+    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the container used to run the code. Required."""
+    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+    outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"],
+        container_id: str,
+        code: str,
+        outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.CODE_INTERPRETER_CALL  # type: ignore
+
+
+class OutputItemCompactionBody(OutputItem, discriminator="compaction"):
+    """Compaction item.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPACTION
+    :ivar id: The unique ID of the compaction item. Required.
+    :vartype id: str
+    :ivar encrypted_content: The encrypted content that was produced by compaction. Required.
+    :vartype encrypted_content: str
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the item. Always ``compaction``. Required. COMPACTION."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the compaction item. Required."""
+    encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The encrypted content that was produced by compaction. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        encrypted_content: str,
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.COMPACTION  # type: ignore
+
+
+class OutputItemComputerToolCall(OutputItem, discriminator="computer_call"):
+    """Computer tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL
+    :ivar id: The unique ID of the computer call. Required.
+    :vartype id: str
+    :ivar call_id: An identifier used when responding to the tool call with output. Required.
+    :vartype call_id: str
+    :ivar action: Required.
+    :vartype action: ~azure.ai.responses.server.sdk.models.models.ComputerAction
+    :ivar pending_safety_checks: The pending safety checks for the computer call. Required.
+    :vartype pending_safety_checks:
+     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
+    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+     Populated when items are returned via API. Required. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the computer call. Required."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """An identifier used when responding to the tool call with output. Required."""
+    action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+    pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The pending safety checks for the computer call. Required."""
+    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+    when items are returned via API. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        call_id: str,
+        action: "_models.ComputerAction",
+        pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"],
+        status: Literal["in_progress", "completed", "incomplete"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.COMPUTER_CALL  # type: ignore
+
+
+class OutputItemComputerToolCallOutput(OutputItem, discriminator="computer_call_output"):
+    """Computer tool call output.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the computer tool call output. Always ``computer_call_output``.
+     Required. COMPUTER_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL_OUTPUT
+    :ivar id: The ID of the computer tool call output. Required.
+    :vartype id: str
+    :ivar call_id: The ID of the computer tool call that produced the output. Required.
+    :vartype call_id: str
+    :ivar acknowledged_safety_checks: The safety checks reported by the API that have been
+     acknowledged by the developer.
+    :vartype acknowledged_safety_checks:
+     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
+    :ivar output: Required.
+    :vartype output: ~azure.ai.responses.server.sdk.models.models.ComputerScreenshotImage
+    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
+     ``incomplete``. Populated when input items are returned via API. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the computer tool call output. Always ``computer_call_output``. Required.
+    COMPUTER_CALL_OUTPUT."""
+    # NOTE: "id" is read-only here (server-populated), unlike most fields in
+    # this file — it is intentionally absent from the keyword-only __init__.
+    id: str = rest_field(visibility=["read"])
+    """The ID of the computer tool call output. Required."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the computer tool call that produced the output. Required."""
+    acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The safety checks reported by the API that have been acknowledged by the developer."""
+    output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
+    Populated when input items are returned via API. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        output: "_models.ComputerScreenshotImage",
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None,
+        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.COMPUTER_CALL_OUTPUT  # type: ignore
+
+
+class OutputItemCustomToolCall(OutputItem, discriminator="custom_tool_call"):
+    """Custom tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required.
+     CUSTOM_TOOL_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL
+    :ivar id: The unique ID of the custom tool call in the OpenAI platform.
+    :vartype id: str
+    :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required.
+    :vartype call_id: str
+    :ivar name: The name of the custom tool being called. Required.
+    :vartype name: str
+    :ivar input: The input for the custom tool call generated by the model. Required.
+    :vartype input: str
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL."""
+    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the custom tool call in the OpenAI platform."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """An identifier used to map this custom tool call to a tool call output. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the custom tool being called. Required."""
+    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The input for the custom tool call generated by the model. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        input: str,
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        id: Optional[str] = None,  # pylint: disable=redefined-builtin
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.CUSTOM_TOOL_CALL  # type: ignore
+
+
+class OutputItemCustomToolCallOutput(OutputItem, discriminator="custom_tool_call_output"):
+    """Custom tool call output.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``.
+     Required. CUSTOM_TOOL_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL_OUTPUT
+    :ivar id: The unique ID of the custom tool call output in the OpenAI platform.
+    :vartype id: str
+    :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call.
+     Required.
+    :vartype call_id: str
+    :ivar output: The output from the custom tool call generated by your code. Can be a string or
+     a list of output content. Required. Is either a str type or a
+     [FunctionAndCustomToolCallOutput] type.
+    :vartype output: str or
+     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.
+    CUSTOM_TOOL_CALL_OUTPUT."""
+    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the custom tool call output in the OpenAI platform."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The call ID, used to map this custom tool call output to a custom tool call. Required."""
+    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the custom tool call generated by your code. Can be a string or an list of
+    output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        id: Optional[str] = None,  # pylint: disable=redefined-builtin
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.CUSTOM_TOOL_CALL_OUTPUT  # type: ignore
+
+
+class OutputItemFileSearchToolCall(OutputItem, discriminator="file_search_call"):
+    """File search tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar id: The unique ID of the file search tool call. Required.
+    :vartype id: str
+    :ivar type: The type of the file search tool call. Always ``file_search_call``. Required.
+     FILE_SEARCH_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH_CALL
+    :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``,
+     ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types:
+     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"],
+     Literal["failed"]
+    :vartype status: str
+    :ivar queries: The queries used to search for files. Required.
+    :vartype queries: list[str]
+    :ivar results: The results of the file search tool call, when available.
+    :vartype results: list[~azure.ai.responses.server.sdk.models.models.FileSearchToolCallResults]
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the file search tool call. Required."""
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL."""
+    status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the file search tool call. One of ``in_progress``, ``searching``, ``incomplete``
+    or ``failed``,. Required. Is one of the following types: Literal[\"in_progress\"],
+    Literal[\"searching\"], Literal[\"completed\"], Literal[\"incomplete\"], Literal[\"failed\"]"""
+    queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The queries used to search for files. Required."""
+    results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
+        queries: list[str],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        results: Optional[list["_models.FileSearchToolCallResults"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.FILE_SEARCH_CALL  # type: ignore
+
+
+class OutputItemFunctionShellCall(OutputItem, discriminator="shell_call"):
+    """Shell tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL
+    :ivar id: The unique ID of the shell tool call. Populated when this item is returned via API.
+     Required.
+    :vartype id: str
+    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar action: The shell commands and limits that describe how to run the tool call. Required.
+    :vartype action: ~azure.ai.responses.server.sdk.models.models.FunctionShellAction
+    :ivar status: The status of the shell call. One of ``in_progress``, ``completed``, or
+     ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.LocalShellCallStatus
+    :ivar environment: Required.
+    :vartype environment: ~azure.ai.responses.server.sdk.models.models.FunctionShellCallEnvironment
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the item. Always ``shell_call``. Required. SHELL_CALL."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the shell tool call. Populated when this item is returned via API. Required."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the shell tool call generated by the model. Required."""
+    action: "_models.FunctionShellAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The shell commands and limits that describe how to run the tool call. Required."""
+    status: Union[str, "_models.LocalShellCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the shell call. One of ``in_progress``, ``completed``, or ``incomplete``.
+    Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
+    environment: "_models.FunctionShellCallEnvironment" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        call_id: str,
+        action: "_models.FunctionShellAction",
+        status: Union[str, "_models.LocalShellCallStatus"],
+        environment: "_models.FunctionShellCallEnvironment",
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.SHELL_CALL  # type: ignore
+
+
+class OutputItemFunctionShellCallOutput(OutputItem, discriminator="shell_call_output"):
+    """Shell call output.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the shell call output. Always ``shell_call_output``. Required.
+     SHELL_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL_OUTPUT
+    :ivar id: The unique ID of the shell call output. Populated when this item is returned via API.
+     Required.
+    :vartype id: str
+    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar status: The status of the shell call output. One of ``in_progress``, ``completed``, or
+     ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
+    :vartype status: str or
+     ~azure.ai.responses.server.sdk.models.models.LocalShellCallOutputStatusEnum
+    :ivar output: An array of shell call output contents. Required.
+    :vartype output:
+     list[~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputContent]
+    :ivar max_output_length: Required.
+    :vartype max_output_length: int
+    """
+
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the shell call output. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the shell call output. Populated when this item is returned via API. Required."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the shell tool call generated by the model. Required."""
+    status: Union[str, "_models.LocalShellCallOutputStatusEnum"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the shell call output. One of ``in_progress``, ``completed``, or ``incomplete``.
+    Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
+    output: list["_models.FunctionShellCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """An array of shell call output contents. Required."""
+    max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        call_id: str,
+        status: Union[str, "_models.LocalShellCallOutputStatusEnum"],
+        output: list["_models.FunctionShellCallOutputContent"],
+        max_output_length: int,
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.SHELL_CALL_OUTPUT  # type: ignore
+
+
+class OutputItemFunctionToolCall(OutputItem, discriminator="function_call"):
+    """Function tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar id: The unique ID of the function tool call. Required.
+    :vartype id: str
+    :ivar type: The type of the function tool call. Always ``function_call``. Required.
+     FUNCTION_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL
+    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the function to run. Required.
+    :vartype name: str
+    :ivar arguments: A JSON string of the arguments to pass to the function. Required.
+    :vartype arguments: str
+    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+     Populated when items are returned via API. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    # NOTE: "id" is read-only here (server-populated), unlike most fields in
+    # this file — it is intentionally absent from the keyword-only __init__.
+    id: str = rest_field(visibility=["read"])
+    """The unique ID of the function tool call. Required."""
+    # Narrows the base OutputItem "type" discriminator to this variant; the
+    # "type: ignore" silences the resulting override-incompatibility warning.
+    type: Literal[OutputItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the function tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the function to run. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the function. Required."""
+    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+    Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        arguments: str,
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so instances built from kwargs or a raw mapping
+        # always serialize with the correct "type" value.
+        self.type = OutputItemType.FUNCTION_CALL  # type: ignore
+
+
class OutputItemFunctionToolCallOutput(OutputItem, discriminator="function_call_output"):
    """Function tool call output.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: The unique ID of the function tool call output. Populated when this item is returned
     via API. Required.
    :vartype id: str
    :ivar type: The type of the function tool call output. Always ``function_call_output``.
     Required. FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL_OUTPUT
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the function call generated by your code. Can be a string or a
     list of output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput]
     type.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    # ``id`` is populated by the service when the item is returned; read-only visibility,
    # hence absent from the keyword-only ``__init__`` overload below.
    id: str = rest_field(visibility=["read"])
    """The unique ID of the function tool call output. Populated when this item is returned via API.
    Required."""
    type: Literal[OutputItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call output. Always ``function_call_output``. Required.
    FUNCTION_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the function call generated by your code. Can be a string or a list of output
    content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class OutputItemImageGenToolCall(OutputItem, discriminator="image_generation_call"):
    """Image generation call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the image generation call. Always ``image_generation_call``. Required.
     IMAGE_GENERATION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION_CALL
    :ivar id: The unique ID of the image generation call. Required.
    :vartype id: str
    :ivar status: The status of the image generation call. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"]
    :vartype status: str
    :ivar result: Required.
    :vartype result: str
    """

    type: Literal[OutputItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the image generation call. Always ``image_generation_call``. Required.
    IMAGE_GENERATION_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the image generation call. Required."""
    status: Literal["in_progress", "completed", "generating", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the image generation call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]"""
    # NOTE(review): the service spec provides no description for ``result`` beyond "Required";
    # presumably the generated image payload — confirm format/encoding against the REST spec.
    result: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "generating", "failed"],
        result: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.IMAGE_GENERATION_CALL  # type: ignore
+
+
class OutputItemLocalShellToolCall(OutputItem, discriminator="local_shell_call"):
    """Local shell call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the local shell call. Always ``local_shell_call``. Required.
     LOCAL_SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL
    :ivar id: The unique ID of the local shell call. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar action: The shell action to execute. Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.LocalShellExecAction
    :ivar status: The status of the local shell call. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[OutputItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the local shell call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.LocalShellExecAction",
        status: Literal["in_progress", "completed", "incomplete"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.LOCAL_SHELL_CALL  # type: ignore
+
+
class OutputItemLocalShellToolCallOutput(OutputItem, discriminator="local_shell_call_output"):
    """Local shell call output.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``.
     Required. LOCAL_SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL_OUTPUT
    :ivar id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype id: str
    :ivar output: A JSON string of the output of the local shell tool call. Required.
    :vartype output: str
    :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"],
     Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[OutputItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.
    LOCAL_SHELL_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    output: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the output of the local shell tool call. Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"],
    Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        output: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.LOCAL_SHELL_CALL_OUTPUT  # type: ignore
+
+
class OutputItemMcpApprovalRequest(OutputItem, discriminator="mcp_approval_request"):
    """MCP approval request.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_approval_request``. Required.
     MCP_APPROVAL_REQUEST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_REQUEST
    :ivar id: The unique ID of the approval request. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server making the request. Required.
    :vartype server_label: str
    :ivar name: The name of the tool to run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of arguments for the tool. Required.
    :vartype arguments: str
    """

    type: Literal[OutputItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the approval request. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server making the request. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool to run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of arguments for the tool. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.MCP_APPROVAL_REQUEST  # type: ignore
+
+
class OutputItemMcpApprovalResponseResource(OutputItem, discriminator="mcp_approval_response"):
    """MCP approval response.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_approval_response``. Required.
     MCP_APPROVAL_RESPONSE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_RESPONSE
    :ivar id: The unique ID of the approval response. Required.
    :vartype id: str
    :ivar approval_request_id: The ID of the approval request being answered. Required.
    :vartype approval_request_id: str
    :ivar approve: Whether the request was approved. Required.
    :vartype approve: bool
    :ivar reason: Optional free-text note accompanying the decision (no description in the
     service spec).
    :vartype reason: str
    """

    type: Literal[OutputItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the approval response. Required."""
    approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the approval request being answered. Required."""
    approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Whether the request was approved. Required."""
    # Optional; the service spec provides no description for this field.
    reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        approval_request_id: str,
        approve: bool,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        reason: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.MCP_APPROVAL_RESPONSE  # type: ignore
+
+
class OutputItemMcpListTools(OutputItem, discriminator="mcp_list_tools"):
    """MCP list tools.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_LIST_TOOLS
    :ivar id: The unique ID of the list. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server. Required.
    :vartype server_label: str
    :ivar tools: The tools available on the server. Required.
    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.MCPListToolsTool]
    :ivar error: Optional MCP error details (no description in the service spec).
    :vartype error: ~azure.ai.responses.server.sdk.models.models.RealtimeMCPError
    """

    type: Literal[OutputItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the list. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server. Required."""
    tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The tools available on the server. Required."""
    # Optional; the service spec provides no description for this field.
    error: Optional["_models.RealtimeMCPError"] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        tools: list["_models.MCPListToolsTool"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        error: Optional["_models.RealtimeMCPError"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.MCP_LIST_TOOLS  # type: ignore
+
+
class OutputItemMcpToolCall(OutputItem, discriminator="mcp_call"):
    """MCP tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_CALL
    :ivar id: The unique ID of the tool call. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server running the tool. Required.
    :vartype server_label: str
    :ivar name: The name of the tool that was run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments passed to the tool. Required.
    :vartype arguments: str
    :ivar output:
    :vartype output: str
    :ivar error:
    :vartype error: dict[str, any]
    :ivar status: The status of the tool call. One of ``in_progress``, ``completed``,
     ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed",
     "incomplete", "calling", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MCPToolCallStatus
    :ivar approval_request_id:
    :vartype approval_request_id: str
    """

    type: Literal[OutputItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_call``. Required. MCP_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server running the tool. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool that was run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments passed to the tool. Required."""
    # The optional fields below carry no descriptions in the service spec.
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    error: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``,
    ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\",
    \"calling\", and \"failed\"."""
    approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional[str] = None,
        error: Optional[dict[str, Any]] = None,
        status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None,
        approval_request_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.MCP_CALL  # type: ignore
+
+
class OutputItemMessage(OutputItem, discriminator="message"):
    """Message.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the message. Always set to ``message``. Required. MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MESSAGE
    :ivar id: The unique ID of the message. Required.
    :vartype id: str
    :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Known values are: "in_progress",
     "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MessageStatus
    :ivar role: The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``,
     ``critic``, ``discriminator``, ``developer``, or ``tool``. Required. Known values are:
     "unknown", "user", "assistant", "system", "critic", "discriminator", "developer", and "tool".
    :vartype role: str or ~azure.ai.responses.server.sdk.models.models.MessageRole
    :ivar content: The content of the message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.MessageContent]
    """

    type: Literal[OutputItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the message. Always set to ``message``. Required. MESSAGE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the message. Required."""
    status: Union[str, "_models.MessageStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when
    items are returned via API. Required. Known values are: \"in_progress\", \"completed\", and
    \"incomplete\"."""
    role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``, ``critic``,
    ``discriminator``, ``developer``, or ``tool``. Required. Known values are: \"unknown\",
    \"user\", \"assistant\", \"system\", \"critic\", \"discriminator\", \"developer\", and
    \"tool\"."""
    content: list["_models.MessageContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the message. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Union[str, "_models.MessageStatus"],
        role: Union[str, "_models.MessageRole"],
        content: list["_models.MessageContent"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance reports the correct item type,
        # regardless of the values supplied via args/kwargs.
        self.type = OutputItemType.MESSAGE  # type: ignore
+
+
class OutputItemOutputMessage(OutputItem, discriminator="output_message"):
    """Output message.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: The unique ID of the output message. Required.
    :vartype id: str
    :ivar type: The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_MESSAGE
    :ivar role: The role of the output message. Always ``assistant``. Required. Default value is
     "assistant".
    :vartype role: str
    :ivar content: The content of the output message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.OutputMessageContent]
    :ivar phase: Known values are: "commentary" and "final_answer".
    :vartype phase: str or ~azure.ai.responses.server.sdk.models.models.MessagePhase
    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
     ``incomplete``. Populated when input items are returned via API. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the output message. Required."""
    type: Literal[OutputItemType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE."""
    # ``role`` is constant for output messages; it is force-set in ``__init__`` and is
    # therefore not accepted as a keyword argument in the overload below.
    role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\"."""
    content: list["_models.OutputMessageContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The content of the output message. Required."""
    phase: Optional[Union[str, "_models.MessagePhase"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"commentary\" and \"final_answer\"."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
    Populated when input items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        content: list["_models.OutputMessageContent"],
        status: Literal["in_progress", "completed", "incomplete"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        phase: Optional[Union[str, "_models.MessagePhase"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator, and fix role to "assistant" — output messages always
        # carry the assistant role, overriding any value provided by the caller.
        self.type = OutputItemType.OUTPUT_MESSAGE  # type: ignore
        self.role: Literal["assistant"] = "assistant"
+
+
class OutputItemReasoningItem(OutputItem, discriminator="reasoning"):
    """Reasoning output item: the model's reasoning summary plus optional reasoning text.

    Deserialized from payloads whose ``type`` discriminator equals ``"reasoning"``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the object. Always ``reasoning``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputItemType
    :ivar id: The unique identifier of the reasoning content. Required.
    :vartype id: str
    :ivar encrypted_content: Opaque encrypted form of the reasoning content, when returned.
    :vartype encrypted_content: str
    :ivar summary: Reasoning summary content. Required.
    :vartype summary: list[~azure.ai.responses.server.sdk.models.models.SummaryTextContent]
    :ivar content: Reasoning text content.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.ReasoningTextContent]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API.
    :vartype status: str
    """

    type: Literal[OutputItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the object. Always ``reasoning``. Required."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the reasoning content. Required."""
    # NOTE(review): the spec carries no description for this field; the name suggests an
    # opaque encrypted representation of the reasoning content — confirm against service docs.
    encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    summary: list["_models.SummaryTextContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Reasoning summary content. Required."""
    content: Optional[list["_models.ReasoningTextContent"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Reasoning text content."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        summary: list["_models.SummaryTextContent"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        encrypted_content: Optional[str] = None,
        content: Optional[list["_models.ReasoningTextContent"]] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="reasoning".
        self.type = OutputItemType.REASONING  # type: ignore
+
+
class OutputItemWebSearchToolCall(OutputItem, discriminator="web_search_call"):
    """Web search tool call output item.

    Deserialized from payloads whose ``type`` discriminator equals ``"web_search_call"``.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: The unique ID of the web search tool call. Required.
    :vartype id: str
    :ivar type: The type of the web search tool call. Always ``web_search_call``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputItemType
    :ivar status: The status of the web search tool call. One of ``in_progress``, ``searching``,
     ``completed``, or ``failed``. Required.
    :vartype status: str
    :ivar action: An object describing the specific action taken in this web search call. Includes
     details on how the model used the web (search, open_page, find_in_page). Required. Is one of
     the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind
    :vartype action: ~azure.ai.responses.server.sdk.models.models.WebSearchActionSearch or
     ~azure.ai.responses.server.sdk.models.models.WebSearchActionOpenPage or
     ~azure.ai.responses.server.sdk.models.models.WebSearchActionFind
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the web search tool call. Required."""
    type: Literal[OutputItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the web search tool call. Always ``web_search_call``. Required."""
    status: Literal["in_progress", "searching", "completed", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the web search tool call. One of ``in_progress``, ``searching``,
    ``completed``, or ``failed``. Required."""
    action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """An object describing the specific action taken in this web search call. Includes details on how
    the model used the web (search, open_page, find_in_page). Required. Is one of the following
    types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "searching", "completed", "failed"],
        action: Union[
            "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"
        ],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="web_search_call".
        self.type = OutputItemType.WEB_SEARCH_CALL  # type: ignore
+
+
class OutputMessageContent(_Model):
    """Base type for the content parts of an output message, discriminated by ``type``.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    OutputMessageContentOutputTextContent, OutputMessageContentRefusalContent

    :ivar type: Required. Known values are: "output_text" and "refusal".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputMessageContentType
    """

    # Discriminator registry (value -> subclass); populated as subclasses declare
    # themselves with the ``discriminator="..."`` class keyword.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"output_text\" and \"refusal\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OutputMessageContentOutputTextContent(OutputMessageContent, discriminator="output_text"):
    """Output text content part of an output message.

    :ivar type: The type of the output text. Always ``output_text``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputMessageContentType
    :ivar text: The text output from the model. Required.
    :vartype text: str
    :ivar annotations: The annotations of the text output. Required.
    :vartype annotations: list[~azure.ai.responses.server.sdk.models.models.Annotation]
    :ivar logprobs: Log-probability details for the text output (see LogProb). Required.
    :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.LogProb]
    """

    type: Literal[OutputMessageContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the output text. Always ``output_text``. Required."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text output from the model. Required."""
    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The annotations of the text output. Required."""
    # NOTE(review): no description in the spec; presumably per-token log probabilities — confirm.
    logprobs: list["_models.LogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
        annotations: list["_models.Annotation"],
        logprobs: list["_models.LogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="output_text".
        self.type = OutputMessageContentType.OUTPUT_TEXT  # type: ignore
+
+
class OutputMessageContentRefusalContent(OutputMessageContent, discriminator="refusal"):
    """Refusal content part of an output message.

    :ivar type: The type of the refusal. Always ``refusal``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputMessageContentType
    :ivar refusal: The refusal explanation from the model. Required.
    :vartype refusal: str
    """

    type: Literal[OutputMessageContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the refusal. Always ``refusal``. Required."""
    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The refusal explanation from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        refusal: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="refusal".
        self.type = OutputMessageContentType.REFUSAL  # type: ignore
+
+
class Prompt(_Model):
    """Reference to a prompt template and its variables.

    (The upstream "Learn more" link was lost during generation — the generated RST
    link target is empty; restore the URL on regeneration.)

    :ivar id: The unique identifier of the prompt template to use. Required.
    :vartype id: str
    :ivar version: Version of the prompt template to use, when omitted the service picks a
     default — TODO confirm default-selection behavior against service docs.
    :vartype version: str
    :ivar variables: Values substituted into the prompt template's variables.
    :vartype variables: ~azure.ai.responses.server.sdk.models.models.ResponsePromptVariables
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the prompt template to use. Required."""
    # No description in the spec; see the class docstring for the hedged semantics.
    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    variables: Optional["_models.ResponsePromptVariables"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        version: Optional[str] = None,
        variables: Optional["_models.ResponsePromptVariables"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class RankingOptions(_Model):
    """Options controlling how file-search results are ranked and filtered.

    :ivar ranker: The ranker to use for the file search. Known values are: "auto" and
     "default-2024-11-15".
    :vartype ranker: str or ~azure.ai.responses.server.sdk.models.models.RankerVersionType
    :ivar score_threshold: The score threshold for the file search, a number between 0 and 1.
     Numbers closer to 1 will attempt to return only the most relevant results, but may return fewer
     results.
    :vartype score_threshold: float
    :ivar hybrid_search: Weights that control how reciprocal rank fusion balances semantic
     embedding matches versus sparse keyword matches when hybrid search is enabled.
    :vartype hybrid_search: ~azure.ai.responses.server.sdk.models.models.HybridSearchOptions
    """

    ranker: Optional[Union[str, "_models.RankerVersionType"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The ranker to use for the file search. Known values are: \"auto\" and \"default-2024-11-15\"."""
    # BUGFIX: the service defines this as a JSON number in [0, 1] (see its own description
    # above); typing it as ``int`` would reject or truncate fractional thresholds such as
    # 0.5, so the field is ``float`` (ints remain acceptable per PEP 484).
    score_threshold: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will
    attempt to return only the most relevant results, but may return fewer results."""
    hybrid_search: Optional["_models.HybridSearchOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Weights that control how reciprocal rank fusion balances semantic embedding matches versus
    sparse keyword matches when hybrid search is enabled."""

    @overload
    def __init__(
        self,
        *,
        ranker: Optional[Union[str, "_models.RankerVersionType"]] = None,
        score_threshold: Optional[float] = None,
        hybrid_search: Optional["_models.HybridSearchOptions"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class RealtimeMCPError(_Model):
    """Base type for realtime MCP errors, discriminated by ``type``.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    RealtimeMCPHTTPError, RealtimeMCPProtocolError, RealtimeMCPToolExecutionError

    :ivar type: Required. Known values are: "protocol_error", "tool_execution_error", and
     "http_error".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RealtimeMcpErrorType
    """

    # Discriminator registry (value -> subclass); populated as subclasses declare
    # themselves with the ``discriminator="..."`` class keyword.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"protocol_error\", \"tool_execution_error\", and \"http_error\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class RealtimeMCPHTTPError(RealtimeMCPError, discriminator="http_error"):
    """Realtime MCP HTTP error: an error surfaced from an HTTP-level failure.

    :ivar type: Always ``http_error``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RealtimeMcpErrorType
    :ivar code: Numeric error code. Required.
    :vartype code: int
    :ivar message: Human-readable error message. Required.
    :vartype message: str
    """

    type: Literal[RealtimeMcpErrorType.HTTP_ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Always ``http_error``. Required."""
    # NOTE(review): no description in the spec; for this error type, presumably the HTTP
    # status code of the failed request — confirm against service docs.
    code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        code: int,
        message: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="http_error".
        self.type = RealtimeMcpErrorType.HTTP_ERROR  # type: ignore
+
+
class RealtimeMCPProtocolError(RealtimeMCPError, discriminator="protocol_error"):
    """Realtime MCP protocol error: an error raised at the MCP protocol level.

    :ivar type: Always ``protocol_error``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RealtimeMcpErrorType
    :ivar code: Numeric error code. Required.
    :vartype code: int
    :ivar message: Human-readable error message. Required.
    :vartype message: str
    """

    type: Literal[RealtimeMcpErrorType.PROTOCOL_ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Always ``protocol_error``. Required."""
    # NOTE(review): no description in the spec; presumably a JSON-RPC/MCP protocol error
    # code — confirm against the MCP specification.
    code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        code: int,
        message: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="protocol_error".
        self.type = RealtimeMcpErrorType.PROTOCOL_ERROR  # type: ignore
+
+
class RealtimeMCPToolExecutionError(RealtimeMCPError, discriminator="tool_execution_error"):
    """Realtime MCP tool execution error: the tool itself failed while executing.

    :ivar type: Always ``tool_execution_error``. Required.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RealtimeMcpErrorType
    :ivar message: Human-readable error message. Required.
    :vartype message: str
    """

    type: Literal[RealtimeMcpErrorType.TOOL_EXECUTION_ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Always ``tool_execution_error``. Required."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        message: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always serialize with type="tool_execution_error".
        self.type = RealtimeMcpErrorType.TOOL_EXECUTION_ERROR  # type: ignore
+
+
class Reasoning(_Model):
    """Configuration of the model's reasoning behavior.

    :ivar effort: Reasoning effort level. One of ``none``, ``minimal``, ``low``, ``medium``,
     ``high``, or ``xhigh``.
    :vartype effort: str
    :ivar summary: Reasoning-summary setting. One of ``auto``, ``concise``, or ``detailed``.
    :vartype summary: str
    :ivar generate_summary: Same allowed values as ``summary``: one of ``auto``, ``concise``,
     or ``detailed``.
    :vartype generate_summary: str
    """

    effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Reasoning effort level: one of \"none\", \"minimal\", \"low\", \"medium\", \"high\",
    or \"xhigh\"."""
    summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """One of \"auto\", \"concise\", or \"detailed\"."""
    # NOTE(review): accepts the same values as ``summary`` — looks like a legacy alias of
    # that field; confirm which one the service prefers before documenting further.
    generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """One of \"auto\", \"concise\", or \"detailed\"."""

    @overload
    def __init__(
        self,
        *,
        effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] = None,
        summary: Optional[Literal["auto", "concise", "detailed"]] = None,
        generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ReasoningTextContent(_Model):
    """Reasoning text content part.

    :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required. Default value
     is "reasoning_text".
    :vartype type: str
    :ivar text: The reasoning text from the model. Required.
    :vartype text: str
    """

    type: Literal["reasoning_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the reasoning text. Always ``reasoning_text``. Required. Default value is
    \"reasoning_text\"."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The reasoning text from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a fixed constant for this model, not caller-supplied.
        self.type: Literal["reasoning_text"] = "reasoning_text"
+
+
+class Response(_Model):
+ """The response object.
+
+ :ivar metadata:
+ :vartype metadata: ~azure.ai.responses.server.sdk.models.models.Metadata
+ :ivar top_logprobs:
+ :vartype top_logprobs: int
+ :ivar temperature:
+ :vartype temperature: int
+ :ivar top_p:
+ :vartype top_p: int
+ :ivar user: This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
+ ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
+ end-users. Used to boost cache hit rates by better bucketing similar requests and to help
+ OpenAI detect and prevent abuse. `Learn more
+ `_.
+ :vartype user: str
+ :ivar safety_identifier: A stable identifier used to help detect users of your application that
+ may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies
+ each user, with a maximum length of 64 characters. We recommend hashing their username or email
+ address, in order to avoid sending us any identifying information. `Learn more
+ `_.
+ :vartype safety_identifier: str
+ :ivar prompt_cache_key: Used by OpenAI to cache responses for similar requests to optimize your
+ cache hit rates. Replaces the ``user`` field. `Learn more `_.
+ :vartype prompt_cache_key: str
+ :ivar service_tier: Is one of the following types: Literal["auto"], Literal["default"],
+ Literal["flex"], Literal["scale"], Literal["priority"]
+ :vartype service_tier: str or str or str or str or str
+ :ivar prompt_cache_retention: Is either a Literal["in-memory"] type or a Literal["24h"] type.
+ :vartype prompt_cache_retention: str or str
+ :ivar previous_response_id:
+ :vartype previous_response_id: str
+ :ivar model: The model deployment to use for the creation of this response.
+ :vartype model: str
+ :ivar reasoning:
+ :vartype reasoning: ~azure.ai.responses.server.sdk.models.models.Reasoning
+ :ivar background:
+ :vartype background: bool
+ :ivar max_output_tokens:
+ :vartype max_output_tokens: int
+ :ivar max_tool_calls:
+ :vartype max_tool_calls: int
+ :ivar text:
+ :vartype text: ~azure.ai.responses.server.sdk.models.models.ResponseTextParam
+ :ivar tools:
+ :vartype tools: list[~azure.ai.responses.server.sdk.models.models.Tool]
+ :ivar tool_choice: Is either a Union[str, "_models.ToolChoiceOptions"] type or a
+ ToolChoiceParam type.
+ :vartype tool_choice: str or ~azure.ai.responses.server.sdk.models.models.ToolChoiceOptions or
+ ~azure.ai.responses.server.sdk.models.models.ToolChoiceParam
+ :ivar prompt:
+ :vartype prompt: ~azure.ai.responses.server.sdk.models.models.Prompt
+ :ivar truncation: Is either a Literal["auto"] type or a Literal["disabled"] type.
+ :vartype truncation: str or str
+ :ivar id: Unique identifier for this Response. Required.
+ :vartype id: str
+ :ivar object: The object type of this resource - always set to ``response``. Required. Default
+ value is "response".
+ :vartype object: str
+ :ivar status: The status of the response generation. One of ``completed``, ``failed``,
+ ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types:
+ Literal["completed"], Literal["failed"], Literal["in_progress"], Literal["cancelled"],
+ Literal["queued"], Literal["incomplete"]
+ :vartype status: str or str or str or str or str or str
+ :ivar created_at: Unix timestamp (in seconds) of when this Response was created. Required.
+ :vartype created_at: ~datetime.datetime
+ :ivar completed_at:
+ :vartype completed_at: ~datetime.datetime
+ :ivar error: Required.
+ :vartype error: ~azure.ai.responses.server.sdk.models.models.ResponseError
+ :ivar incomplete_details: Required.
+ :vartype incomplete_details:
+ ~azure.ai.responses.server.sdk.models.models.ResponseIncompleteDetails
+ :ivar output: An array of content items generated by the model.
+
+ * The length and order of items in the `output` array is dependent
+ on the model's response.
+ * Rather than accessing the first item in the `output` array and
+ assuming it's an `assistant` message with the content generated by
+ the model, you might consider using the `output_text` property where
+ supported in SDKs. Required.
+ :vartype output: list[~azure.ai.responses.server.sdk.models.models.OutputItem]
+ :ivar instructions: Required. Is either a str type or a [Item] type.
+ :vartype instructions: str or list[~azure.ai.responses.server.sdk.models.models.Item]
+ :ivar output_text:
+ :vartype output_text: str
+ :ivar usage:
+ :vartype usage: ~azure.ai.responses.server.sdk.models.models.ResponseUsage
+ :ivar parallel_tool_calls: Whether to allow the model to run tool calls in parallel. Required.
+ :vartype parallel_tool_calls: bool
+ :ivar conversation:
+ :vartype conversation: ~azure.ai.responses.server.sdk.models.models.ConversationReference
+ :ivar agent: (Deprecated) Use agent_reference instead. The agent used for this response.
+ :vartype agent: ~azure.ai.responses.server.sdk.models.models.AgentId
+ :ivar agent_reference: The agent used for this response. Required.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar agent_session_id: The session identifier for this response. Currently only relevant for
+ hosted agents. Always returned for hosted agents — either the caller-provided value, the
+ auto-derived value, or an auto-generated UUID. Use for session-scoped operations and to
+ maintain sandbox affinity in follow-up calls.
+ :vartype agent_session_id: str
+ """
+
+ metadata: Optional["_models.Metadata"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ temperature: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ top_p: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
+ ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
+ end-users. Used to boost cache hit rates by better bucketing similar requests and to help
+ OpenAI detect and prevent abuse. `Learn more
+ `_."""
+ safety_identifier: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A stable identifier used to help detect users of your application that may be violating
+ OpenAI's usage policies. The IDs should be a string that uniquely identifies each user, with a
+ maximum length of 64 characters. We recommend hashing their username or email address, in order
+ to avoid sending us any identifying information. `Learn more
+ `_."""
+ prompt_cache_key: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Used by OpenAI to cache responses for similar requests to optimize your cache hit rates.
+ Replaces the ``user`` field. `Learn more `_."""
+ service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is one of the following types: Literal[\"auto\"], Literal[\"default\"], Literal[\"flex\"],
+ Literal[\"scale\"], Literal[\"priority\"]"""
+ prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is either a Literal[\"in-memory\"] type or a Literal[\"24h\"] type."""
+ previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The model deployment to use for the creation of this response."""
+ reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ text: Optional["_models.ResponseTextParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a ToolChoiceParam type."""
+ prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ truncation: Optional[Literal["auto", "disabled"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is either a Literal[\"auto\"] type or a Literal[\"disabled\"] type."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Unique identifier for this Response. Required."""
+ object: Literal["response"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The object type of this resource - always set to ``response``. Required. Default value is
+ \"response\"."""
+ status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the response generation. One of ``completed``, ``failed``, ``in_progress``,
+ ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types:
+ Literal[\"completed\"], Literal[\"failed\"], Literal[\"in_progress\"], Literal[\"cancelled\"],
+ Literal[\"queued\"], Literal[\"incomplete\"]"""
+ created_at: datetime.datetime = rest_field(
+ visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
+ )
+ """Unix timestamp (in seconds) of when this Response was created. Required."""
+ completed_at: Optional[datetime.datetime] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
+ )
+ error: "_models.ResponseError" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ incomplete_details: "_models.ResponseIncompleteDetails" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required."""
+ output: list["_models.OutputItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """An array of content items generated by the model.
+
+ * The length and order of items in the `output` array is dependent
+ on the model's response.
+ * Rather than accessing the first item in the `output` array and
+ assuming it's an `assistant` message with the content generated by
+ the model, you might consider using the `output_text` property where
+ supported in SDKs. Required."""
+ instructions: Union[str, list["_models.Item"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required. Is either a str type or a [Item] type."""
+ output_text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ usage: Optional["_models.ResponseUsage"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ parallel_tool_calls: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Whether to allow the model to run tool calls in parallel. Required."""
+ conversation: Optional["_models.ConversationReference"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """(Deprecated) Use agent_reference instead. The agent used for this response."""
+ agent_reference: "_models.AgentReference" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The agent used for this response. Required."""
+ agent_session_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The session identifier for this response. Currently only relevant for hosted agents. Always
+ returned for hosted agents — either the caller-provided value, the auto-derived value, or an
+ auto-generated UUID. Use for session-scoped operations and to maintain sandbox affinity in
+ follow-up calls."""
+
+ @overload
+ def __init__( # pylint: disable=too-many-locals
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ created_at: datetime.datetime,
+ error: "_models.ResponseError",
+ incomplete_details: "_models.ResponseIncompleteDetails",
+ output: list["_models.OutputItem"],
+ instructions: Union[str, list["_models.Item"]],
+ parallel_tool_calls: bool,
+ agent_reference: "_models.AgentReference",
+ metadata: Optional["_models.Metadata"] = None,
+ top_logprobs: Optional[int] = None,
+ temperature: Optional[int] = None,
+ top_p: Optional[int] = None,
+ user: Optional[str] = None,
+ safety_identifier: Optional[str] = None,
+ prompt_cache_key: Optional[str] = None,
+ service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = None,
+ prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = None,
+ previous_response_id: Optional[str] = None,
+ model: Optional[str] = None,
+ reasoning: Optional["_models.Reasoning"] = None,
+ background: Optional[bool] = None,
+ max_output_tokens: Optional[int] = None,
+ max_tool_calls: Optional[int] = None,
+ text: Optional["_models.ResponseTextParam"] = None,
+ tools: Optional[list["_models.Tool"]] = None,
+ tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = None,
+ prompt: Optional["_models.Prompt"] = None,
+ truncation: Optional[Literal["auto", "disabled"]] = None,
+ status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = None,
+ completed_at: Optional[datetime.datetime] = None,
+ output_text: Optional[str] = None,
+ usage: Optional["_models.ResponseUsage"] = None,
+ conversation: Optional["_models.ConversationReference"] = None,
+ agent: Optional["_models.AgentId"] = None,
+ agent_session_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.object: Literal["response"] = "response"
+
+
class ResponseStreamEvent(_Model):
    """ResponseStreamEvent.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ResponseErrorEvent, ResponseAudioDeltaEvent, ResponseAudioDoneEvent,
    ResponseAudioTranscriptDeltaEvent, ResponseAudioTranscriptDoneEvent,
    ResponseCodeInterpreterCallCompletedEvent, ResponseCodeInterpreterCallInProgressEvent,
    ResponseCodeInterpreterCallInterpretingEvent, ResponseCodeInterpreterCallCodeDeltaEvent,
    ResponseCodeInterpreterCallCodeDoneEvent, ResponseCompletedEvent,
    ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, ResponseCreatedEvent,
    ResponseCustomToolCallInputDeltaEvent, ResponseCustomToolCallInputDoneEvent,
    ResponseFailedEvent, ResponseFileSearchCallCompletedEvent,
    ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent,
    ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent,
    ResponseImageGenCallCompletedEvent, ResponseImageGenCallGeneratingEvent,
    ResponseImageGenCallInProgressEvent, ResponseImageGenCallPartialImageEvent,
    ResponseInProgressEvent, ResponseIncompleteEvent, ResponseMCPCallCompletedEvent,
    ResponseMCPCallFailedEvent, ResponseMCPCallInProgressEvent, ResponseMCPCallArgumentsDeltaEvent,
    ResponseMCPCallArgumentsDoneEvent, ResponseMCPListToolsCompletedEvent,
    ResponseMCPListToolsFailedEvent, ResponseMCPListToolsInProgressEvent,
    ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent,
    ResponseOutputTextAnnotationAddedEvent, ResponseTextDeltaEvent, ResponseTextDoneEvent,
    ResponseQueuedEvent, ResponseReasoningSummaryPartAddedEvent,
    ResponseReasoningSummaryPartDoneEvent, ResponseReasoningSummaryTextDeltaEvent,
    ResponseReasoningSummaryTextDoneEvent, ResponseReasoningTextDeltaEvent,
    ResponseReasoningTextDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent,
    ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent,
    ResponseWebSearchCallSearchingEvent

    :ivar type: Required. Known values are: "response.audio.delta", "response.audio.done",
     "response.audio.transcript.delta", "response.audio.transcript.done",
     "response.code_interpreter_call_code.delta", "response.code_interpreter_call_code.done",
     "response.code_interpreter_call.completed", "response.code_interpreter_call.in_progress",
     "response.code_interpreter_call.interpreting", "response.completed",
     "response.content_part.added", "response.content_part.done", "response.created", "error",
     "response.file_search_call.completed", "response.file_search_call.in_progress",
     "response.file_search_call.searching", "response.function_call_arguments.delta",
     "response.function_call_arguments.done", "response.in_progress", "response.failed",
     "response.incomplete", "response.output_item.added", "response.output_item.done",
     "response.reasoning_summary_part.added", "response.reasoning_summary_part.done",
     "response.reasoning_summary_text.delta", "response.reasoning_summary_text.done",
     "response.reasoning_text.delta", "response.reasoning_text.done", "response.refusal.delta",
     "response.refusal.done", "response.output_text.delta", "response.output_text.done",
     "response.web_search_call.completed", "response.web_search_call.in_progress",
     "response.web_search_call.searching", "response.image_generation_call.completed",
     "response.image_generation_call.generating", "response.image_generation_call.in_progress",
     "response.image_generation_call.partial_image", "response.mcp_call_arguments.delta",
     "response.mcp_call_arguments.done", "response.mcp_call.completed", "response.mcp_call.failed",
     "response.mcp_call.in_progress", "response.mcp_list_tools.completed",
     "response.mcp_list_tools.failed", "response.mcp_list_tools.in_progress",
     "response.output_text.annotation.added", "response.queued",
     "response.custom_tool_call_input.delta", and "response.custom_tool_call_input.done".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ResponseStreamEventType
    :ivar sequence_number: Required.
    :vartype sequence_number: int
    """

    # Registry from wire discriminator value ("type") to the matching sub-class.
    # NOTE(review): presumably filled in by the model framework when sub-classes pass
    # ``discriminator=...`` in their class statement — not populated in this file; confirm
    # against the ``_Model`` base implementation.
    __mapping__: dict[str, _Model] = {}
    # Polymorphic discriminator: the base declares it as plain ``str``; each sub-class
    # narrows it to a ``Literal`` of its own event-type string.
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"response.audio.delta\", \"response.audio.done\",
    \"response.audio.transcript.delta\", \"response.audio.transcript.done\",
    \"response.code_interpreter_call_code.delta\", \"response.code_interpreter_call_code.done\",
    \"response.code_interpreter_call.completed\", \"response.code_interpreter_call.in_progress\",
    \"response.code_interpreter_call.interpreting\", \"response.completed\",
    \"response.content_part.added\", \"response.content_part.done\", \"response.created\",
    \"error\", \"response.file_search_call.completed\", \"response.file_search_call.in_progress\",
    \"response.file_search_call.searching\", \"response.function_call_arguments.delta\",
    \"response.function_call_arguments.done\", \"response.in_progress\", \"response.failed\",
    \"response.incomplete\", \"response.output_item.added\", \"response.output_item.done\",
    \"response.reasoning_summary_part.added\", \"response.reasoning_summary_part.done\",
    \"response.reasoning_summary_text.delta\", \"response.reasoning_summary_text.done\",
    \"response.reasoning_text.delta\", \"response.reasoning_text.done\",
    \"response.refusal.delta\", \"response.refusal.done\", \"response.output_text.delta\",
    \"response.output_text.done\", \"response.web_search_call.completed\",
    \"response.web_search_call.in_progress\", \"response.web_search_call.searching\",
    \"response.image_generation_call.completed\", \"response.image_generation_call.generating\",
    \"response.image_generation_call.in_progress\",
    \"response.image_generation_call.partial_image\", \"response.mcp_call_arguments.delta\",
    \"response.mcp_call_arguments.done\", \"response.mcp_call.completed\",
    \"response.mcp_call.failed\", \"response.mcp_call.in_progress\",
    \"response.mcp_list_tools.completed\", \"response.mcp_list_tools.failed\",
    \"response.mcp_list_tools.in_progress\", \"response.output_text.annotation.added\",
    \"response.queued\", \"response.custom_tool_call_input.delta\", and
    \"response.custom_tool_call_input.done\"."""
    sequence_number: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    # Typing-only overloads: construction is handled by the catch-all __init__ below,
    # which accepts either keyword arguments or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        type: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ResponseAudioDeltaEvent(ResponseStreamEvent, discriminator="response.audio.delta"):
    """Emitted when there is a partial audio response.

    :ivar type: The type of the event. Always ``response.audio.delta``. Required.
     RESPONSE_AUDIO_DELTA.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_DELTA
    :ivar sequence_number: A sequence number for this chunk of the stream response. Required.
    :vartype sequence_number: int
    :ivar delta: A chunk of Base64 encoded response audio bytes. Required.
    :vartype delta: bytes
    """

    # Discriminator narrowed to this event's literal; registered for polymorphic
    # deserialization via the ``discriminator`` class argument above.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.delta``. Required. RESPONSE_AUDIO_DELTA."""
    # format="base64": transported as a Base64 string on the wire, exposed as bytes here.
    delta: bytes = rest_field(visibility=["read", "create", "update", "delete", "query"], format="base64")
    """A chunk of Base64 encoded response audio bytes. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        delta: bytes,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_DELTA  # type: ignore
+
+
class ResponseAudioDoneEvent(ResponseStreamEvent, discriminator="response.audio.done"):
    """Emitted when the audio response is complete.

    :ivar type: The type of the event. Always ``response.audio.done``. Required.
     RESPONSE_AUDIO_DONE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_DONE
    :ivar sequence_number: The sequence number of the delta. Required.
    :vartype sequence_number: int
    """

    # Marker event: carries no payload beyond the inherited sequence_number.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.done``. Required. RESPONSE_AUDIO_DONE."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_DONE  # type: ignore
+
+
class ResponseAudioTranscriptDeltaEvent(ResponseStreamEvent, discriminator="response.audio.transcript.delta"):
    """Emitted when there is a partial transcript of audio.

    :ivar type: The type of the event. Always ``response.audio.transcript.delta``. Required.
     RESPONSE_AUDIO_TRANSCRIPT_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_TRANSCRIPT_DELTA
    :ivar delta: The partial transcript of the audio response. Required.
    :vartype delta: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator narrowed to this event's literal for polymorphic deserialization.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.transcript.delta``. Required.
    RESPONSE_AUDIO_TRANSCRIPT_DELTA."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The partial transcript of the audio response. Required."""

    @overload
    def __init__(
        self,
        *,
        delta: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DELTA  # type: ignore
+
+
class ResponseAudioTranscriptDoneEvent(ResponseStreamEvent, discriminator="response.audio.transcript.done"):
    """Emitted when the full audio transcript is completed.

    :ivar type: The type of the event. Always ``response.audio.transcript.done``. Required.
     RESPONSE_AUDIO_TRANSCRIPT_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_TRANSCRIPT_DONE
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Marker event: carries no payload beyond the inherited sequence_number.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.transcript.done``. Required.
    RESPONSE_AUDIO_TRANSCRIPT_DONE."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DONE  # type: ignore
+
+
class ResponseCodeInterpreterCallCodeDeltaEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call_code.delta"
):  # pylint: disable=name-too-long
    """Emitted when a partial code snippet is streamed by the code interpreter.

    :ivar type: The type of the event. Always ``response.code_interpreter_call_code.delta``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA
    :ivar output_index: The index of the output item in the response for which the code is being
     streamed. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar delta: The partial code snippet being streamed by the code interpreter. Required.
    :vartype delta: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # Discriminator narrowed to this event's literal for polymorphic deserialization.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call_code.delta``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code is being streamed. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The partial code snippet being streamed by the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        delta: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA  # type: ignore
+
+
class ResponseCodeInterpreterCallCodeDoneEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call_code.done"
):
    """Emitted when the code snippet is finalized by the code interpreter.

    :ivar type: The type of the event. Always ``response.code_interpreter_call_code.done``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE
    :ivar output_index: The index of the output item in the response for which the code is
     finalized. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar code: The final code snippet output by the code interpreter. Required.
    :vartype code: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # Terminal counterpart to the code-delta event: carries the complete snippet in ``code``.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call_code.done``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code is finalized. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The final code snippet output by the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        code: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE  # type: ignore
+
+
class ResponseCodeInterpreterCallCompletedEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.completed"
):  # pylint: disable=name-too-long
    """Emitted when the code interpreter call is completed.

    :ivar type: The type of the event. Always ``response.code_interpreter_call.completed``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED
    :ivar output_index: The index of the output item in the response for which the code interpreter
     call is completed. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # Lifecycle marker for a code-interpreter tool call; identifies the call via
    # output_index + item_id, carries no other payload.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.completed``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_COMPLETED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter call is completed.
    Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED  # type: ignore
+
+
class ResponseCodeInterpreterCallInProgressEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.in_progress"
):  # pylint: disable=name-too-long
    """Emitted when a code interpreter call is in progress.

    :ivar type: The type of the event. Always ``response.code_interpreter_call.in_progress``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS
    :ivar output_index: The index of the output item in the response for which the code interpreter
     call is in progress. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # Lifecycle marker for a code-interpreter tool call; identifies the call via
    # output_index + item_id, carries no other payload.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.in_progress``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter call is in
    progress. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS  # type: ignore
+
+
class ResponseCodeInterpreterCallInterpretingEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.interpreting"
):  # pylint: disable=name-too-long
    """Emitted when the code interpreter is actively interpreting the code snippet.

    :ivar type: The type of the event. Always ``response.code_interpreter_call.interpreting``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING
    :ivar output_index: The index of the output item in the response for which the code interpreter
     is interpreting code. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # Lifecycle marker for a code-interpreter tool call; identifies the call via
    # output_index + item_id, carries no other payload.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.interpreting``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter is interpreting
    code. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING  # type: ignore
+
+
class ResponseCompletedEvent(ResponseStreamEvent, discriminator="response.completed"):
    """Emitted when the model response is complete.

    :ivar type: The type of the event. Always ``response.completed``. Required. RESPONSE_COMPLETED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_COMPLETED
    :ivar response: Properties of the completed response. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    """

    # Terminal stream event: embeds the full final Response model in ``response``.
    type: Literal[ResponseStreamEventType.RESPONSE_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.completed``. Required. RESPONSE_COMPLETED."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Properties of the completed response. Required."""

    @overload
    def __init__(
        self,
        *,
        response: "_models.Response",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_COMPLETED  # type: ignore
+
+
class ResponseContentPartAddedEvent(ResponseStreamEvent, discriminator="response.content_part.added"):
    """Emitted when a new content part is added.

    :ivar type: The type of the event. Always ``response.content_part.added``. Required.
     RESPONSE_CONTENT_PART_ADDED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_CONTENT_PART_ADDED
    :ivar item_id: The ID of the output item that the content part was added to. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that the content part was added to. Required.
    :vartype output_index: int
    :ivar content_index: The index of the content part that was added. Required.
    :vartype content_index: int
    :ivar part: The content part that was added. Required.
    :vartype part: ~azure.ai.responses.server.sdk.models.models.OutputContent
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Locates the new part by (item_id, output_index, content_index) and carries it in ``part``.
    type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.content_part.added``. Required.
    RESPONSE_CONTENT_PART_ADDED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the content part was added to. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the content part was added to. Required."""
    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the content part that was added. Required."""
    part: "_models.OutputContent" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content part that was added. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        content_index: int,
        part: "_models.OutputContent",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so it always reflects this event type.
        self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED  # type: ignore
+
+
+class ResponseContentPartDoneEvent(ResponseStreamEvent, discriminator="response.content_part.done"):
+    """Streaming event emitted when a content part has finished streaming.
+
+    :ivar type: The type of the event. Always ``response.content_part.done``. Required.
+     RESPONSE_CONTENT_PART_DONE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_CONTENT_PART_DONE
+    :ivar item_id: The ID of the output item that the content part was added to. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that the content part was added to. Required.
+    :vartype output_index: int
+    :ivar content_index: The index of the content part that is done. Required.
+    :vartype content_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar part: The finalized content part. Required.
+    :vartype part: ~azure.ai.responses.server.sdk.models.models.OutputContent
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.content_part.done``. Required.
+     RESPONSE_CONTENT_PART_DONE."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the output item that the content part was added to. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that the content part was added to. Required."""
+    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the content part that is done. Required."""
+    part: "_models.OutputContent" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The finalized content part. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        content_index: int,
+        sequence_number: int,
+        part: "_models.OutputContent",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE # type: ignore
+
+
+class ResponseCreatedEvent(ResponseStreamEvent, discriminator="response.created"):
+    """Streaming event emitted when a response is first created.
+
+    :ivar type: The type of the event. Always ``response.created``. Required. RESPONSE_CREATED.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_CREATED
+    :ivar response: The response that was created. Required.
+    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
+    :ivar sequence_number: The sequence number for this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_CREATED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.created``. Required. RESPONSE_CREATED."""
+    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response that was created. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        response: "_models.Response",
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_CREATED # type: ignore
+
+
+class ResponseCustomToolCallInputDeltaEvent(ResponseStreamEvent, discriminator="response.custom_tool_call_input.delta"):
+    """Streaming event carrying an incremental chunk of input for a custom tool call.
+
+    :ivar type: The event type identifier. Always ``response.custom_tool_call_input.delta``.
+     Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar output_index: The index of the output this delta applies to. Required.
+    :vartype output_index: int
+    :ivar item_id: Unique identifier for the API item associated with this event. Required.
+    :vartype item_id: str
+    :ivar delta: The incremental input data (delta) for the custom tool call. Required.
+    :vartype delta: str
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The event type identifier. Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output this delta applies to. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Unique identifier for the API item associated with this event. Required."""
+    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The incremental input data (delta) for the custom tool call. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        sequence_number: int,
+        output_index: int,
+        item_id: str,
+        delta: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA # type: ignore
+
+
+class ResponseCustomToolCallInputDoneEvent(ResponseStreamEvent, discriminator="response.custom_tool_call_input.done"):
+    """Streaming event emitted when the input for a custom tool call is complete.
+
+    :ivar type: The event type identifier. Always ``response.custom_tool_call_input.done``.
+     Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar output_index: The index of the output this event applies to. Required.
+    :vartype output_index: int
+    :ivar item_id: Unique identifier for the API item associated with this event. Required.
+    :vartype item_id: str
+    :ivar input: The complete input data for the custom tool call. Required.
+    :vartype input: str
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The event type identifier. Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output this event applies to. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Unique identifier for the API item associated with this event. Required."""
+    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The complete input data for the custom tool call. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        sequence_number: int,
+        output_index: int,
+        item_id: str,
+        input: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE # type: ignore
+
+
+class ResponseError(_Model):
+    """An error object returned when the model fails to generate a Response.
+
+    :ivar code: The error code. Required. Known values are: "server_error", "rate_limit_exceeded",
+     "invalid_prompt", "vector_store_timeout", "invalid_image", "invalid_image_format",
+     "invalid_base64_image", "invalid_image_url", "image_too_large", "image_too_small",
+     "image_parse_error", "image_content_policy_violation", "invalid_image_mode",
+     "image_file_too_large", "unsupported_image_media_type", "empty_image_file",
+     "failed_to_download_image", and "image_file_not_found".
+    :vartype code: str or ~azure.ai.responses.server.sdk.models.models.ResponseErrorCode
+    :ivar message: A human-readable description of the error. Required.
+    :vartype message: str
+    """
+
+    code: Union[str, "_models.ResponseErrorCode"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The error code. Required. Known values are: \"server_error\", \"rate_limit_exceeded\",
+     \"invalid_prompt\", \"vector_store_timeout\", \"invalid_image\", \"invalid_image_format\",
+     \"invalid_base64_image\", \"invalid_image_url\", \"image_too_large\", \"image_too_small\",
+     \"image_parse_error\", \"image_content_policy_violation\", \"invalid_image_mode\",
+     \"image_file_too_large\", \"unsupported_image_media_type\", \"empty_image_file\",
+     \"failed_to_download_image\", and \"image_file_not_found\"."""
+    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A human-readable description of the error. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        code: Union[str, "_models.ResponseErrorCode"],
+        message: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ResponseErrorEvent(ResponseStreamEvent, discriminator="error"):
+    """Streaming event emitted when an error occurs.
+
+    :ivar type: The type of the event. Always ``error``. Required. ERROR.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ERROR
+    :ivar code: The error code. Required.
+    :vartype code: str
+    :ivar message: The error message. Required.
+    :vartype message: str
+    :ivar param: The parameter associated with the error. Required.
+     NOTE(review): semantics inferred from the name -- confirm against the service spec.
+    :vartype param: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``error``. Required. ERROR."""
+    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The error code. Required."""
+    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The error message. Required."""
+    param: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The parameter associated with the error. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        code: str,
+        message: str,
+        param: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.ERROR # type: ignore
+
+
+class ResponseFailedEvent(ResponseStreamEvent, discriminator="response.failed"):
+    """Streaming event emitted when a response fails.
+
+    :ivar type: The type of the event. Always ``response.failed``. Required. RESPONSE_FAILED.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_FAILED
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar response: The response that failed. Required.
+    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.failed``. Required. RESPONSE_FAILED."""
+    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response that failed. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        sequence_number: int,
+        response: "_models.Response",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_FAILED # type: ignore
+
+
+class ResponseFileSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.file_search_call.completed"):
+    """Streaming event emitted when a file search call is completed (results found).
+
+    :ivar type: The type of the event. Always ``response.file_search_call.completed``. Required.
+     RESPONSE_FILE_SEARCH_CALL_COMPLETED.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FILE_SEARCH_CALL_COMPLETED
+    :ivar output_index: The index of the output item in which the file search call was initiated.
+     Required.
+    :vartype output_index: int
+    :ivar item_id: The ID of the output item in which the file search call was initiated.
+     Required.
+    :vartype item_id: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.file_search_call.completed``. Required.
+     RESPONSE_FILE_SEARCH_CALL_COMPLETED."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in which the file search call was initiated. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the output item in which the file search call was initiated. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED # type: ignore
+
+
+class ResponseFileSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.file_search_call.in_progress"):
+    """Streaming event emitted while a file search call is in progress.
+
+    :ivar type: The type of the event. Always ``response.file_search_call.in_progress``. Required.
+     RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS
+    :ivar output_index: The index of the output item in which the file search call was initiated.
+     Required.
+    :vartype output_index: int
+    :ivar item_id: The ID of the output item in which the file search call was initiated.
+     Required.
+    :vartype item_id: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.file_search_call.in_progress``. Required.
+     RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in which the file search call was initiated. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the output item in which the file search call was initiated. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS # type: ignore
+
+
+class ResponseFileSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.file_search_call.searching"):
+    """Streaming event emitted while a file search is actively searching.
+
+    :ivar type: The type of the event. Always ``response.file_search_call.searching``. Required.
+     RESPONSE_FILE_SEARCH_CALL_SEARCHING.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FILE_SEARCH_CALL_SEARCHING
+    :ivar output_index: The index of the output item in which the file search call is searching.
+     Required.
+    :vartype output_index: int
+    :ivar item_id: The ID of the output item in which the file search call was initiated.
+     Required.
+    :vartype item_id: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.file_search_call.searching``. Required.
+     RESPONSE_FILE_SEARCH_CALL_SEARCHING."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in which the file search call is searching. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the output item in which the file search call was initiated. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING # type: ignore
+
+
+class ResponseFormatJsonSchemaSchema(_Model):
+    """Model carrying a JSON schema for a structured-output response format.
+
+    No fixed fields are declared here; the schema content appears to be
+    carried as dynamic model data. NOTE(review): presumably the raw
+    JSON-schema properties pass through untyped -- confirm against the
+    TypeSpec definition.
+    """
+
+
+class ResponseFunctionCallArgumentsDeltaEvent(
+    ResponseStreamEvent, discriminator="response.function_call_arguments.delta"
+):
+    """Streaming event emitted when there is a partial function-call arguments delta.
+
+    :ivar type: The type of the event. Always ``response.function_call_arguments.delta``. Required.
+     RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA
+    :ivar item_id: The ID of the output item that the function-call arguments delta is added to.
+     Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that the function-call arguments delta is
+     added to. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar delta: The function-call arguments delta that is added. Required.
+    :vartype delta: str
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.function_call_arguments.delta``. Required.
+     RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the output item that the function-call arguments delta is added to. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that the function-call arguments delta is added to. Required."""
+    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The function-call arguments delta that is added. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        sequence_number: int,
+        delta: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA # type: ignore
+
+
+class ResponseFunctionCallArgumentsDoneEvent(
+    ResponseStreamEvent, discriminator="response.function_call_arguments.done"
+):
+    """Streaming event emitted when function-call arguments are finalized.
+
+    :ivar type: The type of the event. Always ``response.function_call_arguments.done``. Required.
+     RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE
+    :ivar item_id: The ID of the item. Required.
+    :vartype item_id: str
+    :ivar name: The name of the function that was called. Required.
+    :vartype name: str
+    :ivar output_index: The index of the output item. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar arguments: The finalized function-call arguments. Required.
+    :vartype arguments: str
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.function_call_arguments.done``. Required.
+     RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the item. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the function that was called. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The finalized function-call arguments. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        name: str,
+        output_index: int,
+        sequence_number: int,
+        arguments: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE # type: ignore
+
+
+class ResponseImageGenCallCompletedEvent(ResponseStreamEvent, discriminator="response.image_generation_call.completed"):
+    """Streaming event emitted when an image generation call has completed.
+
+    :ivar type: The type of the event. Always ``response.image_generation_call.completed``.
+     Required. RESPONSE_IMAGE_GENERATION_CALL_COMPLETED.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar item_id: The unique identifier of the image generation item being processed. Required.
+    :vartype item_id: str
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.image_generation_call.completed``. Required.
+     RESPONSE_IMAGE_GENERATION_CALL_COMPLETED."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the image generation item being processed. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        sequence_number: int,
+        item_id: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED # type: ignore
+
+
+class ResponseImageGenCallGeneratingEvent(
+    ResponseStreamEvent, discriminator="response.image_generation_call.generating"
+):
+    """Streaming event emitted while an image generation call is actively generating.
+
+    :ivar type: The type of the event. Always ``response.image_generation_call.generating``.
+     Required. RESPONSE_IMAGE_GENERATION_CALL_GENERATING.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_GENERATING
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar item_id: The unique identifier of the image generation item being processed. Required.
+    :vartype item_id: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.image_generation_call.generating``. Required.
+     RESPONSE_IMAGE_GENERATION_CALL_GENERATING."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the image generation item being processed. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING # type: ignore
+
+
+class ResponseImageGenCallInProgressEvent(
+    ResponseStreamEvent, discriminator="response.image_generation_call.in_progress"
+):
+    """Streaming event emitted while an image generation call is in progress.
+
+    :ivar type: The type of the event. Always ``response.image_generation_call.in_progress``.
+     Required. RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar item_id: The unique identifier of the image generation item being processed. Required.
+    :vartype item_id: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.image_generation_call.in_progress``. Required.
+     RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the image generation item being processed. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS # type: ignore
+
+
+class ResponseImageGenCallPartialImageEvent(
+    ResponseStreamEvent, discriminator="response.image_generation_call.partial_image"
+):
+    """Streaming event carrying a partial image produced by an image generation call.
+
+    :ivar type: The type of the event. Always ``response.image_generation_call.partial_image``.
+     Required. RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar item_id: The unique identifier of the image generation item being processed. Required.
+    :vartype item_id: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar partial_image_index: 0-based index for the partial image (backend is 1-based, but this is
+     0-based for the user). Required.
+    :vartype partial_image_index: int
+    :ivar partial_image_b64: Base64-encoded partial image data, suitable for rendering as an image.
+     Required.
+    :vartype partial_image_b64: str
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.image_generation_call.partial_image``. Required.
+     RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the image generation item being processed. Required."""
+    partial_image_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """0-based index for the partial image (backend is 1-based, but this is 0-based for the user).
+     Required."""
+    partial_image_b64: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Base64-encoded partial image data, suitable for rendering as an image. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        sequence_number: int,
+        partial_image_index: int,
+        partial_image_b64: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator after base-class init so it is always set.
+        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE # type: ignore
+
+
+class ResponseIncompleteDetails(_Model):
+    """Details about why a response is marked as incomplete.
+
+    :ivar reason: The reason the response is incomplete: either ``"max_output_tokens"`` or
+     ``"content_filter"``.
+    :vartype reason: str
+    """
+
+    reason: Optional[Literal["max_output_tokens", "content_filter"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The reason the response is incomplete: either \"max_output_tokens\" or \"content_filter\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        reason: Optional[Literal["max_output_tokens", "content_filter"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
class ResponseIncompleteEvent(ResponseStreamEvent, discriminator="response.incomplete"):
    """An event that is emitted when a response finishes as incomplete.

    :ivar type: The type of the event. Always ``response.incomplete``. Required.
     RESPONSE_INCOMPLETE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_INCOMPLETE
    :ivar response: The response that was incomplete. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: polymorphic deserialization of ResponseStreamEvent
    # dispatches to this subclass when the payload's "type" is "response.incomplete".
    type: Literal[ResponseStreamEventType.RESPONSE_INCOMPLETE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always ``response.incomplete``. Required. RESPONSE_INCOMPLETE."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The response that was incomplete. Required."""

    @overload
    def __init__(
        self,
        *,
        response: "_models.Response",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The keyword overload does not accept 'type', so pin the discriminator
        # here; "# type: ignore" silences the Literal-narrowing complaint.
        self.type = ResponseStreamEventType.RESPONSE_INCOMPLETE # type: ignore
+
+
class ResponseInProgressEvent(ResponseStreamEvent, discriminator="response.in_progress"):
    """Emitted when the response is in progress.

    :ivar type: The type of the event. Always ``response.in_progress``. Required.
     RESPONSE_IN_PROGRESS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_IN_PROGRESS
    :ivar response: The response that is in progress. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: polymorphic deserialization of ResponseStreamEvent
    # dispatches to this subclass when the payload's "type" is "response.in_progress".
    type: Literal[ResponseStreamEventType.RESPONSE_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always ``response.in_progress``. Required. RESPONSE_IN_PROGRESS."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The response that is in progress. Required."""

    @overload
    def __init__(
        self,
        *,
        response: "_models.Response",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances (the overload
        # omits 'type').
        self.type = ResponseStreamEventType.RESPONSE_IN_PROGRESS # type: ignore
+
+
class ResponseLogProb(_Model):
    """A logprob is the logarithmic probability that the model assigns to producing a particular token
    at a given position in the sequence. Less-negative (higher) logprob values indicate greater
    model confidence in that token choice.

    :ivar token: A possible text token. Required.
    :vartype token: str
    :ivar logprob: The log probability of this token. Required.
    :vartype logprob: int
    :ivar top_logprobs: The log probability of the top 20 most likely tokens.
    :vartype top_logprobs:
     list[~azure.ai.responses.server.sdk.models.models.ResponseLogProbTopLogprobs]
    """

    token: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A possible text token. Required."""
    # NOTE(review): typed `int` by the code generator, but log probabilities are
    # normally floats — confirm against the TypeSpec/OpenAPI source before relying
    # on integer semantics. Do not hand-edit; this file is regenerated.
    logprob: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The log probability of this token. Required."""
    # Optional list of alternative tokens considered at this position.
    top_logprobs: Optional[list["_models.ResponseLogProbTopLogprobs"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The log probability of the top 20 most likely tokens."""

    @overload
    def __init__(
        self,
        *,
        token: str,
        logprob: int,
        top_logprobs: Optional[list["_models.ResponseLogProbTopLogprobs"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # All construction (keyword or raw-mapping) is delegated to _Model.
        super().__init__(*args, **kwargs)
+
+
class ResponseLogProbTopLogprobs(_Model):
    """One entry in a token's top-logprobs list: an alternative token and its log probability.

    :ivar token: An alternative text token.
    :vartype token: str
    :ivar logprob: The log probability of this token.
    :vartype logprob: int
    """

    # Both fields are optional on the wire; no attribute docstrings were generated
    # for them, so they are documented in the class docstring above.
    token: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    logprob: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        token: Optional[str] = None,
        logprob: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # All construction (keyword or raw-mapping) is delegated to _Model.
        super().__init__(*args, **kwargs)
+
+
class ResponseMCPCallArgumentsDeltaEvent(ResponseStreamEvent, discriminator="response.mcp_call_arguments.delta"):
    """Streamed partial update to the JSON arguments of an in-flight MCP tool call.

    :ivar type: The type of the event. Always 'response.mcp_call_arguments.delta'. Required.
     RESPONSE_MCP_CALL_ARGUMENTS_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_ARGUMENTS_DELTA
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the MCP tool call item being processed. Required.
    :vartype item_id: str
    :ivar delta: A JSON string containing the partial update to the arguments for the MCP tool
     call. Required.
    :vartype delta: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_call_arguments.delta" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_call_arguments.delta'. Required.
    RESPONSE_MCP_CALL_ARGUMENTS_DELTA."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the MCP tool call item being processed. Required."""
    # 'delta' is a fragment of JSON text, not a parsed object; consumers
    # accumulate deltas until the matching '...done' event arrives.
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string containing the partial update to the arguments for the MCP tool call. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        delta: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA # type: ignore
+
+
class ResponseMCPCallArgumentsDoneEvent(ResponseStreamEvent, discriminator="response.mcp_call_arguments.done"):
    """Emitted when the JSON arguments of an MCP tool call have been fully streamed.

    :ivar type: The type of the event. Always 'response.mcp_call_arguments.done'. Required.
     RESPONSE_MCP_CALL_ARGUMENTS_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_ARGUMENTS_DONE
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the MCP tool call item being processed. Required.
    :vartype item_id: str
    :ivar arguments: A JSON string containing the finalized arguments for the MCP tool call.
     Required.
    :vartype arguments: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_call_arguments.done" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_call_arguments.done'. Required.
    RESPONSE_MCP_CALL_ARGUMENTS_DONE."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the MCP tool call item being processed. Required."""
    # Unlike the '...delta' event, 'arguments' carries the complete JSON string.
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string containing the finalized arguments for the MCP tool call. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        arguments: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE # type: ignore
+
+
class ResponseMCPCallCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_call.completed"):
    """Emitted when an MCP tool call has completed successfully.

    :ivar type: The type of the event. Always 'response.mcp_call.completed'. Required.
     RESPONSE_MCP_CALL_COMPLETED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_COMPLETED
    :ivar item_id: The ID of the MCP tool call item that completed. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that completed. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_call.completed" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_call.completed'. Required.
    RESPONSE_MCP_CALL_COMPLETED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the MCP tool call item that completed. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that completed. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED # type: ignore
+
+
class ResponseMCPCallFailedEvent(ResponseStreamEvent, discriminator="response.mcp_call.failed"):
    """Emitted when an MCP tool call has failed.

    :ivar type: The type of the event. Always 'response.mcp_call.failed'. Required.
     RESPONSE_MCP_CALL_FAILED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_FAILED
    :ivar item_id: The ID of the MCP tool call item that failed. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that failed. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_call.failed" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_call.failed'. Required. RESPONSE_MCP_CALL_FAILED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the MCP tool call item that failed. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that failed. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED # type: ignore
+
+
class ResponseMCPCallInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_call.in_progress"):
    """Emitted while an MCP tool call is being processed.

    :ivar type: The type of the event. Always 'response.mcp_call.in_progress'. Required.
     RESPONSE_MCP_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_IN_PROGRESS
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the MCP tool call item being processed. Required.
    :vartype item_id: str
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_call.in_progress" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_call.in_progress'. Required.
    RESPONSE_MCP_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the MCP tool call item being processed. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS # type: ignore
+
+
class ResponseMCPListToolsCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.completed"):
    """Emitted when listing the tools of an MCP server has completed.

    :ivar type: The type of the event. Always 'response.mcp_list_tools.completed'. Required.
     RESPONSE_MCP_LIST_TOOLS_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_LIST_TOOLS_COMPLETED
    :ivar item_id: The ID of the MCP tool call item that produced this output. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that was processed. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_list_tools.completed" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_list_tools.completed'. Required.
    RESPONSE_MCP_LIST_TOOLS_COMPLETED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the MCP tool call item that produced this output. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that was processed. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED # type: ignore
+
+
class ResponseMCPListToolsFailedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.failed"):
    """Emitted when listing the tools of an MCP server has failed.

    :ivar type: The type of the event. Always 'response.mcp_list_tools.failed'. Required.
     RESPONSE_MCP_LIST_TOOLS_FAILED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_LIST_TOOLS_FAILED
    :ivar item_id: The ID of the MCP tool call item that failed. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that failed. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_list_tools.failed" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_list_tools.failed'. Required.
    RESPONSE_MCP_LIST_TOOLS_FAILED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the MCP tool call item that failed. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that failed. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED # type: ignore
+
+
class ResponseMCPListToolsInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.in_progress"):
    """Emitted while listing the tools of an MCP server is in progress.

    :ivar type: The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.
     RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS
    :ivar item_id: The ID of the MCP tool call item that is being processed. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that is being processed. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.mcp_list_tools.in_progress" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.
    RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the MCP tool call item that is being processed. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that is being processed. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS # type: ignore
+
+
class ResponseOutputItemAddedEvent(ResponseStreamEvent, discriminator="response.output_item.added"):
    """Emitted when a new output item is added.

    :ivar type: The type of the event. Always ``response.output_item.added``. Required.
     RESPONSE_OUTPUT_ITEM_ADDED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_ITEM_ADDED
    :ivar output_index: The index of the output item that was added. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar item: The output item that was added. Required.
    :vartype item: ~azure.ai.responses.server.sdk.models.models.OutputItem
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.output_item.added" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always ``response.output_item.added``. Required.
    RESPONSE_OUTPUT_ITEM_ADDED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that was added. Required."""
    item: "_models.OutputItem" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The output item that was added. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        sequence_number: int,
        item: "_models.OutputItem",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED # type: ignore
+
+
class ResponseOutputItemDoneEvent(ResponseStreamEvent, discriminator="response.output_item.done"):
    """Emitted when an output item is marked done.

    :ivar type: The type of the event. Always ``response.output_item.done``. Required.
     RESPONSE_OUTPUT_ITEM_DONE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_ITEM_DONE
    :ivar output_index: The index of the output item that was marked done. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar item: The output item that was marked done. Required.
    :vartype item: ~azure.ai.responses.server.sdk.models.models.OutputItem
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.output_item.done" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always ``response.output_item.done``. Required.
    RESPONSE_OUTPUT_ITEM_DONE."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that was marked done. Required."""
    item: "_models.OutputItem" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The output item that was marked done. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        sequence_number: int,
        item: "_models.OutputItem",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE # type: ignore
+
+
class ResponseOutputTextAnnotationAddedEvent(
    ResponseStreamEvent, discriminator="response.output_text.annotation.added"
):
    """Emitted when an annotation is added to a content part of an output item.

    :ivar type: The type of the event. Always 'response.output_text.annotation.added'. Required.
     RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED
    :ivar item_id: The unique identifier of the item to which the annotation is being added.
     Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar content_index: The index of the content part within the output item. Required.
    :vartype content_index: int
    :ivar annotation_index: The index of the annotation within the content part. Required.
    :vartype annotation_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar annotation: The annotation object being added. (See annotation schema for details.).
     Required.
    :vartype annotation: ~azure.ai.responses.server.sdk.models.models.Annotation
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.output_text.annotation.added" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.output_text.annotation.added'. Required.
    RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the item to which the annotation is being added. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the content part within the output item. Required."""
    annotation_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the annotation within the content part. Required."""
    annotation: "_models.Annotation" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The annotation object being added. (See annotation schema for details.). Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        content_index: int,
        annotation_index: int,
        sequence_number: int,
        annotation: "_models.Annotation",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED # type: ignore
+
+
class ResponsePromptVariables(_Model):
    """Prompt Variables.

    No typed fields are declared on this model; any values present in the
    payload round-trip through the underlying ``_Model`` machinery.
    """
+
+
class ResponseQueuedEvent(ResponseStreamEvent, discriminator="response.queued"):
    """Emitted when a response has been queued for processing.

    :ivar type: The type of the event. Always 'response.queued'. Required. RESPONSE_QUEUED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_QUEUED
    :ivar response: The full response object that is queued. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.queued" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_QUEUED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always 'response.queued'. Required. RESPONSE_QUEUED."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The full response object that is queued. Required."""

    @overload
    def __init__(
        self,
        *,
        response: "_models.Response",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_QUEUED # type: ignore
+
+
class ResponseReasoningSummaryPartAddedEvent(
    ResponseStreamEvent, discriminator="response.reasoning_summary_part.added"
):
    """Emitted when a new reasoning summary part is added.

    :ivar type: The type of the event. Always ``response.reasoning_summary_part.added``. Required.
     RESPONSE_REASONING_SUMMARY_PART_ADDED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_PART_ADDED
    :ivar item_id: The ID of the item this summary part is associated with. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item this summary part is associated with.
     Required.
    :vartype output_index: int
    :ivar summary_index: The index of the summary part within the reasoning summary. Required.
    :vartype summary_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar part: The summary part that was added. Required.
    :vartype part:
     ~azure.ai.responses.server.sdk.models.models.ResponseReasoningSummaryPartAddedEventPart
    """

    # Discriminator field: dispatches polymorphic deserialization to this
    # subclass for "response.reasoning_summary_part.added" payloads.
    type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the event. Always ``response.reasoning_summary_part.added``. Required.
    RESPONSE_REASONING_SUMMARY_PART_ADDED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the item this summary part is associated with. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item this summary part is associated with. Required."""
    summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the summary part within the reasoning summary. Required."""
    part: "_models.ResponseReasoningSummaryPartAddedEventPart" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The summary part that was added. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        summary_index: int,
        sequence_number: int,
        part: "_models.ResponseReasoningSummaryPartAddedEventPart",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator for keyword-constructed instances.
        self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED # type: ignore
+
+
class ResponseReasoningSummaryPartAddedEventPart(_Model): # pylint: disable=name-too-long
    """The summary-text part carried by a reasoning-summary-part-added event.

    :ivar type: Required. Default value is "summary_text".
    :vartype type: str
    :ivar text: Required.
    :vartype text: str
    """

    # 'type' is a fixed tag: the keyword overload does not accept it, and
    # __init__ always forces it to "summary_text" after construction.
    type: Literal["summary_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required. Default value is \"summary_text\"."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the constant tag value regardless of construction path.
        self.type: Literal["summary_text"] = "summary_text"
+
+
+class ResponseReasoningSummaryPartDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_part.done"):
+ """Emitted when a reasoning summary part is completed.
+
+ :ivar type: The type of the event. Always ``response.reasoning_summary_part.done``. Required.
+ RESPONSE_REASONING_SUMMARY_PART_DONE.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_PART_DONE
+ :ivar item_id: The ID of the item this summary part is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this summary part is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+ :vartype summary_index: int
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ :ivar part: The completed summary part. Required.
+ :vartype part:
+ ~azure.ai.responses.server.sdk.models.models.ResponseReasoningSummaryPartDoneEventPart
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_summary_part.done``. Required.
+ RESPONSE_REASONING_SUMMARY_PART_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this summary part is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this summary part is associated with. Required."""
+ summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the summary part within the reasoning summary. Required."""
+ part: "_models.ResponseReasoningSummaryPartDoneEventPart" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The completed summary part. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ summary_index: int,
+ sequence_number: int,
+ part: "_models.ResponseReasoningSummaryPartDoneEventPart",
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE # type: ignore
+
+
+class ResponseReasoningSummaryPartDoneEventPart(_Model): # pylint: disable=name-too-long
+ """ResponseReasoningSummaryPartDoneEventPart.
+
+ :ivar type: Required. Default value is "summary_text".
+ :vartype type: str
+ :ivar text: Required.
+ :vartype text: str
+ """
+
+ type: Literal["summary_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required. Default value is \"summary_text\"."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ text: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["summary_text"] = "summary_text"
+
+
+class ResponseReasoningSummaryTextDeltaEvent(
+ ResponseStreamEvent, discriminator="response.reasoning_summary_text.delta"
+):
+ """Emitted when a delta is added to a reasoning summary text.
+
+ :ivar type: The type of the event. Always ``response.reasoning_summary_text.delta``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DELTA.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_TEXT_DELTA
+ :ivar item_id: The ID of the item this summary text delta is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this summary text delta is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+ :vartype summary_index: int
+ :ivar delta: The text delta that was added to the summary. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_summary_text.delta``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this summary text delta is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this summary text delta is associated with. Required."""
+ summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the summary part within the reasoning summary. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text delta that was added to the summary. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ summary_index: int,
+ delta: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA # type: ignore
+
+
+class ResponseReasoningSummaryTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_text.done"):
+ """Emitted when a reasoning summary text is completed.
+
+ :ivar type: The type of the event. Always ``response.reasoning_summary_text.done``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DONE.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_TEXT_DONE
+ :ivar item_id: The ID of the item this summary text is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this summary text is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+ :vartype summary_index: int
+ :ivar text: The full text of the completed reasoning summary. Required.
+ :vartype text: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_summary_text.done``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this summary text is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this summary text is associated with. Required."""
+ summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the summary part within the reasoning summary. Required."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The full text of the completed reasoning summary. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ summary_index: int,
+ text: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE # type: ignore
+
+
+class ResponseReasoningTextDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning_text.delta"):
+ """Emitted when a delta is added to a reasoning text.
+
+ :ivar type: The type of the event. Always ``response.reasoning_text.delta``. Required.
+ RESPONSE_REASONING_TEXT_DELTA.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_TEXT_DELTA
+ :ivar item_id: The ID of the item this reasoning text delta is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this reasoning text delta is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the reasoning content part this delta is associated with.
+ Required.
+ :vartype content_index: int
+ :ivar delta: The text delta that was added to the reasoning content. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_text.delta``. Required.
+ RESPONSE_REASONING_TEXT_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this reasoning text delta is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this reasoning text delta is associated with. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the reasoning content part this delta is associated with. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text delta that was added to the reasoning content. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ delta: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_TEXT_DELTA # type: ignore
+
+
+class ResponseReasoningTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_text.done"):
+ """Emitted when a reasoning text is completed.
+
+ :ivar type: The type of the event. Always ``response.reasoning_text.done``. Required.
+ RESPONSE_REASONING_TEXT_DONE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_TEXT_DONE
+ :ivar item_id: The ID of the item this reasoning text is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this reasoning text is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the reasoning content part. Required.
+ :vartype content_index: int
+ :ivar text: The full text of the completed reasoning content. Required.
+ :vartype text: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_text.done``. Required.
+ RESPONSE_REASONING_TEXT_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this reasoning text is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this reasoning text is associated with. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the reasoning content part. Required."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The full text of the completed reasoning content. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ text: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_TEXT_DONE # type: ignore
+
+
+class ResponseRefusalDeltaEvent(ResponseStreamEvent, discriminator="response.refusal.delta"):
+ """Emitted when there is a partial refusal text.
+
+ :ivar type: The type of the event. Always ``response.refusal.delta``. Required.
+ RESPONSE_REFUSAL_DELTA.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_REFUSAL_DELTA
+ :ivar item_id: The ID of the output item that the refusal text is added to. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the refusal text is added to. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the refusal text is added to. Required.
+ :vartype content_index: int
+ :ivar delta: The refusal text that is added. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.refusal.delta``. Required. RESPONSE_REFUSAL_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the refusal text is added to. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the refusal text is added to. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the refusal text is added to. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The refusal text that is added. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ delta: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DELTA # type: ignore
+
+
+class ResponseRefusalDoneEvent(ResponseStreamEvent, discriminator="response.refusal.done"):
+ """Emitted when refusal text is finalized.
+
+ :ivar type: The type of the event. Always ``response.refusal.done``. Required.
+ RESPONSE_REFUSAL_DONE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_REFUSAL_DONE
+ :ivar item_id: The ID of the output item that the refusal text is finalized. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the refusal text is finalized. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the refusal text is finalized.
+ Required.
+ :vartype content_index: int
+ :ivar refusal: The refusal text that is finalized. Required.
+ :vartype refusal: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.refusal.done``. Required. RESPONSE_REFUSAL_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the refusal text is finalized. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the refusal text is finalized. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the refusal text is finalized. Required."""
+ refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The refusal text that is finalized. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ refusal: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DONE # type: ignore
+
+
+class ResponseStreamOptions(_Model):
+ """Options for streaming responses. Only set this when you set ``stream: true``.
+
+ :ivar include_obfuscation: When true, stream obfuscation will be enabled. Stream obfuscation
+ adds random characters to an ``obfuscation`` field on streaming delta events to normalize
+ payload sizes as a mitigation to certain side-channel attacks. These obfuscation fields are
+ included by default, but add a small amount of overhead to the data stream. You can set
+ ``include_obfuscation`` to false to optimize for bandwidth if you trust the network links
+ between your application and the OpenAI API.
+ :vartype include_obfuscation: bool
+ """
+
+ include_obfuscation: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """When true, stream obfuscation will be enabled. Stream obfuscation adds random characters to an
+ ``obfuscation`` field on streaming delta events to normalize payload sizes as a mitigation to
+ certain side-channel attacks. These obfuscation fields are included by default, but add a small
+ amount of overhead to the data stream. You can set ``include_obfuscation`` to false to optimize
+ for bandwidth if you trust the network links between your application and the OpenAI API."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ include_obfuscation: Optional[bool] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseTextDeltaEvent(ResponseStreamEvent, discriminator="response.output_text.delta"):
+ """Emitted when there is an additional text delta.
+
+ :ivar type: The type of the event. Always ``response.output_text.delta``. Required.
+ RESPONSE_OUTPUT_TEXT_DELTA.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_TEXT_DELTA
+ :ivar item_id: The ID of the output item that the text delta was added to. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the text delta was added to. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the text delta was added to. Required.
+ :vartype content_index: int
+ :ivar delta: The text delta that was added. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number for this event. Required.
+ :vartype sequence_number: int
+ :ivar logprobs: The log probabilities of the tokens in the delta. Required.
+ :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.ResponseLogProb]
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.output_text.delta``. Required.
+ RESPONSE_OUTPUT_TEXT_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the text delta was added to. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the text delta was added to. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the text delta was added to. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text delta that was added. Required."""
+ logprobs: list["_models.ResponseLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The log probabilities of the tokens in the delta. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ delta: str,
+ sequence_number: int,
+ logprobs: list["_models.ResponseLogProb"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA # type: ignore
+
+
+class ResponseTextDoneEvent(ResponseStreamEvent, discriminator="response.output_text.done"):
+ """Emitted when text content is finalized.
+
+ :ivar type: The type of the event. Always ``response.output_text.done``. Required.
+ RESPONSE_OUTPUT_TEXT_DONE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_TEXT_DONE
+ :ivar item_id: The ID of the output item that the text content is finalized. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the text content is finalized. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the text content is finalized.
+ Required.
+ :vartype content_index: int
+ :ivar text: The text content that is finalized. Required.
+ :vartype text: str
+ :ivar sequence_number: The sequence number for this event. Required.
+ :vartype sequence_number: int
+ :ivar logprobs: The log probabilities of the tokens in the delta. Required.
+ :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.ResponseLogProb]
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.output_text.done``. Required.
+ RESPONSE_OUTPUT_TEXT_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the text content is finalized. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the text content is finalized. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the text content is finalized. Required."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text content that is finalized. Required."""
+ logprobs: list["_models.ResponseLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The log probabilities of the tokens in the delta. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ text: str,
+ sequence_number: int,
+ logprobs: list["_models.ResponseLogProb"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE # type: ignore
+
+
+class ResponseTextParam(_Model):
+ """Configuration options for a text response from the model. Can be plain
+ text or structured JSON data. Learn more:
+
+ * [Text inputs and outputs](/docs/guides/text)
+ * [Structured Outputs](/docs/guides/structured-outputs).
+
+ :ivar format:
+ :vartype format: ~azure.ai.responses.server.sdk.models.models.TextResponseFormatConfiguration
+ :ivar verbosity: Is one of the following types: Literal["low"], Literal["medium"],
+ Literal["high"]
+ :vartype verbosity: str or str or str
+ """
+
+ format: Optional["_models.TextResponseFormatConfiguration"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ verbosity: Optional[Literal["low", "medium", "high"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is one of the following types: Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ format: Optional["_models.TextResponseFormatConfiguration"] = None,
+ verbosity: Optional[Literal["low", "medium", "high"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseUsage(_Model):
+ """Represents token usage details including input tokens, output tokens, a breakdown of output
+ tokens, and the total tokens used.
+
+ :ivar input_tokens: The number of input tokens. Required.
+ :vartype input_tokens: int
+ :ivar input_tokens_details: A detailed breakdown of the input tokens. Required.
+ :vartype input_tokens_details:
+ ~azure.ai.responses.server.sdk.models.models.ResponseUsageInputTokensDetails
+ :ivar output_tokens: The number of output tokens. Required.
+ :vartype output_tokens: int
+ :ivar output_tokens_details: A detailed breakdown of the output tokens. Required.
+ :vartype output_tokens_details:
+ ~azure.ai.responses.server.sdk.models.models.ResponseUsageOutputTokensDetails
+ :ivar total_tokens: The total number of tokens used. Required.
+ :vartype total_tokens: int
+ """
+
+ input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The number of input tokens. Required."""
+ input_tokens_details: "_models.ResponseUsageInputTokensDetails" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A detailed breakdown of the input tokens. Required."""
+ output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The number of output tokens. Required."""
+ output_tokens_details: "_models.ResponseUsageOutputTokensDetails" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A detailed breakdown of the output tokens. Required."""
+ total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The total number of tokens used. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ input_tokens: int,
+ input_tokens_details: "_models.ResponseUsageInputTokensDetails",
+ output_tokens: int,
+ output_tokens_details: "_models.ResponseUsageOutputTokensDetails",
+ total_tokens: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseUsageInputTokensDetails(_Model):
+ """ResponseUsageInputTokensDetails.
+
+ :ivar cached_tokens: Required.
+ :vartype cached_tokens: int
+ """
+
+ cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ cached_tokens: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseUsageOutputTokensDetails(_Model):
+ """ResponseUsageOutputTokensDetails.
+
+ :ivar reasoning_tokens: Required.
+ :vartype reasoning_tokens: int
+ """
+
+ reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ reasoning_tokens: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
class ResponseWebSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.web_search_call.completed"):
    """Emitted when a web search call is completed.

    :ivar type: The type of the event. Always ``response.web_search_call.completed``. Required.
     RESPONSE_WEB_SEARCH_CALL_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_WEB_SEARCH_CALL_COMPLETED
    :ivar output_index: The index of the output item that the web search call is associated with.
     Required.
    :vartype output_index: int
    :ivar item_id: Unique ID for the output item associated with the web search call. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of the web search call being processed. Required.
    :vartype sequence_number: int
    """

    type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.web_search_call.completed``. Required.
    RESPONSE_WEB_SEARCH_CALL_COMPLETED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the web search call is associated with. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique ID for the output item associated with the web search call. Required."""

    # NOTE(review): ``sequence_number`` appears in the docstring and in the keyword-only
    # overload below, but no rest_field for it is declared in this class — presumably it
    # is inherited from ResponseStreamEvent; confirm against the base class definition.
    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator to this subtype's constant value.
        self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED  # type: ignore
+
+
class ResponseWebSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.web_search_call.in_progress"):
    """Emitted when a web search call is initiated.

    :ivar type: The type of the event. Always ``response.web_search_call.in_progress``. Required.
     RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS
    :ivar output_index: The index of the output item that the web search call is associated with.
     Required.
    :vartype output_index: int
    :ivar item_id: Unique ID for the output item associated with the web search call. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of the web search call being processed. Required.
    :vartype sequence_number: int
    """

    type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.web_search_call.in_progress``. Required.
    RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the web search call is associated with. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique ID for the output item associated with the web search call. Required."""

    # NOTE(review): ``sequence_number`` appears in the docstring and in the keyword-only
    # overload below, but no rest_field for it is declared in this class — presumably it
    # is inherited from ResponseStreamEvent; confirm against the base class definition.
    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator to this subtype's constant value.
        self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS  # type: ignore
+
+
class ResponseWebSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.web_search_call.searching"):
    """Emitted when a web search call is executing.

    :ivar type: The type of the event. Always ``response.web_search_call.searching``. Required.
     RESPONSE_WEB_SEARCH_CALL_SEARCHING.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_WEB_SEARCH_CALL_SEARCHING
    :ivar output_index: The index of the output item that the web search call is associated with.
     Required.
    :vartype output_index: int
    :ivar item_id: Unique ID for the output item associated with the web search call. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of the web search call being processed. Required.
    :vartype sequence_number: int
    """

    type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.web_search_call.searching``. Required.
    RESPONSE_WEB_SEARCH_CALL_SEARCHING."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the web search call is associated with. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique ID for the output item associated with the web search call. Required."""

    # NOTE(review): ``sequence_number`` appears in the docstring and in the keyword-only
    # overload below, but no rest_field for it is declared in this class — presumably it
    # is inherited from ResponseStreamEvent; confirm against the base class definition.
    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator to this subtype's constant value.
        self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING  # type: ignore
+
+
class ScreenshotParam(ComputerAction, discriminator="screenshot"):
    """Computer action that captures a screenshot.

    :ivar type: Specifies the event type. For a screenshot action, this property is always set to
     ``screenshot``. Required. SCREENSHOT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SCREENSHOT
    """

    type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the event type. For a screenshot action, this property is always set to
    ``screenshot``. Required. SCREENSHOT."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = ComputerActionType.SCREENSHOT  # type: ignore
+
+
class ScrollParam(ComputerAction, discriminator="scroll"):
    """Computer action that describes a scroll gesture.

    :ivar type: Specifies the event type. For a scroll action, this property is always set to
     ``scroll``. Required. SCROLL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SCROLL
    :ivar x: The x-coordinate where the scroll occurred. Required.
    :vartype x: int
    :ivar y: The y-coordinate where the scroll occurred. Required.
    :vartype y: int
    :ivar scroll_x: The horizontal scroll distance. Required.
    :vartype scroll_x: int
    :ivar scroll_y: The vertical scroll distance. Required.
    :vartype scroll_y: int
    """

    type: Literal[ComputerActionType.SCROLL] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the event type. For a scroll action, this property is always set to ``scroll``.
    Required. SCROLL."""
    x: int = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The x-coordinate where the scroll occurred. Required."""
    y: int = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The y-coordinate where the scroll occurred. Required."""
    scroll_x: int = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The horizontal scroll distance. Required."""
    scroll_y: int = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The vertical scroll distance. Required."""

    @overload
    def __init__(self, *, x: int, y: int, scroll_x: int, scroll_y: int) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = ComputerActionType.SCROLL  # type: ignore
+
+
class SharepointGroundingToolCall(OutputItem, discriminator="sharepoint_grounding_preview_call"):
    """Output item representing a SharePoint grounding tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. SHAREPOINT_GROUNDING_PREVIEW_CALL.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.SHAREPOINT_GROUNDING_PREVIEW_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. SHAREPOINT_GROUNDING_PREVIEW_CALL."""
    call_id: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL  # type: ignore
+
+
class SharepointGroundingToolCallOutput(OutputItem, discriminator="sharepoint_grounding_preview_call_output"):
    """Output item carrying the result of a SharePoint grounding tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the SharePoint grounding tool call. Is one of the following
     types: {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    type: Literal[OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT."""
    call_id: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The unique ID of the tool call generated by the model. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the SharePoint grounding tool call. Is one of the following types: {str: Any},
    str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT  # type: ignore
+
+
class SharepointGroundingToolParameters(_Model):
    """Parameters configuring the SharePoint grounding tool.

    :ivar project_connections: The project connections attached to this tool. There can be a
     maximum of 1 connection resource attached to the tool.
    :vartype project_connections:
     list[~azure.ai.responses.server.sdk.models.models.ToolProjectConnection]
    """

    project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The project connections attached to this tool. There can be a maximum of 1 connection resource
    attached to the tool."""

    @overload
    def __init__(
        self, *, project_connections: Optional[list["_models.ToolProjectConnection"]] = None
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class SharepointPreviewTool(Tool, discriminator="sharepoint_grounding_preview"):
    """Input definition for a SharePoint tool as used to configure an agent.

    :ivar type: The object type, which is always 'sharepoint_grounding_preview'. Required.
     SHAREPOINT_GROUNDING_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHAREPOINT_GROUNDING_PREVIEW
    :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required.
    :vartype sharepoint_grounding_preview:
     ~azure.ai.responses.server.sdk.models.models.SharepointGroundingToolParameters
    """

    type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The object type, which is always 'sharepoint_grounding_preview'. Required.
    SHAREPOINT_GROUNDING_PREVIEW."""
    sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The sharepoint grounding tool parameters. Required."""

    @overload
    def __init__(
        self, *, sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters"
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW  # type: ignore
+
+
class SkillReferenceParam(ContainerSkill, discriminator="skill_reference"):
    """Container skill that points at a skill created via the /v1/skills endpoint.

    :ivar type: References a skill created with the /v1/skills endpoint. Required. SKILL_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SKILL_REFERENCE
    :ivar skill_id: The ID of the referenced skill. Required.
    :vartype skill_id: str
    :ivar version: Optional skill version. Use a positive integer or 'latest'. Omit for default.
    :vartype version: str
    """

    type: Literal[ContainerSkillType.SKILL_REFERENCE] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """References a skill created with the /v1/skills endpoint. Required. SKILL_REFERENCE."""
    skill_id: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The ID of the referenced skill. Required."""
    version: Optional[str] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional skill version. Use a positive integer or 'latest'. Omit for default."""

    @overload
    def __init__(self, *, skill_id: str, version: Optional[str] = None) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = ContainerSkillType.SKILL_REFERENCE  # type: ignore
+
+
class ToolChoiceParam(_Model):
    """Controls how the model selects which tool (or tools) to use when generating a response. See
    the ``tools`` parameter to see how to specify which tools the model can call.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ToolChoiceAllowed, SpecificApplyPatchParam, ToolChoiceCodeInterpreter,
    ToolChoiceComputerUsePreview, ToolChoiceCustom, ToolChoiceFileSearch, ToolChoiceFunction,
    ToolChoiceImageGeneration, ToolChoiceMCP, SpecificFunctionShellParam,
    ToolChoiceWebSearchPreview, ToolChoiceWebSearchPreview20250311

    :ivar type: Required. Known values are: "allowed_tools", "function", "mcp", "custom",
     "apply_patch", "shell", "file_search", "web_search_preview", "computer_use_preview",
     "web_search_preview_2025_03_11", "image_generation", and "code_interpreter".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ToolChoiceParamType
    """

    # Registry of discriminator value -> concrete sub-class, populated by subclassing.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Known values are: \"allowed_tools\", \"function\", \"mcp\", \"custom\",
    \"apply_patch\", \"shell\", \"file_search\", \"web_search_preview\", \"computer_use_preview\",
    \"web_search_preview_2025_03_11\", \"image_generation\", and \"code_interpreter\"."""

    @overload
    def __init__(self, *, type: str) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class SpecificApplyPatchParam(ToolChoiceParam, discriminator="apply_patch"):
    """Tool choice that forces the apply-patch tool.

    :ivar type: The tool to call. Always ``apply_patch``. Required. APPLY_PATCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH
    """

    type: Literal[ToolChoiceParamType.APPLY_PATCH] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The tool to call. Always ``apply_patch``. Required. APPLY_PATCH."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = ToolChoiceParamType.APPLY_PATCH  # type: ignore
+
+
class SpecificFunctionShellParam(ToolChoiceParam, discriminator="shell"):
    """Tool choice that forces the shell tool.

    :ivar type: The tool to call. Always ``shell``. Required. SHELL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL
    """

    type: Literal[ToolChoiceParamType.SHELL] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The tool to call. Always ``shell``. Required. SHELL."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = ToolChoiceParamType.SHELL  # type: ignore
+
+
class StructuredOutputDefinition(_Model):
    """Definition of a structured output that the agent can produce.

    :ivar name: The name of the structured output. Required.
    :vartype name: str
    :ivar description: A description of the output to emit. Used by the model to determine when to
     emit the output. Required.
    :vartype description: str
    :ivar schema: The JSON schema for the structured output. Required.
    :vartype schema: dict[str, any]
    :ivar strict: Whether to enforce strict validation. Default ``true``. Required.
    :vartype strict: bool
    """

    name: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The name of the structured output. Required."""
    description: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """A description of the output to emit. Used by the model to determine when to emit the output.
    Required."""
    schema: dict[str, Any] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The JSON schema for the structured output. Required."""
    strict: bool = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Whether to enforce strict validation. Default ``true``. Required."""

    @overload
    def __init__(
        self, *, name: str, description: str, schema: dict[str, Any], strict: bool
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class StructuredOutputsOutputItem(OutputItem, discriminator="structured_outputs"):
    """Output item carrying a structured output captured during the response.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. STRUCTURED_OUTPUTS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.STRUCTURED_OUTPUTS
    :ivar output: The structured output captured during the response. Required.
    :vartype output: any
    """

    type: Literal[OutputItemType.STRUCTURED_OUTPUTS] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. STRUCTURED_OUTPUTS."""
    output: Any = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The structured output captured during the response. Required."""

    @overload
    def __init__(
        self,
        *,
        output: Any,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = OutputItemType.STRUCTURED_OUTPUTS  # type: ignore
+
+
class SummaryTextContent(MessageContent, discriminator="summary_text"):
    """Message content holding summary text of the model's reasoning output.

    :ivar type: The type of the object. Always ``summary_text``. Required. SUMMARY_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SUMMARY_TEXT
    :ivar text: A summary of the reasoning output from the model so far. Required.
    :vartype text: str
    """

    type: Literal[MessageContentType.SUMMARY_TEXT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of the object. Always ``summary_text``. Required. SUMMARY_TEXT."""
    text: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """A summary of the reasoning output from the model so far. Required."""

    @overload
    def __init__(self, *, text: str) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = MessageContentType.SUMMARY_TEXT  # type: ignore
+
+
class TextContent(MessageContent, discriminator="text"):
    """Message content holding plain text.

    :ivar type: Required. TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TEXT
    :ivar text: Required.
    :vartype text: str
    """

    type: Literal[MessageContentType.TEXT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. TEXT."""
    text: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""

    @overload
    def __init__(self, *, text: str) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = MessageContentType.TEXT  # type: ignore
+
+
class TextResponseFormatConfiguration(_Model):
    """Specifies the format that the model must output. Configuring ``{ "type":
    "json_schema" }`` enables Structured Outputs, which ensures the model will match your supplied
    JSON schema. Learn more in the `Structured Outputs guide `_.
    The default format is ``{ "type": "text" }`` with no additional options. *Not recommended for
    gpt-4o and newer models:** Setting to ``{ "type": "json_object" }`` enables the older JSON
    mode, which ensures the message the model generates is valid JSON. Using ``json_schema`` is
    preferred for models that support it.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    TextResponseFormatConfigurationResponseFormatJsonObject, TextResponseFormatJsonSchema,
    TextResponseFormatConfigurationResponseFormatText

    :ivar type: Required. Known values are: "text", "json_schema", and "json_object".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.TextResponseFormatConfigurationType
    """

    # Registry of discriminator value -> concrete sub-class, populated by subclassing.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Known values are: \"text\", \"json_schema\", and \"json_object\"."""

    @overload
    def __init__(self, *, type: str) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class TextResponseFormatConfigurationResponseFormatJsonObject(
    TextResponseFormatConfiguration, discriminator="json_object"
):  # pylint: disable=name-too-long
    """Response format forcing a valid JSON object (legacy JSON mode).

    :ivar type: The type of response format being defined. Always ``json_object``. Required.
     JSON_OBJECT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.JSON_OBJECT
    """

    type: Literal[TextResponseFormatConfigurationType.JSON_OBJECT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of response format being defined. Always ``json_object``. Required. JSON_OBJECT."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = TextResponseFormatConfigurationType.JSON_OBJECT  # type: ignore
+
+
class TextResponseFormatConfigurationResponseFormatText(
    TextResponseFormatConfiguration, discriminator="text"
):  # pylint: disable=name-too-long
    """Response format producing plain text (the default).

    :ivar type: The type of response format being defined. Always ``text``. Required. TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TEXT
    """

    type: Literal[TextResponseFormatConfigurationType.TEXT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of response format being defined. Always ``text``. Required. TEXT."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = TextResponseFormatConfigurationType.TEXT  # type: ignore
+
+
class TextResponseFormatJsonSchema(TextResponseFormatConfiguration, discriminator="json_schema"):
    """Response format constraining output to a supplied JSON schema (Structured Outputs).

    :ivar type: The type of response format being defined. Always ``json_schema``. Required.
     JSON_SCHEMA.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.JSON_SCHEMA
    :ivar description: A description of what the response format is for, used by the model to
     determine how to respond in the format.
    :vartype description: str
    :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores and
     dashes, with a maximum length of 64. Required.
    :vartype name: str
    :ivar schema: Required.
    :vartype schema: ~azure.ai.responses.server.sdk.models.models.ResponseFormatJsonSchemaSchema
    :ivar strict:
    :vartype strict: bool
    """

    type: Literal[TextResponseFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of response format being defined. Always ``json_schema``. Required. JSON_SCHEMA."""
    description: Optional[str] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """A description of what the response format is for, used by the model to determine how to respond
    in the format."""
    name: str = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with
    a maximum length of 64. Required."""
    schema: "_models.ResponseFormatJsonSchemaSchema" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""
    # Optional strict-validation flag; no attribute docstring in the upstream spec.
    strict: Optional[bool] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        name: str,
        schema: "_models.ResponseFormatJsonSchemaSchema",
        description: Optional[str] = None,
        strict: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """Create the model from a mapping of raw JSON content.

        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator is fixed for this subtype.
        self.type = TextResponseFormatConfigurationType.JSON_SCHEMA  # type: ignore
+
+
+class ToolChoiceAllowed(ToolChoiceParam, discriminator="allowed_tools"):
+ """Allowed tools.
+
+ :ivar type: Allowed tool configuration type. Always ``allowed_tools``. Required. ALLOWED_TOOLS.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ALLOWED_TOOLS
+ :ivar mode: Constrains the tools available to the model to a pre-defined set. ``auto`` allows
+ the model to pick from among the allowed tools and generate a message. ``required`` requires
+ the model to call one or more of the allowed tools. Required. Is either a Literal["auto"] type
+ or a Literal["required"] type.
+     :vartype mode: str
+ :ivar tools: A list of tool definitions that the model should be allowed to call. For the
+ Responses API, the list of tool definitions might look like:
+
+ .. code-block:: json
+
+ [
+ { "type": "function", "name": "get_weather" },
+ { "type": "mcp", "server_label": "deepwiki" },
+ { "type": "image_generation" }
+ ]. Required.
+ :vartype tools: list[dict[str, any]]
+ """
+
+ type: Literal[ToolChoiceParamType.ALLOWED_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Allowed tool configuration type. Always ``allowed_tools``. Required. ALLOWED_TOOLS."""
+ mode: Literal["auto", "required"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Constrains the tools available to the model to a pre-defined set. ``auto`` allows the model to
+ pick from among the allowed tools and generate a message. ``required`` requires the model to
+ call one or more of the allowed tools. Required. Is either a Literal[\"auto\"] type or a
+ Literal[\"required\"] type."""
+ tools: list[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A list of tool definitions that the model should be allowed to call. For the Responses API, the
+ list of tool definitions might look like:
+
+ .. code-block:: json
+
+ [
+ { \"type\": \"function\", \"name\": \"get_weather\" },
+ { \"type\": \"mcp\", \"server_label\": \"deepwiki\" },
+ { \"type\": \"image_generation\" }
+ ]. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ mode: Literal["auto", "required"],
+ tools: list[dict[str, Any]],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.ALLOWED_TOOLS # type: ignore
+
+
+class ToolChoiceCodeInterpreter(ToolChoiceParam, discriminator="code_interpreter"):
+    """Indicates that the model should use a built-in tool to generate a response. Learn more
+    about built-in tools in the service documentation.
+
+ :ivar type: Required. CODE_INTERPRETER.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER
+ """
+
+ type: Literal[ToolChoiceParamType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. CODE_INTERPRETER."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.CODE_INTERPRETER # type: ignore
+
+
+class ToolChoiceComputerUsePreview(ToolChoiceParam, discriminator="computer_use_preview"):
+    """Indicates that the model should use a built-in tool to generate a response. Learn more
+    about built-in tools in the service documentation.
+
+ :ivar type: Required. COMPUTER_USE_PREVIEW.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_USE_PREVIEW
+ """
+
+ type: Literal[ToolChoiceParamType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. COMPUTER_USE_PREVIEW."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.COMPUTER_USE_PREVIEW # type: ignore
+
+
+class ToolChoiceCustom(ToolChoiceParam, discriminator="custom"):
+ """Custom tool.
+
+ :ivar type: For custom tool calling, the type is always ``custom``. Required. CUSTOM.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM
+ :ivar name: The name of the custom tool to call. Required.
+ :vartype name: str
+ """
+
+ type: Literal[ToolChoiceParamType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """For custom tool calling, the type is always ``custom``. Required. CUSTOM."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the custom tool to call. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ name: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.CUSTOM # type: ignore
+
+
+class ToolChoiceFileSearch(ToolChoiceParam, discriminator="file_search"):
+    """Indicates that the model should use a built-in tool to generate a response. Learn more
+    about built-in tools in the service documentation.
+
+ :ivar type: Required. FILE_SEARCH.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH
+ """
+
+ type: Literal[ToolChoiceParamType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. FILE_SEARCH."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.FILE_SEARCH # type: ignore
+
+
+class ToolChoiceFunction(ToolChoiceParam, discriminator="function"):
+ """Function tool.
+
+ :ivar type: For function calling, the type is always ``function``. Required. FUNCTION.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION
+ :ivar name: The name of the function to call. Required.
+ :vartype name: str
+ """
+
+ type: Literal[ToolChoiceParamType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """For function calling, the type is always ``function``. Required. FUNCTION."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the function to call. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ name: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.FUNCTION # type: ignore
+
+
+class ToolChoiceImageGeneration(ToolChoiceParam, discriminator="image_generation"):
+    """Indicates that the model should use a built-in tool to generate a response. Learn more
+    about built-in tools in the service documentation.
+
+ :ivar type: Required. IMAGE_GENERATION.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION
+ """
+
+ type: Literal[ToolChoiceParamType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. IMAGE_GENERATION."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.IMAGE_GENERATION # type: ignore
+
+
+class ToolChoiceMCP(ToolChoiceParam, discriminator="mcp"):
+ """MCP tool.
+
+ :ivar type: For MCP tools, the type is always ``mcp``. Required. MCP.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP
+ :ivar server_label: The label of the MCP server to use. Required.
+ :vartype server_label: str
+ :ivar name:
+ :vartype name: str
+ """
+
+ type: Literal[ToolChoiceParamType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """For MCP tools, the type is always ``mcp``. Required. MCP."""
+ server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The label of the MCP server to use. Required."""
+ name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ server_label: str,
+ name: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.MCP # type: ignore
+
+
+class ToolChoiceWebSearchPreview(ToolChoiceParam, discriminator="web_search_preview"):
+    """Indicates that the model should use a built-in tool to generate a response. Learn more
+    about built-in tools in the service documentation.
+
+ :ivar type: Required. WEB_SEARCH_PREVIEW.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_PREVIEW
+ """
+
+ type: Literal[ToolChoiceParamType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. WEB_SEARCH_PREVIEW."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.WEB_SEARCH_PREVIEW # type: ignore
+
+
+class ToolChoiceWebSearchPreview20250311(ToolChoiceParam, discriminator="web_search_preview_2025_03_11"):
+    """Indicates that the model should use a built-in tool to generate a response. Learn more
+    about built-in tools in the service documentation.
+
+ :ivar type: Required. WEB_SEARCH_PREVIEW2025_03_11.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_PREVIEW2025_03_11
+ """
+
+ type: Literal[ToolChoiceParamType.WEB_SEARCH_PREVIEW2025_03_11] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. WEB_SEARCH_PREVIEW2025_03_11."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolChoiceParamType.WEB_SEARCH_PREVIEW2025_03_11 # type: ignore
+
+
+class ToolProjectConnection(_Model):
+ """A project connection resource.
+
+ :ivar project_connection_id: A project connection in a ToolProjectConnectionList attached to
+ this tool. Required.
+ :vartype project_connection_id: str
+ """
+
+ project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A project connection in a ToolProjectConnectionList attached to this tool. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ project_connection_id: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class TopLogProb(_Model):
+ """Top log probability.
+
+ :ivar token: Required.
+ :vartype token: str
+ :ivar logprob: Required.
+ :vartype logprob: int
+ :ivar bytes: Required.
+ :vartype bytes: list[int]
+ """
+
+ token: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ logprob: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ token: str,
+ logprob: int,
+ bytes: list[int],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class TypeParam(ComputerAction, discriminator="type"):
+ """Type.
+
+ :ivar type: Specifies the event type. For a type action, this property is always set to
+ ``type``. Required. TYPE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TYPE
+ :ivar text: The text to type. Required.
+ :vartype text: str
+ """
+
+ type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Specifies the event type. For a type action, this property is always set to ``type``. Required.
+ TYPE."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text to type. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ text: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ComputerActionType.TYPE # type: ignore
+
+
+class UrlCitationBody(Annotation, discriminator="url_citation"):
+ """URL citation.
+
+ :ivar type: The type of the URL citation. Always ``url_citation``. Required. URL_CITATION.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.URL_CITATION
+ :ivar url: The URL of the web resource. Required.
+ :vartype url: str
+ :ivar start_index: The index of the first character of the URL citation in the message.
+ Required.
+ :vartype start_index: int
+ :ivar end_index: The index of the last character of the URL citation in the message. Required.
+ :vartype end_index: int
+ :ivar title: The title of the web resource. Required.
+ :vartype title: str
+ """
+
+ type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the URL citation. Always ``url_citation``. Required. URL_CITATION."""
+ url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The URL of the web resource. Required."""
+ start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the first character of the URL citation in the message. Required."""
+ end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the last character of the URL citation in the message. Required."""
+ title: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The title of the web resource. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ url: str,
+ start_index: int,
+ end_index: int,
+ title: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = AnnotationType.URL_CITATION # type: ignore
+
+
+class UserProfileMemoryItem(MemoryItem, discriminator="user_profile"):
+ """A memory item specifically containing user profile information extracted from conversations,
+ such as preferences, interests, and personal details.
+
+ :ivar memory_id: The unique ID of the memory item. Required.
+ :vartype memory_id: str
+ :ivar updated_at: The last update time of the memory item. Required.
+ :vartype updated_at: ~datetime.datetime
+ :ivar scope: The namespace that logically groups and isolates memories, such as a user ID.
+ Required.
+ :vartype scope: str
+ :ivar content: The content of the memory. Required.
+ :vartype content: str
+ :ivar kind: The kind of the memory item. Required. User profile information extracted from
+ conversations.
+ :vartype kind: str or ~azure.ai.responses.server.sdk.models.models.USER_PROFILE
+ """
+
+ kind: Literal[MemoryItemKind.USER_PROFILE] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The kind of the memory item. Required. User profile information extracted from conversations."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ memory_id: str,
+ updated_at: datetime.datetime,
+ scope: str,
+ content: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.kind = MemoryItemKind.USER_PROFILE # type: ignore
+
+
+class VectorStoreFileAttributes(_Model):
+ """Set of 16 key-value pairs that can be attached to an object. This can be useful for storing
+ additional information about the object in a structured format, and querying for objects via
+ API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are
+ strings with a maximum length of 512 characters, booleans, or numbers.
+
+ """
+
+
+class WaitParam(ComputerAction, discriminator="wait"):
+ """Wait.
+
+ :ivar type: Specifies the event type. For a wait action, this property is always set to
+ ``wait``. Required. WAIT.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WAIT
+ """
+
+ type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Specifies the event type. For a wait action, this property is always set to ``wait``. Required.
+ WAIT."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ComputerActionType.WAIT # type: ignore
+
+
+class WebSearchActionFind(_Model):
+ """Find action.
+
+ :ivar type: The action type. Required. Default value is "find_in_page".
+ :vartype type: str
+ :ivar url: The URL of the page searched for the pattern. Required.
+ :vartype url: str
+ :ivar pattern: The pattern or text to search for within the page. Required.
+ :vartype pattern: str
+ """
+
+ type: Literal["find_in_page"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The action type. Required. Default value is \"find_in_page\"."""
+ url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The URL of the page searched for the pattern. Required."""
+ pattern: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The pattern or text to search for within the page. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ url: str,
+ pattern: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["find_in_page"] = "find_in_page"
+
+
+class WebSearchActionOpenPage(_Model):
+ """Open page action.
+
+ :ivar type: The action type. Required. Default value is "open_page".
+ :vartype type: str
+ :ivar url: The URL opened by the model.
+ :vartype url: str
+ """
+
+ type: Literal["open_page"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The action type. Required. Default value is \"open_page\"."""
+ url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The URL opened by the model."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ url: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["open_page"] = "open_page"
+
+
+class WebSearchActionSearch(_Model):
+ """Search action.
+
+ :ivar type: The action type. Required. Default value is "search".
+ :vartype type: str
+ :ivar query: [DEPRECATED] The search query. Required.
+ :vartype query: str
+ :ivar queries: Search queries.
+ :vartype queries: list[str]
+ :ivar sources: Web search sources.
+ :vartype sources:
+ list[~azure.ai.responses.server.sdk.models.models.WebSearchActionSearchSources]
+ """
+
+ type: Literal["search"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The action type. Required. Default value is \"search\"."""
+ query: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """[DEPRECATED] The search query. Required."""
+ queries: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Search queries."""
+ sources: Optional[list["_models.WebSearchActionSearchSources"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Web search sources."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ query: str,
+ queries: Optional[list[str]] = None,
+ sources: Optional[list["_models.WebSearchActionSearchSources"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["search"] = "search"
+
+
+class WebSearchActionSearchSources(_Model):
+ """WebSearchActionSearchSources.
+
+ :ivar type: Required. Default value is "url".
+ :vartype type: str
+ :ivar url: Required.
+ :vartype url: str
+ """
+
+ type: Literal["url"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required. Default value is \"url\"."""
+ url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ url: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["url"] = "url"
+
+
+class WebSearchApproximateLocation(_Model):
+ """Web search approximate location.
+
+ :ivar type: The type of location approximation. Always ``approximate``. Required. Default value
+ is "approximate".
+ :vartype type: str
+ :ivar country:
+ :vartype country: str
+ :ivar region:
+ :vartype region: str
+ :ivar city:
+ :vartype city: str
+ :ivar timezone:
+ :vartype timezone: str
+ """
+
+ type: Literal["approximate"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The type of location approximation. Always ``approximate``. Required. Default value is
+ \"approximate\"."""
+ country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ country: Optional[str] = None,
+ region: Optional[str] = None,
+ city: Optional[str] = None,
+ timezone: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["approximate"] = "approximate"
+
+
+class WebSearchConfiguration(_Model):
+ """A web search configuration for bing custom search.
+
+ :ivar project_connection_id: Project connection id for grounding with bing custom search.
+ Required.
+ :vartype project_connection_id: str
+ :ivar instance_name: Name of the custom configuration instance given to config. Required.
+ :vartype instance_name: str
+ """
+
+ project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Project connection id for grounding with bing custom search. Required."""
+ instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Name of the custom configuration instance given to config. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ project_connection_id: str,
+ instance_name: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class WebSearchPreviewTool(Tool, discriminator="web_search_preview"):
+ """Web search preview.
+
+ :ivar type: The type of the web search tool. One of ``web_search_preview`` or
+ ``web_search_preview_2025_03_11``. Required. WEB_SEARCH_PREVIEW.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_PREVIEW
+ :ivar user_location:
+ :vartype user_location: ~azure.ai.responses.server.sdk.models.models.ApproximateLocation
+ :ivar search_context_size: High level guidance for the amount of context window space to use
+ for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Known
+ values are: "low", "medium", and "high".
+ :vartype search_context_size: str or
+ ~azure.ai.responses.server.sdk.models.models.SearchContextSize
+ """
+
+ type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the web search tool. One of ``web_search_preview`` or
+ ``web_search_preview_2025_03_11``. Required. WEB_SEARCH_PREVIEW."""
+ user_location: Optional["_models.ApproximateLocation"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ search_context_size: Optional[Union[str, "_models.SearchContextSize"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """High level guidance for the amount of context window space to use for the search. One of
+ ``low``, ``medium``, or ``high``. ``medium`` is the default. Known values are: \"low\",
+ \"medium\", and \"high\"."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ user_location: Optional["_models.ApproximateLocation"] = None,
+ search_context_size: Optional[Union[str, "_models.SearchContextSize"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolType.WEB_SEARCH_PREVIEW # type: ignore
+
+
+class WebSearchTool(Tool, discriminator="web_search"):
+ """Web search.
+
+ :ivar type: The type of the web search tool. One of ``web_search`` or
+ ``web_search_2025_08_26``. Required. WEB_SEARCH.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH
+ :ivar filters:
+ :vartype filters: ~azure.ai.responses.server.sdk.models.models.WebSearchToolFilters
+ :ivar user_location:
+ :vartype user_location:
+ ~azure.ai.responses.server.sdk.models.models.WebSearchApproximateLocation
+ :ivar search_context_size: High level guidance for the amount of context window space to use
+ for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of
+ the following types: Literal["low"], Literal["medium"], Literal["high"]
+     :vartype search_context_size: str
+ :ivar custom_search_configuration: The project connections attached to this tool. There can be
+ a maximum of 1 connection resource attached to the tool.
+ :vartype custom_search_configuration:
+ ~azure.ai.responses.server.sdk.models.models.WebSearchConfiguration
+ """
+
+ type: Literal[ToolType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the web search tool. One of ``web_search`` or ``web_search_2025_08_26``. Required.
+ WEB_SEARCH."""
+ filters: Optional["_models.WebSearchToolFilters"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ user_location: Optional["_models.WebSearchApproximateLocation"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """High level guidance for the amount of context window space to use for the search. One of
+ ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types:
+ Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]"""
+ custom_search_configuration: Optional["_models.WebSearchConfiguration"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The project connections attached to this tool. There can be a maximum of 1 connection resource
+ attached to the tool."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ filters: Optional["_models.WebSearchToolFilters"] = None,
+ user_location: Optional["_models.WebSearchApproximateLocation"] = None,
+ search_context_size: Optional[Literal["low", "medium", "high"]] = None,
+ custom_search_configuration: Optional["_models.WebSearchConfiguration"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolType.WEB_SEARCH # type: ignore
+
+
+class WebSearchToolFilters(_Model):
+ """WebSearchToolFilters.
+
+ :ivar allowed_domains:
+ :vartype allowed_domains: list[str]
+ """
+
+ allowed_domains: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ allowed_domains: Optional[list[str]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class WorkflowActionOutputItem(OutputItem, discriminator="workflow_action"):
+ """WorkflowActionOutputItem.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: Required. WORKFLOW_ACTION.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WORKFLOW_ACTION
+ :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required.
+ :vartype kind: str
+ :ivar action_id: Unique identifier for the action. Required.
+ :vartype action_id: str
+ :ivar parent_action_id: ID of the parent action if this is a nested action.
+ :vartype parent_action_id: str
+ :ivar previous_action_id: ID of the previous action if this action follows another.
+ :vartype previous_action_id: str
+ :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled').
+ Required. Is one of the following types: Literal["completed"], Literal["failed"],
+ Literal["in_progress"], Literal["cancelled"]
+     :vartype status: str
+ """
+
+ type: Literal[OutputItemType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required. WORKFLOW_ACTION."""
+ kind: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required."""
+ action_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Unique identifier for the action. Required."""
+ parent_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """ID of the parent action if this is a nested action."""
+ previous_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """ID of the previous action if this action follows another."""
+ status: Literal["completed", "failed", "in_progress", "cancelled"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). Required. Is
+ one of the following types: Literal[\"completed\"], Literal[\"failed\"],
+ Literal[\"in_progress\"], Literal[\"cancelled\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ kind: str,
+ action_id: str,
+ status: Literal["completed", "failed", "in_progress", "cancelled"],
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ parent_action_id: Optional[str] = None,
+ previous_action_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.WORKFLOW_ACTION # type: ignore
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_patch.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_patch.py
new file mode 100644
index 000000000000..87676c65a8f0
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/models/_patch.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/py.typed b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/py.typed
new file mode 100644
index 000000000000..e5aff4f83af8
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/_generated/sdk/models/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/errors.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/errors.py
new file mode 100644
index 000000000000..f9985521c54f
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/errors.py
@@ -0,0 +1,54 @@
+"""Error model types for request validation failures."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from ._generated import ApiErrorResponse as ApiErrorResponseType
+ from ._generated import Error as ErrorType
+else:
+ ApiErrorResponseType = Any
+ ErrorType = Any
+
+try:
+    from ._generated import ApiErrorResponse, Error
+except Exception:  # pragma: no cover - generated models may be absent in isolated unit tests; stubs below fail loudly on first use.
+    class _GeneratedUnavailable:
+        def __init__(self, *_args: Any, **_kwargs: Any) -> None:
+            raise ModuleNotFoundError(
+                "generated contract models are unavailable; run generation to restore runtime dependencies"
+            )
+
+    ApiErrorResponse = _GeneratedUnavailable  # type: ignore[assignment]
+    Error = _GeneratedUnavailable  # type: ignore[assignment]
+
+
+@dataclass(slots=True)
+class RequestValidationError(ValueError):
+    """Client-visible request validation failure; subclasses ``ValueError`` so callers can raise or catch it directly."""
+
+    message: str
+    code: str = "invalid_request"
+    param: str | None = None
+    error_type: str = "invalid_request_error"
+    debug_info: dict[str, Any] | None = None
+
+    def __post_init__(self) -> None:
+        """Seed ``ValueError.args`` with ``message``; the explicit base call avoids zero-arg ``super()``, which is unreliable under ``@dataclass(slots=True)``."""
+        ValueError.__init__(self, self.message)
+
+    def to_error(self) -> ErrorType:
+        """Convert this validation error to the generated ``Error`` model (code/message/param/type/debug_info carried over)."""
+        return Error(
+            code=self.code,
+            message=self.message,
+            param=self.param,
+            type=self.error_type,
+            debug_info=self.debug_info,
+        )
+
+    def to_api_error_response(self) -> ApiErrorResponseType:
+        """Wrap :meth:`to_error` in the generated ``ApiErrorResponse`` envelope."""
+        return ApiErrorResponse(error=self.to_error())
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/runtime.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/runtime.py
new file mode 100644
index 000000000000..997e3679abe1
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/models/runtime.py
@@ -0,0 +1,129 @@
+"""Runtime domain models for response sessions and stream events."""
+
+from __future__ import annotations
+
+import asyncio
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from typing import Any, Literal, Mapping
+
+from ._generated import Response, ResponseStreamEvent
+
+ResponseStatus = Literal["queued", "in_progress", "completed", "failed", "cancelled", "incomplete"]
+TerminalResponseStatus = Literal["completed", "failed", "cancelled", "incomplete"]
+
+
+@dataclass(slots=True)
+class ResponseModeFlags:
+    """Execution mode flags (``stream``/``store``/``background``) captured from the create request."""
+
+    stream: bool
+    store: bool
+    background: bool
+
+
+@dataclass(slots=True)
+class StreamEventRecord:
+    """One emitted stream event, as persisted for later replay."""
+
+    sequence_number: int
+    event_type: str
+    payload: Mapping[str, Any]
+    emitted_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+
+    @property
+    def terminal(self) -> bool:
+        """Return True when the event type is one of the four terminal ``response.*`` events."""
+        return self.event_type in {
+            "response.completed",
+            "response.failed",
+            "response.cancelled",
+            "response.incomplete",
+        }
+
+    @classmethod
+    def from_generated(cls, event: ResponseStreamEvent, payload: Mapping[str, Any]) -> "StreamEventRecord":
+        """Build a record from a generated stream event and its payload mapping (``emitted_at`` defaults to now, UTC)."""
+        return cls(sequence_number=event.sequence_number, event_type=event.type, payload=payload)
+
+
+@dataclass(slots=True)
+class ResponseExecution:
+    """Lightweight pipeline state for one response execution.
+
+    This type intentionally does not own persisted stream history. Stream replay
+    concerns are modeled separately in :class:`StreamReplayState`.
+    """
+
+    response_id: str
+    mode_flags: ResponseModeFlags
+    created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+    updated_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+    completed_at: datetime | None = None
+    status: ResponseStatus = "queued"
+    response: Response | None = None
+    execution_task: asyncio.Task[Any] | None = None
+    cancel_requested: bool = False
+    client_disconnected: bool = False
+    response_created_seen: bool = False
+
+    def transition_to(self, next_status: ResponseStatus) -> None:
+        """Transition this execution to a valid lifecycle status.
+        Same-status transitions are no-ops that only refresh ``updated_at``. NOTE(review): ``queued`` cannot move directly to ``cancelled`` — confirm cancel-before-start cannot happen upstream.
+        :raises ValueError: If the requested transition is not allowed.
+        """
+        allowed: dict[ResponseStatus, set[ResponseStatus]] = {
+            "queued": {"in_progress", "failed"},
+            "in_progress": {"completed", "failed", "cancelled", "incomplete"},
+            "completed": set(),
+            "failed": set(),
+            "cancelled": set(),
+            "incomplete": set(),
+        }
+
+        if next_status == self.status:
+            self.updated_at = datetime.now(timezone.utc)
+            return
+
+        if next_status not in allowed[self.status]:
+            raise ValueError(f"invalid status transition: {self.status} -> {next_status}")
+
+        self.status = next_status
+        now = datetime.now(timezone.utc)
+        self.updated_at = now
+        if self.is_terminal:
+            self.completed_at = now
+
+    @property
+    def is_terminal(self) -> bool:
+        """Return whether the execution has reached one of the four terminal states."""
+        return self.status in {"completed", "failed", "cancelled", "incomplete"}
+
+    def set_response_snapshot(self, response: Response) -> None:
+        """Replace the current response snapshot and refresh ``updated_at``."""
+        self.response = response
+        self.updated_at = datetime.now(timezone.utc)
+
+
+@dataclass(slots=True)
+class StreamReplayState:
+    """Persisted, ordered stream-event history used to replay one response's stream."""
+
+    response_id: str
+    events: list[StreamEventRecord] = field(default_factory=list)
+
+    def append(self, event: StreamEventRecord) -> None:
+        """Append an event, enforcing strictly increasing sequence numbers and no events after a terminal one."""
+        if self.events and event.sequence_number <= self.events[-1].sequence_number:
+            raise ValueError("stream event sequence numbers must be strictly increasing")
+
+        if self.events and self.events[-1].terminal:
+            raise ValueError("cannot append events after a terminal event")
+
+        self.events.append(event)
+
+    @property
+    def terminal_event_seen(self) -> bool:
+        """Return whether the last recorded event (if any) was terminal."""
+        return bool(self.events and self.events[-1].terminal)
+
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/store/_base.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/store/_base.py
new file mode 100644
index 000000000000..78e5829fed3f
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/store/_base.py
@@ -0,0 +1,81 @@
+"""Persistence abstraction for response execution and replay state."""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Protocol
+
+from ..models._generated import Response
+from ..models import ResponseExecution, ResponseStatus, StreamEventRecord
+
+
+class ResponseStore(Protocol):
+    """Protocol implemented by response persistence backends.
+
+    Store implementations must be concurrency-safe for async request handling.
+    """
+
+    async def create_execution(self, execution: ResponseExecution, *, ttl_seconds: int | None = None) -> None:
+        """Create a new execution entry; ``ttl_seconds`` optionally bounds its lifetime.
+
+        :raises ValueError: If an entry for ``execution.response_id`` already exists.
+        """
+
+    async def get_execution(self, response_id: str) -> ResponseExecution | None:
+        """Load execution state by response ID, or ``None`` when not found/expired."""
+
+    async def set_response_snapshot(
+        self,
+        response_id: str,
+        response: Response,
+        *,
+        ttl_seconds: int | None = None,
+    ) -> bool:
+        """Set latest response snapshot for an existing execution; a supplied ``ttl_seconds`` refreshes expiry.
+
+        Returns ``True`` when updated and ``False`` when the response ID does not exist.
+        """
+
+    async def transition_execution_status(
+        self,
+        response_id: str,
+        next_status: ResponseStatus,
+        *,
+        ttl_seconds: int | None = None,
+    ) -> bool:
+        """Transition execution lifecycle status for an existing entry.
+
+        Returns ``True`` when updated and ``False`` when the response ID does not exist.
+        :raises ValueError: If the transition is invalid.
+        """
+
+    async def set_cancel_requested(self, response_id: str, *, ttl_seconds: int | None = None) -> bool:
+        """Mark execution cancel intent for an existing entry (flag only; no status change).
+
+        Returns ``True`` when updated and ``False`` when the response ID does not exist.
+        """
+
+    async def append_stream_event(
+        self,
+        response_id: str,
+        event: StreamEventRecord,
+        *,
+        ttl_seconds: int | None = None,
+    ) -> bool:
+        """Append one stream event to replay history.
+
+        Returns ``True`` when appended and ``False`` when the response ID does not exist.
+        :raises ValueError: If replay sequence integrity is violated.
+        """
+
+    async def get_stream_events(self, response_id: str) -> list[StreamEventRecord] | None:
+        """Get replay events for a response ID, or ``None`` when not found/expired."""
+
+    async def delete(self, response_id: str) -> bool:
+        """Delete an execution entry and replay history.
+
+        Returns ``True`` when deleted and ``False`` when not found.
+        """
+
+    async def purge_expired(self, *, now: datetime | None = None) -> int:
+        """Purge expired entries and return the number of removed records."""
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/store/_memory.py b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/store/_memory.py
new file mode 100644
index 000000000000..0fa697d2b976
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/azure/ai/agentserver/responses/store/_memory.py
@@ -0,0 +1,192 @@
+"""In-memory response store implementation."""
+
+from __future__ import annotations
+
+import asyncio
+from copy import deepcopy
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone
+from typing import Dict
+
+from ..models._generated import Response
+from ..models import ResponseExecution, ResponseStatus, StreamEventRecord, StreamReplayState
+from ._base import ResponseStore
+
+
+def _copy_execution(execution: ResponseExecution) -> ResponseExecution:
+    """Deep-copy an execution, carrying any live ``asyncio.Task`` by reference.
+
+    ``asyncio.Task`` objects (and the coroutines they wrap) cannot be
+    deep-copied — ``copy.deepcopy`` raises ``TypeError`` — so the task is
+    detached for the duration of the copy and re-attached to both the source
+    and the copy afterwards. Sharing the task reference is also the desired
+    semantic: a copied record must still point at the one real running task.
+    """
+    task = execution.execution_task
+    execution.execution_task = None
+    try:
+        copied = deepcopy(execution)
+    finally:
+        execution.execution_task = task
+    copied.execution_task = task
+    return copied
+
+
+@dataclass(slots=True)
+class _StoreEntry:
+    """Container for one response execution and its replay state."""
+
+    execution: ResponseExecution
+    replay: StreamReplayState
+    expires_at: datetime | None = None
+
+
+class InMemoryResponseStore(ResponseStore):
+    """In-memory response store with TTL and lifecycle-safe mutation APIs."""
+
+    def __init__(self) -> None:
+        """Initialize in-memory state and an async mutation lock."""
+        self._entries: Dict[str, _StoreEntry] = {}
+        self._lock = asyncio.Lock()
+
+    async def create_execution(self, execution: ResponseExecution, *, ttl_seconds: int | None = None) -> None:
+        """Create a new execution and replay container for ``execution.response_id``.
+
+        :raises ValueError: If an entry for the response ID already exists.
+        """
+        async with self._lock:
+            self._purge_expired_unlocked()
+
+            if execution.response_id in self._entries:
+                raise ValueError(f"response '{execution.response_id}' already exists")
+
+            # Store a defensive copy so caller-side mutation cannot corrupt
+            # persisted state; _copy_execution keeps any live task attached
+            # by reference because deepcopy cannot handle asyncio.Task.
+            self._entries[execution.response_id] = _StoreEntry(
+                execution=_copy_execution(execution),
+                replay=StreamReplayState(response_id=execution.response_id),
+                expires_at=self._compute_expiry(ttl_seconds),
+            )
+
+    async def get_execution(self, response_id: str) -> ResponseExecution | None:
+        """Get a defensive copy of execution state for ``response_id`` if present."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            entry = self._entries.get(response_id)
+            if entry is None:
+                return None
+            return _copy_execution(entry.execution)
+
+    async def set_response_snapshot(
+        self,
+        response_id: str,
+        response: Response,
+        *,
+        ttl_seconds: int | None = None,
+    ) -> bool:
+        """Set the latest response snapshot for an existing response execution."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            entry = self._entries.get(response_id)
+            if entry is None:
+                return False
+
+            entry.execution.set_response_snapshot(response)
+            self._apply_ttl_unlocked(entry, ttl_seconds)
+            return True
+
+    async def transition_execution_status(
+        self,
+        response_id: str,
+        next_status: ResponseStatus,
+        *,
+        ttl_seconds: int | None = None,
+    ) -> bool:
+        """Transition execution state while preserving lifecycle invariants."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            entry = self._entries.get(response_id)
+            if entry is None:
+                return False
+
+            entry.execution.transition_to(next_status)
+            self._apply_ttl_unlocked(entry, ttl_seconds)
+            return True
+
+    async def set_cancel_requested(self, response_id: str, *, ttl_seconds: int | None = None) -> bool:
+        """Mark cancellation requested for an existing execution record."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            entry = self._entries.get(response_id)
+            if entry is None:
+                return False
+
+            entry.execution.cancel_requested = True
+            entry.execution.updated_at = datetime.now(timezone.utc)
+            self._apply_ttl_unlocked(entry, ttl_seconds)
+            return True
+
+    async def append_stream_event(
+        self,
+        response_id: str,
+        event: StreamEventRecord,
+        *,
+        ttl_seconds: int | None = None,
+    ) -> bool:
+        """Append one stream event to replay state for an existing execution."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            entry = self._entries.get(response_id)
+            if entry is None:
+                return False
+
+            entry.replay.append(deepcopy(event))
+            self._apply_ttl_unlocked(entry, ttl_seconds)
+            return True
+
+    async def get_stream_events(self, response_id: str) -> list[StreamEventRecord] | None:
+        """Get defensive copies of all replay events for ``response_id``."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            entry = self._entries.get(response_id)
+            if entry is None:
+                return None
+            return deepcopy(entry.replay.events)
+
+    async def delete(self, response_id: str) -> bool:
+        """Delete all state for a response ID if present."""
+        async with self._lock:
+            self._purge_expired_unlocked()
+            return self._entries.pop(response_id, None) is not None
+
+    async def purge_expired(self, *, now: datetime | None = None) -> int:
+        """Remove expired entries and return count."""
+        async with self._lock:
+            return self._purge_expired_unlocked(now=now)
+
+    @staticmethod
+    def _compute_expiry(ttl_seconds: int | None) -> datetime | None:
+        """Compute an absolute expiration timestamp from a TTL."""
+        if ttl_seconds is None:
+            return None
+        if ttl_seconds <= 0:
+            raise ValueError("ttl_seconds must be > 0 when set")
+        return datetime.now(timezone.utc) + timedelta(seconds=ttl_seconds)
+
+    def _apply_ttl_unlocked(self, entry: _StoreEntry, ttl_seconds: int | None) -> None:
+        """Update entry expiration timestamp when a TTL value is supplied."""
+        if ttl_seconds is not None:
+            entry.expires_at = self._compute_expiry(ttl_seconds)
+
+    def _purge_expired_unlocked(self, *, now: datetime | None = None) -> int:
+        """Remove expired entries without acquiring the lock."""
+        current_time = now or datetime.now(timezone.utc)
+        expired_ids = [
+            response_id
+            for response_id, entry in self._entries.items()
+            if entry.expires_at is not None and entry.expires_at <= current_time
+        ]
+
+        for response_id in expired_ids:
+            del self._entries[response_id]
+
+        return len(expired_ids)
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-responses/pyproject.toml
new file mode 100644
index 000000000000..7d0b1ee38e74
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/pyproject.toml
@@ -0,0 +1,68 @@
+[build-system]
+requires = ["setuptools>=64"]
+# Explicit PEP 517 backend; without it, builders fall back to the legacy
+# setuptools shim, which is deprecated and assumes a setup.py-driven project.
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "azure-ai-agentserver-responses"
+version = "0.1.0"
+description = "Python SDK for building servers implementing the Azure AI Responses protocol"
+readme = "README.md"
+requires-python = ">=3.10"
+license = "MIT"
+authors = [
+    { name = "Microsoft Corporation" },
+]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+]
+dependencies = [
+    "azure-core>=1.30.0",
+    "starlette>=0.45.0",
+    "uvicorn>=0.31.0",
+]
+
+[project.optional-dependencies]
+dev = [
+    "pytest>=7.0",
+    "pytest-asyncio>=0.21",
+    "ruff>=0.4",
+    "mypy>=1.0",
+]
+
+[tool.setuptools.packages.find]
+# NOTE(review): excluding bare "azure"/"azure.ai" relies on implicit namespace
+# package discovery still picking up azure/ai/agentserver/responses — verify
+# the built wheel actually contains the package.
+exclude = [
+    "tests*",
+    "type_spec*",
+    "specs",
+    "samples*",
+    "doc*",
+    "azure",
+    "azure.ai",
+]
+
+[tool.ruff]
+target-version = "py310"
+line-length = 120
+
+[tool.ruff.lint]
+select = ["E", "F", "W", "I"]
+
+[tool.mypy]
+python_version = "3.10"
+warn_return_any = true
+warn_unused_configs = true
+disallow_untyped_defs = true
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+testpaths = ["tests"]
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/__init__.py
new file mode 100644
index 000000000000..12c2c26a9901
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/__init__.py
@@ -0,0 +1 @@
+"""Code generation scripts for the responses server."""
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/generate_validators.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generate_validators.py
new file mode 100644
index 000000000000..3ed1ea65b213
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generate_validators.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+"""Generate Python payload validators from an OpenAPI document."""
+
+from __future__ import annotations
+
+import argparse
+import json
+from pathlib import Path
+from typing import Any
+
+try:
+    from scripts.validator_emitter import build_validator_module
+    from scripts.validator_schema_walker import SchemaWalker, discover_post_request_roots
+except ModuleNotFoundError:
+    from validator_emitter import build_validator_module
+    from validator_schema_walker import SchemaWalker, discover_post_request_roots
+
+
+def _load_spec(input_path: Path) -> dict[str, Any]:
+    """Load a JSON or YAML OpenAPI document from disk."""
+    text = input_path.read_text(encoding="utf-8")
+    try:
+        loaded = json.loads(text)
+        if isinstance(loaded, dict):
+            return loaded
+    except json.JSONDecodeError:
+        # Not JSON; fall through to the YAML parser below.
+        pass
+
+    try:
+        import yaml  # type: ignore[import-not-found]
+    except ModuleNotFoundError as exc:
+        raise ValueError(
+            f"unable to parse OpenAPI file '{input_path}'. Expected JSON, or install PyYAML for YAML input."
+        ) from exc
+
+    loaded_yaml = yaml.safe_load(text)
+    if not isinstance(loaded_yaml, dict):
+        raise ValueError(f"OpenAPI file '{input_path}' must contain a top-level object")
+    return loaded_yaml
+
+
+def _build_output(spec: dict[str, Any], roots: list[str]) -> str:
+    """Create deterministic validator module source text."""
+    schemas = spec.get("components", {}).get("schemas", {})
+    if not isinstance(schemas, dict):
+        schemas = {}
+    else:
+        schemas = dict(schemas)
+
+    def _find_create_response_inline_schema() -> dict[str, Any] | None:
+        # NOTE(review): returns the first matching POST request-body schema on
+        # the first path whose name contains "responses" (dict order
+        # dependent) — confirm the spec exposes only one such route.
+        paths = spec.get("paths", {})
+        for path, methods in paths.items():
+            if not isinstance(methods, dict):
+                continue
+            if "responses" not in str(path).lower():
+                continue
+            post = methods.get("post")
+            if not isinstance(post, dict):
+                continue
+            request_body = post.get("requestBody", {})
+            content = request_body.get("content", {}).get("application/json", {})
+            schema = content.get("schema", {})
+            if isinstance(schema, dict) and "anyOf" in schema:
+                branches = schema.get("anyOf", [])
+                if isinstance(branches, list) and branches and isinstance(branches[0], dict):
+                    return branches[0]
+            if isinstance(schema, dict) and "oneOf" in schema:
+                branches = schema.get("oneOf", [])
+                if isinstance(branches, list) and branches and isinstance(branches[0], dict):
+                    return branches[0]
+            if isinstance(schema, dict):
+                return schema
+        return None
+
+    for root in roots:
+        if root in schemas:
+            continue
+        if root == "CreateResponse":
+            inline_schema = _find_create_response_inline_schema()
+            if isinstance(inline_schema, dict):
+                schemas[root] = inline_schema
+
+    # If explicit roots are provided, respect them and skip route-wide discovery.
+    discovered_roots = [] if roots else discover_post_request_roots(spec)
+    merged_roots: list[str] = []
+    seen: set[str] = set()
+    for root in [*roots, *discovered_roots]:
+        if root and root not in seen:
+            seen.add(root)
+            merged_roots.append(root)
+
+    walker = SchemaWalker(schemas)
+    for root in merged_roots:
+        walker.walk(root)
+
+    reachable = walker.reachable if walker.reachable else schemas
+    effective_roots = merged_roots if merged_roots else sorted(reachable)
+    return build_validator_module(reachable, effective_roots)
+
+
+def main() -> int:
+    """Run the validator generator CLI."""
+    parser = argparse.ArgumentParser(description="Generate Python payload validators from OpenAPI")
+    parser.add_argument("--input", required=True, help="Path to OpenAPI JSON or YAML file")
+    parser.add_argument("--output", required=True, help="Output Python module path")
+    parser.add_argument("--root-schemas", default="", help="Comma-separated root schema names")
+    args = parser.parse_args()
+
+    input_path = Path(args.input)
+    output_path = Path(args.output)
+    if not input_path.is_file():
+        # Fail fast with a readable CLI error instead of an unhandled traceback.
+        parser.error(f"input file not found: {input_path}")
+    roots = [part.strip() for part in args.root_schemas.split(",") if part.strip()]
+
+    spec = _load_spec(input_path)
+    output = _build_output(spec, roots)
+
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+    output_path.write_text(output, encoding="utf-8")
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/__init__.py
new file mode 100644
index 000000000000..013008e395b4
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility re-exports for generated models preserved under sdk/models."""
+
+from .sdk.models.models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_enums.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_enums.py
new file mode 100644
index 000000000000..ffeb0d1362db
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_enums.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated enum symbols."""
+
+from .sdk.models.models._enums import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_models.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_models.py
new file mode 100644
index 000000000000..8c6878d69796
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_models.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated model symbols."""
+
+from .sdk.models.models._models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_patch.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_patch.py
new file mode 100644
index 000000000000..3d222c31c566
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/_patch.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated patch helpers."""
+
+from .sdk.models.models._patch import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/sdk_models__init__.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/sdk_models__init__.py
new file mode 100644
index 000000000000..784a3edcc881
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/generated_shims/sdk_models__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Model-only generated package surface."""
+
+from .models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/validator_emitter.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/validator_emitter.py
new file mode 100644
index 000000000000..ccc1330faa9f
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/validator_emitter.py
@@ -0,0 +1,431 @@
+"""Emitter that builds deterministic Python validator modules from schemas."""
+
+from __future__ import annotations
+
+from typing import Any
+
+
+def _sanitize_identifier(name: str) -> str:
+    """Convert an arbitrary schema name into a safe, non-empty Python identifier."""
+    normalized = "".join(ch if ch.isalnum() else "_" for ch in name)
+    while "__" in normalized:
+        normalized = normalized.replace("__", "_")
+    normalized = normalized.strip("_")
+    if not normalized:
+        return "schema"
+    if normalized[0].isdigit():
+        # A leading digit would make the emitted name an invalid identifier.
+        return f"schema_{normalized}"
+    return normalized
+
+
+def _resolve_ref(ref: str) -> str:
+    """Return the final segment of a JSON ``$ref`` (e.g. ``#/components/schemas/Name`` -> ``Name``)."""
+    return ref.rsplit("/", 1)[-1]
+
+
+def _ordered(value: Any) -> Any:
+    """Recursively sort mapping keys so emitted output is deterministic."""
+    if isinstance(value, dict):
+        return {k: _ordered(value[k]) for k in sorted(value)}
+    if isinstance(value, list):
+        return [_ordered(v) for v in value]
+    return value
+
+
+def _header() -> str:
+    """Return the standard generated-code license/warning header."""
+    return (
+        "# pylint: disable=line-too-long,useless-suppression,too-many-lines\n"
+        "# coding=utf-8\n"
+        "# --------------------------------------------------------------------------\n"
+        "# Copyright (c) Microsoft Corporation. All rights reserved.\n"
+        "# Licensed under the MIT License. See License.txt in the project root for license information.\n"
+        "# Code generated by Microsoft (R) Python Code Generator.\n"
+        "# Changes may cause incorrect behavior and will be lost if the code is regenerated.\n"
+        "# --------------------------------------------------------------------------\n"
+    )
+
+
+def _schema_kind(schema: dict[str, Any]) -> str | None:
+    """Classify a schema: its explicit ``type``, inferred ``object``/``union``, or ``None``."""
+    schema_type = schema.get("type")
+    if isinstance(schema_type, str):
+        return schema_type
+    if "properties" in schema or "additionalProperties" in schema or "discriminator" in schema:
+        return "object"
+    if "oneOf" in schema or "anyOf" in schema:
+        return "union"
+    return None
+
+
def build_validator_module(schemas: dict[str, dict[str, Any]], roots: list[str]) -> str:
    """Build generated validator module source code without runtime schema blobs.

    :param schemas: Mapping of schema name to its OpenAPI schema dictionary.
    :param roots: Root schema names to expose publicly; when empty, every
        schema in ``schemas`` is treated as a root.
    :return: Source text of a deterministic Python module with one
        ``_validate_*`` function per reachable schema plus a ``*Validator``
        class and ``validate_*`` wrapper per root.
    """
    # Canonicalize key order up front so regeneration is byte-stable.
    ordered_schemas = _ordered(schemas)
    target_roots = sorted(dict.fromkeys(roots)) if roots else sorted(ordered_schemas)

    # Module prologue: banner, future import, typing import.
    lines: list[str] = [_header(), "", "from __future__ import annotations", "", "from typing import Any", ""]
    # Static runtime helpers embedded verbatim into the generated module.
    lines.extend(
        [
            "try:",
            "    from . import _enums as _generated_enums",
            "except Exception:",
            "    _generated_enums = None",
            "",
            "def _append_error(errors: list[dict[str, str]], path: str, message: str) -> None:",
            "    errors.append({'path': path, 'message': message})",
            "",
            "def _type_label(value: Any) -> str:",
            "    if value is None:",
            "        return 'null'",
            "    if isinstance(value, bool):",
            "        return 'boolean'",
            "    if isinstance(value, int):",
            "        return 'integer'",
            "    if isinstance(value, float):",
            "        return 'number'",
            "    if isinstance(value, str):",
            "        return 'string'",
            "    if isinstance(value, dict):",
            "        return 'object'",
            "    if isinstance(value, list):",
            "        return 'array'",
            "    return type(value).__name__",
            "",
            "def _is_type(value: Any, expected: str) -> bool:",
            "    if expected == 'string':",
            "        return isinstance(value, str)",
            "    if expected == 'integer':",
            "        return isinstance(value, int) and not isinstance(value, bool)",
            "    if expected == 'number':",
            "        return (isinstance(value, int) and not isinstance(value, bool)) or isinstance(value, float)",
            "    if expected == 'boolean':",
            "        return isinstance(value, bool)",
            "    if expected == 'object':",
            "        return isinstance(value, dict)",
            "    if expected == 'array':",
            "        return isinstance(value, list)",
            "    return True",
            "",
            "def _append_type_mismatch(errors: list[dict[str, str]], path: str, expected: str, value: Any) -> None:",
            "    _append_error(errors, path, f\"Expected {expected}, got {_type_label(value)}\")",
            "",
            "def _enum_values(enum_name: str) -> tuple[tuple[str, ...] | None, str | None]:",
            "    if _generated_enums is None:",
            "        return None, f'enum type _enums.{enum_name} is unavailable'",
            "    enum_cls = getattr(_generated_enums, enum_name, None)",
            "    if enum_cls is None:",
            "        return None, f'enum type _enums.{enum_name} is not defined'",
            "    try:",
            "        return tuple(str(member.value) for member in enum_cls), None",
            "    except Exception:",
            "        return None, f'enum type _enums.{enum_name} failed to load values'",
            "",
        ]
    )

    # Registries for emitted validator functions, keyed by function name.
    function_schemas: dict[str, dict[str, Any]] = {}
    function_hints: dict[str, str | None] = {}
    function_order: list[str] = []
    # Dedup map: canonical repr of an anonymous schema -> its function name.
    anonymous_by_key: dict[str, str] = {}

    def make_unique_function_name(hint: str | None) -> str:
        # Derive a collision-free function name from the (optional) hint.
        base = _sanitize_identifier(hint or "branch")
        candidate = f"_validate_{base}"
        if candidate not in function_schemas:
            return candidate

        suffix = 2
        while True:
            candidate = f"_validate_{base}_{suffix}"
            if candidate not in function_schemas:
                return candidate
            suffix += 1

    def ensure_schema_function(schema_name: str) -> str:
        # Register (once) the validator function for a named schema; the name
        # is returned even when the schema is unknown so callers stay simple.
        fn_name = f"_validate_{_sanitize_identifier(schema_name)}"
        if fn_name not in function_schemas:
            schema = ordered_schemas.get(schema_name)
            if isinstance(schema, dict):
                function_schemas[fn_name] = schema
                function_hints[fn_name] = schema_name
                function_order.append(fn_name)
        return fn_name

    def ensure_anonymous_function(schema: dict[str, Any], hint: str | None = None) -> str:
        # Structurally identical anonymous subschemas share one function.
        key = repr(_ordered(schema))
        if key in anonymous_by_key:
            existing = anonymous_by_key[key]
            # Upgrade a hint-less registration with the first real hint seen.
            if function_hints.get(existing) is None and hint is not None:
                function_hints[existing] = hint
            return existing
        fn_name = make_unique_function_name(hint)
        anonymous_by_key[key] = fn_name
        function_schemas[fn_name] = schema
        function_hints[fn_name] = hint
        function_order.append(fn_name)
        return fn_name

    # Seed the worklist with the requested roots.
    for root in target_roots:
        ensure_schema_function(root)

    def emit_line(block: list[str], indent: int, text: str) -> None:
        # Append one generated source line at the given indent level.
        block.append(("    " * indent) + text)

    def emit_union(
        schema: dict[str, Any],
        block: list[str],
        indent: int,
        value_expr: str,
        path_expr: str,
        errors_expr: str,
        schema_name_hint: str | None,
    ) -> None:
        # Emit first-match-wins dispatch code for a oneOf/anyOf union.
        branches = schema.get("oneOf", schema.get("anyOf", []))
        branch_funcs: list[tuple[str, str]] = []
        expected_labels: list[str] = []
        has_inline_enum_branch = False

        for branch in branches:
            if not isinstance(branch, dict):
                continue

            if "$ref" in branch:
                ref_name = _resolve_ref(str(branch["$ref"]))
                ref_schema = ordered_schemas.get(ref_name)
                if isinstance(ref_schema, dict):
                    branch_funcs.append((ensure_schema_function(ref_name), _schema_kind(ref_schema) or "value"))
                    expected_labels.append(ref_name)
                continue

            if schema_name_hint and "enum" in branch:
                # Keep enum branches tied to the logical schema name so enum-class resolution stays stable.
                branch_hint = schema_name_hint
                has_inline_enum_branch = True
            else:
                branch_type = branch.get("type") if isinstance(branch.get("type"), str) else (_schema_kind(branch) or "branch")
                branch_hint = f"{schema_name_hint}_{branch_type}" if schema_name_hint else str(branch_type)
            fn_name = ensure_anonymous_function(branch, hint=branch_hint)
            branch_funcs.append((fn_name, _schema_kind(branch) or "value"))
            label = branch.get("type") if isinstance(branch.get("type"), str) else (_schema_kind(branch) or "value")
            expected_labels.append(str(label))

        if not branch_funcs:
            return

        emit_line(block, indent, "_matched_union = False")
        for idx, (fn_name, kind) in enumerate(branch_funcs):
            # Cheap type pre-check skips branches that cannot possibly match.
            condition = "True" if kind in ("value", "union", None) else f"_is_type({value_expr}, {kind!r})"
            emit_line(block, indent, f"if not _matched_union and {condition}:")
            emit_line(block, indent + 1, f"_branch_errors_{idx}: list[dict[str, str]] = []")
            emit_line(block, indent + 1, f"{fn_name}({value_expr}, {path_expr}, _branch_errors_{idx})")
            emit_line(block, indent + 1, f"if not _branch_errors_{idx}:")
            emit_line(block, indent + 2, "_matched_union = True")

        unique_expected_labels = list(dict.fromkeys(expected_labels))
        emit_line(block, indent, "if not _matched_union:")
        if len(unique_expected_labels) == 1:
            only_label = unique_expected_labels[0]
            if schema_name_hint and only_label == "string" and has_inline_enum_branch:
                # String-plus-enum unions get a friendlier, schema-named error.
                schema_label = schema_name_hint.rsplit(".", 1)[-1]
                emit_line(
                    block,
                    indent + 1,
                    f"_append_error({errors_expr}, {path_expr}, f\"Expected {schema_label} to be a string value, got {{_type_label({value_expr})}}\")",
                )
            else:
                emit_line(block, indent + 1, f"_append_error({errors_expr}, {path_expr}, 'Expected {only_label}')")
        else:
            expected = ", ".join(unique_expected_labels) if unique_expected_labels else "valid branch"
            emit_line(
                block,
                indent + 1,
                f"_append_error({errors_expr}, {path_expr}, f\"Expected one of: {expected}; got {{_type_label({value_expr})}}\")",
            )
        emit_line(block, indent + 1, "return")

    def emit_schema_body(
        schema: dict[str, Any],
        block: list[str],
        indent: int,
        value_expr: str,
        path_expr: str,
        errors_expr: str,
        schema_name_hint: str | None = None,
    ) -> None:
        # Emit the validation statements for one schema into ``block``.
        if schema.get("nullable"):
            emit_line(block, indent, f"if {value_expr} is None:")
            emit_line(block, indent + 1, "return")

        if "$ref" in schema:
            # Pure reference: delegate to the named schema's validator.
            ref_name = _resolve_ref(str(schema["$ref"]))
            ref_schema = ordered_schemas.get(ref_name)
            if isinstance(ref_schema, dict):
                emit_line(block, indent, f"{ensure_schema_function(ref_name)}({value_expr}, {path_expr}, {errors_expr})")
            return

        if "enum" in schema:
            allowed = tuple(schema.get("enum", []))
            enum_class_name = None
            if schema_name_hint:
                # Only defer to the generated _enums class when the hinted
                # schema itself is enum-like (direct enum or enum union branch).
                hint_schema = ordered_schemas.get(schema_name_hint)
                hint_is_enum_like = False
                if isinstance(hint_schema, dict):
                    if "enum" in hint_schema:
                        hint_is_enum_like = True
                    else:
                        for combo in ("oneOf", "anyOf"):
                            branches = hint_schema.get(combo, [])
                            if isinstance(branches, list) and any(
                                isinstance(b, dict) and "enum" in b for b in branches
                            ):
                                hint_is_enum_like = True
                                break
                if hint_is_enum_like:
                    candidate = schema_name_hint.rsplit(".", 1)[-1]
                    if candidate and candidate[0].isalpha():
                        enum_class_name = candidate

            if enum_class_name:
                # Resolve allowed values at runtime from the generated enums.
                emit_line(
                    block,
                    indent,
                    f"_allowed_values, _enum_error = _enum_values({enum_class_name!r})",
                )
                emit_line(block, indent, "if _enum_error is not None:")
                emit_line(block, indent + 1, f"_append_error({errors_expr}, {path_expr}, _enum_error)")
                emit_line(block, indent + 1, "return")
                emit_line(block, indent, "if _allowed_values is None:")
                emit_line(block, indent + 1, "return")
            else:
                # Inline the literal allowed values when no enum class exists.
                emit_line(block, indent, f"_allowed_values = {allowed!r}")
            emit_line(block, indent, f"if {value_expr} not in _allowed_values:")
            emit_line(
                block,
                indent + 1,
                f"_append_error({errors_expr}, {path_expr}, f\"Invalid value '{{{value_expr}}}'. Allowed: {{', '.join(str(v) for v in _allowed_values)}}\")",
            )

        if "oneOf" in schema or "anyOf" in schema:
            emit_union(schema, block, indent, value_expr, path_expr, errors_expr, schema_name_hint)
            return

        schema_type = schema.get("type")
        effective_type = schema_type if isinstance(schema_type, str) else _schema_kind(schema)

        if isinstance(effective_type, str) and effective_type not in ("value", "union"):
            # Type gate: bail out early so later checks can assume the type.
            emit_line(block, indent, f"if not _is_type({value_expr}, {effective_type!r}):")
            emit_line(block, indent + 1, f"_append_type_mismatch({errors_expr}, {path_expr}, {effective_type!r}, {value_expr})")
            emit_line(block, indent + 1, "return")

        if effective_type == "array":
            items = schema.get("items")
            if isinstance(items, dict):
                item_hint = f"{schema_name_hint}_item" if schema_name_hint else "item"
                item_fn = ensure_anonymous_function(items, hint=item_hint)
                emit_line(block, indent, f"for _idx, _item in enumerate({value_expr}):")
                emit_line(block, indent + 1, f"{item_fn}(_item, f\"{{{path_expr}}}[{{_idx}}]\", {errors_expr})")
            return

        if effective_type == "object":
            properties = schema.get("properties", {})
            required = schema.get("required", [])
            if isinstance(properties, dict):
                # Required-property presence checks come first.
                for field in required:
                    emit_line(block, indent, f"if {field!r} not in {value_expr}:")
                    emit_line(
                        block,
                        indent + 1,
                        f"_append_error({errors_expr}, f\"{{{path_expr}}}.{field}\", \"Required property '{field}' is missing\")",
                    )

                # Per-property validation, sorted for deterministic output.
                for field, field_schema in sorted(properties.items()):
                    if not isinstance(field_schema, dict):
                        continue
                    field_hint = f"{schema_name_hint}_{field}" if schema_name_hint else field
                    field_fn = ensure_anonymous_function(field_schema, hint=field_hint)
                    emit_line(block, indent, f"if {field!r} in {value_expr}:")
                    emit_line(
                        block,
                        indent + 1,
                        f"{field_fn}({value_expr}[{field!r}], f\"{{{path_expr}}}.{field}\", {errors_expr})",
                    )

            addl = schema.get("additionalProperties")
            if isinstance(addl, dict):
                # Validate keys not covered by declared properties.
                addl_hint = f"{schema_name_hint}_additional_property" if schema_name_hint else "additional_property"
                addl_fn = ensure_anonymous_function(addl, hint=addl_hint)
                known = tuple(sorted(properties.keys())) if isinstance(properties, dict) else tuple()
                emit_line(block, indent, f"for _key, _item in {value_expr}.items():")
                emit_line(block, indent + 1, f"if _key not in {known!r}:")
                emit_line(block, indent + 2, f"{addl_fn}(_item, f\"{{{path_expr}}}.{{_key}}\", {errors_expr})")

            disc = schema.get("discriminator")
            if isinstance(disc, dict):
                # Discriminator dispatch: require a string tag, then chain to
                # the mapped schema's validator for each known tag value.
                prop = disc.get("propertyName", "type")
                mapping = disc.get("mapping", {})
                emit_line(block, indent, f"_disc_value = {value_expr}.get({prop!r})")
                emit_line(block, indent, f"if not isinstance(_disc_value, str):")
                emit_line(
                    block,
                    indent + 1,
                    f"_append_error({errors_expr}, f\"{{{path_expr}}}.{prop}\", \"Required discriminator '{prop}' is missing or invalid\")",
                )
                emit_line(block, indent + 1, "return")

                for disc_value, ref in sorted(mapping.items()):
                    if not isinstance(ref, str):
                        continue
                    ref_name = _resolve_ref(ref)
                    ref_schema = ordered_schemas.get(ref_name)
                    if not isinstance(ref_schema, dict):
                        continue
                    ref_fn = ensure_schema_function(ref_name)
                    emit_line(block, indent, f"if _disc_value == {disc_value!r}:")
                    emit_line(block, indent + 1, f"{ref_fn}({value_expr}, {path_expr}, {errors_expr})")

    # Render all function bodies; emitting one body may discover new schemas,
    # so iterate by index while function_order keeps growing.
    rendered_blocks: dict[str, list[str]] = {}
    idx = 0
    while idx < len(function_order):
        fn_name = function_order[idx]
        idx += 1
        schema = function_schemas[fn_name]
        block: list[str] = [f"def {fn_name}(value: Any, path: str, errors: list[dict[str, str]]) -> None:"]
        schema_name_hint = function_hints.get(fn_name)
        emit_schema_body(schema, block, 1, "value", "path", "errors", schema_name_hint=schema_name_hint)
        if len(block) == 1:
            # Empty body: keep the function syntactically valid.
            emit_line(block, 1, "return")
        rendered_blocks[fn_name] = block

    for fn_name in function_order:
        lines.extend(rendered_blocks[fn_name])
        lines.append("")

    lines.append("ROOT_SCHEMAS = " + repr(target_roots))
    lines.append("")

    # Public surface: one *Validator class plus a validate_* wrapper per root.
    for root in target_roots:
        class_name = f"{_sanitize_identifier(root)}Validator"
        fn_name = f"_validate_{_sanitize_identifier(root)}"
        lines.append(f"class {class_name}:")
        lines.append("    \"\"\"Generated validator for the root schema.\"\"\"")
        lines.append("")
        lines.append("    @staticmethod")
        lines.append("    def validate(payload: Any) -> list[dict[str, str]]:")
        lines.append("        errors: list[dict[str, str]] = []")
        lines.append(f"        {fn_name}(payload, '$', errors)")
        lines.append("        return errors")
        lines.append("")

        wrapper_name = f"validate_{_sanitize_identifier(root)}"
        lines.append(f"def {wrapper_name}(payload: Any) -> list[dict[str, str]]:")
        lines.append(f"    return {class_name}.validate(payload)")
        lines.append("")

    if not target_roots:
        # Degenerate case: emit a no-op validator so the module stays importable.
        lines.append("def validate_payload(payload: Any) -> list[dict[str, str]]:")
        lines.append("    _ = payload")
        lines.append("    return []")
        lines.append("")

    return "\n".join(lines).rstrip() + "\n"
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/scripts/validator_schema_walker.py b/sdk/agentserver/azure-ai-agentserver-responses/scripts/validator_schema_walker.py
new file mode 100644
index 000000000000..8b4ad67bc39a
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/scripts/validator_schema_walker.py
@@ -0,0 +1,104 @@
+"""Schema walking helpers for validator generation."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any
+
+
def resolve_ref(ref: str) -> str:
    """Extract schema name from OpenAPI $ref values."""
    # The component name is whatever follows the final "/" separator.
    return ref.split("/")[-1]
+
+
+def _iter_subschemas(schema: dict[str, Any]) -> list[dict[str, Any]]:
+ """Yield nested schema objects that may contain references."""
+ nested: list[dict[str, Any]] = []
+
+ for key in ("oneOf", "anyOf", "allOf"):
+ branches = schema.get(key, [])
+ if isinstance(branches, list):
+ nested.extend([branch for branch in branches if isinstance(branch, dict)])
+
+ properties = schema.get("properties", {})
+ if isinstance(properties, dict):
+ nested.extend([value for value in properties.values() if isinstance(value, dict)])
+
+ items = schema.get("items")
+ if isinstance(items, dict):
+ nested.append(items)
+
+ additional = schema.get("additionalProperties")
+ if isinstance(additional, dict):
+ nested.append(additional)
+
+ return nested
+
+
@dataclass
class SchemaWalker:
    """Collect schemas reachable from one or more roots."""

    # All known schemas, keyed by name.
    schemas: dict[str, dict[str, Any]]
    # Names (with their schemas) actually reachable from the walked roots.
    reachable: dict[str, dict[str, Any]] = field(default_factory=dict)
    # Names already walked; guards against reference cycles and rework.
    _visited: set[str] = field(default_factory=set)

    def walk(self, name: str) -> None:
        """Walk a schema by name and recursively collect reachable references."""
        if name in self._visited:
            # Already processed (or in progress); cycles terminate here.
            return
        self._visited.add(name)

        target = self.schemas.get(name)
        if target is None:
            # Dangling reference: nothing to record.
            return

        self.reachable[name] = target
        self._walk_schema(target)

    def _walk_schema(self, schema: dict[str, Any]) -> None:
        """Walk nested schema branches."""
        pointer = schema.get("$ref")
        if isinstance(pointer, str):
            # A $ref node delegates entirely to the referenced schema.
            self.walk(resolve_ref(pointer))
            return

        for child in _iter_subschemas(schema):
            self._walk_schema(child)
+
+
def discover_post_request_roots(spec: dict[str, Any]) -> list[str]:
    """Discover root schema names referenced by POST request bodies.

    :param spec: Parsed OpenAPI document.
    :return: Schema names referenced directly (or via a top-level
        ``oneOf``/``anyOf``) by ``application/json`` POST request bodies,
        deduplicated in discovery order; paths are sorted for determinism.
    """
    roots: list[str] = []
    paths = spec.get("paths", {})
    if not isinstance(paths, dict):
        # Malformed or absent paths node: nothing to discover.
        return []

    for _path, methods in sorted(paths.items()):
        if not isinstance(methods, dict):
            continue
        post = methods.get("post")
        if not isinstance(post, dict):
            continue

        # Guard every nesting level: real specs may put a $ref string (or
        # other non-dict) where an object is expected, which previously
        # raised AttributeError on .get().
        request_body = post.get("requestBody", {})
        if not isinstance(request_body, dict):
            continue
        content = request_body.get("content", {})
        if not isinstance(content, dict):
            continue
        media = content.get("application/json", {})
        if not isinstance(media, dict):
            continue
        schema = media.get("schema", {})
        if not isinstance(schema, dict):
            continue

        if isinstance(schema.get("$ref"), str):
            # Direct reference: the referenced schema is the root.
            roots.append(resolve_ref(schema["$ref"]))
            continue

        # Top-level unions: each referenced branch counts as a root.
        for key in ("oneOf", "anyOf"):
            branches = schema.get(key, [])
            if not isinstance(branches, list):
                continue
            for branch in branches:
                if isinstance(branch, dict) and isinstance(branch.get("$ref"), str):
                    roots.append(resolve_ref(branch["$ref"]))

    # Deduplicate while preserving first-seen order.
    return list(dict.fromkeys(roots))
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_cancel_endpoint.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_cancel_endpoint.py
new file mode 100644
index 000000000000..27e5be579aad
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_cancel_endpoint.py
@@ -0,0 +1,6 @@
+"""Contract tests for POST /responses/{response_id}/cancel behavior."""
+
+
def test_cancel_endpoint_placeholder() -> None:
    """Temporary smoke placeholder; real cancel-endpoint contract tests land in Phase 4."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_create_endpoint.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_create_endpoint.py
new file mode 100644
index 000000000000..2695995576d0
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_create_endpoint.py
@@ -0,0 +1,6 @@
+"""Contract tests for POST /responses endpoint behavior."""
+
+
def test_create_endpoint_placeholder() -> None:
    """Temporary smoke placeholder; real create-endpoint contract tests land in Phase 3."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_get_endpoint.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_get_endpoint.py
new file mode 100644
index 000000000000..3193ab34dff3
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_get_endpoint.py
@@ -0,0 +1,6 @@
+"""Contract tests for GET /responses/{response_id} endpoint behavior."""
+
+
def test_get_endpoint_placeholder() -> None:
    """Temporary smoke placeholder; real get-endpoint contract tests land in Phase 4."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_streaming_behavior.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_streaming_behavior.py
new file mode 100644
index 000000000000..07675efae00f
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/contract/test_streaming_behavior.py
@@ -0,0 +1,6 @@
+"""Contract tests for SSE streaming behavior."""
+
+
def test_streaming_behavior_placeholder() -> None:
    """Temporary smoke placeholder; real SSE streaming tests land in Phase 3."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/data/minimal_openapi.json b/sdk/agentserver/azure-ai-agentserver-responses/tests/data/minimal_openapi.json
new file mode 100644
index 000000000000..f5b41faf41d2
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/data/minimal_openapi.json
@@ -0,0 +1,28 @@
+{
+ "paths": {
+ "/responses": {
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateResponse"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "CreateResponse": {
+ "type": "object",
+ "required": ["model"],
+ "properties": {
+ "model": {"type": "string"}
+ }
+ }
+ }
+ }
+}
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/integration/test_starlette_hosting.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/integration/test_starlette_hosting.py
new file mode 100644
index 000000000000..67ab0cf36a1a
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/integration/test_starlette_hosting.py
@@ -0,0 +1,6 @@
+"""Integration tests for Starlette host registration and wiring."""
+
+
def test_starlette_hosting_placeholder() -> None:
    """Temporary smoke placeholder; real Starlette integration tests land in Phase 5."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/integration/test_store_lifecycle.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/integration/test_store_lifecycle.py
new file mode 100644
index 000000000000..044b7a4cb1bd
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/integration/test_store_lifecycle.py
@@ -0,0 +1,6 @@
+"""Integration tests for store and lifecycle behavior."""
+
+
def test_store_lifecycle_placeholder() -> None:
    """Temporary smoke placeholder; real lifecycle tests land in Phase 4."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_generated_payload_validation.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_generated_payload_validation.py
new file mode 100644
index 000000000000..df10ffb7a8d6
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_generated_payload_validation.py
@@ -0,0 +1,177 @@
+"""Unit tests for generated payload validator integration in parse flow."""
+
+from __future__ import annotations
+
+import types
+from pathlib import Path
+
+import pytest
+
+from azure.ai.agentserver.responses import _validation
+from azure.ai.agentserver.responses._validation import parse_create_response
+from azure.ai.agentserver.responses.models import RequestValidationError
+
+
+class _StubCreateResponse:
+ def __init__(self, payload: object) -> None:
+ data = payload if isinstance(payload, dict) else {}
+ self.model = data.get("model")
+
+
+class _StubGeneratedValidators:
+ @staticmethod
+ def validate_CreateResponse(_payload: object) -> list[dict[str, str]]:
+ return [{"path": "$.model", "message": "Required property 'model' is missing"}]
+
+
+class _PassGeneratedValidators:
+ @staticmethod
+ def validate_CreateResponse(_payload: object) -> list[dict[str, str]]:
+ return []
+
+
def _load_generated_validators_module() -> types.ModuleType:
    """Exec the generated validators file into a throwaway module object."""
    # parents[2] climbs from tests/unit/<this file> to the package root.
    package_root = Path(__file__).resolve().parents[2]
    validators_path = (
        package_root / "azure" / "ai" / "agentserver" / "responses" / "models" / "_generated" / "_validators.py"
    )
    module = types.ModuleType("generated_validators_runtime")
    source = validators_path.read_text(encoding="utf-8")
    exec(source, module.__dict__)
    return module
+
+
def test_parse_create_response_uses_generated_payload_validator(monkeypatch: pytest.MonkeyPatch) -> None:
    """Generated validator findings must surface as RequestValidationError debug info."""
    monkeypatch.setattr(_validation, "CreateResponse", _StubCreateResponse)
    monkeypatch.setattr(_validation, "_generated_validators", _StubGeneratedValidators)

    with pytest.raises(RequestValidationError) as exc_info:
        parse_create_response({})

    raised = exc_info.value
    expected_errors = [{"path": "$.model", "message": "Required property 'model' is missing"}]
    assert raised.code == "invalid_request"
    assert raised.debug_info is not None
    assert raised.debug_info.get("errors") == expected_errors
+
+
def test_parse_create_response_allows_valid_payload_when_generated_checks_pass(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A payload that passes generated validation parses into the stub model."""
    monkeypatch.setattr(_validation, "CreateResponse", _StubCreateResponse)
    monkeypatch.setattr(_validation, "_generated_validators", _PassGeneratedValidators)

    result = parse_create_response({"model": "gpt-4o"})
    assert result.model == "gpt-4o"
+
+
def test_parse_create_response_without_generated_module_still_parses() -> None:
    """Parsing must degrade gracefully when no generated validator module exists."""
    saved_validators = _validation._generated_validators
    saved_create_response = _validation.CreateResponse
    try:
        _validation.CreateResponse = _StubCreateResponse
        _validation._generated_validators = None
        result = parse_create_response({"model": "gpt-4o"})
        assert result.model == "gpt-4o"
    finally:
        # Restore module globals even if the parse above fails.
        _validation.CreateResponse = saved_create_response
        _validation._generated_validators = saved_validators
+
+
def test_generated_create_response_validator_accepts_string_input() -> None:
    """A plain string is a valid CreateResponse.input union branch."""
    runtime = _load_generated_validators_module()
    payload = {"model": "gpt-4o", "input": "hello world"}
    assert runtime.validate_CreateResponse(payload) == []
+
+
def test_generated_create_response_validator_accepts_array_input_items() -> None:
    """An array of typed item objects is a valid CreateResponse.input branch."""
    runtime = _load_generated_validators_module()
    payload = {"model": "gpt-4o", "input": [{"type": "message"}]}
    assert runtime.validate_CreateResponse(payload) == []
+
+
def test_generated_create_response_validator_rejects_non_string_non_array_input() -> None:
    """Numbers fall outside the string-or-array input union."""
    runtime = _load_generated_validators_module()
    errors = runtime.validate_CreateResponse({"model": "gpt-4o", "input": 123})
    input_errors = [e for e in errors if e["path"] == "$.input"]
    assert any("Expected one of: string, array" in e["message"] for e in input_errors)
+
+
def test_generated_create_response_validator_rejects_non_object_input_item() -> None:
    """A non-object element inside the input array fails the union check."""
    runtime = _load_generated_validators_module()
    errors = runtime.validate_CreateResponse({"model": "gpt-4o", "input": [123]})
    input_errors = [e for e in errors if e["path"] == "$.input"]
    assert any("Expected one of: string, array" in e["message"] for e in input_errors)
+
+
def test_generated_create_response_validator_rejects_input_item_missing_type() -> None:
    """An input item without its discriminator field fails the union check."""
    runtime = _load_generated_validators_module()
    errors = runtime.validate_CreateResponse({"model": "gpt-4o", "input": [{}]})
    input_errors = [e for e in errors if e["path"] == "$.input"]
    assert any("Expected one of: string, array" in e["message"] for e in input_errors)
+
+
def test_generated_create_response_validator_rejects_input_item_type_with_wrong_primitive() -> None:
    """A non-string discriminator value inside an input item fails the union check."""
    runtime = _load_generated_validators_module()
    errors = runtime.validate_CreateResponse({"model": "gpt-4o", "input": [{"type": 1}]})
    input_errors = [e for e in errors if e["path"] == "$.input"]
    assert any("Expected one of: string, array" in e["message"] for e in input_errors)
+
+
@pytest.mark.parametrize(
    "item_type",
    [
        "message",
        "item_reference",
        "function_call_output",
        "computer_call_output",
        "apply_patch_call_output",
    ],
)
def test_generated_create_response_validator_accepts_multiple_input_item_types(item_type: str) -> None:
    """Each known discriminator value validates as a CreateResponse input item."""
    runtime = _load_generated_validators_module()
    payload = {"model": "gpt-4o", "input": [{"type": item_type}]}
    assert runtime.validate_CreateResponse(payload) == []
+
+
def test_generated_create_response_validator_accepts_mixed_input_item_types() -> None:
    """Heterogeneous discriminator values may coexist inside one input array."""
    runtime = _load_generated_validators_module()
    mixed_items = [
        {"type": "message"},
        {"type": "item_reference"},
        {"type": "function_call_output"},
    ]
    payload = {"model": "gpt-4o", "input": mixed_items}
    assert runtime.validate_CreateResponse(payload) == []
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_options.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_options.py
new file mode 100644
index 000000000000..a4c6e5dc2df8
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_options.py
@@ -0,0 +1,6 @@
+"""Unit tests for server options behavior."""
+
+
def test_options_placeholder() -> None:
    """Temporary smoke placeholder; real server-options tests land in Phase 5."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_sse_writer.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_sse_writer.py
new file mode 100644
index 000000000000..9d499de3010d
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_sse_writer.py
@@ -0,0 +1,6 @@
+"""Unit tests for SSE encoding helpers."""
+
+
def test_sse_writer_placeholder() -> None:
    """Temporary smoke placeholder; real SSE encoding tests land in Phase 3."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validation.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validation.py
new file mode 100644
index 000000000000..27e9a9053be6
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validation.py
@@ -0,0 +1,6 @@
+"""Unit tests for validation helpers."""
+
+
def test_validation_placeholder() -> None:
    """Temporary smoke placeholder; real validation-helper tests land in Phase 3."""
    placeholder_ready = True
    assert placeholder_ready
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_emitter.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_emitter.py
new file mode 100644
index 000000000000..0d328b7468f8
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_emitter.py
@@ -0,0 +1,249 @@
+"""Tests for validator emitter behavior."""
+
+from __future__ import annotations
+
+import re
+from types import ModuleType
+
+from scripts.validator_emitter import build_validator_module
+
+
+def _load_module(code: str) -> ModuleType:
+ module = ModuleType("generated_validators")
+ exec(code, module.__dict__)
+ return module
+
+
+def test_emitter_generates_required_property_check() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "required": ["model"],
+ "properties": {"model": {"type": "string"}},
+ }
+ }
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+ errors = module.validate_CreateResponse({})
+ assert any(e["path"] == "$.model" and "missing" in e["message"].lower() for e in errors)
+
+
+def test_emitter_generates_class_without_schema_definition() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "required": ["model"],
+ "properties": {"model": {"type": "string"}},
+ }
+ }
+ code = build_validator_module(schemas, ["CreateResponse"])
+ assert "class CreateResponseValidator" in code
+ assert "\nSCHEMAS =" not in code
+
+
+def test_emitter_uses_generated_enum_values_when_available() -> None:
+ schemas = {
+ "OpenAI.ToolType": {
+ "anyOf": [
+ {"type": "string"},
+ {"type": "string", "enum": ["function", "file_search"]},
+ ]
+ }
+ }
+ code = build_validator_module(schemas, ["OpenAI.ToolType"])
+ assert "_enum_values('ToolType')" in code
+
+
+def test_emitter_deduplicates_string_union_error_message() -> None:
+ schemas = {
+ "OpenAI.InputItemType": {
+ "anyOf": [
+ {"type": "string"},
+ {"type": "string", "enum": ["message", "item_reference"]},
+ ]
+ }
+ }
+
+ module = _load_module(build_validator_module(schemas, ["OpenAI.InputItemType"]))
+ errors = module.validate_OpenAI_InputItemType(123)
+ assert errors
+ assert errors[0]["path"] == "$"
+ assert "InputItemType" in errors[0]["message"]
+ assert "got integer" in errors[0]["message"].lower()
+ assert "string, string" not in errors[0]["message"]
+
+
+def test_emitter_generates_nullable_handling() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {"instructions": {"type": "string", "nullable": True}},
+ }
+ }
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+ assert module.validate_CreateResponse({"instructions": None}) == []
+
+
+def test_emitter_generates_primitive_type_checks_and_enum_literal() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "model": {"type": "string", "enum": ["gpt-4o", "gpt-4.1"]},
+ "temperature": {"type": "number"},
+ "stream": {"type": "boolean"},
+ },
+ }
+ }
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+ errors = module.validate_CreateResponse({"model": "bad", "temperature": "hot", "stream": "yes"})
+ assert any(e["path"] == "$.model" and "allowed" in e["message"].lower() for e in errors)
+ assert any(e["path"] == "$.temperature" and "number" in e["message"].lower() for e in errors)
+ assert any(e["path"] == "$.stream" and "boolean" in e["message"].lower() for e in errors)
+
+
+def test_emitter_generates_nested_delegate_calls() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {"metadata": {"$ref": "#/components/schemas/Metadata"}},
+ },
+ "Metadata": {
+ "type": "object",
+ "required": ["id"],
+ "properties": {"id": {"type": "string"}},
+ },
+ }
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+ errors = module.validate_CreateResponse({"metadata": {}})
+ assert any(e["path"] == "$.metadata.id" for e in errors)
+
+
+def test_emitter_generates_union_kind_check_for_oneof_anyof() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "tool_choice": {
+ "anyOf": [
+ {"type": "string"},
+ {"$ref": "#/components/schemas/ToolChoiceParam"},
+ ]
+ }
+ },
+ },
+ "ToolChoiceParam": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {"type": {"type": "string"}},
+ },
+ }
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+ errors = module.validate_CreateResponse({"tool_choice": 123})
+ assert any(e["path"] == "$.tool_choice" and "expected one of" in e["message"].lower() for e in errors)
+
+
+def test_emitter_validates_create_response_input_property() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "input": {
+ "anyOf": [
+ {"type": "string"},
+ {
+ "type": "array",
+ "items": {"$ref": "#/components/schemas/InputItem"},
+ },
+ ]
+ }
+ },
+ },
+ "InputItem": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {"type": {"type": "string"}},
+ },
+ }
+
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+
+ # Invalid input kind should fail the CreateResponse.input union check.
+ invalid_errors = module.validate_CreateResponse({"input": 123})
+ assert any(e["path"] == "$.input" and "expected one of" in e["message"].lower() for e in invalid_errors)
+
+ # Supported input kinds should pass.
+ assert module.validate_CreateResponse({"input": "hello"}) == []
+ assert module.validate_CreateResponse({"input": [{"type": "message"}]}) == []
+
+
+def test_emitter_generates_discriminator_dispatch() -> None:
+ schemas = {
+ "Tool": {
+ "type": "object",
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "function": "#/components/schemas/FunctionTool",
+ },
+ },
+ "properties": {"type": {"type": "string"}},
+ },
+ "FunctionTool": {
+ "type": "object",
+ "required": ["name"],
+ "properties": {
+ "type": {"type": "string"},
+ "name": {"type": "string"},
+ },
+ },
+ }
+ module = _load_module(build_validator_module(schemas, ["Tool"]))
+ errors = module.validate_Tool({"type": "function"})
+ assert any(e["path"] == "$.name" and "missing" in e["message"].lower() for e in errors)
+
+
+def test_emitter_generates_array_and_map_checks() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "tools": {
+ "type": "array",
+ "items": {"$ref": "#/components/schemas/Tool"},
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {"type": "string"},
+ },
+ },
+ },
+ "Tool": {
+ "type": "object",
+ "required": ["name"],
+ "properties": {"name": {"type": "string"}},
+ },
+ }
+ module = _load_module(build_validator_module(schemas, ["CreateResponse"]))
+ errors = module.validate_CreateResponse({"tools": [{}], "metadata": {"a": 1}})
+ assert any(e["path"] == "$.tools[0].name" for e in errors)
+ assert any(e["path"] == "$.metadata.a" for e in errors)
+
+
+def test_emitter_uses_descriptive_helper_function_names() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "model": {"type": "string"},
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {"type": "string"},
+ },
+ },
+ }
+ }
+
+ code = build_validator_module(schemas, ["CreateResponse"])
+ assert "_validate_CreateResponse_model" in code
+ assert "_validate_CreateResponse_metadata" in code
+ assert re.search(r"_validate_branch_\d+", code) is None
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_generator_contract.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_generator_contract.py
new file mode 100644
index 000000000000..09041bbd8f62
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_generator_contract.py
@@ -0,0 +1,109 @@
+"""Tests for validator generator contract behavior."""
+
+from __future__ import annotations
+
+import subprocess
+import sys
+from pathlib import Path
+
+
+def _script_path() -> Path:
+ return Path(__file__).resolve().parents[2] / "scripts" / "generate_validators.py"
+
+
+def _minimal_spec() -> str:
+ return """{
+ "paths": {
+ "/responses": {
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateResponse"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "CreateResponse": {
+ "type": "object",
+ "required": ["model"],
+ "properties": {
+ "model": {"type": "string"}
+ }
+ }
+ }
+ }
+}
+"""
+
+
+def test_generator_requires_cli_args() -> None:
+ proc = subprocess.run(
+ [sys.executable, str(_script_path())],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ assert proc.returncode != 0
+ assert "--input" in proc.stderr
+ assert "--output" in proc.stderr
+
+
+def test_generated_file_has_autogen_header(tmp_path: Path) -> None:
+ spec_path = tmp_path / "spec.json"
+ out_path = tmp_path / "_validators.py"
+ spec_path.write_text(_minimal_spec(), encoding="utf-8")
+
+ proc = subprocess.run(
+ [
+ sys.executable,
+ str(_script_path()),
+ "--input",
+ str(spec_path),
+ "--output",
+ str(out_path),
+ "--root-schemas",
+ "CreateResponse",
+ ],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+
+ assert proc.returncode == 0, proc.stderr
+ content = out_path.read_text(encoding="utf-8")
+ assert content.startswith("# pylint: disable=line-too-long,useless-suppression,too-many-lines")
+ assert "# Code generated by Microsoft (R) Python Code Generator." in content
+
+
+def test_generation_is_deterministic_for_same_input(tmp_path: Path) -> None:
+ spec_path = tmp_path / "spec.json"
+ out_path = tmp_path / "_validators.py"
+ spec_path.write_text(_minimal_spec(), encoding="utf-8")
+
+ cmd = [
+ sys.executable,
+ str(_script_path()),
+ "--input",
+ str(spec_path),
+ "--output",
+ str(out_path),
+ "--root-schemas",
+ "CreateResponse",
+ ]
+
+ first = subprocess.run(cmd, capture_output=True, text=True, check=False)
+ assert first.returncode == 0, first.stderr
+ first_output = out_path.read_text(encoding="utf-8")
+
+ second = subprocess.run(cmd, capture_output=True, text=True, check=False)
+ assert second.returncode == 0, second.stderr
+ second_output = out_path.read_text(encoding="utf-8")
+
+ assert first_output == second_output
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_generator_e2e.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_generator_e2e.py
new file mode 100644
index 000000000000..e44bb8db2562
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_generator_e2e.py
@@ -0,0 +1,192 @@
+"""End-to-end tests for validator generator CLI output."""
+
+from __future__ import annotations
+
+import importlib.util
+import subprocess
+import sys
+from pathlib import Path
+
+
+def _script_path() -> Path:
+ return Path(__file__).resolve().parents[2] / "scripts" / "generate_validators.py"
+
+
+def _spec() -> str:
+ return """{
+ "paths": {
+ "/responses": {
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateResponse"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "CreateResponse": {
+ "type": "object",
+ "required": ["model"],
+ "properties": {
+ "model": {"type": "string"},
+ "metadata": {"$ref": "#/components/schemas/Metadata"}
+ }
+ },
+ "Metadata": {
+ "type": "object",
+ "additionalProperties": {"type": "string"}
+ }
+ }
+ }
+}
+"""
+
+
+def test_generator_emits_valid_python_module(tmp_path: Path) -> None:
+ spec_path = tmp_path / "spec.json"
+ out_path = tmp_path / "_validators.py"
+ spec_path.write_text(_spec(), encoding="utf-8")
+
+ proc = subprocess.run(
+ [
+ sys.executable,
+ str(_script_path()),
+ "--input",
+ str(spec_path),
+ "--output",
+ str(out_path),
+ "--root-schemas",
+ "CreateResponse",
+ ],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ assert proc.returncode == 0, proc.stderr
+
+ source = out_path.read_text(encoding="utf-8")
+ compile(source, str(out_path), "exec")
+
+
+def test_generated_module_exposes_expected_validate_functions(tmp_path: Path) -> None:
+ spec_path = tmp_path / "spec.json"
+ out_path = tmp_path / "_validators.py"
+ spec_path.write_text(_spec(), encoding="utf-8")
+
+ proc = subprocess.run(
+ [
+ sys.executable,
+ str(_script_path()),
+ "--input",
+ str(spec_path),
+ "--output",
+ str(out_path),
+ "--root-schemas",
+ "CreateResponse",
+ ],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ assert proc.returncode == 0, proc.stderr
+
+ module_name = "generated_validator_module"
+ spec = importlib.util.spec_from_file_location(module_name, out_path)
+ assert spec is not None and spec.loader is not None
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+
+ assert hasattr(module, "validate_CreateResponse")
+
+
+def test_regeneration_overwrites_previous_output_cleanly(tmp_path: Path) -> None:
+ spec_path = tmp_path / "spec.json"
+ out_path = tmp_path / "_validators.py"
+ spec_path.write_text(_spec(), encoding="utf-8")
+
+ out_path.write_text("stale-content", encoding="utf-8")
+
+ proc = subprocess.run(
+ [
+ sys.executable,
+ str(_script_path()),
+ "--input",
+ str(spec_path),
+ "--output",
+ str(out_path),
+ "--root-schemas",
+ "CreateResponse",
+ ],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ assert proc.returncode == 0, proc.stderr
+
+ content = out_path.read_text(encoding="utf-8")
+ assert "stale-content" not in content
+ assert content.startswith("# pylint: disable=line-too-long,useless-suppression,too-many-lines")
+
+
+def test_generator_handles_inline_create_response_schema(tmp_path: Path) -> None:
+ spec_path = tmp_path / "spec-inline.json"
+ out_path = tmp_path / "_validators.py"
+ spec_path.write_text(
+ """{
+ "paths": {
+ "/responses": {
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "anyOf": [
+ {
+ "type": "object",
+ "required": ["model"],
+ "properties": {
+ "model": {"type": "string"}
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {}
+ }
+}
+""",
+ encoding="utf-8",
+ )
+
+ proc = subprocess.run(
+ [
+ sys.executable,
+ str(_script_path()),
+ "--input",
+ str(spec_path),
+ "--output",
+ str(out_path),
+ "--root-schemas",
+ "CreateResponse",
+ ],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ assert proc.returncode == 0, proc.stderr
+ content = out_path.read_text(encoding="utf-8")
+ assert "def _validate_CreateResponse(" in content
+ assert "class CreateResponseValidator" in content
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_schema_walker.py b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_schema_walker.py
new file mode 100644
index 000000000000..244c5b63820c
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/tests/unit/test_validator_schema_walker.py
@@ -0,0 +1,84 @@
+"""Tests for OpenAPI schema walker behavior used by validator generation."""
+
+from __future__ import annotations
+
+from scripts.validator_schema_walker import SchemaWalker, discover_post_request_roots, resolve_ref
+
+
+def test_resolve_ref_extracts_schema_name() -> None:
+ assert resolve_ref("#/components/schemas/CreateResponse") == "CreateResponse"
+
+
+def test_schema_walker_collects_reachable_from_root_schema() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "metadata": {"$ref": "#/components/schemas/Metadata"},
+ },
+ },
+ "Metadata": {
+ "type": "object",
+ "properties": {"id": {"type": "string"}},
+ },
+ }
+
+ walker = SchemaWalker(schemas)
+ walker.walk("CreateResponse")
+
+ assert "CreateResponse" in walker.reachable
+ assert "Metadata" in walker.reachable
+
+
+def test_schema_walker_discovers_inline_post_request_schema() -> None:
+ spec = {
+ "paths": {
+ "/responses": {
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateResponse",
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ assert discover_post_request_roots(spec) == ["CreateResponse"]
+
+
+def test_schema_walker_handles_oneof_anyof_ref_branches() -> None:
+ schemas = {
+ "CreateResponse": {
+ "type": "object",
+ "properties": {
+ "input": {
+ "oneOf": [
+ {"$ref": "#/components/schemas/InputText"},
+ {"$ref": "#/components/schemas/InputImage"},
+ ]
+ },
+ "tool_choice": {
+ "anyOf": [
+ {"type": "string"},
+ {"$ref": "#/components/schemas/ToolChoiceParam"},
+ ]
+ },
+ },
+ },
+ "InputText": {"type": "string"},
+ "InputImage": {"type": "object", "properties": {"url": {"type": "string"}}},
+ "ToolChoiceParam": {"type": "object", "properties": {"type": {"type": "string"}}},
+ }
+
+ walker = SchemaWalker(schemas)
+ walker.walk("CreateResponse")
+
+ assert "InputText" in walker.reachable
+ assert "InputImage" in walker.reachable
+ assert "ToolChoiceParam" in walker.reachable
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/type_spec/client.tsp b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/client.tsp
new file mode 100644
index 000000000000..48528768d194
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/client.tsp
@@ -0,0 +1,161 @@
+import "./main.tsp";
+
+using Azure.ClientGenerator.Core;
+using Azure.Core.Experimental;
+
+// Map all OpenAI base types into our SDK namespace
+@clientNamespace("Azure.AI.Responses.Server.Sdk.Models")
+namespace OpenAI {
+ // The responses view does not export ItemResourceType (only the full src does).
+ // Bridge the gap with an alias so the Azure augmentations can reference it.
+ alias ItemResourceType = OutputItemType;
+}
+
+// Map Azure.AI.Projects augmentation types into our SDK namespace
+#suppress "@azure-tools/typespec-azure-core/experimental-feature" ""
+@clientNamespace("Azure.AI.Responses.Server.Sdk.Models")
+namespace Azure.AI.Projects {
+ // Propagate "sequence_number" to base of stream events
+ @@copyProperties(OpenAI.ResponseStreamEvent,
+ {
+ sequence_number: integer,
+ }
+ );
+
+ // Remove created_by from specific models to avoid BinaryData/string mismatch
+ // with the base OutputItem.CreatedBy (BinaryData) type
+ @@withoutOmittedProperties(OpenAI.OutputItemFunctionShellCallOutput, "created_by");
+ @@withoutOmittedProperties(OpenAI.OutputItemFunctionShellCall, "created_by");
+ @@withoutOmittedProperties(OpenAI.OutputItemCompactionBody, "created_by");
+ @@withoutOmittedProperties(OpenAI.OutputItemApplyPatchToolCallOutput, "created_by");
+ @@withoutOmittedProperties(OpenAI.OutputItemApplyPatchToolCall, "created_by");
+
+ // Remove "object" from DeleteResponseResult to work around codegen bug
+ // (TypeSpec emitter generates `= "response"` string default for ResponseObjectType enum)
+ @@withoutOmittedProperties(Azure.AI.Projects.DeleteResponseResult, "object");
+
+ // ============================================================================
+  // Public constructors: mark models as input+output so the emitter generates
+  // public constructors. Consumers need these to construct events in their
+  // response-handler implementations.
+ // ============================================================================
+
+ // --- ResponseStreamEvent subtypes (53 concrete types) ---
+ @@usage(OpenAI.ResponseAudioDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseAudioDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseAudioTranscriptDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseAudioTranscriptDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCodeInterpreterCallCodeDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCodeInterpreterCallCodeDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCodeInterpreterCallCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCodeInterpreterCallInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCodeInterpreterCallInterpretingEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseContentPartAddedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseContentPartDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCreatedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCustomToolCallInputDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseCustomToolCallInputDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseErrorEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseFailedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseFileSearchCallCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseFileSearchCallInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseFileSearchCallSearchingEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseFunctionCallArgumentsDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseFunctionCallArgumentsDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseImageGenCallCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseImageGenCallGeneratingEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseImageGenCallInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseImageGenCallPartialImageEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseIncompleteEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPCallArgumentsDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPCallArgumentsDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPCallCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPCallFailedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPCallInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPListToolsCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPListToolsFailedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseMCPListToolsInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseOutputItemAddedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseOutputItemDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseOutputTextAnnotationAddedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseQueuedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseReasoningSummaryPartAddedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseReasoningSummaryPartDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseReasoningSummaryTextDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseReasoningSummaryTextDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseReasoningTextDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseReasoningTextDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseRefusalDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseRefusalDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseTextDeltaEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseTextDoneEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseWebSearchCallCompletedEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseWebSearchCallInProgressEvent, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseWebSearchCallSearchingEvent, Usage.input | Usage.output);
+
+ // --- Response, ResponseError, and CreateResponse ---
+ @@usage(OpenAI.Response, Usage.input | Usage.output);
+ @@usage(OpenAI.ResponseError, Usage.input | Usage.output);
+ @@usage(OpenAI.CreateResponse, Usage.input | Usage.output);
+
+ // --- OpenAI OutputItem subtypes (24 concrete types) ---
+ @@usage(OpenAI.OutputItemApplyPatchToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemApplyPatchToolCallOutput, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemCodeInterpreterToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemCompactionBody, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemComputerToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemComputerToolCallOutput, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemCustomToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemCustomToolCallOutput, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemFileSearchToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemFunctionShellCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemFunctionShellCallOutput, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemFunctionToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemImageGenToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemLocalShellToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemLocalShellToolCallOutput, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemMcpApprovalRequest, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemMcpApprovalResponseResource, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemMcpListTools, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemMcpToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemMessage, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemOutputMessage, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemReasoningItem, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemWebSearchToolCall, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputItemFunctionToolCallOutput, Usage.input | Usage.output);
+
+ // --- OutputContent subtypes (3 concrete types) ---
+ @@usage(OpenAI.OutputContentOutputTextContent, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputContentReasoningTextContent, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputContentRefusalContent, Usage.input | Usage.output);
+
+ // --- OutputMessageContent subtypes (2 concrete types) ---
+ @@usage(OpenAI.OutputMessageContentOutputTextContent, Usage.input | Usage.output);
+ @@usage(OpenAI.OutputMessageContentRefusalContent, Usage.input | Usage.output);
+
+ // --- Azure.AI.Projects OutputItem subtypes (22 concrete types) ---
+ @@usage(Azure.AI.Projects.A2AToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.A2AToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.AzureAISearchToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.AzureAISearchToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.AzureFunctionToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.AzureFunctionToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.BingCustomSearchToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.BingCustomSearchToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.BingGroundingToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.BingGroundingToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.BrowserAutomationToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.BrowserAutomationToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.FabricDataAgentToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.FabricDataAgentToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.MemorySearchToolCallItemResource, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.OAuthConsentRequestOutputItem, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.OpenApiToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.OpenApiToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.SharepointGroundingToolCall, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.SharepointGroundingToolCallOutput, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.StructuredOutputsOutputItem, Usage.input | Usage.output);
+ @@usage(Azure.AI.Projects.WorkflowActionOutputItem, Usage.input | Usage.output);
+}
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/type_spec/main.tsp b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/main.tsp
new file mode 100644
index 000000000000..f6d3a43eb65d
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/main.tsp
@@ -0,0 +1,23 @@
+// Azure AI Responses Server SDK — TypeSpec view
+//
+// This view selectively imports the OpenAI responses routes and models from the
+// upstream Azure REST API spec and applies local customizations (client.tsp) to
+// generate Python model classes in our SDK namespace.
+//
+// Pattern based on:
+// https://github.com/Azure/azure-rest-api-specs/.../sdk-service-agents-contracts
+
+// OpenAI base models + operations used by the local responses contract view
+import "@azure-tools/openai-typespec/views/client-emitters";
+
+// OpenAI responses routes + models (routes reference the model types, making them visible to the emitter)
+import "./TempTypeSpecFiles/openai-responses/routes.tsp";
+
+// Common service definition (namespace Azure.AI.Projects, Versions enum)
+import "./TempTypeSpecFiles/common/service.tsp";
+
+// Common models (FoundryFeaturesOptInKeys, operation utilities, etc.)
+import "./TempTypeSpecFiles/common/models.tsp";
+
+// Local customizations (namespace mapping, sequence_number)
+import "./client.tsp";
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/type_spec/tsp-location.yaml b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/tsp-location.yaml
new file mode 100644
index 000000000000..a5f940991b6c
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/tsp-location.yaml
@@ -0,0 +1,9 @@
+directory: specification/ai-foundry/data-plane/Foundry
+commit: c1c762593f2c91877f6f76384bc8315404b64e8f
+repo: Azure/azure-rest-api-specs
+additionalDirectories:
+ - specification/ai-foundry/data-plane/Foundry/src/openai-responses
+ - specification/ai-foundry/data-plane/Foundry/src/openai-conversations
+ - specification/ai-foundry/data-plane/Foundry/src/tools
+ - specification/ai-foundry/data-plane/Foundry/src/common
+ - specification/ai-foundry/data-plane/Foundry/src/memory-stores
diff --git a/sdk/agentserver/azure-ai-agentserver-responses/type_spec/tspconfig.yaml b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/tspconfig.yaml
new file mode 100644
index 000000000000..8875bfb7dfa8
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-responses/type_spec/tspconfig.yaml
@@ -0,0 +1,23 @@
+emit:
+ - "@typespec/openapi3"
+ - "@azure-tools/typespec-python"
+options:
+ "@typespec/openapi3":
+ emitter-output-dir: "{output-dir}"
+ "@azure-tools/typespec-python":
+ emitter-output-dir: "{output-dir}"
+ package-name: "azure-ai-agentserver-responses"
+ package-mode: "dataplane"
+ flavor: "azure"
+ unreferenced-types-handling: keepAll
+ generate-test: false
+ generate-sample: false
+imports:
+ - "@typespec/http"
+ - "@typespec/openapi"
+ - "@azure-tools/typespec-azure-core"
+ - "@azure-tools/typespec-azure-core/experimental"
+ - "@azure-tools/typespec-client-generator-core"
+ - "@typespec/versioning"
+ - "@typespec/events"
+ - "@typespec/sse"
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/__init__.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/__init__.py
new file mode 100644
index 000000000000..ca1ef50af391
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/__init__.py
@@ -0,0 +1,32 @@
+"""Canonical non-generated model types for the response server."""
+
+from .errors import RequestValidationError
+try:
+ from .runtime import (
+ ResponseExecution,
+ ResponseModeFlags,
+ ResponseSession,
+ ResponseStatus,
+ StreamEventRecord,
+ StreamReplayState,
+ TerminalResponseStatus,
+ )
+except Exception: # pragma: no cover - allows importing lightweight model errors in isolated test envs.
+ pass
+
+__all__ = [
+ "RequestValidationError",
+]
+
+if "ResponseExecution" in globals():
+ __all__.extend(
+ [
+ "ResponseExecution",
+ "ResponseModeFlags",
+ "ResponseSession",
+ "ResponseStatus",
+ "StreamEventRecord",
+ "StreamReplayState",
+ "TerminalResponseStatus",
+ ]
+ )
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/__init__.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/__init__.py
new file mode 100644
index 000000000000..013008e395b4
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility re-exports for generated models preserved under sdk/models."""
+
+from .sdk.models.models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_enums.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_enums.py
new file mode 100644
index 000000000000..ffeb0d1362db
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_enums.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated enum symbols."""
+
+from .sdk.models.models._enums import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_models.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_models.py
new file mode 100644
index 000000000000..8c6878d69796
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_models.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated model symbols."""
+
+from .sdk.models.models._models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_patch.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_patch.py
new file mode 100644
index 000000000000..3d222c31c566
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_patch.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated patch helpers."""
+
+from .sdk.models.models._patch import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_validators.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_validators.py
new file mode 100644
index 000000000000..b2dfc33c9c4a
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/_validators.py
@@ -0,0 +1,666 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+
+from __future__ import annotations
+
+from typing import Any
+
+try:
+ from . import _enums as _generated_enums
+except Exception:
+ _generated_enums = None
+
+# ---------------------------------------------------------------------------
+# Shared helpers used by every generated validator below.
+# ---------------------------------------------------------------------------
+
+def _append_error(errors: list[dict[str, str]], path: str, message: str) -> None:
+    # Record one validation failure as a {'path': ..., 'message': ...} entry.
+    errors.append({'path': path, 'message': message})
+
+def _type_label(value: Any) -> str:
+    # Map a Python value to the JSON type name used in error messages.
+    if value is None:
+        return 'null'
+    # bool is tested before int because bool is a subclass of int in Python.
+    if isinstance(value, bool):
+        return 'boolean'
+    if isinstance(value, int):
+        return 'integer'
+    if isinstance(value, float):
+        return 'number'
+    if isinstance(value, str):
+        return 'string'
+    if isinstance(value, dict):
+        return 'object'
+    if isinstance(value, list):
+        return 'array'
+    # Non-JSON values fall back to their Python class name.
+    return type(value).__name__
+
+def _is_type(value: Any, expected: str) -> bool:
+    # True when ``value`` matches the JSON type named by ``expected``.
+    if expected == 'string':
+        return isinstance(value, str)
+    if expected == 'integer':
+        # Exclude bool, which Python treats as an int subclass.
+        return isinstance(value, int) and not isinstance(value, bool)
+    if expected == 'number':
+        # JSON 'number' accepts both integral and floating values.
+        return (isinstance(value, int) and not isinstance(value, bool)) or isinstance(value, float)
+    if expected == 'boolean':
+        return isinstance(value, bool)
+    if expected == 'object':
+        return isinstance(value, dict)
+    if expected == 'array':
+        return isinstance(value, list)
+    # Unknown type names match anything (permissive by design).
+    return True
+
+def _append_type_mismatch(errors: list[dict[str, str]], path: str, expected: str, value: Any) -> None:
+    # Convenience wrapper for the common "wrong JSON type" error message.
+    _append_error(errors, path, f"Expected {expected}, got {_type_label(value)}")
+
+def _enum_values(enum_name: str) -> tuple[tuple[str, ...] | None, str | None]:
+    # Resolve the allowed values of a generated enum by name.
+    # Returns (values, None) on success, or (None, error_message) when the
+    # generated _enums module or the named enum class is unavailable.
+    if _generated_enums is None:
+        return None, f'enum type _enums.{enum_name} is unavailable'
+    enum_cls = getattr(_generated_enums, enum_name, None)
+    if enum_cls is None:
+        return None, f'enum type _enums.{enum_name} is not defined'
+    try:
+        return tuple(str(member.value) for member in enum_cls), None
+    except Exception:
+        return None, f'enum type _enums.{enum_name} failed to load values'
+
+def _validate_CreateResponse(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    """Validate a CreateResponse payload, appending findings to ``errors``.
+
+    Only keys present in the payload are checked; absent keys produce no
+    errors here, so required-property enforcement for CreateResponse itself
+    is not emitted at this level.  Several properties share one helper
+    because their schemas are identical (the generator deduplicates
+    structurally equal validators) -- see the inline notes below.
+    """
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'agent_reference' in value:
+        _validate_CreateResponse_agent_reference(value['agent_reference'], f"{path}.agent_reference", errors)
+    if 'background' in value:
+        _validate_CreateResponse_background(value['background'], f"{path}.background", errors)
+    if 'context_management' in value:
+        _validate_CreateResponse_context_management(value['context_management'], f"{path}.context_management", errors)
+    if 'conversation' in value:
+        _validate_CreateResponse_conversation(value['conversation'], f"{path}.conversation", errors)
+    if 'include' in value:
+        _validate_CreateResponse_include(value['include'], f"{path}.include", errors)
+    if 'input' in value:
+        _validate_CreateResponse_input(value['input'], f"{path}.input", errors)
+    if 'instructions' in value:
+        _validate_CreateResponse_instructions(value['instructions'], f"{path}.instructions", errors)
+    if 'max_output_tokens' in value:
+        _validate_CreateResponse_max_output_tokens(value['max_output_tokens'], f"{path}.max_output_tokens", errors)
+    if 'max_tool_calls' in value:
+        # Same shape as max_output_tokens (nullable integer) -- validator reused.
+        _validate_CreateResponse_max_output_tokens(value['max_tool_calls'], f"{path}.max_tool_calls", errors)
+    if 'metadata' in value:
+        _validate_CreateResponse_metadata(value['metadata'], f"{path}.metadata", errors)
+    if 'model' in value:
+        _validate_CreateResponse_model(value['model'], f"{path}.model", errors)
+    if 'parallel_tool_calls' in value:
+        _validate_CreateResponse_parallel_tool_calls(value['parallel_tool_calls'], f"{path}.parallel_tool_calls", errors)
+    if 'previous_response_id' in value:
+        # Same shape as instructions (nullable string) -- validator reused.
+        _validate_CreateResponse_instructions(value['previous_response_id'], f"{path}.previous_response_id", errors)
+    if 'prompt' in value:
+        _validate_CreateResponse_prompt(value['prompt'], f"{path}.prompt", errors)
+    if 'prompt_cache_key' in value:
+        _validate_CreateResponse_prompt_cache_key(value['prompt_cache_key'], f"{path}.prompt_cache_key", errors)
+    if 'prompt_cache_retention' in value:
+        _validate_CreateResponse_prompt_cache_retention(value['prompt_cache_retention'], f"{path}.prompt_cache_retention", errors)
+    if 'reasoning' in value:
+        _validate_CreateResponse_reasoning(value['reasoning'], f"{path}.reasoning", errors)
+    if 'safety_identifier' in value:
+        _validate_CreateResponse_safety_identifier(value['safety_identifier'], f"{path}.safety_identifier", errors)
+    if 'service_tier' in value:
+        _validate_CreateResponse_service_tier(value['service_tier'], f"{path}.service_tier", errors)
+    if 'store' in value:
+        # Same shape as parallel_tool_calls (nullable boolean) -- validator reused.
+        _validate_CreateResponse_parallel_tool_calls(value['store'], f"{path}.store", errors)
+    if 'stream' in value:
+        # Same shape as background (nullable boolean) -- validator reused.
+        _validate_CreateResponse_background(value['stream'], f"{path}.stream", errors)
+    if 'stream_options' in value:
+        _validate_CreateResponse_stream_options(value['stream_options'], f"{path}.stream_options", errors)
+    if 'structured_inputs' in value:
+        _validate_CreateResponse_structured_inputs(value['structured_inputs'], f"{path}.structured_inputs", errors)
+    if 'temperature' in value:
+        _validate_CreateResponse_temperature(value['temperature'], f"{path}.temperature", errors)
+    if 'text' in value:
+        _validate_CreateResponse_text(value['text'], f"{path}.text", errors)
+    if 'tool_choice' in value:
+        _validate_CreateResponse_tool_choice(value['tool_choice'], f"{path}.tool_choice", errors)
+    if 'tools' in value:
+        _validate_CreateResponse_tools(value['tools'], f"{path}.tools", errors)
+    if 'top_logprobs' in value:
+        # Same shape as max_output_tokens (nullable integer) -- validator reused.
+        _validate_CreateResponse_max_output_tokens(value['top_logprobs'], f"{path}.top_logprobs", errors)
+    if 'top_p' in value:
+        # Same shape as temperature (nullable number) -- validator reused.
+        _validate_CreateResponse_temperature(value['top_p'], f"{path}.top_p", errors)
+    if 'truncation' in value:
+        _validate_CreateResponse_truncation(value['truncation'], f"{path}.truncation", errors)
+    if 'user' in value:
+        _validate_CreateResponse_user(value['user'], f"{path}.user", errors)
+
+def _validate_CreateResponse_agent_reference(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Schema places no constraints on agent_reference; accept any value.
+    return
+
+def _validate_CreateResponse_background(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable boolean (also reused for 'stream', which has the same shape).
+    if value is None:
+        return
+    if not _is_type(value, 'boolean'):
+        _append_type_mismatch(errors, path, 'boolean', value)
+        return
+
+def _validate_CreateResponse_context_management(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable array; each element is validated as OpenAI.ContextManagementParam.
+    if value is None:
+        return
+    if not _is_type(value, 'array'):
+        _append_type_mismatch(errors, path, 'array', value)
+        return
+    for _idx, _item in enumerate(value):
+        _validate_CreateResponse_context_management_item(_item, f"{path}[{_idx}]", errors)
+
+def _validate_CreateResponse_conversation(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable; non-null values are otherwise unconstrained at this level.
+    if value is None:
+        return
+
+def _validate_CreateResponse_include(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable array of OpenAI.IncludeEnum values.
+    if value is None:
+        return
+    if not _is_type(value, 'array'):
+        _append_type_mismatch(errors, path, 'array', value)
+        return
+    for _idx, _item in enumerate(value):
+        _validate_CreateResponse_include_item(_item, f"{path}[{_idx}]", errors)
+
+def _validate_CreateResponse_input(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the shared OpenAI.InputParam (string | array) validator.
+    _validate_OpenAI_InputParam(value, path, errors)
+
+def _validate_CreateResponse_instructions(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable string (also reused for 'previous_response_id' and Prompt 'version').
+    if value is None:
+        return
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_max_output_tokens(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable integer (also reused for 'max_tool_calls', 'top_logprobs'
+    # and ContextManagementParam.compact_threshold).
+    if value is None:
+        return
+    if not _is_type(value, 'integer'):
+        _append_type_mismatch(errors, path, 'integer', value)
+        return
+
+def _validate_CreateResponse_metadata(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable object; entry values are not constrained here.
+    if value is None:
+        return
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+
+def _validate_CreateResponse_model(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Non-nullable string (None is reported as a type mismatch).
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_parallel_tool_calls(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable boolean (also reused for 'store').
+    if value is None:
+        return
+    if not _is_type(value, 'boolean'):
+        _append_type_mismatch(errors, path, 'boolean', value)
+        return
+
+def _validate_CreateResponse_prompt(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the shared OpenAI.Prompt object validator.
+    _validate_OpenAI_Prompt(value, path, errors)
+
+def _validate_CreateResponse_prompt_cache_key(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Non-nullable string.
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_prompt_cache_retention(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable inline enum: 'in-memory' | '24h'.
+    if value is None:
+        return
+    _allowed_values = ('in-memory', '24h')
+    # NOTE(review): membership is checked before the type check, so a
+    # non-string value yields both an enum error and a type-mismatch error.
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_reasoning(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable object; inner properties are not constrained here.
+    if value is None:
+        return
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+
+def _validate_CreateResponse_safety_identifier(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Non-nullable string.
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_service_tier(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the shared OpenAI.ServiceTier enum validator.
+    _validate_OpenAI_ServiceTier(value, path, errors)
+
+def _validate_CreateResponse_stream_options(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable object; inner properties are not constrained here.
+    if value is None:
+        return
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+
+def _validate_CreateResponse_structured_inputs(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Object with only additional (undeclared) properties.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    for _key, _item in value.items():
+        # The empty tuple is the set of declared property names; with no
+        # declared properties every key is "additional", so this is always true.
+        if _key not in ():
+            _validate_CreateResponse_structured_inputs_additional_property(_item, f"{path}.{_key}", errors)
+
+def _validate_CreateResponse_temperature(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable number (also reused for 'top_p').
+    if value is None:
+        return
+    if not _is_type(value, 'number'):
+        _append_type_mismatch(errors, path, 'number', value)
+        return
+
+def _validate_CreateResponse_text(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the shared OpenAI.ResponseTextParam object validator.
+    _validate_OpenAI_ResponseTextParam(value, path, errors)
+
+def _validate_CreateResponse_tool_choice(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Union: OpenAI.ToolChoiceOptions (string) | OpenAI.ToolChoiceParam (object).
+    # Each branch is validated into a scratch error list; the first branch
+    # that produces no errors wins.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_ToolChoiceOptions(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'object'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_ToolChoiceParam(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected one of: OpenAI.ToolChoiceOptions, OpenAI.ToolChoiceParam; got {_type_label(value)}")
+        return
+
+def _validate_CreateResponse_tools(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the shared OpenAI.Tool array validator.
+    _validate_OpenAI_ToolsArray(value, path, errors)
+
+def _validate_CreateResponse_truncation(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable inline enum: 'auto' | 'disabled'.
+    if value is None:
+        return
+    _allowed_values = ('auto', 'disabled')
+    # NOTE(review): membership is checked before the type check, so a
+    # non-string value yields both an enum error and a type-mismatch error.
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_user(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Non-nullable string.
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_context_management_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Array-element delegate for context_management entries.
+    _validate_OpenAI_ContextManagementParam(value, path, errors)
+
+def _validate_CreateResponse_include_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Array-element delegate for include entries.
+    _validate_OpenAI_IncludeEnum(value, path, errors)
+
+def _validate_OpenAI_InputParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Union: plain string | array of OpenAI.InputItem.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'array'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_array(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected one of: string, array; got {_type_label(value)}")
+        return
+
+def _validate_OpenAI_Prompt(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Object with required 'id' and optional 'variables' / 'version'.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'id' not in value:
+        _append_error(errors, f"{path}.id", "Required property 'id' is missing")
+    if 'id' in value:
+        _validate_OpenAI_Prompt_id(value['id'], f"{path}.id", errors)
+    if 'variables' in value:
+        _validate_OpenAI_Prompt_variables(value['variables'], f"{path}.variables", errors)
+    if 'version' in value:
+        # Nullable string -- reuses the structurally identical validator.
+        _validate_CreateResponse_instructions(value['version'], f"{path}.version", errors)
+
+def _validate_OpenAI_ServiceTier(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable closed enum backed by _enums.ServiceTier.
+    if value is None:
+        return
+    _allowed_values, _enum_error = _enum_values('ServiceTier')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_CreateResponse_structured_inputs_additional_property(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Additional-property values are unconstrained; accept anything.
+    return
+
+def _validate_OpenAI_ResponseTextParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Object with optional 'format' and 'verbosity' properties.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'format' in value:
+        _validate_OpenAI_ResponseTextParam_format(value['format'], f"{path}.format", errors)
+    if 'verbosity' in value:
+        _validate_OpenAI_ResponseTextParam_verbosity(value['verbosity'], f"{path}.verbosity", errors)
+
+def _validate_OpenAI_ToolChoiceOptions(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Closed enum backed by _enums.ToolChoiceOptions (non-nullable).
+    _allowed_values, _enum_error = _enum_values('ToolChoiceOptions')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_ToolChoiceParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Discriminated object: only the presence and string-ness of the 'type'
+    # discriminator are checked; no per-discriminant dispatch is emitted.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'type' not in value:
+        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
+    if 'type' in value:
+        _validate_OpenAI_ToolChoiceParam_type(value['type'], f"{path}.type", errors)
+    _disc_value = value.get('type')
+    if not isinstance(_disc_value, str):
+        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
+        return
+
+def _validate_OpenAI_ToolsArray(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Array of OpenAI.Tool objects.
+    if not _is_type(value, 'array'):
+        _append_type_mismatch(errors, path, 'array', value)
+        return
+    for _idx, _item in enumerate(value):
+        _validate_OpenAI_ToolsArray_item(_item, f"{path}[{_idx}]", errors)
+
+def _validate_OpenAI_ContextManagementParam(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Object with required 'type' and optional 'compact_threshold'.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'type' not in value:
+        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
+    if 'compact_threshold' in value:
+        # Nullable integer -- reuses the structurally identical validator.
+        _validate_CreateResponse_max_output_tokens(value['compact_threshold'], f"{path}.compact_threshold", errors)
+    if 'type' in value:
+        _validate_OpenAI_ContextManagementParam_type(value['type'], f"{path}.type", errors)
+
+def _validate_OpenAI_IncludeEnum(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Open union: string | _enums.IncludeEnum.  Branch 0 accepts any string,
+    # so the closed-enum branch never decides the outcome for string input --
+    # i.e. arbitrary string values are accepted by design.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_IncludeEnum_2(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected IncludeEnum to be a string value, got {_type_label(value)}")
+        return
+
+def _validate_OpenAI_InputParam_string(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Plain string check; shared by every string-typed union branch below.
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_InputParam_array(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Array of OpenAI.InputItem objects.
+    if not _is_type(value, 'array'):
+        _append_type_mismatch(errors, path, 'array', value)
+        return
+    for _idx, _item in enumerate(value):
+        _validate_OpenAI_InputParam_array_item(_item, f"{path}[{_idx}]", errors)
+
+def _validate_OpenAI_Prompt_id(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Non-nullable string.
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_Prompt_variables(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable object; entry values are not constrained here.
+    if value is None:
+        return
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+
+def _validate_OpenAI_ResponseTextParam_format(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the discriminated response-format validator.
+    _validate_OpenAI_TextResponseFormatConfiguration(value, path, errors)
+
+def _validate_OpenAI_ResponseTextParam_verbosity(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the nullable Verbosity enum validator.
+    _validate_OpenAI_Verbosity(value, path, errors)
+
+def _validate_OpenAI_ToolChoiceParam_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the open ToolChoiceParamType union validator.
+    _validate_OpenAI_ToolChoiceParamType(value, path, errors)
+
+def _validate_OpenAI_ToolsArray_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Array-element delegate for tools entries.
+    _validate_OpenAI_Tool(value, path, errors)
+
+def _validate_OpenAI_ContextManagementParam_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Discriminator value is only required to be a string here.
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_IncludeEnum_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Closed-enum branch of the IncludeEnum union, backed by _enums.IncludeEnum.
+    _allowed_values, _enum_error = _enum_values('IncludeEnum')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_InputParam_array_item(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Array-element delegate for input entries.
+    _validate_OpenAI_InputItem(value, path, errors)
+
+def _validate_OpenAI_TextResponseFormatConfiguration(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Discriminated object: only the presence and string-ness of 'type' are
+    # checked; no per-discriminant dispatch is emitted.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'type' not in value:
+        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
+    if 'type' in value:
+        _validate_OpenAI_TextResponseFormatConfiguration_type(value['type'], f"{path}.type", errors)
+    _disc_value = value.get('type')
+    if not isinstance(_disc_value, str):
+        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
+        return
+
+def _validate_OpenAI_Verbosity(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Nullable closed enum backed by _enums.Verbosity.
+    if value is None:
+        return
+    _allowed_values, _enum_error = _enum_values('Verbosity')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_ToolChoiceParamType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Open union: string | _enums.ToolChoiceParamType.  Branch 0 accepts any
+    # string, so arbitrary string values pass by design.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_ToolChoiceParamType_2(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected ToolChoiceParamType to be a string value, got {_type_label(value)}")
+        return
+
+def _validate_OpenAI_Tool(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Discriminated object: only the presence and string-ness of 'type' are
+    # checked; no per-discriminant dispatch is emitted.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'type' not in value:
+        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
+    if 'type' in value:
+        _validate_OpenAI_Tool_type(value['type'], f"{path}.type", errors)
+    _disc_value = value.get('type')
+    if not isinstance(_disc_value, str):
+        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
+        return
+
+def _validate_OpenAI_InputItem(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Discriminated object: only the presence and string-ness of 'type' are
+    # checked; no per-discriminant dispatch is emitted.
+    if not _is_type(value, 'object'):
+        _append_type_mismatch(errors, path, 'object', value)
+        return
+    if 'type' not in value:
+        _append_error(errors, f"{path}.type", "Required property 'type' is missing")
+    if 'type' in value:
+        _validate_OpenAI_InputItem_type(value['type'], f"{path}.type", errors)
+    _disc_value = value.get('type')
+    if not isinstance(_disc_value, str):
+        _append_error(errors, f"{path}.type", "Required discriminator 'type' is missing or invalid")
+        return
+
+def _validate_OpenAI_TextResponseFormatConfiguration_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the open TextResponseFormatConfigurationType union validator.
+    _validate_OpenAI_TextResponseFormatConfigurationType(value, path, errors)
+
+def _validate_OpenAI_ToolChoiceParamType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Closed-enum branch of the ToolChoiceParamType union.
+    _allowed_values, _enum_error = _enum_values('ToolChoiceParamType')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_Tool_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the open ToolType union validator.
+    _validate_OpenAI_ToolType(value, path, errors)
+
+def _validate_OpenAI_InputItem_type(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Delegates to the open InputItemType union validator.
+    _validate_OpenAI_InputItemType(value, path, errors)
+
+def _validate_OpenAI_TextResponseFormatConfigurationType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Open union: string | _enums.TextResponseFormatConfigurationType.
+    # Branch 0 accepts any string, so arbitrary string values pass by design.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_TextResponseFormatConfigurationType_2(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected TextResponseFormatConfigurationType to be a string value, got {_type_label(value)}")
+        return
+
+def _validate_OpenAI_ToolType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Open union: string | _enums.ToolType.  Branch 0 accepts any string,
+    # so arbitrary string values pass by design.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_ToolType_2(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected ToolType to be a string value, got {_type_label(value)}")
+        return
+
+def _validate_OpenAI_InputItemType(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Open union: string | _enums.InputItemType.  Branch 0 accepts any string,
+    # so arbitrary string values pass by design.
+    _matched_union = False
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_0: list[dict[str, str]] = []
+        _validate_OpenAI_InputParam_string(value, path, _branch_errors_0)
+        if not _branch_errors_0:
+            _matched_union = True
+    if not _matched_union and _is_type(value, 'string'):
+        _branch_errors_1: list[dict[str, str]] = []
+        _validate_OpenAI_InputItemType_2(value, path, _branch_errors_1)
+        if not _branch_errors_1:
+            _matched_union = True
+    if not _matched_union:
+        _append_error(errors, path, f"Expected InputItemType to be a string value, got {_type_label(value)}")
+        return
+
+def _validate_OpenAI_TextResponseFormatConfigurationType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Closed-enum branch, backed by _enums.TextResponseFormatConfigurationType.
+    _allowed_values, _enum_error = _enum_values('TextResponseFormatConfigurationType')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_ToolType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Closed-enum branch, backed by _enums.ToolType.
+    _allowed_values, _enum_error = _enum_values('ToolType')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+def _validate_OpenAI_InputItemType_2(value: Any, path: str, errors: list[dict[str, str]]) -> None:
+    # Closed-enum branch, backed by _enums.InputItemType.
+    _allowed_values, _enum_error = _enum_values('InputItemType')
+    if _enum_error is not None:
+        _append_error(errors, path, _enum_error)
+        return
+    if _allowed_values is None:
+        return
+    if value not in _allowed_values:
+        _append_error(errors, path, f"Invalid value '{value}'. Allowed: {', '.join(str(v) for v in _allowed_values)}")
+    if not _is_type(value, 'string'):
+        _append_type_mismatch(errors, path, 'string', value)
+        return
+
+# Names of the root schemas this module can validate.
+ROOT_SCHEMAS = ['CreateResponse']
+
+class CreateResponseValidator:
+    """Generated validator for the root schema."""
+
+    @staticmethod
+    def validate(payload: Any) -> list[dict[str, str]]:
+        # Returns a list of {'path', 'message'} findings; an empty list means
+        # the payload is valid.  Error paths are rooted at '$'.
+        errors: list[dict[str, str]] = []
+        _validate_CreateResponse(payload, '$', errors)
+        return errors
+
+def validate_CreateResponse(payload: Any) -> list[dict[str, str]]:
+    # Module-level convenience wrapper around CreateResponseValidator.validate.
+    return CreateResponseValidator.validate(payload)
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/__init__.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/__init__.py
new file mode 100644
index 000000000000..784a3edcc881
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Model-only generated package surface."""
+
+from .models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_patch.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_patch.py
new file mode 100644
index 000000000000..87676c65a8f0
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_patch.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
+
+
# NOTE(review): intentionally a no-op customization hook — confirm the generated
# package __init__ invokes it, per the azure-sdk codegen convention.
def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_types.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_types.py
new file mode 100644
index 000000000000..c99439ce635a
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_types.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING, Union
+
if TYPE_CHECKING:
    from . import models as _models
# Union of the two filter model shapes.
Filters = Union["_models.ComparisonFilter", "_models.CompoundFilter"]
# Tool-call output may be a JSON object, a plain string, or a list.
ToolCallOutputContent = Union[dict[str, Any], str, list[Any]]
# Request input: free-form text or a list of Item models.
InputParam = Union[str, list["_models.Item"]]
# Conversation reference: a plain string or the structured model variant.
ConversationParam = Union[str, "_models.ConversationParam_2"]
# Every server-sent event model a streaming create-response call can yield.
CreateResponseStreamingResponse = Union[
    "_models.ResponseAudioDeltaEvent",
    "_models.ResponseAudioTranscriptDeltaEvent",
    "_models.ResponseCodeInterpreterCallCodeDeltaEvent",
    "_models.ResponseCodeInterpreterCallInProgressEvent",
    "_models.ResponseCodeInterpreterCallInterpretingEvent",
    "_models.ResponseContentPartAddedEvent",
    "_models.ResponseCreatedEvent",
    "_models.ResponseErrorEvent",
    "_models.ResponseFileSearchCallInProgressEvent",
    "_models.ResponseFileSearchCallSearchingEvent",
    "_models.ResponseFunctionCallArgumentsDeltaEvent",
    "_models.ResponseInProgressEvent",
    "_models.ResponseFailedEvent",
    "_models.ResponseIncompleteEvent",
    "_models.ResponseOutputItemAddedEvent",
    "_models.ResponseReasoningSummaryPartAddedEvent",
    "_models.ResponseReasoningSummaryTextDeltaEvent",
    "_models.ResponseReasoningTextDeltaEvent",
    "_models.ResponseRefusalDeltaEvent",
    "_models.ResponseTextDeltaEvent",
    "_models.ResponseWebSearchCallInProgressEvent",
    "_models.ResponseWebSearchCallSearchingEvent",
    "_models.ResponseImageGenCallGeneratingEvent",
    "_models.ResponseImageGenCallInProgressEvent",
    "_models.ResponseImageGenCallPartialImageEvent",
    "_models.ResponseMCPCallArgumentsDeltaEvent",
    "_models.ResponseMCPCallFailedEvent",
    "_models.ResponseMCPCallInProgressEvent",
    "_models.ResponseMCPListToolsFailedEvent",
    "_models.ResponseMCPListToolsInProgressEvent",
    "_models.ResponseOutputTextAnnotationAddedEvent",
    "_models.ResponseQueuedEvent",
    "_models.ResponseCustomToolCallInputDeltaEvent",
    "_models.ResponseAudioDoneEvent",
    "_models.ResponseAudioTranscriptDoneEvent",
    "_models.ResponseCodeInterpreterCallCodeDoneEvent",
    "_models.ResponseCodeInterpreterCallCompletedEvent",
    "_models.ResponseCompletedEvent",
    "_models.ResponseContentPartDoneEvent",
    "_models.ResponseFileSearchCallCompletedEvent",
    "_models.ResponseFunctionCallArgumentsDoneEvent",
    "_models.ResponseOutputItemDoneEvent",
    "_models.ResponseReasoningSummaryPartDoneEvent",
    "_models.ResponseReasoningSummaryTextDoneEvent",
    "_models.ResponseReasoningTextDoneEvent",
    "_models.ResponseRefusalDoneEvent",
    "_models.ResponseTextDoneEvent",
    "_models.ResponseWebSearchCallCompletedEvent",
    "_models.ResponseImageGenCallCompletedEvent",
    "_models.ResponseMCPCallArgumentsDoneEvent",
    "_models.ResponseMCPCallCompletedEvent",
    "_models.ResponseMCPListToolsCompletedEvent",
    "_models.ResponseCustomToolCallInputDoneEvent",
]
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/__init__.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/__init__.py
new file mode 100644
index 000000000000..8026245c2abc
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/__init__.py
@@ -0,0 +1,6 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/model_base.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/model_base.py
new file mode 100644
index 000000000000..a75a22adbb97
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/model_base.py
@@ -0,0 +1,1368 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access, broad-except
+
+import copy
+import calendar
+import decimal
+import functools
+import sys
+import logging
+import base64
+import re
+import typing
+import enum
+import email.utils
+from datetime import datetime, date, time, timedelta, timezone
+from json import JSONEncoder
+import xml.etree.ElementTree as ET
+from collections.abc import MutableMapping
+from typing_extensions import Self
+import isodate
+from azure.core.exceptions import DeserializationError
+from azure.core import CaseInsensitiveEnumMeta
+from azure.core.pipeline import PipelineResponse
+from azure.core.serialization import _Null
+from azure.core.rest import HttpResponse
+
_LOGGER = logging.getLogger(__name__)

# Public surface of this helper module.
__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]

TZ_UTC = timezone.utc  # canonical UTC tzinfo used throughout this module
_T = typing.TypeVar("_T")
_NONE_TYPE = type(None)
+
+
+def _timedelta_as_isostr(td: timedelta) -> str:
+ """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
+
+ Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+ :param timedelta td: The timedelta to convert
+ :rtype: str
+ :return: ISO8601 version of this timedelta
+ """
+
+ # Split seconds to larger units
+ seconds = td.total_seconds()
+ minutes, seconds = divmod(seconds, 60)
+ hours, minutes = divmod(minutes, 60)
+ days, hours = divmod(hours, 24)
+
+ days, hours, minutes = list(map(int, (days, hours, minutes)))
+ seconds = round(seconds, 6)
+
+ # Build date
+ date_str = ""
+ if days:
+ date_str = "%sD" % days
+
+ if hours or minutes or seconds:
+ # Build time
+ time_str = "T"
+
+ # Hours
+ bigger_exists = date_str or hours
+ if bigger_exists:
+ time_str += "{:02}H".format(hours)
+
+ # Minutes
+ bigger_exists = bigger_exists or minutes
+ if bigger_exists:
+ time_str += "{:02}M".format(minutes)
+
+ # Seconds
+ try:
+ if seconds.is_integer():
+ seconds_string = "{:02}".format(int(seconds))
+ else:
+ # 9 chars long w/ leading 0, 6 digits after decimal
+ seconds_string = "%09.6f" % seconds
+ # Remove trailing zeros
+ seconds_string = seconds_string.rstrip("0")
+ except AttributeError: # int.is_integer() raises
+ seconds_string = "{:02}".format(seconds)
+
+ time_str += "{}S".format(seconds_string)
+ else:
+ time_str = ""
+
+ return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+ encoded = base64.b64encode(o).decode()
+ if format == "base64url":
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
+ return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+ if hasattr(o, "year") and hasattr(o, "hour"):
+ if format == "rfc7231":
+ return email.utils.format_datetime(o, usegmt=True)
+ if format == "unix-timestamp":
+ return int(calendar.timegm(o.utctimetuple()))
+
+ # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set)
+ if not o.tzinfo:
+ iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+ else:
+ iso_formatted = o.astimezone(TZ_UTC).isoformat()
+ # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+ return iso_formatted.replace("+00:00", "Z")
+ # Next try datetime.date or datetime.time
+ return o.isoformat()
+
+
+def _is_readonly(p):
+ try:
+ return p._visibility == ["read"]
+ except AttributeError:
+ return False
+
+
class SdkJSONEncoder(JSONEncoder):
    """A JSON encoder that's capable of serializing datetime objects and bytes."""

    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
        # exclude_readonly: drop model properties whose visibility is ["read"] when dumping.
        # format: byte/datetime wire format forwarded to the serialization helpers.
        super().__init__(*args, **kwargs)
        self.exclude_readonly = exclude_readonly
        self.format = format

    def default(self, o):  # pylint: disable=too-many-return-statements
        # Models serialize as their underlying data mapping.
        if _is_model(o):
            if self.exclude_readonly:
                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
                return {k: v for k, v in o.items() if k not in readonly_props}
            return dict(o.items())
        try:
            # Let the stdlib encoder handle anything it natively supports.
            return super(SdkJSONEncoder, self).default(o)
        except TypeError:
            # Fall back through the SDK-specific types the stdlib encoder rejects.
            if isinstance(o, _Null):
                return None
            if isinstance(o, decimal.Decimal):
                return float(o)
            if isinstance(o, (bytes, bytearray)):
                return _serialize_bytes(o, self.format)
            try:
                # First try datetime.datetime
                return _serialize_datetime(o, self.format)
            except AttributeError:
                pass
            # Last, try datetime.timedelta
            try:
                return _timedelta_as_isostr(o)
            except AttributeError:
                # This will be raised when it hits value.total_seconds in the method above
                pass
            return super(SdkJSONEncoder, self).default(o)
+
+
# Loose ISO 8601 datetime pattern: date, time, optional fractional seconds and offset/Z.
_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
# RFC 7231 HTTP-date pattern, e.g. "Thu, 02 Jan 2020 03:04:05 GMT".
_VALID_RFC7231 = re.compile(
    r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
    r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
)

# Wire delimiter for each collection-format style used by array-encoded fields.
_ARRAY_ENCODE_MAPPING = {
    "pipeDelimited": "|",
    "spaceDelimited": " ",
    "commaDelimited": ",",
    "newlineDelimited": "\n",
}
+
+
+def _deserialize_array_encoded(delimit: str, attr):
+ if isinstance(attr, str):
+ if attr == "":
+ return []
+ return attr.split(delimit)
+ return attr
+
+
def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
    """Deserialize ISO-8601 formatted string into Datetime object.

    :param str attr: response string to be deserialized.
    :rtype: ~datetime.datetime
    :returns: The datetime object from that input
    :raises ValueError: when the string does not match the expected format.
    :raises OverflowError: when the parsed year falls outside 1..9999.
    """
    if isinstance(attr, datetime):
        # i'm already deserialized
        return attr
    attr = attr.upper()
    match = _VALID_DATE.match(attr)
    if not match:
        raise ValueError("Invalid datetime string: " + attr)

    # Truncate fractional seconds beyond 6 digits (microsecond precision).
    check_decimal = attr.split(".")
    if len(check_decimal) > 1:
        decimal_str = ""
        for digit in check_decimal[1]:
            if digit.isdigit():
                decimal_str += digit
            else:
                break
        if len(decimal_str) > 6:
            attr = attr.replace(decimal_str, decimal_str[0:6])

    date_obj = isodate.parse_datetime(attr)
    # Reject out-of-range years so downstream UTC arithmetic cannot overflow.
    test_utc = date_obj.utctimetuple()
    if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
        raise OverflowError("Hit max or min date")
    return date_obj  # type: ignore[no-any-return]
+
+
+def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
+ """Deserialize RFC7231 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: ~datetime.datetime
+ :returns: The datetime object from that input
+ """
+ if isinstance(attr, datetime):
+ # i'm already deserialized
+ return attr
+ match = _VALID_RFC7231.match(attr)
+ if not match:
+ raise ValueError("Invalid datetime string: " + attr)
+
+ return email.utils.parsedate_to_datetime(attr)
+
+
+def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
+ """Deserialize unix timestamp into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: ~datetime.datetime
+ :returns: The datetime object from that input
+ """
+ if isinstance(attr, datetime):
+ # i'm already deserialized
+ return attr
+ return datetime.fromtimestamp(attr, TZ_UTC)
+
+
+def _deserialize_date(attr: typing.Union[str, date]) -> date:
+ """Deserialize ISO-8601 formatted string into Date object.
+ :param str attr: response string to be deserialized.
+ :rtype: date
+ :returns: The date object from that input
+ """
+ # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+ if isinstance(attr, date):
+ return attr
+ return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore
+
+
+def _deserialize_time(attr: typing.Union[str, time]) -> time:
+ """Deserialize ISO-8601 formatted string into time object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: datetime.time
+ :returns: The time object from that input
+ """
+ if isinstance(attr, time):
+ return attr
+ return isodate.parse_time(attr) # type: ignore[no-any-return]
+
+
+def _deserialize_bytes(attr):
+ if isinstance(attr, (bytes, bytearray)):
+ return attr
+ return bytes(base64.b64decode(attr))
+
+
+def _deserialize_bytes_base64(attr):
+ if isinstance(attr, (bytes, bytearray)):
+ return attr
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
+ encoded = attr.replace("-", "+").replace("_", "/")
+ return bytes(base64.b64decode(encoded))
+
+
+def _deserialize_duration(attr):
+ if isinstance(attr, timedelta):
+ return attr
+ return isodate.parse_duration(attr)
+
+
+def _deserialize_decimal(attr):
+ if isinstance(attr, decimal.Decimal):
+ return attr
+ return decimal.Decimal(str(attr))
+
+
+def _deserialize_int_as_str(attr):
+ if isinstance(attr, int):
+ return attr
+ return int(attr)
+
+
# Deserializer chosen from the target annotation when no wire format is specified.
_DESERIALIZE_MAPPING = {
    datetime: _deserialize_datetime,
    date: _deserialize_date,
    time: _deserialize_time,
    bytes: _deserialize_bytes,
    bytearray: _deserialize_bytes,
    timedelta: _deserialize_duration,
    typing.Any: lambda x: x,  # Any passes through untouched
    decimal.Decimal: _deserialize_decimal,
}

# Deserializer chosen from an explicit wire format declared on the rest field.
_DESERIALIZE_MAPPING_WITHFORMAT = {
    "rfc3339": _deserialize_datetime,
    "rfc7231": _deserialize_datetime_rfc7231,
    "unix-timestamp": _deserialize_datetime_unix_timestamp,
    "base64": _deserialize_bytes,
    "base64url": _deserialize_bytes_base64,
}
+
+
def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
    """Pick the deserialization callable for an annotation, honoring the field's wire format."""
    fmt = rf._format if rf else None
    if annotation is int and fmt == "str":
        return _deserialize_int_as_str
    if annotation is str and fmt in _ARRAY_ENCODE_MAPPING:
        return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[fmt])
    if fmt:
        return _DESERIALIZE_MAPPING_WITHFORMAT.get(fmt)
    return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore
+
+
+def _get_type_alias_type(module_name: str, alias_name: str):
+ types = {
+ k: v
+ for k, v in sys.modules[module_name].__dict__.items()
+ if isinstance(v, typing._GenericAlias) # type: ignore
+ }
+ if alias_name not in types:
+ return alias_name
+ return types[alias_name]
+
+
+def _get_model(module_name: str, model_name: str):
+ models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
+ module_end = module_name.rsplit(".", 1)[0]
+ models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
+ if isinstance(model_name, str):
+ model_name = model_name.split(".")[-1]
+ if model_name not in models:
+ return model_name
+ return models[model_name]
+
+
+_UNSET = object()
+
+
class _MyMutableMapping(MutableMapping[str, typing.Any]):
    """Mutable-mapping facade over a plain ``dict`` of wire-format data.

    ``__getitem__``/``__setitem__`` cooperate with per-key ``_deserialized_<key>``
    flag attributes (set elsewhere) to lazily re-serialize mutable values.
    """

    def __init__(self, data: dict[str, typing.Any]) -> None:
        self._data = data

    def __contains__(self, key: typing.Any) -> bool:
        return key in self._data

    def __getitem__(self, key: str) -> typing.Any:
        # If this key has been deserialized (for mutable types), we need to handle serialization
        if hasattr(self, "_attr_to_rest_field"):
            cache_attr = f"_deserialized_{key}"
            if hasattr(self, cache_attr):
                rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
                if rf:
                    value = self._data.get(key)
                    if isinstance(value, (dict, list, set)):
                        # For mutable types, serialize and return
                        # But also update _data with serialized form and clear flag
                        # so mutations via this returned value affect _data
                        serialized = _serialize(value, rf._format)
                        # If serialized form is same type (no transformation needed),
                        # return _data directly so mutations work
                        if isinstance(serialized, type(value)) and serialized == value:
                            return self._data.get(key)
                        # Otherwise return serialized copy and clear flag
                        try:
                            object.__delattr__(self, cache_attr)
                        except AttributeError:
                            pass
                        # Store serialized form back
                        self._data[key] = serialized
                        return serialized
        return self._data.__getitem__(key)

    def __setitem__(self, key: str, value: typing.Any) -> None:
        # Clear any cached deserialized value when setting through dictionary access
        cache_attr = f"_deserialized_{key}"
        try:
            object.__delattr__(self, cache_attr)
        except AttributeError:
            pass
        self._data.__setitem__(key, value)

    def __delitem__(self, key: str) -> None:
        self._data.__delitem__(key)

    def __iter__(self) -> typing.Iterator[typing.Any]:
        return self._data.__iter__()

    def __len__(self) -> int:
        return self._data.__len__()

    def __ne__(self, other: typing.Any) -> bool:
        return not self.__eq__(other)

    def keys(self) -> typing.KeysView[str]:
        """
        :returns: a set-like object providing a view on D's keys
        :rtype: ~typing.KeysView
        """
        return self._data.keys()

    def values(self) -> typing.ValuesView[typing.Any]:
        """
        :returns: an object providing a view on D's values
        :rtype: ~typing.ValuesView
        """
        return self._data.values()

    def items(self) -> typing.ItemsView[str, typing.Any]:
        """
        :returns: set-like object providing a view on D's items
        :rtype: ~typing.ItemsView
        """
        return self._data.items()

    def get(self, key: str, default: typing.Any = None) -> typing.Any:
        """
        Get the value for key if key is in the dictionary, else default.
        :param str key: The key to look up.
        :param any default: The value to return if key is not in the dictionary. Defaults to None
        :returns: D[k] if k in D, else d.
        :rtype: any
        """
        try:
            return self[key]
        except KeyError:
            return default

    @typing.overload
    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ

    @typing.overload
    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs

    @typing.overload
    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs

    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
        """
        Removes specified key and return the corresponding value.
        :param str key: The key to pop.
        :param any default: The value to return if key is not in the dictionary
        :returns: The value corresponding to the key.
        :rtype: any
        :raises KeyError: If key is not found and default is not given.
        """
        if default is _UNSET:
            return self._data.pop(key)
        return self._data.pop(key, default)

    def popitem(self) -> tuple[str, typing.Any]:
        """
        Removes and returns some (key, value) pair
        :returns: The (key, value) pair.
        :rtype: tuple
        :raises KeyError: if D is empty.
        """
        return self._data.popitem()

    def clear(self) -> None:
        """
        Remove all items from D.
        """
        self._data.clear()

    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
        """
        Updates D from mapping/iterable E and F.
        :param any args: Either a mapping object or an iterable of key-value pairs.
        """
        self._data.update(*args, **kwargs)

    @typing.overload
    def setdefault(self, key: str, default: None = None) -> None: ...

    @typing.overload
    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs

    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
        """
        Same as calling D.get(k, d), and setting D[k]=d if k not found
        :param str key: The key to look up.
        :param any default: The value to set if key is not in the dictionary
        :returns: D[k] if k in D, else d.
        :rtype: any
        """
        if default is _UNSET:
            return self._data.setdefault(key)
        return self._data.setdefault(key, default)

    def __eq__(self, other: typing.Any) -> bool:
        # Compare underlying data; non-mapping operands are coerced through this class.
        if isinstance(other, _MyMutableMapping):
            return self._data == other._data
        try:
            other_model = self.__class__(other)
        except Exception:
            return False
        return self._data == other_model._data

    def __repr__(self) -> str:
        return str(self._data)
+
+
+def _is_model(obj: typing.Any) -> bool:
+ return getattr(obj, "_is_model", False)
+
+
def _serialize(o, format: typing.Optional[str] = None):  # pylint: disable=too-many-return-statements
    """Recursively convert a value into its JSON-compatible wire form."""
    if isinstance(o, list):
        # A list of strings may collapse to a single delimited string per the format.
        if format in _ARRAY_ENCODE_MAPPING and all(isinstance(item, str) for item in o):
            return _ARRAY_ENCODE_MAPPING[format].join(o)
        return [_serialize(item, format) for item in o]
    if isinstance(o, dict):
        return {key: _serialize(val, format) for key, val in o.items()}
    if isinstance(o, set):
        return {_serialize(item, format) for item in o}
    if isinstance(o, tuple):
        return tuple(_serialize(item, format) for item in o)
    if isinstance(o, (bytes, bytearray)):
        return _serialize_bytes(o, format)
    if isinstance(o, decimal.Decimal):
        return float(o)
    if isinstance(o, enum.Enum):
        return o.value
    if isinstance(o, int):
        return str(o) if format == "str" else o
    try:
        # First try datetime.datetime
        return _serialize_datetime(o, format)
    except AttributeError:
        pass
    # Last, try datetime.timedelta
    try:
        return _timedelta_as_isostr(o)
    except AttributeError:
        # Raised when the helper reaches value.total_seconds on a non-timedelta
        pass
    return o
+
+
+def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]:
+ try:
+ return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
+ except StopIteration:
+ return None
+
+
def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
    """Normalize an input value for storage, guided by its rest-field metadata."""
    if not rf:
        return _serialize(value, None)
    if rf._is_multipart_file_input:
        return value  # file inputs are stored untouched
    if rf._is_model:
        return _deserialize(rf._type, value)
    if isinstance(value, ET.Element):
        value = _deserialize(rf._type, value)
    return _serialize(value, rf._format)
+
+
class Model(_MyMutableMapping):
    """Base class for generated models: a mutable mapping of wire-format data
    built from declared rest fields, keyword arguments, a mapping, or an XML element."""

    _is_model = True
    # label whether current class's _attr_to_rest_field has been calculated
    # could not see _attr_to_rest_field directly because subclass inherits it from parent class
    _calculated: set[str] = set()

    def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
        class_name = self.__class__.__name__
        if len(args) > 1:
            raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given")
        # Seed with declared defaults; positional mapping/XML or keyword args override them.
        dict_to_pass = {
            rest_field._rest_name: rest_field._default
            for rest_field in self._attr_to_rest_field.values()
            if rest_field._default is not _UNSET
        }
        if args:  # pylint: disable=too-many-nested-blocks
            if isinstance(args[0], ET.Element):
                existed_attr_keys = []
                model_meta = getattr(self, "_xml", {})

                for rf in self._attr_to_rest_field.values():
                    prop_meta = getattr(rf, "_xml", {})
                    xml_name = prop_meta.get("name", rf._rest_name)
                    xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
                    if xml_ns:
                        xml_name = "{" + xml_ns + "}" + xml_name

                    # attribute
                    if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
                        existed_attr_keys.append(xml_name)
                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
                        continue

                    # unwrapped element is array
                    if prop_meta.get("unwrapped", False):
                        # unwrapped array could either use prop items meta/prop meta
                        if prop_meta.get("itemsName"):
                            xml_name = prop_meta.get("itemsName")
                            xml_ns = prop_meta.get("itemNs")
                            if xml_ns:
                                xml_name = "{" + xml_ns + "}" + xml_name
                        items = args[0].findall(xml_name)  # pyright: ignore
                        if len(items) > 0:
                            existed_attr_keys.append(xml_name)
                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
                        elif not rf._is_optional:
                            existed_attr_keys.append(xml_name)
                            dict_to_pass[rf._rest_name] = []
                        continue

                    # text element is primitive type
                    if prop_meta.get("text", False):
                        if args[0].text is not None:
                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
                        continue

                    # wrapped element could be normal property or array, it should only have one element
                    item = args[0].find(xml_name)
                    if item is not None:
                        existed_attr_keys.append(xml_name)
                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)

                # rest thing is additional properties
                for e in args[0]:
                    if e.tag not in existed_attr_keys:
                        dict_to_pass[e.tag] = _convert_element(e)
            else:
                dict_to_pass.update(
                    {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
                )
        else:
            non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
            if non_attr_kwargs:
                # actual type errors only throw the first wrong keyword arg they see, so following that.
                raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'")
            dict_to_pass.update(
                {
                    self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v)
                    for k, v in kwargs.items()
                    if v is not None
                }
            )
        super().__init__(dict_to_pass)

    def copy(self) -> "Model":
        # NOTE(review): copies via self.__dict__ (which holds the {"_data": ...} wrapper)
        # rather than self._data, and always returns the base Model type — confirm intended.
        return Model(self.__dict__)

    def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
        # Lazily compute the attr -> rest-field mapping once per concrete class.
        if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
            # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
            # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
            mros = cls.__mro__[:-9][::-1]  # ignore parents, and reverse the mro order
            attr_to_rest_field: dict[str, _RestField] = {  # map attribute name to rest_field property
                k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
            }
            annotations = {
                k: v
                for mro_class in mros
                if hasattr(mro_class, "__annotations__")
                for k, v in mro_class.__annotations__.items()
            }
            for attr, rf in attr_to_rest_field.items():
                rf._module = cls.__module__
                if not rf._type:
                    rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
                if not rf._rest_name_input:
                    rf._rest_name_input = attr
            cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
            cls._backcompat_attr_to_rest_field: dict[str, _RestField] = {
                Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf
                for attr, rf in cls._attr_to_rest_field.items()
            }
            cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")

        return super().__new__(cls)

    def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
        # Register subclasses in any polymorphic base's discriminator mapping.
        for base in cls.__bases__:
            if hasattr(base, "__mapping__"):
                base.__mapping__[discriminator or cls.__name__] = cls  # type: ignore

    @classmethod
    def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str:
        # Prefer the original TypeSpec name when the rest field records one.
        rest_field_obj = attr_to_rest_field.get(attr_name)  # pylint: disable=protected-access
        if rest_field_obj is None:
            return attr_name
        original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None)  # pylint: disable=protected-access
        if original_tsp_name:
            return original_tsp_name
        return attr_name

    @classmethod
    def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
        # First discriminator rest field declared on this class not already consumed.
        for v in cls.__dict__.values():
            if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
                return v
        return None

    @classmethod
    def _deserialize(cls, data, exist_discriminators):
        # Walk the discriminator chain to pick the most specific subclass for ``data``.
        if not hasattr(cls, "__mapping__"):
            return cls(data)
        discriminator = cls._get_discriminator(exist_discriminators)
        if discriminator is None:
            return cls(data)
        exist_discriminators.append(discriminator._rest_name)
        if isinstance(data, ET.Element):
            model_meta = getattr(cls, "_xml", {})
            prop_meta = getattr(discriminator, "_xml", {})
            xml_name = prop_meta.get("name", discriminator._rest_name)
            xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
            if xml_ns:
                xml_name = "{" + xml_ns + "}" + xml_name

            # The discriminator may live in an attribute or a child element's text.
            if data.get(xml_name) is not None:
                discriminator_value = data.get(xml_name)
            else:
                discriminator_value = data.find(xml_name).text  # pyright: ignore
        else:
            discriminator_value = data.get(discriminator._rest_name)
        mapped_cls = cls.__mapping__.get(discriminator_value, cls)  # pyright: ignore # pylint: disable=no-member
        return mapped_cls._deserialize(data, exist_discriminators)

    def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]:
        """Return a dict that can be turned into json using json.dump.

        :keyword bool exclude_readonly: Whether to remove the readonly properties.
        :returns: A dict JSON compatible object
        :rtype: dict
        """

        result = {}
        readonly_props = []
        if exclude_readonly:
            readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
        for k, v in self.items():
            if exclude_readonly and k in readonly_props:  # pyright: ignore
                continue
            is_multipart_file_input = False
            try:
                is_multipart_file_input = next(
                    rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
                )._is_multipart_file_input
            except StopIteration:
                pass
            # Multipart file inputs are passed through untouched; everything else recurses.
            result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
        return result

    @staticmethod
    def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
        # Recursively convert nested containers and models; _Null collapses to None.
        if v is None or isinstance(v, _Null):
            return None
        if isinstance(v, (list, tuple, set)):
            return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
        if isinstance(v, dict):
            return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
        return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v
+
+
def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj):
    """Deserialize ``obj`` into a model, passing through values that already are models."""
    return obj if _is_model(obj) else _deserialize(model_deserializer, obj)
+
+
def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
    """Deserializer for ``Optional[...]`` fields: ``None`` passes straight through."""
    return None if obj is None else _deserialize_with_callable(if_obj_deserializer, obj)
+
+
def _deserialize_with_union(deserializers, obj):
    """Try each union-member deserializer in order; the first success wins.

    :raises DeserializationError: if no member deserializer accepts ``obj``.
    """
    for candidate in deserializers:
        try:
            return _deserialize(candidate, obj)
        except DeserializationError:
            continue
    raise DeserializationError()
+
+
def _deserialize_dict(
    value_deserializer: typing.Optional[typing.Callable],
    module: typing.Optional[str],
    obj: dict[typing.Any, typing.Any],
):
    """Deserialize a mapping, applying ``value_deserializer`` to every value."""
    if obj is None:
        return obj
    if isinstance(obj, ET.Element):
        # XML: each child element becomes an entry keyed by its tag.
        obj = {child.tag: child for child in obj}
    result = {}
    for key, val in obj.items():
        result[key] = _deserialize(value_deserializer, val, module)
    return result
+
+
def _deserialize_multiple_sequence(
    entry_deserializers: list[typing.Optional[typing.Callable]],
    module: typing.Optional[str],
    obj,
):
    """Deserialize a heterogeneous sequence (e.g. a tuple type) element-wise,
    pairing each entry with its positional deserializer."""
    if obj is None:
        return obj
    converted = (
        _deserialize(entry_deserializer, entry, module)
        for entry_deserializer, entry in zip(entry_deserializers, obj)
    )
    return type(obj)(converted)
+
+
+def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool:
+ return (
+ isinstance(deserializer, functools.partial)
+ and isinstance(deserializer.args[0], functools.partial)
+ and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable
+ )
+
+
def _deserialize_sequence(
    deserializer: typing.Optional[typing.Callable],
    module: typing.Optional[str],
    obj,
):
    # Deserialize a homogeneous sequence element-wise with ``deserializer``.
    # An XML element is treated as the list of its children; an encoded string
    # (comma/space-separated wire format) is handed to the array-encoded
    # deserializer when one is present in the chain.
    if obj is None:
        return obj
    if isinstance(obj, ET.Element):
        obj = list(obj)

    # encoded string may be deserialized to sequence
    if isinstance(obj, str) and isinstance(deserializer, functools.partial):
        # for list[str]
        if _is_array_encoded_deserializer(deserializer):
            return deserializer(obj)

        # for list[Union[...]]
        if isinstance(deserializer.args[0], list):
            for sub_deserializer in deserializer.args[0]:
                if _is_array_encoded_deserializer(sub_deserializer):
                    return sub_deserializer(obj)

    # Rebuild the same container type with deserialized entries.
    return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
+
+
+def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]:
+ return sorted(
+ types,
+ key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
+ )
+
+
def _get_deserialize_callable_from_annotation(  # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches
    annotation: typing.Any,
    module: typing.Optional[str],
    rf: typing.Optional["_RestField"] = None,
) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
    """Compile a type annotation into a deserializer callable.

    Walks the annotation in a fixed order -- alias, forward ref, model,
    Literal, Optional, Union, dict, sequence, primitive/default -- and returns
    a callable converting wire values to that type, or ``None`` when no
    conversion is needed. Also flags ``rf`` (the owning field, if given) as
    model/optional where applicable.
    """
    if not annotation:
        return None

    # is it a type alias?
    if isinstance(annotation, str):
        if module is not None:
            annotation = _get_type_alias_type(module, annotation)

    # is it a forward ref / in quotes?
    if isinstance(annotation, (str, typing.ForwardRef)):
        try:
            model_name = annotation.__forward_arg__  # type: ignore
        except AttributeError:
            model_name = annotation
        if module is not None:
            annotation = _get_model(module, model_name)  # type: ignore

    try:
        if module and _is_model(annotation):
            if rf:
                rf._is_model = True

            return functools.partial(_deserialize_model, annotation)  # pyright: ignore
    except Exception:
        pass

    # is it a literal?
    try:
        if annotation.__origin__ is typing.Literal:  # pyright: ignore
            return None
    except AttributeError:
        pass

    # is it optional?
    try:
        if any(a is _NONE_TYPE for a in annotation.__args__):  # pyright: ignore
            if rf:
                rf._is_optional = True
            if len(annotation.__args__) <= 2:  # pyright: ignore
                # Optional[T]: deserialize the single non-None member.
                if_obj_deserializer = _get_deserialize_callable_from_annotation(
                    next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf  # pyright: ignore
                )

                return functools.partial(_deserialize_with_optional, if_obj_deserializer)
            # the type is Optional[Union[...]], we need to remove the None type from the Union
            annotation_copy = copy.copy(annotation)
            annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE]  # pyright: ignore
            return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
    except AttributeError:
        pass

    # is it union?
    if getattr(annotation, "__origin__", None) is typing.Union:
        # initial ordering is we make `string` the last deserialization option, because it is often them most generic
        deserializers = [
            _get_deserialize_callable_from_annotation(arg, module, rf)
            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
        ]

        return functools.partial(_deserialize_with_union, deserializers)

    try:
        annotation_name = (
            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
        )
        if annotation_name.lower() == "dict":
            # dict[K, V]: only values are deserialized; keys stay as-is.
            value_deserializer = _get_deserialize_callable_from_annotation(
                annotation.__args__[1], module, rf  # pyright: ignore
            )

            return functools.partial(
                _deserialize_dict,
                value_deserializer,
                module,
            )
    except (AttributeError, IndexError):
        pass
    try:
        annotation_name = (
            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
        )
        if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
            if len(annotation.__args__) > 1:  # pyright: ignore
                # Heterogeneous sequence (e.g. tuple[int, str]): one deserializer per slot.
                entry_deserializers = [
                    _get_deserialize_callable_from_annotation(dt, module, rf)
                    for dt in annotation.__args__  # pyright: ignore
                ]
                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
            deserializer = _get_deserialize_callable_from_annotation(
                annotation.__args__[0], module, rf  # pyright: ignore
            )

            return functools.partial(_deserialize_sequence, deserializer, module)
    except (TypeError, IndexError, AttributeError, SyntaxError):
        pass

    def _deserialize_default(
        deserializer,
        obj,
    ):
        # Best-effort fallback: try the deserializer, and on any failure
        # return the raw value unchanged rather than raising.
        if obj is None:
            return obj
        try:
            return _deserialize_with_callable(deserializer, obj)
        except Exception:
            pass
        return obj

    if get_deserializer(annotation, rf):
        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))

    return functools.partial(_deserialize_default, annotation)
+
+
def _deserialize_with_callable(
    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
    value: typing.Any,
):  # pylint: disable=too-many-return-statements
    """Apply ``deserializer`` to ``value``.

    Handles XML elements (deserializing their text per target type), primitives,
    case-insensitive enums (unknown enum values fall back to the raw value) and
    model classes. Any failure is wrapped in :class:`DeserializationError`.
    """
    try:
        if value is None or isinstance(value, _Null):
            return None
        if isinstance(value, ET.Element):
            # XML leaf: convert the element's text according to the target type.
            if deserializer is str:
                return value.text or ""
            if deserializer is int:
                return int(value.text) if value.text else None
            if deserializer is float:
                return float(value.text) if value.text else None
            if deserializer is bool:
                return value.text == "true" if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING.values():
                return deserializer(value.text) if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values():
                return deserializer(value.text) if value.text else None
        if deserializer is None:
            return value
        if deserializer in [int, float, bool]:
            return deserializer(value)
        if isinstance(deserializer, CaseInsensitiveEnumMeta):
            try:
                return deserializer(value.text if isinstance(value, ET.Element) else value)
            except ValueError:
                # for unknown value, return raw value
                return value.text if isinstance(value, ET.Element) else value
        if isinstance(deserializer, type) and issubclass(deserializer, Model):
            return deserializer._deserialize(value, [])
        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
    except Exception as e:
        raise DeserializationError() from e
+
+
def _deserialize(
    deserializer: typing.Any,
    value: typing.Any,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Deserialize ``value`` (raw payload or a ``PipelineResponse``) with ``deserializer``.

    ``deserializer`` may already be a prepared ``functools.partial``; otherwise
    it is treated as a type annotation and compiled into a callable first.
    """
    if isinstance(value, PipelineResponse):
        value = value.http_response.json()
    if rf is None and format:
        # Build a throwaway field so the format (e.g. date encoding) is honored.
        rf = _RestField(format=format)
    if not isinstance(deserializer, functools.partial):
        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
    return _deserialize_with_callable(deserializer, value)
+
+
def _failsafe_deserialize(
    deserializer: typing.Any,
    response: HttpResponse,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    """Deserialize a JSON response body, returning ``None`` instead of raising on failure.

    Intended for error-response models, where a deserialization problem must
    never mask the original service error.
    """
    try:
        return _deserialize(deserializer, response.json(), module, rf, format)
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
    return None
+
+
def _failsafe_deserialize_xml(
    deserializer: typing.Any,
    response: HttpResponse,
) -> typing.Any:
    """Deserialize an XML response body, returning ``None`` instead of raising on failure."""
    try:
        return _deserialize_xml(deserializer, response.text())
    except Exception:  # pylint: disable=broad-except
        _LOGGER.warning(
            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
        )
    return None
+
+
# pylint: disable=too-many-instance-attributes
class _RestField:
    """Descriptor describing a single wire field of a :class:`Model`.

    Holds the field's wire name, type annotation, visibility, default,
    serialization format and XML metadata, and lazily (de)serializes values
    on attribute access, caching deserialized mutable values in the model's
    backing ``_data`` dict.
    """

    def __init__(
        self,
        *,
        name: typing.Optional[str] = None,
        type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
        is_discriminator: bool = False,
        visibility: typing.Optional[list[str]] = None,
        default: typing.Any = _UNSET,
        format: typing.Optional[str] = None,
        is_multipart_file_input: bool = False,
        xml: typing.Optional[dict[str, typing.Any]] = None,
        original_tsp_name: typing.Optional[str] = None,
    ):
        self._type = type
        self._rest_name_input = name
        self._module: typing.Optional[str] = None
        self._is_discriminator = is_discriminator
        self._visibility = visibility
        self._is_model = False
        self._is_optional = False
        self._default = default
        self._format = format
        self._is_multipart_file_input = is_multipart_file_input
        self._xml = xml if xml is not None else {}
        self._original_tsp_name = original_tsp_name

    @property
    def _class_type(self) -> typing.Any:
        # The concrete type carried by this field's deserializer partial.
        result = getattr(self._type, "args", [None])[0]
        # type may be wrapped by nested functools.partial so we need to check for that
        if isinstance(result, functools.partial):
            return getattr(result, "args", [None])[0]
        return result

    @property
    def _rest_name(self) -> str:
        # Wire name of the field; assigned when the field is bound to a model class.
        if self._rest_name_input is None:
            raise ValueError("Rest name was never set")
        return self._rest_name_input

    def __get__(self, obj: Model, type=None):  # pylint: disable=redefined-builtin
        # by this point, type and rest_name will have a value bc we default
        # them in __new__ of the Model class
        # Use _data.get() directly to avoid triggering __getitem__ which clears the cache
        item = obj._data.get(self._rest_name)
        if item is None:
            return item
        if self._is_model:
            return item

        # For mutable types, we want mutations to directly affect _data
        # Check if we've already deserialized this value
        cache_attr = f"_deserialized_{self._rest_name}"
        if hasattr(obj, cache_attr):
            # Return the value from _data directly (it's been deserialized in place)
            return obj._data.get(self._rest_name)

        deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)

        # For mutable types, store the deserialized value back in _data
        # so mutations directly affect _data
        if isinstance(deserialized, (dict, list, set)):
            obj._data[self._rest_name] = deserialized
            object.__setattr__(obj, cache_attr, True)  # Mark as deserialized
            return deserialized

        return deserialized

    def __set__(self, obj: Model, value) -> None:
        # Clear the cached deserialized object when setting a new value
        cache_attr = f"_deserialized_{self._rest_name}"
        if hasattr(obj, cache_attr):
            object.__delattr__(obj, cache_attr)

        if value is None:
            # we want to wipe out entries if users set attr to None
            try:
                obj.__delitem__(self._rest_name)
            except KeyError:
                pass
            return
        if self._is_model:
            if not _is_model(value):
                value = _deserialize(self._type, value)
            obj.__setitem__(self._rest_name, value)
            return
        obj.__setitem__(self._rest_name, _serialize(value, self._format))

    def _get_deserialize_callable_from_annotation(
        self, annotation: typing.Any
    ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
        # Delegate to the module-level resolver, binding this field's module and self.
        return _get_deserialize_callable_from_annotation(annotation, self._module, self)
+
+
def rest_field(
    *,
    name: typing.Optional[str] = None,
    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
    visibility: typing.Optional[list[str]] = None,
    default: typing.Any = _UNSET,
    format: typing.Optional[str] = None,
    is_multipart_file_input: bool = False,
    xml: typing.Optional[dict[str, typing.Any]] = None,
    original_tsp_name: typing.Optional[str] = None,
) -> typing.Any:
    """Declare a (non-discriminator) REST field descriptor on a model class."""
    field_kwargs = {
        "name": name,
        "type": type,
        "visibility": visibility,
        "default": default,
        "format": format,
        "is_multipart_file_input": is_multipart_file_input,
        "xml": xml,
        "original_tsp_name": original_tsp_name,
    }
    return _RestField(**field_kwargs)
+
+
def rest_discriminator(
    *,
    name: typing.Optional[str] = None,
    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
    visibility: typing.Optional[list[str]] = None,
    xml: typing.Optional[dict[str, typing.Any]] = None,
) -> typing.Any:
    """Declare the discriminator field used for polymorphic model dispatch."""
    return _RestField(
        is_discriminator=True,
        name=name,
        type=type,
        visibility=visibility,
        xml=xml,
    )
+
+
def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
    """Serialize a model to XML.

    :param Model model: The model to serialize.
    :param bool exclude_readonly: Whether to exclude readonly properties.
    :returns: The XML representation of the model.
    :rtype: str
    """
    root = _get_element(model, exclude_readonly)
    return ET.tostring(root, encoding="unicode")  # type: ignore
+
+
def _get_element(
    o: typing.Any,
    exclude_readonly: bool = False,
    parent_meta: typing.Optional[dict[str, typing.Any]] = None,
    wrapped_element: typing.Optional[ET.Element] = None,
) -> typing.Union[ET.Element, list[ET.Element]]:
    """Convert ``o`` (model, list, dict or primitive) into XML element(s).

    :param o: The value to serialize.
    :param bool exclude_readonly: Whether to skip readonly model properties.
    :param parent_meta: XML metadata inherited from the containing property.
    :param wrapped_element: Pre-created element to populate (used for models
        that are serialized into an existing wrapper element).
    :returns: A single element for models, or a list of elements for
        list/dict/primitive values.
    :raises ValueError: if ``o`` is a primitive with no parent metadata.
    """
    if _is_model(o):
        model_meta = getattr(o, "_xml", {})

        # if prop is a model, then use the prop element directly, else generate a wrapper of model
        if wrapped_element is None:
            wrapped_element = _create_xml_element(
                model_meta.get("name", o.__class__.__name__),
                model_meta.get("prefix"),
                model_meta.get("ns"),
            )

        readonly_props = []
        if exclude_readonly:
            readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]

        for k, v in o.items():
            # do not serialize readonly properties
            if exclude_readonly and k in readonly_props:
                continue

            prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
            if prop_rest_field:
                prop_meta = getattr(prop_rest_field, "_xml").copy()
                # use the wire name as xml name if no specific name is set
                if prop_meta.get("name") is None:
                    prop_meta["name"] = k
            else:
                # additional properties will not have rest field, use the wire name as xml name
                prop_meta = {"name": k}

            # if no ns for prop, use model's
            if prop_meta.get("ns") is None and model_meta.get("ns"):
                prop_meta["ns"] = model_meta.get("ns")
                prop_meta["prefix"] = model_meta.get("prefix")

            if prop_meta.get("unwrapped", False):
                # unwrapped could only set on array
                wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
            elif prop_meta.get("text", False):
                # text could only set on primitive type
                wrapped_element.text = _get_primitive_type_value(v)
            elif prop_meta.get("attribute", False):
                xml_name = prop_meta.get("name", k)
                if prop_meta.get("ns"):
                    ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns"))  # pyright: ignore
                    xml_name = "{" + prop_meta.get("ns") + "}" + xml_name  # pyright: ignore
                # attribute should be primitive type
                wrapped_element.set(xml_name, _get_primitive_type_value(v))
            else:
                # other wrapped prop element
                wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta))
        return wrapped_element
    if isinstance(o, list):
        return [_get_element(x, exclude_readonly, parent_meta) for x in o]  # type: ignore
    if isinstance(o, dict):
        result = []
        for k, v in o.items():
            result.append(
                _get_wrapped_element(
                    v,
                    exclude_readonly,
                    {
                        "name": k,
                        "ns": parent_meta.get("ns") if parent_meta else None,
                        "prefix": parent_meta.get("prefix") if parent_meta else None,
                    },
                )
            )
        return result

    # primitive case need to create element based on parent_meta
    if parent_meta:
        return _get_wrapped_element(
            o,
            exclude_readonly,
            {
                "name": parent_meta.get("itemsName", parent_meta.get("name")),
                "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")),
                "ns": parent_meta.get("itemsNs", parent_meta.get("ns")),
            },
        )

    # BUGFIX: coerce to str so non-string values raise the intended ValueError
    # instead of a TypeError from concatenating str with a non-str.
    raise ValueError("Could not serialize value into xml: " + str(o))
+
+
def _get_wrapped_element(
    v: typing.Any,
    exclude_readonly: bool,
    meta: typing.Optional[dict[str, typing.Any]],
) -> ET.Element:
    """Create an element named per ``meta`` and populate it with ``v``'s content."""
    tag = meta.get("name") if meta else None
    prefix = meta.get("prefix") if meta else None
    ns = meta.get("ns") if meta else None
    wrapper = _create_xml_element(tag, prefix, ns)
    if isinstance(v, (dict, list)):
        wrapper.extend(_get_element(v, exclude_readonly, meta))
    elif _is_model(v):
        _get_element(v, exclude_readonly, meta, wrapper)
    else:
        wrapper.text = _get_primitive_type_value(v)
    return wrapper  # type: ignore[no-any-return]
+
+
+def _get_primitive_type_value(v) -> str:
+ if v is True:
+ return "true"
+ if v is False:
+ return "false"
+ if isinstance(v, _Null):
+ return ""
+ return str(v)
+
+
+def _create_xml_element(
+ tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None
+) -> ET.Element:
+ if prefix and ns:
+ ET.register_namespace(prefix, ns)
+ if ns:
+ return ET.Element("{" + ns + "}" + tag)
+ return ET.Element(tag)
+
+
def _deserialize_xml(
    deserializer: typing.Any,
    value: str,
) -> typing.Any:
    """Parse an XML string and deserialize its root element with ``deserializer``."""
    root = ET.fromstring(value)  # nosec
    return _deserialize(deserializer, root)
+
+
+def _convert_element(e: ET.Element):
+ # dict case
+ if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
+ dict_result: dict[str, typing.Any] = {}
+ for child in e:
+ if dict_result.get(child.tag) is not None:
+ if isinstance(dict_result[child.tag], list):
+ dict_result[child.tag].append(_convert_element(child))
+ else:
+ dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
+ else:
+ dict_result[child.tag] = _convert_element(child)
+ dict_result.update(e.attrib)
+ return dict_result
+ # array case
+ if len(e) > 0:
+ array_result: list[typing.Any] = []
+ for child in e:
+ array_result.append(_convert_element(child))
+ return array_result
+ # primitive case
+ return e.text
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/serialization.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/serialization.py
new file mode 100644
index 000000000000..81ec1de5922b
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/_utils/serialization.py
@@ -0,0 +1,2041 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ MutableMapping,
+)
+
+try:
+ from urllib import quote # type: ignore
+except ImportError:
+ from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate # type: ignore
+from typing_extensions import Self
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+JSON = MutableMapping[str, Any]
+
+
class RawDeserializer:
    """Low-level payload decoder: turns raw bytes/text/streams into JSON, XML
    or plain text based on the declared content type."""

    # Accept "text" because we're open minded people...
    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")

    # Name used in context
    CONTEXT_NAME = "deserialized_data"

    @classmethod
    def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
        """Decode data according to content-type.

        Accept a stream of data as well, but will be load at once in memory for now.

        If no content-type, will return the string version (not bytes, not stream)

        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
        :type data: str or bytes or IO
        :param str content_type: The content type.
        :return: The deserialized data.
        :rtype: object
        """
        if hasattr(data, "read"):
            # Assume a stream
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            data_as_str = data.decode(encoding="utf-8-sig")
        else:
            # Explain to mypy the correct type.
            data_as_str = cast(str, data)

        # Remove Byte Order Mark if present in string
        data_as_str = data_as_str.lstrip(_BOM)

        if content_type is None:
            return data

        if cls.JSON_REGEXP.match(content_type):
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                raise DeserializationError("JSON is invalid: {}".format(err), err) from err
        # NOTE(review): content_type cannot be None here (early return above);
        # the "or []" guard is legacy defensive code.
        elif "xml" in (content_type or []):
            try:

                try:
                    if isinstance(data, unicode):  # type: ignore
                        # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string
                        data_as_str = data_as_str.encode(encoding="utf-8")  # type: ignore
                except NameError:
                    pass

                return ET.fromstring(data_as_str)  # nosec
            except ET.ParseError as err:
                # It might be because the server has an issue, and returned JSON with
                # content-type XML....
                # So let's try a JSON load, and if it's still broken
                # let's flow the initial exception
                def _json_attemp(data):
                    try:
                        return True, json.loads(data)
                    except ValueError:
                        return False, None  # Don't care about this one

                success, json_result = _json_attemp(data)
                if success:
                    return json_result
                # If i'm here, it's not JSON, it's not XML, let's scream
                # and raise the last context in this block (the XML exception)
                # The function hack is because Py2.7 messes up with exception
                # context otherwise.
                _LOGGER.critical("Wasn't XML not JSON, failing")
                raise DeserializationError("XML is invalid") from err
        elif content_type.startswith("text/"):
            return data_as_str
        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))

    @classmethod
    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
        """Deserialize from HTTP response.

        Use bytes and headers to NOT use any requests/aiohttp or whatever
        specific implementation.
        Headers will tested for "content-type"

        :param bytes body_bytes: The body of the response.
        :param dict headers: The headers of the response.
        :returns: The deserialized data.
        :rtype: object
        """
        # Try to use content-type from headers if available
        content_type = None
        if "content-type" in headers:
            content_type = headers["content-type"].split(";")[0].strip().lower()
        # Ouch, this server did not declare what it sent...
        # Let's guess it's JSON...
        # Also, since Autorest was considering that an empty body was a valid JSON,
        # need that test as well....
        else:
            content_type = "application/json"

        if body_bytes:
            return cls.deserialize_from_text(body_bytes, content_type)
        return None
+
+
# Module-level logger for (de)serialization diagnostics.
_LOGGER = logging.getLogger(__name__)

# Python 2 compatibility shim: ``long`` existed there; on Python 3 plain
# ``int`` already covers arbitrary precision.
try:
    _long_type = long  # type: ignore
except NameError:
    _long_type = int

# Canonical UTC tzinfo used throughout date/time (de)serialization.
TZ_UTC = datetime.timezone.utc
+
+_FLATTEN = re.compile(r"(? None:
+ self.additional_properties: Optional[dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
+ if k not in self._attribute_map:
+ _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+ elif k in self._validation and self._validation[k].get("readonly", False):
+ _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+ else:
+ setattr(self, k, kwargs[k])
+
+ def __eq__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
+ if isinstance(other, self.__class__):
+ return self.__dict__ == other.__dict__
+ return False
+
+ def __ne__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
+ return not self.__eq__(other)
+
+ def __str__(self) -> str:
+ return str(self.__dict__)
+
    @classmethod
    def enable_additional_properties_sending(cls) -> None:
        # Opt this model class into serializing its additional-properties bag
        # by mapping it to the wire catch-all key ("" with msrest type "{object}").
        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+ @classmethod
+ def is_xml_model(cls) -> bool:
+ try:
+ cls._xml_map # type: ignore
+ except AttributeError:
+ return False
+ return True
+
+ @classmethod
+ def _create_xml_node(cls):
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
+ try:
+ xml_map = cls._xml_map # type: ignore
+ except AttributeError:
+ xml_map = {}
+
+ return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+ """Return the JSON that would be sent to server from this model.
+
+ This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+ If you want XML serialization, you can pass the kwargs is_xml=True.
+
+ :param bool keep_readonly: If you want to serialize the readonly attributes
+ :returns: A dict JSON compatible object
+ :rtype: dict
+ """
+ serializer = Serializer(self._infer_class_models())
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
+
    def as_dict(
        self,
        keep_readonly: bool = True,
        key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer,
        **kwargs: Any
    ) -> JSON:
        """Return a dict that can be serialized using json.dump.

        Advanced usage might optionally use a callback as parameter:

        .. code::python

            def my_key_transformer(key, attr_desc, value):
                return key

        Key is the attribute name used in Python. Attr_desc
        is a dict of metadata. Currently contains 'type' with the
        msrest type and 'key' with the RestAPI encoded key.
        Value is the current value in this object.

        The string returned will be used to serialize the key.
        If the return type is a list, this is considered hierarchical
        result dict.

        See the three examples in this file:

        - attribute_transformer
        - full_restapi_key_transformer
        - last_restapi_key_transformer

        If you want XML serialization, you can pass the kwargs is_xml=True.

        :param bool keep_readonly: If you want to serialize the readonly attributes
        :param function key_transformer: A key transformer function.
        :returns: A dict JSON compatible object
        :rtype: dict
        """
        # Delegate to the msrest Serializer, seeded with sibling model classes.
        serializer = Serializer(self._infer_class_models())
        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
            self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
        )
+
+ @classmethod
+ def _infer_class_models(cls):
+ try:
+ str_models = cls.__module__.rsplit(".", 1)[0]
+ models = sys.modules[str_models]
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ if cls.__name__ not in client_models:
+ raise ValueError("Not Autorest generated code")
+ except Exception: # pylint: disable=broad-exception-caught
+ # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
+ client_models = {cls.__name__: cls}
+ return client_models
+
+ @classmethod
+ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
+ """Parse a str using the RestAPI syntax and return a model.
+
+ :param str data: A str using RestAPI structure. JSON by default.
+ :param str content_type: JSON by default, set application/xml if XML.
+ :returns: An instance of this model
+ :raises DeserializationError: if something went wrong
+ :rtype: Self
+ """
+ deserializer = Deserializer(cls._infer_class_models())
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
+
    @classmethod
    def from_dict(
        cls,
        data: Any,
        key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None,
        content_type: Optional[str] = None,
    ) -> Self:
        """Parse a dict using given key extractor return a model.

        By default consider key
        extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
        and last_rest_key_case_insensitive_extractor)

        :param dict data: A dict using RestAPI structure
        :param function key_extractors: A key extractor function.
        :param str content_type: JSON by default, set application/xml if XML.
        :returns: An instance of this model
        :raises DeserializationError: if something went wrong
        :rtype: Self
        """
        deserializer = Deserializer(cls._infer_class_models())
        # Default to case-insensitive extractors so both attribute-cased and
        # wire-cased keys are accepted.
        deserializer.key_extractors = (  # type: ignore
            [  # type: ignore
                attribute_key_case_insensitive_extractor,
                rest_key_case_insensitive_extractor,
                last_rest_key_case_insensitive_extractor,
            ]
            if key_extractors is None
            else key_extractors
        )
        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
+
+ @classmethod
+ def _flatten_subtype(cls, key, objects):
+ if "_subtype_map" not in cls.__dict__:
+ return {}
+ result = dict(cls._subtype_map[key])
+ for valuetype in cls._subtype_map[key].values():
+ result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access
+ return result
+
    @classmethod
    def _classify(cls, response, objects):
        """Check the class _subtype_map for any child classes.
        We want to ignore any inherited _subtype_maps.

        :param dict response: The initial data
        :param dict objects: The class objects
        :returns: The class to be used
        :rtype: class
        """
        for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
            subtype_value = None

            if not isinstance(response, ET.Element):
                # JSON payload: prefer the wire key, then the Python attribute name.
                rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
                subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
            else:
                subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
            if subtype_value:
                # Try to match base class. Can be class name only
                # (bug to fix in Autorest to support x-ms-discriminator-name)
                if cls.__name__ == subtype_value:
                    return cls
                flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
                try:
                    return objects[flatten_mapping_type[subtype_value]]  # type: ignore
                except KeyError:
                    # Unknown discriminator value: warn and fall back to this class.
                    _LOGGER.warning(
                        "Subtype value %s has no mapping, use base class %s.",
                        subtype_value,
                        cls.__name__,
                    )
                    break
            else:
                _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
                break
        return cls
+
+ @classmethod
+ def _get_rest_key_parts(cls, attr_key):
+ """Get the RestAPI key of this attr, split it and decode part
+ :param str attr_key: Attribute key must be in attribute_map.
+ :returns: A list of RestAPI part
+ :rtype: list
+ """
+ rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
+ return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
+
+def _decode_attribute_map_key(key):
+ """This decode a key in an _attribute_map to the actual key we want to look at
+ inside the received data.
+
+ :param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
+ """
+ return key.replace("\\.", ".")
+
+
class Serializer:  # pylint: disable=too-many-public-methods
    """Request object model serializer."""

    # Maps Python builtin types to the type-name strings used in attribute maps.
    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}

    # XML renders booleans lowercase ("true"/"false"), unlike Python's str().
    _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
    # RFC-1123 day/month names, keyed by struct_time's tm_wday / tm_mon.
    days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
    months = {
        1: "Jan",
        2: "Feb",
        3: "Mar",
        4: "Apr",
        5: "May",
        6: "Jun",
        7: "Jul",
        8: "Aug",
        9: "Sep",
        10: "Oct",
        11: "Nov",
        12: "Dec",
    }
    # Each validator returns True when the value VIOLATES the constraint.
    validation = {
        "min_length": lambda x, y: len(x) < y,
        "max_length": lambda x, y: len(x) > y,
        "minimum": lambda x, y: x < y,
        "maximum": lambda x, y: x > y,
        "minimum_ex": lambda x, y: x <= y,
        "maximum_ex": lambda x, y: x >= y,
        "min_items": lambda x, y: len(x) < y,
        "max_items": lambda x, y: len(x) > y,
        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
        "unique": lambda x, y: len(x) != len(set(x)),
        "multiple": lambda x, y: x % y != 0,
    }
+
    def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
        """Build a serializer.

        :param classes: Optional mapping of model class name to class, used to
            resolve model and enum dependencies during serialization.
        """
        # Dispatch table from type-name string to serializer callable.
        self.serialize_type = {
            "iso-8601": Serializer.serialize_iso,
            "rfc-1123": Serializer.serialize_rfc,
            "unix-time": Serializer.serialize_unix,
            "duration": Serializer.serialize_duration,
            "date": Serializer.serialize_date,
            "time": Serializer.serialize_time,
            "decimal": Serializer.serialize_decimal,
            "long": Serializer.serialize_long,
            "bytearray": Serializer.serialize_bytearray,
            "base64": Serializer.serialize_base64,
            "object": self.serialize_object,
            "[]": self.serialize_iter,
            "{}": self.serialize_dict,
        }
        self.dependencies: dict[str, type] = dict(classes) if classes else {}
        # How Python attribute names become RestAPI keys (JSON serialization only).
        self.key_transformer = full_restapi_key_transformer
        self.client_side_validation = True
+
    def _serialize(  # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
        self, target_obj, data_type=None, **kwargs
    ):
        """Serialize data into a string according to type.

        :param object target_obj: The data to be serialized.
        :param str data_type: The type to be serialized from.
        :rtype: str, dict
        :raises SerializationError: if serialization fails.
        :returns: The serialized data.
        """
        key_transformer = kwargs.get("key_transformer", self.key_transformer)
        keep_readonly = kwargs.get("keep_readonly", False)
        if target_obj is None:
            return None

        attr_name = None
        class_name = target_obj.__class__.__name__

        # An explicit data_type wins: delegate straight to serialize_data.
        if data_type:
            return self.serialize_data(target_obj, data_type, **kwargs)

        # Not a model: fall back to basic-type serialization when possible.
        if not hasattr(target_obj, "_attribute_map"):
            data_type = type(target_obj).__name__
            if data_type in self.basic_types.values():
                return self.serialize_data(target_obj, data_type, **kwargs)

        # Force "is_xml" kwargs if we detect a XML model
        try:
            is_xml_model_serialization = kwargs["is_xml"]
        except KeyError:
            is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())

        serialized = {}
        if is_xml_model_serialization:
            serialized = target_obj._create_xml_node()  # pylint: disable=protected-access
        try:
            attributes = target_obj._attribute_map  # pylint: disable=protected-access
            for attr, attr_desc in attributes.items():
                attr_name = attr
                # Readonly attributes are skipped unless the caller opts in.
                if not keep_readonly and target_obj._validation.get(  # pylint: disable=protected-access
                    attr_name, {}
                ).get("readonly", False):
                    continue

                # Open models: merge additional_properties straight into the output.
                if attr_name == "additional_properties" and attr_desc["key"] == "":
                    if target_obj.additional_properties is not None:
                        serialized |= target_obj.additional_properties
                    continue
                try:

                    orig_attr = getattr(target_obj, attr)
                    if is_xml_model_serialization:
                        pass  # Don't provide "transformer" for XML for now. Keep "orig_attr"
                    else:  # JSON
                        keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
                        keys = keys if isinstance(keys, list) else [keys]

                    kwargs["serialization_ctxt"] = attr_desc
                    new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)

                    if is_xml_model_serialization:
                        xml_desc = attr_desc.get("xml", {})
                        xml_name = xml_desc.get("name", attr_desc["key"])
                        xml_prefix = xml_desc.get("prefix", None)
                        xml_ns = xml_desc.get("ns", None)
                        if xml_desc.get("attr", False):
                            if xml_ns:
                                ET.register_namespace(xml_prefix, xml_ns)
                                xml_name = "{{{}}}{}".format(xml_ns, xml_name)
                            serialized.set(xml_name, new_attr)  # type: ignore
                            continue
                        if xml_desc.get("text", False):
                            serialized.text = new_attr  # type: ignore
                            continue
                        if isinstance(new_attr, list):
                            serialized.extend(new_attr)  # type: ignore
                        elif isinstance(new_attr, ET.Element):
                            # If the down XML has no XML/Name,
                            # we MUST replace the tag with the local tag. But keeping the namespaces.
                            if "name" not in getattr(orig_attr, "_xml_map", {}):
                                splitted_tag = new_attr.tag.split("}")
                                if len(splitted_tag) == 2:  # Namespace
                                    new_attr.tag = "}".join([splitted_tag[0], xml_name])
                                else:
                                    new_attr.tag = xml_name
                            serialized.append(new_attr)  # type: ignore
                        else:  # That's a basic type
                            # Integrate namespace if necessary
                            local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
                            local_node.text = str(new_attr)
                            serialized.append(local_node)  # type: ignore
                    else:  # JSON
                        # Rebuild the flattened key path as nested dicts, innermost first.
                        for k in reversed(keys):  # type: ignore
                            new_attr = {k: new_attr}

                        _new_attr = new_attr
                        _serialized = serialized
                        # Walk down existing nesting, grafting in missing levels.
                        for k in keys:  # type: ignore
                            if k not in _serialized:
                                _serialized.update(_new_attr)  # type: ignore
                            _new_attr = _new_attr[k]  # type: ignore
                            _serialized = _serialized[k]
                except ValueError as err:
                    if isinstance(err, SerializationError):
                        raise

        except (AttributeError, KeyError, TypeError) as err:
            msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
            raise SerializationError(msg) from err
        return serialized
+
    def body(self, data, data_type, **kwargs):
        """Serialize data intended for a request body.

        :param object data: The data to be serialized.
        :param str data_type: The type to be serialized from.
        :rtype: dict
        :raises SerializationError: if serialization fails.
        :raises ValueError: if data is None
        :returns: The serialized request body
        """

        # Just in case this is a dict
        internal_data_type_str = data_type.strip("[]{}")
        internal_data_type = self.dependencies.get(internal_data_type_str, None)
        try:
            is_xml_model_serialization = kwargs["is_xml"]
        except KeyError:
            if internal_data_type and issubclass(internal_data_type, Model):
                is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
            else:
                is_xml_model_serialization = False
        if internal_data_type and not isinstance(internal_data_type, Enum):
            try:
                # Round-trip raw input (e.g. a plain dict) through deserialization,
                # so that the value serialized below is a full model instance.
                deserializer = Deserializer(self.dependencies)
                # Since it's on serialization, it's almost sure that format is not JSON REST
                # We're not able to deal with additional properties for now.
                deserializer.additional_properties_detection = False
                if is_xml_model_serialization:
                    deserializer.key_extractors = [  # type: ignore
                        attribute_key_case_insensitive_extractor,
                    ]
                else:
                    deserializer.key_extractors = [
                        rest_key_case_insensitive_extractor,
                        attribute_key_case_insensitive_extractor,
                        last_rest_key_case_insensitive_extractor,
                    ]
                data = deserializer._deserialize(data_type, data)  # pylint: disable=protected-access
            except DeserializationError as err:
                raise SerializationError("Unable to build a model: " + str(err)) from err

        return self._serialize(data, data_type, **kwargs)
+
+ def url(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL path.
+
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :returns: The serialized URL path
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ """
+ try:
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
+
+ def query(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL query.
+
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str, list
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized query parameter
+ """
+ try:
+ # Treat the list aside, since we don't want to encode the div separator
+ if data_type.startswith("["):
+ internal_data_type = data_type[1:-1]
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
+
+ # Not a list, regular serialization
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
+
+ def header(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a request header.
+
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized header
+ """
+ try:
+ if data_type in ["[str]"]:
+ data = ["" if d is None else d for d in data]
+
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
+
    def serialize_data(self, data, data_type, **kwargs):
        """Serialize generic data according to supplied data type.

        :param object data: The data to be serialized.
        :param str data_type: The type to be serialized from.
        :raises AttributeError: if required data is None.
        :raises ValueError: if data is None
        :raises SerializationError: if serialization fails.
        :returns: The serialized data.
        :rtype: str, int, float, bool, dict, list
        """
        if data is None:
            raise ValueError("No value for given attribute")

        try:
            # Explicit REST "null" sentinel: serialize as JSON null.
            if data is CoreNull:
                return None
            if data_type in self.basic_types.values():
                return self.serialize_basic(data, data_type, **kwargs)

            if data_type in self.serialize_type:
                return self.serialize_type[data_type](data, **kwargs)

            # If dependencies is empty, try with current data class
            # It has to be a subclass of Enum anyway
            enum_type = self.dependencies.get(data_type, cast(type, data.__class__))
            if issubclass(enum_type, Enum):
                return Serializer.serialize_enum(data, enum_obj=enum_type)

            # "[elementType]" or "{valueType}": dispatch on the bracket pair.
            iter_type = data_type[0] + data_type[-1]
            if iter_type in self.serialize_type:
                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)

        except (ValueError, TypeError) as err:
            msg = "Unable to serialize value: {!r} as type: {!r}."
            raise SerializationError(msg.format(data, data_type)) from err
        # Anything left is treated as a model type.
        return self._serialize(data, **kwargs)
+
+ @classmethod
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
+ custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+ if custom_serializer:
+ return custom_serializer
+ if kwargs.get("is_xml", False):
+ return cls._xml_basic_types_serializers.get(data_type)
+
+ @classmethod
+ def serialize_basic(cls, data, data_type, **kwargs):
+ """Serialize basic builting data type.
+ Serializes objects to str, int, float or bool.
+
+ Possible kwargs:
+ - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+ - is_xml bool : If set, use xml_basic_types_serializers
+
+ :param obj data: Object to be serialized.
+ :param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
+ :raises TypeError: raise if data_type is not one of str, int, float, bool.
+ """
+ custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+ if custom_serializer:
+ return custom_serializer(data)
+ if data_type == "str":
+ return cls.serialize_unicode(data)
+ if data_type == "int":
+ return int(data)
+ if data_type == "float":
+ return float(data)
+ if data_type == "bool":
+ return bool(data)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
+
+ @classmethod
+ def serialize_unicode(cls, data):
+ """Special handling for serializing unicode strings in Py2.
+ Encode to UTF-8 if unicode, otherwise handle as a str.
+
+ :param str data: Object to be serialized.
+ :rtype: str
+ :return: serialized object
+ """
+ try: # If I received an enum, return its value
+ return data.value
+ except AttributeError:
+ pass
+
+ try:
+ if isinstance(data, unicode): # type: ignore
+ # Don't change it, JSON and XML ElementTree are totally able
+ # to serialize correctly u'' strings
+ return data
+ except NameError:
+ return str(data)
+ return str(data)
+
    def serialize_iter(self, data, iter_type, div=None, **kwargs):
        """Serialize iterable.

        Supported kwargs:
        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
          serialization_ctxt['type'] should be same as data_type.
        - is_xml bool : If set, serialize as XML

        :param list data: Object to be serialized.
        :param str iter_type: Type of object in the iterable.
        :param str div: If set, this str will be used to combine the elements
         in the iterable into a combined string. Default is 'None'.
        :rtype: list, str
        :return: serialized iterable
        """
        if isinstance(data, str):
            raise SerializationError("Refuse str type as a valid iter type.")

        serialization_ctxt = kwargs.get("serialization_ctxt", {})
        is_xml = kwargs.get("is_xml", False)

        serialized = []
        for d in data:
            try:
                serialized.append(self.serialize_data(d, iter_type, **kwargs))
            except ValueError as err:
                if isinstance(err, SerializationError):
                    raise
                # Unserializable element becomes an explicit null placeholder.
                serialized.append(None)

        if kwargs.get("do_quote", False):
            # Percent-encode each element (query-parameter lists).
            serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]

        if div:
            # Join into one string using the requested separator.
            serialized = ["" if s is None else str(s) for s in serialized]
            serialized = div.join(serialized)

        if "xml" in serialization_ctxt or is_xml:
            # XML serialization is more complicated
            xml_desc = serialization_ctxt.get("xml", {})
            xml_name = xml_desc.get("name")
            if not xml_name:
                xml_name = serialization_ctxt["key"]

            # Create a wrap node if necessary (use the fact that Element and list have "append")
            is_wrapped = xml_desc.get("wrapped", False)
            node_name = xml_desc.get("itemsName", xml_name)
            if is_wrapped:
                final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
            else:
                final_result = []
            # All list elements to "local_node"
            for el in serialized:
                if isinstance(el, ET.Element):
                    el_node = el
                else:
                    el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
                    if el is not None:  # Otherwise it writes "None" :-p
                        el_node.text = str(el)
                final_result.append(el_node)
            return final_result
        return serialized
+
+ def serialize_dict(self, attr, dict_type, **kwargs):
+ """Serialize a dictionary of objects.
+
+ :param dict attr: Object to be serialized.
+ :param str dict_type: Type of object in the dictionary.
+ :rtype: dict
+ :return: serialized dictionary
+ """
+ serialization_ctxt = kwargs.get("serialization_ctxt", {})
+ serialized = {}
+ for key, value in attr.items():
+ try:
+ serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+ serialized[self.serialize_unicode(key)] = None
+
+ if "xml" in serialization_ctxt:
+ # XML serialization is more complicated
+ xml_desc = serialization_ctxt["xml"]
+ xml_name = xml_desc["name"]
+
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ for key, value in serialized.items():
+ ET.SubElement(final_result, key).text = value
+ return final_result
+
+ return serialized
+
    def serialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return-statements
        """Serialize a generic object.
        This will be handled as a dictionary. If object passed in is not
        a basic type (str, int, float, dict, list) it will simply be
        cast to str.

        :param dict attr: Object to be serialized.
        :rtype: dict or str
        :return: serialized object
        """
        if attr is None:
            return None
        if isinstance(attr, ET.Element):
            # Raw XML nodes pass through untouched.
            return attr
        obj_type = type(attr)
        if obj_type in self.basic_types:
            return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
        if obj_type is _long_type:
            return self.serialize_long(attr)
        if obj_type is str:
            return self.serialize_unicode(attr)
        if obj_type is datetime.datetime:
            return self.serialize_iso(attr)
        if obj_type is datetime.date:
            return self.serialize_date(attr)
        if obj_type is datetime.time:
            return self.serialize_time(attr)
        if obj_type is datetime.timedelta:
            return self.serialize_duration(attr)
        if obj_type is decimal.Decimal:
            return self.serialize_decimal(attr)

        # If it's a model or I know this dependency, serialize as a Model
        if obj_type in self.dependencies.values() or isinstance(attr, Model):
            return self._serialize(attr)

        if obj_type == dict:
            serialized = {}
            for key, value in attr.items():
                try:
                    serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
                except ValueError:
                    # Unserializable value: keep the key with an explicit null.
                    serialized[self.serialize_unicode(key)] = None
            return serialized

        if obj_type == list:
            serialized = []
            for obj in attr:
                try:
                    serialized.append(self.serialize_object(obj, **kwargs))
                except ValueError:
                    # Unserializable element: drop it silently.
                    pass
            return serialized
        return str(attr)
+
+ @staticmethod
+ def serialize_enum(attr, enum_obj=None):
+ try:
+ result = attr.value
+ except AttributeError:
+ result = attr
+ try:
+ enum_obj(result) # type: ignore
+ return result
+ except ValueError as exc:
+ for enum_value in enum_obj: # type: ignore
+ if enum_value.value.lower() == str(attr).lower():
+ return enum_value.value
+ error = "{!r} is not valid value for enum {!r}"
+ raise SerializationError(error.format(attr, enum_obj)) from exc
+
+ @staticmethod
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize bytearray into base-64 string.
+
+ :param str attr: Object to be serialized.
+ :rtype: str
+ :return: serialized base64
+ """
+ return b64encode(attr).decode()
+
+ @staticmethod
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize str into base-64 string.
+
+ :param str attr: Object to be serialized.
+ :rtype: str
+ :return: serialized base64
+ """
+ encoded = b64encode(attr).decode("ascii")
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
+
+ @staticmethod
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Decimal object to float.
+
+ :param decimal attr: Object to be serialized.
+ :rtype: float
+ :return: serialized decimal
+ """
+ return float(attr)
+
    @staticmethod
    def serialize_long(attr, **kwargs):  # pylint: disable=unused-argument
        """Serialize long (Py2) or int (Py3).

        :param int attr: Object to be serialized.
        :rtype: int/long
        :return: serialized long
        """
        # _long_type is the module-level alias (int on Python 3, long on Python 2).
        return _long_type(attr)
+
+ @staticmethod
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Date object into ISO-8601 formatted string.
+
+ :param Date attr: Object to be serialized.
+ :rtype: str
+ :return: serialized date
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_date(attr)
+ t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
+ return t
+
+ @staticmethod
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Time object into ISO-8601 formatted string.
+
+ :param datetime.time attr: Object to be serialized.
+ :rtype: str
+ :return: serialized time
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_time(attr)
+ t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+ if attr.microsecond:
+ t += ".{:02}".format(attr.microsecond)
+ return t
+
+ @staticmethod
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize TimeDelta object into ISO-8601 formatted string.
+
+ :param TimeDelta attr: Object to be serialized.
+ :rtype: str
+ :return: serialized duration
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_duration(attr)
+ return isodate.duration_isoformat(attr)
+
+ @staticmethod
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into RFC-1123 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises TypeError: if format invalid.
+ :return: serialized rfc
+ """
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+ return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+ Serializer.days[utc.tm_wday],
+ utc.tm_mday,
+ Serializer.months[utc.tm_mon],
+ utc.tm_year,
+ utc.tm_hour,
+ utc.tm_min,
+ utc.tm_sec,
+ )
+
+ @staticmethod
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into ISO-8601 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises SerializationError: if format invalid.
+ :return: serialized iso
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_datetime(attr)
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ if utc.tm_year > 9999 or utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+
+ microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+ if microseconds:
+ microseconds = "." + microseconds
+ date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+ utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+ )
+ return date + microseconds + "Z"
+ except (ValueError, OverflowError) as err:
+ msg = "Unable to serialize datetime object."
+ raise SerializationError(msg) from err
+ except AttributeError as err:
+ msg = "ISO-8601 object must be valid Datetime object."
+ raise TypeError(msg) from err
+
+ @staticmethod
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: int
+ :raises SerializationError: if format invalid
+ :return: serialied unix
+ """
+ if isinstance(attr, int):
+ return attr
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ return int(calendar.timegm(attr.utctimetuple()))
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Follow the (possibly flattened) RestAPI key path in ``attr_desc`` through ``data``.

    :param str attr: The attribute name (unused; the key comes from attr_desc).
    :param dict attr_desc: The attribute description, holding the "key" JSON path.
    :param dict data: The data to extract from.
    :returns: The extracted value, or None if a flattened segment resolves to None.
    """
    path = attr_desc["key"]
    node = data

    while "." in path:
        # Need the cast, as "split" is otherwise typed as list[str | Any].
        parts = cast(list[str], _FLATTEN.split(path))
        if len(parts) == 1:
            # Only escaped dots remain: this is the final, literal key.
            path = _decode_attribute_map_key(parts[0])
            break
        head = _decode_attribute_map_key(parts[0])
        # NOTE(review): a missing intermediate node falls back to the ORIGINAL
        # top-level data dict (historic msrest behavior) -- preserved as-is.
        node = node.get(head, data)
        if node is None:
            # A None at any flattened level means every leaf under it is None too.
            return None
        path = ".".join(parts[1:])

    return node.get(path)
+
+
def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
    attr, attr_desc, data
):
    """Case-insensitive variant of rest_key_extractor: follow the flattened
    RestAPI key path through ``data``, matching each segment ignoring case.

    :param str attr: The attribute name (unused; the key comes from attr_desc).
    :param dict attr_desc: The attribute description, holding the "key" JSON path.
    :param dict data: The data to extract from.
    :returns: The extracted value, or None (implicitly when nothing matches).
    """
    path = attr_desc["key"]
    node = data

    while "." in path:
        parts = _FLATTEN.split(path)
        if len(parts) == 1:
            # Only escaped dots remain: this is the final, literal key.
            path = _decode_attribute_map_key(parts[0])
            break
        head = _decode_attribute_map_key(parts[0])
        node = attribute_key_case_insensitive_extractor(head, None, node)
        if node is None:
            # A None at any flattened level means every leaf under it is None too.
            return None
        path = ".".join(parts[1:])

    if node:
        return attribute_key_case_insensitive_extractor(path, None, node)
+
+
def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract the attribute in "data" based on the last part of the JSON path key.

    :param str attr: The attribute to extract
    :param dict attr_desc: The attribute description
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted attribute
    """
    last_segment = _FLATTEN.split(attr_desc["key"])[-1]
    return attribute_key_extractor(last_segment, None, data)
+
+
def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
    """Extract the attribute in "data" based on the last part of the JSON path key.

    This is the case insensitive version of "last_rest_key_extractor".

    :param str attr: The attribute to extract
    :param dict attr_desc: The attribute description
    :param dict data: The data to extract from
    :rtype: object
    :returns: The extracted attribute
    """
    last_segment = _FLATTEN.split(attr_desc["key"])[-1]
    return attribute_key_case_insensitive_extractor(last_segment, None, data)
+
+
def attribute_key_extractor(attr, _, data):
    """Plain attribute-name lookup: return data[attr] if present, else None."""
    return data.get(attr)
+
+
def attribute_key_case_insensitive_extractor(attr, _, data):
    """Case-insensitively look up ``attr`` among the keys of ``data``.

    :returns: The value for the first key that matches ignoring case, or None.
    """
    wanted = attr.lower()
    for candidate in data:
        if candidate.lower() == wanted:
            return data.get(candidate)
    return None
+
+
+def _extract_name_from_internal_type(internal_type):
+ """Given an internal type XML description, extract correct XML name with namespace.
+
+ :param dict internal_type: An model type
+ :rtype: tuple
+ :returns: A tuple XML name + namespace dict
+ """
+ internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+ xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+ xml_ns = internal_type_xml_map.get("ns", None)
+ if xml_ns:
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ return xml_name
+
+
def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument,too-many-return-statements
    """Extract the value for an attribute from an XML Element, honoring the
    attribute's XML metadata (name, namespace, attr/text flags, wrapping).

    :param str attr: The attribute name (unused; metadata comes from attr_desc).
    :param dict attr_desc: The attribute description, with an optional "xml" map.
    :param object data: Candidate payload; only ET.Element is handled.
    :returns: The extracted node(s)/value, or None when absent or not XML.
    :raises DeserializationError: if the XML shape contradicts the description.
    """
    if isinstance(data, dict):
        return None

    # Test if this model is XML ready first
    if not isinstance(data, ET.Element):
        return None

    xml_desc = attr_desc.get("xml", {})
    xml_name = xml_desc.get("name", attr_desc["key"])

    # Look for a children
    is_iter_type = attr_desc["type"].startswith("[")
    is_wrapped = xml_desc.get("wrapped", False)
    internal_type = attr_desc.get("internalType", None)
    internal_type_xml_map = getattr(internal_type, "_xml_map", {})

    # Integrate namespace if necessary
    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
    if xml_ns:
        xml_name = "{{{}}}{}".format(xml_ns, xml_name)

    # If it's an attribute, that's simple
    if xml_desc.get("attr", False):
        return data.get(xml_name)

    # If it's x-ms-text, that's simple too
    if xml_desc.get("text", False):
        return data.text

    # Scenario where I take the local name:
    # - Wrapped node
    # - Internal type is an enum (considered basic types)
    # - Internal type has no XML/Name node
    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
        children = data.findall(xml_name)
    # If internal type has a local name and it's not a list, I use that name
    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
        xml_name = _extract_name_from_internal_type(internal_type)
        children = data.findall(xml_name)
    # That's an array
    else:
        if internal_type:  # Complex type, ignore itemsName and use the complex type name
            items_name = _extract_name_from_internal_type(internal_type)
        else:
            items_name = xml_desc.get("itemsName", xml_name)
        children = data.findall(items_name)

    if len(children) == 0:
        if is_iter_type:
            if is_wrapped:
                return None  # is_wrapped no node, we want None
            return []  # not wrapped, assume empty list
        return None  # Assume it's not there, maybe an optional node.

    # If is_iter_type and not wrapped, return all found children
    if is_iter_type:
        if not is_wrapped:
            return children
        # Iter and wrapped, should have found one node only (the wrap one)
        if len(children) != 1:
            raise DeserializationError(
                "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
                    xml_name
                )
            )
        return list(children[0])  # Might be empty list and that's ok.

    # Here it's not a itertype, we should have found one element only or empty
    if len(children) > 1:
        raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
    return children[0]
+
+
class Deserializer:
    """Response object model deserializer.

    :param dict classes: Class type dictionary for deserializing complex types.
    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
    """

    # Maps Python builtin types to the type-name strings used in attribute maps.
    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}

    # Loose ISO-8601 matcher used to recognize datetime-looking strings.
    valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
    def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
        """Build a deserializer.

        :param classes: Optional mapping of model class name to class, used to
            resolve model and enum dependencies during deserialization.
        """
        # Dispatch table from type-name string to deserializer callable.
        self.deserialize_type = {
            "iso-8601": Deserializer.deserialize_iso,
            "rfc-1123": Deserializer.deserialize_rfc,
            "unix-time": Deserializer.deserialize_unix,
            "duration": Deserializer.deserialize_duration,
            "date": Deserializer.deserialize_date,
            "time": Deserializer.deserialize_time,
            "decimal": Deserializer.deserialize_decimal,
            "long": Deserializer.deserialize_long,
            "bytearray": Deserializer.deserialize_bytearray,
            "base64": Deserializer.deserialize_base64,
            "object": self.deserialize_object,
            "[]": self.deserialize_iter,
            "{}": self.deserialize_dict,
        }
        # Types that are already acceptable as-is for a given wire format.
        self.deserialize_expected_types = {
            "duration": (isodate.Duration, datetime.timedelta),
            "iso-8601": (datetime.datetime),
        }
        self.dependencies: dict[str, type] = dict(classes) if classes else {}
        self.key_extractors = [rest_key_extractor, xml_key_extractor]
        # Additional properties only works if the "rest_key_extractor" is used to
        # extract the keys. Making it to work whatever the key extractor is too much
        # complicated, with no real scenario for now.
        # So adding a flag to disable additional properties detection. This flag should be
        # used if your expect the deserialization to NOT come from a JSON REST syntax.
        # Otherwise, result are unexpected
        self.additional_properties_detection = True
+
+ def __call__(self, target_obj, response_data, content_type=None):
+ """Call the deserializer to process a REST response.
+
+ :param str target_obj: Target data type to deserialize to.
+ :param requests.Response response_data: REST response object.
+ :param str content_type: Swagger "produces" if available.
+ :raises DeserializationError: if deserialization fails.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ data = self._unpack_content(response_data, content_type)
+ return self._deserialize(target_obj, data)
+
    def _deserialize(self, target_obj, data):  # pylint: disable=inconsistent-return-statements
        """Call the deserializer on a model.

        Data needs to be already deserialized as JSON or XML ElementTree

        :param str target_obj: Target data type to deserialize to.
        :param object data: Object to deserialize.
        :raises DeserializationError: if deserialization fails.
        :return: Deserialized object.
        :rtype: object
        """
        # This is already a model, go recursive just in case
        if hasattr(data, "_attribute_map"):
            constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
            try:
                for attr, mapconfig in data._attribute_map.items():  # pylint: disable=protected-access
                    if attr in constants:
                        continue
                    value = getattr(data, attr)
                    if value is None:
                        continue
                    local_type = mapconfig["type"]
                    internal_data_type = local_type.strip("[]{}")
                    # NOTE(review): isinstance(internal_data_type, Enum) checks the
                    # type-name *string* against Enum, which is always False — looks
                    # like it intended the resolved class; long-shipped, confirm
                    # before changing.
                    if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
                        continue
                    setattr(data, attr, self._deserialize(local_type, value))
                return data
            except AttributeError:
                # Not actually model-shaped after all: implicitly returns None.
                return

        # Resolve polymorphic targets to the concrete class to instantiate.
        response, class_name = self._classify_target(target_obj, data)

        if isinstance(response, str):
            return self.deserialize_data(data, response)
        if isinstance(response, type) and issubclass(response, Enum):
            return self.deserialize_enum(data, response)

        if data is None or data is CoreNull:
            return data
        try:
            attributes = response._attribute_map  # type: ignore # pylint: disable=protected-access
            d_attrs = {}
            for attr, attr_desc in attributes.items():
                # Check empty string. If it's not empty, someone has a real "additionalProperties"...
                if attr == "additional_properties" and attr_desc["key"] == "":
                    continue
                raw_value = None
                # Enhance attr_desc with some dynamic data
                attr_desc = attr_desc.copy()  # Do a copy, do not change the real one
                internal_data_type = attr_desc["type"].strip("[]{}")
                if internal_data_type in self.dependencies:
                    attr_desc["internalType"] = self.dependencies[internal_data_type]

                # First extractor to produce a value wins; later conflicting
                # extractions are logged and ignored.
                for key_extractor in self.key_extractors:
                    found_value = key_extractor(attr, attr_desc, data)
                    if found_value is not None:
                        if raw_value is not None and raw_value != found_value:
                            msg = (
                                "Ignoring extracted value '%s' from %s for key '%s'"
                                " (duplicate extraction, follow extractors order)"
                            )
                            _LOGGER.warning(msg, found_value, key_extractor, attr)
                            continue
                        raw_value = found_value

                value = self.deserialize_data(raw_value, attr_desc["type"])
                d_attrs[attr] = value
        except (AttributeError, TypeError, KeyError) as err:
            msg = "Unable to deserialize to object: " + class_name  # type: ignore
            raise DeserializationError(msg) from err
        # Keys present in the payload but absent from the attribute map.
        additional_properties = self._build_additional_properties(attributes, data)
        return self._instantiate_model(response, d_attrs, additional_properties)
+
+ def _build_additional_properties(self, attribute_map, data):
+ if not self.additional_properties_detection:
+ return None
+ if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
+ # Check empty string. If it's not empty, someone has a real "additionalProperties"
+ return None
+ if isinstance(data, ET.Element):
+ data = {el.tag: el.text for el in data}
+
+ known_keys = {
+ _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
+ for desc in attribute_map.values()
+ if desc["key"] != ""
+ }
+ present_keys = set(data.keys())
+ missing_keys = present_keys - known_keys
+ return {key: data[key] for key in missing_keys}
+
+ def _classify_target(self, target, data):
+ """Check to see whether the deserialization target object can
+ be classified into a subclass.
+ Once classification has been determined, initialize object.
+
+ :param str target: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
+ """
+ if target is None:
+ return None, None
+
+ if isinstance(target, str):
+ try:
+ target = self.dependencies[target]
+ except KeyError:
+ return target, target
+
+ try:
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
+ except AttributeError:
+ pass # Target is not a Model, no classify
+ return target, target.__class__.__name__ # type: ignore
+
+ def failsafe_deserialize(self, target_obj, data, content_type=None):
+ """Ignores any errors encountered in deserialization,
+ and falls back to not deserializing the object. Recommended
+ for use in error deserialization, as we want to return the
+ HttpResponseError to users, and not have them deal with
+ a deserialization error.
+
+ :param str target_obj: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ try:
+ return self(target_obj, data, content_type=content_type)
+ except: # pylint: disable=bare-except
+ _LOGGER.debug(
+ "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+ )
+ return None
+
+ @staticmethod
+ def _unpack_content(raw_data, content_type=None):
+ """Extract the correct structure for deserialization.
+
+ If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+ if we can't, raise. Your Pipeline should have a RawDeserializer.
+
+ If not a pipeline response and raw_data is bytes or string, use content-type
+ to decode it. If no content-type, try JSON.
+
+ If raw_data is something else, bypass all logic and return it directly.
+
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
+ :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+ :raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
+ """
+ # Assume this is enough to detect a Pipeline Response without importing it
+ context = getattr(raw_data, "context", {})
+ if context:
+ if RawDeserializer.CONTEXT_NAME in context:
+ return context[RawDeserializer.CONTEXT_NAME]
+ raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+ # Assume this is enough to recognize universal_http.ClientResponse without importing it
+ if hasattr(raw_data, "body"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+ # Assume this enough to recognize requests.Response without importing it.
+ if hasattr(raw_data, "_content_consumed"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+ return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
+ return raw_data
+
    def _instantiate_model(self, response, attrs, additional_properties=None):
        """Instantiate a response model passing in deserialized args.

        :param Response response: The response model class.
        :param dict attrs: The deserialized response attributes.
        :param dict additional_properties: Additional properties to be set.
        :rtype: Response
        :return: The instantiated response model.
        """
        # A class (callable) is constructed from attrs; an existing instance is
        # populated attribute-by-attribute in the else-branch instead.
        if callable(response):
            subtype = getattr(response, "_subtype_map", {})
            try:
                # Readonly and constant attributes are not constructor arguments.
                readonly = [
                    k
                    for k, v in response._validation.items()  # pylint: disable=protected-access # type: ignore
                    if v.get("readonly")
                ]
                const = [
                    k
                    for k, v in response._validation.items()  # pylint: disable=protected-access # type: ignore
                    if v.get("constant")
                ]
                kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
                response_obj = response(**kwargs)
                # Readonly values bypass the constructor and are set directly.
                for attr in readonly:
                    setattr(response_obj, attr, attrs.get(attr))
                if additional_properties:
                    response_obj.additional_properties = additional_properties  # type: ignore
                return response_obj
            except TypeError as err:
                # NOTE(review): if the TypeError fires before `kwargs` is bound
                # (e.g. during _validation access) this line raises NameError
                # instead — long-shipped behavior; confirm before changing.
                msg = "Unable to deserialize {} into model {}. ".format(kwargs, response)  # type: ignore
                raise DeserializationError(msg) from err
        else:
            try:
                for attr, value in attrs.items():
                    setattr(response, attr, value)
                return response
            except Exception as exp:
                msg = "Unable to populate response model. "
                msg += "Type: {}, Error: {}".format(type(response), exp)
                raise DeserializationError(msg) from exp
+
    def deserialize_data(self, data, data_type):  # pylint: disable=too-many-return-statements
        """Process data for deserialization according to data type.

        Dispatch order: basic builtins, registered format handlers (scalar
        formats plus "[]"/"{}"), composite "[type]"/"{type}" notation, then
        model classes / enums from ``self.dependencies``.

        :param str data: The response string to be deserialized.
        :param str data_type: The type to deserialize to.
        :raises DeserializationError: if deserialization fails.
        :return: Deserialized object.
        :rtype: object
        """
        if data is None:
            return data

        try:
            if not data_type:
                return data
            if data_type in self.basic_types.values():
                return self.deserialize_basic(data, data_type)
            if data_type in self.deserialize_type:
                # A value already of the expected Python type passes through.
                if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
                    return data

                is_a_text_parsing_type = lambda x: x not in [  # pylint: disable=unnecessary-lambda-assignment
                    "object",
                    "[]",
                    r"{}",
                ]
                # An empty XML node maps to None for text-parsed scalar formats.
                if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
                    return None
                data_val = self.deserialize_type[data_type](data)
                return data_val

            # Composite "[type]"/"{type}" notation: dispatch on first+last char.
            iter_type = data_type[0] + data_type[-1]
            if iter_type in self.deserialize_type:
                return self.deserialize_type[iter_type](data, data_type[1:-1])

            # NOTE(review): a KeyError here (unknown type name) is not converted
            # to DeserializationError — it propagates as-is.
            obj_type = self.dependencies[data_type]
            if issubclass(obj_type, Enum):
                if isinstance(data, ET.Element):
                    data = data.text
                return self.deserialize_enum(data, obj_type)

        except (ValueError, TypeError, AttributeError) as err:
            msg = "Unable to deserialize response data."
            msg += " Data: {}, {}".format(data, data_type)
            raise DeserializationError(msg) from err
        # Non-enum model class: recurse OUTSIDE the try so model-level failures
        # surface with their own error context.
        return self._deserialize(obj_type, data)
+
+ def deserialize_iter(self, attr, iter_type):
+ """Deserialize an iterable.
+
+ :param list attr: Iterable to be deserialized.
+ :param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
+ :rtype: list
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element): # If I receive an element here, get the children
+ attr = list(attr)
+ if not isinstance(attr, (list, set)):
+ raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
+ return [self.deserialize_data(a, iter_type) for a in attr]
+
+ def deserialize_dict(self, attr, dict_type):
+ """Deserialize a dictionary.
+
+ :param dict/list attr: Dictionary to be deserialized. Also accepts
+ a list of key, value pairs.
+ :param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
+ :rtype: dict
+ """
+ if isinstance(attr, list):
+ return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+ if isinstance(attr, ET.Element):
+ # Transform value into {"Key": "value"}
+ attr = {el.tag: el.text for el in attr}
+ return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
+ """Deserialize a generic object.
+ This will be handled as a dictionary.
+
+ :param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
+ :rtype: dict
+ :raises TypeError: if non-builtin datatype encountered.
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element):
+ # Do no recurse on XML, just return the tree as-is
+ return attr
+ if isinstance(attr, str):
+ return self.deserialize_basic(attr, "str")
+ obj_type = type(attr)
+ if obj_type in self.basic_types:
+ return self.deserialize_basic(attr, self.basic_types[obj_type])
+ if obj_type is _long_type:
+ return self.deserialize_long(attr)
+
+ if obj_type == dict:
+ deserialized = {}
+ for key, value in attr.items():
+ try:
+ deserialized[key] = self.deserialize_object(value, **kwargs)
+ except ValueError:
+ deserialized[key] = None
+ return deserialized
+
+ if obj_type == list:
+ deserialized = []
+ for obj in attr:
+ try:
+ deserialized.append(self.deserialize_object(obj, **kwargs))
+ except ValueError:
+ pass
+ return deserialized
+
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
+
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
+ """Deserialize basic builtin data type from string.
+ Will attempt to convert to str, int, float and bool.
+ This function will also accept '1', '0', 'true' and 'false' as
+ valid bool values.
+
+ :param str attr: response string to be deserialized.
+ :param str data_type: deserialization data type.
+ :return: Deserialized basic type.
+ :rtype: str, int, float or bool
+ :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool.
+ """
+ # If we're here, data is supposed to be a basic type.
+ # If it's still an XML node, take the text
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if not attr:
+ if data_type == "str":
+ # None or '', node is empty string.
+ return ""
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
+
+ if data_type == "bool":
+ if attr in [True, False, 1, 0]:
+ return bool(attr)
+ if isinstance(attr, str):
+ if attr.lower() in ["true", "1"]:
+ return True
+ if attr.lower() in ["false", "0"]:
+ return False
+ raise TypeError("Invalid boolean value: {}".format(attr))
+
+ if data_type == "str":
+ return self.deserialize_unicode(attr)
+ if data_type == "int":
+ return int(attr)
+ if data_type == "float":
+ return float(attr)
+ raise TypeError("Unknown basic data type: {}".format(data_type))
+
+ @staticmethod
+ def deserialize_unicode(data):
+ """Preserve unicode objects in Python 2, otherwise return data
+ as a string.
+
+ :param str data: response string to be deserialized.
+ :return: Deserialized string.
+ :rtype: str or unicode
+ """
+ # We might be here because we have an enum modeled as string,
+ # and we try to deserialize a partial dict with enum inside
+ if isinstance(data, Enum):
+ return data
+
+ # Consider this is real string
+ try:
+ if isinstance(data, unicode): # type: ignore
+ return data
+ except NameError:
+ return str(data)
+ return str(data)
+
+ @staticmethod
+ def deserialize_enum(data, enum_obj):
+ """Deserialize string into enum object.
+
+ If the string is not a valid enum value it will be returned as-is
+ and a warning will be logged.
+
+ :param str data: Response string to be deserialized. If this value is
+ None or invalid it will be returned as-is.
+ :param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
+ :rtype: Enum
+ """
+ if isinstance(data, enum_obj) or data is None:
+ return data
+ if isinstance(data, Enum):
+ data = data.value
+ if isinstance(data, int):
+ # Workaround. We might consider remove it in the future.
+ try:
+ return list(enum_obj.__members__.values())[data]
+ except IndexError as exc:
+ error = "{!r} is not a valid index for enum {!r}"
+ raise DeserializationError(error.format(data, enum_obj)) from exc
+ try:
+ return enum_obj(str(data))
+ except ValueError:
+ for enum_value in enum_obj:
+ if enum_value.value.lower() == str(data).lower():
+ return enum_value
+ # We don't fail anymore for unknown value, we deserialize as a string
+ _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+ return Deserializer.deserialize_unicode(data)
+
+ @staticmethod
+ def deserialize_bytearray(attr):
+ """Deserialize string into bytearray.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
+ :rtype: bytearray
+ :raises TypeError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return bytearray(b64decode(attr)) # type: ignore
+
+ @staticmethod
+ def deserialize_base64(attr):
+ """Deserialize base64 encoded string into string.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
+ :rtype: bytearray
+ :raises TypeError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
+ encoded = attr.replace("-", "+").replace("_", "/")
+ return b64decode(encoded)
+
+ @staticmethod
+ def deserialize_decimal(attr):
+ """Deserialize string into Decimal object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized decimal
+ :raises DeserializationError: if string format invalid.
+ :rtype: decimal
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ return decimal.Decimal(str(attr)) # type: ignore
+ except decimal.DecimalException as err:
+ msg = "Invalid decimal {}".format(attr)
+ raise DeserializationError(msg) from err
+
+ @staticmethod
+ def deserialize_long(attr):
+ """Deserialize string into long (Py2) or int (Py3).
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized int
+ :rtype: long or int
+ :raises ValueError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return _long_type(attr) # type: ignore
+
+ @staticmethod
+ def deserialize_duration(attr):
+ """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized duration
+ :rtype: TimeDelta
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ duration = isodate.parse_duration(attr)
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize duration object."
+ raise DeserializationError(msg) from err
+ return duration
+
+ @staticmethod
+ def deserialize_date(attr):
+ """Deserialize ISO-8601 formatted string into Date object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized date
+ :rtype: Date
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+ @staticmethod
+ def deserialize_time(attr):
+ """Deserialize ISO-8601 formatted string into time object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized time
+ :rtype: datetime.time
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ return isodate.parse_time(attr)
+
+ @staticmethod
+ def deserialize_rfc(attr):
+ """Deserialize RFC-1123 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
+ :rtype: Datetime
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ parsed_date = email.utils.parsedate_tz(attr) # type: ignore
+ date_obj = datetime.datetime(
+ *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+ )
+ if not date_obj.tzinfo:
+ date_obj = date_obj.astimezone(tz=TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to rfc datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
+
    @staticmethod
    def deserialize_iso(attr):
        """Deserialize ISO-8601 formatted string into Datetime object.

        :param str attr: response string to be deserialized.
        :return: Deserialized ISO datetime
        :rtype: Datetime
        :raises DeserializationError: if string format invalid.
        """
        if isinstance(attr, ET.Element):
            attr = attr.text
        try:
            attr = attr.upper()  # type: ignore
            # Cheap shape check before handing off to isodate; the class-level
            # regex is deliberately permissive (see valid_date above).
            match = Deserializer.valid_date.match(attr)
            if not match:
                raise ValueError("Invalid datetime string: " + attr)

            # Trim fractional seconds to at most 6 digits — datetime only
            # supports microsecond precision.
            check_decimal = attr.split(".")
            if len(check_decimal) > 1:
                decimal_str = ""
                for digit in check_decimal[1]:
                    if digit.isdigit():
                        decimal_str += digit
                    else:
                        break
                if len(decimal_str) > 6:
                    attr = attr.replace(decimal_str, decimal_str[0:6])

            date_obj = isodate.parse_datetime(attr)
            test_utc = date_obj.utctimetuple()
            # Reject years outside the range datetime can round-trip.
            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
                raise OverflowError("Hit max or min date")
        except (ValueError, OverflowError, AttributeError) as err:
            msg = "Cannot deserialize datetime object."
            raise DeserializationError(msg) from err
        return date_obj
+
+ @staticmethod
+ def deserialize_unix(attr):
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param int attr: Object to be serialized.
+ :return: Deserialized datetime
+ :rtype: Datetime
+ :raises DeserializationError: if format invalid
+ """
+ if isinstance(attr, ET.Element):
+ attr = int(attr.text) # type: ignore
+ try:
+ attr = int(attr)
+ date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to unix datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/__init__.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/__init__.py
new file mode 100644
index 000000000000..c0391d76526d
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/__init__.py
@@ -0,0 +1,872 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models import ( # type: ignore
+ A2APreviewTool,
+ A2AToolCall,
+ A2AToolCallOutput,
+ AISearchIndexResource,
+ AgentId,
+ AgentReference,
+ Annotation,
+ ApiErrorResponse,
+ ApplyPatchCreateFileOperation,
+ ApplyPatchCreateFileOperationParam,
+ ApplyPatchDeleteFileOperation,
+ ApplyPatchDeleteFileOperationParam,
+ ApplyPatchFileOperation,
+ ApplyPatchOperationParam,
+ ApplyPatchToolCallItemParam,
+ ApplyPatchToolCallOutputItemParam,
+ ApplyPatchToolParam,
+ ApplyPatchUpdateFileOperation,
+ ApplyPatchUpdateFileOperationParam,
+ ApproximateLocation,
+ AutoCodeInterpreterToolParam,
+ AzureAISearchTool,
+ AzureAISearchToolCall,
+ AzureAISearchToolCallOutput,
+ AzureAISearchToolResource,
+ AzureFunctionBinding,
+ AzureFunctionDefinition,
+ AzureFunctionDefinitionFunction,
+ AzureFunctionStorageQueue,
+ AzureFunctionTool,
+ AzureFunctionToolCall,
+ AzureFunctionToolCallOutput,
+ BingCustomSearchConfiguration,
+ BingCustomSearchPreviewTool,
+ BingCustomSearchToolCall,
+ BingCustomSearchToolCallOutput,
+ BingCustomSearchToolParameters,
+ BingGroundingSearchConfiguration,
+ BingGroundingSearchToolParameters,
+ BingGroundingTool,
+ BingGroundingToolCall,
+ BingGroundingToolCallOutput,
+ BrowserAutomationPreviewTool,
+ BrowserAutomationToolCall,
+ BrowserAutomationToolCallOutput,
+ BrowserAutomationToolConnectionParameters,
+ BrowserAutomationToolParameters,
+ CaptureStructuredOutputsTool,
+ ChatSummaryMemoryItem,
+ ClickParam,
+ CodeInterpreterOutputImage,
+ CodeInterpreterOutputLogs,
+ CodeInterpreterTool,
+ CompactResource,
+ CompactionSummaryItemParam,
+ ComparisonFilter,
+ CompoundFilter,
+ ComputerAction,
+ ComputerCallOutputItemParam,
+ ComputerCallSafetyCheckParam,
+ ComputerScreenshotContent,
+ ComputerScreenshotImage,
+ ComputerUsePreviewTool,
+ ContainerAutoParam,
+ ContainerFileCitationBody,
+ ContainerNetworkPolicyAllowlistParam,
+ ContainerNetworkPolicyDisabledParam,
+ ContainerNetworkPolicyDomainSecretParam,
+ ContainerNetworkPolicyParam,
+ ContainerReferenceResource,
+ ContainerSkill,
+ ContextManagementParam,
+ ConversationParam_2,
+ ConversationReference,
+ CoordParam,
+ CreateResponse,
+ CreatedBy,
+ CustomGrammarFormatParam,
+ CustomTextFormatParam,
+ CustomToolParam,
+ CustomToolParamFormat,
+ DeleteResponseResult,
+ DoubleClickAction,
+ DragParam,
+ Error,
+ FabricDataAgentToolCall,
+ FabricDataAgentToolCallOutput,
+ FabricDataAgentToolParameters,
+ FileCitationBody,
+ FilePath,
+ FileSearchTool,
+ FileSearchToolCallResults,
+ FunctionAndCustomToolCallOutput,
+ FunctionAndCustomToolCallOutputInputFileContent,
+ FunctionAndCustomToolCallOutputInputImageContent,
+ FunctionAndCustomToolCallOutputInputTextContent,
+ FunctionCallOutputItemParam,
+ FunctionShellAction,
+ FunctionShellActionParam,
+ FunctionShellCallEnvironment,
+ FunctionShellCallItemParam,
+ FunctionShellCallItemParamEnvironment,
+ FunctionShellCallItemParamEnvironmentContainerReferenceParam,
+ FunctionShellCallItemParamEnvironmentLocalEnvironmentParam,
+ FunctionShellCallOutputContent,
+ FunctionShellCallOutputContentParam,
+ FunctionShellCallOutputExitOutcome,
+ FunctionShellCallOutputExitOutcomeParam,
+ FunctionShellCallOutputItemParam,
+ FunctionShellCallOutputOutcome,
+ FunctionShellCallOutputOutcomeParam,
+ FunctionShellCallOutputTimeoutOutcome,
+ FunctionShellCallOutputTimeoutOutcomeParam,
+ FunctionShellToolParam,
+ FunctionShellToolParamEnvironment,
+ FunctionShellToolParamEnvironmentContainerReferenceParam,
+ FunctionShellToolParamEnvironmentLocalEnvironmentParam,
+ FunctionTool,
+ HybridSearchOptions,
+ ImageGenTool,
+ ImageGenToolInputImageMask,
+ InlineSkillParam,
+ InlineSkillSourceParam,
+ InputFileContent,
+ InputFileContentParam,
+ InputImageContent,
+ InputImageContentParamAutoParam,
+ InputTextContent,
+ InputTextContentParam,
+ Item,
+ ItemCodeInterpreterToolCall,
+ ItemComputerToolCall,
+ ItemCustomToolCall,
+ ItemCustomToolCallOutput,
+ ItemField,
+ ItemFieldApplyPatchToolCall,
+ ItemFieldApplyPatchToolCallOutput,
+ ItemFieldCodeInterpreterToolCall,
+ ItemFieldCompactionBody,
+ ItemFieldComputerToolCall,
+ ItemFieldComputerToolCallOutput,
+ ItemFieldCustomToolCall,
+ ItemFieldCustomToolCallOutput,
+ ItemFieldFileSearchToolCall,
+ ItemFieldFunctionShellCall,
+ ItemFieldFunctionShellCallOutput,
+ ItemFieldFunctionToolCall,
+ ItemFieldFunctionToolCallOutput,
+ ItemFieldImageGenToolCall,
+ ItemFieldLocalShellToolCall,
+ ItemFieldLocalShellToolCallOutput,
+ ItemFieldMcpApprovalRequest,
+ ItemFieldMcpApprovalResponseResource,
+ ItemFieldMcpListTools,
+ ItemFieldMcpToolCall,
+ ItemFieldMessage,
+ ItemFieldReasoningItem,
+ ItemFieldWebSearchToolCall,
+ ItemFileSearchToolCall,
+ ItemFunctionToolCall,
+ ItemImageGenToolCall,
+ ItemLocalShellToolCall,
+ ItemLocalShellToolCallOutput,
+ ItemMcpApprovalRequest,
+ ItemMcpListTools,
+ ItemMcpToolCall,
+ ItemMessage,
+ ItemOutputMessage,
+ ItemReasoningItem,
+ ItemReferenceParam,
+ ItemWebSearchToolCall,
+ KeyPressAction,
+ LocalEnvironmentResource,
+ LocalShellExecAction,
+ LocalShellToolParam,
+ LocalSkillParam,
+ LogProb,
+ MCPApprovalResponse,
+ MCPListToolsTool,
+ MCPListToolsToolAnnotations,
+ MCPListToolsToolInputSchema,
+ MCPTool,
+ MCPToolFilter,
+ MCPToolRequireApproval,
+ MemoryItem,
+ MemorySearchItem,
+ MemorySearchOptions,
+ MemorySearchPreviewTool,
+ MemorySearchTool,
+ MemorySearchToolCallItemParam,
+ MemorySearchToolCallItemResource,
+ MessageContent,
+ MessageContentInputFileContent,
+ MessageContentInputImageContent,
+ MessageContentInputTextContent,
+ MessageContentOutputTextContent,
+ MessageContentReasoningTextContent,
+ MessageContentRefusalContent,
+ Metadata,
+ MicrosoftFabricPreviewTool,
+ MoveParam,
+ OAuthConsentRequestOutputItem,
+ OpenApiAnonymousAuthDetails,
+ OpenApiAuthDetails,
+ OpenApiFunctionDefinition,
+ OpenApiFunctionDefinitionFunction,
+ OpenApiManagedAuthDetails,
+ OpenApiManagedSecurityScheme,
+ OpenApiProjectConnectionAuthDetails,
+ OpenApiProjectConnectionSecurityScheme,
+ OpenApiTool,
+ OpenApiToolCall,
+ OpenApiToolCallOutput,
+ OutputContent,
+ OutputContentOutputTextContent,
+ OutputContentReasoningTextContent,
+ OutputContentRefusalContent,
+ OutputItem,
+ OutputItemApplyPatchToolCall,
+ OutputItemApplyPatchToolCallOutput,
+ OutputItemCodeInterpreterToolCall,
+ OutputItemCompactionBody,
+ OutputItemComputerToolCall,
+ OutputItemComputerToolCallOutput,
+ OutputItemCustomToolCall,
+ OutputItemCustomToolCallOutput,
+ OutputItemFileSearchToolCall,
+ OutputItemFunctionShellCall,
+ OutputItemFunctionShellCallOutput,
+ OutputItemFunctionToolCall,
+ OutputItemFunctionToolCallOutput,
+ OutputItemImageGenToolCall,
+ OutputItemLocalShellToolCall,
+ OutputItemLocalShellToolCallOutput,
+ OutputItemMcpApprovalRequest,
+ OutputItemMcpApprovalResponseResource,
+ OutputItemMcpListTools,
+ OutputItemMcpToolCall,
+ OutputItemMessage,
+ OutputItemOutputMessage,
+ OutputItemReasoningItem,
+ OutputItemWebSearchToolCall,
+ OutputMessageContent,
+ OutputMessageContentOutputTextContent,
+ OutputMessageContentRefusalContent,
+ Prompt,
+ RankingOptions,
+ RealtimeMCPError,
+ RealtimeMCPHTTPError,
+ RealtimeMCPProtocolError,
+ RealtimeMCPToolExecutionError,
+ Reasoning,
+ ReasoningTextContent,
+ Response,
+ ResponseAudioDeltaEvent,
+ ResponseAudioDoneEvent,
+ ResponseAudioTranscriptDeltaEvent,
+ ResponseAudioTranscriptDoneEvent,
+ ResponseCodeInterpreterCallCodeDeltaEvent,
+ ResponseCodeInterpreterCallCodeDoneEvent,
+ ResponseCodeInterpreterCallCompletedEvent,
+ ResponseCodeInterpreterCallInProgressEvent,
+ ResponseCodeInterpreterCallInterpretingEvent,
+ ResponseCompletedEvent,
+ ResponseContentPartAddedEvent,
+ ResponseContentPartDoneEvent,
+ ResponseCreatedEvent,
+ ResponseCustomToolCallInputDeltaEvent,
+ ResponseCustomToolCallInputDoneEvent,
+ ResponseError,
+ ResponseErrorEvent,
+ ResponseFailedEvent,
+ ResponseFileSearchCallCompletedEvent,
+ ResponseFileSearchCallInProgressEvent,
+ ResponseFileSearchCallSearchingEvent,
+ ResponseFormatJsonSchemaSchema,
+ ResponseFunctionCallArgumentsDeltaEvent,
+ ResponseFunctionCallArgumentsDoneEvent,
+ ResponseImageGenCallCompletedEvent,
+ ResponseImageGenCallGeneratingEvent,
+ ResponseImageGenCallInProgressEvent,
+ ResponseImageGenCallPartialImageEvent,
+ ResponseInProgressEvent,
+ ResponseIncompleteDetails,
+ ResponseIncompleteEvent,
+ ResponseLogProb,
+ ResponseLogProbTopLogprobs,
+ ResponseMCPCallArgumentsDeltaEvent,
+ ResponseMCPCallArgumentsDoneEvent,
+ ResponseMCPCallCompletedEvent,
+ ResponseMCPCallFailedEvent,
+ ResponseMCPCallInProgressEvent,
+ ResponseMCPListToolsCompletedEvent,
+ ResponseMCPListToolsFailedEvent,
+ ResponseMCPListToolsInProgressEvent,
+ ResponseOutputItemAddedEvent,
+ ResponseOutputItemDoneEvent,
+ ResponseOutputTextAnnotationAddedEvent,
+ ResponsePromptVariables,
+ ResponseQueuedEvent,
+ ResponseReasoningSummaryPartAddedEvent,
+ ResponseReasoningSummaryPartAddedEventPart,
+ ResponseReasoningSummaryPartDoneEvent,
+ ResponseReasoningSummaryPartDoneEventPart,
+ ResponseReasoningSummaryTextDeltaEvent,
+ ResponseReasoningSummaryTextDoneEvent,
+ ResponseReasoningTextDeltaEvent,
+ ResponseReasoningTextDoneEvent,
+ ResponseRefusalDeltaEvent,
+ ResponseRefusalDoneEvent,
+ ResponseStreamEvent,
+ ResponseStreamOptions,
+ ResponseTextDeltaEvent,
+ ResponseTextDoneEvent,
+ ResponseTextParam,
+ ResponseUsage,
+ ResponseUsageInputTokensDetails,
+ ResponseUsageOutputTokensDetails,
+ ResponseWebSearchCallCompletedEvent,
+ ResponseWebSearchCallInProgressEvent,
+ ResponseWebSearchCallSearchingEvent,
+ ScreenshotParam,
+ ScrollParam,
+ SharepointGroundingToolCall,
+ SharepointGroundingToolCallOutput,
+ SharepointGroundingToolParameters,
+ SharepointPreviewTool,
+ SkillReferenceParam,
+ SpecificApplyPatchParam,
+ SpecificFunctionShellParam,
+ StructuredOutputDefinition,
+ StructuredOutputsOutputItem,
+ SummaryTextContent,
+ TextContent,
+ TextResponseFormatConfiguration,
+ TextResponseFormatConfigurationResponseFormatJsonObject,
+ TextResponseFormatConfigurationResponseFormatText,
+ TextResponseFormatJsonSchema,
+ Tool,
+ ToolChoiceAllowed,
+ ToolChoiceCodeInterpreter,
+ ToolChoiceComputerUsePreview,
+ ToolChoiceCustom,
+ ToolChoiceFileSearch,
+ ToolChoiceFunction,
+ ToolChoiceImageGeneration,
+ ToolChoiceMCP,
+ ToolChoiceParam,
+ ToolChoiceWebSearchPreview,
+ ToolChoiceWebSearchPreview20250311,
+ ToolProjectConnection,
+ TopLogProb,
+ TypeParam,
+ UrlCitationBody,
+ UserProfileMemoryItem,
+ VectorStoreFileAttributes,
+ WaitParam,
+ WebSearchActionFind,
+ WebSearchActionOpenPage,
+ WebSearchActionSearch,
+ WebSearchActionSearchSources,
+ WebSearchApproximateLocation,
+ WebSearchConfiguration,
+ WebSearchPreviewTool,
+ WebSearchTool,
+ WebSearchToolFilters,
+ WorkflowActionOutputItem,
+)
+
+from ._enums import ( # type: ignore
+ AnnotationType,
+ ApplyPatchCallOutputStatus,
+ ApplyPatchCallOutputStatusParam,
+ ApplyPatchCallStatus,
+ ApplyPatchCallStatusParam,
+ ApplyPatchFileOperationType,
+ ApplyPatchOperationParamType,
+ AzureAISearchQueryType,
+ ClickButtonType,
+ ComputerActionType,
+ ComputerEnvironment,
+ ContainerMemoryLimit,
+ ContainerNetworkPolicyParamType,
+ ContainerSkillType,
+ CustomToolParamFormatType,
+ DetailEnum,
+ FunctionAndCustomToolCallOutputType,
+ FunctionCallItemStatus,
+ FunctionShellCallEnvironmentType,
+ FunctionShellCallItemParamEnvironmentType,
+ FunctionShellCallItemStatus,
+ FunctionShellCallOutputOutcomeParamType,
+ FunctionShellCallOutputOutcomeType,
+ FunctionShellToolParamEnvironmentType,
+ GrammarSyntax1,
+ ImageDetail,
+ ImageGenActionEnum,
+ IncludeEnum,
+ InputFidelity,
+ ItemFieldType,
+ ItemType,
+ LocalShellCallOutputStatusEnum,
+ LocalShellCallStatus,
+ MCPToolCallStatus,
+ MemoryItemKind,
+ MessageContentType,
+ MessagePhase,
+ MessageRole,
+ MessageStatus,
+ ModelIdsCompaction,
+ OpenApiAuthType,
+ OutputContentType,
+ OutputItemType,
+ OutputMessageContentType,
+ PageOrder,
+ RankerVersionType,
+ RealtimeMcpErrorType,
+ ResponseErrorCode,
+ ResponseStreamEventType,
+ SearchContextSize,
+ TextResponseFormatConfigurationType,
+ ToolCallStatus,
+ ToolChoiceOptions,
+ ToolChoiceParamType,
+ ToolType,
+)
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "A2APreviewTool",
+ "A2AToolCall",
+ "A2AToolCallOutput",
+ "AISearchIndexResource",
+ "AgentId",
+ "AgentReference",
+ "Annotation",
+ "ApiErrorResponse",
+ "ApplyPatchCreateFileOperation",
+ "ApplyPatchCreateFileOperationParam",
+ "ApplyPatchDeleteFileOperation",
+ "ApplyPatchDeleteFileOperationParam",
+ "ApplyPatchFileOperation",
+ "ApplyPatchOperationParam",
+ "ApplyPatchToolCallItemParam",
+ "ApplyPatchToolCallOutputItemParam",
+ "ApplyPatchToolParam",
+ "ApplyPatchUpdateFileOperation",
+ "ApplyPatchUpdateFileOperationParam",
+ "ApproximateLocation",
+ "AutoCodeInterpreterToolParam",
+ "AzureAISearchTool",
+ "AzureAISearchToolCall",
+ "AzureAISearchToolCallOutput",
+ "AzureAISearchToolResource",
+ "AzureFunctionBinding",
+ "AzureFunctionDefinition",
+ "AzureFunctionDefinitionFunction",
+ "AzureFunctionStorageQueue",
+ "AzureFunctionTool",
+ "AzureFunctionToolCall",
+ "AzureFunctionToolCallOutput",
+ "BingCustomSearchConfiguration",
+ "BingCustomSearchPreviewTool",
+ "BingCustomSearchToolCall",
+ "BingCustomSearchToolCallOutput",
+ "BingCustomSearchToolParameters",
+ "BingGroundingSearchConfiguration",
+ "BingGroundingSearchToolParameters",
+ "BingGroundingTool",
+ "BingGroundingToolCall",
+ "BingGroundingToolCallOutput",
+ "BrowserAutomationPreviewTool",
+ "BrowserAutomationToolCall",
+ "BrowserAutomationToolCallOutput",
+ "BrowserAutomationToolConnectionParameters",
+ "BrowserAutomationToolParameters",
+ "CaptureStructuredOutputsTool",
+ "ChatSummaryMemoryItem",
+ "ClickParam",
+ "CodeInterpreterOutputImage",
+ "CodeInterpreterOutputLogs",
+ "CodeInterpreterTool",
+ "CompactResource",
+ "CompactionSummaryItemParam",
+ "ComparisonFilter",
+ "CompoundFilter",
+ "ComputerAction",
+ "ComputerCallOutputItemParam",
+ "ComputerCallSafetyCheckParam",
+ "ComputerScreenshotContent",
+ "ComputerScreenshotImage",
+ "ComputerUsePreviewTool",
+ "ContainerAutoParam",
+ "ContainerFileCitationBody",
+ "ContainerNetworkPolicyAllowlistParam",
+ "ContainerNetworkPolicyDisabledParam",
+ "ContainerNetworkPolicyDomainSecretParam",
+ "ContainerNetworkPolicyParam",
+ "ContainerReferenceResource",
+ "ContainerSkill",
+ "ContextManagementParam",
+ "ConversationParam_2",
+ "ConversationReference",
+ "CoordParam",
+ "CreateResponse",
+ "CreatedBy",
+ "CustomGrammarFormatParam",
+ "CustomTextFormatParam",
+ "CustomToolParam",
+ "CustomToolParamFormat",
+ "DeleteResponseResult",
+ "DoubleClickAction",
+ "DragParam",
+ "Error",
+ "FabricDataAgentToolCall",
+ "FabricDataAgentToolCallOutput",
+ "FabricDataAgentToolParameters",
+ "FileCitationBody",
+ "FilePath",
+ "FileSearchTool",
+ "FileSearchToolCallResults",
+ "FunctionAndCustomToolCallOutput",
+ "FunctionAndCustomToolCallOutputInputFileContent",
+ "FunctionAndCustomToolCallOutputInputImageContent",
+ "FunctionAndCustomToolCallOutputInputTextContent",
+ "FunctionCallOutputItemParam",
+ "FunctionShellAction",
+ "FunctionShellActionParam",
+ "FunctionShellCallEnvironment",
+ "FunctionShellCallItemParam",
+ "FunctionShellCallItemParamEnvironment",
+ "FunctionShellCallItemParamEnvironmentContainerReferenceParam",
+ "FunctionShellCallItemParamEnvironmentLocalEnvironmentParam",
+ "FunctionShellCallOutputContent",
+ "FunctionShellCallOutputContentParam",
+ "FunctionShellCallOutputExitOutcome",
+ "FunctionShellCallOutputExitOutcomeParam",
+ "FunctionShellCallOutputItemParam",
+ "FunctionShellCallOutputOutcome",
+ "FunctionShellCallOutputOutcomeParam",
+ "FunctionShellCallOutputTimeoutOutcome",
+ "FunctionShellCallOutputTimeoutOutcomeParam",
+ "FunctionShellToolParam",
+ "FunctionShellToolParamEnvironment",
+ "FunctionShellToolParamEnvironmentContainerReferenceParam",
+ "FunctionShellToolParamEnvironmentLocalEnvironmentParam",
+ "FunctionTool",
+ "HybridSearchOptions",
+ "ImageGenTool",
+ "ImageGenToolInputImageMask",
+ "InlineSkillParam",
+ "InlineSkillSourceParam",
+ "InputFileContent",
+ "InputFileContentParam",
+ "InputImageContent",
+ "InputImageContentParamAutoParam",
+ "InputTextContent",
+ "InputTextContentParam",
+ "Item",
+ "ItemCodeInterpreterToolCall",
+ "ItemComputerToolCall",
+ "ItemCustomToolCall",
+ "ItemCustomToolCallOutput",
+ "ItemField",
+ "ItemFieldApplyPatchToolCall",
+ "ItemFieldApplyPatchToolCallOutput",
+ "ItemFieldCodeInterpreterToolCall",
+ "ItemFieldCompactionBody",
+ "ItemFieldComputerToolCall",
+ "ItemFieldComputerToolCallOutput",
+ "ItemFieldCustomToolCall",
+ "ItemFieldCustomToolCallOutput",
+ "ItemFieldFileSearchToolCall",
+ "ItemFieldFunctionShellCall",
+ "ItemFieldFunctionShellCallOutput",
+ "ItemFieldFunctionToolCall",
+ "ItemFieldFunctionToolCallOutput",
+ "ItemFieldImageGenToolCall",
+ "ItemFieldLocalShellToolCall",
+ "ItemFieldLocalShellToolCallOutput",
+ "ItemFieldMcpApprovalRequest",
+ "ItemFieldMcpApprovalResponseResource",
+ "ItemFieldMcpListTools",
+ "ItemFieldMcpToolCall",
+ "ItemFieldMessage",
+ "ItemFieldReasoningItem",
+ "ItemFieldWebSearchToolCall",
+ "ItemFileSearchToolCall",
+ "ItemFunctionToolCall",
+ "ItemImageGenToolCall",
+ "ItemLocalShellToolCall",
+ "ItemLocalShellToolCallOutput",
+ "ItemMcpApprovalRequest",
+ "ItemMcpListTools",
+ "ItemMcpToolCall",
+ "ItemMessage",
+ "ItemOutputMessage",
+ "ItemReasoningItem",
+ "ItemReferenceParam",
+ "ItemWebSearchToolCall",
+ "KeyPressAction",
+ "LocalEnvironmentResource",
+ "LocalShellExecAction",
+ "LocalShellToolParam",
+ "LocalSkillParam",
+ "LogProb",
+ "MCPApprovalResponse",
+ "MCPListToolsTool",
+ "MCPListToolsToolAnnotations",
+ "MCPListToolsToolInputSchema",
+ "MCPTool",
+ "MCPToolFilter",
+ "MCPToolRequireApproval",
+ "MemoryItem",
+ "MemorySearchItem",
+ "MemorySearchOptions",
+ "MemorySearchPreviewTool",
+ "MemorySearchTool",
+ "MemorySearchToolCallItemParam",
+ "MemorySearchToolCallItemResource",
+ "MessageContent",
+ "MessageContentInputFileContent",
+ "MessageContentInputImageContent",
+ "MessageContentInputTextContent",
+ "MessageContentOutputTextContent",
+ "MessageContentReasoningTextContent",
+ "MessageContentRefusalContent",
+ "Metadata",
+ "MicrosoftFabricPreviewTool",
+ "MoveParam",
+ "OAuthConsentRequestOutputItem",
+ "OpenApiAnonymousAuthDetails",
+ "OpenApiAuthDetails",
+ "OpenApiFunctionDefinition",
+ "OpenApiFunctionDefinitionFunction",
+ "OpenApiManagedAuthDetails",
+ "OpenApiManagedSecurityScheme",
+ "OpenApiProjectConnectionAuthDetails",
+ "OpenApiProjectConnectionSecurityScheme",
+ "OpenApiTool",
+ "OpenApiToolCall",
+ "OpenApiToolCallOutput",
+ "OutputContent",
+ "OutputContentOutputTextContent",
+ "OutputContentReasoningTextContent",
+ "OutputContentRefusalContent",
+ "OutputItem",
+ "OutputItemApplyPatchToolCall",
+ "OutputItemApplyPatchToolCallOutput",
+ "OutputItemCodeInterpreterToolCall",
+ "OutputItemCompactionBody",
+ "OutputItemComputerToolCall",
+ "OutputItemComputerToolCallOutput",
+ "OutputItemCustomToolCall",
+ "OutputItemCustomToolCallOutput",
+ "OutputItemFileSearchToolCall",
+ "OutputItemFunctionShellCall",
+ "OutputItemFunctionShellCallOutput",
+ "OutputItemFunctionToolCall",
+ "OutputItemFunctionToolCallOutput",
+ "OutputItemImageGenToolCall",
+ "OutputItemLocalShellToolCall",
+ "OutputItemLocalShellToolCallOutput",
+ "OutputItemMcpApprovalRequest",
+ "OutputItemMcpApprovalResponseResource",
+ "OutputItemMcpListTools",
+ "OutputItemMcpToolCall",
+ "OutputItemMessage",
+ "OutputItemOutputMessage",
+ "OutputItemReasoningItem",
+ "OutputItemWebSearchToolCall",
+ "OutputMessageContent",
+ "OutputMessageContentOutputTextContent",
+ "OutputMessageContentRefusalContent",
+ "Prompt",
+ "RankingOptions",
+ "RealtimeMCPError",
+ "RealtimeMCPHTTPError",
+ "RealtimeMCPProtocolError",
+ "RealtimeMCPToolExecutionError",
+ "Reasoning",
+ "ReasoningTextContent",
+ "Response",
+ "ResponseAudioDeltaEvent",
+ "ResponseAudioDoneEvent",
+ "ResponseAudioTranscriptDeltaEvent",
+ "ResponseAudioTranscriptDoneEvent",
+ "ResponseCodeInterpreterCallCodeDeltaEvent",
+ "ResponseCodeInterpreterCallCodeDoneEvent",
+ "ResponseCodeInterpreterCallCompletedEvent",
+ "ResponseCodeInterpreterCallInProgressEvent",
+ "ResponseCodeInterpreterCallInterpretingEvent",
+ "ResponseCompletedEvent",
+ "ResponseContentPartAddedEvent",
+ "ResponseContentPartDoneEvent",
+ "ResponseCreatedEvent",
+ "ResponseCustomToolCallInputDeltaEvent",
+ "ResponseCustomToolCallInputDoneEvent",
+ "ResponseError",
+ "ResponseErrorEvent",
+ "ResponseFailedEvent",
+ "ResponseFileSearchCallCompletedEvent",
+ "ResponseFileSearchCallInProgressEvent",
+ "ResponseFileSearchCallSearchingEvent",
+ "ResponseFormatJsonSchemaSchema",
+ "ResponseFunctionCallArgumentsDeltaEvent",
+ "ResponseFunctionCallArgumentsDoneEvent",
+ "ResponseImageGenCallCompletedEvent",
+ "ResponseImageGenCallGeneratingEvent",
+ "ResponseImageGenCallInProgressEvent",
+ "ResponseImageGenCallPartialImageEvent",
+ "ResponseInProgressEvent",
+ "ResponseIncompleteDetails",
+ "ResponseIncompleteEvent",
+ "ResponseLogProb",
+ "ResponseLogProbTopLogprobs",
+ "ResponseMCPCallArgumentsDeltaEvent",
+ "ResponseMCPCallArgumentsDoneEvent",
+ "ResponseMCPCallCompletedEvent",
+ "ResponseMCPCallFailedEvent",
+ "ResponseMCPCallInProgressEvent",
+ "ResponseMCPListToolsCompletedEvent",
+ "ResponseMCPListToolsFailedEvent",
+ "ResponseMCPListToolsInProgressEvent",
+ "ResponseOutputItemAddedEvent",
+ "ResponseOutputItemDoneEvent",
+ "ResponseOutputTextAnnotationAddedEvent",
+ "ResponsePromptVariables",
+ "ResponseQueuedEvent",
+ "ResponseReasoningSummaryPartAddedEvent",
+ "ResponseReasoningSummaryPartAddedEventPart",
+ "ResponseReasoningSummaryPartDoneEvent",
+ "ResponseReasoningSummaryPartDoneEventPart",
+ "ResponseReasoningSummaryTextDeltaEvent",
+ "ResponseReasoningSummaryTextDoneEvent",
+ "ResponseReasoningTextDeltaEvent",
+ "ResponseReasoningTextDoneEvent",
+ "ResponseRefusalDeltaEvent",
+ "ResponseRefusalDoneEvent",
+ "ResponseStreamEvent",
+ "ResponseStreamOptions",
+ "ResponseTextDeltaEvent",
+ "ResponseTextDoneEvent",
+ "ResponseTextParam",
+ "ResponseUsage",
+ "ResponseUsageInputTokensDetails",
+ "ResponseUsageOutputTokensDetails",
+ "ResponseWebSearchCallCompletedEvent",
+ "ResponseWebSearchCallInProgressEvent",
+ "ResponseWebSearchCallSearchingEvent",
+ "ScreenshotParam",
+ "ScrollParam",
+ "SharepointGroundingToolCall",
+ "SharepointGroundingToolCallOutput",
+ "SharepointGroundingToolParameters",
+ "SharepointPreviewTool",
+ "SkillReferenceParam",
+ "SpecificApplyPatchParam",
+ "SpecificFunctionShellParam",
+ "StructuredOutputDefinition",
+ "StructuredOutputsOutputItem",
+ "SummaryTextContent",
+ "TextContent",
+ "TextResponseFormatConfiguration",
+ "TextResponseFormatConfigurationResponseFormatJsonObject",
+ "TextResponseFormatConfigurationResponseFormatText",
+ "TextResponseFormatJsonSchema",
+ "Tool",
+ "ToolChoiceAllowed",
+ "ToolChoiceCodeInterpreter",
+ "ToolChoiceComputerUsePreview",
+ "ToolChoiceCustom",
+ "ToolChoiceFileSearch",
+ "ToolChoiceFunction",
+ "ToolChoiceImageGeneration",
+ "ToolChoiceMCP",
+ "ToolChoiceParam",
+ "ToolChoiceWebSearchPreview",
+ "ToolChoiceWebSearchPreview20250311",
+ "ToolProjectConnection",
+ "TopLogProb",
+ "TypeParam",
+ "UrlCitationBody",
+ "UserProfileMemoryItem",
+ "VectorStoreFileAttributes",
+ "WaitParam",
+ "WebSearchActionFind",
+ "WebSearchActionOpenPage",
+ "WebSearchActionSearch",
+ "WebSearchActionSearchSources",
+ "WebSearchApproximateLocation",
+ "WebSearchConfiguration",
+ "WebSearchPreviewTool",
+ "WebSearchTool",
+ "WebSearchToolFilters",
+ "WorkflowActionOutputItem",
+ "AnnotationType",
+ "ApplyPatchCallOutputStatus",
+ "ApplyPatchCallOutputStatusParam",
+ "ApplyPatchCallStatus",
+ "ApplyPatchCallStatusParam",
+ "ApplyPatchFileOperationType",
+ "ApplyPatchOperationParamType",
+ "AzureAISearchQueryType",
+ "ClickButtonType",
+ "ComputerActionType",
+ "ComputerEnvironment",
+ "ContainerMemoryLimit",
+ "ContainerNetworkPolicyParamType",
+ "ContainerSkillType",
+ "CustomToolParamFormatType",
+ "DetailEnum",
+ "FunctionAndCustomToolCallOutputType",
+ "FunctionCallItemStatus",
+ "FunctionShellCallEnvironmentType",
+ "FunctionShellCallItemParamEnvironmentType",
+ "FunctionShellCallItemStatus",
+ "FunctionShellCallOutputOutcomeParamType",
+ "FunctionShellCallOutputOutcomeType",
+ "FunctionShellToolParamEnvironmentType",
+ "GrammarSyntax1",
+ "ImageDetail",
+ "ImageGenActionEnum",
+ "IncludeEnum",
+ "InputFidelity",
+ "ItemFieldType",
+ "ItemType",
+ "LocalShellCallOutputStatusEnum",
+ "LocalShellCallStatus",
+ "MCPToolCallStatus",
+ "MemoryItemKind",
+ "MessageContentType",
+ "MessagePhase",
+ "MessageRole",
+ "MessageStatus",
+ "ModelIdsCompaction",
+ "OpenApiAuthType",
+ "OutputContentType",
+ "OutputItemType",
+ "OutputMessageContentType",
+ "PageOrder",
+ "RankerVersionType",
+ "RealtimeMcpErrorType",
+ "ResponseErrorCode",
+ "ResponseStreamEventType",
+ "SearchContextSize",
+ "TextResponseFormatConfigurationType",
+ "ToolCallStatus",
+ "ToolChoiceOptions",
+ "ToolChoiceParamType",
+ "ToolType",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+_patch_sdk() # invoke ._patch.patch_sdk() so customizations from ._patch are applied at import time
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_enums.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_enums.py
new file mode 100644
index 000000000000..0d91e8de8442
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_enums.py
@@ -0,0 +1,1253 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known annotation types: file, URL, and container-file citations, and file paths."""
+
+ FILE_CITATION = "file_citation"
+ """FILE_CITATION."""
+ URL_CITATION = "url_citation"
+ """URL_CITATION."""
+ CONTAINER_FILE_CITATION = "container_file_citation"
+ """CONTAINER_FILE_CITATION."""
+ FILE_PATH = "file_path"
+ """FILE_PATH."""
+
+
+class ApplyPatchCallOutputStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known statuses of an apply-patch call output: completed or failed."""
+
+ COMPLETED = "completed"
+ """COMPLETED."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class ApplyPatchCallOutputStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Apply patch call output status: completed or failed."""
+
+ COMPLETED = "completed"
+ """COMPLETED."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class ApplyPatchCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known statuses of an apply-patch call: in progress or completed."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+
+
+class ApplyPatchCallStatusParam(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Apply patch call status: in progress or completed."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+
+
+class ApplyPatchFileOperationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known apply-patch file operations: create, delete, or update a file."""
+
+ CREATE_FILE = "create_file"
+ """CREATE_FILE."""
+ DELETE_FILE = "delete_file"
+ """DELETE_FILE."""
+ UPDATE_FILE = "update_file"
+ """UPDATE_FILE."""
+
+
+class ApplyPatchOperationParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known apply-patch operation parameter types: create, delete, or update a file."""
+
+ CREATE_FILE = "create_file"
+ """CREATE_FILE."""
+ DELETE_FILE = "delete_file"
+ """DELETE_FILE."""
+ UPDATE_FILE = "update_file"
+ """UPDATE_FILE."""
+
+
+class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Query types available to the Azure AI Search tool: simple, semantic, vector, and the two hybrid variants."""
+
+ SIMPLE = "simple"
+ """Query type ``simple``."""
+ SEMANTIC = "semantic"
+ """Query type ``semantic``."""
+ VECTOR = "vector"
+ """Query type ``vector``."""
+ VECTOR_SIMPLE_HYBRID = "vector_simple_hybrid"
+ """Query type ``vector_simple_hybrid``."""
+ VECTOR_SEMANTIC_HYBRID = "vector_semantic_hybrid"
+ """Query type ``vector_semantic_hybrid``."""
+
+
+class ClickButtonType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Mouse buttons for a click action: left, right, wheel, back, or forward."""
+
+ LEFT = "left"
+ """LEFT."""
+ RIGHT = "right"
+ """RIGHT."""
+ WHEEL = "wheel"
+ """WHEEL."""
+ BACK = "back"
+ """BACK."""
+ FORWARD = "forward"
+ """FORWARD."""
+
+
+class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known computer action types: click, double-click, drag, keypress, move, screenshot, scroll, type, and wait."""
+
+ CLICK = "click"
+ """CLICK."""
+ DOUBLE_CLICK = "double_click"
+ """DOUBLE_CLICK."""
+ DRAG = "drag"
+ """DRAG."""
+ KEYPRESS = "keypress"
+ """KEYPRESS."""
+ MOVE = "move"
+ """MOVE."""
+ SCREENSHOT = "screenshot"
+ """SCREENSHOT."""
+ SCROLL = "scroll"
+ """SCROLL."""
+ TYPE = "type"
+ """TYPE."""
+ WAIT = "wait"
+ """WAIT."""
+
+
+class ComputerEnvironment(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known computer environments: windows, mac, linux, ubuntu, or browser."""
+
+ WINDOWS = "windows"
+ """WINDOWS."""
+ MAC = "mac"
+ """MAC."""
+ LINUX = "linux"
+ """LINUX."""
+ UBUNTU = "ubuntu"
+ """UBUNTU."""
+ BROWSER = "browser"
+ """BROWSER."""
+
+
+class ContainerMemoryLimit(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Selectable container memory limits: ``1g``, ``4g``, ``16g``, or ``64g``."""
+
+ ENUM_1_G = "1g"
+ """1_G."""
+ ENUM_4_G = "4g"
+ """4_G."""
+ ENUM_16_G = "16g"
+ """16_G."""
+ ENUM_64_G = "64g"
+ """64_G."""
+
+
+class ContainerNetworkPolicyParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Container network policy modes: disabled or allowlist."""
+
+ DISABLED = "disabled"
+ """DISABLED."""
+ ALLOWLIST = "allowlist"
+ """ALLOWLIST."""
+
+
+class ContainerSkillType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """How a container skill is supplied: by skill reference or inline."""
+
+ SKILL_REFERENCE = "skill_reference"
+ """SKILL_REFERENCE."""
+ INLINE = "inline"
+ """INLINE."""
+
+
+class CustomToolParamFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Custom tool format types: text or grammar."""
+
+ TEXT = "text"
+ """TEXT."""
+ GRAMMAR = "grammar"
+ """GRAMMAR."""
+
+
+class DetailEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Detail levels: low, high, or auto."""
+
+ LOW = "low"
+ """LOW."""
+ HIGH = "high"
+ """HIGH."""
+ AUTO = "auto"
+ """AUTO."""
+
+
+class FunctionAndCustomToolCallOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Content types in a function/custom tool call output: input text, input image, or input file."""
+
+ INPUT_TEXT = "input_text"
+ """INPUT_TEXT."""
+ INPUT_IMAGE = "input_image"
+ """INPUT_IMAGE."""
+ INPUT_FILE = "input_file"
+ """INPUT_FILE."""
+
+
+class FunctionCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known statuses of a function call item: in progress, completed, or incomplete."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class FunctionShellCallEnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Shell call environment types: local or container reference."""
+
+ LOCAL = "local"
+ """LOCAL."""
+ CONTAINER_REFERENCE = "container_reference"
+ """CONTAINER_REFERENCE."""
+
+
+class FunctionShellCallItemParamEnvironmentType( # pylint: disable=name-too-long
+ str, Enum, metaclass=CaseInsensitiveEnumMeta
+):
+ """Shell call item parameter environment types: local or container reference."""
+
+ LOCAL = "local"
+ """LOCAL."""
+ CONTAINER_REFERENCE = "container_reference"
+ """CONTAINER_REFERENCE."""
+
+
+class FunctionShellCallItemStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Shell call status: in progress, completed, or incomplete."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class FunctionShellCallOutputOutcomeParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Shell call output outcome parameter types: timeout or exit."""
+
+ TIMEOUT = "timeout"
+ """TIMEOUT."""
+ EXIT = "exit"
+ """EXIT."""
+
+
+class FunctionShellCallOutputOutcomeType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Shell call output outcome types: timeout or exit."""
+
+ TIMEOUT = "timeout"
+ """TIMEOUT."""
+ EXIT = "exit"
+ """EXIT."""
+
+
+class FunctionShellToolParamEnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Shell tool environment types: auto container, local, or container reference."""
+
+ CONTAINER_AUTO = "container_auto"
+ """CONTAINER_AUTO."""
+ LOCAL = "local"
+ """LOCAL."""
+ CONTAINER_REFERENCE = "container_reference"
+ """CONTAINER_REFERENCE."""
+
+
+class GrammarSyntax1(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Supported grammar syntaxes: lark or regex."""
+
+ LARK = "lark"
+ """LARK."""
+ REGEX = "regex"
+ """REGEX."""
+
+
+class ImageDetail(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Image detail levels: low, high, or auto."""
+
+ LOW = "low"
+ """LOW."""
+ HIGH = "high"
+ """HIGH."""
+ AUTO = "auto"
+ """AUTO."""
+
+
+class ImageGenActionEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Image generation actions: generate, edit, or auto."""
+
+ GENERATE = "generate"
+ """GENERATE."""
+ EDIT = "edit"
+ """EDIT."""
+ AUTO = "auto"
+ """AUTO."""
+
+
+class IncludeEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Specify additional output data to include in the model response. Currently supported values
+ are:
+
+ * `web_search_call.action.sources`: Include the sources of the web search tool call.
+ * `code_interpreter_call.outputs`: Includes the outputs of python code execution in code
+ interpreter tool call items.
+ * `computer_call_output.output.image_url`: Include image urls from the computer call output.
+ * `file_search_call.results`: Include the search results of the file search tool call.
+ * `message.input_image.image_url`: Include image urls from the input message.
+ * `message.output_text.logprobs`: Include logprobs with assistant messages.
+ * `reasoning.encrypted_content`: Includes an encrypted version of reasoning tokens in reasoning
+ item outputs. This enables reasoning items to be used in multi-turn conversations when using
+ the Responses API statelessly (like when the `store` parameter is set to `false`, or when an
+ organization is enrolled in the zero data retention program).
+ """
+
+ FILE_SEARCH_CALL_RESULTS = "file_search_call.results"
+ """Include the search results of the file search tool call."""
+ WEB_SEARCH_CALL_RESULTS = "web_search_call.results"
+ """WEB_SEARCH_CALL_RESULTS."""
+ WEB_SEARCH_CALL_ACTION_SOURCES = "web_search_call.action.sources"
+ """Include the sources of the web search tool call."""
+ MESSAGE_INPUT_IMAGE_IMAGE_URL = "message.input_image.image_url"
+ """Include image urls from the input message."""
+ COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = "computer_call_output.output.image_url"
+ """Include image urls from the computer call output."""
+ CODE_INTERPRETER_CALL_OUTPUTS = "code_interpreter_call.outputs"
+ """Include the outputs of python code execution in code interpreter tool call items."""
+ REASONING_ENCRYPTED_CONTENT = "reasoning.encrypted_content"
+ """Include an encrypted version of reasoning tokens in reasoning item outputs."""
+ MESSAGE_OUTPUT_TEXT_LOGPROBS = "message.output_text.logprobs"
+ """Include logprobs with assistant messages."""
+ MEMORY_SEARCH_CALL_RESULTS = "memory_search_call.results"
+ """MEMORY_SEARCH_CALL_RESULTS."""
+
+
+class InputFidelity(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Control how much effort the model will exert to match the style and features, especially facial
+ features, of input images. This parameter is only supported for ``gpt-image-1`` and
+ ``gpt-image-1.5`` and later models, unsupported for ``gpt-image-1-mini``. Supports ``high`` and
+ ``low``. Defaults to ``low``.
+ """
+
+ HIGH = "high"
+ """High input fidelity."""
+ LOW = "low"
+ """Low input fidelity (the default)."""
+
+
+class ItemFieldType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known item field types: messages, tool calls and their outputs, reasoning, compaction, and MCP items."""
+
+ MESSAGE = "message"
+ """MESSAGE."""
+ FUNCTION_CALL = "function_call"
+ """FUNCTION_CALL."""
+ FUNCTION_CALL_OUTPUT = "function_call_output"
+ """FUNCTION_CALL_OUTPUT."""
+ FILE_SEARCH_CALL = "file_search_call"
+ """FILE_SEARCH_CALL."""
+ WEB_SEARCH_CALL = "web_search_call"
+ """WEB_SEARCH_CALL."""
+ IMAGE_GENERATION_CALL = "image_generation_call"
+ """IMAGE_GENERATION_CALL."""
+ COMPUTER_CALL = "computer_call"
+ """COMPUTER_CALL."""
+ COMPUTER_CALL_OUTPUT = "computer_call_output"
+ """COMPUTER_CALL_OUTPUT."""
+ REASONING = "reasoning"
+ """REASONING."""
+ COMPACTION = "compaction"
+ """COMPACTION."""
+ CODE_INTERPRETER_CALL = "code_interpreter_call"
+ """CODE_INTERPRETER_CALL."""
+ LOCAL_SHELL_CALL = "local_shell_call"
+ """LOCAL_SHELL_CALL."""
+ LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output"
+ """LOCAL_SHELL_CALL_OUTPUT."""
+ SHELL_CALL = "shell_call"
+ """SHELL_CALL."""
+ SHELL_CALL_OUTPUT = "shell_call_output"
+ """SHELL_CALL_OUTPUT."""
+ APPLY_PATCH_CALL = "apply_patch_call"
+ """APPLY_PATCH_CALL."""
+ APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output"
+ """APPLY_PATCH_CALL_OUTPUT."""
+ MCP_LIST_TOOLS = "mcp_list_tools"
+ """MCP_LIST_TOOLS."""
+ MCP_APPROVAL_REQUEST = "mcp_approval_request"
+ """MCP_APPROVAL_REQUEST."""
+ MCP_APPROVAL_RESPONSE = "mcp_approval_response"
+ """MCP_APPROVAL_RESPONSE."""
+ MCP_CALL = "mcp_call"
+ """MCP_CALL."""
+ CUSTOM_TOOL_CALL = "custom_tool_call"
+ """CUSTOM_TOOL_CALL."""
+ CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output"
+ """CUSTOM_TOOL_CALL_OUTPUT."""
+
+
+class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Known item types, spanning messages, reasoning, tool calls and outputs, and service-specific calls such as Azure AI Search, Bing grounding, and Azure Functions."""
+
+ MESSAGE = "message"
+ """MESSAGE."""
+ OUTPUT_MESSAGE = "output_message"
+ """OUTPUT_MESSAGE."""
+ FILE_SEARCH_CALL = "file_search_call"
+ """FILE_SEARCH_CALL."""
+ COMPUTER_CALL = "computer_call"
+ """COMPUTER_CALL."""
+ COMPUTER_CALL_OUTPUT = "computer_call_output"
+ """COMPUTER_CALL_OUTPUT."""
+ WEB_SEARCH_CALL = "web_search_call"
+ """WEB_SEARCH_CALL."""
+ FUNCTION_CALL = "function_call"
+ """FUNCTION_CALL."""
+ FUNCTION_CALL_OUTPUT = "function_call_output"
+ """FUNCTION_CALL_OUTPUT."""
+ REASONING = "reasoning"
+ """REASONING."""
+ COMPACTION = "compaction"
+ """COMPACTION."""
+ IMAGE_GENERATION_CALL = "image_generation_call"
+ """IMAGE_GENERATION_CALL."""
+ CODE_INTERPRETER_CALL = "code_interpreter_call"
+ """CODE_INTERPRETER_CALL."""
+ LOCAL_SHELL_CALL = "local_shell_call"
+ """LOCAL_SHELL_CALL."""
+ LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output"
+ """LOCAL_SHELL_CALL_OUTPUT."""
+ SHELL_CALL = "shell_call"
+ """SHELL_CALL."""
+ SHELL_CALL_OUTPUT = "shell_call_output"
+ """SHELL_CALL_OUTPUT."""
+ APPLY_PATCH_CALL = "apply_patch_call"
+ """APPLY_PATCH_CALL."""
+ APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output"
+ """APPLY_PATCH_CALL_OUTPUT."""
+ MCP_LIST_TOOLS = "mcp_list_tools"
+ """MCP_LIST_TOOLS."""
+ MCP_APPROVAL_REQUEST = "mcp_approval_request"
+ """MCP_APPROVAL_REQUEST."""
+ MCP_APPROVAL_RESPONSE = "mcp_approval_response"
+ """MCP_APPROVAL_RESPONSE."""
+ MCP_CALL = "mcp_call"
+ """MCP_CALL."""
+ CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output"
+ """CUSTOM_TOOL_CALL_OUTPUT."""
+ CUSTOM_TOOL_CALL = "custom_tool_call"
+ """CUSTOM_TOOL_CALL."""
+ ITEM_REFERENCE = "item_reference"
+ """ITEM_REFERENCE."""
+ STRUCTURED_OUTPUTS = "structured_outputs"
+ """STRUCTURED_OUTPUTS."""
+ OAUTH_CONSENT_REQUEST = "oauth_consent_request"
+ """OAUTH_CONSENT_REQUEST."""
+ MEMORY_SEARCH_CALL = "memory_search_call"
+ """MEMORY_SEARCH_CALL."""
+ WORKFLOW_ACTION = "workflow_action"
+ """WORKFLOW_ACTION."""
+ A2_A_PREVIEW_CALL = "a2a_preview_call"
+ """A2_A_PREVIEW_CALL."""
+ A2_A_PREVIEW_CALL_OUTPUT = "a2a_preview_call_output"
+ """A2_A_PREVIEW_CALL_OUTPUT."""
+ BING_GROUNDING_CALL = "bing_grounding_call"
+ """BING_GROUNDING_CALL."""
+ BING_GROUNDING_CALL_OUTPUT = "bing_grounding_call_output"
+ """BING_GROUNDING_CALL_OUTPUT."""
+ SHAREPOINT_GROUNDING_PREVIEW_CALL = "sharepoint_grounding_preview_call"
+ """SHAREPOINT_GROUNDING_PREVIEW_CALL."""
+ SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT = "sharepoint_grounding_preview_call_output"
+ """SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT."""
+ AZURE_AI_SEARCH_CALL = "azure_ai_search_call"
+ """AZURE_AI_SEARCH_CALL."""
+ AZURE_AI_SEARCH_CALL_OUTPUT = "azure_ai_search_call_output"
+ """AZURE_AI_SEARCH_CALL_OUTPUT."""
+ BING_CUSTOM_SEARCH_PREVIEW_CALL = "bing_custom_search_preview_call"
+ """BING_CUSTOM_SEARCH_PREVIEW_CALL."""
+ BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT = "bing_custom_search_preview_call_output"
+ """BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT."""
+ OPENAPI_CALL = "openapi_call"
+ """OPENAPI_CALL."""
+ OPENAPI_CALL_OUTPUT = "openapi_call_output"
+ """OPENAPI_CALL_OUTPUT."""
+ BROWSER_AUTOMATION_PREVIEW_CALL = "browser_automation_preview_call"
+ """BROWSER_AUTOMATION_PREVIEW_CALL."""
+ BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT = "browser_automation_preview_call_output"
+ """BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT."""
+ FABRIC_DATAAGENT_PREVIEW_CALL = "fabric_dataagent_preview_call"
+ """FABRIC_DATAAGENT_PREVIEW_CALL."""
+ FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT = "fabric_dataagent_preview_call_output"
+ """FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT."""
+ AZURE_FUNCTION_CALL = "azure_function_call"
+ """AZURE_FUNCTION_CALL."""
+ AZURE_FUNCTION_CALL_OUTPUT = "azure_function_call_output"
+ """AZURE_FUNCTION_CALL_OUTPUT."""
+
+
+class LocalShellCallOutputStatusEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Lifecycle status values for a local shell call output item."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class LocalShellCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Lifecycle status values for a local shell call item."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class MCPToolCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Lifecycle status values for an MCP (Model Context Protocol) tool call."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+ CALLING = "calling"
+ """CALLING."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Kind of memory item extracted and stored from conversations."""
+
+ USER_PROFILE = "user_profile"
+ """User profile information extracted from conversations."""
+ CHAT_SUMMARY = "chat_summary"
+ """Summary of chat conversations."""
+
+
+class MessageContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of a message content part."""
+
+ INPUT_TEXT = "input_text"
+ """INPUT_TEXT."""
+ OUTPUT_TEXT = "output_text"
+ """OUTPUT_TEXT."""
+ TEXT = "text"
+ """TEXT."""
+ SUMMARY_TEXT = "summary_text"
+ """SUMMARY_TEXT."""
+ REASONING_TEXT = "reasoning_text"
+ """REASONING_TEXT."""
+ REFUSAL = "refusal"
+ """REFUSAL."""
+ INPUT_IMAGE = "input_image"
+ """INPUT_IMAGE."""
+ COMPUTER_SCREENSHOT = "computer_screenshot"
+ """COMPUTER_SCREENSHOT."""
+ INPUT_FILE = "input_file"
+ """INPUT_FILE."""
+
+
+class MessagePhase(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Labels an ``assistant`` message as intermediate commentary (``commentary``) or the final
+ answer (``final_answer``). Not used for user messages.
+
+ Use ``commentary`` for an intermediate assistant message and ``final_answer`` for the final
+ assistant message. For follow-up requests with models like ``gpt-5.3-codex`` and beyond,
+ preserve and resend the phase on all assistant messages sent back to the model; dropping or
+ omitting it can degrade performance.
+ """
+
+ COMMENTARY = "commentary"
+ """COMMENTARY."""
+ FINAL_ANSWER = "final_answer"
+ """FINAL_ANSWER."""
+
+
+class MessageRole(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Role of the author of a message (e.g. ``user``, ``assistant``, ``system``)."""
+
+ UNKNOWN = "unknown"
+ """UNKNOWN."""
+ USER = "user"
+ """USER."""
+ ASSISTANT = "assistant"
+ """ASSISTANT."""
+ SYSTEM = "system"
+ """SYSTEM."""
+ CRITIC = "critic"
+ """CRITIC."""
+ DISCRIMINATOR = "discriminator"
+ """DISCRIMINATOR."""
+ DEVELOPER = "developer"
+ """DEVELOPER."""
+ TOOL = "tool"
+ """TOOL."""
+
+
+class MessageStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Lifecycle status values for a message item."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+
+
+class ModelIdsCompaction(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Model ID used to generate the response, like ``gpt-5`` or ``o3``. OpenAI offers a wide range of
+ models with different capabilities, performance characteristics, and price points. Refer to the
+ `model guide <https://platform.openai.com/docs/models>`_ to browse and compare available models.
+ """
+
+ GPT5_2 = "gpt-5.2"
+ """GPT5_2."""
+ GPT5_2_2025_12_11 = "gpt-5.2-2025-12-11"
+ """GPT5_2_2025_12_11."""
+ GPT5_2_CHAT_LATEST = "gpt-5.2-chat-latest"
+ """GPT5_2_CHAT_LATEST."""
+ GPT5_2_PRO = "gpt-5.2-pro"
+ """GPT5_2_PRO."""
+ GPT5_2_PRO2025_12_11 = "gpt-5.2-pro-2025-12-11"
+ """GPT5_2_PRO2025_12_11."""
+ GPT5_1 = "gpt-5.1"
+ """GPT5_1."""
+ GPT5_1_2025_11_13 = "gpt-5.1-2025-11-13"
+ """GPT5_1_2025_11_13."""
+ GPT5_1_CODEX = "gpt-5.1-codex"
+ """GPT5_1_CODEX."""
+ GPT5_1_MINI = "gpt-5.1-mini"
+ """GPT5_1_MINI."""
+ GPT5_1_CHAT_LATEST = "gpt-5.1-chat-latest"
+ """GPT5_1_CHAT_LATEST."""
+ GPT5 = "gpt-5"
+ """GPT5."""
+ GPT5_MINI = "gpt-5-mini"
+ """GPT5_MINI."""
+ GPT5_NANO = "gpt-5-nano"
+ """GPT5_NANO."""
+ GPT5_2025_08_07 = "gpt-5-2025-08-07"
+ """GPT5_2025_08_07."""
+ GPT5_MINI2025_08_07 = "gpt-5-mini-2025-08-07"
+ """GPT5_MINI2025_08_07."""
+ GPT5_NANO2025_08_07 = "gpt-5-nano-2025-08-07"
+ """GPT5_NANO2025_08_07."""
+ GPT5_CHAT_LATEST = "gpt-5-chat-latest"
+ """GPT5_CHAT_LATEST."""
+ GPT4_1 = "gpt-4.1"
+ """GPT4_1."""
+ GPT4_1_MINI = "gpt-4.1-mini"
+ """GPT4_1_MINI."""
+ GPT4_1_NANO = "gpt-4.1-nano"
+ """GPT4_1_NANO."""
+ GPT4_1_2025_04_14 = "gpt-4.1-2025-04-14"
+ """GPT4_1_2025_04_14."""
+ GPT4_1_MINI2025_04_14 = "gpt-4.1-mini-2025-04-14"
+ """GPT4_1_MINI2025_04_14."""
+ GPT4_1_NANO2025_04_14 = "gpt-4.1-nano-2025-04-14"
+ """GPT4_1_NANO2025_04_14."""
+ O4_MINI = "o4-mini"
+ """O4_MINI."""
+ O4_MINI2025_04_16 = "o4-mini-2025-04-16"
+ """O4_MINI2025_04_16."""
+ O3 = "o3"
+ """O3."""
+ O3_2025_04_16 = "o3-2025-04-16"
+ """O3_2025_04_16."""
+ O3_MINI = "o3-mini"
+ """O3_MINI."""
+ O3_MINI2025_01_31 = "o3-mini-2025-01-31"
+ """O3_MINI2025_01_31."""
+ O1 = "o1"
+ """O1."""
+ O1_2024_12_17 = "o1-2024-12-17"
+ """O1_2024_12_17."""
+ O1_PREVIEW = "o1-preview"
+ """O1_PREVIEW."""
+ O1_PREVIEW2024_09_12 = "o1-preview-2024-09-12"
+ """O1_PREVIEW2024_09_12."""
+ O1_MINI = "o1-mini"
+ """O1_MINI."""
+ O1_MINI2024_09_12 = "o1-mini-2024-09-12"
+ """O1_MINI2024_09_12."""
+ GPT4_O = "gpt-4o"
+ """GPT4_O."""
+ GPT4_O2024_11_20 = "gpt-4o-2024-11-20"
+ """GPT4_O2024_11_20."""
+ GPT4_O2024_08_06 = "gpt-4o-2024-08-06"
+ """GPT4_O2024_08_06."""
+ GPT4_O2024_05_13 = "gpt-4o-2024-05-13"
+ """GPT4_O2024_05_13."""
+ GPT4_O_AUDIO_PREVIEW = "gpt-4o-audio-preview"
+ """GPT4_O_AUDIO_PREVIEW."""
+ GPT4_O_AUDIO_PREVIEW2024_10_01 = "gpt-4o-audio-preview-2024-10-01"
+ """GPT4_O_AUDIO_PREVIEW2024_10_01."""
+ GPT4_O_AUDIO_PREVIEW2024_12_17 = "gpt-4o-audio-preview-2024-12-17"
+ """GPT4_O_AUDIO_PREVIEW2024_12_17."""
+ GPT4_O_AUDIO_PREVIEW2025_06_03 = "gpt-4o-audio-preview-2025-06-03"
+ """GPT4_O_AUDIO_PREVIEW2025_06_03."""
+ GPT4_O_MINI_AUDIO_PREVIEW = "gpt-4o-mini-audio-preview"
+ """GPT4_O_MINI_AUDIO_PREVIEW."""
+ GPT4_O_MINI_AUDIO_PREVIEW2024_12_17 = "gpt-4o-mini-audio-preview-2024-12-17"
+ """GPT4_O_MINI_AUDIO_PREVIEW2024_12_17."""
+ GPT4_O_SEARCH_PREVIEW = "gpt-4o-search-preview"
+ """GPT4_O_SEARCH_PREVIEW."""
+ GPT4_O_MINI_SEARCH_PREVIEW = "gpt-4o-mini-search-preview"
+ """GPT4_O_MINI_SEARCH_PREVIEW."""
+ GPT4_O_SEARCH_PREVIEW2025_03_11 = "gpt-4o-search-preview-2025-03-11"
+ """GPT4_O_SEARCH_PREVIEW2025_03_11."""
+ GPT4_O_MINI_SEARCH_PREVIEW2025_03_11 = "gpt-4o-mini-search-preview-2025-03-11"
+ """GPT4_O_MINI_SEARCH_PREVIEW2025_03_11."""
+ CHATGPT4_O_LATEST = "chatgpt-4o-latest"
+ """CHATGPT4_O_LATEST."""
+ CODEX_MINI_LATEST = "codex-mini-latest"
+ """CODEX_MINI_LATEST."""
+ GPT4_O_MINI = "gpt-4o-mini"
+ """GPT4_O_MINI."""
+ GPT4_O_MINI2024_07_18 = "gpt-4o-mini-2024-07-18"
+ """GPT4_O_MINI2024_07_18."""
+ GPT4_TURBO = "gpt-4-turbo"
+ """GPT4_TURBO."""
+ GPT4_TURBO2024_04_09 = "gpt-4-turbo-2024-04-09"
+ """GPT4_TURBO2024_04_09."""
+ GPT4_0125_PREVIEW = "gpt-4-0125-preview"
+ """GPT4_0125_PREVIEW."""
+ GPT4_TURBO_PREVIEW = "gpt-4-turbo-preview"
+ """GPT4_TURBO_PREVIEW."""
+ GPT4_1106_PREVIEW = "gpt-4-1106-preview"
+ """GPT4_1106_PREVIEW."""
+ GPT4_VISION_PREVIEW = "gpt-4-vision-preview"
+ """GPT4_VISION_PREVIEW."""
+ GPT4 = "gpt-4"
+ """GPT4."""
+ GPT4_0314 = "gpt-4-0314"
+ """GPT4_0314."""
+ GPT4_0613 = "gpt-4-0613"
+ """GPT4_0613."""
+ GPT4_32_K = "gpt-4-32k"
+ """GPT4_32_K."""
+ GPT4_32_K0314 = "gpt-4-32k-0314"
+ """GPT4_32_K0314."""
+ GPT4_32_K0613 = "gpt-4-32k-0613"
+ """GPT4_32_K0613."""
+ GPT3_5_TURBO = "gpt-3.5-turbo"
+ """GPT3_5_TURBO."""
+ GPT3_5_TURBO16_K = "gpt-3.5-turbo-16k"
+ """GPT3_5_TURBO16_K."""
+ GPT3_5_TURBO0301 = "gpt-3.5-turbo-0301"
+ """GPT3_5_TURBO0301."""
+ GPT3_5_TURBO0613 = "gpt-3.5-turbo-0613"
+ """GPT3_5_TURBO0613."""
+ GPT3_5_TURBO1106 = "gpt-3.5-turbo-1106"
+ """GPT3_5_TURBO1106."""
+ GPT3_5_TURBO0125 = "gpt-3.5-turbo-0125"
+ """GPT3_5_TURBO0125."""
+ GPT3_5_TURBO16_K0613 = "gpt-3.5-turbo-16k-0613"
+ """GPT3_5_TURBO16_K0613."""
+ O1_PRO = "o1-pro"
+ """O1_PRO."""
+ O1_PRO2025_03_19 = "o1-pro-2025-03-19"
+ """O1_PRO2025_03_19."""
+ O3_PRO = "o3-pro"
+ """O3_PRO."""
+ O3_PRO2025_06_10 = "o3-pro-2025-06-10"
+ """O3_PRO2025_06_10."""
+ O3_DEEP_RESEARCH = "o3-deep-research"
+ """O3_DEEP_RESEARCH."""
+ O3_DEEP_RESEARCH2025_06_26 = "o3-deep-research-2025-06-26"
+ """O3_DEEP_RESEARCH2025_06_26."""
+ O4_MINI_DEEP_RESEARCH = "o4-mini-deep-research"
+ """O4_MINI_DEEP_RESEARCH."""
+ O4_MINI_DEEP_RESEARCH2025_06_26 = "o4-mini-deep-research-2025-06-26"
+ """O4_MINI_DEEP_RESEARCH2025_06_26."""
+ COMPUTER_USE_PREVIEW = "computer-use-preview"
+ """COMPUTER_USE_PREVIEW."""
+ COMPUTER_USE_PREVIEW2025_03_11 = "computer-use-preview-2025-03-11"
+ """COMPUTER_USE_PREVIEW2025_03_11."""
+ GPT5_CODEX = "gpt-5-codex"
+ """GPT5_CODEX."""
+ GPT5_PRO = "gpt-5-pro"
+ """GPT5_PRO."""
+ GPT5_PRO2025_10_06 = "gpt-5-pro-2025-10-06"
+ """GPT5_PRO2025_10_06."""
+ GPT5_1_CODEX_MAX = "gpt-5.1-codex-max"
+ """GPT5_1_CODEX_MAX."""
+
+
+class OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Authentication type for an OpenAPI endpoint. Allowed types are:
+
+ * ``anonymous`` -- no authentication required.
+ * ``project_connection`` -- requires a project connection ID for the endpoint, as set up in AI Foundry.
+ * ``managed_identity`` -- requires an audience for identity-based auth.
+ """
+
+ ANONYMOUS = "anonymous"
+ """ANONYMOUS."""
+ PROJECT_CONNECTION = "project_connection"
+ """PROJECT_CONNECTION."""
+ MANAGED_IDENTITY = "managed_identity"
+ """MANAGED_IDENTITY."""
+
+
+class OutputContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of an output content part."""
+
+ OUTPUT_TEXT = "output_text"
+ """OUTPUT_TEXT."""
+ REFUSAL = "refusal"
+ """REFUSAL."""
+ REASONING_TEXT = "reasoning_text"
+ """REASONING_TEXT."""
+
+
+class OutputItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of a response output item."""
+
+ OUTPUT_MESSAGE = "output_message"
+ """OUTPUT_MESSAGE."""
+ FILE_SEARCH_CALL = "file_search_call"
+ """FILE_SEARCH_CALL."""
+ FUNCTION_CALL = "function_call"
+ """FUNCTION_CALL."""
+ WEB_SEARCH_CALL = "web_search_call"
+ """WEB_SEARCH_CALL."""
+ COMPUTER_CALL = "computer_call"
+ """COMPUTER_CALL."""
+ REASONING = "reasoning"
+ """REASONING."""
+ COMPACTION = "compaction"
+ """COMPACTION."""
+ IMAGE_GENERATION_CALL = "image_generation_call"
+ """IMAGE_GENERATION_CALL."""
+ CODE_INTERPRETER_CALL = "code_interpreter_call"
+ """CODE_INTERPRETER_CALL."""
+ LOCAL_SHELL_CALL = "local_shell_call"
+ """LOCAL_SHELL_CALL."""
+ SHELL_CALL = "shell_call"
+ """SHELL_CALL."""
+ SHELL_CALL_OUTPUT = "shell_call_output"
+ """SHELL_CALL_OUTPUT."""
+ APPLY_PATCH_CALL = "apply_patch_call"
+ """APPLY_PATCH_CALL."""
+ APPLY_PATCH_CALL_OUTPUT = "apply_patch_call_output"
+ """APPLY_PATCH_CALL_OUTPUT."""
+ MCP_CALL = "mcp_call"
+ """MCP_CALL."""
+ MCP_LIST_TOOLS = "mcp_list_tools"
+ """MCP_LIST_TOOLS."""
+ MCP_APPROVAL_REQUEST = "mcp_approval_request"
+ """MCP_APPROVAL_REQUEST."""
+ CUSTOM_TOOL_CALL = "custom_tool_call"
+ """CUSTOM_TOOL_CALL."""
+ MESSAGE = "message"
+ """MESSAGE."""
+ COMPUTER_CALL_OUTPUT = "computer_call_output"
+ """COMPUTER_CALL_OUTPUT."""
+ FUNCTION_CALL_OUTPUT = "function_call_output"
+ """FUNCTION_CALL_OUTPUT."""
+ LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output"
+ """LOCAL_SHELL_CALL_OUTPUT."""
+ MCP_APPROVAL_RESPONSE = "mcp_approval_response"
+ """MCP_APPROVAL_RESPONSE."""
+ CUSTOM_TOOL_CALL_OUTPUT = "custom_tool_call_output"
+ """CUSTOM_TOOL_CALL_OUTPUT."""
+ STRUCTURED_OUTPUTS = "structured_outputs"
+ """STRUCTURED_OUTPUTS."""
+ OAUTH_CONSENT_REQUEST = "oauth_consent_request"
+ """OAUTH_CONSENT_REQUEST."""
+ MEMORY_SEARCH_CALL = "memory_search_call"
+ """MEMORY_SEARCH_CALL."""
+ WORKFLOW_ACTION = "workflow_action"
+ """WORKFLOW_ACTION."""
+ A2_A_PREVIEW_CALL = "a2a_preview_call"
+ """A2_A_PREVIEW_CALL."""
+ A2_A_PREVIEW_CALL_OUTPUT = "a2a_preview_call_output"
+ """A2_A_PREVIEW_CALL_OUTPUT."""
+ BING_GROUNDING_CALL = "bing_grounding_call"
+ """BING_GROUNDING_CALL."""
+ BING_GROUNDING_CALL_OUTPUT = "bing_grounding_call_output"
+ """BING_GROUNDING_CALL_OUTPUT."""
+ SHAREPOINT_GROUNDING_PREVIEW_CALL = "sharepoint_grounding_preview_call"
+ """SHAREPOINT_GROUNDING_PREVIEW_CALL."""
+ SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT = "sharepoint_grounding_preview_call_output"
+ """SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT."""
+ AZURE_AI_SEARCH_CALL = "azure_ai_search_call"
+ """AZURE_AI_SEARCH_CALL."""
+ AZURE_AI_SEARCH_CALL_OUTPUT = "azure_ai_search_call_output"
+ """AZURE_AI_SEARCH_CALL_OUTPUT."""
+ BING_CUSTOM_SEARCH_PREVIEW_CALL = "bing_custom_search_preview_call"
+ """BING_CUSTOM_SEARCH_PREVIEW_CALL."""
+ BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT = "bing_custom_search_preview_call_output"
+ """BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT."""
+ OPENAPI_CALL = "openapi_call"
+ """OPENAPI_CALL."""
+ OPENAPI_CALL_OUTPUT = "openapi_call_output"
+ """OPENAPI_CALL_OUTPUT."""
+ BROWSER_AUTOMATION_PREVIEW_CALL = "browser_automation_preview_call"
+ """BROWSER_AUTOMATION_PREVIEW_CALL."""
+ BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT = "browser_automation_preview_call_output"
+ """BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT."""
+ FABRIC_DATAAGENT_PREVIEW_CALL = "fabric_dataagent_preview_call"
+ """FABRIC_DATAAGENT_PREVIEW_CALL."""
+ FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT = "fabric_dataagent_preview_call_output"
+ """FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT."""
+ AZURE_FUNCTION_CALL = "azure_function_call"
+ """AZURE_FUNCTION_CALL."""
+ AZURE_FUNCTION_CALL_OUTPUT = "azure_function_call_output"
+ """AZURE_FUNCTION_CALL_OUTPUT."""
+
+
+class OutputMessageContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of an output message content part."""
+
+ OUTPUT_TEXT = "output_text"
+ """OUTPUT_TEXT."""
+ REFUSAL = "refusal"
+ """REFUSAL."""
+
+
+class PageOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Sort order (ascending or descending) for paginated list results."""
+
+ ASC = "asc"
+ """ASC."""
+ DESC = "desc"
+ """DESC."""
+
+
+class RankerVersionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Version of the ranker to use: ``auto`` or a pinned default version."""
+
+ AUTO = "auto"
+ """AUTO."""
+ DEFAULT2024_11_15 = "default-2024-11-15"
+ """DEFAULT2024_11_15."""
+
+
+class RealtimeMcpErrorType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Category of error raised during a realtime MCP interaction."""
+
+ PROTOCOL_ERROR = "protocol_error"
+ """PROTOCOL_ERROR."""
+ TOOL_EXECUTION_ERROR = "tool_execution_error"
+ """TOOL_EXECUTION_ERROR."""
+ HTTP_ERROR = "http_error"
+ """HTTP_ERROR."""
+
+
+class ResponseErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Error codes that can be reported on a failed response."""
+
+ SERVER_ERROR = "server_error"
+ """SERVER_ERROR."""
+ RATE_LIMIT_EXCEEDED = "rate_limit_exceeded"
+ """RATE_LIMIT_EXCEEDED."""
+ INVALID_PROMPT = "invalid_prompt"
+ """INVALID_PROMPT."""
+ VECTOR_STORE_TIMEOUT = "vector_store_timeout"
+ """VECTOR_STORE_TIMEOUT."""
+ INVALID_IMAGE = "invalid_image"
+ """INVALID_IMAGE."""
+ INVALID_IMAGE_FORMAT = "invalid_image_format"
+ """INVALID_IMAGE_FORMAT."""
+ INVALID_BASE64_IMAGE = "invalid_base64_image"
+ """INVALID_BASE64_IMAGE."""
+ INVALID_IMAGE_URL = "invalid_image_url"
+ """INVALID_IMAGE_URL."""
+ IMAGE_TOO_LARGE = "image_too_large"
+ """IMAGE_TOO_LARGE."""
+ IMAGE_TOO_SMALL = "image_too_small"
+ """IMAGE_TOO_SMALL."""
+ IMAGE_PARSE_ERROR = "image_parse_error"
+ """IMAGE_PARSE_ERROR."""
+ IMAGE_CONTENT_POLICY_VIOLATION = "image_content_policy_violation"
+ """IMAGE_CONTENT_POLICY_VIOLATION."""
+ INVALID_IMAGE_MODE = "invalid_image_mode"
+ """INVALID_IMAGE_MODE."""
+ IMAGE_FILE_TOO_LARGE = "image_file_too_large"
+ """IMAGE_FILE_TOO_LARGE."""
+ UNSUPPORTED_IMAGE_MEDIA_TYPE = "unsupported_image_media_type"
+ """UNSUPPORTED_IMAGE_MEDIA_TYPE."""
+ EMPTY_IMAGE_FILE = "empty_image_file"
+ """EMPTY_IMAGE_FILE."""
+ FAILED_TO_DOWNLOAD_IMAGE = "failed_to_download_image"
+ """FAILED_TO_DOWNLOAD_IMAGE."""
+ IMAGE_FILE_NOT_FOUND = "image_file_not_found"
+ """IMAGE_FILE_NOT_FOUND."""
+
+
+class ResponseStreamEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of response stream events."""
+
+ RESPONSE_AUDIO_DELTA = "response.audio.delta"
+ """RESPONSE_AUDIO_DELTA."""
+ RESPONSE_AUDIO_DONE = "response.audio.done"
+ """RESPONSE_AUDIO_DONE."""
+ RESPONSE_AUDIO_TRANSCRIPT_DELTA = "response.audio.transcript.delta"
+ """RESPONSE_AUDIO_TRANSCRIPT_DELTA."""
+ RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio.transcript.done"
+ """RESPONSE_AUDIO_TRANSCRIPT_DONE."""
+ RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA = "response.code_interpreter_call_code.delta"
+ """RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA."""
+ RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE = "response.code_interpreter_call_code.done"
+ """RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE."""
+ RESPONSE_CODE_INTERPRETER_CALL_COMPLETED = "response.code_interpreter_call.completed"
+ """RESPONSE_CODE_INTERPRETER_CALL_COMPLETED."""
+ RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS = "response.code_interpreter_call.in_progress"
+ """RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS."""
+ RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING = "response.code_interpreter_call.interpreting"
+ """RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING."""
+ RESPONSE_COMPLETED = "response.completed"
+ """RESPONSE_COMPLETED."""
+ RESPONSE_CONTENT_PART_ADDED = "response.content_part.added"
+ """RESPONSE_CONTENT_PART_ADDED."""
+ RESPONSE_CONTENT_PART_DONE = "response.content_part.done"
+ """RESPONSE_CONTENT_PART_DONE."""
+ RESPONSE_CREATED = "response.created"
+ """RESPONSE_CREATED."""
+ ERROR = "error"
+ """ERROR."""
+ RESPONSE_FILE_SEARCH_CALL_COMPLETED = "response.file_search_call.completed"
+ """RESPONSE_FILE_SEARCH_CALL_COMPLETED."""
+ RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS = "response.file_search_call.in_progress"
+ """RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS."""
+ RESPONSE_FILE_SEARCH_CALL_SEARCHING = "response.file_search_call.searching"
+ """RESPONSE_FILE_SEARCH_CALL_SEARCHING."""
+ RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta"
+ """RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA."""
+ RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done"
+ """RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE."""
+ RESPONSE_IN_PROGRESS = "response.in_progress"
+ """RESPONSE_IN_PROGRESS."""
+ RESPONSE_FAILED = "response.failed"
+ """RESPONSE_FAILED."""
+ RESPONSE_INCOMPLETE = "response.incomplete"
+ """RESPONSE_INCOMPLETE."""
+ RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added"
+ """RESPONSE_OUTPUT_ITEM_ADDED."""
+ RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done"
+ """RESPONSE_OUTPUT_ITEM_DONE."""
+ RESPONSE_REASONING_SUMMARY_PART_ADDED = "response.reasoning_summary_part.added"
+ """RESPONSE_REASONING_SUMMARY_PART_ADDED."""
+ RESPONSE_REASONING_SUMMARY_PART_DONE = "response.reasoning_summary_part.done"
+ """RESPONSE_REASONING_SUMMARY_PART_DONE."""
+ RESPONSE_REASONING_SUMMARY_TEXT_DELTA = "response.reasoning_summary_text.delta"
+ """RESPONSE_REASONING_SUMMARY_TEXT_DELTA."""
+ RESPONSE_REASONING_SUMMARY_TEXT_DONE = "response.reasoning_summary_text.done"
+ """RESPONSE_REASONING_SUMMARY_TEXT_DONE."""
+ RESPONSE_REASONING_TEXT_DELTA = "response.reasoning_text.delta"
+ """RESPONSE_REASONING_TEXT_DELTA."""
+ RESPONSE_REASONING_TEXT_DONE = "response.reasoning_text.done"
+ """RESPONSE_REASONING_TEXT_DONE."""
+ RESPONSE_REFUSAL_DELTA = "response.refusal.delta"
+ """RESPONSE_REFUSAL_DELTA."""
+ RESPONSE_REFUSAL_DONE = "response.refusal.done"
+ """RESPONSE_REFUSAL_DONE."""
+ RESPONSE_OUTPUT_TEXT_DELTA = "response.output_text.delta"
+ """RESPONSE_OUTPUT_TEXT_DELTA."""
+ RESPONSE_OUTPUT_TEXT_DONE = "response.output_text.done"
+ """RESPONSE_OUTPUT_TEXT_DONE."""
+ RESPONSE_WEB_SEARCH_CALL_COMPLETED = "response.web_search_call.completed"
+ """RESPONSE_WEB_SEARCH_CALL_COMPLETED."""
+ RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS = "response.web_search_call.in_progress"
+ """RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS."""
+ RESPONSE_WEB_SEARCH_CALL_SEARCHING = "response.web_search_call.searching"
+ """RESPONSE_WEB_SEARCH_CALL_SEARCHING."""
+ RESPONSE_IMAGE_GENERATION_CALL_COMPLETED = "response.image_generation_call.completed"
+ """RESPONSE_IMAGE_GENERATION_CALL_COMPLETED."""
+ RESPONSE_IMAGE_GENERATION_CALL_GENERATING = "response.image_generation_call.generating"
+ """RESPONSE_IMAGE_GENERATION_CALL_GENERATING."""
+ RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS = "response.image_generation_call.in_progress"
+ """RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS."""
+ RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE = "response.image_generation_call.partial_image"
+ """RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE."""
+ RESPONSE_MCP_CALL_ARGUMENTS_DELTA = "response.mcp_call_arguments.delta"
+ """RESPONSE_MCP_CALL_ARGUMENTS_DELTA."""
+ RESPONSE_MCP_CALL_ARGUMENTS_DONE = "response.mcp_call_arguments.done"
+ """RESPONSE_MCP_CALL_ARGUMENTS_DONE."""
+ RESPONSE_MCP_CALL_COMPLETED = "response.mcp_call.completed"
+ """RESPONSE_MCP_CALL_COMPLETED."""
+ RESPONSE_MCP_CALL_FAILED = "response.mcp_call.failed"
+ """RESPONSE_MCP_CALL_FAILED."""
+ RESPONSE_MCP_CALL_IN_PROGRESS = "response.mcp_call.in_progress"
+ """RESPONSE_MCP_CALL_IN_PROGRESS."""
+ RESPONSE_MCP_LIST_TOOLS_COMPLETED = "response.mcp_list_tools.completed"
+ """RESPONSE_MCP_LIST_TOOLS_COMPLETED."""
+ RESPONSE_MCP_LIST_TOOLS_FAILED = "response.mcp_list_tools.failed"
+ """RESPONSE_MCP_LIST_TOOLS_FAILED."""
+ RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS = "response.mcp_list_tools.in_progress"
+ """RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS."""
+ RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED = "response.output_text.annotation.added"
+ """RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED."""
+ RESPONSE_QUEUED = "response.queued"
+ """RESPONSE_QUEUED."""
+ RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA = "response.custom_tool_call_input.delta"
+ """RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA."""
+ RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE = "response.custom_tool_call_input.done"
+ """RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE."""
+
+
+class SearchContextSize(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Amount of search context to use (``low``, ``medium``, or ``high``)."""
+
+ LOW = "low"
+ """LOW."""
+ MEDIUM = "medium"
+ """MEDIUM."""
+ HIGH = "high"
+ """HIGH."""
+
+
+class TextResponseFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the text response format configuration (plain text, JSON schema, or JSON object)."""
+
+ TEXT = "text"
+ """TEXT."""
+ JSON_SCHEMA = "json_schema"
+ """JSON_SCHEMA."""
+ JSON_OBJECT = "json_object"
+ """JSON_OBJECT."""
+
+
+class ToolCallStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Lifecycle status of a tool call."""
+
+ IN_PROGRESS = "in_progress"
+ """IN_PROGRESS."""
+ COMPLETED = "completed"
+ """COMPLETED."""
+ INCOMPLETE = "incomplete"
+ """INCOMPLETE."""
+ FAILED = "failed"
+ """FAILED."""
+
+
+class ToolChoiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Tool choice mode: whether tool calls are disabled, model-chosen, or required."""
+
+ NONE = "none"
+ """NONE."""
+ AUTO = "auto"
+ """AUTO."""
+ REQUIRED = "required"
+ """REQUIRED."""
+
+
+class ToolChoiceParamType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of a tool choice parameter."""
+
+ ALLOWED_TOOLS = "allowed_tools"
+ """ALLOWED_TOOLS."""
+ FUNCTION = "function"
+ """FUNCTION."""
+ MCP = "mcp"
+ """MCP."""
+ CUSTOM = "custom"
+ """CUSTOM."""
+ APPLY_PATCH = "apply_patch"
+ """APPLY_PATCH."""
+ SHELL = "shell"
+ """SHELL."""
+ FILE_SEARCH = "file_search"
+ """FILE_SEARCH."""
+ WEB_SEARCH_PREVIEW = "web_search_preview"
+ """WEB_SEARCH_PREVIEW."""
+ COMPUTER_USE_PREVIEW = "computer_use_preview"
+ """COMPUTER_USE_PREVIEW."""
+ WEB_SEARCH_PREVIEW2025_03_11 = "web_search_preview_2025_03_11"
+ """WEB_SEARCH_PREVIEW2025_03_11."""
+ IMAGE_GENERATION = "image_generation"
+ """IMAGE_GENERATION."""
+ CODE_INTERPRETER = "code_interpreter"
+ """CODE_INTERPRETER."""
+
+
+class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Discriminator for the ``type`` field of a tool definition."""
+
+ FUNCTION = "function"
+ """FUNCTION."""
+ FILE_SEARCH = "file_search"
+ """FILE_SEARCH."""
+ COMPUTER_USE_PREVIEW = "computer_use_preview"
+ """COMPUTER_USE_PREVIEW."""
+ WEB_SEARCH = "web_search"
+ """WEB_SEARCH."""
+ MCP = "mcp"
+ """MCP."""
+ CODE_INTERPRETER = "code_interpreter"
+ """CODE_INTERPRETER."""
+ IMAGE_GENERATION = "image_generation"
+ """IMAGE_GENERATION."""
+ LOCAL_SHELL = "local_shell"
+ """LOCAL_SHELL."""
+ SHELL = "shell"
+ """SHELL."""
+ CUSTOM = "custom"
+ """CUSTOM."""
+ WEB_SEARCH_PREVIEW = "web_search_preview"
+ """WEB_SEARCH_PREVIEW."""
+ APPLY_PATCH = "apply_patch"
+ """APPLY_PATCH."""
+ A2_A_PREVIEW = "a2a_preview"
+ """A2_A_PREVIEW."""
+ BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview"
+ """BING_CUSTOM_SEARCH_PREVIEW."""
+ BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview"
+ """BROWSER_AUTOMATION_PREVIEW."""
+ FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview"
+ """FABRIC_DATAAGENT_PREVIEW."""
+ SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview"
+ """SHAREPOINT_GROUNDING_PREVIEW."""
+ MEMORY_SEARCH_PREVIEW = "memory_search_preview"
+ """MEMORY_SEARCH_PREVIEW."""
+ AZURE_AI_SEARCH = "azure_ai_search"
+ """AZURE_AI_SEARCH."""
+ AZURE_FUNCTION = "azure_function"
+ """AZURE_FUNCTION."""
+ BING_GROUNDING = "bing_grounding"
+ """BING_GROUNDING."""
+ CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs"
+ """CAPTURE_STRUCTURED_OUTPUTS."""
+ OPENAPI = "openapi"
+ """OPENAPI."""
+ MEMORY_SEARCH = "memory_search"
+ """MEMORY_SEARCH."""
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_models.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_models.py
new file mode 100644
index 000000000000..3996bd594389
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_models.py
@@ -0,0 +1,17089 @@
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=useless-super-delegation
+
+import datetime
+from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload
+
+from .._utils.model_base import Model as _Model, rest_discriminator, rest_field
+from ._enums import (
+ AnnotationType,
+ ApplyPatchFileOperationType,
+ ApplyPatchOperationParamType,
+ ComputerActionType,
+ ContainerNetworkPolicyParamType,
+ ContainerSkillType,
+ CustomToolParamFormatType,
+ FunctionAndCustomToolCallOutputType,
+ FunctionShellCallEnvironmentType,
+ FunctionShellCallItemParamEnvironmentType,
+ FunctionShellCallOutputOutcomeParamType,
+ FunctionShellCallOutputOutcomeType,
+ FunctionShellToolParamEnvironmentType,
+ ItemFieldType,
+ ItemType,
+ MemoryItemKind,
+ MessageContentType,
+ OpenApiAuthType,
+ OutputContentType,
+ OutputItemType,
+ OutputMessageContentType,
+ RealtimeMcpErrorType,
+ ResponseStreamEventType,
+ TextResponseFormatConfigurationType,
+ ToolChoiceParamType,
+ ToolType,
+)
+
+if TYPE_CHECKING:
+ from .. import _types, models as _models
+
+
+class Tool(_Model):
+    """A tool that can be used to generate a response.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    A2APreviewTool, ApplyPatchToolParam, AzureAISearchTool, AzureFunctionTool,
+    BingCustomSearchPreviewTool, BingGroundingTool, BrowserAutomationPreviewTool,
+    CaptureStructuredOutputsTool, CodeInterpreterTool, ComputerUsePreviewTool, CustomToolParam,
+    MicrosoftFabricPreviewTool, FileSearchTool, FunctionTool, ImageGenTool, LocalShellToolParam,
+    MCPTool, MemorySearchTool, MemorySearchPreviewTool, OpenApiTool, SharepointPreviewTool,
+    FunctionShellToolParam, WebSearchTool, WebSearchPreviewTool
+
+    :ivar type: Required. Known values are: "function", "file_search", "computer_use_preview",
+     "web_search", "mcp", "code_interpreter", "image_generation", "local_shell", "shell", "custom",
+     "web_search_preview", "apply_patch", "a2a_preview", "bing_custom_search_preview",
+     "browser_automation_preview", "fabric_dataagent_preview", "sharepoint_grounding_preview",
+     "memory_search_preview", "azure_ai_search", "azure_function", "bing_grounding",
+     "capture_structured_outputs", "openapi", and "memory_search".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ToolType
+    """
+
+    # Maps discriminator values (e.g. "function") to sub-classes; presumably populated by the
+    # model base class when a sub-class is declared with ``discriminator="..."`` — confirm in
+    # model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"function\", \"file_search\", \"computer_use_preview\",
+    \"web_search\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\",
+    \"shell\", \"custom\", \"web_search_preview\", \"apply_patch\", \"a2a_preview\",
+    \"bing_custom_search_preview\", \"browser_automation_preview\", \"fabric_dataagent_preview\",
+    \"sharepoint_grounding_preview\", \"memory_search_preview\", \"azure_ai_search\",
+    \"azure_function\", \"bing_grounding\", \"capture_structured_outputs\", \"openapi\", and
+    \"memory_search\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class A2APreviewTool(Tool, discriminator="a2a_preview"):
+    """An agent implementing the A2A protocol.
+
+    :ivar type: The type of the tool. Always ``a2a_preview``. Required. A2_A_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.A2_A_PREVIEW
+    :ivar base_url: Base URL of the agent.
+    :vartype base_url: str
+    :ivar agent_card_path: The path to the agent card relative to the ``base_url``. If not
+     provided, defaults to ``/.well-known/agent-card.json``.
+    :vartype agent_card_path: str
+    :ivar project_connection_id: The connection ID in the project for the A2A server. The
+     connection stores authentication and other connection details needed to connect to the A2A
+     server.
+    :vartype project_connection_id: str
+    """
+
+    type: Literal[ToolType.A2_A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the tool. Always ``a2a_preview``. Required. A2_A_PREVIEW."""
+    base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Base URL of the agent."""
+    agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The path to the agent card relative to the ``base_url``. If not provided, defaults to
+    ``/.well-known/agent-card.json``."""
+    project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The connection ID in the project for the A2A server. The connection stores authentication and
+    other connection details needed to connect to the A2A server."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        base_url: Optional[str] = None,
+        agent_card_path: Optional[str] = None,
+        project_connection_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = ToolType.A2_A_PREVIEW  # type: ignore
+
+
+class OutputItem(_Model):
+    """Base class for items produced in a response output.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    A2AToolCall, A2AToolCallOutput, OutputItemApplyPatchToolCall,
+    OutputItemApplyPatchToolCallOutput, AzureAISearchToolCall, AzureAISearchToolCallOutput,
+    AzureFunctionToolCall, AzureFunctionToolCallOutput, BingCustomSearchToolCall,
+    BingCustomSearchToolCallOutput, BingGroundingToolCall, BingGroundingToolCallOutput,
+    BrowserAutomationToolCall, BrowserAutomationToolCallOutput, OutputItemCodeInterpreterToolCall,
+    OutputItemCompactionBody, OutputItemComputerToolCall, OutputItemComputerToolCallOutput,
+    OutputItemCustomToolCall, OutputItemCustomToolCallOutput, FabricDataAgentToolCall,
+    FabricDataAgentToolCallOutput, OutputItemFileSearchToolCall, OutputItemFunctionToolCall,
+    OutputItemFunctionToolCallOutput, OutputItemImageGenToolCall, OutputItemLocalShellToolCall,
+    OutputItemLocalShellToolCallOutput, OutputItemMcpApprovalRequest,
+    OutputItemMcpApprovalResponseResource, OutputItemMcpToolCall, OutputItemMcpListTools,
+    MemorySearchToolCallItemResource, OutputItemMessage, OAuthConsentRequestOutputItem,
+    OpenApiToolCall, OpenApiToolCallOutput, OutputItemOutputMessage, OutputItemReasoningItem,
+    SharepointGroundingToolCall, SharepointGroundingToolCallOutput, OutputItemFunctionShellCall,
+    OutputItemFunctionShellCallOutput, StructuredOutputsOutputItem, OutputItemWebSearchToolCall,
+    WorkflowActionOutputItem
+
+    :ivar type: Required. Known values are: "output_message", "file_search_call", "function_call",
+     "web_search_call", "computer_call", "reasoning", "compaction", "image_generation_call",
+     "code_interpreter_call", "local_shell_call", "shell_call", "shell_call_output",
+     "apply_patch_call", "apply_patch_call_output", "mcp_call", "mcp_list_tools",
+     "mcp_approval_request", "custom_tool_call", "message", "computer_call_output",
+     "function_call_output", "local_shell_call_output", "mcp_approval_response",
+     "custom_tool_call_output", "structured_outputs", "oauth_consent_request", "memory_search_call",
+     "workflow_action", "a2a_preview_call", "a2a_preview_call_output", "bing_grounding_call",
+     "bing_grounding_call_output", "sharepoint_grounding_preview_call",
+     "sharepoint_grounding_preview_call_output", "azure_ai_search_call",
+     "azure_ai_search_call_output", "bing_custom_search_preview_call",
+     "bing_custom_search_preview_call_output", "openapi_call", "openapi_call_output",
+     "browser_automation_preview_call", "browser_automation_preview_call_output",
+     "fabric_dataagent_preview_call", "fabric_dataagent_preview_call_output", "azure_function_call",
+     and "azure_function_call_output".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputItemType
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    """
+
+    # Maps discriminator values (e.g. "a2a_preview_call") to sub-classes; presumably populated by
+    # the model base class when a sub-class is declared with ``discriminator="..."`` — confirm in
+    # model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"output_message\", \"file_search_call\", \"function_call\",
+    \"web_search_call\", \"computer_call\", \"reasoning\", \"compaction\",
+    \"image_generation_call\", \"code_interpreter_call\", \"local_shell_call\", \"shell_call\",
+    \"shell_call_output\", \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_call\",
+    \"mcp_list_tools\", \"mcp_approval_request\", \"custom_tool_call\", \"message\",
+    \"computer_call_output\", \"function_call_output\", \"local_shell_call_output\",
+    \"mcp_approval_response\", \"custom_tool_call_output\", \"structured_outputs\",
+    \"oauth_consent_request\", \"memory_search_call\", \"workflow_action\", \"a2a_preview_call\",
+    \"a2a_preview_call_output\", \"bing_grounding_call\", \"bing_grounding_call_output\",
+    \"sharepoint_grounding_preview_call\", \"sharepoint_grounding_preview_call_output\",
+    \"azure_ai_search_call\", \"azure_ai_search_call_output\", \"bing_custom_search_preview_call\",
+    \"bing_custom_search_preview_call_output\", \"openapi_call\", \"openapi_call_output\",
+    \"browser_automation_preview_call\", \"browser_automation_preview_call_output\",
+    \"fabric_dataagent_preview_call\", \"fabric_dataagent_preview_call_output\",
+    \"azure_function_call\", and \"azure_function_call_output\"."""
+    created_by: Optional[Union["_models.CreatedBy", str]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The information about the creator of the item. Is either a CreatedBy type or a str type."""
+    agent_reference: Optional["_models.AgentReference"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The agent that created the item."""
+    response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response on which the item is created."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class A2AToolCall(OutputItem, discriminator="a2a_preview_call"):
+    """An A2A (Agent-to-Agent) tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. A2_A_PREVIEW_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.A2_A_PREVIEW_CALL
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the A2A agent card being called. Required.
+    :vartype name: str
+    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
+    :vartype arguments: str
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.A2_A_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. A2_A_PREVIEW_CALL."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the A2A agent card being called. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string of the arguments to pass to the tool. Required."""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        arguments: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = OutputItemType.A2_A_PREVIEW_CALL  # type: ignore
+
+
+class A2AToolCallOutput(OutputItem, discriminator="a2a_preview_call_output"):
+    """The output of an A2A (Agent-to-Agent) tool call.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. A2_A_PREVIEW_CALL_OUTPUT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.A2_A_PREVIEW_CALL_OUTPUT
+    :ivar call_id: The unique ID of the tool call generated by the model. Required.
+    :vartype call_id: str
+    :ivar name: The name of the A2A agent card that was called. Required.
+    :vartype name: str
+    :ivar output: The output from the A2A tool call. Is one of the following types: {str: Any},
+     str, [Any]
+    :vartype output: dict[str, any] or str or list[any]
+    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
+     "completed", "incomplete", and "failed".
+    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
+    """
+
+    type: Literal[OutputItemType.A2_A_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. A2_A_PREVIEW_CALL_OUTPUT."""
+    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the tool call generated by the model. Required."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the A2A agent card that was called. Required."""
+    output: Optional["_types.ToolCallOutputContent"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The output from the A2A tool call. Is one of the following types: {str: Any}, str, [Any]"""
+    status: Union[str, "_models.ToolCallStatus"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
+    \"incomplete\", and \"failed\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        call_id: str,
+        name: str,
+        status: Union[str, "_models.ToolCallStatus"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        output: Optional["_types.ToolCallOutputContent"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = OutputItemType.A2_A_PREVIEW_CALL_OUTPUT  # type: ignore
+
+
+class AgentId(_Model):
+    """Identifies an agent by name and exact version; ``type`` is always ``"agent_id"``.
+
+    :ivar type: Required. Default value is "agent_id".
+    :vartype type: str
+    :ivar name: The name of the agent. Required.
+    :vartype name: str
+    :ivar version: The version identifier of the agent. Required.
+    :vartype version: str
+    """
+
+    type: Literal["agent_id"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required. Default value is \"agent_id\"."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the agent. Required."""
+    version: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The version identifier of the agent. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # ``type`` is a fixed constant, not caller-supplied.
+        self.type: Literal["agent_id"] = "agent_id"
+
+
+class AgentReference(_Model):
+    """References an agent by name and optional version; ``type`` is always ``"agent_reference"``.
+
+    :ivar type: Required. Default value is "agent_reference".
+    :vartype type: str
+    :ivar name: The name of the agent. Required.
+    :vartype name: str
+    :ivar version: The version identifier of the agent.
+    :vartype version: str
+    """
+
+    type: Literal["agent_reference"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required. Default value is \"agent_reference\"."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the agent. Required."""
+    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The version identifier of the agent."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # ``type`` is a fixed constant, not caller-supplied.
+        self.type: Literal["agent_reference"] = "agent_reference"
+
+
+class AISearchIndexResource(_Model):
+    """An AI Search Index resource.
+
+    :ivar project_connection_id: An index connection ID in an IndexResource attached to this agent.
+    :vartype project_connection_id: str
+    :ivar index_name: The name of an index in an IndexResource attached to this agent.
+    :vartype index_name: str
+    :ivar query_type: Type of query in an AIIndexResource attached to this agent. Known values are:
+     "simple", "semantic", "vector", "vector_simple_hybrid", and "vector_semantic_hybrid".
+    :vartype query_type: str or ~azure.ai.responses.server.sdk.models.models.AzureAISearchQueryType
+    :ivar top_k: Number of documents to retrieve from search and present to the model.
+    :vartype top_k: int
+    :ivar filter: Filter string for the search resource.
+    :vartype filter: str
+    :ivar index_asset_id: Index asset id for search resource.
+    :vartype index_asset_id: str
+    """
+
+    project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """An index connection ID in an IndexResource attached to this agent."""
+    index_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of an index in an IndexResource attached to this agent."""
+    query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Type of query in an AIIndexResource attached to this agent. Known values are: \"simple\",
+    \"semantic\", \"vector\", \"vector_simple_hybrid\", and \"vector_semantic_hybrid\"."""
+    top_k: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Number of documents to retrieve from search and present to the model."""
+    filter: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Filter string for the search resource."""
+    index_asset_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Index asset id for search resource."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: Optional[str] = None,
+        index_name: Optional[str] = None,
+        query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = None,
+        top_k: Optional[int] = None,
+        filter: Optional[str] = None,  # pylint: disable=redefined-builtin
+        index_asset_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class Annotation(_Model):
+    """An annotation that applies to a span of output text.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    ContainerFileCitationBody, FileCitationBody, FilePath, UrlCitationBody
+
+    :ivar type: Required. Known values are: "file_citation", "url_citation",
+     "container_file_citation", and "file_path".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AnnotationType
+    """
+
+    # Maps discriminator values (e.g. "file_citation") to sub-classes; presumably populated by
+    # the model base class when a sub-class declares ``discriminator="..."`` — confirm in
+    # model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"file_citation\", \"url_citation\", \"container_file_citation\",
+    and \"file_path\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApiErrorResponse(_Model):
+    """Error response for API failures.
+
+    :ivar error: The error details. Required.
+    :vartype error: ~azure.ai.responses.server.sdk.models.models.Error
+    """
+
+    error: "_models.Error" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The error details. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        error: "_models.Error",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApplyPatchFileOperation(_Model):
+    """Apply patch operation.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    ApplyPatchCreateFileOperation, ApplyPatchDeleteFileOperation, ApplyPatchUpdateFileOperation
+
+    :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchFileOperationType
+    """
+
+    # Maps discriminator values (e.g. "create_file") to sub-classes; presumably populated by the
+    # model base class when a sub-class declares ``discriminator="..."`` — confirm in model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApplyPatchCreateFileOperation(ApplyPatchFileOperation, discriminator="create_file"):
+    """Apply patch create file operation.
+
+    :ivar type: Create a new file with the provided diff. Required. CREATE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CREATE_FILE
+    :ivar path: Path of the file to create. Required.
+    :vartype path: str
+    :ivar diff: Diff to apply. Required.
+    :vartype diff: str
+    """
+
+    type: Literal[ApplyPatchFileOperationType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Create a new file with the provided diff. Required. CREATE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to create. Required."""
+    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Diff to apply. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+        diff: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = ApplyPatchFileOperationType.CREATE_FILE  # type: ignore
+
+
+class ApplyPatchOperationParam(_Model):
+    """Apply patch operation.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    ApplyPatchCreateFileOperationParam, ApplyPatchDeleteFileOperationParam,
+    ApplyPatchUpdateFileOperationParam
+
+    :ivar type: Required. Known values are: "create_file", "delete_file", and "update_file".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchOperationParamType
+    """
+
+    # Maps discriminator values (e.g. "create_file") to sub-classes; presumably populated by the
+    # model base class when a sub-class declares ``discriminator="..."`` — confirm in model_base.
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"create_file\", \"delete_file\", and \"update_file\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ApplyPatchCreateFileOperationParam(ApplyPatchOperationParam, discriminator="create_file"):
+    """Apply patch create file operation.
+
+    :ivar type: The operation type. Always ``create_file``. Required. CREATE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CREATE_FILE
+    :ivar path: Path of the file to create relative to the workspace root. Required.
+    :vartype path: str
+    :ivar diff: Unified diff content to apply when creating the file. Required.
+    :vartype diff: str
+    """
+
+    type: Literal[ApplyPatchOperationParamType.CREATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The operation type. Always ``create_file``. Required. CREATE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to create relative to the workspace root. Required."""
+    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Unified diff content to apply when creating the file. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+        diff: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = ApplyPatchOperationParamType.CREATE_FILE  # type: ignore
+
+
+class ApplyPatchDeleteFileOperation(ApplyPatchFileOperation, discriminator="delete_file"):
+    """Apply patch delete file operation.
+
+    :ivar type: Delete the specified file. Required. DELETE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DELETE_FILE
+    :ivar path: Path of the file to delete. Required.
+    :vartype path: str
+    """
+
+    type: Literal[ApplyPatchFileOperationType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Delete the specified file. Required. DELETE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to delete. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = ApplyPatchFileOperationType.DELETE_FILE  # type: ignore
+
+
+class ApplyPatchDeleteFileOperationParam(ApplyPatchOperationParam, discriminator="delete_file"):
+    """Apply patch delete file operation.
+
+    :ivar type: The operation type. Always ``delete_file``. Required. DELETE_FILE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DELETE_FILE
+    :ivar path: Path of the file to delete relative to the workspace root. Required.
+    :vartype path: str
+    """
+
+    type: Literal[ApplyPatchOperationParamType.DELETE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The operation type. Always ``delete_file``. Required. DELETE_FILE."""
+    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Path of the file to delete relative to the workspace root. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        path: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator to this sub-class's fixed value after base initialization.
+        self.type = ApplyPatchOperationParamType.DELETE_FILE  # type: ignore
+
+
class Item(_Model):
    """Content item used to generate a response.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ApplyPatchToolCallItemParam, ApplyPatchToolCallOutputItemParam, ItemCodeInterpreterToolCall,
    CompactionSummaryItemParam, ItemComputerToolCall, ComputerCallOutputItemParam,
    ItemCustomToolCall, ItemCustomToolCallOutput, ItemFileSearchToolCall, ItemFunctionToolCall,
    FunctionCallOutputItemParam, ItemImageGenToolCall, ItemReferenceParam, ItemLocalShellToolCall,
    ItemLocalShellToolCallOutput, ItemMcpApprovalRequest, MCPApprovalResponse, ItemMcpToolCall,
    ItemMcpListTools, MemorySearchToolCallItemParam, ItemMessage, ItemOutputMessage,
    ItemReasoningItem, FunctionShellCallItemParam, FunctionShellCallOutputItemParam,
    ItemWebSearchToolCall

    :ivar type: Required. Known values are: "message", "output_message", "file_search_call",
     "computer_call", "computer_call_output", "web_search_call", "function_call",
     "function_call_output", "reasoning", "compaction", "image_generation_call",
     "code_interpreter_call", "local_shell_call", "local_shell_call_output", "shell_call",
     "shell_call_output", "apply_patch_call", "apply_patch_call_output", "mcp_list_tools",
     "mcp_approval_request", "mcp_approval_response", "mcp_call", "custom_tool_call_output",
     "custom_tool_call", "item_reference", "structured_outputs", "oauth_consent_request",
     "memory_search_call", "workflow_action", "a2a_preview_call", "a2a_preview_call_output",
     "bing_grounding_call", "bing_grounding_call_output", "sharepoint_grounding_preview_call",
     "sharepoint_grounding_preview_call_output", "azure_ai_search_call",
     "azure_ai_search_call_output", "bing_custom_search_preview_call",
     "bing_custom_search_preview_call_output", "openapi_call", "openapi_call_output",
     "browser_automation_preview_call", "browser_automation_preview_call_output",
     "fabric_dataagent_preview_call", "fabric_dataagent_preview_call_output", "azure_function_call",
     and "azure_function_call_output".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ItemType
    """

    # NOTE(review): presumably the discriminator-value -> subclass registry that the
    # model framework populates when subclasses declare ``discriminator="..."`` —
    # confirm against the ``_Model`` metaclass implementation.
    __mapping__: dict[str, _Model] = {}
    # Base declares ``type`` as an open string; each subclass narrows it to a
    # Literal of its own discriminator value.
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"message\", \"output_message\", \"file_search_call\",
    \"computer_call\", \"computer_call_output\", \"web_search_call\", \"function_call\",
    \"function_call_output\", \"reasoning\", \"compaction\", \"image_generation_call\",
    \"code_interpreter_call\", \"local_shell_call\", \"local_shell_call_output\", \"shell_call\",
    \"shell_call_output\", \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\",
    \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"custom_tool_call_output\",
    \"custom_tool_call\", \"item_reference\", \"structured_outputs\", \"oauth_consent_request\",
    \"memory_search_call\", \"workflow_action\", \"a2a_preview_call\", \"a2a_preview_call_output\",
    \"bing_grounding_call\", \"bing_grounding_call_output\", \"sharepoint_grounding_preview_call\",
    \"sharepoint_grounding_preview_call_output\", \"azure_ai_search_call\",
    \"azure_ai_search_call_output\", \"bing_custom_search_preview_call\",
    \"bing_custom_search_preview_call_output\", \"openapi_call\", \"openapi_call_output\",
    \"browser_automation_preview_call\", \"browser_automation_preview_call_output\",
    \"fabric_dataagent_preview_call\", \"fabric_dataagent_preview_call_output\",
    \"azure_function_call\", and \"azure_function_call_output\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Base class performs no discriminator pinning; subclasses set ``type``.
        super().__init__(*args, **kwargs)
+
+
class ApplyPatchToolCallItemParam(Item, discriminator="apply_patch_call"):
    """Apply patch tool call.

    :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL
    :ivar id: Identifier of the item. No further description in the service spec.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``.
     Required. Known values are: "in_progress" and "completed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallStatusParam
    :ivar operation: The specific create, delete, or update instruction for the apply_patch tool
     call. Required.
    :vartype operation: ~azure.ai.responses.server.sdk.models.models.ApplyPatchOperationParam
    """

    # Discriminator field: narrows ``Item.type`` to this variant's wire value.
    type: Literal[ItemType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL."""
    # Optional identifier; the service spec provides no description for it.
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallStatusParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required.
    Known values are: \"in_progress\" and \"completed\"."""
    operation: "_models.ApplyPatchOperationParam" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The specific create, delete, or update instruction for the apply_patch tool call. Required."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallStatusParam"],
        operation: "_models.ApplyPatchOperationParam",
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ItemType.APPLY_PATCH_CALL # type: ignore
+
+
class ApplyPatchToolCallOutputItemParam(Item, discriminator="apply_patch_call_output"):
    """Apply patch tool call output.

    :ivar type: The type of the item. Always ``apply_patch_call_output``. Required.
     APPLY_PATCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL_OUTPUT
    :ivar id: Identifier of the item. No further description in the service spec.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call output. One of ``completed`` or
     ``failed``. Required. Known values are: "completed" and "failed".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallOutputStatusParam
    :ivar output: Output text of the call. No further description in the service spec.
    :vartype output: str
    """

    # Discriminator field: narrows ``Item.type`` to this variant's wire value.
    type: Literal[ItemType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT."""
    # Optional identifier; the service spec provides no description for it.
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallOutputStatusParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required.
    Known values are: \"completed\" and \"failed\"."""
    # Optional output payload; the service spec provides no description for it.
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallOutputStatusParam"],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        output: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ItemType.APPLY_PATCH_CALL_OUTPUT # type: ignore
+
+
class ApplyPatchToolParam(Tool, discriminator="apply_patch"):
    """Apply patch tool.

    This variant carries no configuration beyond its discriminator.

    :ivar type: The type of the tool. Always ``apply_patch``. Required. APPLY_PATCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH
    """

    # Discriminator field: narrows ``Tool.type`` to this variant's wire value.
    type: Literal[ToolType.APPLY_PATCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the tool. Always ``apply_patch``. Required. APPLY_PATCH."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ToolType.APPLY_PATCH # type: ignore
+
+
class ApplyPatchUpdateFileOperation(ApplyPatchFileOperation, discriminator="update_file"):
    """Apply patch update file operation.

    :ivar type: Update an existing file with the provided diff. Required. UPDATE_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.UPDATE_FILE
    :ivar path: Path of the file to update. Required.
    :vartype path: str
    :ivar diff: Diff to apply. Required.
    :vartype diff: str
    """

    # Discriminator field: narrows the base operation type to this variant's wire value.
    type: Literal[ApplyPatchFileOperationType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Update an existing file with the provided diff. Required. UPDATE_FILE."""
    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Path of the file to update. Required."""
    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Diff to apply. Required."""

    @overload
    def __init__(
        self,
        *,
        path: str,
        diff: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ApplyPatchFileOperationType.UPDATE_FILE # type: ignore
+
+
class ApplyPatchUpdateFileOperationParam(ApplyPatchOperationParam, discriminator="update_file"):
    """Apply patch update file operation.

    :ivar type: The operation type. Always ``update_file``. Required. UPDATE_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.UPDATE_FILE
    :ivar path: Path of the file to update relative to the workspace root. Required.
    :vartype path: str
    :ivar diff: Unified diff content to apply to the existing file. Required.
    :vartype diff: str
    """

    # Discriminator field: narrows the base operation type to this variant's wire value.
    type: Literal[ApplyPatchOperationParamType.UPDATE_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The operation type. Always ``update_file``. Required. UPDATE_FILE."""
    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Path of the file to update relative to the workspace root. Required."""
    diff: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unified diff content to apply to the existing file. Required."""

    @overload
    def __init__(
        self,
        *,
        path: str,
        diff: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ApplyPatchOperationParamType.UPDATE_FILE # type: ignore
+
+
class ApproximateLocation(_Model):
    """An approximate user location; all component fields are optional.

    :ivar type: The type of location approximation. Always ``approximate``. Required. Default value
     is "approximate".
    :vartype type: str
    :ivar country: Country of the location. No further description in the service spec.
    :vartype country: str
    :ivar region: Region of the location. No further description in the service spec.
    :vartype region: str
    :ivar city: City of the location. No further description in the service spec.
    :vartype city: str
    :ivar timezone: Timezone of the location. No further description in the service spec.
    :vartype timezone: str
    """

    # Fixed literal tag (not a polymorphic discriminator) — always "approximate".
    type: Literal["approximate"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of location approximation. Always ``approximate``. Required. Default value is
    \"approximate\"."""
    country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        country: Optional[str] = None,
        region: Optional[str] = None,
        city: Optional[str] = None,
        timezone: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a constant literal; force it regardless of input so the
        # serialized payload is always tagged "approximate".
        self.type: Literal["approximate"] = "approximate"
+
+
class AutoCodeInterpreterToolParam(_Model):
    """Automatic Code Interpreter Tool Parameters.

    :ivar type: Always ``auto``. Required. Default value is "auto".
    :vartype type: str
    :ivar file_ids: An optional list of uploaded files to make available to your code.
    :vartype file_ids: list[str]
    :ivar memory_limit: Known values are: "1g", "4g", "16g", and "64g".
    :vartype memory_limit: str or ~azure.ai.responses.server.sdk.models.models.ContainerMemoryLimit
    :ivar network_policy: Container network policy. No further description in the service spec.
    :vartype network_policy:
     ~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyParam
    """

    # Fixed literal tag (not a polymorphic discriminator) — always "auto".
    type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Always ``auto``. Required. Default value is \"auto\"."""
    file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An optional list of uploaded files to make available to your code."""
    memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"1g\", \"4g\", \"16g\", and \"64g\"."""
    # Optional network policy; the service spec provides no description for it.
    network_policy: Optional["_models.ContainerNetworkPolicyParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        file_ids: Optional[list[str]] = None,
        memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = None,
        network_policy: Optional["_models.ContainerNetworkPolicyParam"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a constant literal; force it regardless of input so the
        # serialized payload is always tagged "auto".
        self.type: Literal["auto"] = "auto"
+
+
class AzureAISearchTool(Tool, discriminator="azure_ai_search"):
    """The input definition information for an Azure AI search tool as used to configure an agent.

    :ivar type: The object type, which is always 'azure_ai_search'. Required. AZURE_AI_SEARCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_AI_SEARCH
    :ivar azure_ai_search: The azure ai search index resource. Required.
    :vartype azure_ai_search:
     ~azure.ai.responses.server.sdk.models.models.AzureAISearchToolResource
    """

    # Discriminator field: narrows ``Tool.type`` to this variant's wire value.
    type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'azure_ai_search'. Required. AZURE_AI_SEARCH."""
    azure_ai_search: "_models.AzureAISearchToolResource" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The azure ai search index resource. Required."""

    @overload
    def __init__(
        self,
        *,
        azure_ai_search: "_models.AzureAISearchToolResource",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ToolType.AZURE_AI_SEARCH # type: ignore
+
+
class AzureAISearchToolCall(OutputItem, discriminator="azure_ai_search_call"):
    """An Azure AI Search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_AI_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_AI_SEARCH_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator field: narrows ``OutputItem.type`` to this variant's wire value.
    type: Literal[OutputItemType.AZURE_AI_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_AI_SEARCH_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = OutputItemType.AZURE_AI_SEARCH_CALL # type: ignore
+
+
class AzureAISearchToolCallOutput(OutputItem, discriminator="azure_ai_search_call_output"):
    """The output of an Azure AI Search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_AI_SEARCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_AI_SEARCH_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the Azure AI Search tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator field: narrows ``OutputItem.type`` to this variant's wire value.
    type: Literal[OutputItemType.AZURE_AI_SEARCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_AI_SEARCH_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    # Union alias: dict, str, or list — see ``_types.ToolCallOutputContent``.
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Azure AI Search tool call. Is one of the following types: {str: Any}, str,
    [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = OutputItemType.AZURE_AI_SEARCH_CALL_OUTPUT # type: ignore
+
+
class AzureAISearchToolResource(_Model):
    """A set of index resources used by the ``azure_ai_search`` tool.

    :ivar indexes: The indices attached to this agent. There can be a maximum of 1 index resource
     attached to the agent. Required.
    :vartype indexes: list[~azure.ai.responses.server.sdk.models.models.AISearchIndexResource]
    """

    indexes: list["_models.AISearchIndexResource"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The indices attached to this agent. There can be a maximum of 1 index resource attached to the
    agent. Required."""

    @overload
    def __init__(
        self,
        *,
        indexes: list["_models.AISearchIndexResource"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Delegates entirely to the base model: accepts either keyword fields
        # (see the first overload) or a single raw JSON mapping.
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionBinding(_Model):
    """A binding that connects an Azure Function to a storage queue.

    :ivar type: The type of binding, which is always 'storage_queue'. Required. Default value is
     "storage_queue".
    :vartype type: str
    :ivar storage_queue: Storage queue. Required.
    :vartype storage_queue: ~azure.ai.responses.server.sdk.models.models.AzureFunctionStorageQueue
    """

    # Fixed literal tag (not a polymorphic discriminator) — always "storage_queue".
    type: Literal["storage_queue"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of binding, which is always 'storage_queue'. Required. Default value is
    \"storage_queue\"."""
    storage_queue: "_models.AzureFunctionStorageQueue" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Storage queue. Required."""

    @overload
    def __init__(
        self,
        *,
        storage_queue: "_models.AzureFunctionStorageQueue",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a constant literal; force it regardless of input so the
        # serialized payload is always tagged "storage_queue".
        self.type: Literal["storage_queue"] = "storage_queue"
+
+
class AzureFunctionDefinition(_Model):
    """The definition of Azure function.

    :ivar function: The definition of azure function and its parameters. Required.
    :vartype function: ~azure.ai.responses.server.sdk.models.models.AzureFunctionDefinitionFunction
    :ivar input_binding: Input storage queue. The queue storage trigger runs a function as messages
     are added to it. Required.
    :vartype input_binding: ~azure.ai.responses.server.sdk.models.models.AzureFunctionBinding
    :ivar output_binding: Output storage queue. The function writes output to this queue when the
     input items are processed. Required.
    :vartype output_binding: ~azure.ai.responses.server.sdk.models.models.AzureFunctionBinding
    """

    function: "_models.AzureFunctionDefinitionFunction" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The definition of azure function and its parameters. Required."""
    input_binding: "_models.AzureFunctionBinding" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Input storage queue. The queue storage trigger runs a function as messages are added to it.
    Required."""
    output_binding: "_models.AzureFunctionBinding" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Output storage queue. The function writes output to this queue when the input items are
    processed. Required."""

    @overload
    def __init__(
        self,
        *,
        function: "_models.AzureFunctionDefinitionFunction",
        input_binding: "_models.AzureFunctionBinding",
        output_binding: "_models.AzureFunctionBinding",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Delegates entirely to the base model: accepts either keyword fields
        # (see the first overload) or a single raw JSON mapping.
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionDefinitionFunction(_Model):
    """Callable-function portion of an Azure Function definition: name, optional
    description, and JSON Schema parameters.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does, used by the model to choose when
     and how to call the function.
    :vartype description: str
    :ivar parameters: The parameters the functions accepts, described as a JSON Schema object.
     Required.
    :vartype parameters: dict[str, any]
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the function does, used by the model to choose when and how to call the
    function."""
    parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The parameters the functions accepts, described as a JSON Schema object. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        parameters: dict[str, Any],
        description: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Delegates entirely to the base model: accepts either keyword fields
        # (see the first overload) or a single raw JSON mapping.
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionStorageQueue(_Model):
    """The structure for keeping storage queue name and URI.

    :ivar queue_service_endpoint: URI to the Azure Storage Queue service allowing you to manipulate
     a queue. Required.
    :vartype queue_service_endpoint: str
    :ivar queue_name: The name of an Azure function storage queue. Required.
    :vartype queue_name: str
    """

    queue_service_endpoint: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """URI to the Azure Storage Queue service allowing you to manipulate a queue. Required."""
    queue_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of an Azure function storage queue. Required."""

    @overload
    def __init__(
        self,
        *,
        queue_service_endpoint: str,
        queue_name: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Delegates entirely to the base model: accepts either keyword fields
        # (see the first overload) or a single raw JSON mapping.
        super().__init__(*args, **kwargs)
+
+
class AzureFunctionTool(Tool, discriminator="azure_function"):
    """The input definition information for an Azure Function Tool, as used to configure an Agent.

    :ivar type: The object type, which is always 'azure_function'. Required. AZURE_FUNCTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_FUNCTION
    :ivar azure_function: The Azure Function Tool definition. Required.
    :vartype azure_function: ~azure.ai.responses.server.sdk.models.models.AzureFunctionDefinition
    """

    # Discriminator field: narrows ``Tool.type`` to this variant's wire value.
    # (Docs previously said 'browser_automation' — a copy-paste error; the
    # discriminator declared here is "azure_function".)
    type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'azure_function'. Required. AZURE_FUNCTION."""
    azure_function: "_models.AzureFunctionDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The Azure Function Tool definition. Required."""

    @overload
    def __init__(
        self,
        *,
        azure_function: "_models.AzureFunctionDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = ToolType.AZURE_FUNCTION # type: ignore
+
+
class AzureFunctionToolCall(OutputItem, discriminator="azure_function_call"):
    """An Azure Function tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_FUNCTION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_FUNCTION_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the Azure Function being called. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator field: narrows ``OutputItem.type`` to this variant's wire value.
    type: Literal[OutputItemType.AZURE_FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_FUNCTION_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the Azure Function being called. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base initialization so the instance always
        # carries the correct wire value, even when built from a raw mapping.
        self.type = OutputItemType.AZURE_FUNCTION_CALL # type: ignore
+
+
class AzureFunctionToolCallOutput(OutputItem, discriminator="azure_function_call_output"):
    """The output of an Azure Function tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. AZURE_FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.AZURE_FUNCTION_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the Azure Function that was called. Required.
    :vartype name: str
    :ivar output: The output from the Azure Function tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.AZURE_FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. AZURE_FUNCTION_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the Azure Function that was called. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Azure Function tool call. Is one of the following types: {str: Any}, str,
    [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.AZURE_FUNCTION_CALL_OUTPUT # type: ignore
+
+
class BingCustomSearchConfiguration(_Model):
    """A bing custom search configuration.

    :ivar project_connection_id: Project connection id for grounding with bing search. Required.
    :vartype project_connection_id: str
    :ivar instance_name: Name of the custom configuration instance given to config. Required.
    :vartype instance_name: str
    :ivar market: The market where the results come from.
    :vartype market: str
    :ivar set_lang: The language to use for user interface strings when calling Bing API.
    :vartype set_lang: str
    :ivar count: The number of search results to return in the bing api response.
    :vartype count: int
    :ivar freshness: Filter search results by a specific time range. See the Bing Search API
     query parameter documentation for accepted values.
    :vartype freshness: str
    """

    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Project connection id for grounding with bing search. Required."""
    instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Name of the custom configuration instance given to config. Required."""
    market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The market where the results come from."""
    set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The language to use for user interface strings when calling Bing API."""
    count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The number of search results to return in the bing api response."""
    freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Filter search results by a specific time range. See the Bing Search API query parameter
    documentation for accepted values."""

    # NOTE(review): the generated docs originally contained an RST link with an empty
    # target ("See `accepted values here `_."); reworded to avoid broken markup.
    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        project_connection_id: str,
        instance_name: str,
        market: Optional[str] = None,
        set_lang: Optional[str] = None,
        count: Optional[int] = None,
        freshness: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class BingCustomSearchPreviewTool(Tool, discriminator="bing_custom_search_preview"):
    """The input definition information for a Bing custom search tool as used to configure an agent.

    :ivar type: The object type, which is always 'bing_custom_search_preview'. Required.
     BING_CUSTOM_SEARCH_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_CUSTOM_SEARCH_PREVIEW
    :ivar bing_custom_search_preview: The bing custom search tool parameters. Required.
    :vartype bing_custom_search_preview:
     ~azure.ai.responses.server.sdk.models.models.BingCustomSearchToolParameters
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'bing_custom_search_preview'. Required.
    BING_CUSTOM_SEARCH_PREVIEW."""
    bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The bing custom search tool parameters. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        bing_custom_search_preview: "_models.BingCustomSearchToolParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore
+
+
class BingCustomSearchToolCall(OutputItem, discriminator="bing_custom_search_preview_call"):
    """A Bing custom search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. BING_CUSTOM_SEARCH_PREVIEW_CALL.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.BING_CUSTOM_SEARCH_PREVIEW_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. BING_CUSTOM_SEARCH_PREVIEW_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL # type: ignore
+
+
class BingCustomSearchToolCallOutput(OutputItem, discriminator="bing_custom_search_preview_call_output"):
    """The output of a Bing custom search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the Bing custom search tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Bing custom search tool call. Is one of the following types: {str: Any},
    str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.BING_CUSTOM_SEARCH_PREVIEW_CALL_OUTPUT # type: ignore
+
+
class BingCustomSearchToolParameters(_Model):
    """The bing custom search tool parameters.

    :ivar search_configurations: The project connections attached to this tool. There can be a
     maximum of 1 connection resource attached to the tool. Required.
    :vartype search_configurations:
     list[~azure.ai.responses.server.sdk.models.models.BingCustomSearchConfiguration]
    """

    # Typed as a list even though the docs state at most one configuration is allowed;
    # the cardinality limit is enforced by the service, not by this model.
    search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The project connections attached to this tool. There can be a maximum of 1 connection resource
    attached to the tool. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        search_configurations: list["_models.BingCustomSearchConfiguration"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class BingGroundingSearchConfiguration(_Model):
    """Search configuration for Bing Grounding.

    :ivar project_connection_id: Project connection id for grounding with bing search. Required.
    :vartype project_connection_id: str
    :ivar market: The market where the results come from.
    :vartype market: str
    :ivar set_lang: The language to use for user interface strings when calling Bing API.
    :vartype set_lang: str
    :ivar count: The number of search results to return in the bing api response.
    :vartype count: int
    :ivar freshness: Filter search results by a specific time range. See the Bing Search API
     query parameter documentation for accepted values.
    :vartype freshness: str
    """

    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Project connection id for grounding with bing search. Required."""
    market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The market where the results come from."""
    set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The language to use for user interface strings when calling Bing API."""
    count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The number of search results to return in the bing api response."""
    freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Filter search results by a specific time range. See the Bing Search API query parameter
    documentation for accepted values."""

    # NOTE(review): the generated docs originally contained an RST link with an empty
    # target ("See `accepted values here `_."); reworded to avoid broken markup.
    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        project_connection_id: str,
        market: Optional[str] = None,
        set_lang: Optional[str] = None,
        count: Optional[int] = None,
        freshness: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class BingGroundingSearchToolParameters(_Model):
    """The bing grounding search tool parameters.

    :ivar search_configurations: The search configurations attached to this tool. There can be a
     maximum of 1 search configuration resource attached to the tool. Required.
    :vartype search_configurations:
     list[~azure.ai.responses.server.sdk.models.models.BingGroundingSearchConfiguration]
    """

    # Typed as a list even though the docs state at most one configuration is allowed;
    # the cardinality limit is enforced by the service, not by this model.
    search_configurations: list["_models.BingGroundingSearchConfiguration"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The search configurations attached to this tool. There can be a maximum of 1 search
    configuration resource attached to the tool. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        search_configurations: list["_models.BingGroundingSearchConfiguration"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class BingGroundingTool(Tool, discriminator="bing_grounding"):
    """The input definition information for a bing grounding search tool as used to configure an
    agent.

    :ivar type: The object type, which is always 'bing_grounding'. Required. BING_GROUNDING.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_GROUNDING
    :ivar bing_grounding: The bing grounding search tool parameters. Required.
    :vartype bing_grounding:
     ~azure.ai.responses.server.sdk.models.models.BingGroundingSearchToolParameters
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'bing_grounding'. Required. BING_GROUNDING."""
    bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The bing grounding search tool parameters. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        bing_grounding: "_models.BingGroundingSearchToolParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = ToolType.BING_GROUNDING # type: ignore
+
+
class BingGroundingToolCall(OutputItem, discriminator="bing_grounding_call"):
    """A Bing grounding tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. BING_GROUNDING_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_GROUNDING_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.BING_GROUNDING_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. BING_GROUNDING_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.BING_GROUNDING_CALL # type: ignore
+
+
class BingGroundingToolCallOutput(OutputItem, discriminator="bing_grounding_call_output"):
    """The output of a Bing grounding tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. BING_GROUNDING_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BING_GROUNDING_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the Bing grounding tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.BING_GROUNDING_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. BING_GROUNDING_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Bing grounding tool call. Is one of the following types: {str: Any}, str,
    [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.BING_GROUNDING_CALL_OUTPUT # type: ignore
+
+
class BrowserAutomationPreviewTool(Tool, discriminator="browser_automation_preview"):
    """The input definition information for a Browser Automation Tool, as used to configure an Agent.

    :ivar type: The object type, which is always 'browser_automation_preview'. Required.
     BROWSER_AUTOMATION_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.BROWSER_AUTOMATION_PREVIEW
    :ivar browser_automation_preview: The Browser Automation Tool parameters. Required.
    :vartype browser_automation_preview:
     ~azure.ai.responses.server.sdk.models.models.BrowserAutomationToolParameters
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'browser_automation_preview'. Required.
    BROWSER_AUTOMATION_PREVIEW."""
    browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The Browser Automation Tool parameters. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        browser_automation_preview: "_models.BrowserAutomationToolParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = ToolType.BROWSER_AUTOMATION_PREVIEW # type: ignore
+
+
class BrowserAutomationToolCall(OutputItem, discriminator="browser_automation_preview_call"):
    """A browser automation tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. BROWSER_AUTOMATION_PREVIEW_CALL.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.BROWSER_AUTOMATION_PREVIEW_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. BROWSER_AUTOMATION_PREVIEW_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL # type: ignore
+
+
class BrowserAutomationToolCallOutput(OutputItem, discriminator="browser_automation_preview_call_output"):
    """The output of a browser automation tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the browser automation tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Narrows the inherited discriminator field to this subtype's single literal value.
    type: Literal[OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the browser automation tool call. Is one of the following types: {str: Any},
    str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Assigned after base init so the discriminator always carries this subtype's
        # value, regardless of what the input mapping contained.
        self.type = OutputItemType.BROWSER_AUTOMATION_PREVIEW_CALL_OUTPUT # type: ignore
+
+
class BrowserAutomationToolConnectionParameters(_Model):  # pylint: disable=name-too-long
    """Definition of input parameters for the connection used by the Browser Automation Tool.

    :ivar project_connection_id: The ID of the project connection to your Azure Playwright
     resource. Required.
    :vartype project_connection_id: str
    """

    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the project connection to your Azure Playwright resource. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        project_connection_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class BrowserAutomationToolParameters(_Model):
    """Definition of input parameters for the Browser Automation Tool.

    :ivar connection: The project connection parameters associated with the Browser Automation
     Tool. Required.
    :vartype connection:
     ~azure.ai.responses.server.sdk.models.models.BrowserAutomationToolConnectionParameters
    """

    connection: "_models.BrowserAutomationToolConnectionParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The project connection parameters associated with the Browser Automation Tool. Required."""

    # Construction: either keyword arguments (first overload) or a single raw JSON mapping.
    @overload
    def __init__(
        self,
        *,
        connection: "_models.BrowserAutomationToolConnectionParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CaptureStructuredOutputsTool(Tool, discriminator="capture_structured_outputs"):
    """A tool for capturing structured outputs.

    :ivar type: The type of the tool. Always ``capture_structured_outputs``. Required.
     CAPTURE_STRUCTURED_OUTPUTS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CAPTURE_STRUCTURED_OUTPUTS
    :ivar outputs: The structured outputs to capture from the model. Required.
    :vartype outputs: ~azure.ai.responses.server.sdk.models.models.StructuredOutputDefinition
    """

    # Narrows the base Tool's str-typed discriminator to the one legal value;
    # "type: ignore" silences the resulting override complaint.
    type: Literal[ToolType.CAPTURE_STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the tool. Always ``capture_structured_outputs``. Required.
     CAPTURE_STRUCTURED_OUTPUTS."""
    outputs: "_models.StructuredOutputDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The structured outputs to capture from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        outputs: "_models.StructuredOutputDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct "type".
        self.type = ToolType.CAPTURE_STRUCTURED_OUTPUTS  # type: ignore
+
+
class MemoryItem(_Model):
    """A single memory item stored in the memory store, containing content and metadata.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ChatSummaryMemoryItem, UserProfileMemoryItem

    :ivar memory_id: The unique ID of the memory item. Required.
    :vartype memory_id: str
    :ivar updated_at: The last update time of the memory item. Required.
    :vartype updated_at: ~datetime.datetime
    :ivar scope: The namespace that logically groups and isolates memories, such as a user ID.
     Required.
    :vartype scope: str
    :ivar content: The content of the memory. Required.
    :vartype content: str
    :ivar kind: The kind of the memory item. Required. Known values are: "user_profile" and
     "chat_summary".
    :vartype kind: str or ~azure.ai.responses.server.sdk.models.models.MemoryItemKind
    """

    # Discriminator registry (kind value -> subclass) — presumably populated by
    # _Model's subclass-registration machinery; verify in the model base module.
    __mapping__: dict[str, _Model] = {}
    memory_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the memory item. Required."""
    # Serialized as a Unix timestamp in seconds, per format="unix-timestamp".
    updated_at: datetime.datetime = rest_field(
        visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
    )
    """The last update time of the memory item. Required."""
    scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The namespace that logically groups and isolates memories, such as a user ID. Required."""
    content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the memory. Required."""
    kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"])
    """The kind of the memory item. Required. Known values are: \"user_profile\" and \"chat_summary\"."""

    @overload
    def __init__(
        self,
        *,
        memory_id: str,
        updated_at: datetime.datetime,
        scope: str,
        content: str,
        kind: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ChatSummaryMemoryItem(MemoryItem, discriminator="chat_summary"):
    """A memory item containing a summary extracted from conversations.

    :ivar memory_id: The unique ID of the memory item. Required.
    :vartype memory_id: str
    :ivar updated_at: The last update time of the memory item. Required.
    :vartype updated_at: ~datetime.datetime
    :ivar scope: The namespace that logically groups and isolates memories, such as a user ID.
     Required.
    :vartype scope: str
    :ivar content: The content of the memory. Required.
    :vartype content: str
    :ivar kind: The kind of the memory item. Required. Summary of chat conversations.
    :vartype kind: str or ~azure.ai.responses.server.sdk.models.models.CHAT_SUMMARY
    """

    # Narrows MemoryItem's str-typed "kind" discriminator to the single value
    # this subclass represents.
    kind: Literal[MemoryItemKind.CHAT_SUMMARY] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The kind of the memory item. Required. Summary of chat conversations."""

    @overload
    def __init__(
        self,
        *,
        memory_id: str,
        updated_at: datetime.datetime,
        scope: str,
        content: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry kind == "chat_summary".
        self.kind = MemoryItemKind.CHAT_SUMMARY  # type: ignore
+
+
class ComputerAction(_Model):
    """ComputerAction.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ClickParam, DoubleClickAction, DragParam, KeyPressAction, MoveParam, ScreenshotParam,
    ScrollParam, TypeParam, WaitParam

    :ivar type: Required. Known values are: "click", "double_click", "drag", "keypress", "move",
     "screenshot", "scroll", "type", and "wait".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ComputerActionType
    """

    # Discriminator registry (type value -> subclass) — presumably populated by
    # _Model's subclass-registration machinery; verify in the model base module.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"click\", \"double_click\", \"drag\", \"keypress\", \"move\",
     \"screenshot\", \"scroll\", \"type\", and \"wait\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ClickParam(ComputerAction, discriminator="click"):
    """Click.

    :ivar type: Specifies the event type. For a click action, this property is always ``click``.
     Required. CLICK.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CLICK
    :ivar button: Indicates which mouse button was pressed during the click. One of ``left``,
     ``right``, ``wheel``, ``back``, or ``forward``. Required. Known values are: "left", "right",
     "wheel", "back", and "forward".
    :vartype button: str or ~azure.ai.responses.server.sdk.models.models.ClickButtonType
    :ivar x: The x-coordinate where the click occurred. Required.
    :vartype x: int
    :ivar y: The y-coordinate where the click occurred. Required.
    :vartype y: int
    """

    # Narrows ComputerAction's str-typed discriminator to the "click" value.
    type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a click action, this property is always ``click``. Required.
     CLICK."""
    button: Union[str, "_models.ClickButtonType"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Indicates which mouse button was pressed during the click. One of ``left``, ``right``,
     ``wheel``, ``back``, or ``forward``. Required. Known values are: \"left\", \"right\",
     \"wheel\", \"back\", and \"forward\"."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate where the click occurred. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate where the click occurred. Required."""

    @overload
    def __init__(
        self,
        *,
        button: Union[str, "_models.ClickButtonType"],
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry type == "click".
        self.type = ComputerActionType.CLICK  # type: ignore
+
+
class CodeInterpreterOutputImage(_Model):
    """Code interpreter output image.

    :ivar type: The type of the output. Always ``image``. Required. Default value is "image".
    :vartype type: str
    :ivar url: The URL of the image output from the code interpreter. Required.
    :vartype url: str
    """

    type: Literal["image"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the output. Always ``image``. Required. Default value is \"image\"."""
    url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the image output from the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        url: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant tag field: always "image", regardless of constructor input.
        self.type: Literal["image"] = "image"
+
+
class CodeInterpreterOutputLogs(_Model):
    """Code interpreter output logs.

    :ivar type: The type of the output. Always ``logs``. Required. Default value is "logs".
    :vartype type: str
    :ivar logs: The logs output from the code interpreter. Required.
    :vartype logs: str
    """

    type: Literal["logs"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the output. Always ``logs``. Required. Default value is \"logs\"."""
    logs: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The logs output from the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        logs: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant tag field: always "logs", regardless of constructor input.
        self.type: Literal["logs"] = "logs"
+
+
class CodeInterpreterTool(Tool, discriminator="code_interpreter"):
    """Code interpreter.

    :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required.
     CODE_INTERPRETER.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER
    :ivar container: The code interpreter container. Can be a container ID or an object that
     specifies uploaded file IDs to make available to your code, along with an optional
     ``memory_limit`` setting. If not provided, the service assumes auto. Is either a str type or a
     AutoCodeInterpreterToolParam type.
    :vartype container: str or
     ~azure.ai.responses.server.sdk.models.models.AutoCodeInterpreterToolParam
    """

    # Narrows Tool's str-typed discriminator to the "code_interpreter" value.
    type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the code interpreter tool. Always ``code_interpreter``. Required. CODE_INTERPRETER."""
    container: Optional[Union[str, "_models.AutoCodeInterpreterToolParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The code interpreter container. Can be a container ID or an object that specifies uploaded file
     IDs to make available to your code, along with an optional ``memory_limit`` setting. If not
     provided, the service assumes auto. Is either a str type or a AutoCodeInterpreterToolParam
     type."""

    @overload
    def __init__(
        self,
        *,
        container: Optional[Union[str, "_models.AutoCodeInterpreterToolParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry type == "code_interpreter".
        self.type = ToolType.CODE_INTERPRETER  # type: ignore
+
+
class CompactionSummaryItemParam(Item, discriminator="compaction"):
    """Compaction item.

    :ivar id:
    :vartype id: str
    :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPACTION
    :ivar encrypted_content: The encrypted content of the compaction summary. Required.
    :vartype encrypted_content: str
    """

    # Optional item identifier (may be omitted when creating the item).
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    type: Literal[ItemType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``compaction``. Required. COMPACTION."""
    encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The encrypted content of the compaction summary. Required."""

    @overload
    def __init__(
        self,
        *,
        encrypted_content: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry type == "compaction".
        self.type = ItemType.COMPACTION  # type: ignore
+
+
class CompactResource(_Model):
    """The compacted response object.

    :ivar id: The unique identifier for the compacted response. Required.
    :vartype id: str
    :ivar object: The object type. Always ``response.compaction``. Required. Default value is
     "response.compaction".
    :vartype object: str
    :ivar output: The compacted list of output items. Required.
    :vartype output: list[~azure.ai.responses.server.sdk.models.models.ItemField]
    :ivar created_at: Unix timestamp (in seconds) when the compacted conversation was created.
     Required.
    :vartype created_at: ~datetime.datetime
    :ivar usage: Token accounting for the compaction pass, including cached, reasoning, and total
     tokens. Required.
    :vartype usage: ~azure.ai.responses.server.sdk.models.models.ResponseUsage
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier for the compacted response. Required."""
    object: Literal["response.compaction"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The object type. Always ``response.compaction``. Required. Default value is
     \"response.compaction\"."""
    output: list["_models.ItemField"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The compacted list of output items. Required."""
    # Serialized as a Unix timestamp in seconds, per format="unix-timestamp".
    created_at: datetime.datetime = rest_field(
        visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
    )
    """Unix timestamp (in seconds) when the compacted conversation was created. Required."""
    usage: "_models.ResponseUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Token accounting for the compaction pass, including cached, reasoning, and total tokens.
     Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        output: list["_models.ItemField"],
        created_at: datetime.datetime,
        usage: "_models.ResponseUsage",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant tag field: always "response.compaction" (not a constructor kwarg).
        self.object: Literal["response.compaction"] = "response.compaction"
+
+
class ComparisonFilter(_Model):
    """Comparison Filter.

    :ivar type: Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``,
     ``lte``, ``in``, ``nin``.

     * `eq`: equals
     * `ne`: not equal
     * `gt`: greater than
     * `gte`: greater than or equal
     * `lt`: less than
     * `lte`: less than or equal
     * `in`: in
     * `nin`: not in. Required. Is one of the following types: Literal["eq"], Literal["ne"],
     Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"]
    :vartype type: str or str or str or str or str or str
    :ivar key: The key to compare against the value. Required.
    :vartype key: str
    :ivar value: The value to compare against the attribute key; supports string, number, or
     boolean types. Required. Is one of the following types: str, int, bool, [Union[str, int]]
    :vartype value: str or int or bool or list[str or int]
    """

    # NOTE(review): the docstring advertises "in"/"nin" operators but this Literal
    # only permits eq/ne/gt/gte/lt/lte — likely a spec/codegen mismatch. Confirm
    # against the service spec before widening the annotation.
    type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, ``lte``, ``in``,
     ``nin``.

     * `eq`: equals
     * `ne`: not equal
     * `gt`: greater than
     * `gte`: greater than or equal
     * `lt`: less than
     * `lte`: less than or equal
     * `in`: in
     * `nin`: not in. Required. Is one of the following types: Literal[\"eq\"],
     Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], Literal[\"lte\"]"""
    key: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The key to compare against the value. Required."""
    value: Union[str, int, bool, list[Union[str, int]]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The value to compare against the attribute key; supports string, number, or boolean types.
     Required. Is one of the following types: str, int, bool, [Union[str, int]]"""

    @overload
    def __init__(
        self,
        *,
        type: Literal["eq", "ne", "gt", "gte", "lt", "lte"],
        key: str,
        value: Union[str, int, bool, list[Union[str, int]]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CompoundFilter(_Model):
    """Compound Filter.

    :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or
     a Literal["or"] type.
    :vartype type: str or str
    :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or
     ``CompoundFilter``. Required.
    :vartype filters: list[~azure.ai.responses.server.sdk.models.models.ComparisonFilter or any]
    """

    type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Type of operation: ``and`` or ``or``. Required. Is either a Literal[\"and\"] type or a
     Literal[\"or\"] type."""
    # NOTE(review): Union[..., Any] collapses to Any for type checkers; the Any arm
    # presumably stands in for nested CompoundFilter values (codegen avoiding the
    # self-reference) — confirm against the TypeSpec definition.
    filters: list[Union["_models.ComparisonFilter", Any]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required."""

    @overload
    def __init__(
        self,
        *,
        type: Literal["and", "or"],
        filters: list[Union["_models.ComparisonFilter", Any]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ComputerCallOutputItemParam(Item, discriminator="computer_call_output"):
    """Computer tool call output.

    :ivar id:
    :vartype id: str
    :ivar call_id: The ID of the computer tool call that produced the output. Required.
    :vartype call_id: str
    :ivar type: The type of the computer tool call output. Always ``computer_call_output``.
     Required. COMPUTER_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL_OUTPUT
    :ivar output: Required.
    :vartype output: ~azure.ai.responses.server.sdk.models.models.ComputerScreenshotImage
    :ivar acknowledged_safety_checks:
    :vartype acknowledged_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.FunctionCallItemStatus
    """

    # Optional item identifier (may be omitted when creating the item).
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the computer tool call that produced the output. Required."""
    type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer tool call output. Always ``computer_call_output``. Required.
     COMPUTER_CALL_OUTPUT."""
    output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    # Safety checks the caller has acknowledged for this computer call, if any.
    acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: "_models.ComputerScreenshotImage",
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None,
        status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry type == "computer_call_output".
        self.type = ItemType.COMPUTER_CALL_OUTPUT  # type: ignore
+
+
class ComputerCallSafetyCheckParam(_Model):
    """A pending safety check for the computer call.

    :ivar id: The ID of the pending safety check. Required.
    :vartype id: str
    :ivar code:
    :vartype code: str
    :ivar message:
    :vartype message: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the pending safety check. Required."""
    # Optional machine-readable code and human-readable message for the check.
    code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        code: Optional[str] = None,
        message: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MessageContent(_Model):
    """A content part that makes up an input or output item.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ComputerScreenshotContent, MessageContentInputFileContent, MessageContentInputImageContent,
    MessageContentInputTextContent, MessageContentOutputTextContent,
    MessageContentReasoningTextContent, MessageContentRefusalContent, SummaryTextContent,
    TextContent

    :ivar type: Required. Known values are: "input_text", "output_text", "text", "summary_text",
     "reasoning_text", "refusal", "input_image", "computer_screenshot", and "input_file".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MessageContentType
    """

    # Discriminator registry (type value -> subclass) — presumably populated by
    # _Model's subclass-registration machinery; verify in the model base module.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"input_text\", \"output_text\", \"text\", \"summary_text\",
     \"reasoning_text\", \"refusal\", \"input_image\", \"computer_screenshot\", and \"input_file\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ComputerScreenshotContent(MessageContent, discriminator="computer_screenshot"):
    """Computer screenshot.

    :ivar type: Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. COMPUTER_SCREENSHOT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_SCREENSHOT
    :ivar image_url: Required.
    :vartype image_url: str
    :ivar file_id: Required.
    :vartype file_id: str
    """

    # Narrows MessageContent's str-typed discriminator to "computer_screenshot".
    type: Literal[MessageContentType.COMPUTER_SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. COMPUTER_SCREENSHOT."""
    image_url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        image_url: str,
        file_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry type == "computer_screenshot".
        self.type = MessageContentType.COMPUTER_SCREENSHOT  # type: ignore
+
+
class ComputerScreenshotImage(_Model):
    """A computer screenshot image used with the computer use tool.

    :ivar type: Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. Default value is "computer_screenshot".
    :vartype type: str
    :ivar image_url: The URL of the screenshot image.
    :vartype image_url: str
    :ivar file_id: The identifier of an uploaded file that contains the screenshot.
    :vartype file_id: str
    """

    type: Literal["computer_screenshot"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Specifies the event type. For a computer screenshot, this property is always set to
     ``computer_screenshot``. Required. Default value is \"computer_screenshot\"."""
    # Both fields are optional: the image may be referenced by URL or by file ID.
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the screenshot image."""
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of an uploaded file that contains the screenshot."""

    @overload
    def __init__(
        self,
        *,
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Constant tag field: always "computer_screenshot" (not a constructor kwarg).
        self.type: Literal["computer_screenshot"] = "computer_screenshot"
+
+
class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"):
    """Computer use preview.

    :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required.
     COMPUTER_USE_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_USE_PREVIEW
    :ivar environment: The type of computer environment to control. Required. Known values are:
     "windows", "mac", "linux", "ubuntu", and "browser".
    :vartype environment: str or ~azure.ai.responses.server.sdk.models.models.ComputerEnvironment
    :ivar display_width: The width of the computer display. Required.
    :vartype display_width: int
    :ivar display_height: The height of the computer display. Required.
    :vartype display_height: int
    """

    # Narrows Tool's str-typed discriminator to "computer_use_preview".
    type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer use tool. Always ``computer_use_preview``. Required.
     COMPUTER_USE_PREVIEW."""
    environment: Union[str, "_models.ComputerEnvironment"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of computer environment to control. Required. Known values are: \"windows\", \"mac\",
     \"linux\", \"ubuntu\", and \"browser\"."""
    display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The width of the computer display. Required."""
    display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The height of the computer display. Required."""

    @overload
    def __init__(
        self,
        *,
        environment: Union[str, "_models.ComputerEnvironment"],
        display_width: int,
        display_height: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry type == "computer_use_preview".
        self.type = ToolType.COMPUTER_USE_PREVIEW  # type: ignore
+
+
class FunctionShellToolParamEnvironment(_Model):
    """FunctionShellToolParamEnvironment.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ContainerAutoParam, FunctionShellToolParamEnvironmentContainerReferenceParam,
    FunctionShellToolParamEnvironmentLocalEnvironmentParam

    :ivar type: Required. Known values are: "container_auto", "local", and "container_reference".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellToolParamEnvironmentType
    """

    # Discriminator registry (type value -> subclass) — presumably populated by
    # _Model's subclass-registration machinery; verify in the model base module.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"container_auto\", \"local\", and \"container_reference\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContainerAutoParam(FunctionShellToolParamEnvironment, discriminator="container_auto"):
    """ContainerAutoParam.

    :ivar type: Automatically creates a container for this request. Required. CONTAINER_AUTO.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_AUTO
    :ivar file_ids: An optional list of uploaded files to make available to your code.
    :vartype file_ids: list[str]
    :ivar memory_limit: Known values are: "1g", "4g", "16g", and "64g".
    :vartype memory_limit: str or ~azure.ai.responses.server.sdk.models.models.ContainerMemoryLimit
    :ivar skills: An optional list of skills referenced by id or inline data.
    :vartype skills: list[~azure.ai.responses.server.sdk.models.models.ContainerSkill]
    :ivar network_policy:
    :vartype network_policy:
     ~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyParam
    """

    type: Literal[FunctionShellToolParamEnvironmentType.CONTAINER_AUTO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Automatically creates a container for this request. Required. CONTAINER_AUTO."""
    file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An optional list of uploaded files to make available to your code."""
    memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"1g\", \"4g\", \"16g\", and \"64g\"."""
    skills: Optional[list["_models.ContainerSkill"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An optional list of skills referenced by id or inline data."""
    network_policy: Optional["_models.ContainerNetworkPolicyParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        file_ids: Optional[list[str]] = None,
        memory_limit: Optional[Union[str, "_models.ContainerMemoryLimit"]] = None,
        skills: Optional[list["_models.ContainerSkill"]] = None,
        network_policy: Optional["_models.ContainerNetworkPolicyParam"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so it is always "container_auto", even when the
        # instance is built from a raw mapping that omits it.
        self.type = FunctionShellToolParamEnvironmentType.CONTAINER_AUTO  # type: ignore
+
+
class ContainerFileCitationBody(Annotation, discriminator="container_file_citation"):
    """Container file citation.

    :ivar type: The type of the container file citation. Always ``container_file_citation``.
     Required. CONTAINER_FILE_CITATION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_FILE_CITATION
    :ivar container_id: The ID of the container file. Required.
    :vartype container_id: str
    :ivar file_id: The ID of the file. Required.
    :vartype file_id: str
    :ivar start_index: The index of the first character of the container file citation in the
     message. Required.
    :vartype start_index: int
    :ivar end_index: The index of the last character of the container file citation in the message.
     Required.
    :vartype end_index: int
    :ivar filename: The filename of the container file cited. Required.
    :vartype filename: str
    """

    type: Literal[AnnotationType.CONTAINER_FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the container file citation. Always ``container_file_citation``. Required.
     CONTAINER_FILE_CITATION."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the container file. Required."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the file. Required."""
    # start_index/end_index are character offsets into the message text; whether the
    # range is inclusive of end_index is not shown here — TODO confirm with the spec.
    start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the first character of the container file citation in the message. Required."""
    end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the last character of the container file citation in the message. Required."""
    filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The filename of the container file cited. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
        file_id: str,
        start_index: int,
        end_index: int,
        filename: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator regardless of how the model was constructed.
        self.type = AnnotationType.CONTAINER_FILE_CITATION  # type: ignore
+
+
class ContainerNetworkPolicyParam(_Model):
    """Network access policy for the container.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ContainerNetworkPolicyAllowlistParam, ContainerNetworkPolicyDisabledParam

    :ivar type: Required. Known values are: "disabled" and "allowlist".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyParamType
    """

    # Discriminator registry: subclasses declared with ``discriminator="..."`` are
    # registered here (framework behavior) so deserialization picks the right class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"disabled\" and \"allowlist\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContainerNetworkPolicyAllowlistParam(ContainerNetworkPolicyParam, discriminator="allowlist"):
    """ContainerNetworkPolicyAllowlistParam.

    :ivar type: Allow outbound network access only to specified domains. Always ``allowlist``.
     Required. ALLOWLIST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ALLOWLIST
    :ivar allowed_domains: A list of allowed domains when type is ``allowlist``. Required.
    :vartype allowed_domains: list[str]
    :ivar domain_secrets: Optional domain-scoped secrets for allowlisted domains.
    :vartype domain_secrets:
     list[~azure.ai.responses.server.sdk.models.models.ContainerNetworkPolicyDomainSecretParam]
    """

    type: Literal[ContainerNetworkPolicyParamType.ALLOWLIST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Allow outbound network access only to specified domains. Always ``allowlist``. Required.
     ALLOWLIST."""
    allowed_domains: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A list of allowed domains when type is ``allowlist``. Required."""
    # NOTE(review): unlike every other field in this module, this one is create-only
    # (visibility=["create"]) — presumably so secret values are sent but never echoed
    # back by the service. Confirm against the service spec before changing.
    domain_secrets: Optional[list["_models.ContainerNetworkPolicyDomainSecretParam"]] = rest_field(
        visibility=["create"]
    )
    """Optional domain-scoped secrets for allowlisted domains."""

    @overload
    def __init__(
        self,
        *,
        allowed_domains: list[str],
        domain_secrets: Optional[list["_models.ContainerNetworkPolicyDomainSecretParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator regardless of how the model was constructed.
        self.type = ContainerNetworkPolicyParamType.ALLOWLIST  # type: ignore
+
+
class ContainerNetworkPolicyDisabledParam(ContainerNetworkPolicyParam, discriminator="disabled"):
    """ContainerNetworkPolicyDisabledParam.

    :ivar type: Disable outbound network access. Always ``disabled``. Required. DISABLED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DISABLED
    """

    type: Literal[ContainerNetworkPolicyParamType.DISABLED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Disable outbound network access. Always ``disabled``. Required. DISABLED."""

    # No fields besides the discriminator, so the keyword overload takes no arguments.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator regardless of how the model was constructed.
        self.type = ContainerNetworkPolicyParamType.DISABLED  # type: ignore
+
+
class ContainerNetworkPolicyDomainSecretParam(_Model):
    """A secret value to inject for a specific allowlisted domain.

    :ivar domain: The domain associated with the secret. Required.
    :vartype domain: str
    :ivar name: The name of the secret to inject for the domain. Required.
    :vartype name: str
    :ivar value: The secret value to inject for the domain. Required.
    :vartype value: str
    """

    domain: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The domain associated with the secret. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the secret to inject for the domain. Required."""
    value: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The secret value to inject for the domain. Required."""

    @overload
    def __init__(
        self,
        *,
        domain: str,
        name: str,
        value: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallEnvironment(_Model):
    """FunctionShellCallEnvironment.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ContainerReferenceResource, LocalEnvironmentResource

    :ivar type: Required. Known values are: "local" and "container_reference".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallEnvironmentType
    """

    # Discriminator registry: subclasses declared with ``discriminator="..."`` are
    # registered here (framework behavior) so deserialization picks the right class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"local\" and \"container_reference\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContainerReferenceResource(FunctionShellCallEnvironment, discriminator="container_reference"):
    """Container Reference.

    :ivar type: The environment type. Always ``container_reference``. Required.
     CONTAINER_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_REFERENCE
    :ivar container_id: Required.
    :vartype container_id: str
    """

    type: Literal[FunctionShellCallEnvironmentType.CONTAINER_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The environment type. Always ``container_reference``. Required. CONTAINER_REFERENCE."""
    # Identifier of an existing container to reuse (rather than auto-creating one).
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator regardless of how the model was constructed.
        self.type = FunctionShellCallEnvironmentType.CONTAINER_REFERENCE  # type: ignore
+
+
class ContainerSkill(_Model):
    """ContainerSkill.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    InlineSkillParam, SkillReferenceParam

    :ivar type: Required. Known values are: "skill_reference" and "inline".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ContainerSkillType
    """

    # Discriminator registry: subclasses declared with ``discriminator="..."`` are
    # registered here (framework behavior) so deserialization picks the right class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"skill_reference\" and \"inline\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ContextManagementParam(_Model):
    """A context-management entry for a response request.

    :ivar type: The context management entry type. Currently only 'compaction' is supported.
     Required.
    :vartype type: str
    :ivar compact_threshold:
    :vartype compact_threshold: int
    """

    # NOTE(review): plain ``rest_field`` here, not a discriminator — 'compaction' is
    # the only documented value but the field is an open string.
    type: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The context management entry type. Currently only 'compaction' is supported. Required."""
    # Presumably the token/size threshold at which compaction triggers — not shown
    # here; confirm units against the service spec.
    compact_threshold: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        type: str,
        compact_threshold: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ConversationParam_2(_Model):
    """Conversation object.

    Wraps a conversation ID for request payloads (the ``_2`` suffix comes from the
    code generator de-duplicating type names).

    :ivar id: The unique ID of the conversation. Required.
    :vartype id: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the conversation. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ConversationReference(_Model):
    """Conversation.

    Reference to the conversation a response was associated with.

    :ivar id: The unique ID of the conversation that this response was associated with. Required.
    :vartype id: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the conversation that this response was associated with. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CoordParam(_Model):
    """Coordinate.

    An integer (x, y) point — presumably in display pixels for computer-use
    actions; confirm against the callers.

    :ivar x: The x-coordinate. Required.
    :vartype x: int
    :ivar y: The y-coordinate. Required.
    :vartype y: int
    """

    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CreatedBy(_Model):
    """Provenance of an item: which agent and which response produced it.

    :ivar agent: The agent that created the item.
    :vartype agent: ~azure.ai.responses.server.sdk.models.models.AgentId
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    """

    agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The agent that created the item."""
    response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The response on which the item is created."""

    @overload
    def __init__(
        self,
        *,
        agent: Optional["_models.AgentId"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CreateResponse(_Model):
    """Request body for creating a response. All fields are optional; unset fields
    use service defaults.

    :ivar metadata:
    :vartype metadata: ~azure.ai.responses.server.sdk.models.models.Metadata
    :ivar top_logprobs:
    :vartype top_logprobs: int
    :ivar temperature:
    :vartype temperature: int
    :ivar top_p:
    :vartype top_p: int
    :ivar user: This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
     ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
     end-users. Used to boost cache hit rates by better bucketing similar requests and to help
     OpenAI detect and prevent abuse. `Learn more
     `_.
    :vartype user: str
    :ivar safety_identifier: A stable identifier used to help detect users of your application that
     may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies
     each user, with a maximum length of 64 characters. We recommend hashing their username or email
     address, in order to avoid sending us any identifying information. `Learn more
     `_.
    :vartype safety_identifier: str
    :ivar prompt_cache_key: Used by OpenAI to cache responses for similar requests to optimize your
     cache hit rates. Replaces the ``user`` field. `Learn more `_.
    :vartype prompt_cache_key: str
    :ivar service_tier: Is one of the following types: Literal["auto"], Literal["default"],
     Literal["flex"], Literal["scale"], Literal["priority"]
    :vartype service_tier: str or str or str or str or str
    :ivar prompt_cache_retention: Is either a Literal["in-memory"] type or a Literal["24h"] type.
    :vartype prompt_cache_retention: str or str
    :ivar previous_response_id:
    :vartype previous_response_id: str
    :ivar model: The model deployment to use for the creation of this response.
    :vartype model: str
    :ivar reasoning:
    :vartype reasoning: ~azure.ai.responses.server.sdk.models.models.Reasoning
    :ivar background:
    :vartype background: bool
    :ivar max_output_tokens:
    :vartype max_output_tokens: int
    :ivar max_tool_calls:
    :vartype max_tool_calls: int
    :ivar text:
    :vartype text: ~azure.ai.responses.server.sdk.models.models.ResponseTextParam
    :ivar tools:
    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.Tool]
    :ivar tool_choice: Is either a Union[str, "_models.ToolChoiceOptions"] type or a
     ToolChoiceParam type.
    :vartype tool_choice: str or ~azure.ai.responses.server.sdk.models.models.ToolChoiceOptions or
     ~azure.ai.responses.server.sdk.models.models.ToolChoiceParam
    :ivar prompt:
    :vartype prompt: ~azure.ai.responses.server.sdk.models.models.Prompt
    :ivar truncation: Is either a Literal["auto"] type or a Literal["disabled"] type.
    :vartype truncation: str or str
    :ivar input: Is either a str type or a [Item] type.
    :vartype input: str or list[~azure.ai.responses.server.sdk.models.models.Item]
    :ivar include:
    :vartype include: list[str or ~azure.ai.responses.server.sdk.models.models.IncludeEnum]
    :ivar parallel_tool_calls:
    :vartype parallel_tool_calls: bool
    :ivar store:
    :vartype store: bool
    :ivar instructions:
    :vartype instructions: str
    :ivar stream:
    :vartype stream: bool
    :ivar stream_options:
    :vartype stream_options: ~azure.ai.responses.server.sdk.models.models.ResponseStreamOptions
    :ivar conversation: Is either a str type or a ConversationParam_2 type.
    :vartype conversation: str or ~azure.ai.responses.server.sdk.models.models.ConversationParam_2
    :ivar context_management: Context management configuration for this request.
    :vartype context_management:
     list[~azure.ai.responses.server.sdk.models.models.ContextManagementParam]
    :ivar agent: (Deprecated) Use agent_reference instead. The agent to use for generating the
     response.
    :vartype agent: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar agent_reference: The agent to use for generating the response.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar structured_inputs: The structured inputs to the response that can participate in prompt
     template substitution or tool argument bindings.
    :vartype structured_inputs: dict[str, any]
    :ivar agent_session_id: Optional session identifier for sandbox affinity. Currently only
     relevant for hosted agents. When provided, the request is routed to the same sandbox. When
     omitted, auto-derived from conversation_id/prev_response_id or a new UUID is generated.
    :vartype agent_session_id: str
    """

    metadata: Optional["_models.Metadata"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # NOTE(review): temperature/top_p are declared as int here — sampling parameters
    # are usually floats; this mirrors the generated spec. Confirm before relying on it.
    temperature: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    top_p: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
     ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
     end-users. Used to boost cache hit rates by better bucketing similar requests and to help
     OpenAI detect and prevent abuse. `Learn more
     `_."""
    safety_identifier: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A stable identifier used to help detect users of your application that may be violating
     OpenAI's usage policies. The IDs should be a string that uniquely identifies each user, with a
     maximum length of 64 characters. We recommend hashing their username or email address, in order
     to avoid sending us any identifying information. `Learn more
     `_."""
    prompt_cache_key: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Used by OpenAI to cache responses for similar requests to optimize your cache hit rates.
     Replaces the ``user`` field. `Learn more `_."""
    service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is one of the following types: Literal[\"auto\"], Literal[\"default\"], Literal[\"flex\"],
     Literal[\"scale\"], Literal[\"priority\"]"""
    prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a Literal[\"in-memory\"] type or a Literal[\"24h\"] type."""
    previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The model deployment to use for the creation of this response."""
    reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    text: Optional["_models.ResponseTextParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a ToolChoiceParam type."""
    prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    truncation: Optional[Literal["auto", "disabled"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a Literal[\"auto\"] type or a Literal[\"disabled\"] type."""
    input: Optional["_types.InputParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Is either a str type or a [Item] type."""
    include: Optional[list[Union[str, "_models.IncludeEnum"]]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    parallel_tool_calls: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    store: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    stream: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    stream_options: Optional["_models.ResponseStreamOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    conversation: Optional["_types.ConversationParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a str type or a ConversationParam_2 type."""
    context_management: Optional[list["_models.ContextManagementParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Context management configuration for this request."""
    # Both ``agent`` and ``agent_reference`` carry an AgentReference; ``agent`` is the
    # deprecated spelling kept for backward compatibility.
    agent: Optional["_models.AgentReference"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """(Deprecated) Use agent_reference instead. The agent to use for generating the response."""
    agent_reference: Optional["_models.AgentReference"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The agent to use for generating the response."""
    structured_inputs: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The structured inputs to the response that can participate in prompt template substitution or
     tool argument bindings."""
    agent_session_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional session identifier for sandbox affinity. Currently only relevant for hosted agents.
     When provided, the request is routed to the same sandbox. When omitted, auto-derived from
     conversation_id/prev_response_id or a new UUID is generated."""

    @overload
    def __init__(  # pylint: disable=too-many-locals
        self,
        *,
        metadata: Optional["_models.Metadata"] = None,
        top_logprobs: Optional[int] = None,
        temperature: Optional[int] = None,
        top_p: Optional[int] = None,
        user: Optional[str] = None,
        safety_identifier: Optional[str] = None,
        prompt_cache_key: Optional[str] = None,
        service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = None,
        prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = None,
        previous_response_id: Optional[str] = None,
        model: Optional[str] = None,
        reasoning: Optional["_models.Reasoning"] = None,
        background: Optional[bool] = None,
        max_output_tokens: Optional[int] = None,
        max_tool_calls: Optional[int] = None,
        text: Optional["_models.ResponseTextParam"] = None,
        tools: Optional[list["_models.Tool"]] = None,
        tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = None,
        prompt: Optional["_models.Prompt"] = None,
        truncation: Optional[Literal["auto", "disabled"]] = None,
        input: Optional["_types.InputParam"] = None,
        include: Optional[list[Union[str, "_models.IncludeEnum"]]] = None,
        parallel_tool_calls: Optional[bool] = None,
        store: Optional[bool] = None,
        instructions: Optional[str] = None,
        stream: Optional[bool] = None,
        stream_options: Optional["_models.ResponseStreamOptions"] = None,
        conversation: Optional["_types.ConversationParam"] = None,
        context_management: Optional[list["_models.ContextManagementParam"]] = None,
        agent: Optional["_models.AgentReference"] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        structured_inputs: Optional[dict[str, Any]] = None,
        agent_session_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CustomToolParamFormat(_Model):
    """The input format for the custom tool. Default is unconstrained text.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    CustomGrammarFormatParam, CustomTextFormatParam

    :ivar type: Required. Known values are: "text" and "grammar".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CustomToolParamFormatType
    """

    # Discriminator registry: subclasses declared with ``discriminator="..."`` are
    # registered here (framework behavior) so deserialization picks the right class.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"text\" and \"grammar\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class CustomGrammarFormatParam(CustomToolParamFormat, discriminator="grammar"):
    """Grammar format.

    Constrains custom-tool input to a user-supplied grammar.

    :ivar type: Grammar format. Always ``grammar``. Required. GRAMMAR.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.GRAMMAR
    :ivar syntax: The syntax of the grammar definition. One of ``lark`` or ``regex``. Required.
     Known values are: "lark" and "regex".
    :vartype syntax: str or ~azure.ai.responses.server.sdk.models.models.GrammarSyntax1
    :ivar definition: The grammar definition. Required.
    :vartype definition: str
    """

    type: Literal[CustomToolParamFormatType.GRAMMAR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Grammar format. Always ``grammar``. Required. GRAMMAR."""
    syntax: Union[str, "_models.GrammarSyntax1"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The syntax of the grammar definition. One of ``lark`` or ``regex``. Required. Known values are:
     \"lark\" and \"regex\"."""
    definition: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The grammar definition. Required."""

    @overload
    def __init__(
        self,
        *,
        syntax: Union[str, "_models.GrammarSyntax1"],
        definition: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator regardless of how the model was constructed.
        self.type = CustomToolParamFormatType.GRAMMAR  # type: ignore
+
+
class CustomTextFormatParam(CustomToolParamFormat, discriminator="text"):
    """Text format.

    :ivar type: Unconstrained text format. Always ``text``. Required. TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TEXT
    """

    type: Literal[CustomToolParamFormatType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Unconstrained text format. Always ``text``. Required. TEXT."""

    # No fields besides the discriminator, so the keyword overload takes no arguments.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator regardless of how the model was constructed.
        self.type = CustomToolParamFormatType.TEXT  # type: ignore
+
+
class CustomToolParam(Tool, discriminator="custom"):
    """Custom tool.

    :ivar type: The type of the custom tool. Always ``custom``. Required. CUSTOM.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM
    :ivar name: The name of the custom tool, used to identify it in tool calls. Required.
    :vartype name: str
    :ivar description: Optional description of the custom tool, used to provide more context.
    :vartype description: str
    :ivar format: The input format for the custom tool. Default is unconstrained text.
    :vartype format: ~azure.ai.responses.server.sdk.models.models.CustomToolParamFormat
    """

    # NOTE(review): auto-generated model — rest_field metadata defines the wire shape;
    # regenerate from TypeSpec instead of hand-editing.
    type: Literal[ToolType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool. Always ``custom``. Required. CUSTOM."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the custom tool, used to identify it in tool calls. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional description of the custom tool, used to provide more context."""
    format: Optional["_models.CustomToolParamFormat"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The input format for the custom tool. Default is unconstrained text."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: Optional[str] = None,
        format: Optional["_models.CustomToolParamFormat"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = ToolType.CUSTOM  # type: ignore
+
+
class DeleteResponseResult(_Model):
    """The result of a delete response operation.

    :ivar id: The operation ID. Required.
    :vartype id: str
    :ivar deleted: Always return true. Required. Default value is True.
    :vartype deleted: bool
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The operation ID. Required."""
    deleted: Literal[True] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Always return true. Required. Default value is True."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``deleted`` is a constant: a successful delete always reports True,
        # so it is not accepted as a constructor argument.
        self.deleted: Literal[True] = True
+
+
class DoubleClickAction(ComputerAction, discriminator="double_click"):
    """DoubleClick.

    :ivar type: Specifies the event type. For a double click action, this property is always set to
     ``double_click``. Required. DOUBLE_CLICK.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DOUBLE_CLICK
    :ivar x: The x-coordinate where the double click occurred. Required.
    :vartype x: int
    :ivar y: The y-coordinate where the double click occurred. Required.
    :vartype y: int
    """

    # NOTE(review): auto-generated model — regenerate from TypeSpec rather than editing.
    type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a double click action, this property is always set to
    ``double_click``. Required. DOUBLE_CLICK."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate where the double click occurred. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate where the double click occurred. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = ComputerActionType.DOUBLE_CLICK  # type: ignore
+
+
class DragParam(ComputerAction, discriminator="drag"):
    """Drag.

    :ivar type: Specifies the event type. For a drag action, this property is always set to
     ``drag``. Required. DRAG.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.DRAG
    :ivar path: An array of coordinates representing the path of the drag action. Coordinates will
     appear as an array of objects, eg

     .. code-block::

        [
          { x: 100, y: 200 },
          { x: 200, y: 300 }
        ]. Required.
    :vartype path: list[~azure.ai.responses.server.sdk.models.models.CoordParam]
    """

    # NOTE(review): auto-generated model — regenerate from TypeSpec rather than editing.
    type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a drag action, this property is always set to ``drag``. Required.
    DRAG."""
    path: list["_models.CoordParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An array of coordinates representing the path of the drag action. Coordinates will appear as an
    array of objects, eg

    .. code-block::

       [
         { x: 100, y: 200 },
         { x: 200, y: 300 }
       ]. Required."""

    @overload
    def __init__(
        self,
        *,
        path: list["_models.CoordParam"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = ComputerActionType.DRAG  # type: ignore
+
+
class Error(_Model):
    """Error.

    :ivar code: Required.
    :vartype code: str
    :ivar message: Required.
    :vartype message: str
    :ivar param:
    :vartype param: str
    :ivar type:
    :vartype type: str
    :ivar details:
    :vartype details: list[~azure.ai.responses.server.sdk.models.models.Error]
    :ivar additional_info:
    :vartype additional_info: dict[str, any]
    :ivar debug_info:
    :vartype debug_info: dict[str, any]
    """

    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    param: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    type: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # ``details`` nests further Error objects, allowing a tree of error causes.
    details: Optional[list["_models.Error"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The ``name=`` arguments map the snake_case Python attributes to the
    # camelCase property names used on the wire.
    additional_info: Optional[dict[str, Any]] = rest_field(
        name="additionalInfo", visibility=["read", "create", "update", "delete", "query"]
    )
    debug_info: Optional[dict[str, Any]] = rest_field(
        name="debugInfo", visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        code: str,
        message: str,
        param: Optional[str] = None,
        type: Optional[str] = None,
        details: Optional[list["_models.Error"]] = None,
        additional_info: Optional[dict[str, Any]] = None,
        debug_info: Optional[dict[str, Any]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Runtime __init__ forwards to the base model; the @overload signatures
        # above document the supported calling conventions.
        super().__init__(*args, **kwargs)
+
+
class FabricDataAgentToolCall(OutputItem, discriminator="fabric_dataagent_preview_call"):
    """A Fabric data agent tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. FABRIC_DATAAGENT_PREVIEW_CALL.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FABRIC_DATAAGENT_PREVIEW_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # NOTE(review): created_by / agent_reference / response_id are declared on the
    # OutputItem base class (outside this view); the overload below only forwards them.
    type: Literal[OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. FABRIC_DATAAGENT_PREVIEW_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL  # type: ignore
+
+
class FabricDataAgentToolCallOutput(OutputItem, discriminator="fabric_dataagent_preview_call_output"):
    """The output of a Fabric data agent tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the Fabric data agent tool call. Is one of the following types:
     {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # NOTE(review): created_by / agent_reference / response_id come from the OutputItem
    # base class (declared outside this view); the overload below only forwards them.
    type: Literal[OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    # ``output`` is a union alias (_types.ToolCallOutputContent): dict, str, or list.
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the Fabric data agent tool call. Is one of the following types: {str: Any},
    str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = OutputItemType.FABRIC_DATAAGENT_PREVIEW_CALL_OUTPUT  # type: ignore
+
+
class FabricDataAgentToolParameters(_Model):
    """The fabric data agent tool parameters.

    :ivar project_connections: The project connections attached to this tool. There can be a
     maximum of 1 connection resource attached to the tool.
    :vartype project_connections:
     list[~azure.ai.responses.server.sdk.models.models.ToolProjectConnection]
    """

    # NOTE(review): the "maximum of 1 connection" constraint is documentation only;
    # no validation is visible here — presumably enforced server-side.
    project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The project connections attached to this tool. There can be a maximum of 1 connection resource
    attached to the tool."""

    @overload
    def __init__(
        self,
        *,
        project_connections: Optional[list["_models.ToolProjectConnection"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Runtime __init__ forwards to the base model; the @overload signatures
        # above document the supported calling conventions.
        super().__init__(*args, **kwargs)
+
+
class FileCitationBody(Annotation, discriminator="file_citation"):
    """File citation.

    :ivar type: The type of the file citation. Always ``file_citation``. Required. FILE_CITATION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_CITATION
    :ivar file_id: The ID of the file. Required.
    :vartype file_id: str
    :ivar index: The index of the file in the list of files. Required.
    :vartype index: int
    :ivar filename: The filename of the file cited. Required.
    :vartype filename: str
    """

    # NOTE(review): auto-generated model — regenerate from TypeSpec rather than editing.
    type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file citation. Always ``file_citation``. Required. FILE_CITATION."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the file. Required."""
    index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the file in the list of files. Required."""
    filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The filename of the file cited. Required."""

    @overload
    def __init__(
        self,
        *,
        file_id: str,
        index: int,
        filename: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = AnnotationType.FILE_CITATION  # type: ignore
+
+
class FilePath(Annotation, discriminator="file_path"):
    """File path.

    :ivar type: The type of the file path. Always ``file_path``. Required. FILE_PATH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_PATH
    :ivar file_id: The ID of the file. Required.
    :vartype file_id: str
    :ivar index: The index of the file in the list of files. Required.
    :vartype index: int
    """

    # NOTE(review): auto-generated model — regenerate from TypeSpec rather than editing.
    type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file path. Always ``file_path``. Required. FILE_PATH."""
    file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the file. Required."""
    index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the file in the list of files. Required."""

    @overload
    def __init__(
        self,
        *,
        file_id: str,
        index: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = AnnotationType.FILE_PATH  # type: ignore
+
+
class FileSearchTool(Tool, discriminator="file_search"):
    """File search.

    :ivar type: The type of the file search tool. Always ``file_search``. Required. FILE_SEARCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH
    :ivar vector_store_ids: The IDs of the vector stores to search. Required.
    :vartype vector_store_ids: list[str]
    :ivar max_num_results: The maximum number of results to return. This number should be between 1
     and 50 inclusive.
    :vartype max_num_results: int
    :ivar ranking_options: Ranking options for search.
    :vartype ranking_options: ~azure.ai.responses.server.sdk.models.models.RankingOptions
    :ivar filters: Is either a ComparisonFilter type or a CompoundFilter type.
    :vartype filters: ~azure.ai.responses.server.sdk.models.models.ComparisonFilter or
     ~azure.ai.responses.server.sdk.models.models.CompoundFilter
    """

    # NOTE(review): auto-generated model — regenerate from TypeSpec rather than editing.
    type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file search tool. Always ``file_search``. Required. FILE_SEARCH."""
    vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The IDs of the vector stores to search. Required."""
    # The 1–50 range documented above is not validated here — presumably server-side.
    max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The maximum number of results to return. This number should be between 1 and 50 inclusive."""
    ranking_options: Optional["_models.RankingOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Ranking options for search."""
    # ``filters`` uses the _types.Filters union alias (ComparisonFilter | CompoundFilter).
    filters: Optional["_types.Filters"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Is either a ComparisonFilter type or a CompoundFilter type."""

    @overload
    def __init__(
        self,
        *,
        vector_store_ids: list[str],
        max_num_results: Optional[int] = None,
        ranking_options: Optional["_models.RankingOptions"] = None,
        filters: Optional["_types.Filters"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = ToolType.FILE_SEARCH  # type: ignore
+
+
class FileSearchToolCallResults(_Model):
    """FileSearchToolCallResults.

    A single search hit from a file search tool call; all fields are optional.

    :ivar file_id:
    :vartype file_id: str
    :ivar text:
    :vartype text: str
    :ivar filename:
    :vartype filename: str
    :ivar attributes:
    :vartype attributes: ~azure.ai.responses.server.sdk.models.models.VectorStoreFileAttributes
    :ivar score:
    :vartype score: float
    """

    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    # NOTE(review): presumably a relevance score for the hit — not documented in
    # the generated spec; confirm against the service documentation.
    score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        text: Optional[str] = None,
        filename: Optional[str] = None,
        attributes: Optional["_models.VectorStoreFileAttributes"] = None,
        score: Optional[float] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Runtime __init__ forwards to the base model; the @overload signatures
        # above document the supported calling conventions.
        super().__init__(*args, **kwargs)
+
+
class FunctionAndCustomToolCallOutput(_Model):
    """FunctionAndCustomToolCallOutput.

    Polymorphic base for tool-call output content parts.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionAndCustomToolCallOutputInputFileContent,
    FunctionAndCustomToolCallOutputInputImageContent,
    FunctionAndCustomToolCallOutputInputTextContent

    :ivar type: Required. Known values are: "input_text", "input_image", and "input_file".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutputType
    """

    # NOTE(review): presumably populated by the model machinery with
    # discriminator-value -> subclass entries as subclasses are declared.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"input_text\", \"input_image\", and \"input_file\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Runtime __init__ forwards to the base model; the @overload signatures
        # above document the supported calling conventions.
        super().__init__(*args, **kwargs)
+
+
class FunctionAndCustomToolCallOutputInputFileContent(
    FunctionAndCustomToolCallOutput, discriminator="input_file"
):  # pylint: disable=name-too-long
    """Input file.

    :ivar type: The type of the input item. Always ``input_file``. Required. INPUT_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_FILE
    :ivar file_id:
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    """

    # NOTE(review): all four file fields are optional with no visible mutual-exclusion
    # check — presumably exactly one of file_id / file_url / file_data is expected;
    # confirm against the service contract.
    type: Literal[FunctionAndCustomToolCallOutputType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the input item. Always ``input_file``. Required. INPUT_FILE."""
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the file to be sent to the model."""
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the file to be sent to the model."""
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the file to be sent to the model."""

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_url: Optional[str] = None,
        file_data: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = FunctionAndCustomToolCallOutputType.INPUT_FILE  # type: ignore
+
+
class FunctionAndCustomToolCallOutputInputImageContent(
    FunctionAndCustomToolCallOutput, discriminator="input_image"
):  # pylint: disable=name-too-long
    """Input image.

    :ivar type: The type of the input item. Always ``input_image``. Required. INPUT_IMAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_IMAGE
    :ivar image_url:
    :vartype image_url: str
    :ivar file_id:
    :vartype file_id: str
    :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``,
     or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.ImageDetail
    """

    type: Literal[FunctionAndCustomToolCallOutputType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the input item. Always ``input_image``. Required. INPUT_IMAGE."""
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # NOTE(review): docstring says detail "Defaults to auto", but no default is set
    # here and the overload requires it — presumably the service applies the default.
    detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``.
    Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        detail: Union[str, "_models.ImageDetail"],
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = FunctionAndCustomToolCallOutputType.INPUT_IMAGE  # type: ignore
+
+
class FunctionAndCustomToolCallOutputInputTextContent(
    FunctionAndCustomToolCallOutput, discriminator="input_text"
):  # pylint: disable=name-too-long
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. INPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_TEXT
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    # NOTE(review): auto-generated model — regenerate from TypeSpec rather than editing.
    type: Literal[FunctionAndCustomToolCallOutputType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the input item. Always ``input_text``. Required. INPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = FunctionAndCustomToolCallOutputType.INPUT_TEXT  # type: ignore
+
+
class FunctionCallOutputItemParam(Item, discriminator="function_call_output"):
    """Function tool call output.

    :ivar id:
    :vartype id: str
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar type: The type of the function tool call output. Always ``function_call_output``.
     Required. FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL_OUTPUT
    :ivar output: Text, image, or file output of the function tool call. Required. Is either a str
     type or a [Union["_models.InputTextContentParam", "_models.InputImageContentParamAutoParam",
     "_models.InputFileContentParam"]] type.
    :vartype output: str or list[~azure.ai.responses.server.sdk.models.models.InputTextContentParam
     or ~azure.ai.responses.server.sdk.models.models.InputImageContentParamAutoParam or
     ~azure.ai.responses.server.sdk.models.models.InputFileContentParam]
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.FunctionCallItemStatus
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call output. Always ``function_call_output``. Required.
    FUNCTION_CALL_OUTPUT."""
    # ``output`` accepts either a plain string or a list of content-part models
    # (text / image / file) — see the union spelled out below.
    output: Union[
        str,
        list[
            Union[
                "_models.InputTextContentParam",
                "_models.InputImageContentParamAutoParam",
                "_models.InputFileContentParam",
            ]
        ],
    ] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Text, image, or file output of the function tool call. Required. Is either a str type or a
    [Union[\"_models.InputTextContentParam\", \"_models.InputImageContentParamAutoParam\",
    \"_models.InputFileContentParam\"]] type."""
    status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[
            str,
            list[
                Union[
                    "_models.InputTextContentParam",
                    "_models.InputImageContentParamAutoParam",
                    "_models.InputFileContentParam",
                ]
            ],
        ],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        status: Optional[Union[str, "_models.FunctionCallItemStatus"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subtype's constant value.
        self.type = ItemType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class FunctionShellAction(_Model):
    """Shell exec action.

    All limits are required on this (response-side) shape; compare
    FunctionShellActionParam below, where they are optional request inputs.

    :ivar commands: Required.
    :vartype commands: list[str]
    :ivar timeout_ms: Required.
    :vartype timeout_ms: int
    :ivar max_output_length: Required.
    :vartype max_output_length: int
    """

    commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    timeout_ms: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        commands: list[str],
        timeout_ms: int,
        max_output_length: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Runtime __init__ forwards to the base model; the @overload signatures
        # above document the supported calling conventions.
        super().__init__(*args, **kwargs)
+
+
class FunctionShellActionParam(_Model):
    """Shell action.

    :ivar commands: Ordered shell commands for the execution environment to run. Required.
    :vartype commands: list[str]
    :ivar timeout_ms:
    :vartype timeout_ms: int
    :ivar max_output_length:
    :vartype max_output_length: int
    """

    commands: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Ordered shell commands for the execution environment to run. Required."""
    # Optional time limit in milliseconds (per the field's `_ms` suffix).
    timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Optional cap on captured output length (units not specified here).
    max_output_length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        commands: list[str],
        timeout_ms: Optional[int] = None,
        max_output_length: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallItemParam(Item, discriminator="shell_call"):
    """Shell tool call.

    :ivar id:
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL
    :ivar action: The shell commands and limits that describe how to run the tool call. Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.FunctionShellActionParam
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemStatus
    :ivar environment:
    :vartype environment:
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemParamEnvironment
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    # Discriminator field: fixed to ItemType.SHELL_CALL by the runtime __init__ below.
    type: Literal[ItemType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``shell_call``. Required. SHELL_CALL."""
    action: "_models.FunctionShellActionParam" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The shell commands and limits that describe how to run the tool call. Required."""
    status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    environment: Optional["_models.FunctionShellCallItemParamEnvironment"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        action: "_models.FunctionShellActionParam",
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = None,
        environment: Optional["_models.FunctionShellCallItemParamEnvironment"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = ItemType.SHELL_CALL  # type: ignore
+
+
class FunctionShellCallItemParamEnvironment(_Model):
    """The environment to execute the shell commands in.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionShellCallItemParamEnvironmentContainerReferenceParam,
    FunctionShellCallItemParamEnvironmentLocalEnvironmentParam

    :ivar type: Required. Known values are: "local" and "container_reference".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemParamEnvironmentType
    """

    # Discriminator-value -> subclass registry; presumably populated by the model
    # machinery when subclasses declare `discriminator="..."` — not shown here.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"local\" and \"container_reference\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallItemParamEnvironmentContainerReferenceParam(
    FunctionShellCallItemParamEnvironment, discriminator="container_reference"
):  # pylint: disable=name-too-long
    """FunctionShellCallItemParamEnvironmentContainerReferenceParam.

    Environment variant that references an existing container.

    :ivar type: References a container created with the /v1/containers endpoint. Required.
     CONTAINER_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_REFERENCE
    :ivar container_id: The ID of the referenced container. Required.
    :vartype container_id: str
    """

    type: Literal[FunctionShellCallItemParamEnvironmentType.CONTAINER_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """References a container created with the /v1/containers endpoint. Required. CONTAINER_REFERENCE."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the referenced container. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellCallItemParamEnvironmentType.CONTAINER_REFERENCE  # type: ignore
+
+
class FunctionShellCallItemParamEnvironmentLocalEnvironmentParam(
    FunctionShellCallItemParamEnvironment, discriminator="local"
):  # pylint: disable=name-too-long
    """FunctionShellCallItemParamEnvironmentLocalEnvironmentParam.

    Environment variant that runs on a local computer environment.

    :ivar type: Use a local computer environment. Required. LOCAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL
    :ivar skills: An optional list of skills.
    :vartype skills: list[~azure.ai.responses.server.sdk.models.models.LocalSkillParam]
    """

    type: Literal[FunctionShellCallItemParamEnvironmentType.LOCAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Use a local computer environment. Required. LOCAL."""
    skills: Optional[list["_models.LocalSkillParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An optional list of skills."""

    @overload
    def __init__(
        self,
        *,
        skills: Optional[list["_models.LocalSkillParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellCallItemParamEnvironmentType.LOCAL  # type: ignore
+
+
class FunctionShellCallOutputContent(_Model):
    """Shell call output content.

    :ivar stdout: The standard output that was captured. Required.
    :vartype stdout: str
    :ivar stderr: The standard error output that was captured. Required.
    :vartype stderr: str
    :ivar outcome: Shell call outcome. Required.
    :vartype outcome: ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcome
    :ivar created_by: The identifier of the actor that created the item.
    :vartype created_by: str
    """

    stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The standard output that was captured. Required."""
    stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The standard error output that was captured. Required."""
    # Polymorphic outcome: exit or timeout (see FunctionShellCallOutputOutcome subclasses).
    outcome: "_models.FunctionShellCallOutputOutcome" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Shell call outcome. Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of the actor that created the item."""

    @overload
    def __init__(
        self,
        *,
        stdout: str,
        stderr: str,
        outcome: "_models.FunctionShellCallOutputOutcome",
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputContentParam(_Model):
    """Shell output content.

    :ivar stdout: Captured stdout output for the shell call. Required.
    :vartype stdout: str
    :ivar stderr: Captured stderr output for the shell call. Required.
    :vartype stderr: str
    :ivar outcome: The exit or timeout outcome associated with this shell call. Required.
    :vartype outcome:
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcomeParam
    """

    stdout: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Captured stdout output for the shell call. Required."""
    stderr: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Captured stderr output for the shell call. Required."""
    # Polymorphic outcome: exit or timeout (see FunctionShellCallOutputOutcomeParam subclasses).
    outcome: "_models.FunctionShellCallOutputOutcomeParam" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The exit or timeout outcome associated with this shell call. Required."""

    @overload
    def __init__(
        self,
        *,
        stdout: str,
        stderr: str,
        outcome: "_models.FunctionShellCallOutputOutcomeParam",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputOutcome(_Model):
    """Shell call outcome.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionShellCallOutputExitOutcome, FunctionShellCallOutputTimeoutOutcome

    :ivar type: Required. Known values are: "timeout" and "exit".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcomeType
    """

    # Discriminator-value -> subclass registry; presumably populated by the model
    # machinery when subclasses declare `discriminator="..."` — not shown here.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"timeout\" and \"exit\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputExitOutcome(FunctionShellCallOutputOutcome, discriminator="exit"):
    """Shell call exit outcome.

    :ivar type: The outcome type. Always ``exit``. Required. EXIT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.EXIT
    :ivar exit_code: Exit code from the shell process. Required.
    :vartype exit_code: int
    """

    type: Literal[FunctionShellCallOutputOutcomeType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``exit``. Required. EXIT."""
    exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Exit code from the shell process. Required."""

    @overload
    def __init__(
        self,
        *,
        exit_code: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellCallOutputOutcomeType.EXIT  # type: ignore
+
+
class FunctionShellCallOutputOutcomeParam(_Model):
    """Shell call outcome.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    FunctionShellCallOutputExitOutcomeParam, FunctionShellCallOutputTimeoutOutcomeParam

    :ivar type: Required. Known values are: "timeout" and "exit".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputOutcomeParamType
    """

    # Discriminator-value -> subclass registry; presumably populated by the model
    # machinery when subclasses declare `discriminator="..."` — not shown here.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"timeout\" and \"exit\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class FunctionShellCallOutputExitOutcomeParam(FunctionShellCallOutputOutcomeParam, discriminator="exit"):
    """Shell call exit outcome.

    :ivar type: The outcome type. Always ``exit``. Required. EXIT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.EXIT
    :ivar exit_code: The exit code returned by the shell process. Required.
    :vartype exit_code: int
    """

    type: Literal[FunctionShellCallOutputOutcomeParamType.EXIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``exit``. Required. EXIT."""
    exit_code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The exit code returned by the shell process. Required."""

    @overload
    def __init__(
        self,
        *,
        exit_code: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellCallOutputOutcomeParamType.EXIT  # type: ignore
+
+
class FunctionShellCallOutputItemParam(Item, discriminator="shell_call_output"):
    """Shell tool call output.

    :ivar id:
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar type: The type of the item. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL_OUTPUT
    :ivar output: Captured chunks of stdout and stderr output, along with their associated
     outcomes. Required.
    :vartype output:
     list[~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputContentParam]
    :ivar status: Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.FunctionShellCallItemStatus
    :ivar max_output_length:
    :vartype max_output_length: int
    """

    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    # Discriminator field: fixed to ItemType.SHELL_CALL_OUTPUT by the runtime __init__ below.
    type: Literal[ItemType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT."""
    output: list["_models.FunctionShellCallOutputContentParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Captured chunks of stdout and stderr output, along with their associated outcomes. Required."""
    status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    # Optional cap on output length (units not specified here).
    max_output_length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: list["_models.FunctionShellCallOutputContentParam"],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        status: Optional[Union[str, "_models.FunctionShellCallItemStatus"]] = None,
        max_output_length: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = ItemType.SHELL_CALL_OUTPUT  # type: ignore
+
+
class FunctionShellCallOutputTimeoutOutcome(FunctionShellCallOutputOutcome, discriminator="timeout"):
    """Shell call timeout outcome.

    Carries no payload beyond the discriminator itself.

    :ivar type: The outcome type. Always ``timeout``. Required. TIMEOUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TIMEOUT
    """

    type: Literal[FunctionShellCallOutputOutcomeType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``timeout``. Required. TIMEOUT."""

    # No keyword arguments: this variant has no settable fields.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellCallOutputOutcomeType.TIMEOUT  # type: ignore
+
+
class FunctionShellCallOutputTimeoutOutcomeParam(
    FunctionShellCallOutputOutcomeParam, discriminator="timeout"
):  # pylint: disable=name-too-long
    """Shell call timeout outcome.

    Carries no payload beyond the discriminator itself.

    :ivar type: The outcome type. Always ``timeout``. Required. TIMEOUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TIMEOUT
    """

    type: Literal[FunctionShellCallOutputOutcomeParamType.TIMEOUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The outcome type. Always ``timeout``. Required. TIMEOUT."""

    # No keyword arguments: this variant has no settable fields.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellCallOutputOutcomeParamType.TIMEOUT  # type: ignore
+
+
class FunctionShellToolParam(Tool, discriminator="shell"):
    """Shell tool.

    :ivar type: The type of the shell tool. Always ``shell``. Required. SHELL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL
    :ivar environment:
    :vartype environment:
     ~azure.ai.responses.server.sdk.models.models.FunctionShellToolParamEnvironment
    """

    type: Literal[ToolType.SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the shell tool. Always ``shell``. Required. SHELL."""
    # Optional execution environment for the tool (local or container reference).
    environment: Optional["_models.FunctionShellToolParamEnvironment"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        environment: Optional["_models.FunctionShellToolParamEnvironment"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = ToolType.SHELL  # type: ignore
+
+
class FunctionShellToolParamEnvironmentContainerReferenceParam(
    FunctionShellToolParamEnvironment, discriminator="container_reference"
):  # pylint: disable=name-too-long
    """FunctionShellToolParamEnvironmentContainerReferenceParam.

    Environment variant that references an existing container.

    :ivar type: References a container created with the /v1/containers endpoint. Required.
     CONTAINER_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CONTAINER_REFERENCE
    :ivar container_id: The ID of the referenced container. Required.
    :vartype container_id: str
    """

    # NOTE(review): the base class FunctionShellToolParamEnvironment is not defined in
    # this section of the file (unlike FunctionShellCallItemParamEnvironment, which is
    # declared before its subclasses) — confirm it is declared earlier in the file.
    type: Literal[FunctionShellToolParamEnvironmentType.CONTAINER_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """References a container created with the /v1/containers endpoint. Required. CONTAINER_REFERENCE."""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the referenced container. Required."""

    @overload
    def __init__(
        self,
        *,
        container_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellToolParamEnvironmentType.CONTAINER_REFERENCE  # type: ignore
+
+
class FunctionShellToolParamEnvironmentLocalEnvironmentParam(
    FunctionShellToolParamEnvironment, discriminator="local"
):  # pylint: disable=name-too-long
    """FunctionShellToolParamEnvironmentLocalEnvironmentParam.

    Environment variant that runs on a local computer environment.

    :ivar type: Use a local computer environment. Required. LOCAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL
    :ivar skills: An optional list of skills.
    :vartype skills: list[~azure.ai.responses.server.sdk.models.models.LocalSkillParam]
    """

    # NOTE(review): the base class FunctionShellToolParamEnvironment is not defined in
    # this section of the file — confirm it is declared earlier in the file.
    type: Literal[FunctionShellToolParamEnvironmentType.LOCAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Use a local computer environment. Required. LOCAL."""
    skills: Optional[list["_models.LocalSkillParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An optional list of skills."""

    @overload
    def __init__(
        self,
        *,
        skills: Optional[list["_models.LocalSkillParam"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = FunctionShellToolParamEnvironmentType.LOCAL  # type: ignore
+
+
class FunctionTool(Tool, discriminator="function"):
    """Function.

    :ivar type: The type of the function tool. Always ``function``. Required. FUNCTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION
    :ivar name: The name of the function to call. Required.
    :vartype name: str
    :ivar description:
    :vartype description: str
    :ivar parameters: Required.
    :vartype parameters: dict[str, any]
    :ivar strict: Required.
    :vartype strict: bool
    """

    type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool. Always ``function``. Required. FUNCTION."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to call. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Free-form JSON object describing the function's parameters — presumably a
    # JSON Schema, but that is not established by this file; confirm against the spec.
    parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        parameters: dict[str, Any],
        strict: bool,
        description: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = ToolType.FUNCTION  # type: ignore
+
+
class HybridSearchOptions(_Model):
    """HybridSearchOptions.

    Weights used when fusing embedding and text search results.

    :ivar embedding_weight: The weight of the embedding in the reciprocal ranking fusion. Required.
    :vartype embedding_weight: int
    :ivar text_weight: The weight of the text in the reciprocal ranking fusion. Required.
    :vartype text_weight: int
    """

    embedding_weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The weight of the embedding in the reciprocal ranking fusion. Required."""
    text_weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The weight of the text in the reciprocal ranking fusion. Required."""

    @overload
    def __init__(
        self,
        *,
        embedding_weight: int,
        text_weight: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Runtime initializer backing both overloads above.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ImageGenTool(Tool, discriminator="image_generation"):
    """Image generation tool.

    :ivar type: The type of the image generation tool. Always ``image_generation``. Required.
     IMAGE_GENERATION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION
    :ivar model: Is one of the following types: Literal["gpt-image-1"],
     Literal["gpt-image-1-mini"], Literal["gpt-image-1.5"], str
    :vartype model: str or str or str or str
    :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, or
     ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], Literal["medium"],
     Literal["high"], Literal["auto"]
    :vartype quality: str or str or str or str
    :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``,
     ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types:
     Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"]
    :vartype size: str or str or str or str
    :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or
     ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"],
     Literal["jpeg"]
    :vartype output_format: str or str or str
    :ivar output_compression: Compression level for the output image. Default: 100.
    :vartype output_compression: int
    :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a
     Literal["auto"] type or a Literal["low"] type.
    :vartype moderation: str or str
    :ivar background: Background type for the generated image. One of ``transparent``, ``opaque``,
     or ``auto``. Default: ``auto``. Is one of the following types: Literal["transparent"],
     Literal["opaque"], Literal["auto"]
    :vartype background: str or str or str
    :ivar input_fidelity: Known values are: "high" and "low".
    :vartype input_fidelity: str or ~azure.ai.responses.server.sdk.models.models.InputFidelity
    :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` (string, optional)
     and ``file_id`` (string, optional).
    :vartype input_image_mask:
     ~azure.ai.responses.server.sdk.models.models.ImageGenToolInputImageMask
    :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default
     value) to 3.
    :vartype partial_images: int
    :ivar action: Whether to generate a new image or edit an existing image. Default: ``auto``.
     Known values are: "generate", "edit", and "auto".
    :vartype action: str or ~azure.ai.responses.server.sdk.models.models.ImageGenActionEnum
    """

    type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the image generation tool. Always ``image_generation``. Required. IMAGE_GENERATION."""
    # Open union: the known model literals plus any future model name as plain str.
    model: Optional[Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], Literal["gpt-image-1.5"], str]] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """Is one of the following types: Literal[\"gpt-image-1\"], Literal[\"gpt-image-1-mini\"],
    Literal[\"gpt-image-1.5\"], str"""
    quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The quality of the generated image. One of ``low``, ``medium``, ``high``, or ``auto``. Default:
    ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"medium\"],
    Literal[\"high\"], Literal[\"auto\"]"""
    size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The size of the generated image. One of ``1024x1024``, ``1024x1536``, ``1536x1024``, or
    ``auto``. Default: ``auto``. Is one of the following types: Literal[\"1024x1024\"],
    Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]"""
    output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output format of the generated image. One of ``png``, ``webp``, or ``jpeg``. Default:
    ``png``. Is one of the following types: Literal[\"png\"], Literal[\"webp\"], Literal[\"jpeg\"]"""
    output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Compression level for the output image. Default: 100."""
    moderation: Optional[Literal["auto", "low"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type
    or a Literal[\"low\"] type."""
    background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Background type for the generated image. One of ``transparent``, ``opaque``, or ``auto``.
    Default: ``auto``. Is one of the following types: Literal[\"transparent\"],
    Literal[\"opaque\"], Literal[\"auto\"]"""
    input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"high\" and \"low\"."""
    input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional mask for inpainting. Contains ``image_url`` (string, optional) and ``file_id``
    (string, optional)."""
    partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Number of partial images to generate in streaming mode, from 0 (default value) to 3."""
    action: Optional[Union[str, "_models.ImageGenActionEnum"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Whether to generate a new image or edit an existing image. Default: ``auto``. Known values are:
    \"generate\", \"edit\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        model: Optional[
            Union[Literal["gpt-image-1"], Literal["gpt-image-1-mini"], Literal["gpt-image-1.5"], str]
        ] = None,
        quality: Optional[Literal["low", "medium", "high", "auto"]] = None,
        size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None,
        output_format: Optional[Literal["png", "webp", "jpeg"]] = None,
        output_compression: Optional[int] = None,
        moderation: Optional[Literal["auto", "low"]] = None,
        background: Optional[Literal["transparent", "opaque", "auto"]] = None,
        input_fidelity: Optional[Union[str, "_models.InputFidelity"]] = None,
        input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None,
        partial_images: Optional[int] = None,
        action: Optional[Union[str, "_models.ImageGenActionEnum"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator to this subclass's fixed value.
        self.type = ToolType.IMAGE_GENERATION  # type: ignore
+
+
class ImageGenToolInputImageMask(_Model):
    """Optional mask for image inpainting.

    Both fields are optional; a mask may be supplied either as an image URL or
    as a file ID (see the ``input_image_mask`` description on the image
    generation tool, which documents this shape).

    :ivar image_url: Image URL of the inpainting mask (optional).
    :vartype image_url: str
    :ivar file_id: File ID of the inpainting mask (optional).
    :vartype file_id: str
    """

    # Image URL of the inpainting mask (optional).
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # File ID of the inpainting mask (optional).
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class InlineSkillParam(ContainerSkill, discriminator="inline"):
    """Defines an inline skill supplied with this request.

    :ivar type: Defines an inline skill for this request. Required. INLINE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INLINE
    :ivar name: The name of the skill. Required.
    :vartype name: str
    :ivar description: The description of the skill. Required.
    :vartype description: str
    :ivar source: Inline skill payload. Required.
    :vartype source: ~azure.ai.responses.server.sdk.models.models.InlineSkillSourceParam
    """

    type: Literal[ContainerSkillType.INLINE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Defines an inline skill for this request. Required. INLINE."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the skill. Required."""
    description: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The description of the skill. Required."""
    source: "_models.InlineSkillSourceParam" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Inline skill payload. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: str,
        source: "_models.InlineSkillSourceParam",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this container-skill variant.
        self.type = ContainerSkillType.INLINE  # type: ignore
+
+
class InlineSkillSourceParam(_Model):
    """Inline skill payload.

    :ivar type: The type of the inline skill source. Must be ``base64``. Required. Default value is
     "base64".
    :vartype type: str
    :ivar media_type: The media type of the inline skill payload. Must be ``application/zip``.
     Required. Default value is "application/zip".
    :vartype media_type: str
    :ivar data: Base64-encoded skill zip bundle. Required.
    :vartype data: str
    """

    type: Literal["base64"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the inline skill source. Must be ``base64``. Required. Default value is \"base64\"."""
    media_type: Literal["application/zip"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The media type of the inline skill payload. Must be ``application/zip``. Required. Default
    value is \"application/zip\"."""
    data: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Base64-encoded skill zip bundle. Required."""

    @overload
    def __init__(
        self,
        *,
        data: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` and ``media_type`` are schema constants, so they are pinned
        # here rather than accepted from the caller — the keyword-only
        # overload above exposes only ``data``.
        self.type: Literal["base64"] = "base64"
        self.media_type: Literal["application/zip"] = "application/zip"
+
+
class InputFileContent(_Model):
    """Input file.

    :ivar type: The type of the input item. Always ``input_file``. Required. Default value is
     "input_file".
    :vartype type: str
    :ivar file_id: The ID of the file to be sent to the model.
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    """

    type: Literal["input_file"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_file``. Required. Default value is \"input_file\"."""
    # The ID of the file to be sent to the model (optional).
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the file to be sent to the model."""
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the file to be sent to the model."""
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the file to be sent to the model."""

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_url: Optional[str] = None,
        file_data: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a schema constant for this content kind.
        self.type: Literal["input_file"] = "input_file"
+
+
class InputFileContentParam(_Model):
    """Input file.

    Field descriptions mirror :class:`InputFileContent`, which documents the
    same shape.

    :ivar type: The type of the input item. Always ``input_file``. Required. Default value is
     "input_file".
    :vartype type: str
    :ivar file_id: The ID of the file to be sent to the model.
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    """

    type: Literal["input_file"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_file``. Required. Default value is \"input_file\"."""
    # The ID of the file to be sent to the model (optional).
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The name of the file to be sent to the model (optional).
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The content of the file to be sent to the model (optional).
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The URL of the file to be sent to the model (optional).
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_data: Optional[str] = None,
        file_url: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a schema constant for this content kind.
        self.type: Literal["input_file"] = "input_file"
+
+
class InputImageContent(_Model):
    """Input image.

    :ivar type: The type of the input item. Always ``input_image``. Required. Default value is
     "input_image".
    :vartype type: str
    :ivar image_url: The URL of the image to be sent to the model.
    :vartype image_url: str
    :ivar file_id: The ID of the file to be sent to the model.
    :vartype file_id: str
    :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``,
     or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.ImageDetail
    """

    type: Literal["input_image"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_image``. Required. Default value is \"input_image\"."""
    # The URL of the image to be sent to the model (optional).
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The ID of the file to be sent to the model (optional).
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``.
    Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        detail: Union[str, "_models.ImageDetail"],
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a schema constant for this content kind.
        self.type: Literal["input_image"] = "input_image"
+
+
class InputImageContentParamAutoParam(_Model):
    """Input image.

    Variant of :class:`InputImageContent` in which ``detail`` is optional.

    :ivar type: The type of the input item. Always ``input_image``. Required. Default value is
     "input_image".
    :vartype type: str
    :ivar image_url: The URL of the image to be sent to the model.
    :vartype image_url: str
    :ivar file_id: The ID of the file to be sent to the model.
    :vartype file_id: str
    :ivar detail: Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.DetailEnum
    """

    type: Literal["input_image"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_image``. Required. Default value is \"input_image\"."""
    # The URL of the image to be sent to the model (optional).
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # The ID of the file to be sent to the model (optional).
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Optional[Union[str, "_models.DetailEnum"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
        detail: Optional[Union[str, "_models.DetailEnum"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a schema constant for this content kind.
        self.type: Literal["input_image"] = "input_image"
+
+
class InputTextContent(_Model):
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. Default value is
     "input_text".
    :vartype type: str
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal["input_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_text``. Required. Default value is \"input_text\"."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a schema constant for this content kind.
        self.type: Literal["input_text"] = "input_text"
+
+
class InputTextContentParam(_Model):
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. Default value is
     "input_text".
    :vartype type: str
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal["input_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the input item. Always ``input_text``. Required. Default value is \"input_text\"."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a schema constant for this content kind.
        self.type: Literal["input_text"] = "input_text"
+
+
class ItemCodeInterpreterToolCall(Item, discriminator="code_interpreter_call"):
    """Code interpreter tool call.

    :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``.
     Required. CODE_INTERPRETER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER_CALL
    :ivar id: The unique ID of the code interpreter tool call. Required.
    :vartype id: str
    :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``,
     ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"],
     Literal["interpreting"], Literal["failed"]
    :vartype status: str
    :ivar container_id: The ID of the container used to run the code. Required.
    :vartype container_id: str
    :ivar code: Required.
    :vartype code: str
    :ivar outputs: Required.
    :vartype outputs: list[~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputLogs
     or ~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputImage]
    """

    type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.
    CODE_INTERPRETER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the code interpreter tool call. Required."""
    status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``,
    ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"],
    Literal[\"interpreting\"], Literal[\"failed\"]"""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the container used to run the code. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"],
        container_id: str,
        code: str,
        outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this item variant.
        self.type = ItemType.CODE_INTERPRETER_CALL  # type: ignore
+
+
class ItemComputerToolCall(Item, discriminator="computer_call"):
    """Computer tool call.

    :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL
    :ivar id: The unique ID of the computer call. Required.
    :vartype id: str
    :ivar call_id: An identifier used when responding to the tool call with output. Required.
    :vartype call_id: str
    :ivar action: Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.ComputerAction
    :ivar pending_safety_checks: The pending safety checks for the computer call. Required.
    :vartype pending_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the computer call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used when responding to the tool call with output. Required."""
    action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The pending safety checks for the computer call. Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.ComputerAction",
        pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"],
        status: Literal["in_progress", "completed", "incomplete"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this item variant.
        self.type = ItemType.COMPUTER_CALL  # type: ignore
+
+
class ItemCustomToolCall(Item, discriminator="custom_tool_call"):
    """Custom tool call.

    :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required.
     CUSTOM_TOOL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL
    :ivar id: The unique ID of the custom tool call in the OpenAI platform.
    :vartype id: str
    :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required.
    :vartype call_id: str
    :ivar name: The name of the custom tool being called. Required.
    :vartype name: str
    :ivar input: The input for the custom tool call generated by the model. Required.
    :vartype input: str
    """

    type: Literal[ItemType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used to map this custom tool call to a tool call output. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the custom tool being called. Required."""
    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The input for the custom tool call generated by the model. Required."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        input: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this item variant.
        self.type = ItemType.CUSTOM_TOOL_CALL  # type: ignore
+
+
class ItemCustomToolCallOutput(Item, discriminator="custom_tool_call_output"):
    """Custom tool call output.

    :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``.
     Required. CUSTOM_TOOL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL_OUTPUT
    :ivar id: The unique ID of the custom tool call output in the OpenAI platform.
    :vartype id: str
    :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call.
     Required.
    :vartype call_id: str
    :ivar output: The output from the custom tool call generated by your code. Can be a string or
     a list of output content. Required. Is either a str type or a list of
     FunctionAndCustomToolCallOutput.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    """

    type: Literal[ItemType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.
    CUSTOM_TOOL_CALL_OUTPUT."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call output in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The call ID, used to map this custom tool call output to a custom tool call. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the custom tool call generated by your code. Can be a string or a list of
    output content. Required. Is either a str type or a list of FunctionAndCustomToolCallOutput."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this item variant.
        self.type = ItemType.CUSTOM_TOOL_CALL_OUTPUT  # type: ignore
+
+
class ItemField(_Model):
    """An item representing a message, tool call, tool output, reasoning, or other response element.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ItemFieldApplyPatchToolCall, ItemFieldApplyPatchToolCallOutput,
    ItemFieldCodeInterpreterToolCall, ItemFieldCompactionBody, ItemFieldComputerToolCall,
    ItemFieldComputerToolCallOutput, ItemFieldCustomToolCall, ItemFieldCustomToolCallOutput,
    ItemFieldFileSearchToolCall, ItemFieldFunctionToolCall, ItemFieldFunctionToolCallOutput,
    ItemFieldImageGenToolCall, ItemFieldLocalShellToolCall, ItemFieldLocalShellToolCallOutput,
    ItemFieldMcpApprovalRequest, ItemFieldMcpApprovalResponseResource, ItemFieldMcpToolCall,
    ItemFieldMcpListTools, ItemFieldMessage, ItemFieldReasoningItem, ItemFieldFunctionShellCall,
    ItemFieldFunctionShellCallOutput, ItemFieldWebSearchToolCall

    :ivar type: Required. Known values are: "message", "function_call", "function_call_output",
     "file_search_call", "web_search_call", "image_generation_call", "computer_call",
     "computer_call_output", "reasoning", "compaction", "code_interpreter_call", "local_shell_call",
     "local_shell_call_output", "shell_call", "shell_call_output", "apply_patch_call",
     "apply_patch_call_output", "mcp_list_tools", "mcp_approval_request", "mcp_approval_response",
     "mcp_call", "custom_tool_call", and "custom_tool_call_output".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ItemFieldType
    """

    # Discriminator-value -> subclass registry; presumably populated by the
    # base ``_Model`` machinery as subclasses declare ``discriminator=...`` —
    # confirm against the _Model implementation.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"message\", \"function_call\", \"function_call_output\",
    \"file_search_call\", \"web_search_call\", \"image_generation_call\", \"computer_call\",
    \"computer_call_output\", \"reasoning\", \"compaction\", \"code_interpreter_call\",
    \"local_shell_call\", \"local_shell_call_output\", \"shell_call\", \"shell_call_output\",
    \"apply_patch_call\", \"apply_patch_call_output\", \"mcp_list_tools\",
    \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"custom_tool_call\", and
    \"custom_tool_call_output\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ItemFieldApplyPatchToolCall(ItemField, discriminator="apply_patch_call"):
    """Apply patch tool call.

    :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL
    :ivar id: The unique ID of the apply patch tool call. Populated when this item is returned via
     API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``.
     Required. Known values are: "in_progress" and "completed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallStatus
    :ivar operation: Apply patch operation. Required.
    :vartype operation: ~azure.ai.responses.server.sdk.models.models.ApplyPatchFileOperation
    :ivar created_by: The ID of the entity that created this tool call.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call. Populated when this item is returned via API.
    Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required.
    Known values are: \"in_progress\" and \"completed\"."""
    operation: "_models.ApplyPatchFileOperation" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Apply patch operation. Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the entity that created this tool call."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallStatus"],
        operation: "_models.ApplyPatchFileOperation",
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this item variant.
        self.type = ItemFieldType.APPLY_PATCH_CALL  # type: ignore
+
+
class ItemFieldApplyPatchToolCallOutput(ItemField, discriminator="apply_patch_call_output"):
    """Apply patch tool call output.

    :ivar type: The type of the item. Always ``apply_patch_call_output``. Required.
     APPLY_PATCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL_OUTPUT
    :ivar id: The unique ID of the apply patch tool call output. Populated when this item is
     returned via API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call output. One of ``completed`` or
     ``failed``. Required. Known values are: "completed" and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallOutputStatus
    :ivar output: Output text of the apply patch operation (optional).
    :vartype output: str
    :ivar created_by: The ID of the entity that created this tool call output.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call output. Populated when this item is returned via
    API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallOutputStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required.
    Known values are: \"completed\" and \"failed\"."""
    # Output text of the apply patch operation (optional).
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the entity that created this tool call output."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallOutputStatus"],
        output: Optional[str] = None,
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator value for this item variant.
        self.type = ItemFieldType.APPLY_PATCH_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldCodeInterpreterToolCall(ItemField, discriminator="code_interpreter_call"):
    """Code interpreter tool call.

    :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``.
     Required. CODE_INTERPRETER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER_CALL
    :ivar id: The unique ID of the code interpreter tool call. Required.
    :vartype id: str
    :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``,
     ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"],
     Literal["interpreting"], Literal["failed"]
    :vartype status: str
    :ivar container_id: The ID of the container used to run the code. Required.
    :vartype container_id: str
    :ivar code: Required.
    :vartype code: str
    :ivar outputs: Required.
    :vartype outputs: list[~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputLogs
     or ~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputImage]
    """

    type: Literal[ItemFieldType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.
    CODE_INTERPRETER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the code interpreter tool call. Required."""
    status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``,
    ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"],
    Literal[\"interpreting\"], Literal[\"failed\"]"""
    container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the container used to run the code. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"],
        container_id: str,
        code: str,
        outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.CODE_INTERPRETER_CALL  # type: ignore
+
+
class ItemFieldCompactionBody(ItemField, discriminator="compaction"):
    """Compaction item.

    :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPACTION
    :ivar id: The unique ID of the compaction item. Required.
    :vartype id: str
    :ivar encrypted_content: The encrypted content that was produced by compaction. Required.
    :vartype encrypted_content: str
    :ivar created_by: The identifier of the actor that created the item.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``compaction``. Required. COMPACTION."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the compaction item. Required."""
    encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The encrypted content that was produced by compaction. Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of the actor that created the item."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        encrypted_content: str,
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.COMPACTION  # type: ignore
+
+
class ItemFieldComputerToolCall(ItemField, discriminator="computer_call"):
    """Computer tool call.

    :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL
    :ivar id: The unique ID of the computer call. Required.
    :vartype id: str
    :ivar call_id: An identifier used when responding to the tool call with output. Required.
    :vartype call_id: str
    :ivar action: Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.ComputerAction
    :ivar pending_safety_checks: The pending safety checks for the computer call. Required.
    :vartype pending_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[ItemFieldType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the computer call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used when responding to the tool call with output. Required."""
    action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The pending safety checks for the computer call. Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.ComputerAction",
        pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"],
        status: Literal["in_progress", "completed", "incomplete"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.COMPUTER_CALL  # type: ignore
+
+
class ItemFieldComputerToolCallOutput(ItemField, discriminator="computer_call_output"):
    """Computer tool call output.

    :ivar type: The type of the computer tool call output. Always ``computer_call_output``.
     Required. COMPUTER_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL_OUTPUT
    :ivar id: The ID of the computer tool call output. Required.
    :vartype id: str
    :ivar call_id: The ID of the computer tool call that produced the output. Required.
    :vartype call_id: str
    :ivar acknowledged_safety_checks: The safety checks reported by the API that have been
     acknowledged by the developer.
    :vartype acknowledged_safety_checks:
     list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
    :ivar output: Required.
    :vartype output: ~azure.ai.responses.server.sdk.models.models.ComputerScreenshotImage
    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
     ``incomplete``. Populated when input items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    type: Literal[ItemFieldType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the computer tool call output. Always ``computer_call_output``. Required.
    COMPUTER_CALL_OUTPUT."""
    # Read-only (visibility=["read"]): service-populated, hence not accepted by the
    # keyword-argument ``__init__`` overload below.
    id: str = rest_field(visibility=["read"])
    """The ID of the computer tool call output. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the computer tool call that produced the output. Required."""
    acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The safety checks reported by the API that have been acknowledged by the developer."""
    output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
    Populated when input items are returned via API. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: "_models.ComputerScreenshotImage",
        acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.COMPUTER_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldCustomToolCall(ItemField, discriminator="custom_tool_call"):
    """Custom tool call.

    :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required.
     CUSTOM_TOOL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL
    :ivar id: The unique ID of the custom tool call in the OpenAI platform.
    :vartype id: str
    :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required.
    :vartype call_id: str
    :ivar name: The name of the custom tool being called. Required.
    :vartype name: str
    :ivar input: The input for the custom tool call generated by the model. Required.
    :vartype input: str
    """

    type: Literal[ItemFieldType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An identifier used to map this custom tool call to a tool call output. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the custom tool being called. Required."""
    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The input for the custom tool call generated by the model. Required."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        input: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.CUSTOM_TOOL_CALL  # type: ignore
+
+
class ItemFieldCustomToolCallOutput(ItemField, discriminator="custom_tool_call_output"):
    """Custom tool call output.

    :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``.
     Required. CUSTOM_TOOL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL_OUTPUT
    :ivar id: The unique ID of the custom tool call output in the OpenAI platform.
    :vartype id: str
    :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call.
     Required.
    :vartype call_id: str
    :ivar output: The output from the custom tool call generated by your code. Can be a string or
     a list of output content. Required. Is either a str type or a
     [FunctionAndCustomToolCallOutput] type.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    """

    type: Literal[ItemFieldType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.
    CUSTOM_TOOL_CALL_OUTPUT."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the custom tool call output in the OpenAI platform."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The call ID, used to map this custom tool call output to a custom tool call. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the custom tool call generated by your code. Can be a string or a list of
    output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.CUSTOM_TOOL_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldFileSearchToolCall(ItemField, discriminator="file_search_call"):
    """File search tool call.

    :ivar id: The unique ID of the file search tool call. Required.
    :vartype id: str
    :ivar type: The type of the file search tool call. Always ``file_search_call``. Required.
     FILE_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH_CALL
    :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``,
     ``completed``, ``incomplete``, or ``failed``. Required. Is one of the following types:
     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"],
     Literal["failed"]
    :vartype status: str
    :ivar queries: The queries used to search for files. Required.
    :vartype queries: list[str]
    :ivar results: The results of the file search tool call, if populated.
    :vartype results: list[~azure.ai.responses.server.sdk.models.models.FileSearchToolCallResults]
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the file search tool call. Required."""
    type: Literal[ItemFieldType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL."""
    status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the file search tool call. One of ``in_progress``, ``searching``,
    ``completed``, ``incomplete``, or ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"],
    Literal[\"incomplete\"], Literal[\"failed\"]"""
    queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The queries used to search for files. Required."""
    # Optional: results of the file search tool call, when returned by the service.
    results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
        queries: list[str],
        results: Optional[list["_models.FileSearchToolCallResults"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.FILE_SEARCH_CALL  # type: ignore
+
+
class ItemFieldFunctionShellCall(ItemField, discriminator="shell_call"):
    """Shell tool call.

    :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL
    :ivar id: The unique ID of the shell tool call. Populated when this item is returned via API.
     Required.
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar action: The shell commands and limits that describe how to run the tool call. Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.FunctionShellAction
    :ivar status: The status of the shell call. One of ``in_progress``, ``completed``, or
     ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.LocalShellCallStatus
    :ivar environment: Required.
    :vartype environment: ~azure.ai.responses.server.sdk.models.models.FunctionShellCallEnvironment
    :ivar created_by: The ID of the entity that created this tool call.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``shell_call``. Required. SHELL_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call. Populated when this item is returned via API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    action: "_models.FunctionShellAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The shell commands and limits that describe how to run the tool call. Required."""
    status: Union[str, "_models.LocalShellCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the shell call. One of ``in_progress``, ``completed``, or ``incomplete``.
    Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    environment: "_models.FunctionShellCallEnvironment" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the entity that created this tool call."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.FunctionShellAction",
        status: Union[str, "_models.LocalShellCallStatus"],
        environment: "_models.FunctionShellCallEnvironment",
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.SHELL_CALL  # type: ignore
+
+
class ItemFieldFunctionShellCallOutput(ItemField, discriminator="shell_call_output"):
    """Shell call output.

    :ivar type: The type of the shell call output. Always ``shell_call_output``. Required.
     SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL_OUTPUT
    :ivar id: The unique ID of the shell call output. Populated when this item is returned via API.
     Required.
    :vartype id: str
    :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the shell call output. One of ``in_progress``, ``completed``, or
     ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
    :vartype status: str or
     ~azure.ai.responses.server.sdk.models.models.LocalShellCallOutputStatusEnum
    :ivar output: An array of shell call output contents. Required.
    :vartype output:
     list[~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputContent]
    :ivar max_output_length: Required.
    :vartype max_output_length: int
    :ivar created_by: The identifier of the actor that created the item.
    :vartype created_by: str
    """

    type: Literal[ItemFieldType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the shell call output. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell call output. Populated when this item is returned via API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the shell tool call generated by the model. Required."""
    status: Union[str, "_models.LocalShellCallOutputStatusEnum"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the shell call output. One of ``in_progress``, ``completed``, or ``incomplete``.
    Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
    output: list["_models.FunctionShellCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An array of shell call output contents. Required."""
    max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    created_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The identifier of the actor that created the item."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.LocalShellCallOutputStatusEnum"],
        output: list["_models.FunctionShellCallOutputContent"],
        max_output_length: int,
        created_by: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.SHELL_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldFunctionToolCall(ItemField, discriminator="function_call"):
    """Function tool call.

    :ivar id: The unique ID of the function tool call. Required.
    :vartype id: str
    :ivar type: The type of the function tool call. Always ``function_call``. Required.
     FUNCTION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the function to run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments to pass to the function. Required.
    :vartype arguments: str
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    # Read-only (visibility=["read"]): service-populated, hence not accepted by the
    # keyword-argument ``__init__`` overload below.
    id: str = rest_field(visibility=["read"])
    """The unique ID of the function tool call. Required."""
    type: Literal[ItemFieldType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the function. Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        arguments: str,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.FUNCTION_CALL  # type: ignore
+
+
class ItemFieldFunctionToolCallOutput(ItemField, discriminator="function_call_output"):
    """Function tool call output.

    :ivar id: The unique ID of the function tool call output. Populated when this item is returned
     via API. Required.
    :vartype id: str
    :ivar type: The type of the function tool call output. Always ``function_call_output``.
     Required. FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL_OUTPUT
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the function call generated by your code. Can be a string or a
     list of output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput]
     type.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str
    """

    # Read-only (visibility=["read"]): service-populated, hence not accepted by the
    # keyword-argument ``__init__`` overload below.
    id: str = rest_field(visibility=["read"])
    """The unique ID of the function tool call output. Populated when this item is returned via API.
    Required."""
    type: Literal[ItemFieldType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call output. Always ``function_call_output``. Required.
    FUNCTION_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the function call generated by your code. Can be a string or a list of output
    content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class ItemFieldImageGenToolCall(ItemField, discriminator="image_generation_call"):
    """Image generation call.

    :ivar type: The type of the image generation call. Always ``image_generation_call``. Required.
     IMAGE_GENERATION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION_CALL
    :ivar id: The unique ID of the image generation call. Required.
    :vartype id: str
    :ivar status: The status of the image generation call. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"]
    :vartype status: str
    :ivar result: Required.
    :vartype result: str
    """

    type: Literal[ItemFieldType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the image generation call. Always ``image_generation_call``. Required.
    IMAGE_GENERATION_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the image generation call. Required."""
    status: Literal["in_progress", "completed", "generating", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the image generation call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]"""
    result: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "generating", "failed"],
        result: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-assert the discriminator so ``type`` is always set on instances.
        self.type = ItemFieldType.IMAGE_GENERATION_CALL  # type: ignore
+
+
+class ItemFieldLocalShellToolCall(ItemField, discriminator="local_shell_call"):
+ """Local shell call.
+
+ :ivar type: The type of the local shell call. Always ``local_shell_call``. Required.
+ LOCAL_SHELL_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL
+ :ivar id: The unique ID of the local shell call. Required.
+ :vartype id: str
+ :ivar call_id: The unique ID of the local shell tool call generated by the model. Required.
+ :vartype call_id: str
+ :ivar action: Required.
+ :vartype action: ~azure.ai.responses.server.sdk.models.models.LocalShellExecAction
+ :ivar status: The status of the local shell call. Required. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+ :vartype status: str or str or str
+ """
+
+ type: Literal[ItemFieldType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the local shell call. Required."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the local shell tool call generated by the model. Required."""
+ action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ status: Literal["in_progress", "completed", "incomplete"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the local shell call. Required. Is one of the following types:
+ Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ call_id: str,
+ action: "_models.LocalShellExecAction",
+ status: Literal["in_progress", "completed", "incomplete"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.LOCAL_SHELL_CALL # type: ignore
+
+
+class ItemFieldLocalShellToolCallOutput(ItemField, discriminator="local_shell_call_output"):
+ """Local shell call output.
+
+ :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``.
+ Required. LOCAL_SHELL_CALL_OUTPUT.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL_OUTPUT
+ :ivar id: The unique ID of the local shell tool call generated by the model. Required.
+ :vartype id: str
+ :ivar output: A JSON string of the output of the local shell tool call. Required.
+ :vartype output: str
+ :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"],
+ Literal["incomplete"]
+ :vartype status: str or str or str
+ """
+
+ type: Literal[ItemFieldType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.
+ LOCAL_SHELL_CALL_OUTPUT."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the local shell tool call generated by the model. Required."""
+ output: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A JSON string of the output of the local shell tool call. Required."""
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"],
+ Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ output: str,
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.LOCAL_SHELL_CALL_OUTPUT # type: ignore
+
+
+class ItemFieldMcpApprovalRequest(ItemField, discriminator="mcp_approval_request"):
+ """MCP approval request.
+
+ :ivar type: The type of the item. Always ``mcp_approval_request``. Required.
+ MCP_APPROVAL_REQUEST.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_REQUEST
+ :ivar id: The unique ID of the approval request. Required.
+ :vartype id: str
+ :ivar server_label: The label of the MCP server making the request. Required.
+ :vartype server_label: str
+ :ivar name: The name of the tool to run. Required.
+ :vartype name: str
+ :ivar arguments: A JSON string of arguments for the tool. Required.
+ :vartype arguments: str
+ """
+
+ type: Literal[ItemFieldType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the approval request. Required."""
+ server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The label of the MCP server making the request. Required."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the tool to run. Required."""
+ arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A JSON string of arguments for the tool. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ server_label: str,
+ name: str,
+ arguments: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.MCP_APPROVAL_REQUEST # type: ignore
+
+
+class ItemFieldMcpApprovalResponseResource(ItemField, discriminator="mcp_approval_response"):
+ """MCP approval response.
+
+ :ivar type: The type of the item. Always ``mcp_approval_response``. Required.
+ MCP_APPROVAL_RESPONSE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_RESPONSE
+ :ivar id: The unique ID of the approval response. Required.
+ :vartype id: str
+ :ivar approval_request_id: The ID of the approval request being answered. Required.
+ :vartype approval_request_id: str
+ :ivar approve: Whether the request was approved. Required.
+ :vartype approve: bool
+ :ivar reason:
+ :vartype reason: str
+ """
+
+ type: Literal[ItemFieldType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the approval response. Required."""
+ approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the approval request being answered. Required."""
+ approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Whether the request was approved. Required."""
+ reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ approval_request_id: str,
+ approve: bool,
+ reason: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.MCP_APPROVAL_RESPONSE # type: ignore
+
+
+class ItemFieldMcpListTools(ItemField, discriminator="mcp_list_tools"):
+ """MCP list tools.
+
+ :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_LIST_TOOLS
+ :ivar id: The unique ID of the list. Required.
+ :vartype id: str
+ :ivar server_label: The label of the MCP server. Required.
+ :vartype server_label: str
+ :ivar tools: The tools available on the server. Required.
+ :vartype tools: list[~azure.ai.responses.server.sdk.models.models.MCPListToolsTool]
+ :ivar error:
+ :vartype error: ~azure.ai.responses.server.sdk.models.models.RealtimeMCPError
+ """
+
+ type: Literal[ItemFieldType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the list. Required."""
+ server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The label of the MCP server. Required."""
+ tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The tools available on the server. Required."""
+ error: Optional["_models.RealtimeMCPError"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ server_label: str,
+ tools: list["_models.MCPListToolsTool"],
+ error: Optional["_models.RealtimeMCPError"] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.MCP_LIST_TOOLS # type: ignore
+
+
+class ItemFieldMcpToolCall(ItemField, discriminator="mcp_call"):
+ """MCP tool call.
+
+ :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_CALL
+ :ivar id: The unique ID of the tool call. Required.
+ :vartype id: str
+ :ivar server_label: The label of the MCP server running the tool. Required.
+ :vartype server_label: str
+ :ivar name: The name of the tool that was run. Required.
+ :vartype name: str
+ :ivar arguments: A JSON string of the arguments passed to the tool. Required.
+ :vartype arguments: str
+ :ivar output:
+ :vartype output: str
+ :ivar error:
+ :vartype error: dict[str, any]
+ :ivar status: The status of the tool call. One of ``in_progress``, ``completed``,
+ ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed",
+ "incomplete", "calling", and "failed".
+ :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MCPToolCallStatus
+ :ivar approval_request_id:
+ :vartype approval_request_id: str
+ """
+
+ type: Literal[ItemFieldType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the item. Always ``mcp_call``. Required. MCP_CALL."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the tool call. Required."""
+ server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The label of the MCP server running the tool. Required."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the tool that was run. Required."""
+ arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A JSON string of the arguments passed to the tool. Required."""
+ output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ error: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``,
+ ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\",
+ \"calling\", and \"failed\"."""
+ approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ server_label: str,
+ name: str,
+ arguments: str,
+ output: Optional[str] = None,
+ error: Optional[dict[str, Any]] = None,
+ status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None,
+ approval_request_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.MCP_CALL # type: ignore
+
+
+class ItemFieldMessage(ItemField, discriminator="message"):
+ """Message.
+
+ :ivar type: The type of the message. Always set to ``message``. Required. MESSAGE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MESSAGE
+ :ivar id: The unique ID of the message. Required.
+ :vartype id: str
+ :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Populated when items are returned via API. Required. Known values are: "in_progress",
+ "completed", and "incomplete".
+ :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MessageStatus
+ :ivar role: The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``,
+ ``critic``, ``discriminator``, ``developer``, or ``tool``. Required. Known values are:
+ "unknown", "user", "assistant", "system", "critic", "discriminator", "developer", and "tool".
+ :vartype role: str or ~azure.ai.responses.server.sdk.models.models.MessageRole
+ :ivar content: The content of the message. Required.
+ :vartype content: list[~azure.ai.responses.server.sdk.models.models.MessageContent]
+ """
+
+ type: Literal[ItemFieldType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the message. Always set to ``message``. Required. MESSAGE."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the message. Required."""
+ status: Union[str, "_models.MessageStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when
+ items are returned via API. Required. Known values are: \"in_progress\", \"completed\", and
+ \"incomplete\"."""
+ role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``, ``critic``,
+ ``discriminator``, ``developer``, or ``tool``. Required. Known values are: \"unknown\",
+ \"user\", \"assistant\", \"system\", \"critic\", \"discriminator\", \"developer\", and
+ \"tool\"."""
+ content: list["_models.MessageContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The content of the message. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ status: Union[str, "_models.MessageStatus"],
+ role: Union[str, "_models.MessageRole"],
+ content: list["_models.MessageContent"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.MESSAGE # type: ignore
+
+
+class ItemFieldReasoningItem(ItemField, discriminator="reasoning"):
+ """Reasoning.
+
+ :ivar type: The type of the object. Always ``reasoning``. Required. REASONING.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING
+ :ivar id: The unique identifier of the reasoning content. Required.
+ :vartype id: str
+ :ivar encrypted_content:
+ :vartype encrypted_content: str
+ :ivar summary: Reasoning summary content. Required.
+ :vartype summary: list[~azure.ai.responses.server.sdk.models.models.SummaryTextContent]
+ :ivar content: Reasoning text content.
+ :vartype content: list[~azure.ai.responses.server.sdk.models.models.ReasoningTextContent]
+ :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Populated when items are returned via API. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+ :vartype status: str or str or str
+ """
+
+ type: Literal[ItemFieldType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the object. Always ``reasoning``. Required. REASONING."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique identifier of the reasoning content. Required."""
+ encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ summary: list["_models.SummaryTextContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Reasoning summary content. Required."""
+ content: Optional[list["_models.ReasoningTextContent"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Reasoning text content."""
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+ when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+ Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ summary: list["_models.SummaryTextContent"],
+ encrypted_content: Optional[str] = None,
+ content: Optional[list["_models.ReasoningTextContent"]] = None,
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.REASONING # type: ignore
+
+
+class ItemFieldWebSearchToolCall(ItemField, discriminator="web_search_call"):
+ """Web search tool call.
+
+ :ivar id: The unique ID of the web search tool call. Required.
+ :vartype id: str
+ :ivar type: The type of the web search tool call. Always ``web_search_call``. Required.
+ WEB_SEARCH_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_CALL
+ :ivar status: The status of the web search tool call. Required. Is one of the following types:
+ Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"]
+ :vartype status: str or str or str or str
+ :ivar action: An object describing the specific action taken in this web search call. Includes
+ details on how the model used the web (search, open_page, find_in_page). Required. Is one of
+ the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind
+ :vartype action: ~azure.ai.responses.server.sdk.models.models.WebSearchActionSearch or
+ ~azure.ai.responses.server.sdk.models.models.WebSearchActionOpenPage or
+ ~azure.ai.responses.server.sdk.models.models.WebSearchActionFind
+ """
+
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the web search tool call. Required."""
+ type: Literal[ItemFieldType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the web search tool call. Always ``web_search_call``. Required. WEB_SEARCH_CALL."""
+ status: Literal["in_progress", "searching", "completed", "failed"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the web search tool call. Required. Is one of the following types:
+ Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]"""
+ action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = (
+ rest_field(visibility=["read", "create", "update", "delete", "query"])
+ )
+ """An object describing the specific action taken in this web search call. Includes details on how
+ the model used the web (search, open_page, find_in_page). Required. Is one of the following
+ types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ status: Literal["in_progress", "searching", "completed", "failed"],
+ action: Union[
+ "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"
+ ],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemFieldType.WEB_SEARCH_CALL # type: ignore
+
+
+class ItemFileSearchToolCall(Item, discriminator="file_search_call"):
+ """File search tool call.
+
+ :ivar id: The unique ID of the file search tool call. Required.
+ :vartype id: str
+ :ivar type: The type of the file search tool call. Always ``file_search_call``. Required.
+ FILE_SEARCH_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH_CALL
+ :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``,
+ ``incomplete`` or ``failed``,. Required. Is one of the following types: Literal["in_progress"],
+ Literal["searching"], Literal["completed"], Literal["incomplete"], Literal["failed"]
+ :vartype status: str or str or str or str or str
+ :ivar queries: The queries used to search for files. Required.
+ :vartype queries: list[str]
+ :ivar results:
+ :vartype results: list[~azure.ai.responses.server.sdk.models.models.FileSearchToolCallResults]
+ """
+
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the file search tool call. Required."""
+ type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL."""
+ status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the file search tool call. One of ``in_progress``, ``searching``, ``incomplete``
+ or ``failed``,. Required. Is one of the following types: Literal[\"in_progress\"],
+ Literal[\"searching\"], Literal[\"completed\"], Literal[\"incomplete\"], Literal[\"failed\"]"""
+ queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The queries used to search for files. Required."""
+ results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
+ queries: list[str],
+ results: Optional[list["_models.FileSearchToolCallResults"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemType.FILE_SEARCH_CALL # type: ignore
+
+
+class ItemFunctionToolCall(Item, discriminator="function_call"):
+ """Function tool call.
+
+ :ivar id: The unique ID of the function tool call. Required.
+ :vartype id: str
+ :ivar type: The type of the function tool call. Always ``function_call``. Required.
+ FUNCTION_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL
+ :ivar call_id: The unique ID of the function tool call generated by the model. Required.
+ :vartype call_id: str
+ :ivar name: The name of the function to run. Required.
+ :vartype name: str
+ :ivar arguments: A JSON string of the arguments to pass to the function. Required.
+ :vartype arguments: str
+ :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Populated when items are returned via API. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+ :vartype status: str or str or str
+ """
+
+ id: str = rest_field(visibility=["read"])
+ """The unique ID of the function tool call. Required."""
+ type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the function tool call generated by the model. Required."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the function to run. Required."""
+ arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A JSON string of the arguments to pass to the function. Required."""
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+ when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+ Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ call_id: str,
+ name: str,
+ arguments: str,
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemType.FUNCTION_CALL # type: ignore
+
+
+class ItemImageGenToolCall(Item, discriminator="image_generation_call"):
+ """Image generation call.
+
+ :ivar type: The type of the image generation call. Always ``image_generation_call``. Required.
+ IMAGE_GENERATION_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION_CALL
+ :ivar id: The unique ID of the image generation call. Required.
+ :vartype id: str
+ :ivar status: The status of the image generation call. Required. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"]
+ :vartype status: str or str or str or str
+ :ivar result: Required.
+ :vartype result: str
+ """
+
+ type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the image generation call. Always ``image_generation_call``. Required.
+ IMAGE_GENERATION_CALL."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the image generation call. Required."""
+ status: Literal["in_progress", "completed", "generating", "failed"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the image generation call. Required. Is one of the following types:
+ Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]"""
+ result: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ status: Literal["in_progress", "completed", "generating", "failed"],
+ result: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore
+
+
class ItemLocalShellToolCall(Item, discriminator="local_shell_call"):
    """Local shell call.

    The ``type`` discriminator is fixed to ``local_shell_call`` and is set
    automatically by the constructor; callers do not supply it.

    :ivar type: The type of the local shell call. Always ``local_shell_call``. Required.
     LOCAL_SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL
    :ivar id: The unique ID of the local shell call. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar action: The shell command execution details. Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.LocalShellExecAction
    :ivar status: The status of the local shell call. Required. One of ``"in_progress"``,
     ``"completed"``, or ``"incomplete"``.
    :vartype status: str
    """

    type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The shell command execution details. Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the local shell call. Required. One of \"in_progress\", \"completed\", or
    \"incomplete\"."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.LocalShellExecAction",
        status: Literal["in_progress", "completed", "incomplete"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.LOCAL_SHELL_CALL  # type: ignore
+
+
class ItemLocalShellToolCallOutput(Item, discriminator="local_shell_call_output"):
    """Local shell call output.

    The ``type`` discriminator is fixed to ``local_shell_call_output`` and is set
    automatically by the constructor; callers do not supply it.

    :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``.
     Required. LOCAL_SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL_OUTPUT
    :ivar id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype id: str
    :ivar output: A JSON string of the output of the local shell tool call. Required.
    :vartype output: str
    :ivar status: Optional status of the output item. One of ``"in_progress"``, ``"completed"``,
     or ``"incomplete"``.
    :vartype status: str
    """

    type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.
    LOCAL_SHELL_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    output: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the output of the local shell tool call. Required."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Optional status. One of \"in_progress\", \"completed\", or \"incomplete\"."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        output: str,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT  # type: ignore
+
+
class ItemMcpApprovalRequest(Item, discriminator="mcp_approval_request"):
    """MCP approval request.

    The ``type`` discriminator is fixed to ``mcp_approval_request`` and is set
    automatically by the constructor; callers do not supply it.

    :ivar type: The type of the item. Always ``mcp_approval_request``. Required.
     MCP_APPROVAL_REQUEST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_REQUEST
    :ivar id: The unique ID of the approval request. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server making the request. Required.
    :vartype server_label: str
    :ivar name: The name of the tool to run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of arguments for the tool. Required.
    :vartype arguments: str
    """

    type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the approval request. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server making the request. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool to run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of arguments for the tool. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.MCP_APPROVAL_REQUEST  # type: ignore
+
+
class ItemMcpListTools(Item, discriminator="mcp_list_tools"):
    """MCP list tools.

    The ``type`` discriminator is fixed to ``mcp_list_tools`` and is set
    automatically by the constructor; callers do not supply it.

    :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_LIST_TOOLS
    :ivar id: The unique ID of the list. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server. Required.
    :vartype server_label: str
    :ivar tools: The tools available on the server. Required.
    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.MCPListToolsTool]
    :ivar error: Optional error details reported by the MCP server.
    :vartype error: ~azure.ai.responses.server.sdk.models.models.RealtimeMCPError
    """

    type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the list. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server. Required."""
    tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The tools available on the server. Required."""
    error: Optional["_models.RealtimeMCPError"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional error details reported by the MCP server."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        tools: list["_models.MCPListToolsTool"],
        error: Optional["_models.RealtimeMCPError"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.MCP_LIST_TOOLS  # type: ignore
+
+
class ItemMcpToolCall(Item, discriminator="mcp_call"):
    """MCP tool call.

    The ``type`` discriminator is fixed to ``mcp_call`` and is set automatically
    by the constructor; callers do not supply it.

    :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_CALL
    :ivar id: The unique ID of the tool call. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server running the tool. Required.
    :vartype server_label: str
    :ivar name: The name of the tool that was run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments passed to the tool. Required.
    :vartype arguments: str
    :ivar output: Optional output returned by the tool.
    :vartype output: str
    :ivar error: Optional error payload as a free-form JSON object.
    :vartype error: dict[str, Any]
    :ivar status: The status of the tool call. One of ``in_progress``, ``completed``,
     ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed",
     "incomplete", "calling", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MCPToolCallStatus
    :ivar approval_request_id: Optional ID of the approval request associated with this call.
    :vartype approval_request_id: str
    """

    type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_call``. Required. MCP_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server running the tool. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool that was run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments passed to the tool. Required."""
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional output returned by the tool."""
    error: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional error payload as a free-form JSON object."""
    status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``,
    ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\",
    \"calling\", and \"failed\"."""
    approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional ID of the approval request associated with this call."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
        output: Optional[str] = None,
        error: Optional[dict[str, Any]] = None,
        status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None,
        approval_request_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.MCP_CALL  # type: ignore
+
+
class ItemMessage(Item, discriminator="message"):
    """Message.

    The ``type`` discriminator is fixed to ``message`` and is set automatically
    by the constructor; callers do not supply it.

    :ivar type: The type of the message. Always set to ``message``. Required. MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MESSAGE
    :ivar id: The unique ID of the message. Required.
    :vartype id: str
    :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Known values are: "in_progress",
     "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MessageStatus
    :ivar role: The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``,
     ``critic``, ``discriminator``, ``developer``, or ``tool``. Required. Known values are:
     "unknown", "user", "assistant", "system", "critic", "discriminator", "developer", and "tool".
    :vartype role: str or ~azure.ai.responses.server.sdk.models.models.MessageRole
    :ivar content: The content of the message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.MessageContent]
    """

    type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the message. Always set to ``message``. Required. MESSAGE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the message. Required."""
    status: Union[str, "_models.MessageStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when
    items are returned via API. Required. Known values are: \"in_progress\", \"completed\", and
    \"incomplete\"."""
    role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``, ``critic``,
    ``discriminator``, ``developer``, or ``tool``. Required. Known values are: \"unknown\",
    \"user\", \"assistant\", \"system\", \"critic\", \"discriminator\", \"developer\", and
    \"tool\"."""
    content: list["_models.MessageContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the message. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Union[str, "_models.MessageStatus"],
        role: Union[str, "_models.MessageRole"],
        content: list["_models.MessageContent"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.MESSAGE  # type: ignore
+
+
class ItemOutputMessage(Item, discriminator="output_message"):
    """Output message.

    Both ``type`` (``output_message``) and ``role`` (``assistant``) are fixed
    values set automatically by the constructor; callers do not supply them.

    :ivar id: The unique ID of the output message. Required.
    :vartype id: str
    :ivar type: The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_MESSAGE
    :ivar role: The role of the output message. Always ``assistant``. Required. Default value is
     "assistant".
    :vartype role: str
    :ivar content: The content of the output message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.OutputMessageContent]
    :ivar phase: Known values are: "commentary" and "final_answer".
    :vartype phase: str or ~azure.ai.responses.server.sdk.models.models.MessagePhase
    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
     ``incomplete``. Populated when input items are returned via API. Required.
    :vartype status: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the output message. Required."""
    type: Literal[ItemType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE."""
    role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\"."""
    content: list["_models.OutputMessageContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The content of the output message. Required."""
    phase: Optional[Union[str, "_models.MessagePhase"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"commentary\" and \"final_answer\"."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
    Populated when input items are returned via API. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        content: list["_models.OutputMessageContent"],
        status: Literal["in_progress", "completed", "incomplete"],
        phase: Optional[Union[str, "_models.MessagePhase"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Both discriminator and role are constants for this subtype.
        self.type = ItemType.OUTPUT_MESSAGE  # type: ignore
        self.role: Literal["assistant"] = "assistant"
+
+
class ItemReasoningItem(Item, discriminator="reasoning"):
    """Reasoning.

    The ``type`` discriminator is fixed to ``reasoning`` and is set automatically
    by the constructor; callers do not supply it.

    :ivar type: The type of the object. Always ``reasoning``. Required. REASONING.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING
    :ivar id: The unique identifier of the reasoning content. Required.
    :vartype id: str
    :ivar encrypted_content: Optional encrypted reasoning content.
    :vartype encrypted_content: str
    :ivar summary: Reasoning summary content. Required.
    :vartype summary: list[~azure.ai.responses.server.sdk.models.models.SummaryTextContent]
    :ivar content: Reasoning text content.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.ReasoningTextContent]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API.
    :vartype status: str
    """

    type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the object. Always ``reasoning``. Required. REASONING."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the reasoning content. Required."""
    encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional encrypted reasoning content."""
    summary: list["_models.SummaryTextContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Reasoning summary content. Required."""
    content: Optional[list["_models.ReasoningTextContent"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Reasoning text content."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        summary: list["_models.SummaryTextContent"],
        encrypted_content: Optional[str] = None,
        content: Optional[list["_models.ReasoningTextContent"]] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.REASONING  # type: ignore
+
+
class ItemReferenceParam(Item, discriminator="item_reference"):
    """Item reference.

    A lightweight pointer to an existing item by ID. The ``type`` discriminator is
    fixed to ``item_reference`` and is set automatically by the constructor.

    :ivar type: The type of item to reference. Always ``item_reference``. Required. ITEM_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ITEM_REFERENCE
    :ivar id: The ID of the item to reference. Required.
    :vartype id: str
    """

    type: Literal[ItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of item to reference. Always ``item_reference``. Required. ITEM_REFERENCE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the item to reference. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.ITEM_REFERENCE  # type: ignore
+
+
class ItemWebSearchToolCall(Item, discriminator="web_search_call"):
    """Web search tool call.

    The ``type`` discriminator is fixed to ``web_search_call`` and is set
    automatically by the constructor; callers do not supply it.

    :ivar id: The unique ID of the web search tool call. Required.
    :vartype id: str
    :ivar type: The type of the web search tool call. Always ``web_search_call``. Required.
     WEB_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_CALL
    :ivar status: The status of the web search tool call. Required. One of ``"in_progress"``,
     ``"searching"``, ``"completed"``, or ``"failed"``.
    :vartype status: str
    :ivar action: An object describing the specific action taken in this web search call. Includes
     details on how the model used the web (search, open_page, find_in_page). Required. Is one of
     the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind
    :vartype action: ~azure.ai.responses.server.sdk.models.models.WebSearchActionSearch or
     ~azure.ai.responses.server.sdk.models.models.WebSearchActionOpenPage or
     ~azure.ai.responses.server.sdk.models.models.WebSearchActionFind
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the web search tool call. Required."""
    type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the web search tool call. Always ``web_search_call``. Required. WEB_SEARCH_CALL."""
    status: Literal["in_progress", "searching", "completed", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the web search tool call. Required. One of \"in_progress\", \"searching\",
    \"completed\", or \"failed\"."""
    action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """An object describing the specific action taken in this web search call. Includes details on how
    the model used the web (search, open_page, find_in_page). Required. Is one of the following
    types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "searching", "completed", "failed"],
        action: Union[
            "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"
        ],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.WEB_SEARCH_CALL  # type: ignore
+
+
class KeyPressAction(ComputerAction, discriminator="keypress"):
    """KeyPress.

    The ``type`` discriminator is fixed to ``keypress`` and is set automatically
    by the constructor. The Python attribute is named ``keys_property`` because the
    wire name ``keys`` would shadow the model's ``keys()`` mapping method.

    :ivar type: Specifies the event type. For a keypress action, this property is always set to
     ``keypress``. Required. KEYPRESS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.KEYPRESS
    :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an
     array of strings, each representing a key. Required. Serialized as ``keys`` on the wire.
    :vartype keys_property: list[str]
    """

    type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a keypress action, this property is always set to ``keypress``.
    Required. KEYPRESS."""
    keys_property: list[str] = rest_field(
        name="keys", visibility=["read", "create", "update", "delete", "query"], original_tsp_name="keys"
    )
    """The combination of keys the model is requesting to be pressed. This is an array of strings,
    each representing a key. Required."""

    @overload
    def __init__(
        self,
        *,
        keys_property: list[str],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ComputerActionType.KEYPRESS  # type: ignore
+
+
class LocalEnvironmentResource(FunctionShellCallEnvironment, discriminator="local"):
    """Local Environment.

    Marker subtype with no fields of its own; the ``type`` discriminator is fixed
    to ``local`` and is set automatically by the constructor.

    :ivar type: The environment type. Always ``local``. Required. LOCAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL
    """

    type: Literal[FunctionShellCallEnvironmentType.LOCAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The environment type. Always ``local``. Required. LOCAL."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = FunctionShellCallEnvironmentType.LOCAL  # type: ignore
+
+
class LocalShellExecAction(_Model):
    """Local shell exec action.

    ``type`` is a constant (``"exec"``) assigned by the constructor; callers
    provide only the command details.

    :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is
     "exec".
    :vartype type: str
    :ivar command: The command to run. Required.
    :vartype command: list[str]
    :ivar timeout_ms: Optional timeout for the command, in milliseconds.
    :vartype timeout_ms: int
    :ivar working_directory: Optional working directory for the command.
    :vartype working_directory: str
    :ivar env: Environment variables to set for the command. Required.
    :vartype env: dict[str, str]
    :ivar user: Optional user to run the command as.
    :vartype user: str
    """

    type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\"."""
    command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The command to run. Required."""
    timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional timeout for the command, in milliseconds."""
    working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional working directory for the command."""
    env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Environment variables to set for the command. Required."""
    user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional user to run the command as."""

    @overload
    def __init__(
        self,
        *,
        command: list[str],
        env: dict[str, str],
        timeout_ms: Optional[int] = None,
        working_directory: Optional[str] = None,
        user: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # ``type`` is a constant for this model, not a constructor argument.
        self.type: Literal["exec"] = "exec"
+
+
class LocalShellToolParam(Tool, discriminator="local_shell"):
    """Local shell tool.

    Marker subtype with no fields of its own; the ``type`` discriminator is fixed
    to ``local_shell`` and is set automatically by the constructor.

    :ivar type: The type of the local shell tool. Always ``local_shell``. Required. LOCAL_SHELL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL
    """

    type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell tool. Always ``local_shell``. Required. LOCAL_SHELL."""

    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ToolType.LOCAL_SHELL  # type: ignore
+
+
class LocalSkillParam(_Model):
    """Parameters describing a locally-available skill.

    All three fields are required and must be supplied at construction time
    (unless initializing from a raw JSON mapping).

    :ivar name: The name of the skill. Required.
    :vartype name: str
    :ivar description: The description of the skill. Required.
    :vartype description: str
    :ivar path: The path to the directory containing the skill. Required.
    :vartype path: str
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the skill. Required."""
    description: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The description of the skill. Required."""
    path: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The path to the directory containing the skill. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: str,
        path: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class LogProb(_Model):
    """Log probability of a single token.

    :ivar token: The token text. Required.
    :vartype token: str
    :ivar logprob: The log probability of the token. Required.
    :vartype logprob: int
    :ivar bytes: The UTF-8 byte values of the token. Required.
    :vartype bytes: list[int]
    :ivar top_logprobs: The most likely alternative tokens and their log probabilities. Required.
    :vartype top_logprobs: list[~azure.ai.responses.server.sdk.models.models.TopLogProb]
    """

    token: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The token text. Required."""
    logprob: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The log probability of the token. Required.
    NOTE(review): log probabilities are typically floats; the ``int`` annotation mirrors the
    generating spec — confirm upstream before relying on integral values."""
    bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The UTF-8 byte values of the token. Required."""
    top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The most likely alternative tokens and their log probabilities. Required."""

    @overload
    def __init__(
        self,
        *,
        token: str,
        logprob: int,
        bytes: list[int],
        top_logprobs: list["_models.TopLogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MCPApprovalResponse(Item, discriminator="mcp_approval_response"):
    """MCP approval response.

    The ``type`` discriminator is fixed to ``mcp_approval_response`` and is set
    automatically by the constructor; callers do not supply it.

    :ivar type: The type of the item. Always ``mcp_approval_response``. Required.
     MCP_APPROVAL_RESPONSE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_RESPONSE
    :ivar id: Optional unique ID of this item.
    :vartype id: str
    :ivar approval_request_id: The ID of the approval request being answered. Required.
    :vartype approval_request_id: str
    :ivar approve: Whether the request was approved. Required.
    :vartype approve: bool
    :ivar reason: Optional reason for the approval decision.
    :vartype reason: str
    """

    type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE."""
    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional unique ID of this item."""
    approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the approval request being answered. Required."""
    approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Whether the request was approved. Required."""
    reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional reason for the approval decision."""

    @overload
    def __init__(
        self,
        *,
        approval_request_id: str,
        approve: bool,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        reason: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # The discriminator value is fixed for this subtype.
        self.type = ItemType.MCP_APPROVAL_RESPONSE  # type: ignore
+
+
class MCPListToolsTool(_Model):
    """MCP list tools tool.

    Describes a single tool exposed by an MCP server.

    :ivar name: The name of the tool. Required.
    :vartype name: str
    :ivar description: Human-readable description of the tool.
    :vartype description: str
    :ivar input_schema: The JSON schema describing the tool's input. Required.
    :vartype input_schema: ~azure.ai.responses.server.sdk.models.models.MCPListToolsToolInputSchema
    :ivar annotations: Additional annotations attached to the tool.
    :vartype annotations: ~azure.ai.responses.server.sdk.models.models.MCPListToolsToolAnnotations
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool. Required."""
    # Optional; the service specification carries no description for this field.
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    input_schema: "_models.MCPListToolsToolInputSchema" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The JSON schema describing the tool's input. Required."""
    # Optional; the service specification carries no description for this field.
    annotations: Optional["_models.MCPListToolsToolAnnotations"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )

    @overload
    def __init__(
        self,
        *,
        name: str,
        input_schema: "_models.MCPListToolsToolInputSchema",
        description: Optional[str] = None,
        annotations: Optional["_models.MCPListToolsToolAnnotations"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MCPListToolsToolAnnotations(_Model):
    """Annotations for an :class:`MCPListToolsTool`.

    No typed properties are declared on this model.
    """
+
+
class MCPListToolsToolInputSchema(_Model):
    """JSON schema describing an MCP tool's input (see :class:`MCPListToolsTool`).

    No typed properties are declared on this model.
    """
+
+
class MCPTool(Tool, discriminator="mcp"):
    """MCP tool.

    :ivar type: The type of the MCP tool. Always ``mcp``. Required. MCP.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP
    :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required.
    :vartype server_label: str
    :ivar server_url: The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be
     provided.
    :vartype server_url: str
    :ivar connector_id: Identifier for service connectors, like those available in ChatGPT. One of
     ``server_url`` or ``connector_id`` must be provided. See the service documentation for
     details on service connectors. Currently supported ``connector_id`` values are:

     * Dropbox: `connector_dropbox`
     * Gmail: `connector_gmail`
     * Google Calendar: `connector_googlecalendar`
     * Google Drive: `connector_googledrive`
     * Microsoft Teams: `connector_microsoftteams`
     * Outlook Calendar: `connector_outlookcalendar`
     * Outlook Email: `connector_outlookemail`
     * SharePoint: `connector_sharepoint`. Is one of the following types:
     Literal["connector_dropbox"], Literal["connector_gmail"], Literal["connector_googlecalendar"],
     Literal["connector_googledrive"], Literal["connector_microsoftteams"],
     Literal["connector_outlookcalendar"], Literal["connector_outlookemail"],
     Literal["connector_sharepoint"]
    :vartype connector_id: str
    :ivar authorization: An OAuth access token that can be used with a remote MCP server, either
     with a custom MCP server URL or a service connector. Your application must handle the OAuth
     authorization flow and provide the token here.
    :vartype authorization: str
    :ivar server_description: Optional description of the MCP server, used to provide more context.
    :vartype server_description: str
    :ivar headers: HTTP headers to include on requests to the MCP server.
    :vartype headers: dict[str, str]
    :ivar allowed_tools: Is either a [str] type or a MCPToolFilter type.
    :vartype allowed_tools: list[str] or ~azure.ai.responses.server.sdk.models.models.MCPToolFilter
    :ivar require_approval: Is one of the following types: MCPToolRequireApproval,
     Literal["always"], Literal["never"]
    :vartype require_approval: ~azure.ai.responses.server.sdk.models.models.MCPToolRequireApproval
     or str
    :ivar project_connection_id: The connection ID in the project for the MCP server. The
     connection stores authentication and other connection details needed to connect to the MCP
     server.
    :vartype project_connection_id: str
    """

    type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the MCP tool. Always ``mcp``. Required. MCP."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A label for this MCP server, used to identify it in tool calls. Required."""
    server_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL for the MCP server. One of ``server_url`` or ``connector_id`` must be provided."""
    connector_id: Optional[
        Literal[
            "connector_dropbox",
            "connector_gmail",
            "connector_googlecalendar",
            "connector_googledrive",
            "connector_microsoftteams",
            "connector_outlookcalendar",
            "connector_outlookemail",
            "connector_sharepoint",
        ]
    ] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Identifier for service connectors, like those available in ChatGPT. One of ``server_url`` or
    ``connector_id`` must be provided. See the service documentation for details on service
    connectors. Currently supported ``connector_id`` values are:

    * Dropbox: `connector_dropbox`
    * Gmail: `connector_gmail`
    * Google Calendar: `connector_googlecalendar`
    * Google Drive: `connector_googledrive`
    * Microsoft Teams: `connector_microsoftteams`
    * Outlook Calendar: `connector_outlookcalendar`
    * Outlook Email: `connector_outlookemail`
    * SharePoint: `connector_sharepoint`. Is one of the following types:
    Literal[\"connector_dropbox\"], Literal[\"connector_gmail\"],
    Literal[\"connector_googlecalendar\"], Literal[\"connector_googledrive\"],
    Literal[\"connector_microsoftteams\"], Literal[\"connector_outlookcalendar\"],
    Literal[\"connector_outlookemail\"], Literal[\"connector_sharepoint\"]"""
    authorization: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An OAuth access token that can be used with a remote MCP server, either with a custom MCP
    server URL or a service connector. Your application must handle the OAuth authorization flow
    and provide the token here."""
    server_description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional description of the MCP server, used to provide more context."""
    # Optional headers sent with requests to the MCP server — presumably for authentication or
    # routing; the service specification carries no description for this field.
    headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    allowed_tools: Optional[Union[list[str], "_models.MCPToolFilter"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is either a [str] type or a MCPToolFilter type."""
    require_approval: Optional[Union["_models.MCPToolRequireApproval", Literal["always"], Literal["never"]]] = (
        rest_field(visibility=["read", "create", "update", "delete", "query"])
    )
    """Is one of the following types: MCPToolRequireApproval, Literal[\"always\"], Literal[\"never\"]"""
    project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The connection ID in the project for the MCP server. The connection stores authentication and
    other connection details needed to connect to the MCP server."""

    @overload
    def __init__(
        self,
        *,
        server_label: str,
        server_url: Optional[str] = None,
        connector_id: Optional[
            Literal[
                "connector_dropbox",
                "connector_gmail",
                "connector_googlecalendar",
                "connector_googledrive",
                "connector_microsoftteams",
                "connector_outlookcalendar",
                "connector_outlookemail",
                "connector_sharepoint",
            ]
        ] = None,
        authorization: Optional[str] = None,
        server_description: Optional[str] = None,
        headers: Optional[dict[str, str]] = None,
        allowed_tools: Optional[Union[list[str], "_models.MCPToolFilter"]] = None,
        require_approval: Optional[Union["_models.MCPToolRequireApproval", Literal["always"], Literal["never"]]] = None,
        project_connection_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = ToolType.MCP # type: ignore
+
+
class MCPToolFilter(_Model):
    """MCP tool filter.

    :ivar tool_names: MCP allowed tools.
    :vartype tool_names: list[str]
    :ivar read_only: Indicates whether or not a tool modifies data or is read-only. If an MCP
     server tool is annotated with ``readOnlyHint``, it will match this filter.
    :vartype read_only: bool
    """

    tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """MCP allowed tools."""
    read_only: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Indicates whether or not a tool modifies data or is read-only. If an MCP server tool is
    annotated with ``readOnlyHint``, it will match this filter."""

    @overload
    def __init__(
        self,
        *,
        tool_names: Optional[list[str]] = None,
        read_only: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MCPToolRequireApproval(_Model):
    """MCPToolRequireApproval.

    Used as one alternative for :class:`MCPTool`'s ``require_approval`` setting.

    :ivar always: Filter of tools to which the ``always``-require-approval policy applies.
    :vartype always: ~azure.ai.responses.server.sdk.models.models.MCPToolFilter
    :ivar never: Filter of tools to which the ``never``-require-approval policy applies.
    :vartype never: ~azure.ai.responses.server.sdk.models.models.MCPToolFilter
    """

    # NOTE(review): field semantics inferred from the names — the service specification carries
    # no descriptions for ``always``/``never``; confirm against the service documentation.
    always: Optional["_models.MCPToolFilter"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    never: Optional["_models.MCPToolFilter"] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        always: Optional["_models.MCPToolFilter"] = None,
        never: Optional["_models.MCPToolFilter"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MemorySearchItem(_Model):
    """A retrieved memory item from memory search.

    :ivar memory_item: Retrieved memory item. Required.
    :vartype memory_item: ~azure.ai.responses.server.sdk.models.models.MemoryItem
    """

    memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Retrieved memory item. Required."""

    @overload
    def __init__(
        self,
        *,
        memory_item: "_models.MemoryItem",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MemorySearchOptions(_Model):
    """Memory search options.

    Used by the memory-search tools (see :class:`MemorySearchTool`).

    :ivar max_memories: Maximum number of memory items to return.
    :vartype max_memories: int
    """

    max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Maximum number of memory items to return."""

    @overload
    def __init__(
        self,
        *,
        max_memories: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class MemorySearchPreviewTool(Tool, discriminator="memory_search_preview"):
    """A tool for integrating memories into the agent.

    :ivar type: The type of the tool. Always ``memory_search_preview``. Required.
     MEMORY_SEARCH_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH_PREVIEW
    :ivar memory_store_name: The name of the memory store to use. Required.
    :vartype memory_store_name: str
    :ivar scope: The namespace used to group and isolate memories, such as a user ID. Limits which
     memories can be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to
     the current signed-in user. Required.
    :vartype scope: str
    :ivar search_options: Options for searching the memory store.
    :vartype search_options: ~azure.ai.responses.server.sdk.models.models.MemorySearchOptions
    :ivar update_delay: Time to wait before updating memories after inactivity (seconds). Default
     300.
    :vartype update_delay: int
    """

    type: Literal[ToolType.MEMORY_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the tool. Always ``memory_search_preview``. Required. MEMORY_SEARCH_PREVIEW."""
    memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the memory store to use. Required."""
    scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The namespace used to group and isolate memories, such as a user ID. Limits which memories can
    be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to the current
    signed-in user. Required."""
    search_options: Optional["_models.MemorySearchOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Options for searching the memory store."""
    update_delay: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Time to wait before updating memories after inactivity (seconds). Default 300."""

    @overload
    def __init__(
        self,
        *,
        memory_store_name: str,
        scope: str,
        search_options: Optional["_models.MemorySearchOptions"] = None,
        update_delay: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = ToolType.MEMORY_SEARCH_PREVIEW # type: ignore
+
+
class MemorySearchTool(Tool, discriminator="memory_search"):
    """A tool for integrating memories into the agent.

    :ivar type: The type of the tool. Always ``memory_search``. Required. MEMORY_SEARCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH
    :ivar memory_store_name: The name of the memory store to use. Required.
    :vartype memory_store_name: str
    :ivar scope: The namespace used to group and isolate memories, such as a user ID. Limits which
     memories can be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to
     the current signed-in user. Required.
    :vartype scope: str
    :ivar search_options: Options for searching the memory store.
    :vartype search_options: ~azure.ai.responses.server.sdk.models.models.MemorySearchOptions
    :ivar update_delay: Time to wait before updating memories after inactivity (seconds). Default
     300.
    :vartype update_delay: int
    """

    type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the tool. Always ``memory_search``. Required. MEMORY_SEARCH."""
    memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the memory store to use. Required."""
    scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The namespace used to group and isolate memories, such as a user ID. Limits which memories can
    be retrieved or updated. Use special variable ``{{$userId}}`` to scope memories to the current
    signed-in user. Required."""
    search_options: Optional["_models.MemorySearchOptions"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Options for searching the memory store."""
    update_delay: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Time to wait before updating memories after inactivity (seconds). Default 300."""

    @overload
    def __init__(
        self,
        *,
        memory_store_name: str,
        scope: str,
        search_options: Optional["_models.MemorySearchOptions"] = None,
        update_delay: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = ToolType.MEMORY_SEARCH # type: ignore
+
+
class MemorySearchToolCallItemParam(Item, discriminator="memory_search_call"):
    """Input-item form of a memory search tool call.

    :ivar type: Required. MEMORY_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH_CALL
    :ivar results: The results returned from the memory search.
    :vartype results: list[~azure.ai.responses.server.sdk.models.models.MemorySearchItem]
    """

    type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. MEMORY_SEARCH_CALL."""
    results: Optional[list["_models.MemorySearchItem"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The results returned from the memory search."""

    @overload
    def __init__(
        self,
        *,
        results: Optional[list["_models.MemorySearchItem"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore
+
+
class MemorySearchToolCallItemResource(OutputItem, discriminator="memory_search_call"):
    """Output-item form of a memory search tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. MEMORY_SEARCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MEMORY_SEARCH_CALL
    :ivar status: The status of the memory search tool call. One of ``in_progress``, ``searching``,
     ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types:
     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"],
     Literal["failed"]
    :vartype status: str
    :ivar results: The results returned from the memory search.
    :vartype results: list[~azure.ai.responses.server.sdk.models.models.MemorySearchItem]
    """

    type: Literal[OutputItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """Required. MEMORY_SEARCH_CALL."""
    status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the memory search tool call. One of ``in_progress``, ``searching``,
    ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"],
    Literal[\"incomplete\"], Literal[\"failed\"]"""
    results: Optional[list["_models.MemorySearchItem"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The results returned from the memory search."""

    @overload
    def __init__(
        self,
        *,
        status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        results: Optional[list["_models.MemorySearchItem"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = OutputItemType.MEMORY_SEARCH_CALL # type: ignore
+
+
class MessageContentInputFileContent(MessageContent, discriminator="input_file"):
    """Input file.

    The file may be referenced by ``file_id``, by ``file_url``, or supplied inline via
    ``file_data`` — all four fields are optional in this model.

    :ivar type: The type of the input item. Always ``input_file``. Required. INPUT_FILE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_FILE
    :ivar file_id: Identifier of a previously uploaded file.
    :vartype file_id: str
    :ivar filename: The name of the file to be sent to the model.
    :vartype filename: str
    :ivar file_url: The URL of the file to be sent to the model.
    :vartype file_url: str
    :ivar file_data: The content of the file to be sent to the model.
    :vartype file_data: str
    """

    type: Literal[MessageContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the input item. Always ``input_file``. Required. INPUT_FILE."""
    # Optional; the service specification carries no description for this field.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the file to be sent to the model."""
    file_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The URL of the file to be sent to the model."""
    file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the file to be sent to the model."""

    @overload
    def __init__(
        self,
        *,
        file_id: Optional[str] = None,
        filename: Optional[str] = None,
        file_url: Optional[str] = None,
        file_data: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = MessageContentType.INPUT_FILE # type: ignore
+
+
class MessageContentInputImageContent(MessageContent, discriminator="input_image"):
    """Input image.

    The image may be referenced by ``image_url`` or by ``file_id``; both are optional in this
    model.

    :ivar image_url: URL of the image to be sent to the model.
    :vartype image_url: str
    :ivar file_id: Identifier of a previously uploaded image file.
    :vartype file_id: str
    :ivar type: The type of the input item. Always ``input_image``. Required. INPUT_IMAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_IMAGE
    :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``,
     or ``auto``. Defaults to ``auto``. Required. Known values are: "low", "high", and "auto".
    :vartype detail: str or ~azure.ai.responses.server.sdk.models.models.ImageDetail
    """

    type: Literal[MessageContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the input item. Always ``input_image``. Required. INPUT_IMAGE."""
    # Optional; the service specification carries no description for this field.
    image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Optional; the service specification carries no description for this field.
    file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    detail: Union[str, "_models.ImageDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``.
    Defaults to ``auto``. Required. Known values are: \"low\", \"high\", and \"auto\"."""

    @overload
    def __init__(
        self,
        *,
        detail: Union[str, "_models.ImageDetail"],
        image_url: Optional[str] = None,
        file_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = MessageContentType.INPUT_IMAGE # type: ignore
+
+
class MessageContentInputTextContent(MessageContent, discriminator="input_text"):
    """Input text.

    :ivar type: The type of the input item. Always ``input_text``. Required. INPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.INPUT_TEXT
    :ivar text: The text input to the model. Required.
    :vartype text: str
    """

    type: Literal[MessageContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the input item. Always ``input_text``. Required. INPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text input to the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = MessageContentType.INPUT_TEXT # type: ignore
+
+
class MessageContentOutputTextContent(MessageContent, discriminator="output_text"):
    """Output text.

    :ivar type: The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_TEXT
    :ivar text: The text output from the model. Required.
    :vartype text: str
    :ivar annotations: The annotations of the text output. Required.
    :vartype annotations: list[~azure.ai.responses.server.sdk.models.models.Annotation]
    :ivar logprobs: Token log probabilities for the text output. Required.
    :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.LogProb]
    """

    type: Literal[MessageContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text output from the model. Required."""
    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The annotations of the text output. Required."""
    logprobs: list["_models.LogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
        annotations: list["_models.Annotation"],
        logprobs: list["_models.LogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = MessageContentType.OUTPUT_TEXT # type: ignore
+
+
class MessageContentReasoningTextContent(MessageContent, discriminator="reasoning_text"):
    """Reasoning text.

    :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required.
     REASONING_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING_TEXT
    :ivar text: The reasoning text from the model. Required.
    :vartype text: str
    """

    type: Literal[MessageContentType.REASONING_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the reasoning text. Always ``reasoning_text``. Required. REASONING_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The reasoning text from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = MessageContentType.REASONING_TEXT # type: ignore
+
+
class MessageContentRefusalContent(MessageContent, discriminator="refusal"):
    """Refusal.

    :ivar type: The type of the refusal. Always ``refusal``. Required. REFUSAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REFUSAL
    :ivar refusal: The refusal explanation from the model. Required.
    :vartype refusal: str
    """

    type: Literal[MessageContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The type of the refusal. Always ``refusal``. Required. REFUSAL."""
    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The refusal explanation from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        refusal: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = MessageContentType.REFUSAL # type: ignore
+
+
class Metadata(_Model):
    """Set of 16 key-value pairs that can be attached to an object. This can be useful for storing
    additional information about the object in a structured format, and querying for objects via
    API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are
    strings with a maximum length of 512 characters.

    No typed properties are declared on this model.
    """
+
+
class MicrosoftFabricPreviewTool(Tool, discriminator="fabric_dataagent_preview"):
    """The input definition information for a Microsoft Fabric tool as used to configure an agent.

    :ivar type: The object type, which is always 'fabric_dataagent_preview'. Required.
     FABRIC_DATAAGENT_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FABRIC_DATAAGENT_PREVIEW
    :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required.
    :vartype fabric_dataagent_preview:
     ~azure.ai.responses.server.sdk.models.models.FabricDataAgentToolParameters
    """

    type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
    """The object type, which is always 'fabric_dataagent_preview'. Required.
    FABRIC_DATAAGENT_PREVIEW."""
    fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The fabric data agent tool parameters. Required."""

    @overload
    def __init__(
        self,
        *,
        fabric_dataagent_preview: "_models.FabricDataAgentToolParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so instances always carry the correct ``type`` value.
        self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore
+
+
class MoveParam(ComputerAction, discriminator="move"):
    """A computer "move pointer" action.

    :ivar type: Specifies the event type. For a move action, this property is always set to
     ``move``. Required. MOVE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MOVE
    :ivar x: The x-coordinate to move to. Required.
    :vartype x: int
    :ivar y: The y-coordinate to move to. Required.
    :vartype y: int
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Specifies the event type. For a move action, this property is always set to ``move``. Required.
    MOVE."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate to move to. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate to move to. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = ComputerActionType.MOVE  # type: ignore
+
+
class OAuthConsentRequestOutputItem(OutputItem, discriminator="oauth_consent_request"):
    """Request from the service for the user to perform OAuth consent.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: Required.
    :vartype id: str
    :ivar type: Required. OAUTH_CONSENT_REQUEST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OAUTH_CONSENT_REQUEST
    :ivar consent_link: The link the user can use to perform OAuth consent. Required.
    :vartype consent_link: str
    :ivar server_label: The server label for the OAuth consent request. Required.
    :vartype server_label: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputItemType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. OAUTH_CONSENT_REQUEST."""
    consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The link the user can use to perform OAuth consent. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The server label for the OAuth consent request. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        consent_link: str,
        server_label: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputItemType.OAUTH_CONSENT_REQUEST  # type: ignore
+
+
class OpenApiAuthDetails(_Model):
    """Authentication details for OpenApiFunctionDefinition.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails

    :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity.
     Required. Known values are: "anonymous", "project_connection", and "managed_identity".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OpenApiAuthType
    """

    # NOTE(review): appears to map discriminator value -> subclass for polymorphic
    # deserialization; populated outside this class body — confirm in _Model machinery.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """The type of authentication, must be anonymous/project_connection/managed_identity. Required.
    Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"):
    """Security details for OpenApi anonymous authentication.

    :ivar type: The object type, which is always 'anonymous'. Required. ANONYMOUS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ANONYMOUS
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The object type, which is always 'anonymous'. Required. ANONYMOUS."""

    # Anonymous auth carries no extra fields, hence the keyword-only overload is empty.
    @overload
    def __init__(
        self,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OpenApiAuthType.ANONYMOUS  # type: ignore
+
+
class OpenApiFunctionDefinition(_Model):
    """The input definition information for an openapi function.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does, used by the model to choose when
     and how to call the function.
    :vartype description: str
    :ivar spec: The openapi function shape, described as a JSON Schema object. Required.
    :vartype spec: dict[str, any]
    :ivar auth: Open API authentication details. Required.
    :vartype auth: ~azure.ai.responses.server.sdk.models.models.OpenApiAuthDetails
    :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults.
    :vartype default_params: list[str]
    :ivar functions: List of function definitions used by OpenApi tool. Read-only (service
     populated).
    :vartype functions:
     list[~azure.ai.responses.server.sdk.models.models.OpenApiFunctionDefinitionFunction]
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the function does, used by the model to choose when and how to call the
    function."""
    spec: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The openapi function shape, described as a JSON Schema object. Required."""
    auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Open API authentication details. Required."""
    default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """List of OpenAPI spec parameters that will use user-provided defaults."""
    # Read-only visibility: intentionally absent from the keyword __init__ overload below.
    functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field(visibility=["read"])
    """List of function definitions used by OpenApi tool."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        spec: dict[str, Any],
        auth: "_models.OpenApiAuthDetails",
        description: Optional[str] = None,
        default_params: Optional[list[str]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiFunctionDefinitionFunction(_Model):
    """A single function definition exposed by an OpenApi tool.

    :ivar name: The name of the function to be called. Required.
    :vartype name: str
    :ivar description: A description of what the function does, used by the model to choose when
     and how to call the function.
    :vartype description: str
    :ivar parameters: The parameters the functions accepts, described as a JSON Schema object.
     Required.
    :vartype parameters: dict[str, any]
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function to be called. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the function does, used by the model to choose when and how to call the
    function."""
    parameters: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The parameters the functions accepts, described as a JSON Schema object. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        parameters: dict[str, Any],
        description: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"):
    """Security details for OpenApi managed_identity authentication.

    :ivar type: The object type, which is always 'managed_identity'. Required. MANAGED_IDENTITY.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MANAGED_IDENTITY
    :ivar security_scheme: Connection auth security details. Required.
    :vartype security_scheme:
     ~azure.ai.responses.server.sdk.models.models.OpenApiManagedSecurityScheme
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The object type, which is always 'managed_identity'. Required. MANAGED_IDENTITY."""
    security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Connection auth security details. Required."""

    @overload
    def __init__(
        self,
        *,
        security_scheme: "_models.OpenApiManagedSecurityScheme",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OpenApiAuthType.MANAGED_IDENTITY  # type: ignore
+
+
class OpenApiManagedSecurityScheme(_Model):
    """Security scheme for OpenApi managed_identity authentication.

    :ivar audience: Authentication scope for managed_identity auth type. Required.
    :vartype audience: str
    """

    audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Authentication scope for managed_identity auth type. Required."""

    @overload
    def __init__(
        self,
        *,
        audience: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"):
    """Security details for OpenApi project connection authentication.

    :ivar type: The object type, which is always 'project_connection'. Required.
     PROJECT_CONNECTION.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.PROJECT_CONNECTION
    :ivar security_scheme: Project connection auth security details. Required.
    :vartype security_scheme:
     ~azure.ai.responses.server.sdk.models.models.OpenApiProjectConnectionSecurityScheme
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The object type, which is always 'project_connection'. Required. PROJECT_CONNECTION."""
    security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Project connection auth security details. Required."""

    @overload
    def __init__(
        self,
        *,
        security_scheme: "_models.OpenApiProjectConnectionSecurityScheme",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OpenApiAuthType.PROJECT_CONNECTION  # type: ignore
+
+
class OpenApiProjectConnectionSecurityScheme(_Model):
    """Security scheme for OpenApi project connection authentication.

    (Docstring previously said "managed_identity" — a copy-paste error; this
    scheme carries a project connection id, per the field below.)

    :ivar project_connection_id: Project connection id for Project Connection auth type. Required.
    :vartype project_connection_id: str
    """

    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Project connection id for Project Connection auth type. Required."""

    @overload
    def __init__(
        self,
        *,
        project_connection_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OpenApiTool(Tool, discriminator="openapi"):
    """The input definition information for an OpenAPI tool as used to configure an agent.

    :ivar type: The object type, which is always 'openapi'. Required. OPENAPI.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OPENAPI
    :ivar openapi: The openapi function definition. Required.
    :vartype openapi: ~azure.ai.responses.server.sdk.models.models.OpenApiFunctionDefinition
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The object type, which is always 'openapi'. Required. OPENAPI."""
    openapi: "_models.OpenApiFunctionDefinition" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The openapi function definition. Required."""

    @overload
    def __init__(
        self,
        *,
        openapi: "_models.OpenApiFunctionDefinition",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = ToolType.OPENAPI  # type: ignore
+
+
class OpenApiToolCall(OutputItem, discriminator="openapi_call"):
    """An OpenAPI tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. OPENAPI_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OPENAPI_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the OpenAPI operation being called. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputItemType.OPENAPI_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. OPENAPI_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the OpenAPI operation being called. Required."""
    # Note: arguments is a raw JSON-encoded string, not a parsed mapping.
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputItemType.OPENAPI_CALL  # type: ignore
+
+
class OpenApiToolCallOutput(OutputItem, discriminator="openapi_call_output"):
    """The output of an OpenAPI tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. OPENAPI_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OPENAPI_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar name: The name of the OpenAPI operation that was called. Required.
    :vartype name: str
    :ivar output: The output from the OpenAPI tool call. Is one of the following types: {str: Any},
     str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputItemType.OPENAPI_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. OPENAPI_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the OpenAPI operation that was called. Required."""
    # Optional union payload: dict, str, or list (see _types.ToolCallOutputContent).
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the OpenAPI tool call. Is one of the following types: {str: Any}, str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        name: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputItemType.OPENAPI_CALL_OUTPUT  # type: ignore
+
+
class OutputContent(_Model):
    """Polymorphic base for content items produced by the model.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    OutputContentOutputTextContent, OutputContentReasoningTextContent, OutputContentRefusalContent

    :ivar type: Required. Known values are: "output_text", "refusal", and "reasoning_text".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputContentType
    """

    # NOTE(review): appears to map discriminator value -> subclass for polymorphic
    # deserialization; populated outside this class body — confirm in _Model machinery.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"output_text\", \"refusal\", and \"reasoning_text\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class OutputContentOutputTextContent(OutputContent, discriminator="output_text"):
    """Output text.

    :ivar type: The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_TEXT
    :ivar text: The text output from the model. Required.
    :vartype text: str
    :ivar annotations: The annotations of the text output. Required.
    :vartype annotations: list[~azure.ai.responses.server.sdk.models.models.Annotation]
    :ivar logprobs: Required.
    :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.LogProb]
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The text output from the model. Required."""
    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The annotations of the text output. Required."""
    logprobs: list["_models.LogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
        annotations: list["_models.Annotation"],
        logprobs: list["_models.LogProb"],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputContentType.OUTPUT_TEXT  # type: ignore
+
+
class OutputContentReasoningTextContent(OutputContent, discriminator="reasoning_text"):
    """Reasoning text.

    :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required.
     REASONING_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING_TEXT
    :ivar text: The reasoning text from the model. Required.
    :vartype text: str
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputContentType.REASONING_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the reasoning text. Always ``reasoning_text``. Required. REASONING_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The reasoning text from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputContentType.REASONING_TEXT  # type: ignore
+
+
class OutputContentRefusalContent(OutputContent, discriminator="refusal"):
    """Refusal.

    :ivar type: The type of the refusal. Always ``refusal``. Required. REFUSAL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REFUSAL
    :ivar refusal: The refusal explanation from the model. Required.
    :vartype refusal: str
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the refusal. Always ``refusal``. Required. REFUSAL."""
    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The refusal explanation from the model. Required."""

    @overload
    def __init__(
        self,
        *,
        refusal: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputContentType.REFUSAL  # type: ignore
+
+
class OutputItemApplyPatchToolCall(OutputItem, discriminator="apply_patch_call"):
    """Apply patch tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL
    :ivar id: The unique ID of the apply patch tool call. Populated when this item is returned via
     API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call. One of ``in_progress`` or ``completed``.
     Required. Known values are: "in_progress" and "completed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallStatus
    :ivar operation: Apply patch operation. Required.
    :vartype operation: ~azure.ai.responses.server.sdk.models.models.ApplyPatchFileOperation
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputItemType.APPLY_PATCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``apply_patch_call``. Required. APPLY_PATCH_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call. Populated when this item is returned via API.
    Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call. One of ``in_progress`` or ``completed``. Required.
    Known values are: \"in_progress\" and \"completed\"."""
    operation: "_models.ApplyPatchFileOperation" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Apply patch operation. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallStatus"],
        operation: "_models.ApplyPatchFileOperation",
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputItemType.APPLY_PATCH_CALL  # type: ignore
+
+
class OutputItemApplyPatchToolCallOutput(OutputItem, discriminator="apply_patch_call_output"):
    """Apply patch tool call output.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``apply_patch_call_output``. Required.
     APPLY_PATCH_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH_CALL_OUTPUT
    :ivar id: The unique ID of the apply patch tool call output. Populated when this item is
     returned via API. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the apply patch tool call generated by the model. Required.
    :vartype call_id: str
    :ivar status: The status of the apply patch tool call output. One of ``completed`` or
     ``failed``. Required. Known values are: "completed" and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ApplyPatchCallOutputStatus
    :ivar output:
    :vartype output: str
    """

    # Discriminator: serialized to the wire as the "type" property.
    type: Literal[OutputItemType.APPLY_PATCH_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``apply_patch_call_output``. Required. APPLY_PATCH_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call output. Populated when this item is returned via
    API. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the apply patch tool call generated by the model. Required."""
    status: Union[str, "_models.ApplyPatchCallOutputStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the apply patch tool call output. One of ``completed`` or ``failed``. Required.
    Known values are: \"completed\" and \"failed\"."""
    # NOTE(review): `output` is undocumented in the spec (empty :ivar: above);
    # presumably the textual result of applying the patch — confirm upstream.
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        status: Union[str, "_models.ApplyPatchCallOutputStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Re-pin the fixed discriminator value after base-class initialization.
        self.type = OutputItemType.APPLY_PATCH_CALL_OUTPUT  # type: ignore
+
+
+class OutputItemCodeInterpreterToolCall(OutputItem, discriminator="code_interpreter_call"):
+ """Code interpreter tool call.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the code interpreter tool call. Always ``code_interpreter_call``.
+ Required. CODE_INTERPRETER_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER_CALL
+ :ivar id: The unique ID of the code interpreter tool call. Required.
+ :vartype id: str
+ :ivar status: The status of the code interpreter tool call. Valid values are ``in_progress``,
+ ``completed``, ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the
+ following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"],
+ Literal["interpreting"], Literal["failed"]
+    :vartype status: str
+ :ivar container_id: The ID of the container used to run the code. Required.
+ :vartype container_id: str
+    :ivar code: The code run by the code interpreter. Required.
+ :vartype code: str
+    :ivar outputs: The outputs produced by the code interpreter call. Required.
+ :vartype outputs: list[~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputLogs
+ or ~azure.ai.responses.server.sdk.models.models.CodeInterpreterOutputImage]
+ """
+
+ type: Literal[OutputItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the code interpreter tool call. Always ``code_interpreter_call``. Required.
+ CODE_INTERPRETER_CALL."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the code interpreter tool call. Required."""
+ status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the code interpreter tool call. Valid values are ``in_progress``, ``completed``,
+ ``incomplete``, ``interpreting``, and ``failed``. Required. Is one of the following types:
+ Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"],
+ Literal[\"interpreting\"], Literal[\"failed\"]"""
+ container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the container used to run the code. Required."""
+ code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"],
+ container_id: str,
+ code: str,
+ outputs: list[Union["_models.CodeInterpreterOutputLogs", "_models.CodeInterpreterOutputImage"]],
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.CODE_INTERPRETER_CALL # type: ignore
+
+
+class OutputItemCompactionBody(OutputItem, discriminator="compaction"):
+ """Compaction item.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the item. Always ``compaction``. Required. COMPACTION.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPACTION
+ :ivar id: The unique ID of the compaction item. Required.
+ :vartype id: str
+ :ivar encrypted_content: The encrypted content that was produced by compaction. Required.
+ :vartype encrypted_content: str
+ """
+
+ type: Literal[OutputItemType.COMPACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the item. Always ``compaction``. Required. COMPACTION."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the compaction item. Required."""
+ encrypted_content: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The encrypted content that was produced by compaction. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ encrypted_content: str,
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.COMPACTION # type: ignore
+
+
+class OutputItemComputerToolCall(OutputItem, discriminator="computer_call"):
+ """Computer tool call.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL
+ :ivar id: The unique ID of the computer call. Required.
+ :vartype id: str
+ :ivar call_id: An identifier used when responding to the tool call with output. Required.
+ :vartype call_id: str
+    :ivar action: The computer action to perform. Required.
+ :vartype action: ~azure.ai.responses.server.sdk.models.models.ComputerAction
+ :ivar pending_safety_checks: The pending safety checks for the computer call. Required.
+ :vartype pending_safety_checks:
+ list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
+ :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Populated when items are returned via API. Required. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+ """
+
+ type: Literal[OutputItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the computer call. Always ``computer_call``. Required. COMPUTER_CALL."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the computer call. Required."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """An identifier used when responding to the tool call with output. Required."""
+ action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The pending safety checks for the computer call. Required."""
+ status: Literal["in_progress", "completed", "incomplete"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+ when items are returned via API. Required. Is one of the following types:
+ Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ call_id: str,
+ action: "_models.ComputerAction",
+ pending_safety_checks: list["_models.ComputerCallSafetyCheckParam"],
+ status: Literal["in_progress", "completed", "incomplete"],
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.COMPUTER_CALL # type: ignore
+
+
+class OutputItemComputerToolCallOutput(OutputItem, discriminator="computer_call_output"):
+ """Computer tool call output.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the computer tool call output. Always ``computer_call_output``.
+ Required. COMPUTER_CALL_OUTPUT.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_CALL_OUTPUT
+ :ivar id: The ID of the computer tool call output. Required.
+ :vartype id: str
+ :ivar call_id: The ID of the computer tool call that produced the output. Required.
+ :vartype call_id: str
+ :ivar acknowledged_safety_checks: The safety checks reported by the API that have been
+ acknowledged by the developer.
+ :vartype acknowledged_safety_checks:
+ list[~azure.ai.responses.server.sdk.models.models.ComputerCallSafetyCheckParam]
+    :ivar output: A computer screenshot image capturing the output. Required.
+ :vartype output: ~azure.ai.responses.server.sdk.models.models.ComputerScreenshotImage
+ :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
+ ``incomplete``. Populated when input items are returned via API. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+ """
+
+ type: Literal[OutputItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the computer tool call output. Always ``computer_call_output``. Required.
+ COMPUTER_CALL_OUTPUT."""
+ id: str = rest_field(visibility=["read"])
+ """The ID of the computer tool call output. Required."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the computer tool call that produced the output. Required."""
+ acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The safety checks reported by the API that have been acknowledged by the developer."""
+ output: "_models.ComputerScreenshotImage" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Populated when input items are returned via API. Is one of the following types:
+ Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ call_id: str,
+ output: "_models.ComputerScreenshotImage",
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ acknowledged_safety_checks: Optional[list["_models.ComputerCallSafetyCheckParam"]] = None,
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.COMPUTER_CALL_OUTPUT # type: ignore
+
+
+class OutputItemCustomToolCall(OutputItem, discriminator="custom_tool_call"):
+ """Custom tool call.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the custom tool call. Always ``custom_tool_call``. Required.
+ CUSTOM_TOOL_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL
+ :ivar id: The unique ID of the custom tool call in the OpenAI platform.
+ :vartype id: str
+ :ivar call_id: An identifier used to map this custom tool call to a tool call output. Required.
+ :vartype call_id: str
+ :ivar name: The name of the custom tool being called. Required.
+ :vartype name: str
+ :ivar input: The input for the custom tool call generated by the model. Required.
+ :vartype input: str
+ """
+
+ type: Literal[OutputItemType.CUSTOM_TOOL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the custom tool call. Always ``custom_tool_call``. Required. CUSTOM_TOOL_CALL."""
+ id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the custom tool call in the OpenAI platform."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """An identifier used to map this custom tool call to a tool call output. Required."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the custom tool being called. Required."""
+ input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The input for the custom tool call generated by the model. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ call_id: str,
+ name: str,
+ input: str,
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ id: Optional[str] = None, # pylint: disable=redefined-builtin
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.CUSTOM_TOOL_CALL # type: ignore
+
+
+class OutputItemCustomToolCallOutput(OutputItem, discriminator="custom_tool_call_output"):
+ """Custom tool call output.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the custom tool call output. Always ``custom_tool_call_output``.
+ Required. CUSTOM_TOOL_CALL_OUTPUT.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM_TOOL_CALL_OUTPUT
+ :ivar id: The unique ID of the custom tool call output in the OpenAI platform.
+ :vartype id: str
+ :ivar call_id: The call ID, used to map this custom tool call output to a custom tool call.
+ Required.
+ :vartype call_id: str
+ :ivar output: The output from the custom tool call generated by your code. Can be a string or
+      an list of output content should read: a list of output content. Required. Is either a str type or a
+ [FunctionAndCustomToolCallOutput] type.
+ :vartype output: str or
+ list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
+ """
+
+ type: Literal[OutputItemType.CUSTOM_TOOL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the custom tool call output. Always ``custom_tool_call_output``. Required.
+ CUSTOM_TOOL_CALL_OUTPUT."""
+ id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the custom tool call output in the OpenAI platform."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The call ID, used to map this custom tool call output to a custom tool call. Required."""
+ output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The output from the custom tool call generated by your code. Can be a string or an list of
+ output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ call_id: str,
+ output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ id: Optional[str] = None, # pylint: disable=redefined-builtin
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.CUSTOM_TOOL_CALL_OUTPUT # type: ignore
+
+
+class OutputItemFileSearchToolCall(OutputItem, discriminator="file_search_call"):
+ """File search tool call.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar id: The unique ID of the file search tool call. Required.
+ :vartype id: str
+ :ivar type: The type of the file search tool call. Always ``file_search_call``. Required.
+ FILE_SEARCH_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH_CALL
+    :ivar status: The status of the file search tool call. One of ``in_progress``, ``searching``,
+      ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following types: Literal["in_progress"],
+ Literal["searching"], Literal["completed"], Literal["incomplete"], Literal["failed"]
+    :vartype status: str
+ :ivar queries: The queries used to search for files. Required.
+ :vartype queries: list[str]
+    :ivar results: The results of the file search tool call, if returned.
+ :vartype results: list[~azure.ai.responses.server.sdk.models.models.FileSearchToolCallResults]
+ """
+
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the file search tool call. Required."""
+ type: Literal[OutputItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the file search tool call. Always ``file_search_call``. Required. FILE_SEARCH_CALL."""
+ status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the file search tool call. One of ``in_progress``, ``searching``, ``incomplete``
+ or ``failed``,. Required. Is one of the following types: Literal[\"in_progress\"],
+ Literal[\"searching\"], Literal[\"completed\"], Literal[\"incomplete\"], Literal[\"failed\"]"""
+ queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The queries used to search for files. Required."""
+ results: Optional[list["_models.FileSearchToolCallResults"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ status: Literal["in_progress", "searching", "completed", "incomplete", "failed"],
+ queries: list[str],
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ results: Optional[list["_models.FileSearchToolCallResults"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.FILE_SEARCH_CALL # type: ignore
+
+
+class OutputItemFunctionShellCall(OutputItem, discriminator="shell_call"):
+ """Shell tool call.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the item. Always ``shell_call``. Required. SHELL_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL
+ :ivar id: The unique ID of the shell tool call. Populated when this item is returned via API.
+ Required.
+ :vartype id: str
+ :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
+ :vartype call_id: str
+ :ivar action: The shell commands and limits that describe how to run the tool call. Required.
+ :vartype action: ~azure.ai.responses.server.sdk.models.models.FunctionShellAction
+ :ivar status: The status of the shell call. One of ``in_progress``, ``completed``, or
+ ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
+ :vartype status: str or ~azure.ai.responses.server.sdk.models.models.LocalShellCallStatus
+    :ivar environment: The environment in which the shell call is executed. Required.
+ :vartype environment: ~azure.ai.responses.server.sdk.models.models.FunctionShellCallEnvironment
+ """
+
+ type: Literal[OutputItemType.SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the item. Always ``shell_call``. Required. SHELL_CALL."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the shell tool call. Populated when this item is returned via API. Required."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the shell tool call generated by the model. Required."""
+ action: "_models.FunctionShellAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The shell commands and limits that describe how to run the tool call. Required."""
+ status: Union[str, "_models.LocalShellCallStatus"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the shell call. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
+ environment: "_models.FunctionShellCallEnvironment" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ call_id: str,
+ action: "_models.FunctionShellAction",
+ status: Union[str, "_models.LocalShellCallStatus"],
+ environment: "_models.FunctionShellCallEnvironment",
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.SHELL_CALL # type: ignore
+
+
+class OutputItemFunctionShellCallOutput(OutputItem, discriminator="shell_call_output"):
+ """Shell call output.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar type: The type of the shell call output. Always ``shell_call_output``. Required.
+ SHELL_CALL_OUTPUT.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL_CALL_OUTPUT
+ :ivar id: The unique ID of the shell call output. Populated when this item is returned via API.
+ Required.
+ :vartype id: str
+ :ivar call_id: The unique ID of the shell tool call generated by the model. Required.
+ :vartype call_id: str
+ :ivar status: The status of the shell call output. One of ``in_progress``, ``completed``, or
+ ``incomplete``. Required. Known values are: "in_progress", "completed", and "incomplete".
+ :vartype status: str or
+ ~azure.ai.responses.server.sdk.models.models.LocalShellCallOutputStatusEnum
+ :ivar output: An array of shell call output contents. Required.
+ :vartype output:
+ list[~azure.ai.responses.server.sdk.models.models.FunctionShellCallOutputContent]
+    :ivar max_output_length: The maximum length of the shell call output. Required.
+ :vartype max_output_length: int
+ """
+
+ type: Literal[OutputItemType.SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the shell call output. Always ``shell_call_output``. Required. SHELL_CALL_OUTPUT."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the shell call output. Populated when this item is returned via API. Required."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the shell tool call generated by the model. Required."""
+ status: Union[str, "_models.LocalShellCallOutputStatusEnum"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the shell call output. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Required. Known values are: \"in_progress\", \"completed\", and \"incomplete\"."""
+ output: list["_models.FunctionShellCallOutputContent"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """An array of shell call output contents. Required."""
+ max_output_length: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ call_id: str,
+ status: Union[str, "_models.LocalShellCallOutputStatusEnum"],
+ output: list["_models.FunctionShellCallOutputContent"],
+ max_output_length: int,
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.SHELL_CALL_OUTPUT # type: ignore
+
+
+class OutputItemFunctionToolCall(OutputItem, discriminator="function_call"):
+ """Function tool call.
+
+ :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+ a str type.
+ :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+ :ivar agent_reference: The agent that created the item.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar response_id: The response on which the item is created.
+ :vartype response_id: str
+ :ivar id: The unique ID of the function tool call. Required.
+ :vartype id: str
+ :ivar type: The type of the function tool call. Always ``function_call``. Required.
+ FUNCTION_CALL.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL
+ :ivar call_id: The unique ID of the function tool call generated by the model. Required.
+ :vartype call_id: str
+ :ivar name: The name of the function to run. Required.
+ :vartype name: str
+ :ivar arguments: A JSON string of the arguments to pass to the function. Required.
+ :vartype arguments: str
+ :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+ Populated when items are returned via API. Is one of the following types:
+ Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+ """
+
+ id: str = rest_field(visibility=["read"])
+ """The unique ID of the function tool call. Required."""
+ type: Literal[OutputItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the function tool call. Always ``function_call``. Required. FUNCTION_CALL."""
+ call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The unique ID of the function tool call generated by the model. Required."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the function to run. Required."""
+ arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A JSON string of the arguments to pass to the function. Required."""
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+ when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+ Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ call_id: str,
+ name: str,
+ arguments: str,
+ created_by: Optional[Union["_models.CreatedBy", str]] = None,
+ agent_reference: Optional["_models.AgentReference"] = None,
+ response_id: Optional[str] = None,
+ status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = OutputItemType.FUNCTION_CALL # type: ignore
+
+
class OutputItemFunctionToolCallOutput(OutputItem, discriminator="function_call_output"):
    """Function tool call output.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: The unique ID of the function tool call output. Populated when this item is returned
     via API. Required.
    :vartype id: str
    :ivar type: The type of the function tool call output. Always ``function_call_output``.
     Required. FUNCTION_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION_CALL_OUTPUT
    :ivar call_id: The unique ID of the function tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the function call generated by your code. Can be a string or a
     list of output content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput]
     type.
    :vartype output: str or
     list[~azure.ai.responses.server.sdk.models.models.FunctionAndCustomToolCallOutput]
    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str or str or str
    """

    # ``id`` is read-only: assigned by the service, not accepted by the
    # keyword-argument ``__init__`` overload below.
    id: str = rest_field(visibility=["read"])
    """The unique ID of the function tool call output. Populated when this item is returned via API.
    Required."""
    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the function tool call output. Always ``function_call_output``. Required.
    FUNCTION_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the function tool call generated by the model. Required."""
    output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the function call generated by your code. Can be a string or a list of output
    content. Required. Is either a str type or a [FunctionAndCustomToolCallOutput] type."""
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
    Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        output: Union[str, list["_models.FunctionAndCustomToolCallOutput"]],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.FUNCTION_CALL_OUTPUT  # type: ignore
+
+
class OutputItemImageGenToolCall(OutputItem, discriminator="image_generation_call"):
    """Image generation call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the image generation call. Always ``image_generation_call``. Required.
     IMAGE_GENERATION_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION_CALL
    :ivar id: The unique ID of the image generation call. Required.
    :vartype id: str
    :ivar status: The status of the image generation call. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["generating"], Literal["failed"]
    :vartype status: str or str or str or str
    :ivar result: Required.
    :vartype result: str
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the image generation call. Always ``image_generation_call``. Required.
    IMAGE_GENERATION_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the image generation call. Required."""
    status: Literal["in_progress", "completed", "generating", "failed"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the image generation call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"generating\"], Literal[\"failed\"]"""
    # NOTE(review): presumably the generated image payload (e.g. base64) — the
    # spec gives no description here; confirm against the service definition.
    result: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Literal["in_progress", "completed", "generating", "failed"],
        result: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.IMAGE_GENERATION_CALL  # type: ignore
+
+
class OutputItemLocalShellToolCall(OutputItem, discriminator="local_shell_call"):
    """Local shell call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the local shell call. Always ``local_shell_call``. Required.
     LOCAL_SHELL_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL
    :ivar id: The unique ID of the local shell call. Required.
    :vartype id: str
    :ivar call_id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype call_id: str
    :ivar action: Required.
    :vartype action: ~azure.ai.responses.server.sdk.models.models.LocalShellExecAction
    :ivar status: The status of the local shell call. Required. Is one of the following types:
     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str or str or str
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell call. Always ``local_shell_call``. Required. LOCAL_SHELL_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell call. Required."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    # The shell command to execute; see LocalShellExecAction for the payload shape.
    action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the local shell call. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        call_id: str,
        action: "_models.LocalShellExecAction",
        status: Literal["in_progress", "completed", "incomplete"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.LOCAL_SHELL_CALL  # type: ignore
+
+
class OutputItemLocalShellToolCallOutput(OutputItem, discriminator="local_shell_call_output"):
    """Local shell call output.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the local shell tool call output. Always ``local_shell_call_output``.
     Required. LOCAL_SHELL_CALL_OUTPUT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.LOCAL_SHELL_CALL_OUTPUT
    :ivar id: The unique ID of the local shell tool call generated by the model. Required.
    :vartype id: str
    :ivar output: A JSON string of the output of the local shell tool call. Required.
    :vartype output: str
    :ivar status: Is one of the following types: Literal["in_progress"], Literal["completed"],
     Literal["incomplete"]
    :vartype status: str or str or str
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the local shell tool call output. Always ``local_shell_call_output``. Required.
    LOCAL_SHELL_CALL_OUTPUT."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the local shell tool call generated by the model. Required."""
    output: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the output of the local shell tool call. Required."""
    # Unlike the call item, status is optional here (spec marks it non-required).
    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"],
    Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        output: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.LOCAL_SHELL_CALL_OUTPUT  # type: ignore
+
+
class OutputItemMcpApprovalRequest(OutputItem, discriminator="mcp_approval_request"):
    """MCP approval request.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_approval_request``. Required.
     MCP_APPROVAL_REQUEST.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_REQUEST
    :ivar id: The unique ID of the approval request. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server making the request. Required.
    :vartype server_label: str
    :ivar name: The name of the tool to run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of arguments for the tool. Required.
    :vartype arguments: str
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_request``. Required. MCP_APPROVAL_REQUEST."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the approval request. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server making the request. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool to run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of arguments for the tool. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.MCP_APPROVAL_REQUEST  # type: ignore
+
+
class OutputItemMcpApprovalResponseResource(OutputItem, discriminator="mcp_approval_response"):
    """MCP approval response.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_approval_response``. Required.
     MCP_APPROVAL_RESPONSE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_APPROVAL_RESPONSE
    :ivar id: The unique ID of the approval response. Required.
    :vartype id: str
    :ivar approval_request_id: The ID of the approval request being answered. Required.
    :vartype approval_request_id: str
    :ivar approve: Whether the request was approved. Required.
    :vartype approve: bool
    :ivar reason: Optional free-text reason for the approval decision.
    :vartype reason: str
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_approval_response``. Required. MCP_APPROVAL_RESPONSE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the approval response. Required."""
    approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the approval request being answered. Required."""
    approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Whether the request was approved. Required."""
    # Optional explanation accompanying the approve/deny decision.
    reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        approval_request_id: str,
        approve: bool,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        reason: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.MCP_APPROVAL_RESPONSE  # type: ignore
+
+
class OutputItemMcpListTools(OutputItem, discriminator="mcp_list_tools"):
    """MCP list tools.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_LIST_TOOLS
    :ivar id: The unique ID of the list. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server. Required.
    :vartype server_label: str
    :ivar tools: The tools available on the server. Required.
    :vartype tools: list[~azure.ai.responses.server.sdk.models.models.MCPListToolsTool]
    :ivar error: Error raised while listing tools, if any.
    :vartype error: ~azure.ai.responses.server.sdk.models.models.RealtimeMCPError
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_list_tools``. Required. MCP_LIST_TOOLS."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the list. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server. Required."""
    tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The tools available on the server. Required."""
    # Optional error detail; absent when the listing succeeded.
    error: Optional["_models.RealtimeMCPError"] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        tools: list["_models.MCPListToolsTool"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        error: Optional["_models.RealtimeMCPError"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.MCP_LIST_TOOLS  # type: ignore
+
+
class OutputItemMcpToolCall(OutputItem, discriminator="mcp_call"):
    """MCP tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the item. Always ``mcp_call``. Required. MCP_CALL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP_CALL
    :ivar id: The unique ID of the tool call. Required.
    :vartype id: str
    :ivar server_label: The label of the MCP server running the tool. Required.
    :vartype server_label: str
    :ivar name: The name of the tool that was run. Required.
    :vartype name: str
    :ivar arguments: A JSON string of the arguments passed to the tool. Required.
    :vartype arguments: str
    :ivar output: The textual output of the tool call, if any.
    :vartype output: str
    :ivar error: Error details of a failed call, if any.
    :vartype error: dict[str, any]
    :ivar status: The status of the tool call. One of ``in_progress``, ``completed``,
     ``incomplete``, ``calling``, or ``failed``. Known values are: "in_progress", "completed",
     "incomplete", "calling", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MCPToolCallStatus
    :ivar approval_request_id: ID of the approval request that authorized this call, if any.
    :vartype approval_request_id: str
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the item. Always ``mcp_call``. Required. MCP_CALL."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call. Required."""
    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The label of the MCP server running the tool. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the tool that was run. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments passed to the tool. Required."""
    # Optional result/error pair: at most one is expected to be populated.
    output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    error: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    # Open enum: str accepted alongside MCPToolCallStatus so unknown service
    # values do not break deserialization.
    status: Optional[Union[str, "_models.MCPToolCallStatus"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. One of ``in_progress``, ``completed``, ``incomplete``,
    ``calling``, or ``failed``. Known values are: \"in_progress\", \"completed\", \"incomplete\",
    \"calling\", and \"failed\"."""
    approval_request_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        server_label: str,
        name: str,
        arguments: str,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional[str] = None,
        error: Optional[dict[str, Any]] = None,
        status: Optional[Union[str, "_models.MCPToolCallStatus"]] = None,
        approval_request_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.MCP_CALL  # type: ignore
+
+
class OutputItemMessage(OutputItem, discriminator="message"):
    """Message.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: The type of the message. Always set to ``message``. Required. MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MESSAGE
    :ivar id: The unique ID of the message. Required.
    :vartype id: str
    :ivar status: The status of item. One of ``in_progress``, ``completed``, or ``incomplete``.
     Populated when items are returned via API. Required. Known values are: "in_progress",
     "completed", and "incomplete".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.MessageStatus
    :ivar role: The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``,
     ``critic``, ``discriminator``, ``developer``, or ``tool``. Required. Known values are:
     "unknown", "user", "assistant", "system", "critic", "discriminator", "developer", and "tool".
    :vartype role: str or ~azure.ai.responses.server.sdk.models.models.MessageRole
    :ivar content: The content of the message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.MessageContent]
    """

    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the message. Always set to ``message``. Required. MESSAGE."""
    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the message. Required."""
    # Open enums: str accepted alongside the enum types so unknown service
    # values do not break deserialization.
    status: Union[str, "_models.MessageStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The status of item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated when
    items are returned via API. Required. Known values are: \"in_progress\", \"completed\", and
    \"incomplete\"."""
    role: Union[str, "_models.MessageRole"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the message. One of ``unknown``, ``user``, ``assistant``, ``system``, ``critic``,
    ``discriminator``, ``developer``, or ``tool``. Required. Known values are: \"unknown\",
    \"user\", \"assistant\", \"system\", \"critic\", \"discriminator\", \"developer\", and
    \"tool\"."""
    content: list["_models.MessageContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content of the message. Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        status: Union[str, "_models.MessageStatus"],
        role: Union[str, "_models.MessageRole"],
        content: list["_models.MessageContent"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct type value.
        self.type = OutputItemType.MESSAGE  # type: ignore
+
+
class OutputItemOutputMessage(OutputItem, discriminator="output_message"):
    """Output message.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar id: The unique ID of the output message. Required.
    :vartype id: str
    :ivar type: The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_MESSAGE
    :ivar role: The role of the output message. Always ``assistant``. Required. Default value is
     "assistant".
    :vartype role: str
    :ivar content: The content of the output message. Required.
    :vartype content: list[~azure.ai.responses.server.sdk.models.models.OutputMessageContent]
    :ivar phase: Known values are: "commentary" and "final_answer".
    :vartype phase: str or ~azure.ai.responses.server.sdk.models.models.MessagePhase
    :ivar status: The status of the message input. One of ``in_progress``, ``completed``, or
     ``incomplete``. Populated when input items are returned via API. Required. Is one of the
     following types: Literal["in_progress"], Literal["completed"], Literal["incomplete"]
    :vartype status: str or str or str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the output message. Required."""
    # Discriminator used to route polymorphic OutputItem payloads to this subtype.
    type: Literal[OutputItemType.OUTPUT_MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the output message. Always ``message``. Required. OUTPUT_MESSAGE."""
    # ``role`` is fixed to "assistant" and therefore not exposed as an
    # ``__init__`` keyword; it is force-set in ``__init__`` below.
    role: Literal["assistant"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The role of the output message. Always ``assistant``. Required. Default value is \"assistant\"."""
    content: list["_models.OutputMessageContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The content of the output message. Required."""
    phase: Optional[Union[str, "_models.MessagePhase"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Known values are: \"commentary\" and \"final_answer\"."""
    status: Literal["in_progress", "completed", "incomplete"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the message input. One of ``in_progress``, ``completed``, or ``incomplete``.
    Populated when input items are returned via API. Required. Is one of the following types:
    Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]"""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
        content: list["_models.OutputMessageContent"],
        status: Literal["in_progress", "completed", "incomplete"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        phase: Optional[Union[str, "_models.MessagePhase"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator and the constant role on every instance.
        self.type = OutputItemType.OUTPUT_MESSAGE  # type: ignore
        self.role: Literal["assistant"] = "assistant"
+
+
+class OutputItemReasoningItem(OutputItem, discriminator="reasoning"):
+    """A reasoning output item (discriminator value ``reasoning``).
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: The type of the object. Always ``reasoning``. Required. REASONING.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REASONING
+    :ivar id: The unique identifier of the reasoning content. Required.
+    :vartype id: str
+    :ivar encrypted_content: Encrypted reasoning content, when returned by the service
+     (assumption from the field name -- the spec carries no description; confirm).
+    :vartype encrypted_content: str
+    :ivar summary: Reasoning summary content. Required.
+    :vartype summary: list[~azure.ai.responses.server.sdk.models.models.SummaryTextContent]
+    :ivar content: Reasoning text content.
+    :vartype content: list[~azure.ai.responses.server.sdk.models.models.ReasoningTextContent]
+    :ivar status: The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``.
+     Populated when items are returned via API. Is one of the following types:
+     Literal["in_progress"], Literal["completed"], Literal["incomplete"]
+    :vartype status: str
+    """
+
+    type: Literal[OutputItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the object. Always ``reasoning``. Required. REASONING."""
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the reasoning content. Required."""
+    encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])  # no spec description; presumably opaque encrypted reasoning state
+    summary: list["_models.SummaryTextContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Reasoning summary content. Required."""
+    content: Optional[list["_models.ReasoningTextContent"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Reasoning text content."""
+    status: Optional[Literal["in_progress", "completed", "incomplete"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the item. One of ``in_progress``, ``completed``, or ``incomplete``. Populated
+    when items are returned via API. Is one of the following types: Literal[\"in_progress\"],
+    Literal[\"completed\"], Literal[\"incomplete\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        summary: list["_models.SummaryTextContent"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        encrypted_content: Optional[str] = None,
+        content: Optional[list["_models.ReasoningTextContent"]] = None,
+        status: Optional[Literal["in_progress", "completed", "incomplete"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = OutputItemType.REASONING  # type: ignore
+
+
+class OutputItemWebSearchToolCall(OutputItem, discriminator="web_search_call"):
+    """A web search tool call output item (discriminator value ``web_search_call``).
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar id: The unique ID of the web search tool call. Required.
+    :vartype id: str
+    :ivar type: The type of the web search tool call. Always ``web_search_call``. Required.
+     WEB_SEARCH_CALL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_CALL
+    :ivar status: The status of the web search tool call. Required. Is one of the following types:
+     Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"]
+    :vartype status: str
+    :ivar action: An object describing the specific action taken in this web search call. Includes
+     details on how the model used the web (search, open_page, find_in_page). Required. Is one of
+     the following types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind
+    :vartype action: ~azure.ai.responses.server.sdk.models.models.WebSearchActionSearch or
+     ~azure.ai.responses.server.sdk.models.models.WebSearchActionOpenPage or
+     ~azure.ai.responses.server.sdk.models.models.WebSearchActionFind
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique ID of the web search tool call. Required."""
+    type: Literal[OutputItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the web search tool call. Always ``web_search_call``. Required. WEB_SEARCH_CALL."""
+    status: Literal["in_progress", "searching", "completed", "failed"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The status of the web search tool call. Required. Is one of the following types:
+    Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]"""
+    action: Union["_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"] = (
+        rest_field(visibility=["read", "create", "update", "delete", "query"])
+    )
+    """An object describing the specific action taken in this web search call. Includes details on how
+    the model used the web (search, open_page, find_in_page). Required. Is one of the following
+    types: WebSearchActionSearch, WebSearchActionOpenPage, WebSearchActionFind"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        status: Literal["in_progress", "searching", "completed", "failed"],
+        action: Union[
+            "_models.WebSearchActionSearch", "_models.WebSearchActionOpenPage", "_models.WebSearchActionFind"
+        ],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = OutputItemType.WEB_SEARCH_CALL  # type: ignore
+
+
+class OutputMessageContent(_Model):
+    """Base type for the content parts of an output message.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    OutputMessageContentOutputTextContent, OutputMessageContentRefusalContent
+
+    :ivar type: Required. Known values are: "output_text" and "refusal".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OutputMessageContentType
+    """
+
+    __mapping__: dict[str, _Model] = {}  # discriminator value -> subclass; presumably filled in by the _Model framework
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"output_text\" and \"refusal\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class OutputMessageContentOutputTextContent(OutputMessageContent, discriminator="output_text"):
+    """An output text content part (discriminator value ``output_text``).
+
+    :ivar type: The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.OUTPUT_TEXT
+    :ivar text: The text output from the model. Required.
+    :vartype text: str
+    :ivar annotations: The annotations of the text output. Required.
+    :vartype annotations: list[~azure.ai.responses.server.sdk.models.models.Annotation]
+    :ivar logprobs: Log probability information for the text output (see ``LogProb``). Required.
+    :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.LogProb]
+    """
+
+    type: Literal[OutputMessageContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the output text. Always ``output_text``. Required. OUTPUT_TEXT."""
+    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The text output from the model. Required."""
+    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The annotations of the text output. Required."""
+    logprobs: list["_models.LogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Log probability information for the text output. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        text: str,
+        annotations: list["_models.Annotation"],
+        logprobs: list["_models.LogProb"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = OutputMessageContentType.OUTPUT_TEXT  # type: ignore
+
+
+class OutputMessageContentRefusalContent(OutputMessageContent, discriminator="refusal"):
+    """A refusal message content part (discriminator value ``refusal``).
+
+    :ivar type: The type of the refusal. Always ``refusal``. Required. REFUSAL.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.REFUSAL
+    :ivar refusal: The refusal explanation from the model. Required.
+    :vartype refusal: str
+    """
+
+    type: Literal[OutputMessageContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the refusal. Always ``refusal``. Required. REFUSAL."""
+    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The refusal explanation from the model. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        refusal: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = OutputMessageContentType.REFUSAL  # type: ignore
+
+
+class Prompt(_Model):
+    """Reference to a prompt template and its variables.
+    (The upstream "Learn more" hyperlink was lost during code generation -- TODO restore.)
+
+    :ivar id: The unique identifier of the prompt template to use. Required.
+    :vartype id: str
+    :ivar version: Optional version of the prompt template to use.
+    :vartype version: str
+    :ivar variables: Presumably the values substituted into the prompt template -- confirm.
+    :vartype variables: ~azure.ai.responses.server.sdk.models.models.ResponsePromptVariables
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the prompt template to use. Required."""
+    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    variables: Optional["_models.ResponsePromptVariables"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        version: Optional[str] = None,
+        variables: Optional["_models.ResponsePromptVariables"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class RankingOptions(_Model):
+    """Options controlling result ranking for file search.
+
+    :ivar ranker: The ranker to use for the file search. Known values are: "auto" and
+     "default-2024-11-15".
+    :vartype ranker: str or ~azure.ai.responses.server.sdk.models.models.RankerVersionType
+    :ivar score_threshold: The score threshold for the file search, a number between 0 and 1.
+     Numbers closer to 1 will attempt to return only the most relevant results, but may return
+     fewer results. NOTE(review): typed as ``int`` although the documented range is 0-1; confirm.
+    :vartype score_threshold: int
+    :ivar hybrid_search: Weights that control how reciprocal rank fusion balances semantic
+     embedding matches versus sparse keyword matches when hybrid search is enabled.
+    :vartype hybrid_search: ~azure.ai.responses.server.sdk.models.models.HybridSearchOptions
+    """
+
+    ranker: Optional[Union[str, "_models.RankerVersionType"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The ranker to use for the file search. Known values are: \"auto\" and \"default-2024-11-15\"."""
+    score_threshold: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])  # NOTE(review): 0-1 range suggests float; generated typing may be wrong -- confirm against spec
+    """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will
+    attempt to return only the most relevant results, but may return fewer results."""
+    hybrid_search: Optional["_models.HybridSearchOptions"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Weights that control how reciprocal rank fusion balances semantic embedding matches versus
+    sparse keyword matches when hybrid search is enabled."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        ranker: Optional[Union[str, "_models.RankerVersionType"]] = None,
+        score_threshold: Optional[int] = None,
+        hybrid_search: Optional["_models.HybridSearchOptions"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class RealtimeMCPError(_Model):
+    """Base type for realtime MCP errors.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    RealtimeMCPHTTPError, RealtimeMCPProtocolError, RealtimeMCPToolExecutionError
+
+    :ivar type: Required. Known values are: "protocol_error", "tool_execution_error", and
+     "http_error".
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RealtimeMcpErrorType
+    """
+
+    __mapping__: dict[str, _Model] = {}  # discriminator value -> subclass; presumably filled in by the _Model framework
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Required. Known values are: \"protocol_error\", \"tool_execution_error\", and \"http_error\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class RealtimeMCPHTTPError(RealtimeMCPError, discriminator="http_error"):
+    """Realtime MCP HTTP error (discriminator value ``http_error``).
+
+    :ivar type: Required. HTTP_ERROR.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.HTTP_ERROR
+    :ivar code: The numeric error code (presumably the HTTP status code -- confirm). Required.
+    :vartype code: int
+    :ivar message: The error message. Required.
+    :vartype message: str
+    """
+
+    type: Literal[RealtimeMcpErrorType.HTTP_ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. HTTP_ERROR."""
+    code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The numeric error code. Required."""
+    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The error message. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        code: int,
+        message: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = RealtimeMcpErrorType.HTTP_ERROR  # type: ignore
+
+
+class RealtimeMCPProtocolError(RealtimeMCPError, discriminator="protocol_error"):
+    """Realtime MCP protocol error (discriminator value ``protocol_error``).
+
+    :ivar type: Required. PROTOCOL_ERROR.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.PROTOCOL_ERROR
+    :ivar code: The numeric error code (presumably an MCP protocol error code -- confirm). Required.
+    :vartype code: int
+    :ivar message: The error message. Required.
+    :vartype message: str
+    """
+
+    type: Literal[RealtimeMcpErrorType.PROTOCOL_ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. PROTOCOL_ERROR."""
+    code: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The numeric error code. Required."""
+    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The error message. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        code: int,
+        message: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = RealtimeMcpErrorType.PROTOCOL_ERROR  # type: ignore
+
+
+class RealtimeMCPToolExecutionError(RealtimeMCPError, discriminator="tool_execution_error"):
+    """Realtime MCP tool execution error (discriminator value ``tool_execution_error``).
+
+    :ivar type: Required. TOOL_EXECUTION_ERROR.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TOOL_EXECUTION_ERROR
+    :ivar message: The error message describing the tool execution failure. Required.
+    :vartype message: str
+    """
+
+    type: Literal[RealtimeMcpErrorType.TOOL_EXECUTION_ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. TOOL_EXECUTION_ERROR."""
+    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The error message describing the tool execution failure. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        message: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = RealtimeMcpErrorType.TOOL_EXECUTION_ERROR  # type: ignore
+
+
+class Reasoning(_Model):
+    """Reasoning options.
+
+    :ivar effort: The reasoning effort level. Is one of the following types: Literal["none"],
+     Literal["minimal"], Literal["low"], Literal["medium"], Literal["high"], Literal["xhigh"]
+    :vartype effort: str
+    :ivar summary: The reasoning summary verbosity. Is one of the following types:
+     Literal["auto"], Literal["concise"], Literal["detailed"]
+    :vartype summary: str
+    :ivar generate_summary: Presumably a legacy alias of ``summary`` (same allowed values --
+     confirm). Is one of the following types: Literal["auto"], Literal["concise"], Literal["detailed"]
+    :vartype generate_summary: str
+    """
+
+    effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Is one of the following types: Literal[\"none\"], Literal[\"minimal\"], Literal[\"low\"],
+    Literal[\"medium\"], Literal[\"high\"], Literal[\"xhigh\"]"""
+    summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Is one of the following types: Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]"""
+    generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Is one of the following types: Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] = None,
+        summary: Optional[Literal["auto", "concise", "detailed"]] = None,
+        generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ReasoningTextContent(_Model):
+    """A reasoning text content part.
+
+    :ivar type: The type of the reasoning text. Always ``reasoning_text``. Required. Default value
+     is "reasoning_text".
+    :vartype type: str
+    :ivar text: The reasoning text from the model. Required.
+    :vartype text: str
+    """
+
+    type: Literal["reasoning_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The type of the reasoning text. Always ``reasoning_text``. Required. Default value is
+    \"reasoning_text\"."""
+    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The reasoning text from the model. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        text: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type: Literal["reasoning_text"] = "reasoning_text"  # fixed marker; not settable by callers
+
+
+class Response(_Model):
+ """The response object.
+
+ :ivar metadata:
+ :vartype metadata: ~azure.ai.responses.server.sdk.models.models.Metadata
+ :ivar top_logprobs:
+ :vartype top_logprobs: int
+ :ivar temperature:
+ :vartype temperature: int
+ :ivar top_p:
+ :vartype top_p: int
+ :ivar user: This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
+ ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
+ end-users. Used to boost cache hit rates by better bucketing similar requests and to help
+ OpenAI detect and prevent abuse. `Learn more
+ `_.
+ :vartype user: str
+ :ivar safety_identifier: A stable identifier used to help detect users of your application that
+ may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies
+ each user, with a maximum length of 64 characters. We recommend hashing their username or email
+ address, in order to avoid sending us any identifying information. `Learn more
+ `_.
+ :vartype safety_identifier: str
+ :ivar prompt_cache_key: Used by OpenAI to cache responses for similar requests to optimize your
+ cache hit rates. Replaces the ``user`` field. `Learn more `_.
+ :vartype prompt_cache_key: str
+ :ivar service_tier: Is one of the following types: Literal["auto"], Literal["default"],
+ Literal["flex"], Literal["scale"], Literal["priority"]
+ :vartype service_tier: str or str or str or str or str
+ :ivar prompt_cache_retention: Is either a Literal["in-memory"] type or a Literal["24h"] type.
+ :vartype prompt_cache_retention: str or str
+ :ivar previous_response_id:
+ :vartype previous_response_id: str
+ :ivar model: The model deployment to use for the creation of this response.
+ :vartype model: str
+ :ivar reasoning:
+ :vartype reasoning: ~azure.ai.responses.server.sdk.models.models.Reasoning
+ :ivar background:
+ :vartype background: bool
+ :ivar max_output_tokens:
+ :vartype max_output_tokens: int
+ :ivar max_tool_calls:
+ :vartype max_tool_calls: int
+ :ivar text:
+ :vartype text: ~azure.ai.responses.server.sdk.models.models.ResponseTextParam
+ :ivar tools:
+ :vartype tools: list[~azure.ai.responses.server.sdk.models.models.Tool]
+ :ivar tool_choice: Is either a Union[str, "_models.ToolChoiceOptions"] type or a
+ ToolChoiceParam type.
+ :vartype tool_choice: str or ~azure.ai.responses.server.sdk.models.models.ToolChoiceOptions or
+ ~azure.ai.responses.server.sdk.models.models.ToolChoiceParam
+ :ivar prompt:
+ :vartype prompt: ~azure.ai.responses.server.sdk.models.models.Prompt
+ :ivar truncation: Is either a Literal["auto"] type or a Literal["disabled"] type.
+ :vartype truncation: str or str
+ :ivar id: Unique identifier for this Response. Required.
+ :vartype id: str
+ :ivar object: The object type of this resource - always set to ``response``. Required. Default
+ value is "response".
+ :vartype object: str
+ :ivar status: The status of the response generation. One of ``completed``, ``failed``,
+ ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types:
+ Literal["completed"], Literal["failed"], Literal["in_progress"], Literal["cancelled"],
+ Literal["queued"], Literal["incomplete"]
+ :vartype status: str or str or str or str or str or str
+ :ivar created_at: Unix timestamp (in seconds) of when this Response was created. Required.
+ :vartype created_at: ~datetime.datetime
+ :ivar completed_at:
+ :vartype completed_at: ~datetime.datetime
+ :ivar error: Required.
+ :vartype error: ~azure.ai.responses.server.sdk.models.models.ResponseError
+ :ivar incomplete_details: Required.
+ :vartype incomplete_details:
+ ~azure.ai.responses.server.sdk.models.models.ResponseIncompleteDetails
+ :ivar output: An array of content items generated by the model.
+
+ * The length and order of items in the `output` array is dependent
+ on the model's response.
+ * Rather than accessing the first item in the `output` array and
+ assuming it's an `assistant` message with the content generated by
+ the model, you might consider using the `output_text` property where
+ supported in SDKs. Required.
+ :vartype output: list[~azure.ai.responses.server.sdk.models.models.OutputItem]
+ :ivar instructions: Required. Is either a str type or a [Item] type.
+ :vartype instructions: str or list[~azure.ai.responses.server.sdk.models.models.Item]
+ :ivar output_text:
+ :vartype output_text: str
+ :ivar usage:
+ :vartype usage: ~azure.ai.responses.server.sdk.models.models.ResponseUsage
+ :ivar parallel_tool_calls: Whether to allow the model to run tool calls in parallel. Required.
+ :vartype parallel_tool_calls: bool
+ :ivar conversation:
+ :vartype conversation: ~azure.ai.responses.server.sdk.models.models.ConversationReference
+ :ivar agent: (Deprecated) Use agent_reference instead. The agent used for this response.
+ :vartype agent: ~azure.ai.responses.server.sdk.models.models.AgentId
+ :ivar agent_reference: The agent used for this response. Required.
+ :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+ :ivar agent_session_id: The session identifier for this response. Currently only relevant for
+ hosted agents. Always returned for hosted agents — either the caller-provided value, the
+ auto-derived value, or an auto-generated UUID. Use for session-scoped operations and to
+ maintain sandbox affinity in follow-up calls.
+ :vartype agent_session_id: str
+ """
+
+ metadata: Optional["_models.Metadata"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ temperature: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ top_p: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """This field is being replaced by ``safety_identifier`` and ``prompt_cache_key``. Use
+ ``prompt_cache_key`` instead to maintain caching optimizations. A stable identifier for your
+ end-users. Used to boost cache hit rates by better bucketing similar requests and to help
+ OpenAI detect and prevent abuse. `Learn more
+ `_."""
+ safety_identifier: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A stable identifier used to help detect users of your application that may be violating
+ OpenAI's usage policies. The IDs should be a string that uniquely identifies each user, with a
+ maximum length of 64 characters. We recommend hashing their username or email address, in order
+ to avoid sending us any identifying information. `Learn more
+ `_."""
+ prompt_cache_key: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Used by OpenAI to cache responses for similar requests to optimize your cache hit rates.
+ Replaces the ``user`` field. `Learn more `_."""
+ service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is one of the following types: Literal[\"auto\"], Literal[\"default\"], Literal[\"flex\"],
+ Literal[\"scale\"], Literal[\"priority\"]"""
+ prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is either a Literal[\"in-memory\"] type or a Literal[\"24h\"] type."""
+ previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The model deployment to use for the creation of this response."""
+ reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ text: Optional["_models.ResponseTextParam"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a ToolChoiceParam type."""
+ prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ truncation: Optional[Literal["auto", "disabled"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is either a Literal[\"auto\"] type or a Literal[\"disabled\"] type."""
+ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Unique identifier for this Response. Required."""
+ object: Literal["response"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The object type of this resource - always set to ``response``. Required. Default value is
+ \"response\"."""
+ status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the response generation. One of ``completed``, ``failed``, ``in_progress``,
+ ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types:
+ Literal[\"completed\"], Literal[\"failed\"], Literal[\"in_progress\"], Literal[\"cancelled\"],
+ Literal[\"queued\"], Literal[\"incomplete\"]"""
+ created_at: datetime.datetime = rest_field(
+ visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
+ )
+ """Unix timestamp (in seconds) of when this Response was created. Required."""
+ completed_at: Optional[datetime.datetime] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp"
+ )
+ error: "_models.ResponseError" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ incomplete_details: "_models.ResponseIncompleteDetails" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required."""
+ output: list["_models.OutputItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """An array of content items generated by the model.
+
+ * The length and order of items in the `output` array is dependent
+ on the model's response.
+ * Rather than accessing the first item in the `output` array and
+ assuming it's an `assistant` message with the content generated by
+ the model, you might consider using the `output_text` property where
+ supported in SDKs. Required."""
+ instructions: Union[str, list["_models.Item"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required. Is either a str type or a [Item] type."""
+ output_text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ usage: Optional["_models.ResponseUsage"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ parallel_tool_calls: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Whether to allow the model to run tool calls in parallel. Required."""
+ conversation: Optional["_models.ConversationReference"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """(Deprecated) Use agent_reference instead. The agent used for this response."""
+ agent_reference: "_models.AgentReference" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The agent used for this response. Required."""
+ agent_session_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The session identifier for this response. Currently only relevant for hosted agents. Always
+ returned for hosted agents — either the caller-provided value, the auto-derived value, or an
+ auto-generated UUID. Use for session-scoped operations and to maintain sandbox affinity in
+ follow-up calls."""
+
+ @overload
+ def __init__( # pylint: disable=too-many-locals
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ created_at: datetime.datetime,
+ error: "_models.ResponseError",
+ incomplete_details: "_models.ResponseIncompleteDetails",
+ output: list["_models.OutputItem"],
+ instructions: Union[str, list["_models.Item"]],
+ parallel_tool_calls: bool,
+ agent_reference: "_models.AgentReference",
+ metadata: Optional["_models.Metadata"] = None,
+ top_logprobs: Optional[int] = None,
+ temperature: Optional[int] = None,
+ top_p: Optional[int] = None,
+ user: Optional[str] = None,
+ safety_identifier: Optional[str] = None,
+ prompt_cache_key: Optional[str] = None,
+ service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] = None,
+ prompt_cache_retention: Optional[Literal["in-memory", "24h"]] = None,
+ previous_response_id: Optional[str] = None,
+ model: Optional[str] = None,
+ reasoning: Optional["_models.Reasoning"] = None,
+ background: Optional[bool] = None,
+ max_output_tokens: Optional[int] = None,
+ max_tool_calls: Optional[int] = None,
+ text: Optional["_models.ResponseTextParam"] = None,
+ tools: Optional[list["_models.Tool"]] = None,
+ tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceParam"]] = None,
+ prompt: Optional["_models.Prompt"] = None,
+ truncation: Optional[Literal["auto", "disabled"]] = None,
+ status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = None,
+ completed_at: Optional[datetime.datetime] = None,
+ output_text: Optional[str] = None,
+ usage: Optional["_models.ResponseUsage"] = None,
+ conversation: Optional["_models.ConversationReference"] = None,
+ agent: Optional["_models.AgentId"] = None,
+ agent_session_id: Optional[str] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.object: Literal["response"] = "response"
+
+
class ResponseStreamEvent(_Model):
    """ResponseStreamEvent.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ResponseErrorEvent, ResponseAudioDeltaEvent, ResponseAudioDoneEvent,
    ResponseAudioTranscriptDeltaEvent, ResponseAudioTranscriptDoneEvent,
    ResponseCodeInterpreterCallCompletedEvent, ResponseCodeInterpreterCallInProgressEvent,
    ResponseCodeInterpreterCallInterpretingEvent, ResponseCodeInterpreterCallCodeDeltaEvent,
    ResponseCodeInterpreterCallCodeDoneEvent, ResponseCompletedEvent,
    ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, ResponseCreatedEvent,
    ResponseCustomToolCallInputDeltaEvent, ResponseCustomToolCallInputDoneEvent,
    ResponseFailedEvent, ResponseFileSearchCallCompletedEvent,
    ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent,
    ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent,
    ResponseImageGenCallCompletedEvent, ResponseImageGenCallGeneratingEvent,
    ResponseImageGenCallInProgressEvent, ResponseImageGenCallPartialImageEvent,
    ResponseInProgressEvent, ResponseIncompleteEvent, ResponseMCPCallCompletedEvent,
    ResponseMCPCallFailedEvent, ResponseMCPCallInProgressEvent, ResponseMCPCallArgumentsDeltaEvent,
    ResponseMCPCallArgumentsDoneEvent, ResponseMCPListToolsCompletedEvent,
    ResponseMCPListToolsFailedEvent, ResponseMCPListToolsInProgressEvent,
    ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent,
    ResponseOutputTextAnnotationAddedEvent, ResponseTextDeltaEvent, ResponseTextDoneEvent,
    ResponseQueuedEvent, ResponseReasoningSummaryPartAddedEvent,
    ResponseReasoningSummaryPartDoneEvent, ResponseReasoningSummaryTextDeltaEvent,
    ResponseReasoningSummaryTextDoneEvent, ResponseReasoningTextDeltaEvent,
    ResponseReasoningTextDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent,
    ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent,
    ResponseWebSearchCallSearchingEvent

    :ivar type: Required. Known values are: "response.audio.delta", "response.audio.done",
     "response.audio.transcript.delta", "response.audio.transcript.done",
     "response.code_interpreter_call_code.delta", "response.code_interpreter_call_code.done",
     "response.code_interpreter_call.completed", "response.code_interpreter_call.in_progress",
     "response.code_interpreter_call.interpreting", "response.completed",
     "response.content_part.added", "response.content_part.done", "response.created", "error",
     "response.file_search_call.completed", "response.file_search_call.in_progress",
     "response.file_search_call.searching", "response.function_call_arguments.delta",
     "response.function_call_arguments.done", "response.in_progress", "response.failed",
     "response.incomplete", "response.output_item.added", "response.output_item.done",
     "response.reasoning_summary_part.added", "response.reasoning_summary_part.done",
     "response.reasoning_summary_text.delta", "response.reasoning_summary_text.done",
     "response.reasoning_text.delta", "response.reasoning_text.done", "response.refusal.delta",
     "response.refusal.done", "response.output_text.delta", "response.output_text.done",
     "response.web_search_call.completed", "response.web_search_call.in_progress",
     "response.web_search_call.searching", "response.image_generation_call.completed",
     "response.image_generation_call.generating", "response.image_generation_call.in_progress",
     "response.image_generation_call.partial_image", "response.mcp_call_arguments.delta",
     "response.mcp_call_arguments.done", "response.mcp_call.completed", "response.mcp_call.failed",
     "response.mcp_call.in_progress", "response.mcp_list_tools.completed",
     "response.mcp_list_tools.failed", "response.mcp_list_tools.in_progress",
     "response.output_text.annotation.added", "response.queued",
     "response.custom_tool_call_input.delta", and "response.custom_tool_call_input.done".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ResponseStreamEventType
    :ivar sequence_number: Required.
    :vartype sequence_number: int
    """

    # Discriminator registry: sub-classes declared with `discriminator="..."` are
    # registered here so the correct concrete class is chosen when deserializing
    # a payload by its "type" field (populated by the _Model machinery —
    # NOTE(review): registration mechanics live in the base _Model, not shown here).
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"response.audio.delta\", \"response.audio.done\",
    \"response.audio.transcript.delta\", \"response.audio.transcript.done\",
    \"response.code_interpreter_call_code.delta\", \"response.code_interpreter_call_code.done\",
    \"response.code_interpreter_call.completed\", \"response.code_interpreter_call.in_progress\",
    \"response.code_interpreter_call.interpreting\", \"response.completed\",
    \"response.content_part.added\", \"response.content_part.done\", \"response.created\",
    \"error\", \"response.file_search_call.completed\", \"response.file_search_call.in_progress\",
    \"response.file_search_call.searching\", \"response.function_call_arguments.delta\",
    \"response.function_call_arguments.done\", \"response.in_progress\", \"response.failed\",
    \"response.incomplete\", \"response.output_item.added\", \"response.output_item.done\",
    \"response.reasoning_summary_part.added\", \"response.reasoning_summary_part.done\",
    \"response.reasoning_summary_text.delta\", \"response.reasoning_summary_text.done\",
    \"response.reasoning_text.delta\", \"response.reasoning_text.done\",
    \"response.refusal.delta\", \"response.refusal.done\", \"response.output_text.delta\",
    \"response.output_text.done\", \"response.web_search_call.completed\",
    \"response.web_search_call.in_progress\", \"response.web_search_call.searching\",
    \"response.image_generation_call.completed\", \"response.image_generation_call.generating\",
    \"response.image_generation_call.in_progress\",
    \"response.image_generation_call.partial_image\", \"response.mcp_call_arguments.delta\",
    \"response.mcp_call_arguments.done\", \"response.mcp_call.completed\",
    \"response.mcp_call.failed\", \"response.mcp_call.in_progress\",
    \"response.mcp_list_tools.completed\", \"response.mcp_list_tools.failed\",
    \"response.mcp_list_tools.in_progress\", \"response.output_text.annotation.added\",
    \"response.queued\", \"response.custom_tool_call_input.delta\", and
    \"response.custom_tool_call_input.done\"."""
    sequence_number: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        type: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    # Base class sets no discriminator default; each sub-class pins self.type
    # to its own literal after delegating here.
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ResponseAudioDeltaEvent(ResponseStreamEvent, discriminator="response.audio.delta"):
    """Emitted when there is a partial audio response.

    :ivar type: The type of the event. Always ``response.audio.delta``. Required.
     RESPONSE_AUDIO_DELTA.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_DELTA
    :ivar sequence_number: A sequence number for this chunk of the stream response. Required.
    :vartype sequence_number: int
    :ivar delta: A chunk of Base64 encoded response audio bytes. Required.
    :vartype delta: bytes
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.delta``. Required. RESPONSE_AUDIO_DELTA."""
    delta: bytes = rest_field(visibility=["read", "create", "update", "delete", "query"], format="base64")
    """A chunk of Base64 encoded response audio bytes. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        delta: bytes,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_DELTA  # type: ignore
+
+
class ResponseAudioDoneEvent(ResponseStreamEvent, discriminator="response.audio.done"):
    """Emitted when the audio response is complete.

    :ivar type: The type of the event. Always ``response.audio.done``. Required.
     RESPONSE_AUDIO_DONE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_DONE
    :ivar sequence_number: The sequence number of the delta. Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.done``. Required. RESPONSE_AUDIO_DONE."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_DONE  # type: ignore
+
+
class ResponseAudioTranscriptDeltaEvent(ResponseStreamEvent, discriminator="response.audio.transcript.delta"):
    """Emitted when there is a partial transcript of audio.

    :ivar type: The type of the event. Always ``response.audio.transcript.delta``. Required.
     RESPONSE_AUDIO_TRANSCRIPT_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_TRANSCRIPT_DELTA
    :ivar delta: The partial transcript of the audio response. Required.
    :vartype delta: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.transcript.delta``. Required.
    RESPONSE_AUDIO_TRANSCRIPT_DELTA."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The partial transcript of the audio response. Required."""

    @overload
    def __init__(
        self,
        *,
        delta: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DELTA  # type: ignore
+
+
class ResponseAudioTranscriptDoneEvent(ResponseStreamEvent, discriminator="response.audio.transcript.done"):
    """Emitted when the full audio transcript is completed.

    :ivar type: The type of the event. Always ``response.audio.transcript.done``. Required.
     RESPONSE_AUDIO_TRANSCRIPT_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_AUDIO_TRANSCRIPT_DONE
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.audio.transcript.done``. Required.
    RESPONSE_AUDIO_TRANSCRIPT_DONE."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_AUDIO_TRANSCRIPT_DONE  # type: ignore
+
+
class ResponseCodeInterpreterCallCodeDeltaEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call_code.delta"
):  # pylint: disable=name-too-long
    """Emitted when a partial code snippet is streamed by the code interpreter.

    :ivar type: The type of the event. Always ``response.code_interpreter_call_code.delta``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA
    :ivar output_index: The index of the output item in the response for which the code is being
     streamed. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar delta: The partial code snippet being streamed by the code interpreter. Required.
    :vartype delta: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call_code.delta``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code is being streamed. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The partial code snippet being streamed by the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        delta: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA  # type: ignore
+
+
class ResponseCodeInterpreterCallCodeDoneEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call_code.done"
):
    """Emitted when the code snippet is finalized by the code interpreter.

    :ivar type: The type of the event. Always ``response.code_interpreter_call_code.done``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE
    :ivar output_index: The index of the output item in the response for which the code is
     finalized. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar code: The final code snippet output by the code interpreter. Required.
    :vartype code: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call_code.done``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code is finalized. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The final code snippet output by the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        code: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE  # type: ignore
+
+
class ResponseCodeInterpreterCallCompletedEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.completed"
):  # pylint: disable=name-too-long
    """Emitted when the code interpreter call is completed.

    :ivar type: The type of the event. Always ``response.code_interpreter_call.completed``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED
    :ivar output_index: The index of the output item in the response for which the code interpreter
     call is completed. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.completed``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_COMPLETED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter call is completed.
    Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED  # type: ignore
+
+
class ResponseCodeInterpreterCallInProgressEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.in_progress"
):  # pylint: disable=name-too-long
    """Emitted when a code interpreter call is in progress.

    :ivar type: The type of the event. Always ``response.code_interpreter_call.in_progress``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS
    :ivar output_index: The index of the output item in the response for which the code interpreter
     call is in progress. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.in_progress``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter call is in
    progress. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS  # type: ignore
+
+
class ResponseCodeInterpreterCallInterpretingEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.interpreting"
):  # pylint: disable=name-too-long
    """Emitted when the code interpreter is actively interpreting the code snippet.

    :ivar type: The type of the event. Always ``response.code_interpreter_call.interpreting``.
     Required. RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING
    :ivar output_index: The index of the output item in the response for which the code interpreter
     is interpreting code. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event, used to order streaming events.
     Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.interpreting``. Required.
    RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter is interpreting
    code. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING  # type: ignore
+
+
class ResponseCompletedEvent(ResponseStreamEvent, discriminator="response.completed"):
    """Emitted when the model response is complete.

    :ivar type: The type of the event. Always ``response.completed``. Required. RESPONSE_COMPLETED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_COMPLETED
    :ivar response: Properties of the completed response. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.completed``. Required. RESPONSE_COMPLETED."""
    # Carries the full Response model as payload, unlike the delta events above.
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Properties of the completed response. Required."""

    @overload
    def __init__(
        self,
        *,
        response: "_models.Response",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_COMPLETED  # type: ignore
+
+
class ResponseContentPartAddedEvent(ResponseStreamEvent, discriminator="response.content_part.added"):
    """Emitted when a new content part is added.

    :ivar type: The type of the event. Always ``response.content_part.added``. Required.
     RESPONSE_CONTENT_PART_ADDED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_CONTENT_PART_ADDED
    :ivar item_id: The ID of the output item that the content part was added to. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that the content part was added to. Required.
    :vartype output_index: int
    :ivar content_index: The index of the content part that was added. Required.
    :vartype content_index: int
    :ivar part: The content part that was added. Required.
    :vartype part: ~azure.ai.responses.server.sdk.models.models.OutputContent
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # type: ignore narrows the base class's plain-str discriminator to this event's literal.
    type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.content_part.added``. Required.
    RESPONSE_CONTENT_PART_ADDED."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the content part was added to. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the content part was added to. Required."""
    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the content part that was added. Required."""
    part: "_models.OutputContent" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content part that was added. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        content_index: int,
        part: "_models.OutputContent",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so directly-constructed instances carry the right event type.
        self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED  # type: ignore
+
+
class ResponseContentPartDoneEvent(ResponseStreamEvent, discriminator="response.content_part.done"):
    """Emitted when a content part is done.

    :ivar type: The type of the event. Always ``response.content_part.done``. Required.
     RESPONSE_CONTENT_PART_DONE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_CONTENT_PART_DONE
    :ivar item_id: The ID of the output item that the content part was added to. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that the content part was added to. Required.
    :vartype output_index: int
    :ivar content_index: The index of the content part that is done. Required.
    :vartype content_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar part: The content part that is done. Required.
    :vartype part: ~azure.ai.responses.server.sdk.models.models.OutputContent
    """

    # Discriminator field: the wire value "response.content_part.done" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.content_part.done``. Required.
    RESPONSE_CONTENT_PART_DONE."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the content part was added to. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the content part was added to. Required."""
    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the content part that is done. Required."""
    part: "_models.OutputContent" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content part that is done. Required."""
    # NOTE(review): `sequence_number` is accepted by __init__ but not declared here —
    # presumably declared on the base ResponseStreamEvent; confirm.

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        content_index: int,
        sequence_number: int,
        part: "_models.OutputContent",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE  # type: ignore
+
+
class ResponseCreatedEvent(ResponseStreamEvent, discriminator="response.created"):
    """An event that is emitted when a response is created.

    :ivar type: The type of the event. Always ``response.created``. Required. RESPONSE_CREATED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_CREATED
    :ivar response: The response that was created. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "response.created" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_CREATED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.created``. Required. RESPONSE_CREATED."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The response that was created. Required."""

    @overload
    def __init__(
        self,
        *,
        response: "_models.Response",
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_CREATED  # type: ignore
+
+
class ResponseCustomToolCallInputDeltaEvent(ResponseStreamEvent, discriminator="response.custom_tool_call_input.delta"):
    """Emitted when a partial chunk of input for a custom tool call is streamed.

    :ivar type: The event type identifier. Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar output_index: The index of the output this delta applies to. Required.
    :vartype output_index: int
    :ivar item_id: Unique identifier for the API item associated with this event. Required.
    :vartype item_id: str
    :ivar delta: The incremental input data (delta) for the custom tool call. Required.
    :vartype delta: str
    """

    # Discriminator field: the wire value "response.custom_tool_call_input.delta" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The event type identifier. Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output this delta applies to. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique identifier for the API item associated with this event. Required."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The incremental input data (delta) for the custom tool call. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
        delta: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DELTA  # type: ignore
+
+
class ResponseCustomToolCallInputDoneEvent(ResponseStreamEvent, discriminator="response.custom_tool_call_input.done"):
    """Emitted when the input for a custom tool call is complete.

    :ivar type: The event type identifier. Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar output_index: The index of the output this event applies to. Required.
    :vartype output_index: int
    :ivar item_id: Unique identifier for the API item associated with this event. Required.
    :vartype item_id: str
    :ivar input: The complete input data for the custom tool call. Required.
    :vartype input: str
    """

    # Discriminator field: the wire value "response.custom_tool_call_input.done" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The event type identifier. Required. RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output this event applies to. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique identifier for the API item associated with this event. Required."""
    # Note: shadows the built-in name `input` at class scope; generated from the wire field name.
    input: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The complete input data for the custom tool call. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
        input: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_CUSTOM_TOOL_CALL_INPUT_DONE  # type: ignore
+
+
class ResponseError(_Model):
    """An error object returned when the model fails to generate a Response.

    :ivar code: The error code identifying the failure. Required. Known values are:
     "server_error", "rate_limit_exceeded",
     "invalid_prompt", "vector_store_timeout", "invalid_image", "invalid_image_format",
     "invalid_base64_image", "invalid_image_url", "image_too_large", "image_too_small",
     "image_parse_error", "image_content_policy_violation", "invalid_image_mode",
     "image_file_too_large", "unsupported_image_media_type", "empty_image_file",
     "failed_to_download_image", and "image_file_not_found".
    :vartype code: str or ~azure.ai.responses.server.sdk.models.models.ResponseErrorCode
    :ivar message: A human-readable description of the error. Required.
    :vartype message: str
    """

    # Extensible enum: accepts either a known ResponseErrorCode or an arbitrary string.
    code: Union[str, "_models.ResponseErrorCode"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. Known values are: \"server_error\", \"rate_limit_exceeded\", \"invalid_prompt\",
    \"vector_store_timeout\", \"invalid_image\", \"invalid_image_format\",
    \"invalid_base64_image\", \"invalid_image_url\", \"image_too_large\", \"image_too_small\",
    \"image_parse_error\", \"image_content_policy_violation\", \"invalid_image_mode\",
    \"image_file_too_large\", \"unsupported_image_media_type\", \"empty_image_file\",
    \"failed_to_download_image\", and \"image_file_not_found\"."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A human-readable description of the error. Required."""

    @overload
    def __init__(
        self,
        *,
        code: Union[str, "_models.ResponseErrorCode"],
        message: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
class ResponseErrorEvent(ResponseStreamEvent, discriminator="error"):
    """Emitted when an error occurs.

    :ivar type: The type of the event. Always ``error``. Required. ERROR.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ERROR
    :ivar code: The error code. Required.
    :vartype code: str
    :ivar message: The error message. Required.
    :vartype message: str
    :ivar param: The parameter associated with the error. Required.
    :vartype param: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "error" selects this subtype.
    type: Literal[ResponseStreamEventType.ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``error``. Required. ERROR."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The error message. Required."""
    # NOTE(review): declared as a required str; some upstream error events send null for
    # `code`/`param` — confirm nullability against the service spec.
    param: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        code: str,
        message: str,
        param: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.ERROR  # type: ignore
+
+
class ResponseFailedEvent(ResponseStreamEvent, discriminator="response.failed"):
    """An event that is emitted when a response fails.

    :ivar type: The type of the event. Always ``response.failed``. Required. RESPONSE_FAILED.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_FAILED
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar response: The response that failed. Required.
    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
    """

    # Discriminator field: the wire value "response.failed" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.failed``. Required. RESPONSE_FAILED."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The response that failed. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        response: "_models.Response",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_FAILED  # type: ignore
+
+
class ResponseFileSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.file_search_call.completed"):
    """Emitted when a file search call is completed (results found).

    :ivar type: The type of the event. Always ``response.file_search_call.completed``. Required.
     RESPONSE_FILE_SEARCH_CALL_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FILE_SEARCH_CALL_COMPLETED
    :ivar output_index: The index of the output item that the file search call is initiated.
     Required.
    :vartype output_index: int
    :ivar item_id: The ID of the output item that the file search call is initiated. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "response.file_search_call.completed" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.file_search_call.completed``. Required.
    RESPONSE_FILE_SEARCH_CALL_COMPLETED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the file search call is initiated. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the file search call is initiated. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED  # type: ignore
+
+
class ResponseFileSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.file_search_call.in_progress"):
    """Emitted when a file search call is initiated.

    :ivar type: The type of the event. Always ``response.file_search_call.in_progress``. Required.
     RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS
    :ivar output_index: The index of the output item that the file search call is initiated.
     Required.
    :vartype output_index: int
    :ivar item_id: The ID of the output item that the file search call is initiated. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "response.file_search_call.in_progress" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.file_search_call.in_progress``. Required.
    RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the file search call is initiated. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the file search call is initiated. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS  # type: ignore
+
+
class ResponseFileSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.file_search_call.searching"):
    """Emitted when a file search is currently searching.

    :ivar type: The type of the event. Always ``response.file_search_call.searching``. Required.
     RESPONSE_FILE_SEARCH_CALL_SEARCHING.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FILE_SEARCH_CALL_SEARCHING
    :ivar output_index: The index of the output item that the file search call is searching.
     Required.
    :vartype output_index: int
    :ivar item_id: The ID of the output item that the file search call is initiated. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "response.file_search_call.searching" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.file_search_call.searching``. Required.
    RESPONSE_FILE_SEARCH_CALL_SEARCHING."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the file search call is searching. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the file search call is initiated. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING  # type: ignore
+
+
class ResponseFormatJsonSchemaSchema(_Model):
    """JSON schema.

    No fields are declared on this model.
    """

    # NOTE(review): presumably arbitrary JSON-schema properties are carried via the
    # base _Model mapping behavior (additional properties) — confirm against the spec.
+
+
class ResponseFunctionCallArgumentsDeltaEvent(
    ResponseStreamEvent, discriminator="response.function_call_arguments.delta"
):
    """Emitted when there is a partial function-call arguments delta.

    :ivar type: The type of the event. Always ``response.function_call_arguments.delta``. Required.
     RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA
    :ivar item_id: The ID of the output item that the function-call arguments delta is added to.
     Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that the function-call arguments delta is
     added to. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar delta: The function-call arguments delta that is added. Required.
    :vartype delta: str
    """

    # Discriminator field: the wire value "response.function_call_arguments.delta" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.function_call_arguments.delta``. Required.
    RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the function-call arguments delta is added to. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the function-call arguments delta is added to. Required."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The function-call arguments delta that is added. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        output_index: int,
        sequence_number: int,
        delta: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA  # type: ignore
+
+
class ResponseFunctionCallArgumentsDoneEvent(
    ResponseStreamEvent, discriminator="response.function_call_arguments.done"
):
    """Emitted when function-call arguments are finalized.

    :ivar type: The type of the event. Always ``response.function_call_arguments.done``. Required.
     RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE
    :ivar item_id: The ID of the item. Required.
    :vartype item_id: str
    :ivar name: The name of the function that was called. Required.
    :vartype name: str
    :ivar output_index: The index of the output item. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar arguments: The function-call arguments. Required.
    :vartype arguments: str
    """

    # Discriminator field: the wire value "response.function_call_arguments.done" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """Required. RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the item. Required."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the function that was called. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The function-call arguments. Required."""

    @overload
    def __init__(
        self,
        *,
        item_id: str,
        name: str,
        output_index: int,
        sequence_number: int,
        arguments: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE  # type: ignore
+
+
class ResponseImageGenCallCompletedEvent(ResponseStreamEvent, discriminator="response.image_generation_call.completed"):
    """Emitted when an image generation call has completed.

    :ivar type: The type of the event. Always 'response.image_generation_call.completed'. Required.
     RESPONSE_IMAGE_GENERATION_CALL_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar sequence_number: The sequence number of this event. Required.
    :vartype sequence_number: int
    :ivar item_id: The unique identifier of the image generation item being processed. Required.
    :vartype item_id: str
    """

    # Discriminator field: the wire value "response.image_generation_call.completed" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always 'response.image_generation_call.completed'. Required.
    RESPONSE_IMAGE_GENERATION_CALL_COMPLETED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the image generation item being processed. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        sequence_number: int,
        item_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED  # type: ignore
+
+
class ResponseImageGenCallGeneratingEvent(
    ResponseStreamEvent, discriminator="response.image_generation_call.generating"
):
    """Emitted while an image generation call is actively generating.

    :ivar type: The type of the event. Always 'response.image_generation_call.generating'.
     Required. RESPONSE_IMAGE_GENERATION_CALL_GENERATING.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_GENERATING
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the image generation item being processed. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of the image generation item being processed.
     Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "response.image_generation_call.generating" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always 'response.image_generation_call.generating'. Required.
    RESPONSE_IMAGE_GENERATION_CALL_GENERATING."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the image generation item being processed. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING  # type: ignore
+
+
class ResponseImageGenCallInProgressEvent(
    ResponseStreamEvent, discriminator="response.image_generation_call.in_progress"
):
    """Emitted when an image generation call is in progress.

    :ivar type: The type of the event. Always 'response.image_generation_call.in_progress'.
     Required. RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the image generation item being processed. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of the image generation item being processed.
     Required.
    :vartype sequence_number: int
    """

    # Discriminator field: the wire value "response.image_generation_call.in_progress" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always 'response.image_generation_call.in_progress'. Required.
    RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the image generation item being processed. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS  # type: ignore
+
+
class ResponseImageGenCallPartialImageEvent(
    ResponseStreamEvent, discriminator="response.image_generation_call.partial_image"
):
    """Emitted when a partial image is available during image generation streaming.

    :ivar type: The type of the event. Always 'response.image_generation_call.partial_image'.
     Required. RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE
    :ivar output_index: The index of the output item in the response's output array. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the image generation item being processed. Required.
    :vartype item_id: str
    :ivar sequence_number: The sequence number of the image generation item being processed.
     Required.
    :vartype sequence_number: int
    :ivar partial_image_index: 0-based index for the partial image (backend is 1-based, but this is
     0-based for the user). Required.
    :vartype partial_image_index: int
    :ivar partial_image_b64: Base64-encoded partial image data, suitable for rendering as an image.
     Required.
    :vartype partial_image_b64: str
    """

    # Discriminator field: the wire value "response.image_generation_call.partial_image" selects this subtype.
    type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always 'response.image_generation_call.partial_image'. Required.
    RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response's output array. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the image generation item being processed. Required."""
    # Index is normalized to 0-based for users even though the backend counts from 1.
    partial_image_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """0-based index for the partial image (backend is 1-based, but this is 0-based for the user).
    Required."""
    partial_image_b64: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Base64-encoded partial image data, suitable for rendering as an image. Required."""

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
        partial_image_index: int,
        partial_image_b64: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator so every instance carries the correct event type.
        self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE  # type: ignore
+
+
class ResponseIncompleteDetails(_Model):
    """Details about why a response is incomplete.

    :ivar reason: The reason the response is incomplete; either ``"max_output_tokens"`` or
     ``"content_filter"``. Optional.
    :vartype reason: str
    """

    reason: Optional[Literal["max_output_tokens", "content_filter"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The reason the response is incomplete: \"max_output_tokens\" or \"content_filter\"."""

    @overload
    def __init__(
        self,
        *,
        reason: Optional[Literal["max_output_tokens", "content_filter"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+
+
+class ResponseIncompleteEvent(ResponseStreamEvent, discriminator="response.incomplete"):
+    """An event that is emitted when a response finishes as incomplete.
+
+    :ivar type: The type of the event. Always ``response.incomplete``. Required.
+     RESPONSE_INCOMPLETE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_INCOMPLETE
+    :ivar response: The response that was incomplete. Required.
+    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_INCOMPLETE by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_INCOMPLETE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.incomplete``. Required. RESPONSE_INCOMPLETE."""
+    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response that was incomplete. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        response: "_models.Response",
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_INCOMPLETE # type: ignore
+
+
+class ResponseInProgressEvent(ResponseStreamEvent, discriminator="response.in_progress"):
+    """Emitted when the response is in progress.
+
+    :ivar type: The type of the event. Always ``response.in_progress``. Required.
+     RESPONSE_IN_PROGRESS.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_IN_PROGRESS
+    :ivar response: The response that is in progress. Required.
+    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_IN_PROGRESS by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.in_progress``. Required. RESPONSE_IN_PROGRESS."""
+    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response that is in progress. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        response: "_models.Response",
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_IN_PROGRESS # type: ignore
+
+
+class ResponseLogProb(_Model):
+ """A logprob is the logarithmic probability that the model assigns to producing a particular token
+ at a given position in the sequence. Less-negative (higher) logprob values indicate greater
+ model confidence in that token choice.
+
+ :ivar token: A possible text token. Required.
+ :vartype token: str
+ :ivar logprob: The log probability of this token. Required.
+ :vartype logprob: int
+ :ivar top_logprobs: The log probability of the top 20 most likely tokens.
+ :vartype top_logprobs:
+ list[~azure.ai.responses.server.sdk.models.models.ResponseLogProbTopLogprobs]
+ """
+
+ token: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A possible text token. Required."""
+ logprob: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The log probability of this token. Required."""
+ top_logprobs: Optional[list["_models.ResponseLogProbTopLogprobs"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The log probability of the top 20 most likely tokens."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ token: str,
+ logprob: int,
+ top_logprobs: Optional[list["_models.ResponseLogProbTopLogprobs"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseLogProbTopLogprobs(_Model):
+ """ResponseLogProbTopLogprobs.
+
+ :ivar token:
+ :vartype token: str
+ :ivar logprob:
+ :vartype logprob: int
+ """
+
+ token: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ logprob: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+ @overload
+ def __init__(
+ self,
+ *,
+ token: Optional[str] = None,
+ logprob: Optional[int] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseMCPCallArgumentsDeltaEvent(ResponseStreamEvent, discriminator="response.mcp_call_arguments.delta"):
+    """ResponseMCPCallArgumentsDeltaEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_call_arguments.delta'. Required.
+     RESPONSE_MCP_CALL_ARGUMENTS_DELTA.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_ARGUMENTS_DELTA
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar item_id: The unique identifier of the MCP tool call item being processed. Required.
+    :vartype item_id: str
+    :ivar delta: A JSON string containing the partial update to the arguments for the MCP tool
+     call. Required.
+    :vartype delta: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_CALL_ARGUMENTS_DELTA by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_call_arguments.delta'. Required.
+    RESPONSE_MCP_CALL_ARGUMENTS_DELTA."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the MCP tool call item being processed. Required."""
+    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string containing the partial update to the arguments for the MCP tool call. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        delta: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA # type: ignore
+
+
+class ResponseMCPCallArgumentsDoneEvent(ResponseStreamEvent, discriminator="response.mcp_call_arguments.done"):
+    """ResponseMCPCallArgumentsDoneEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_call_arguments.done'. Required.
+     RESPONSE_MCP_CALL_ARGUMENTS_DONE.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_ARGUMENTS_DONE
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar item_id: The unique identifier of the MCP tool call item being processed. Required.
+    :vartype item_id: str
+    :ivar arguments: A JSON string containing the finalized arguments for the MCP tool call.
+     Required.
+    :vartype arguments: str
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_CALL_ARGUMENTS_DONE by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_call_arguments.done'. Required.
+    RESPONSE_MCP_CALL_ARGUMENTS_DONE."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the MCP tool call item being processed. Required."""
+    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A JSON string containing the finalized arguments for the MCP tool call. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        item_id: str,
+        arguments: str,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE # type: ignore
+
+
+class ResponseMCPCallCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_call.completed"):
+    """ResponseMCPCallCompletedEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_call.completed'. Required.
+     RESPONSE_MCP_CALL_COMPLETED.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_COMPLETED
+    :ivar item_id: The ID of the MCP tool call item that completed. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that completed. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_CALL_COMPLETED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_call.completed'. Required.
+    RESPONSE_MCP_CALL_COMPLETED."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the MCP tool call item that completed. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that completed. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED # type: ignore
+
+
+class ResponseMCPCallFailedEvent(ResponseStreamEvent, discriminator="response.mcp_call.failed"):
+    """ResponseMCPCallFailedEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_call.failed'. Required.
+     RESPONSE_MCP_CALL_FAILED.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_FAILED
+    :ivar item_id: The ID of the MCP tool call item that failed. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that failed. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_CALL_FAILED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_call.failed'. Required. RESPONSE_MCP_CALL_FAILED."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the MCP tool call item that failed. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that failed. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED # type: ignore
+
+
+class ResponseMCPCallInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_call.in_progress"):
+    """ResponseMCPCallInProgressEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_call.in_progress'. Required.
+     RESPONSE_MCP_CALL_IN_PROGRESS.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_CALL_IN_PROGRESS
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar item_id: The unique identifier of the MCP tool call item being processed. Required.
+    :vartype item_id: str
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_CALL_IN_PROGRESS by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_call.in_progress'. Required.
+    RESPONSE_MCP_CALL_IN_PROGRESS."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the MCP tool call item being processed. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        sequence_number: int,
+        output_index: int,
+        item_id: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS # type: ignore
+
+
+class ResponseMCPListToolsCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.completed"):
+    """ResponseMCPListToolsCompletedEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_list_tools.completed'. Required.
+     RESPONSE_MCP_LIST_TOOLS_COMPLETED.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_LIST_TOOLS_COMPLETED
+    :ivar item_id: The ID of the MCP tool call item that produced this output. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that was processed. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_LIST_TOOLS_COMPLETED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_list_tools.completed'. Required.
+    RESPONSE_MCP_LIST_TOOLS_COMPLETED."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the MCP tool call item that produced this output. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that was processed. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED # type: ignore
+
+
+class ResponseMCPListToolsFailedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.failed"):
+    """ResponseMCPListToolsFailedEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_list_tools.failed'. Required.
+     RESPONSE_MCP_LIST_TOOLS_FAILED.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_LIST_TOOLS_FAILED
+    :ivar item_id: The ID of the MCP tool call item that failed. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that failed. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_LIST_TOOLS_FAILED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_list_tools.failed'. Required.
+    RESPONSE_MCP_LIST_TOOLS_FAILED."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the MCP tool call item that failed. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that failed. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED # type: ignore
+
+
+class ResponseMCPListToolsInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.in_progress"):
+    """ResponseMCPListToolsInProgressEvent.
+
+    :ivar type: The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.
+     RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS
+    :ivar item_id: The ID of the MCP tool call item that is being processed. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that is being processed. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.
+    RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the MCP tool call item that is being processed. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that is being processed. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS # type: ignore
+
+
+class ResponseOutputItemAddedEvent(ResponseStreamEvent, discriminator="response.output_item.added"):
+    """Emitted when a new output item is added.
+
+    :ivar type: The type of the event. Always ``response.output_item.added``. Required.
+     RESPONSE_OUTPUT_ITEM_ADDED.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_ITEM_ADDED
+    :ivar output_index: The index of the output item that was added. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar item: The output item that was added. Required.
+    :vartype item: ~azure.ai.responses.server.sdk.models.models.OutputItem
+    """
+
+    # Discriminator: fixed to RESPONSE_OUTPUT_ITEM_ADDED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.output_item.added``. Required.
+    RESPONSE_OUTPUT_ITEM_ADDED."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that was added. Required."""
+    item: "_models.OutputItem" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The output item that was added. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        sequence_number: int,
+        item: "_models.OutputItem",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED # type: ignore
+
+
+class ResponseOutputItemDoneEvent(ResponseStreamEvent, discriminator="response.output_item.done"):
+    """Emitted when an output item is marked done.
+
+    :ivar type: The type of the event. Always ``response.output_item.done``. Required.
+     RESPONSE_OUTPUT_ITEM_DONE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_ITEM_DONE
+    :ivar output_index: The index of the output item that was marked done. Required.
+    :vartype output_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar item: The output item that was marked done. Required.
+    :vartype item: ~azure.ai.responses.server.sdk.models.models.OutputItem
+    """
+
+    # Discriminator: fixed to RESPONSE_OUTPUT_ITEM_DONE by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.output_item.done``. Required.
+    RESPONSE_OUTPUT_ITEM_DONE."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that was marked done. Required."""
+    item: "_models.OutputItem" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The output item that was marked done. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        output_index: int,
+        sequence_number: int,
+        item: "_models.OutputItem",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE # type: ignore
+
+
+class ResponseOutputTextAnnotationAddedEvent(
+    ResponseStreamEvent, discriminator="response.output_text.annotation.added"
+):
+    """ResponseOutputTextAnnotationAddedEvent.
+
+    :ivar type: The type of the event. Always 'response.output_text.annotation.added'. Required.
+     RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED
+    :ivar item_id: The unique identifier of the item to which the annotation is being added.
+     Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item in the response's output array. Required.
+    :vartype output_index: int
+    :ivar content_index: The index of the content part within the output item. Required.
+    :vartype content_index: int
+    :ivar annotation_index: The index of the annotation within the content part. Required.
+    :vartype annotation_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar annotation: The annotation object being added. (See annotation schema for details.).
+     Required.
+    :vartype annotation: ~azure.ai.responses.server.sdk.models.models.Annotation
+    """
+
+    # Discriminator: fixed to RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.output_text.annotation.added'. Required.
+    RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the item to which the annotation is being added. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item in the response's output array. Required."""
+    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the content part within the output item. Required."""
+    annotation_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the annotation within the content part. Required."""
+    annotation: "_models.Annotation" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The annotation object being added. (See annotation schema for details.). Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        content_index: int,
+        annotation_index: int,
+        sequence_number: int,
+        annotation: "_models.Annotation",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED # type: ignore
+
+
+class ResponsePromptVariables(_Model):
+    """Prompt Variables.
+
+    NOTE(review): this model declares no fields of its own; presumably it is an
+    open/freeform mapping of prompt variable names to values — confirm against the
+    TypeSpec definition.
+    """
+
+
+class ResponseQueuedEvent(ResponseStreamEvent, discriminator="response.queued"):
+    """ResponseQueuedEvent.
+
+    :ivar type: The type of the event. Always 'response.queued'. Required. RESPONSE_QUEUED.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_QUEUED
+    :ivar response: The full response object that is queued. Required.
+    :vartype response: ~azure.ai.responses.server.sdk.models.models.Response
+    :ivar sequence_number: The sequence number for this event. Required.
+    :vartype sequence_number: int
+    """
+
+    # Discriminator: fixed to RESPONSE_QUEUED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_QUEUED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always 'response.queued'. Required. RESPONSE_QUEUED."""
+    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The full response object that is queued. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        response: "_models.Response",
+        sequence_number: int,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_QUEUED # type: ignore
+
+
+class ResponseReasoningSummaryPartAddedEvent(
+    ResponseStreamEvent, discriminator="response.reasoning_summary_part.added"
+):
+    """Emitted when a new reasoning summary part is added.
+
+    :ivar type: The type of the event. Always ``response.reasoning_summary_part.added``. Required.
+     RESPONSE_REASONING_SUMMARY_PART_ADDED.
+    :vartype type: str or
+     ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_PART_ADDED
+    :ivar item_id: The ID of the item this summary part is associated with. Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item this summary part is associated with.
+     Required.
+    :vartype output_index: int
+    :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+    :vartype summary_index: int
+    :ivar sequence_number: The sequence number of this event. Required.
+    :vartype sequence_number: int
+    :ivar part: The summary part that was added. Required.
+    :vartype part:
+     ~azure.ai.responses.server.sdk.models.models.ResponseReasoningSummaryPartAddedEventPart
+    """
+
+    # Discriminator: fixed to RESPONSE_REASONING_SUMMARY_PART_ADDED by __init__ below; callers do not pass ``type``.
+    type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+    """The type of the event. Always ``response.reasoning_summary_part.added``. Required.
+    RESPONSE_REASONING_SUMMARY_PART_ADDED."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the item this summary part is associated with. Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item this summary part is associated with. Required."""
+    summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the summary part within the reasoning summary. Required."""
+    part: "_models.ResponseReasoningSummaryPartAddedEventPart" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The summary part that was added. Required."""
+
+    # NOTE(review): ``sequence_number`` is documented above but not declared in this class
+    # body; presumably it is declared on the ResponseStreamEvent base class — confirm.
+    @overload
+    def __init__(
+        self,
+        *,
+        item_id: str,
+        output_index: int,
+        summary_index: int,
+        sequence_number: int,
+        part: "_models.ResponseReasoningSummaryPartAddedEventPart",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED # type: ignore
+
+
+class ResponseReasoningSummaryPartAddedEventPart(_Model): # pylint: disable=name-too-long
+    """A single text part of a reasoning summary.
+
+    :ivar type: Required. Default value is "summary_text".
+    :vartype type: str
+    :ivar text: Required.
+    :vartype text: str
+    """
+
+    # ``type`` is a constant (always "summary_text"); it is set in __init__ and is not
+    # accepted as a keyword argument — callers supply only ``text``.
+    type: Literal["summary_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required. Default value is \"summary_text\"."""
+    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        text: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type: Literal["summary_text"] = "summary_text"
+
+
+class ResponseReasoningSummaryPartDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_part.done"):
+ """Emitted when a reasoning summary part is completed.
+
+ :ivar type: The type of the event. Always ``response.reasoning_summary_part.done``. Required.
+ RESPONSE_REASONING_SUMMARY_PART_DONE.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_PART_DONE
+ :ivar item_id: The ID of the item this summary part is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this summary part is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+ :vartype summary_index: int
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ :ivar part: The completed summary part. Required.
+ :vartype part:
+ ~azure.ai.responses.server.sdk.models.models.ResponseReasoningSummaryPartDoneEventPart
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_summary_part.done``. Required.
+ RESPONSE_REASONING_SUMMARY_PART_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this summary part is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this summary part is associated with. Required."""
+ summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the summary part within the reasoning summary. Required."""
+ part: "_models.ResponseReasoningSummaryPartDoneEventPart" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The completed summary part. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ summary_index: int,
+ sequence_number: int,
+ part: "_models.ResponseReasoningSummaryPartDoneEventPart",
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE # type: ignore
+
+
+class ResponseReasoningSummaryPartDoneEventPart(_Model): # pylint: disable=name-too-long
+ """ResponseReasoningSummaryPartDoneEventPart.
+
+ :ivar type: Required. Default value is "summary_text".
+ :vartype type: str
+ :ivar text: Required.
+ :vartype text: str
+ """
+
+ type: Literal["summary_text"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required. Default value is \"summary_text\"."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ text: str,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["summary_text"] = "summary_text"
+
+
+class ResponseReasoningSummaryTextDeltaEvent(
+ ResponseStreamEvent, discriminator="response.reasoning_summary_text.delta"
+):
+ """Emitted when a delta is added to a reasoning summary text.
+
+ :ivar type: The type of the event. Always ``response.reasoning_summary_text.delta``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DELTA.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_TEXT_DELTA
+ :ivar item_id: The ID of the item this summary text delta is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this summary text delta is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+ :vartype summary_index: int
+ :ivar delta: The text delta that was added to the summary. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_summary_text.delta``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this summary text delta is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this summary text delta is associated with. Required."""
+ summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the summary part within the reasoning summary. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text delta that was added to the summary. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ summary_index: int,
+ delta: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA # type: ignore
+
+
+class ResponseReasoningSummaryTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_text.done"):
+ """Emitted when a reasoning summary text is completed.
+
+ :ivar type: The type of the event. Always ``response.reasoning_summary_text.done``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DONE.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_SUMMARY_TEXT_DONE
+ :ivar item_id: The ID of the item this summary text is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this summary text is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar summary_index: The index of the summary part within the reasoning summary. Required.
+ :vartype summary_index: int
+ :ivar text: The full text of the completed reasoning summary. Required.
+ :vartype text: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_summary_text.done``. Required.
+ RESPONSE_REASONING_SUMMARY_TEXT_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this summary text is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this summary text is associated with. Required."""
+ summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the summary part within the reasoning summary. Required."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The full text of the completed reasoning summary. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ summary_index: int,
+ text: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE # type: ignore
+
+
+class ResponseReasoningTextDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning_text.delta"):
+ """Emitted when a delta is added to a reasoning text.
+
+ :ivar type: The type of the event. Always ``response.reasoning_text.delta``. Required.
+ RESPONSE_REASONING_TEXT_DELTA.
+ :vartype type: str or
+ ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_TEXT_DELTA
+ :ivar item_id: The ID of the item this reasoning text delta is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this reasoning text delta is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the reasoning content part this delta is associated with.
+ Required.
+ :vartype content_index: int
+ :ivar delta: The text delta that was added to the reasoning content. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_text.delta``. Required.
+ RESPONSE_REASONING_TEXT_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this reasoning text delta is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this reasoning text delta is associated with. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the reasoning content part this delta is associated with. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text delta that was added to the reasoning content. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ delta: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_TEXT_DELTA # type: ignore
+
+
+class ResponseReasoningTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_text.done"):
+ """Emitted when a reasoning text is completed.
+
+ :ivar type: The type of the event. Always ``response.reasoning_text.done``. Required.
+ RESPONSE_REASONING_TEXT_DONE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_REASONING_TEXT_DONE
+ :ivar item_id: The ID of the item this reasoning text is associated with. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item this reasoning text is associated with.
+ Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the reasoning content part. Required.
+ :vartype content_index: int
+ :ivar text: The full text of the completed reasoning content. Required.
+ :vartype text: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REASONING_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.reasoning_text.done``. Required.
+ RESPONSE_REASONING_TEXT_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the item this reasoning text is associated with. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item this reasoning text is associated with. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the reasoning content part. Required."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The full text of the completed reasoning content. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ text: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REASONING_TEXT_DONE # type: ignore
+
+
+class ResponseRefusalDeltaEvent(ResponseStreamEvent, discriminator="response.refusal.delta"):
+ """Emitted when there is a partial refusal text.
+
+ :ivar type: The type of the event. Always ``response.refusal.delta``. Required.
+ RESPONSE_REFUSAL_DELTA.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_REFUSAL_DELTA
+ :ivar item_id: The ID of the output item that the refusal text is added to. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the refusal text is added to. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the refusal text is added to. Required.
+ :vartype content_index: int
+ :ivar delta: The refusal text that is added. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.refusal.delta``. Required. RESPONSE_REFUSAL_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the refusal text is added to. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the refusal text is added to. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the refusal text is added to. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The refusal text that is added. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ delta: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DELTA # type: ignore
+
+
+class ResponseRefusalDoneEvent(ResponseStreamEvent, discriminator="response.refusal.done"):
+ """Emitted when refusal text is finalized.
+
+ :ivar type: The type of the event. Always ``response.refusal.done``. Required.
+ RESPONSE_REFUSAL_DONE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_REFUSAL_DONE
+ :ivar item_id: The ID of the output item that the refusal text is finalized. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the refusal text is finalized. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the refusal text is finalized.
+ Required.
+ :vartype content_index: int
+ :ivar refusal: The refusal text that is finalized. Required.
+ :vartype refusal: str
+ :ivar sequence_number: The sequence number of this event. Required.
+ :vartype sequence_number: int
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.refusal.done``. Required. RESPONSE_REFUSAL_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the refusal text is finalized. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the refusal text is finalized. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the refusal text is finalized. Required."""
+ refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The refusal text that is finalized. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ refusal: str,
+ sequence_number: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DONE # type: ignore
+
+
+class ResponseStreamOptions(_Model):
+ """Options for streaming responses. Only set this when you set ``stream: true``.
+
+ :ivar include_obfuscation: When true, stream obfuscation will be enabled. Stream obfuscation
+ adds random characters to an ``obfuscation`` field on streaming delta events to normalize
+ payload sizes as a mitigation to certain side-channel attacks. These obfuscation fields are
+ included by default, but add a small amount of overhead to the data stream. You can set
+ ``include_obfuscation`` to false to optimize for bandwidth if you trust the network links
+ between your application and the OpenAI API.
+ :vartype include_obfuscation: bool
+ """
+
+ include_obfuscation: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """When true, stream obfuscation will be enabled. Stream obfuscation adds random characters to an
+ ``obfuscation`` field on streaming delta events to normalize payload sizes as a mitigation to
+ certain side-channel attacks. These obfuscation fields are included by default, but add a small
+ amount of overhead to the data stream. You can set ``include_obfuscation`` to false to optimize
+ for bandwidth if you trust the network links between your application and the OpenAI API."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ include_obfuscation: Optional[bool] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseTextDeltaEvent(ResponseStreamEvent, discriminator="response.output_text.delta"):
+ """Emitted when there is an additional text delta.
+
+ :ivar type: The type of the event. Always ``response.output_text.delta``. Required.
+ RESPONSE_OUTPUT_TEXT_DELTA.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_TEXT_DELTA
+ :ivar item_id: The ID of the output item that the text delta was added to. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the text delta was added to. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the text delta was added to. Required.
+ :vartype content_index: int
+ :ivar delta: The text delta that was added. Required.
+ :vartype delta: str
+ :ivar sequence_number: The sequence number for this event. Required.
+ :vartype sequence_number: int
+ :ivar logprobs: The log probabilities of the tokens in the delta. Required.
+ :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.ResponseLogProb]
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.output_text.delta``. Required.
+ RESPONSE_OUTPUT_TEXT_DELTA."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the text delta was added to. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the text delta was added to. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the text delta was added to. Required."""
+ delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text delta that was added. Required."""
+ logprobs: list["_models.ResponseLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The log probabilities of the tokens in the delta. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ delta: str,
+ sequence_number: int,
+ logprobs: list["_models.ResponseLogProb"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA # type: ignore
+
+
+class ResponseTextDoneEvent(ResponseStreamEvent, discriminator="response.output_text.done"):
+ """Emitted when text content is finalized.
+
+ :ivar type: The type of the event. Always ``response.output_text.done``. Required.
+ RESPONSE_OUTPUT_TEXT_DONE.
+ :vartype type: str or ~azure.ai.responses.server.sdk.models.models.RESPONSE_OUTPUT_TEXT_DONE
+ :ivar item_id: The ID of the output item that the text content is finalized. Required.
+ :vartype item_id: str
+ :ivar output_index: The index of the output item that the text content is finalized. Required.
+ :vartype output_index: int
+ :ivar content_index: The index of the content part that the text content is finalized.
+ Required.
+ :vartype content_index: int
+ :ivar text: The text content that is finalized. Required.
+ :vartype text: str
+ :ivar sequence_number: The sequence number for this event. Required.
+ :vartype sequence_number: int
+ :ivar logprobs: The log probabilities of the tokens in the delta. Required.
+ :vartype logprobs: list[~azure.ai.responses.server.sdk.models.models.ResponseLogProb]
+ """
+
+ type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the event. Always ``response.output_text.done``. Required.
+ RESPONSE_OUTPUT_TEXT_DONE."""
+ item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The ID of the output item that the text content is finalized. Required."""
+ output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the output item that the text content is finalized. Required."""
+ content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The index of the content part that the text content is finalized. Required."""
+ text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The text content that is finalized. Required."""
+ logprobs: list["_models.ResponseLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The log probabilities of the tokens in the delta. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ item_id: str,
+ output_index: int,
+ content_index: int,
+ text: str,
+ sequence_number: int,
+ logprobs: list["_models.ResponseLogProb"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE # type: ignore
+
+
+class ResponseTextParam(_Model):
+ """Configuration options for a text response from the model. Can be plain
+ text or structured JSON data. Learn more:
+
+ * [Text inputs and outputs](/docs/guides/text)
+ * [Structured Outputs](/docs/guides/structured-outputs).
+
+    :ivar format: The format configuration for the text response (plain text or structured JSON).
+ :vartype format: ~azure.ai.responses.server.sdk.models.models.TextResponseFormatConfiguration
+    :ivar verbosity: Is one of the following types: Literal["low"], Literal["medium"],
+     Literal["high"]
+    :vartype verbosity: str
+ """
+
+ format: Optional["_models.TextResponseFormatConfiguration"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ verbosity: Optional[Literal["low", "medium", "high"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Is one of the following types: Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]"""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ format: Optional["_models.TextResponseFormatConfiguration"] = None,
+ verbosity: Optional[Literal["low", "medium", "high"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseUsage(_Model):
+ """Represents token usage details including input tokens, output tokens, a breakdown of output
+ tokens, and the total tokens used.
+
+ :ivar input_tokens: The number of input tokens. Required.
+ :vartype input_tokens: int
+ :ivar input_tokens_details: A detailed breakdown of the input tokens. Required.
+ :vartype input_tokens_details:
+ ~azure.ai.responses.server.sdk.models.models.ResponseUsageInputTokensDetails
+ :ivar output_tokens: The number of output tokens. Required.
+ :vartype output_tokens: int
+ :ivar output_tokens_details: A detailed breakdown of the output tokens. Required.
+ :vartype output_tokens_details:
+ ~azure.ai.responses.server.sdk.models.models.ResponseUsageOutputTokensDetails
+ :ivar total_tokens: The total number of tokens used. Required.
+ :vartype total_tokens: int
+ """
+
+ input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The number of input tokens. Required."""
+ input_tokens_details: "_models.ResponseUsageInputTokensDetails" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A detailed breakdown of the input tokens. Required."""
+ output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The number of output tokens. Required."""
+ output_tokens_details: "_models.ResponseUsageOutputTokensDetails" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """A detailed breakdown of the output tokens. Required."""
+ total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The total number of tokens used. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ input_tokens: int,
+ input_tokens_details: "_models.ResponseUsageInputTokensDetails",
+ output_tokens: int,
+ output_tokens_details: "_models.ResponseUsageOutputTokensDetails",
+ total_tokens: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseUsageInputTokensDetails(_Model):
+ """ResponseUsageInputTokensDetails.
+
+ :ivar cached_tokens: Required.
+ :vartype cached_tokens: int
+ """
+
+ cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ cached_tokens: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class ResponseUsageOutputTokensDetails(_Model):
+ """ResponseUsageOutputTokensDetails.
+
+ :ivar reasoning_tokens: Required.
+ :vartype reasoning_tokens: int
+ """
+
+ reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ reasoning_tokens: int,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
class ResponseWebSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.web_search_call.completed"):
    """Streaming event raised once a web search call has finished.

    :ivar type: The event type; always ``response.web_search_call.completed``. Required.
     RESPONSE_WEB_SEARCH_CALL_COMPLETED.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_WEB_SEARCH_CALL_COMPLETED
    :ivar output_index: Index of the output item tied to this web search call. Required.
    :vartype output_index: int
    :ivar item_id: Unique ID of the output item tied to this web search call. Required.
    :vartype item_id: str
    :ivar sequence_number: Sequence number of the web search call being processed. Required.
    :vartype sequence_number: int
    """

    # Discriminator pinning this subclass to the "completed" web-search state.
    type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The event type; always ``response.web_search_call.completed``. Required.
    RESPONSE_WEB_SEARCH_CALL_COMPLETED."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Index of the output item tied to this web search call. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique ID of the output item tied to this web search call. Required."""
    # NOTE(review): sequence_number is accepted below but declared on a base class —
    # presumably ResponseStreamEvent; confirm against the base definition.

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED  # type: ignore
+
+
class ResponseWebSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.web_search_call.in_progress"):
    """Streaming event raised when a web search call starts.

    :ivar type: The event type; always ``response.web_search_call.in_progress``. Required.
     RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS
    :ivar output_index: Index of the output item tied to this web search call. Required.
    :vartype output_index: int
    :ivar item_id: Unique ID of the output item tied to this web search call. Required.
    :vartype item_id: str
    :ivar sequence_number: Sequence number of the web search call being processed. Required.
    :vartype sequence_number: int
    """

    # Discriminator pinning this subclass to the "in_progress" web-search state.
    type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The event type; always ``response.web_search_call.in_progress``. Required.
    RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Index of the output item tied to this web search call. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique ID of the output item tied to this web search call. Required."""
    # NOTE(review): sequence_number is accepted below but declared on a base class —
    # presumably ResponseStreamEvent; confirm against the base definition.

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS  # type: ignore
+
+
class ResponseWebSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.web_search_call.searching"):
    """Streaming event raised while a web search call is actively executing.

    :ivar type: The event type; always ``response.web_search_call.searching``. Required.
     RESPONSE_WEB_SEARCH_CALL_SEARCHING.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.RESPONSE_WEB_SEARCH_CALL_SEARCHING
    :ivar output_index: Index of the output item tied to this web search call. Required.
    :vartype output_index: int
    :ivar item_id: Unique ID of the output item tied to this web search call. Required.
    :vartype item_id: str
    :ivar sequence_number: Sequence number of the web search call being processed. Required.
    :vartype sequence_number: int
    """

    # Discriminator pinning this subclass to the "searching" web-search state.
    type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The event type; always ``response.web_search_call.searching``. Required.
    RESPONSE_WEB_SEARCH_CALL_SEARCHING."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Index of the output item tied to this web search call. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Unique ID of the output item tied to this web search call. Required."""
    # NOTE(review): sequence_number is accepted below but declared on a base class —
    # presumably ResponseStreamEvent; confirm against the base definition.

    @overload
    def __init__(
        self,
        *,
        output_index: int,
        item_id: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING  # type: ignore
+
+
class ScreenshotParam(ComputerAction, discriminator="screenshot"):
    """Computer action that captures a screenshot; carries no extra payload.

    :ivar type: Specifies the event type. For a screenshot action, this property is always set to
     ``screenshot``. Required. SCREENSHOT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SCREENSHOT
    """

    # Discriminator field: the only data this action carries.
    type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the event type. For a screenshot action, this property is always set to
    ``screenshot``. Required. SCREENSHOT."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ComputerActionType.SCREENSHOT  # type: ignore
+
+
class ScrollParam(ComputerAction, discriminator="scroll"):
    """Computer action describing a scroll gesture at a given point.

    :ivar type: Specifies the event type. For a scroll action, this property is always set to
     ``scroll``. Required. SCROLL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SCROLL
    :ivar x: The x-coordinate where the scroll occurred. Required.
    :vartype x: int
    :ivar y: The y-coordinate where the scroll occurred. Required.
    :vartype y: int
    :ivar scroll_x: The horizontal scroll distance. Required.
    :vartype scroll_x: int
    :ivar scroll_y: The vertical scroll distance. Required.
    :vartype scroll_y: int
    """

    # Discriminator pinning this subclass to the "scroll" action.
    type: Literal[ComputerActionType.SCROLL] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Specifies the event type. For a scroll action, this property is always set to ``scroll``.
    Required. SCROLL."""
    x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The x-coordinate where the scroll occurred. Required."""
    y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The y-coordinate where the scroll occurred. Required."""
    scroll_x: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The horizontal scroll distance. Required."""
    scroll_y: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The vertical scroll distance. Required."""

    @overload
    def __init__(
        self,
        *,
        x: int,
        y: int,
        scroll_x: int,
        scroll_y: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ComputerActionType.SCROLL  # type: ignore
+
+
class SharepointGroundingToolCall(OutputItem, discriminator="sharepoint_grounding_preview_call"):
    """Output item representing a call to the SharePoint grounding tool.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. SHAREPOINT_GROUNDING_PREVIEW_CALL.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.SHAREPOINT_GROUNDING_PREVIEW_CALL
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar arguments: A JSON string of the arguments to pass to the tool. Required.
    :vartype arguments: str
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator pinning this output item to the SharePoint grounding call kind.
    type: Literal[OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. SHAREPOINT_GROUNDING_PREVIEW_CALL."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A JSON string of the arguments to pass to the tool. Required."""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        arguments: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL  # type: ignore
+
+
class SharepointGroundingToolCallOutput(OutputItem, discriminator="sharepoint_grounding_preview_call_output"):
    """Output item carrying the result of a SharePoint grounding tool call.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT.
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT
    :ivar call_id: The unique ID of the tool call generated by the model. Required.
    :vartype call_id: str
    :ivar output: The output from the SharePoint grounding tool call. Is one of the following
     types: {str: Any}, str, [Any]
    :vartype output: dict[str, any] or str or list[any]
    :ivar status: The status of the tool call. Required. Known values are: "in_progress",
     "completed", "incomplete", and "failed".
    :vartype status: str or ~azure.ai.responses.server.sdk.models.models.ToolCallStatus
    """

    # Discriminator pinning this output item to the SharePoint grounding call-output kind.
    type: Literal[OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT."""
    call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique ID of the tool call generated by the model. Required."""
    output: Optional["_types.ToolCallOutputContent"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The output from the SharePoint grounding tool call. Is one of the following types: {str: Any},
    str, [Any]"""
    status: Union[str, "_models.ToolCallStatus"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The status of the tool call. Required. Known values are: \"in_progress\", \"completed\",
    \"incomplete\", and \"failed\"."""

    @overload
    def __init__(
        self,
        *,
        call_id: str,
        status: Union[str, "_models.ToolCallStatus"],
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
        output: Optional["_types.ToolCallOutputContent"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = OutputItemType.SHAREPOINT_GROUNDING_PREVIEW_CALL_OUTPUT  # type: ignore
+
+
class SharepointGroundingToolParameters(_Model):
    """Configuration parameters for the SharePoint grounding tool.

    :ivar project_connections: The project connections attached to this tool. There can be a
     maximum of 1 connection resource attached to the tool.
    :vartype project_connections:
     list[~azure.ai.responses.server.sdk.models.models.ToolProjectConnection]
    """

    project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The project connections attached to this tool. There can be a maximum of 1 connection resource
    attached to the tool."""

    @overload
    def __init__(
        self,
        *,
        project_connections: Optional[list["_models.ToolProjectConnection"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # No discriminator: this is a plain model, all wiring happens in _Model.
        super().__init__(*args, **kwargs)
+
+
class SharepointPreviewTool(Tool, discriminator="sharepoint_grounding_preview"):
    """Tool definition used to attach SharePoint grounding to an agent.

    :ivar type: The object type, which is always 'sharepoint_grounding_preview'. Required.
     SHAREPOINT_GROUNDING_PREVIEW.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHAREPOINT_GROUNDING_PREVIEW
    :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required.
    :vartype sharepoint_grounding_preview:
     ~azure.ai.responses.server.sdk.models.models.SharepointGroundingToolParameters
    """

    # Discriminator pinning this tool to the SharePoint grounding preview kind.
    type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The object type, which is always 'sharepoint_grounding_preview'. Required.
    SHAREPOINT_GROUNDING_PREVIEW."""
    sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """The sharepoint grounding tool parameters. Required."""

    @overload
    def __init__(
        self,
        *,
        sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW  # type: ignore
+
+
class SkillReferenceParam(ContainerSkill, discriminator="skill_reference"):
    """Container skill that points at a skill created via the /v1/skills endpoint.

    :ivar type: References a skill created with the /v1/skills endpoint. Required. SKILL_REFERENCE.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SKILL_REFERENCE
    :ivar skill_id: The ID of the referenced skill. Required.
    :vartype skill_id: str
    :ivar version: Optional skill version. Use a positive integer or 'latest'. Omit for default.
    :vartype version: str
    """

    # Discriminator pinning this container skill to the "skill_reference" kind.
    type: Literal[ContainerSkillType.SKILL_REFERENCE] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """References a skill created with the /v1/skills endpoint. Required. SKILL_REFERENCE."""
    skill_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the referenced skill. Required."""
    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Optional skill version. Use a positive integer or 'latest'. Omit for default."""

    @overload
    def __init__(
        self,
        *,
        skill_id: str,
        version: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ContainerSkillType.SKILL_REFERENCE  # type: ignore
+
+
class ToolChoiceParam(_Model):
    """Polymorphic base describing how the model should select which tool (or tools) to use when
    generating a response. See the ``tools`` parameter to see how to specify which tools the model
    can call.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ToolChoiceAllowed, SpecificApplyPatchParam, ToolChoiceCodeInterpreter,
    ToolChoiceComputerUsePreview, ToolChoiceCustom, ToolChoiceFileSearch, ToolChoiceFunction,
    ToolChoiceImageGeneration, ToolChoiceMCP, SpecificFunctionShellParam,
    ToolChoiceWebSearchPreview, ToolChoiceWebSearchPreview20250311

    :ivar type: Required. Known values are: "allowed_tools", "function", "mcp", "custom",
     "apply_patch", "shell", "file_search", "web_search_preview", "computer_use_preview",
     "web_search_preview_2025_03_11", "image_generation", and "code_interpreter".
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ToolChoiceParamType
    """

    # Registry of discriminator value -> subclass, populated by the framework.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"allowed_tools\", \"function\", \"mcp\", \"custom\",
    \"apply_patch\", \"shell\", \"file_search\", \"web_search_preview\", \"computer_use_preview\",
    \"web_search_preview_2025_03_11\", \"image_generation\", and \"code_interpreter\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Base class takes the discriminator explicitly; subclasses pin it instead.
        super().__init__(*args, **kwargs)
+
+
class SpecificApplyPatchParam(ToolChoiceParam, discriminator="apply_patch"):
    """Tool choice forcing the ``apply_patch`` tool; carries no extra payload.

    :ivar type: The tool to call. Always ``apply_patch``. Required. APPLY_PATCH.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.APPLY_PATCH
    """

    # Discriminator field: the only data this tool choice carries.
    type: Literal[ToolChoiceParamType.APPLY_PATCH] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The tool to call. Always ``apply_patch``. Required. APPLY_PATCH."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ToolChoiceParamType.APPLY_PATCH  # type: ignore
+
+
class SpecificFunctionShellParam(ToolChoiceParam, discriminator="shell"):
    """Tool choice forcing the ``shell`` tool; carries no extra payload.

    :ivar type: The tool to call. Always ``shell``. Required. SHELL.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SHELL
    """

    # Discriminator field: the only data this tool choice carries.
    type: Literal[ToolChoiceParamType.SHELL] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The tool to call. Always ``shell``. Required. SHELL."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = ToolChoiceParamType.SHELL  # type: ignore
+
+
class StructuredOutputDefinition(_Model):
    """Definition of a structured output the agent can produce.

    :ivar name: The name of the structured output. Required.
    :vartype name: str
    :ivar description: A description of the output to emit. Used by the model to determine when to
     emit the output. Required.
    :vartype description: str
    :ivar schema: The JSON schema for the structured output. Required.
    :vartype schema: dict[str, any]
    :ivar strict: Whether to enforce strict validation. Default ``true``. Required.
    :vartype strict: bool
    """

    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the structured output. Required."""
    description: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of the output to emit. Used by the model to determine when to emit the output.
    Required."""
    schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The JSON schema for the structured output. Required."""
    strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Whether to enforce strict validation. Default ``true``. Required."""

    @overload
    def __init__(
        self,
        *,
        name: str,
        description: str,
        schema: dict[str, Any],
        strict: bool,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # No discriminator: this is a plain model, all wiring happens in _Model.
        super().__init__(*args, **kwargs)
+
+
class StructuredOutputsOutputItem(OutputItem, discriminator="structured_outputs"):
    """Output item carrying a structured output captured during the response.

    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
     a str type.
    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
    :ivar agent_reference: The agent that created the item.
    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
    :ivar response_id: The response on which the item is created.
    :vartype response_id: str
    :ivar type: Required. STRUCTURED_OUTPUTS.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.STRUCTURED_OUTPUTS
    :ivar output: The structured output captured during the response. Required.
    :vartype output: any
    """

    # Discriminator pinning this output item to the "structured_outputs" kind.
    type: Literal[OutputItemType.STRUCTURED_OUTPUTS] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. STRUCTURED_OUTPUTS."""
    output: Any = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The structured output captured during the response. Required."""

    @overload
    def __init__(
        self,
        *,
        output: Any,
        created_by: Optional[Union["_models.CreatedBy", str]] = None,
        agent_reference: Optional["_models.AgentReference"] = None,
        response_id: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = OutputItemType.STRUCTURED_OUTPUTS  # type: ignore
+
+
class SummaryTextContent(MessageContent, discriminator="summary_text"):
    """Message content holding a summary of the model's reasoning output.

    :ivar type: The type of the object. Always ``summary_text``. Required. SUMMARY_TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.SUMMARY_TEXT
    :ivar text: A summary of the reasoning output from the model so far. Required.
    :vartype text: str
    """

    # Discriminator pinning this content to the "summary_text" kind.
    type: Literal[MessageContentType.SUMMARY_TEXT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of the object. Always ``summary_text``. Required. SUMMARY_TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A summary of the reasoning output from the model so far. Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = MessageContentType.SUMMARY_TEXT  # type: ignore
+
+
class TextContent(MessageContent, discriminator="text"):
    """Plain text message content.

    :ivar type: Required. TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TEXT
    :ivar text: Required.
    :vartype text: str
    """

    # Discriminator pinning this content to the "text" kind.
    type: Literal[MessageContentType.TEXT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """Required. TEXT."""
    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        text: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = MessageContentType.TEXT  # type: ignore
+
+
class TextResponseFormatConfiguration(_Model):
    """Polymorphic base for the format the model must output. Configuring ``{ "type":
    "json_schema" }`` enables Structured Outputs, which ensures the model will match your supplied
    JSON schema. Learn more in the `Structured Outputs guide `_.
    The default format is ``{ "type": "text" }`` with no additional options. *Not recommended for
    gpt-4o and newer models:** Setting to ``{ "type": "json_object" }`` enables the older JSON
    mode, which ensures the message the model generates is valid JSON. Using ``json_schema`` is
    preferred for models that support it.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    TextResponseFormatConfigurationResponseFormatJsonObject, TextResponseFormatJsonSchema,
    TextResponseFormatConfigurationResponseFormatText

    :ivar type: Required. Known values are: "text", "json_schema", and "json_object".
    :vartype type: str or
     ~azure.ai.responses.server.sdk.models.models.TextResponseFormatConfigurationType
    """

    # Registry of discriminator value -> subclass, populated by the framework.
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. Known values are: \"text\", \"json_schema\", and \"json_object\"."""

    @overload
    def __init__(
        self,
        *,
        type: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Base class takes the discriminator explicitly; subclasses pin it instead.
        super().__init__(*args, **kwargs)
+
+
class TextResponseFormatConfigurationResponseFormatJsonObject(
    TextResponseFormatConfiguration, discriminator="json_object"
):  # pylint: disable=name-too-long
    """Response format selecting legacy JSON mode; carries no extra payload.

    :ivar type: The type of response format being defined. Always ``json_object``. Required.
     JSON_OBJECT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.JSON_OBJECT
    """

    # Discriminator field: the only data this response format carries.
    type: Literal[TextResponseFormatConfigurationType.JSON_OBJECT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of response format being defined. Always ``json_object``. Required. JSON_OBJECT."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = TextResponseFormatConfigurationType.JSON_OBJECT  # type: ignore
+
+
class TextResponseFormatConfigurationResponseFormatText(
    TextResponseFormatConfiguration, discriminator="text"
):  # pylint: disable=name-too-long
    """Response format selecting plain text output; carries no extra payload.

    :ivar type: The type of response format being defined. Always ``text``. Required. TEXT.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TEXT
    """

    # Discriminator field: the only data this response format carries.
    type: Literal[TextResponseFormatConfigurationType.TEXT] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of response format being defined. Always ``text``. Required. TEXT."""

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = TextResponseFormatConfigurationType.TEXT  # type: ignore
+
+
class TextResponseFormatJsonSchema(TextResponseFormatConfiguration, discriminator="json_schema"):
    """Response format enforcing a supplied JSON schema (Structured Outputs).

    :ivar type: The type of response format being defined. Always ``json_schema``. Required.
     JSON_SCHEMA.
    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.JSON_SCHEMA
    :ivar description: A description of what the response format is for, used by the model to
     determine how to respond in the format.
    :vartype description: str
    :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores and
     dashes, with a maximum length of 64. Required.
    :vartype name: str
    :ivar schema: Required.
    :vartype schema: ~azure.ai.responses.server.sdk.models.models.ResponseFormatJsonSchemaSchema
    :ivar strict:
    :vartype strict: bool
    """

    # Discriminator pinning this response format to the "json_schema" kind.
    type: Literal[TextResponseFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(  # type: ignore
        name="type", visibility=["read", "create", "update", "delete", "query"]
    )
    """The type of response format being defined. Always ``json_schema``. Required. JSON_SCHEMA."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A description of what the response format is for, used by the model to determine how to respond
    in the format."""
    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with
    a maximum length of 64. Required."""
    schema: "_models.ResponseFormatJsonSchemaSchema" = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Required."""
    strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])

    @overload
    def __init__(
        self,
        *,
        name: str,
        schema: "_models.ResponseFormatJsonSchemaSchema",
        description: Optional[str] = None,
        strict: Optional[bool] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Force the discriminator value regardless of what the payload carried.
        self.type = TextResponseFormatConfigurationType.JSON_SCHEMA  # type: ignore
+
+
+class ToolChoiceAllowed(ToolChoiceParam, discriminator="allowed_tools"):
+    """Allowed tools.
+
+    :ivar type: Allowed tool configuration type. Always ``allowed_tools``. Required. ALLOWED_TOOLS.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.ALLOWED_TOOLS
+    :ivar mode: Constrains the tools available to the model to a pre-defined set. ``auto`` allows
+     the model to pick from among the allowed tools and generate a message. ``required`` requires
+     the model to call one or more of the allowed tools. Required. Either ``"auto"`` or
+     ``"required"``.
+    :vartype mode: str
+    :ivar tools: A list of tool definitions that the model should be allowed to call. For the
+     Responses API, the list of tool definitions might look like:
+
+     .. code-block:: json
+
+        [
+        { "type": "function", "name": "get_weather" },
+        { "type": "mcp", "server_label": "deepwiki" },
+        { "type": "image_generation" }
+        ]. Required.
+    :vartype tools: list[dict[str, any]]
+    """
+
+    type: Literal[ToolChoiceParamType.ALLOWED_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Allowed tool configuration type. Always ``allowed_tools``. Required. ALLOWED_TOOLS."""
+    mode: Literal["auto", "required"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Constrains the tools available to the model to a pre-defined set. ``auto`` allows the model to
+    pick from among the allowed tools and generate a message. ``required`` requires the model to
+    call one or more of the allowed tools. Required. Is either a Literal[\"auto\"] type or a
+    Literal[\"required\"] type."""
+    tools: list[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A list of tool definitions that the model should be allowed to call. For the Responses API, the
+    list of tool definitions might look like:
+
+    .. code-block:: json
+
+       [
+       { \"type\": \"function\", \"name\": \"get_weather\" },
+       { \"type\": \"mcp\", \"server_label\": \"deepwiki\" },
+       { \"type\": \"image_generation\" }
+       ]. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        mode: Literal["auto", "required"],
+        tools: list[dict[str, Any]],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="allowed_tools".
+        self.type = ToolChoiceParamType.ALLOWED_TOOLS  # type: ignore
+
+
+class ToolChoiceCodeInterpreter(ToolChoiceParam, discriminator="code_interpreter"):
+    """Indicates that the model should use a built-in tool to generate a response. See the
+    service documentation to learn more about built-in tools.
+
+    :ivar type: Required. CODE_INTERPRETER.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CODE_INTERPRETER
+    """
+
+    type: Literal[ToolChoiceParamType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. CODE_INTERPRETER."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="code_interpreter".
+        self.type = ToolChoiceParamType.CODE_INTERPRETER  # type: ignore
+
+
+class ToolChoiceComputerUsePreview(ToolChoiceParam, discriminator="computer_use_preview"):
+    """Indicates that the model should use a built-in tool to generate a response. See the
+    service documentation to learn more about built-in tools.
+
+    :ivar type: Required. COMPUTER_USE_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.COMPUTER_USE_PREVIEW
+    """
+
+    type: Literal[ToolChoiceParamType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. COMPUTER_USE_PREVIEW."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="computer_use_preview".
+        self.type = ToolChoiceParamType.COMPUTER_USE_PREVIEW  # type: ignore
+
+
+class ToolChoiceCustom(ToolChoiceParam, discriminator="custom"):
+    """Custom tool.
+
+    :ivar type: For custom tool calling, the type is always ``custom``. Required. CUSTOM.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.CUSTOM
+    :ivar name: The name of the custom tool to call. Required.
+    :vartype name: str
+    """
+
+    type: Literal[ToolChoiceParamType.CUSTOM] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """For custom tool calling, the type is always ``custom``. Required. CUSTOM."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the custom tool to call. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="custom".
+        self.type = ToolChoiceParamType.CUSTOM  # type: ignore
+
+
+class ToolChoiceFileSearch(ToolChoiceParam, discriminator="file_search"):
+    """Indicates that the model should use a built-in tool to generate a response. See the
+    service documentation to learn more about built-in tools.
+
+    :ivar type: Required. FILE_SEARCH.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FILE_SEARCH
+    """
+
+    type: Literal[ToolChoiceParamType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. FILE_SEARCH."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="file_search".
+        self.type = ToolChoiceParamType.FILE_SEARCH  # type: ignore
+
+
+class ToolChoiceFunction(ToolChoiceParam, discriminator="function"):
+    """Function tool.
+
+    :ivar type: For function calling, the type is always ``function``. Required. FUNCTION.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.FUNCTION
+    :ivar name: The name of the function to call. Required.
+    :vartype name: str
+    """
+
+    type: Literal[ToolChoiceParamType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """For function calling, the type is always ``function``. Required. FUNCTION."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of the function to call. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="function".
+        self.type = ToolChoiceParamType.FUNCTION  # type: ignore
+
+
+class ToolChoiceImageGeneration(ToolChoiceParam, discriminator="image_generation"):
+    """Indicates that the model should use a built-in tool to generate a response. See the
+    service documentation to learn more about built-in tools.
+
+    :ivar type: Required. IMAGE_GENERATION.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.IMAGE_GENERATION
+    """
+
+    type: Literal[ToolChoiceParamType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. IMAGE_GENERATION."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="image_generation".
+        self.type = ToolChoiceParamType.IMAGE_GENERATION  # type: ignore
+
+
+class ToolChoiceMCP(ToolChoiceParam, discriminator="mcp"):
+    """MCP tool.
+
+    :ivar type: For MCP tools, the type is always ``mcp``. Required. MCP.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.MCP
+    :ivar server_label: The label of the MCP server to use. Required.
+    :vartype server_label: str
+    :ivar name: Optional tool name. Presumably narrows the choice to a specific tool exposed by
+     the MCP server — confirm against the service API reference.
+    :vartype name: str
+    """
+
+    type: Literal[ToolChoiceParamType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """For MCP tools, the type is always ``mcp``. Required. MCP."""
+    server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The label of the MCP server to use. Required."""
+    # Optional; semantics not documented by the generator (see class docstring note).
+    name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+    @overload
+    def __init__(
+        self,
+        *,
+        server_label: str,
+        name: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="mcp".
+        self.type = ToolChoiceParamType.MCP  # type: ignore
+
+
+class ToolChoiceWebSearchPreview(ToolChoiceParam, discriminator="web_search_preview"):
+    """Indicates that the model should use a built-in tool to generate a response. See the
+    service documentation to learn more about built-in tools.
+
+    :ivar type: Required. WEB_SEARCH_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_PREVIEW
+    """
+
+    type: Literal[ToolChoiceParamType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. WEB_SEARCH_PREVIEW."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="web_search_preview".
+        self.type = ToolChoiceParamType.WEB_SEARCH_PREVIEW  # type: ignore
+
+
+class ToolChoiceWebSearchPreview20250311(ToolChoiceParam, discriminator="web_search_preview_2025_03_11"):
+    """Indicates that the model should use a built-in tool to generate a response. See the
+    service documentation to learn more about built-in tools.
+
+    :ivar type: Required. WEB_SEARCH_PREVIEW2025_03_11.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_PREVIEW2025_03_11
+    """
+
+    type: Literal[ToolChoiceParamType.WEB_SEARCH_PREVIEW2025_03_11] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. WEB_SEARCH_PREVIEW2025_03_11."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="web_search_preview_2025_03_11".
+        self.type = ToolChoiceParamType.WEB_SEARCH_PREVIEW2025_03_11  # type: ignore
+
+
+class ToolProjectConnection(_Model):
+    """A project connection resource.
+
+    :ivar project_connection_id: A project connection in a ToolProjectConnectionList attached to
+     this tool. Required.
+    :vartype project_connection_id: str
+    """
+
+    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """A project connection in a ToolProjectConnectionList attached to this tool. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # All field handling is delegated to the _Model base machinery.
+        super().__init__(*args, **kwargs)
+
+
+class TopLogProb(_Model):
+ """Top log probability.
+
+ :ivar token: Required.
+ :vartype token: str
+ :ivar logprob: Required.
+ :vartype logprob: int
+ :ivar bytes: Required.
+ :vartype bytes: list[int]
+ """
+
+ token: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ logprob: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+ bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ token: str,
+ logprob: int,
+ bytes: list[int],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class TypeParam(ComputerAction, discriminator="type"):
+    """Type (keyboard text entry) computer action.
+
+    :ivar type: Specifies the event type. For a type action, this property is always set to
+     ``type``. Required. TYPE.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.TYPE
+    :ivar text: The text to type. Required.
+    :vartype text: str
+    """
+
+    type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Specifies the event type. For a type action, this property is always set to ``type``. Required.
+    TYPE."""
+    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The text to type. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        text: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="type".
+        self.type = ComputerActionType.TYPE  # type: ignore
+
+
+class UrlCitationBody(Annotation, discriminator="url_citation"):
+    """URL citation.
+
+    :ivar type: The type of the URL citation. Always ``url_citation``. Required. URL_CITATION.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.URL_CITATION
+    :ivar url: The URL of the web resource. Required.
+    :vartype url: str
+    :ivar start_index: The index of the first character of the URL citation in the message.
+     Required.
+    :vartype start_index: int
+    :ivar end_index: The index of the last character of the URL citation in the message. Required.
+    :vartype end_index: int
+    :ivar title: The title of the web resource. Required.
+    :vartype title: str
+    """
+
+    type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the URL citation. Always ``url_citation``. Required. URL_CITATION."""
+    url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The URL of the web resource. Required."""
+    start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the first character of the URL citation in the message. Required."""
+    end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the last character of the URL citation in the message. Required."""
+    title: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The title of the web resource. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        url: str,
+        start_index: int,
+        end_index: int,
+        title: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="url_citation".
+        self.type = AnnotationType.URL_CITATION  # type: ignore
+
+
+class UserProfileMemoryItem(MemoryItem, discriminator="user_profile"):
+    """A memory item specifically containing user profile information extracted from conversations,
+    such as preferences, interests, and personal details.
+
+    :ivar memory_id: The unique ID of the memory item. Required.
+    :vartype memory_id: str
+    :ivar updated_at: The last update time of the memory item. Required.
+    :vartype updated_at: ~datetime.datetime
+    :ivar scope: The namespace that logically groups and isolates memories, such as a user ID.
+     Required.
+    :vartype scope: str
+    :ivar content: The content of the memory. Required.
+    :vartype content: str
+    :ivar kind: The kind of the memory item. Required. User profile information extracted from
+     conversations.
+    :vartype kind: str or ~azure.ai.responses.server.sdk.models.models.USER_PROFILE
+    """
+
+    # memory_id/updated_at/scope/content are inherited from MemoryItem; only the
+    # discriminator is declared here.
+    kind: Literal[MemoryItemKind.USER_PROFILE] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The kind of the memory item. Required. User profile information extracted from conversations."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        memory_id: str,
+        updated_at: datetime.datetime,
+        scope: str,
+        content: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry kind="user_profile".
+        self.kind = MemoryItemKind.USER_PROFILE  # type: ignore
+
+
+class VectorStoreFileAttributes(_Model):
+    """Set of 16 key-value pairs that can be attached to an object. This can be useful for storing
+    additional information about the object in a structured format, and querying for objects via
+    API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are
+    strings with a maximum length of 512 characters, booleans, or numbers.
+
+    No fields are declared here: the attribute map is free-form, so the raw key-value
+    pairs are carried by the ``_Model`` base machinery rather than typed fields.
+    """
+
+
+class WaitParam(ComputerAction, discriminator="wait"):
+    """Wait computer action (no parameters beyond the discriminator).
+
+    :ivar type: Specifies the event type. For a wait action, this property is always set to
+     ``wait``. Required. WAIT.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WAIT
+    """
+
+    type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Specifies the event type. For a wait action, this property is always set to ``wait``. Required.
+    WAIT."""
+
+    @overload
+    def __init__(
+        self,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="wait".
+        self.type = ComputerActionType.WAIT  # type: ignore
+
+
+class WebSearchActionFind(_Model):
+    """Find action.
+
+    :ivar type: The action type. Required. Default value is "find_in_page".
+    :vartype type: str
+    :ivar url: The URL of the page searched for the pattern. Required.
+    :vartype url: str
+    :ivar pattern: The pattern or text to search for within the page. Required.
+    :vartype pattern: str
+    """
+
+    type: Literal["find_in_page"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The action type. Required. Default value is \"find_in_page\"."""
+    url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The URL of the page searched for the pattern. Required."""
+    pattern: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The pattern or text to search for within the page. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        url: str,
+        pattern: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a fixed constant for this model, so it is assigned unconditionally.
+        self.type: Literal["find_in_page"] = "find_in_page"
+
+
+class WebSearchActionOpenPage(_Model):
+    """Open page action.
+
+    :ivar type: The action type. Required. Default value is "open_page".
+    :vartype type: str
+    :ivar url: The URL opened by the model.
+    :vartype url: str
+    """
+
+    type: Literal["open_page"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The action type. Required. Default value is \"open_page\"."""
+    url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The URL opened by the model."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        url: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a fixed constant for this model, so it is assigned unconditionally.
+        self.type: Literal["open_page"] = "open_page"
+
+
+class WebSearchActionSearch(_Model):
+    """Search action.
+
+    :ivar type: The action type. Required. Default value is "search".
+    :vartype type: str
+    :ivar query: [DEPRECATED] The search query. Required. Still required by the wire format
+     despite the deprecation; prefer ``queries`` for new data.
+    :vartype query: str
+    :ivar queries: Search queries.
+    :vartype queries: list[str]
+    :ivar sources: Web search sources.
+    :vartype sources:
+     list[~azure.ai.responses.server.sdk.models.models.WebSearchActionSearchSources]
+    """
+
+    type: Literal["search"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The action type. Required. Default value is \"search\"."""
+    query: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """[DEPRECATED] The search query. Required."""
+    queries: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Search queries."""
+    sources: Optional[list["_models.WebSearchActionSearchSources"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Web search sources."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        query: str,
+        queries: Optional[list[str]] = None,
+        sources: Optional[list["_models.WebSearchActionSearchSources"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a fixed constant for this model, so it is assigned unconditionally.
+        self.type: Literal["search"] = "search"
+
+
+class WebSearchActionSearchSources(_Model):
+    """A single web source consulted by a search action.
+
+    :ivar type: Required. Default value is "url".
+    :vartype type: str
+    :ivar url: The source URL. Required.
+    :vartype url: str
+    """
+
+    type: Literal["url"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required. Default value is \"url\"."""
+    url: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        url: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a fixed constant for this model, so it is assigned unconditionally.
+        self.type: Literal["url"] = "url"
+
+
+class WebSearchApproximateLocation(_Model):
+    """Web search approximate location.
+
+    :ivar type: The type of location approximation. Always ``approximate``. Required. Default value
+     is "approximate".
+    :vartype type: str
+    :ivar country: Approximate user country (presumably a two-letter code — confirm against the
+     service API reference).
+    :vartype country: str
+    :ivar region: Approximate user region (free text; format not enforced here).
+    :vartype region: str
+    :ivar city: Approximate user city (free text; format not enforced here).
+    :vartype city: str
+    :ivar timezone: User timezone (presumably an IANA timezone name — confirm against the service
+     API reference).
+    :vartype timezone: str
+    """
+
+    type: Literal["approximate"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The type of location approximation. Always ``approximate``. Required. Default value is
+    \"approximate\"."""
+    # All location fields are optional free-form strings; no validation is applied here.
+    country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+    @overload
+    def __init__(
+        self,
+        *,
+        country: Optional[str] = None,
+        region: Optional[str] = None,
+        city: Optional[str] = None,
+        timezone: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # "type" is a fixed constant for this model, so it is assigned unconditionally.
+        self.type: Literal["approximate"] = "approximate"
+
+
+class WebSearchConfiguration(_Model):
+    """A web search configuration for bing custom search.
+
+    :ivar project_connection_id: Project connection id for grounding with bing custom search.
+     Required.
+    :vartype project_connection_id: str
+    :ivar instance_name: Name of the custom configuration instance given to config. Required.
+    :vartype instance_name: str
+    """
+
+    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Project connection id for grounding with bing custom search. Required."""
+    instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Name of the custom configuration instance given to config. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: str,
+        instance_name: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # All field handling is delegated to the _Model base machinery.
+        super().__init__(*args, **kwargs)
+
+
+class WebSearchPreviewTool(Tool, discriminator="web_search_preview"):
+    """Web search preview tool definition.
+
+    :ivar type: The type of the web search tool. One of ``web_search_preview`` or
+     ``web_search_preview_2025_03_11``. Required. WEB_SEARCH_PREVIEW.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH_PREVIEW
+    :ivar user_location: Approximate user location used to localize search results.
+    :vartype user_location: ~azure.ai.responses.server.sdk.models.models.ApproximateLocation
+    :ivar search_context_size: High level guidance for the amount of context window space to use
+     for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Known
+     values are: "low", "medium", and "high".
+    :vartype search_context_size: str or
+     ~azure.ai.responses.server.sdk.models.models.SearchContextSize
+    """
+
+    type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the web search tool. One of ``web_search_preview`` or
+    ``web_search_preview_2025_03_11``. Required. WEB_SEARCH_PREVIEW."""
+    user_location: Optional["_models.ApproximateLocation"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    search_context_size: Optional[Union[str, "_models.SearchContextSize"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """High level guidance for the amount of context window space to use for the search. One of
+    ``low``, ``medium``, or ``high``. ``medium`` is the default. Known values are: \"low\",
+    \"medium\", and \"high\"."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        user_location: Optional["_models.ApproximateLocation"] = None,
+        search_context_size: Optional[Union[str, "_models.SearchContextSize"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="web_search_preview".
+        self.type = ToolType.WEB_SEARCH_PREVIEW  # type: ignore
+
+
+class WebSearchTool(Tool, discriminator="web_search"):
+    """Web search tool definition.
+
+    :ivar type: The type of the web search tool. One of ``web_search`` or
+     ``web_search_2025_08_26``. Required. WEB_SEARCH.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WEB_SEARCH
+    :ivar filters: Filters applied to web search results (e.g. allowed domains).
+    :vartype filters: ~azure.ai.responses.server.sdk.models.models.WebSearchToolFilters
+    :ivar user_location: Approximate user location used to localize search results.
+    :vartype user_location:
+     ~azure.ai.responses.server.sdk.models.models.WebSearchApproximateLocation
+    :ivar search_context_size: High level guidance for the amount of context window space to use
+     for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default.
+    :vartype search_context_size: str
+    :ivar custom_search_configuration: The project connections attached to this tool. There can be
+     a maximum of 1 connection resource attached to the tool.
+    :vartype custom_search_configuration:
+     ~azure.ai.responses.server.sdk.models.models.WebSearchConfiguration
+    """
+
+    type: Literal[ToolType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the web search tool. One of ``web_search`` or ``web_search_2025_08_26``. Required.
+    WEB_SEARCH."""
+    filters: Optional["_models.WebSearchToolFilters"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    user_location: Optional["_models.WebSearchApproximateLocation"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """High level guidance for the amount of context window space to use for the search. One of
+    ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types:
+    Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]"""
+    custom_search_configuration: Optional["_models.WebSearchConfiguration"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The project connections attached to this tool. There can be a maximum of 1 connection resource
+    attached to the tool."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        filters: Optional["_models.WebSearchToolFilters"] = None,
+        user_location: Optional["_models.WebSearchApproximateLocation"] = None,
+        search_context_size: Optional[Literal["low", "medium", "high"]] = None,
+        custom_search_configuration: Optional["_models.WebSearchConfiguration"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="web_search".
+        self.type = ToolType.WEB_SEARCH  # type: ignore
+
+
+class WebSearchToolFilters(_Model):
+    """Filters applied to web search results.
+
+    :ivar allowed_domains: Domains that web search results may come from; ``None`` means no
+     domain restriction is applied.
+    :vartype allowed_domains: list[str]
+    """
+
+    # Optional domain allow-list; no normalization or validation is applied here.
+    allowed_domains: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+    @overload
+    def __init__(
+        self,
+        *,
+        allowed_domains: Optional[list[str]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # All field handling is delegated to the _Model base machinery.
+        super().__init__(*args, **kwargs)
+
+
+class WorkflowActionOutputItem(OutputItem, discriminator="workflow_action"):
+    """Output item describing a single workflow (CSDL) action execution.
+
+    :ivar created_by: The information about the creator of the item. Is either a CreatedBy type or
+     a str type.
+    :vartype created_by: ~azure.ai.responses.server.sdk.models.models.CreatedBy or str
+    :ivar agent_reference: The agent that created the item.
+    :vartype agent_reference: ~azure.ai.responses.server.sdk.models.models.AgentReference
+    :ivar response_id: The response on which the item is created.
+    :vartype response_id: str
+    :ivar type: Required. WORKFLOW_ACTION.
+    :vartype type: str or ~azure.ai.responses.server.sdk.models.models.WORKFLOW_ACTION
+    :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required.
+    :vartype kind: str
+    :ivar action_id: Unique identifier for the action. Required.
+    :vartype action_id: str
+    :ivar parent_action_id: ID of the parent action if this is a nested action.
+    :vartype parent_action_id: str
+    :ivar previous_action_id: ID of the previous action if this action follows another.
+    :vartype previous_action_id: str
+    :ivar status: Status of the action. Required. One of ``"completed"``, ``"failed"``,
+     ``"in_progress"``, or ``"cancelled"``.
+    :vartype status: str
+    """
+
+    type: Literal[OutputItemType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required. WORKFLOW_ACTION."""
+    kind: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required."""
+    action_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Unique identifier for the action. Required."""
+    parent_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """ID of the parent action if this is a nested action."""
+    previous_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """ID of the previous action if this action follows another."""
+    status: Literal["completed", "failed", "in_progress", "cancelled"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). Required. Is
+    one of the following types: Literal[\"completed\"], Literal[\"failed\"],
+    Literal[\"in_progress\"], Literal[\"cancelled\"]"""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        kind: str,
+        action_id: str,
+        status: Literal["completed", "failed", "in_progress", "cancelled"],
+        created_by: Optional[Union["_models.CreatedBy", str]] = None,
+        agent_reference: Optional["_models.AgentReference"] = None,
+        response_id: Optional[str] = None,
+        parent_action_id: Optional[str] = None,
+        previous_action_id: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        # Pin the discriminator so serialized payloads always carry type="workflow_action".
+        self.type = OutputItemType.WORKFLOW_ACTION  # type: ignore
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_patch.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_patch.py
new file mode 100644
index 000000000000..87676c65a8f0
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/models/_patch.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+
+
+__all__: list[str] = [] # Add all objects you want publicly available to users at this package level
+
+
def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
    # NOTE(review): intentionally a no-op; presumably invoked by the generated
    # package __init__ after regeneration — confirm against the package layout.
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/py.typed b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/py.typed
new file mode 100644
index 000000000000..e5aff4f83af8
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/_generated/sdk/models/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/errors.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/errors.py
new file mode 100644
index 000000000000..e9959ba5416c
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/errors.py
@@ -0,0 +1,54 @@
+"""Error model types for request validation failures."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from .._generated import ApiErrorResponse as ApiErrorResponseType
+ from .._generated import Error as ErrorType
+else:
+ ApiErrorResponseType = Any
+ ErrorType = Any
+
# Import the generated wire models; fall back to a poisoned placeholder so this
# module remains importable (e.g. for isolated unit tests) when code generation
# has not been run. Any attempt to *construct* the placeholder raises loudly.
try:
    from .._generated import ApiErrorResponse, Error
except Exception:  # pragma: no cover - allows isolated unit testing when generated deps are unavailable.
    class _GeneratedUnavailable:
        # Stand-in that fails with a descriptive error on first use.
        def __init__(self, *_args: Any, **_kwargs: Any) -> None:
            raise ModuleNotFoundError(
                "generated contract models are unavailable; run generation to restore runtime dependencies"
            )

    ApiErrorResponse = _GeneratedUnavailable  # type: ignore[assignment]
    Error = _GeneratedUnavailable  # type: ignore[assignment]
+
+
+@dataclass(slots=True)
+class RequestValidationError(ValueError):
+ """Represents a client-visible request validation failure."""
+
+ message: str
+ code: str = "invalid_request"
+ param: str | None = None
+ error_type: str = "invalid_request_error"
+ debug_info: dict[str, Any] | None = None
+
+ def __post_init__(self) -> None:
+ """Initialize the parent :class:`ValueError` message."""
+ ValueError.__init__(self, self.message)
+
+ def to_error(self) -> ErrorType:
+ """Convert this validation error to the generated ``Error`` model."""
+ return Error(
+ code=self.code,
+ message=self.message,
+ param=self.param,
+ type=self.error_type,
+ debug_info=self.debug_info,
+ )
+
+ def to_api_error_response(self) -> ApiErrorResponseType:
+ """Convert this validation error to the generated API error envelope."""
+ return ApiErrorResponse(error=self.to_error())
diff --git a/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/runtime.py b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/runtime.py
new file mode 100644
index 000000000000..b4a5a895458c
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/azure/ai/responses/server/models/runtime.py
@@ -0,0 +1,129 @@
+"""Runtime domain models for response sessions and stream events."""
+
+from __future__ import annotations
+
+import asyncio
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from typing import Any, Literal, Mapping
+
+from .._generated import Response, ResponseStreamEvent
+
+ResponseStatus = Literal["queued", "in_progress", "completed", "failed", "cancelled", "incomplete"]
+TerminalResponseStatus = Literal["completed", "failed", "cancelled", "incomplete"]
+
+
@dataclass(slots=True)
class ResponseModeFlags:
    """Execution mode flags captured from the create request."""

    # Mirrors the request's `stream` flag (server-sent-event delivery requested).
    stream: bool
    # Mirrors the request's `store` flag (persist the response for later retrieval).
    store: bool
    # Mirrors the request's `background` flag (asynchronous/background execution).
    background: bool
+
+
+@dataclass(slots=True)
+class StreamEventRecord:
+ """A persisted record for one emitted stream event."""
+
+ sequence_number: int
+ event_type: str
+ payload: Mapping[str, Any]
+ emitted_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+
+ @property
+ def terminal(self) -> bool:
+ """Return True when this event is one of the terminal response events."""
+ return self.event_type in {
+ "response.completed",
+ "response.failed",
+ "response.cancelled",
+ "response.incomplete",
+ }
+
+ @classmethod
+ def from_generated(cls, event: ResponseStreamEvent, payload: Mapping[str, Any]) -> "StreamEventRecord":
+ """Create a stream event record from a generated response stream event model."""
+ return cls(sequence_number=event.sequence_number, event_type=event.type, payload=payload)
+
+
+@dataclass(slots=True)
+class ResponseExecution:
+ """Lightweight pipeline state for one response execution.
+
+ This type intentionally does not own persisted stream history. Stream replay
+ concerns are modeled separately in :class:`StreamReplayState`.
+ """
+
+ response_id: str
+ mode_flags: ResponseModeFlags
+ created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+ updated_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
+ completed_at: datetime | None = None
+ status: ResponseStatus = "queued"
+ response: Response | None = None
+ execution_task: asyncio.Task[Any] | None = None
+ cancel_requested: bool = False
+ client_disconnected: bool = False
+ response_created_seen: bool = False
+
+ def transition_to(self, next_status: ResponseStatus) -> None:
+ """Transition this execution to a valid lifecycle status.
+
+ :raises ValueError: If the requested transition is not allowed.
+ """
+ allowed: dict[ResponseStatus, set[ResponseStatus]] = {
+ "queued": {"in_progress", "failed"},
+ "in_progress": {"completed", "failed", "cancelled", "incomplete"},
+ "completed": set(),
+ "failed": set(),
+ "cancelled": set(),
+ "incomplete": set(),
+ }
+
+ if next_status == self.status:
+ self.updated_at = datetime.now(timezone.utc)
+ return
+
+ if next_status not in allowed[self.status]:
+ raise ValueError(f"invalid status transition: {self.status} -> {next_status}")
+
+ self.status = next_status
+ now = datetime.now(timezone.utc)
+ self.updated_at = now
+ if self.is_terminal:
+ self.completed_at = now
+
+ @property
+ def is_terminal(self) -> bool:
+ """Return whether the execution has reached a terminal state."""
+ return self.status in {"completed", "failed", "cancelled", "incomplete"}
+
+ def set_response_snapshot(self, response: Response) -> None:
+ """Replace the current response snapshot from handler-emitted events."""
+ self.response = response
+ self.updated_at = datetime.now(timezone.utc)
+
+
+@dataclass(slots=True)
+class StreamReplayState:
+ """Persisted stream replay state for one response identifier."""
+
+ response_id: str
+ events: list[StreamEventRecord] = field(default_factory=list)
+
+ def append(self, event: StreamEventRecord) -> None:
+ """Append a stream event and enforce replay sequence integrity."""
+ if self.events and event.sequence_number <= self.events[-1].sequence_number:
+ raise ValueError("stream event sequence numbers must be strictly increasing")
+
+ if self.events and self.events[-1].terminal:
+ raise ValueError("cannot append events after a terminal event")
+
+ self.events.append(event)
+
+ @property
+ def terminal_event_seen(self) -> bool:
+ """Return whether replay state has already recorded a terminal event."""
+ return bool(self.events and self.events[-1].terminal)
+
diff --git a/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/__init__.py b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/__init__.py
new file mode 100644
index 000000000000..013008e395b4
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility re-exports for generated models preserved under sdk/models."""
+
+from .sdk.models.models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_enums.py b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_enums.py
new file mode 100644
index 000000000000..ffeb0d1362db
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_enums.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated enum symbols."""
+
+from .sdk.models.models._enums import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_models.py b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_models.py
new file mode 100644
index 000000000000..8c6878d69796
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_models.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated model symbols."""
+
+from .sdk.models.models._models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_patch.py b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_patch.py
new file mode 100644
index 000000000000..3d222c31c566
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/_patch.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Compatibility shim for generated patch helpers."""
+
+from .sdk.models.models._patch import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/sdk_models__init__.py b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/sdk_models__init__.py
new file mode 100644
index 000000000000..784a3edcc881
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/scripts/generated_shims/sdk_models__init__.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
+
+"""Model-only generated package surface."""
+
+from .models import * # type: ignore # noqa: F401,F403
diff --git a/sdk/agentserver/azure-ai-responses-server/scripts/validator_emitter.py b/sdk/agentserver/azure-ai-responses-server/scripts/validator_emitter.py
new file mode 100644
index 000000000000..ccc1330faa9f
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/scripts/validator_emitter.py
@@ -0,0 +1,421 @@
+"""Emitter that builds deterministic Python validator modules from schemas."""
+
+from __future__ import annotations
+
+from typing import Any
+
+
+def _sanitize_identifier(name: str) -> str:
+ normalized = "".join(ch if ch.isalnum() else "_" for ch in name)
+ while "__" in normalized:
+ normalized = normalized.replace("__", "_")
+ normalized = normalized.strip("_")
+ return normalized or "schema"
+
+
+def _resolve_ref(ref: str) -> str:
+ return ref.rsplit("/", 1)[-1]
+
+
+def _ordered(value: Any) -> Any:
+ if isinstance(value, dict):
+ return {k: _ordered(value[k]) for k in sorted(value)}
+ if isinstance(value, list):
+ return [_ordered(v) for v in value]
+ return value
+
+
+def _header() -> str:
+ return (
+ "# pylint: disable=line-too-long,useless-suppression,too-many-lines\n"
+ "# coding=utf-8\n"
+ "# --------------------------------------------------------------------------\n"
+ "# Copyright (c) Microsoft Corporation. All rights reserved.\n"
+ "# Licensed under the MIT License. See License.txt in the project root for license information.\n"
+ "# Code generated by Microsoft (R) Python Code Generator.\n"
+ "# Changes may cause incorrect behavior and will be lost if the code is regenerated.\n"
+ "# --------------------------------------------------------------------------\n"
+ )
+
+
+def _schema_kind(schema: dict[str, Any]) -> str | None:
+ schema_type = schema.get("type")
+ if isinstance(schema_type, str):
+ return schema_type
+ if "properties" in schema or "additionalProperties" in schema or "discriminator" in schema:
+ return "object"
+ if "oneOf" in schema or "anyOf" in schema:
+ return "union"
+ return None
+
+
def build_validator_module(schemas: dict[str, dict[str, Any]], roots: list[str]) -> str:
    """Build generated validator module source code without runtime schema blobs.

    :param schemas: Mapping of schema name -> OpenAPI schema dict ($refs are
        resolved by name within this mapping).
    :param roots: Schema names to expose as public validator entry points; when
        empty, every schema in ``schemas`` becomes a root.
    :return: Source text of a standalone Python module with one ``_validate_*``
        function per reachable schema, plus a ``<Root>Validator`` class and a
        ``validate_<root>`` wrapper for each root.
    """
    # Canonicalize key order up front so the emitted output is deterministic.
    ordered_schemas = _ordered(schemas)
    target_roots = sorted(dict.fromkeys(roots)) if roots else sorted(ordered_schemas)

    # Static runtime preamble of the generated module: small helpers shared by
    # every emitted validator (error recording, JSON-type checks, enum lookup).
    lines: list[str] = [_header(), "", "from __future__ import annotations", "", "from typing import Any", ""]
    lines.extend(
        [
            "try:",
            "    from . import _enums as _generated_enums",
            "except Exception:",
            "    _generated_enums = None",
            "",
            "def _append_error(errors: list[dict[str, str]], path: str, message: str) -> None:",
            "    errors.append({'path': path, 'message': message})",
            "",
            "def _type_label(value: Any) -> str:",
            "    if value is None:",
            "        return 'null'",
            "    if isinstance(value, bool):",
            "        return 'boolean'",
            "    if isinstance(value, int):",
            "        return 'integer'",
            "    if isinstance(value, float):",
            "        return 'number'",
            "    if isinstance(value, str):",
            "        return 'string'",
            "    if isinstance(value, dict):",
            "        return 'object'",
            "    if isinstance(value, list):",
            "        return 'array'",
            "    return type(value).__name__",
            "",
            "def _is_type(value: Any, expected: str) -> bool:",
            "    if expected == 'string':",
            "        return isinstance(value, str)",
            "    if expected == 'integer':",
            "        return isinstance(value, int) and not isinstance(value, bool)",
            "    if expected == 'number':",
            "        return (isinstance(value, int) and not isinstance(value, bool)) or isinstance(value, float)",
            "    if expected == 'boolean':",
            "        return isinstance(value, bool)",
            "    if expected == 'object':",
            "        return isinstance(value, dict)",
            "    if expected == 'array':",
            "        return isinstance(value, list)",
            "    return True",
            "",
            "def _append_type_mismatch(errors: list[dict[str, str]], path: str, expected: str, value: Any) -> None:",
            "    _append_error(errors, path, f\"Expected {expected}, got {_type_label(value)}\")",
            "",
            "def _enum_values(enum_name: str) -> tuple[tuple[str, ...] | None, str | None]:",
            "    if _generated_enums is None:",
            "        return None, f'enum type _enums.{enum_name} is unavailable'",
            "    enum_cls = getattr(_generated_enums, enum_name, None)",
            "    if enum_cls is None:",
            "        return None, f'enum type _enums.{enum_name} is not defined'",
            "    try:",
            "        return tuple(str(member.value) for member in enum_cls), None",
            "    except Exception:",
            "        return None, f'enum type _enums.{enum_name} failed to load values'",
            "",
        ]
    )

    # Emission registry:
    #   function_schemas: fn name -> schema it validates
    #   function_hints:   fn name -> logical schema-name hint (drives enum-class lookup)
    #   function_order:   emission order (grows during rendering; see while loop below)
    #   anonymous_by_key: canonical repr of an inline schema -> fn name (dedupe)
    function_schemas: dict[str, dict[str, Any]] = {}
    function_hints: dict[str, str | None] = {}
    function_order: list[str] = []
    anonymous_by_key: dict[str, str] = {}

    def make_unique_function_name(hint: str | None) -> str:
        # Derive a collision-free "_validate_<hint>" name, suffixing _2, _3, ...
        base = _sanitize_identifier(hint or "branch")
        candidate = f"_validate_{base}"
        if candidate not in function_schemas:
            return candidate

        suffix = 2
        while True:
            candidate = f"_validate_{base}_{suffix}"
            if candidate not in function_schemas:
                return candidate
            suffix += 1

    def ensure_schema_function(schema_name: str) -> str:
        # Register (once) the validator function for a named schema; returns its name.
        fn_name = f"_validate_{_sanitize_identifier(schema_name)}"
        if fn_name not in function_schemas:
            schema = ordered_schemas.get(schema_name)
            if isinstance(schema, dict):
                function_schemas[fn_name] = schema
                function_hints[fn_name] = schema_name
                function_order.append(fn_name)
        return fn_name

    def ensure_anonymous_function(schema: dict[str, Any], hint: str | None = None) -> str:
        # Register a validator for an inline (unnamed) schema, de-duplicated by
        # canonical repr so identical inline schemas share one emitted function.
        key = repr(_ordered(schema))
        if key in anonymous_by_key:
            existing = anonymous_by_key[key]
            # Upgrade a hint-less registration if a better hint arrives later.
            if function_hints.get(existing) is None and hint is not None:
                function_hints[existing] = hint
            return existing
        fn_name = make_unique_function_name(hint)
        anonymous_by_key[key] = fn_name
        function_schemas[fn_name] = schema
        function_hints[fn_name] = hint
        function_order.append(fn_name)
        return fn_name

    for root in target_roots:
        ensure_schema_function(root)

    def emit_line(block: list[str], indent: int, text: str) -> None:
        # One space per indent level in the emitted module.
        block.append((" " * indent) + text)

    def emit_union(
        schema: dict[str, Any],
        block: list[str],
        indent: int,
        value_expr: str,
        path_expr: str,
        errors_expr: str,
        schema_name_hint: str | None,
    ) -> None:
        # Emit first-match union validation: probe each branch whose JSON type
        # matches; accept on the first branch that records no errors.
        branches = schema.get("oneOf", schema.get("anyOf", []))
        branch_funcs: list[tuple[str, str]] = []
        expected_labels: list[str] = []
        has_inline_enum_branch = False

        for branch in branches:
            if not isinstance(branch, dict):
                continue

            if "$ref" in branch:
                ref_name = _resolve_ref(str(branch["$ref"]))
                ref_schema = ordered_schemas.get(ref_name)
                if isinstance(ref_schema, dict):
                    branch_funcs.append((ensure_schema_function(ref_name), _schema_kind(ref_schema) or "value"))
                    expected_labels.append(ref_name)
                # NOTE(review): a $ref to an unknown schema is silently dropped here —
                # confirm this is intended rather than an emission-time error.
                continue

            if schema_name_hint and "enum" in branch:
                # Keep enum branches tied to the logical schema name so enum-class resolution stays stable.
                branch_hint = schema_name_hint
                has_inline_enum_branch = True
            else:
                branch_type = branch.get("type") if isinstance(branch.get("type"), str) else (_schema_kind(branch) or "branch")
                branch_hint = f"{schema_name_hint}_{branch_type}" if schema_name_hint else str(branch_type)
            fn_name = ensure_anonymous_function(branch, hint=branch_hint)
            branch_funcs.append((fn_name, _schema_kind(branch) or "value"))
            label = branch.get("type") if isinstance(branch.get("type"), str) else (_schema_kind(branch) or "value")
            expected_labels.append(str(label))

        if not branch_funcs:
            return

        emit_line(block, indent, "_matched_union = False")
        for idx, (fn_name, kind) in enumerate(branch_funcs):
            # Branches without a concrete JSON type are always probed ("True" guard).
            condition = "True" if kind in ("value", "union", None) else f"_is_type({value_expr}, {kind!r})"
            emit_line(block, indent, f"if not _matched_union and {condition}:")
            emit_line(block, indent + 1, f"_branch_errors_{idx}: list[dict[str, str]] = []")
            emit_line(block, indent + 1, f"{fn_name}({value_expr}, {path_expr}, _branch_errors_{idx})")
            emit_line(block, indent + 1, f"if not _branch_errors_{idx}:")
            emit_line(block, indent + 2, "_matched_union = True")

        unique_expected_labels = list(dict.fromkeys(expected_labels))
        emit_line(block, indent, "if not _matched_union:")
        if len(unique_expected_labels) == 1:
            only_label = unique_expected_labels[0]
            if schema_name_hint and only_label == "string" and has_inline_enum_branch:
                # Pure enum unions name the enum type in the error message.
                schema_label = schema_name_hint.rsplit(".", 1)[-1]
                emit_line(
                    block,
                    indent + 1,
                    f"_append_error({errors_expr}, {path_expr}, f\"Expected {schema_label} to be a string value, got {{_type_label({value_expr})}}\")",
                )
            else:
                emit_line(block, indent + 1, f"_append_error({errors_expr}, {path_expr}, 'Expected {only_label}')")
        else:
            expected = ", ".join(unique_expected_labels) if unique_expected_labels else "valid branch"
            emit_line(
                block,
                indent + 1,
                f"_append_error({errors_expr}, {path_expr}, f\"Expected one of: {expected}; got {{_type_label({value_expr})}}\")",
            )
        emit_line(block, indent + 1, "return")

    def emit_schema_body(
        schema: dict[str, Any],
        block: list[str],
        indent: int,
        value_expr: str,
        path_expr: str,
        errors_expr: str,
        schema_name_hint: str | None = None,
    ) -> None:
        # Emission order: nullable short-circuit, $ref delegation, enum membership,
        # union dispatch, then JSON-type check plus array/object structural checks.
        if schema.get("nullable"):
            emit_line(block, indent, f"if {value_expr} is None:")
            emit_line(block, indent + 1, "return")

        if "$ref" in schema:
            ref_name = _resolve_ref(str(schema["$ref"]))
            ref_schema = ordered_schemas.get(ref_name)
            if isinstance(ref_schema, dict):
                emit_line(block, indent, f"{ensure_schema_function(ref_name)}({value_expr}, {path_expr}, {errors_expr})")
            return

        if "enum" in schema:
            allowed = tuple(schema.get("enum", []))
            enum_class_name = None
            if schema_name_hint:
                # Prefer resolving allowed values from the generated _enums module
                # when the hint names an enum-like schema, so validators stay in
                # sync with enum regeneration.
                hint_schema = ordered_schemas.get(schema_name_hint)
                hint_is_enum_like = False
                if isinstance(hint_schema, dict):
                    if "enum" in hint_schema:
                        hint_is_enum_like = True
                    else:
                        for combo in ("oneOf", "anyOf"):
                            branches = hint_schema.get(combo, [])
                            if isinstance(branches, list) and any(
                                isinstance(b, dict) and "enum" in b for b in branches
                            ):
                                hint_is_enum_like = True
                                break
                if hint_is_enum_like:
                    candidate = schema_name_hint.rsplit(".", 1)[-1]
                    if candidate and candidate[0].isalpha():
                        enum_class_name = candidate

            if enum_class_name:
                emit_line(
                    block,
                    indent,
                    f"_allowed_values, _enum_error = _enum_values({enum_class_name!r})",
                )
                emit_line(block, indent, "if _enum_error is not None:")
                emit_line(block, indent + 1, f"_append_error({errors_expr}, {path_expr}, _enum_error)")
                emit_line(block, indent + 1, "return")
                emit_line(block, indent, "if _allowed_values is None:")
                emit_line(block, indent + 1, "return")
            else:
                # No resolvable enum class: inline the literal allowed values.
                emit_line(block, indent, f"_allowed_values = {allowed!r}")
            emit_line(block, indent, f"if {value_expr} not in _allowed_values:")
            emit_line(
                block,
                indent + 1,
                f"_append_error({errors_expr}, {path_expr}, f\"Invalid value '{{{value_expr}}}'. Allowed: {{', '.join(str(v) for v in _allowed_values)}}\")",
            )

        if "oneOf" in schema or "anyOf" in schema:
            emit_union(schema, block, indent, value_expr, path_expr, errors_expr, schema_name_hint)
            return

        schema_type = schema.get("type")
        effective_type = schema_type if isinstance(schema_type, str) else _schema_kind(schema)

        if isinstance(effective_type, str) and effective_type not in ("value", "union"):
            emit_line(block, indent, f"if not _is_type({value_expr}, {effective_type!r}):")
            emit_line(block, indent + 1, f"_append_type_mismatch({errors_expr}, {path_expr}, {effective_type!r}, {value_expr})")
            emit_line(block, indent + 1, "return")

        if effective_type == "array":
            items = schema.get("items")
            if isinstance(items, dict):
                item_hint = f"{schema_name_hint}_item" if schema_name_hint else "item"
                item_fn = ensure_anonymous_function(items, hint=item_hint)
                emit_line(block, indent, f"for _idx, _item in enumerate({value_expr}):")
                emit_line(block, indent + 1, f"{item_fn}(_item, f\"{{{path_expr}}}[{{_idx}}]\", {errors_expr})")
            return

        if effective_type == "object":
            properties = schema.get("properties", {})
            required = schema.get("required", [])
            if isinstance(properties, dict):
                for field in required:
                    emit_line(block, indent, f"if {field!r} not in {value_expr}:")
                    emit_line(
                        block,
                        indent + 1,
                        f"_append_error({errors_expr}, f\"{{{path_expr}}}.{field}\", \"Required property '{field}' is missing\")",
                    )

                for field, field_schema in sorted(properties.items()):
                    if not isinstance(field_schema, dict):
                        continue
                    field_hint = f"{schema_name_hint}_{field}" if schema_name_hint else field
                    field_fn = ensure_anonymous_function(field_schema, hint=field_hint)
                    emit_line(block, indent, f"if {field!r} in {value_expr}:")
                    emit_line(
                        block,
                        indent + 1,
                        f"{field_fn}({value_expr}[{field!r}], f\"{{{path_expr}}}.{field}\", {errors_expr})",
                    )

            addl = schema.get("additionalProperties")
            if isinstance(addl, dict):
                addl_hint = f"{schema_name_hint}_additional_property" if schema_name_hint else "additional_property"
                addl_fn = ensure_anonymous_function(addl, hint=addl_hint)
                known = tuple(sorted(properties.keys())) if isinstance(properties, dict) else tuple()
                emit_line(block, indent, f"for _key, _item in {value_expr}.items():")
                emit_line(block, indent + 1, f"if _key not in {known!r}:")
                emit_line(block, indent + 2, f"{addl_fn}(_item, f\"{{{path_expr}}}.{{_key}}\", {errors_expr})")

            disc = schema.get("discriminator")
            if isinstance(disc, dict):
                prop = disc.get("propertyName", "type")
                mapping = disc.get("mapping", {})
                emit_line(block, indent, f"_disc_value = {value_expr}.get({prop!r})")
                emit_line(block, indent, f"if not isinstance(_disc_value, str):")
                emit_line(
                    block,
                    indent + 1,
                    f"_append_error({errors_expr}, f\"{{{path_expr}}}.{prop}\", \"Required discriminator '{prop}' is missing or invalid\")",
                )
                emit_line(block, indent + 1, "return")

                # NOTE(review): an unmapped discriminator value falls through with no
                # error recorded — confirm that is the intended behavior.
                for disc_value, ref in sorted(mapping.items()):
                    if not isinstance(ref, str):
                        continue
                    ref_name = _resolve_ref(ref)
                    ref_schema = ordered_schemas.get(ref_name)
                    if not isinstance(ref_schema, dict):
                        continue
                    ref_fn = ensure_schema_function(ref_name)
                    emit_line(block, indent, f"if _disc_value == {disc_value!r}:")
                    emit_line(block, indent + 1, f"{ref_fn}({value_expr}, {path_expr}, {errors_expr})")

    # Render with an index loop (not a for-each): emit_schema_body may register
    # new functions while rendering, growing function_order as we go.
    rendered_blocks: dict[str, list[str]] = {}
    idx = 0
    while idx < len(function_order):
        fn_name = function_order[idx]
        idx += 1
        schema = function_schemas[fn_name]
        block: list[str] = [f"def {fn_name}(value: Any, path: str, errors: list[dict[str, str]]) -> None:"]
        schema_name_hint = function_hints.get(fn_name)
        emit_schema_body(schema, block, 1, "value", "path", "errors", schema_name_hint=schema_name_hint)
        if len(block) == 1:
            # Schema produced no checks; keep the emitted function syntactically valid.
            emit_line(block, 1, "return")
        rendered_blocks[fn_name] = block

    for fn_name in function_order:
        lines.extend(rendered_blocks[fn_name])
        lines.append("")

    lines.append("ROOT_SCHEMAS = " + repr(target_roots))
    lines.append("")

    # Public surface: one <Root>Validator class plus a validate_<root> wrapper per root.
    for root in target_roots:
        class_name = f"{_sanitize_identifier(root)}Validator"
        fn_name = f"_validate_{_sanitize_identifier(root)}"
        lines.append(f"class {class_name}:")
        lines.append("    \"\"\"Generated validator for the root schema.\"\"\"")
        lines.append("")
        lines.append("    @staticmethod")
        lines.append("    def validate(payload: Any) -> list[dict[str, str]]:")
        lines.append("        errors: list[dict[str, str]] = []")
        lines.append(f"        {fn_name}(payload, '$', errors)")
        lines.append("        return errors")
        lines.append("")

        wrapper_name = f"validate_{_sanitize_identifier(root)}"
        lines.append(f"def {wrapper_name}(payload: Any) -> list[dict[str, str]]:")
        lines.append(f"    return {class_name}.validate(payload)")
        lines.append("")

    if not target_roots:
        # Degenerate case (no schemas at all): emit a no-op validator.
        lines.append("def validate_payload(payload: Any) -> list[dict[str, str]]:")
        lines.append("    _ = payload")
        lines.append("    return []")
        lines.append("")

    return "\n".join(lines).rstrip() + "\n"
diff --git a/sdk/agentserver/azure-ai-responses-server/tests/unit/test_generated_payload_validation.py b/sdk/agentserver/azure-ai-responses-server/tests/unit/test_generated_payload_validation.py
new file mode 100644
index 000000000000..7a9a85fe3bc2
--- /dev/null
+++ b/sdk/agentserver/azure-ai-responses-server/tests/unit/test_generated_payload_validation.py
@@ -0,0 +1,176 @@
+"""Unit tests for generated payload validator integration in parse flow."""
+
+from __future__ import annotations
+
+import types
+from pathlib import Path
+
+import pytest
+
+from azure.ai.responses.server import _validation
+from azure.ai.responses.server._validation import RequestValidationError, parse_create_response
+
+
+class _StubCreateResponse:
+ def __init__(self, payload: object) -> None:
+ data = payload if isinstance(payload, dict) else {}
+ self.model = data.get("model")
+
+
+class _StubGeneratedValidators:
+ @staticmethod
+ def validate_CreateResponse(_payload: object) -> list[dict[str, str]]:
+ return [{"path": "$.model", "message": "Required property 'model' is missing"}]
+
+
+class _PassGeneratedValidators:
+ @staticmethod
+ def validate_CreateResponse(_payload: object) -> list[dict[str, str]]:
+ return []
+
+
+def _load_generated_validators_module() -> types.ModuleType:
+ validators_path = (
+ Path(__file__).resolve().parents[2] / "azure" / "ai" / "responses" / "server" / "_generated" / "_validators.py"
+ )
+ module = types.ModuleType("generated_validators_runtime")
+ exec(validators_path.read_text(encoding="utf-8"), module.__dict__)
+ return module
+
+
+def test_parse_create_response_uses_generated_payload_validator(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setattr(_validation, "CreateResponse", _StubCreateResponse)
+ monkeypatch.setattr(_validation, "_generated_validators", _StubGeneratedValidators)
+
+ with pytest.raises(RequestValidationError) as exc_info:
+ parse_create_response({})
+
+ error = exc_info.value
+ assert error.code == "invalid_request"
+ assert error.debug_info is not None
+ assert error.debug_info.get("errors") == [{"path": "$.model", "message": "Required property 'model' is missing"}]
+
+
+def test_parse_create_response_allows_valid_payload_when_generated_checks_pass(
+ monkeypatch: pytest.MonkeyPatch,
+) -> None:
+ monkeypatch.setattr(_validation, "CreateResponse", _StubCreateResponse)
+ monkeypatch.setattr(_validation, "_generated_validators", _PassGeneratedValidators)
+
+ parsed = parse_create_response({"model": "gpt-4o"})
+ assert parsed.model == "gpt-4o"
+
+
+def test_parse_create_response_without_generated_module_still_parses() -> None:
+ module = _validation._generated_validators
+ original_create_response = _validation.CreateResponse
+ try:
+ _validation.CreateResponse = _StubCreateResponse
+ _validation._generated_validators = None
+ parsed = parse_create_response({"model": "gpt-4o"})
+ assert parsed.model == "gpt-4o"
+ finally:
+ _validation.CreateResponse = original_create_response
+ _validation._generated_validators = module
+
+
+def test_generated_create_response_validator_accepts_string_input() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": "hello world",
+ }
+ )
+ assert errors == []
+
+
+def test_generated_create_response_validator_accepts_array_input_items() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": [{"type": "message"}],
+ }
+ )
+ assert errors == []
+
+
+def test_generated_create_response_validator_rejects_non_string_non_array_input() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": 123,
+ }
+ )
+ assert any(e["path"] == "$.input" and "Expected one of: string, array" in e["message"] for e in errors)
+
+
+def test_generated_create_response_validator_rejects_non_object_input_item() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": [123],
+ }
+ )
+ assert any(e["path"] == "$.input" and "Expected one of: string, array" in e["message"] for e in errors)
+
+
+def test_generated_create_response_validator_rejects_input_item_missing_type() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": [{}],
+ }
+ )
+ assert any(e["path"] == "$.input" and "Expected one of: string, array" in e["message"] for e in errors)
+
+
+def test_generated_create_response_validator_rejects_input_item_type_with_wrong_primitive() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": [{"type": 1}],
+ }
+ )
+ assert any(e["path"] == "$.input" and "Expected one of: string, array" in e["message"] for e in errors)
+
+
+@pytest.mark.parametrize(
+ "item_type",
+ [
+ "message",
+ "item_reference",
+ "function_call_output",
+ "computer_call_output",
+ "apply_patch_call_output",
+ ],
+)
+def test_generated_create_response_validator_accepts_multiple_input_item_types(item_type: str) -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": [{"type": item_type}],
+ }
+ )
+ assert errors == []
+
+
+def test_generated_create_response_validator_accepts_mixed_input_item_types() -> None:
+ validators = _load_generated_validators_module()
+ errors = validators.validate_CreateResponse(
+ {
+ "model": "gpt-4o",
+ "input": [
+ {"type": "message"},
+ {"type": "item_reference"},
+ {"type": "function_call_output"},
+ ],
+ }
+ )
+ assert errors == []