diff --git a/.semversioner/next-release/patch-20260206194808781905.json b/.semversioner/next-release/patch-20260206194808781905.json
new file mode 100644
index 000000000..6f6638f43
--- /dev/null
+++ b/.semversioner/next-release/patch-20260206194808781905.json
@@ -0,0 +1,4 @@
+{
+    "type": "patch",
+    "description": "Remove unnecessary response format check. Fixes: #2203"
+}
diff --git a/packages/graphrag-llm/graphrag_llm/completion/completion.py b/packages/graphrag-llm/graphrag_llm/completion/completion.py
index 0debab273..be8ee4ae1 100644
--- a/packages/graphrag-llm/graphrag_llm/completion/completion.py
+++ b/packages/graphrag-llm/graphrag_llm/completion/completion.py
@@ -77,17 +77,6 @@ def __init__(
         """
         raise NotImplementedError
 
-    @abstractmethod
-    def supports_structured_response(self) -> bool:
-        """Whether the completion supports structured responses.
-
-        Returns
-        -------
-        bool:
-            True if structured responses are supported, False otherwise.
-        """
-        raise NotImplementedError
-
     @abstractmethod
     def completion(
         self,
diff --git a/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py b/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py
index 794296604..dd62d459d 100644
--- a/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py
+++ b/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py
@@ -8,7 +8,7 @@
 
 import litellm
 from azure.identity import DefaultAzureCredential, get_bearer_token_provider
-from litellm import ModelResponse, supports_response_schema  # type: ignore
+from litellm import ModelResponse  # type: ignore
 
 from graphrag_llm.completion.completion import LLMCompletion
 from graphrag_llm.config.types import AuthMethod
@@ -128,10 +128,6 @@ def __init__(
             retrier=self._retrier,
         )
 
-    def supports_structured_response(self) -> bool:
-        """Check if the model supports structured response."""
-        return supports_response_schema(self._model_id)
-
     def completion(
         self,
         /,
@@ -140,9 +136,6 @@ def completion(
         """Sync completion method."""
         messages: LLMCompletionMessagesParam = kwargs.pop("messages")
         response_format = kwargs.pop("response_format", None)
-        if response_format and not self.supports_structured_response():
-            msg = f"Model '{self._model_id}' does not support response schemas."
-            raise ValueError(msg)
 
         is_streaming = kwargs.get("stream") or False
 
@@ -182,11 +175,6 @@ async def completion_async(
         """Async completion method."""
         messages: LLMCompletionMessagesParam = kwargs.pop("messages")
         response_format = kwargs.pop("response_format", None)
-        if response_format and not supports_response_schema(
-            self._model_id,
-        ):
-            msg = f"Model '{self._model_id}' does not support response schemas."
-            raise ValueError(msg)
 
         is_streaming = kwargs.get("stream") or False
 
diff --git a/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py b/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py
index c1e29fcfc..7cdcc319e 100644
--- a/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py
+++ b/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py
@@ -87,10 +87,6 @@ def __init__(
 
         self._mock_responses = mock_responses  # type: ignore
 
-    def supports_structured_response(self) -> bool:
-        """Check if the model supports structured response."""
-        return True
-
     def completion(
         self,
         /,
diff --git a/packages/graphrag-llm/notebooks/03_structured_responses.ipynb b/packages/graphrag-llm/notebooks/03_structured_responses.ipynb
index f01499d2d..80f6af037 100644
--- a/packages/graphrag-llm/notebooks/03_structured_responses.ipynb
+++ b/packages/graphrag-llm/notebooks/03_structured_responses.ipynb
@@ -12,7 +12,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 3,
    "id": "a79c242b",
    "metadata": {},
    "outputs": [
@@ -22,10 +22,10 @@
      "text": [
       "City: Seattle\n",
       "  Temperature: 11.1 °C\n",
-      "  Condition: sunny\n",
+      "  Condition: Sunny\n",
       "City: San Francisco\n",
       "  Temperature: 23.9 °C\n",
-      "  Condition: cloudy\n"
+      "  Condition: Cloudy\n"
      ]
     }
    ],
@@ -84,32 +84,6 @@
     "    print(f\"  Condition: {report.condition}\")"
    ]
   },
-  {
-   "cell_type": "markdown",
-   "id": "6dcfa20c",
-   "metadata": {},
-   "source": [
-    "## Checking for support\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "aa1edadb",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Supports structured responses: True\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f\"Supports structured responses: {llm_completion.supports_structured_response()}\")"
-   ]
-  },
   {
    "cell_type": "markdown",
    "id": "6360f512",
diff --git a/packages/graphrag-llm/pyproject.toml b/packages/graphrag-llm/pyproject.toml
index 883b03f6b..62e4915c0 100644
--- a/packages/graphrag-llm/pyproject.toml
+++ b/packages/graphrag-llm/pyproject.toml
@@ -32,7 +32,7 @@ classifiers = [
     "Programming Language :: Python :: 3.13",
 ]
 dependencies = [
-    "azure-identity~=1.19.0",
+    "azure-identity~=1.25",
     "graphrag-cache==3.0.1",
     "graphrag-common==3.0.1",
     "jinja2~=3.1",
diff --git a/packages/graphrag-storage/pyproject.toml b/packages/graphrag-storage/pyproject.toml
index e3b5388d3..2b64532f1 100644
--- a/packages/graphrag-storage/pyproject.toml
+++ b/packages/graphrag-storage/pyproject.toml
@@ -32,7 +32,7 @@ classifiers = [
 dependencies = [
     "aiofiles~=24.1",
     "azure-cosmos~=4.9",
-    "azure-identity~=1.19",
+    "azure-identity~=1.25",
     "azure-storage-blob~=12.24",
     "graphrag-common==3.0.1",
     "pandas~=2.3",
diff --git a/packages/graphrag-vectors/pyproject.toml b/packages/graphrag-vectors/pyproject.toml
index 1ff3255dd..05739af78 100644
--- a/packages/graphrag-vectors/pyproject.toml
+++ b/packages/graphrag-vectors/pyproject.toml
@@ -32,7 +32,7 @@ classifiers = [
 dependencies = [
     "azure-core~=1.32",
     "azure-cosmos~=4.9",
-    "azure-identity~=1.19",
+    "azure-identity~=1.25",
     "azure-search-documents~=11.6",
     "graphrag-common==3.0.1",
     "lancedb~=0.24.1",
diff --git a/packages/graphrag/pyproject.toml b/packages/graphrag/pyproject.toml
index cbb768fd7..a825b5b97 100644
--- a/packages/graphrag/pyproject.toml
+++ b/packages/graphrag/pyproject.toml
@@ -32,7 +32,7 @@ classifiers = [
 ]
 
 dependencies = [
-    "azure-identity~=1.19",
+    "azure-identity~=1.25",
     "azure-search-documents~=11.5",
"azure-storage-blob~=12.24", "devtools~=0.12", diff --git a/tests/integration/language_model/test_factory.py b/tests/integration/language_model/test_factory.py index 526cb3e8d..428586bf0 100644 --- a/tests/integration/language_model/test_factory.py +++ b/tests/integration/language_model/test_factory.py @@ -38,9 +38,6 @@ class CustomChatModel(LLMCompletion): def __init__(self, **kwargs): pass - def supports_structured_response(self) -> bool: - return True - def completion( self, /, diff --git a/uv.lock b/uv.lock index 1801e0eca..7bc020e7c 100644 --- a/uv.lock +++ b/uv.lock @@ -266,7 +266,7 @@ wheels = [ [[package]] name = "azure-identity" -version = "1.19.0" +version = "1.25.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core" }, @@ -275,9 +275,9 @@ dependencies = [ { name = "msal-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/91/cbaeff9eb0b838f0d35b4607ac1c6195c735c8eb17db235f8f60e622934c/azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83", size = 263058, upload-time = "2024-10-08T15:41:33.554Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826, upload-time = "2025-10-06T20:30:02.194Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/d5/3995ed12f941f4a41a273d9b1709282e825ef87ed8eab3833038fee54d59/azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81", size = 187587, upload-time = "2024-10-08T15:41:36.423Z" }, + { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317, upload-time = "2025-10-06T20:30:04.251Z" }, ] [[package]] @@ -1054,7 +1054,7 @@ dependencies = [ [package.metadata] requires-dist = [ - { name = "azure-identity", specifier = "~=1.19" }, + { name = "azure-identity", specifier = "~=1.25" }, { name = "azure-search-documents", specifier = "~=11.5" }, { name = "azure-storage-blob", specifier = "~=12.24" }, { name = "blis", specifier = "~=1.0" }, @@ -1162,7 +1162,7 @@ dependencies = [ [package.metadata] requires-dist = [ - { name = "azure-identity", specifier = "~=1.19.0" }, + { name = "azure-identity", specifier = "~=1.25" }, { name = "graphrag-cache", editable = "packages/graphrag-cache" }, { name = "graphrag-common", editable = "packages/graphrag-common" }, { name = "jinja2", specifier = "~=3.1" }, @@ -1245,7 +1245,7 @@ dependencies = [ requires-dist = [ { name = "aiofiles", specifier = "~=24.1" }, { name = "azure-cosmos", specifier = "~=4.9" }, - { name = "azure-identity", specifier = "~=1.19" }, + { name = "azure-identity", specifier = "~=1.25" }, { name = "azure-storage-blob", specifier = "~=12.24" }, { name = "graphrag-common", editable = "packages/graphrag-common" }, { name = "pandas", specifier = "~=2.3" }, @@ -1272,7 +1272,7 @@ dependencies = [ requires-dist = [ { name = "azure-core", specifier = "~=1.32" }, { name = "azure-cosmos", specifier = "~=4.9" }, - { name = "azure-identity", specifier = "~=1.19" }, + { name = "azure-identity", specifier = "~=1.25" }, { name = "azure-search-documents", specifier = "~=11.6" }, { name = 
"graphrag-common", editable = "packages/graphrag-common" }, { name = "lancedb", specifier = "~=0.24.1" },