From 8f41ab742ab8255d7ecc39cd9a993b997e688c17 Mon Sep 17 00:00:00 2001 From: box-sdk-build Date: Tue, 4 Feb 2025 02:11:11 -0800 Subject: [PATCH 1/4] feat: add hubs support to /ai/ask (box/box-openapi#506) --- .codegen.json | 2 +- box_sdk_gen/managers/ai.py | 16 ++++++++----- box_sdk_gen/schemas/__init__.py | 2 ++ box_sdk_gen/schemas/ai_ask.py | 6 ++--- box_sdk_gen/schemas/ai_item_ask.py | 37 ++++++++++++++++++++++++++++++ docs/ai.md | 14 +++++------ test/ai.py | 24 +++++++++++-------- 7 files changed, 74 insertions(+), 27 deletions(-) create mode 100644 box_sdk_gen/schemas/ai_item_ask.py diff --git a/.codegen.json b/.codegen.json index 2e16cbe2..878d3596 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1 +1 @@ -{ "engineHash": "7874ac3", "specHash": "1fdcbef", "version": "1.10.0" } +{ "engineHash": "7874ac3", "specHash": "764e12c", "version": "1.10.0" } diff --git a/box_sdk_gen/managers/ai.py b/box_sdk_gen/managers/ai.py index 33469a55..37929539 100644 --- a/box_sdk_gen/managers/ai.py +++ b/box_sdk_gen/managers/ai.py @@ -10,18 +10,20 @@ from box_sdk_gen.serialization.json import serialize -from box_sdk_gen.serialization.json import deserialize - from box_sdk_gen.internal.utils import to_string +from box_sdk_gen.serialization.json import deserialize + from typing import Union -from box_sdk_gen.schemas.ai_item_base import AiItemBase +from box_sdk_gen.schemas.ai_item_ask import AiItemAsk from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory from box_sdk_gen.networking.fetch_options import ResponseFormat +from box_sdk_gen.schemas.ai_item_base import AiItemBase + from box_sdk_gen.schemas.ai_response_full import AiResponseFull from box_sdk_gen.schemas.client_error import ClientError @@ -214,13 +216,13 @@ def create_ai_ask( self, mode: CreateAiAskMode, prompt: str, - items: List[AiItemBase], + items: List[AiItemAsk], *, dialogue_history: Optional[List[AiDialogueHistory]] = None, include_citations: Optional[bool] = None, ai_agent: Optional[AiAgentAsk] = 
None, extra_headers: Optional[Dict[str, Optional[str]]] = None - ) -> AiResponseFull: + ) -> Optional[AiResponseFull]: """ Sends an AI request to supported LLMs and returns an answer specifically focused on the user's question given the provided context. :param mode: The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items. @@ -232,7 +234,7 @@ def create_ai_ask( **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first. If the file size exceeds 1MB, the first 1MB of text representation will be processed. If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only. - :type items: List[AiItemBase] + :type items: List[AiItemAsk] :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None :type dialogue_history: Optional[List[AiDialogueHistory]], optional :param include_citations: A flag to indicate whether citations should be returned., defaults to None @@ -263,6 +265,8 @@ def create_ai_ask( network_session=self.network_session, ) ) + if to_string(response.status) == '204': + return None return deserialize(response.data, AiResponseFull) def create_ai_text_gen( diff --git a/box_sdk_gen/schemas/__init__.py b/box_sdk_gen/schemas/__init__.py index 90012de9..670a4490 100644 --- a/box_sdk_gen/schemas/__init__.py +++ b/box_sdk_gen/schemas/__init__.py @@ -14,6 +14,8 @@ from box_sdk_gen.schemas.ai_item_base import * +from box_sdk_gen.schemas.ai_item_ask import * + from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import * from box_sdk_gen.schemas.ai_llm_endpoint_params_google import * diff --git a/box_sdk_gen/schemas/ai_ask.py b/box_sdk_gen/schemas/ai_ask.py index 31a69211..e890876f 100644 --- 
a/box_sdk_gen/schemas/ai_ask.py +++ b/box_sdk_gen/schemas/ai_ask.py @@ -6,7 +6,7 @@ from box_sdk_gen.internal.base_object import BaseObject -from box_sdk_gen.schemas.ai_item_base import AiItemBase +from box_sdk_gen.schemas.ai_item_ask import AiItemAsk from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory @@ -25,7 +25,7 @@ def __init__( self, mode: AiAskModeField, prompt: str, - items: List[AiItemBase], + items: List[AiItemAsk], *, dialogue_history: Optional[List[AiDialogueHistory]] = None, include_citations: Optional[bool] = None, @@ -42,7 +42,7 @@ def __init__( **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first. If the file size exceeds 1MB, the first 1MB of text representation will be processed. If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only. - :type items: List[AiItemBase] + :type items: List[AiItemAsk] :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None :type dialogue_history: Optional[List[AiDialogueHistory]], optional :param include_citations: A flag to indicate whether citations should be returned., defaults to None diff --git a/box_sdk_gen/schemas/ai_item_ask.py b/box_sdk_gen/schemas/ai_item_ask.py new file mode 100644 index 00000000..d2702b88 --- /dev/null +++ b/box_sdk_gen/schemas/ai_item_ask.py @@ -0,0 +1,37 @@ +from enum import Enum + +from typing import Optional + +from box_sdk_gen.internal.base_object import BaseObject + +from box_sdk_gen.box.errors import BoxSDKError + + +class AiItemAskTypeField(str, Enum): + FILE = 'file' + HUBS = 'hubs' + + +class AiItemAsk(BaseObject): + _discriminator = 'type', {'file', 'hubs'} + + def __init__( + self, + id: str, + type: AiItemAskTypeField, + *, + content: Optional[str] = None, + **kwargs + ): + """ + :param id: The ID of the file. 
+ :type id: str + :param type: The type of the item. A `hubs` item must be used as a single item. + :type type: AiItemAskTypeField + :param content: The content of the item, often the text representation., defaults to None + :type content: Optional[str], optional + """ + super().__init__(**kwargs) + self.id = id + self.type = type + self.content = content diff --git a/docs/ai.md b/docs/ai.md index a7daaafc..92b2f456 100644 --- a/docs/ai.md +++ b/docs/ai.md @@ -22,14 +22,14 @@ client.ai.create_ai_ask( CreateAiAskMode.MULTIPLE_ITEM_QA, "Which direction sun rises?", [ - AiItemBase( + AiItemAsk( id=file_to_ask_1.id, - type=AiItemBaseTypeField.FILE, + type=AiItemAskTypeField.FILE, content="Earth goes around the sun", ), - AiItemBase( + AiItemAsk( id=file_to_ask_2.id, - type=AiItemBaseTypeField.FILE, + type=AiItemAskTypeField.FILE, content="Sun rises in the East in the morning", ), ], @@ -42,7 +42,7 @@ client.ai.create_ai_ask( - The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items. - prompt `str` - The prompt provided by the client to be answered by the LLM. The prompt's length is limited to 10000 characters. -- items `List[AiItemBase]` +- items `List[AiItemAsk]` - The items to be processed by the LLM, often files. **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first. If the file size exceeds 1MB, the first 1MB of text representation will be processed. If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only. - dialogue_history `Optional[List[AiDialogueHistory]]` - The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response. 
@@ -55,9 +55,9 @@ client.ai.create_ai_ask( ### Returns -This function returns a value of type `AiResponseFull`. +This function returns a value of type `Optional[AiResponseFull]`. -A successful response including the answer from the LLM. +A successful response including the answer from the LLM. No content is available to answer the question. This is returned when the request item is a hub, but content in the hubs is not indexed. To ensure content in the hub is indexed, make sure Box AI for Hubs in the Admin Console was enabled before hub creation. ## Generate text diff --git a/test/ai.py b/test/ai.py index a835fddf..e7b77359 100644 --- a/test/ai.py +++ b/test/ai.py @@ -1,3 +1,5 @@ +from typing import Optional + from typing import Union from box_sdk_gen.internal.utils import to_string @@ -12,9 +14,9 @@ from box_sdk_gen.managers.ai import CreateAiAskMode -from box_sdk_gen.schemas.ai_item_base import AiItemBase +from box_sdk_gen.schemas.ai_item_ask import AiItemAsk -from box_sdk_gen.schemas.ai_item_base import AiItemBaseTypeField +from box_sdk_gen.schemas.ai_item_ask import AiItemAskTypeField from box_sdk_gen.schemas.ai_response import AiResponse @@ -30,6 +32,8 @@ from box_sdk_gen.managers.uploads import UploadFileAttributesParentField +from box_sdk_gen.schemas.ai_item_base import AiItemBase + from box_sdk_gen.schemas.ai_extract_structured_response import ( AiExtractStructuredResponse, ) @@ -88,13 +92,13 @@ def testAskAISingleItem(): GetAiAgentDefaultConfigMode.ASK, language='en-US' ) file_to_ask: FileFull = upload_new_file() - response: AiResponseFull = client.ai.create_ai_ask( + response: Optional[AiResponseFull] = client.ai.create_ai_ask( CreateAiAskMode.SINGLE_ITEM_QA, 'which direction sun rises', [ - AiItemBase( + AiItemAsk( id=file_to_ask.id, - type=AiItemBaseTypeField.FILE, + type=AiItemAskTypeField.FILE, content='Sun rises in the East', ) ], @@ -108,18 +112,18 @@ def testAskAIMultipleItems(): file_to_ask_1: FileFull = upload_new_file()
file_to_ask_2: FileFull = upload_new_file() - response: AiResponseFull = client.ai.create_ai_ask( + response: Optional[AiResponseFull] = client.ai.create_ai_ask( CreateAiAskMode.MULTIPLE_ITEM_QA, 'Which direction sun rises?', [ - AiItemBase( + AiItemAsk( id=file_to_ask_1.id, - type=AiItemBaseTypeField.FILE, + type=AiItemAskTypeField.FILE, content='Earth goes around the sun', ), - AiItemBase( + AiItemAsk( id=file_to_ask_2.id, - type=AiItemBaseTypeField.FILE, + type=AiItemAskTypeField.FILE, content='Sun rises in the East in the morning', ), ], From 729795cb3e9dd926238fb4f4bd6681b954be0ce8 Mon Sep 17 00:00:00 2001 From: box-sdk-build Date: Tue, 4 Feb 2025 03:42:55 -0800 Subject: [PATCH 2/4] chore: Update .codegen.json with commit hash of codegen and openapi spec --- .codegen.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.codegen.json b/.codegen.json index 878d3596..e33edb17 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1 +1 @@ -{ "engineHash": "7874ac3", "specHash": "764e12c", "version": "1.10.0" } +{ "engineHash": "7874ac3", "specHash": "59747aa", "version": "1.10.0" } From 138d49e8df393519f55db3c7d0bae4e4cb606fdc Mon Sep 17 00:00:00 2001 From: box-sdk-build Date: Tue, 4 Feb 2025 05:41:34 -0800 Subject: [PATCH 3/4] chore: Update .codegen.json with commit hash of codegen and openapi spec --- .codegen.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.codegen.json b/.codegen.json index e33edb17..a7a66301 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1 +1 @@ -{ "engineHash": "7874ac3", "specHash": "59747aa", "version": "1.10.0" } +{ "engineHash": "a88aabb", "specHash": "59747aa", "version": "1.10.0" } From 813f97e572a717b8b84a49adfd2f3576925207aa Mon Sep 17 00:00:00 2001 From: box-sdk-build Date: Tue, 4 Feb 2025 08:09:43 -0800 Subject: [PATCH 4/4] chore: Update .codegen.json with commit hash of codegen and openapi spec --- .codegen.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.codegen.json 
b/.codegen.json index a7a66301..491800f3 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1 +1 @@ -{ "engineHash": "a88aabb", "specHash": "59747aa", "version": "1.10.0" } +{ "engineHash": "76725f4", "specHash": "59747aa", "version": "1.10.0" }