
feat: Add hubs support to /ai/ask (box/box-openapi#506) (#466)
box-sdk-build authored Feb 5, 2025
1 parent 9779bd9 commit 29f0364
Showing 7 changed files with 74 additions and 27 deletions.
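
Taken together, the changes below suggest the following caller-side usage of the new hubs item type. This is an illustrative sketch pieced together from the schema and docs changes, not part of the commit: `client` is an authenticated `BoxClient`, the hub ID is a placeholder, and the prompt text is made up.

```python
from typing import Optional

from box_sdk_gen import BoxClient
from box_sdk_gen.managers.ai import CreateAiAskMode
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk, AiItemAskTypeField
from box_sdk_gen.schemas.ai_response_full import AiResponseFull


def ask_hub(client: BoxClient, hub_id: str) -> Optional[AiResponseFull]:
    # A `hubs` item must be sent as a single item, so single-item mode is used.
    response: Optional[AiResponseFull] = client.ai.create_ai_ask(
        CreateAiAskMode.SINGLE_ITEM_QA,
        "What topics does this hub cover?",
        [AiItemAsk(id=hub_id, type=AiItemAskTypeField.HUBS)],
    )
    if response is None:
        # The endpoint answered 204: the hub's content is not indexed yet.
        print("No content available to answer the question.")
    return response
```
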
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "7874ac3", "specHash": "1fdcbef", "version": "1.10.0" }
{ "engineHash": "7874ac3", "specHash": "764e12c", "version": "1.10.0" }
16 changes: 10 additions & 6 deletions box_sdk_gen/managers/ai.py
@@ -10,18 +10,20 @@

from box_sdk_gen.serialization.json import serialize

from box_sdk_gen.serialization.json import deserialize

from box_sdk_gen.internal.utils import to_string

from box_sdk_gen.serialization.json import deserialize

from typing import Union

from box_sdk_gen.schemas.ai_item_base import AiItemBase
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk

from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

from box_sdk_gen.networking.fetch_options import ResponseFormat

from box_sdk_gen.schemas.ai_item_base import AiItemBase

from box_sdk_gen.schemas.ai_response_full import AiResponseFull

from box_sdk_gen.schemas.client_error import ClientError
@@ -214,13 +216,13 @@ def create_ai_ask(
self,
mode: CreateAiAskMode,
prompt: str,
items: List[AiItemBase],
items: List[AiItemAsk],
*,
dialogue_history: Optional[List[AiDialogueHistory]] = None,
include_citations: Optional[bool] = None,
ai_agent: Optional[AiAgentAsk] = None,
extra_headers: Optional[Dict[str, Optional[str]]] = None
) -> AiResponseFull:
) -> Optional[AiResponseFull]:
"""
Sends an AI request to supported LLMs and returns an answer specifically focused on the user's question given the provided context.
:param mode: The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
@@ -232,7 +234,7 @@ def create_ai_ask(
**Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first.
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
:type items: List[AiItemBase]
:type items: List[AiItemAsk]
:param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
:type dialogue_history: Optional[List[AiDialogueHistory]], optional
:param include_citations: A flag to indicate whether citations should be returned., defaults to None
@@ -263,6 +265,8 @@ def create_ai_ask(
network_session=self.network_session,
)
)
if to_string(response.status) == '204':
return None
return deserialize(response.data, AiResponseFull)

def create_ai_text_gen(
2 changes: 2 additions & 0 deletions box_sdk_gen/schemas/__init__.py
@@ -14,6 +14,8 @@

from box_sdk_gen.schemas.ai_item_base import *

from box_sdk_gen.schemas.ai_item_ask import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_aws import *

from box_sdk_gen.schemas.ai_llm_endpoint_params_google import *
6 changes: 3 additions & 3 deletions box_sdk_gen/schemas/ai_ask.py
@@ -6,7 +6,7 @@

from box_sdk_gen.internal.base_object import BaseObject

from box_sdk_gen.schemas.ai_item_base import AiItemBase
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk

from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

@@ -25,7 +25,7 @@ def __init__(
self,
mode: AiAskModeField,
prompt: str,
items: List[AiItemBase],
items: List[AiItemAsk],
*,
dialogue_history: Optional[List[AiDialogueHistory]] = None,
include_citations: Optional[bool] = None,
@@ -42,7 +42,7 @@ def __init__(
**Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first.
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
:type items: List[AiItemBase]
:type items: List[AiItemAsk]
:param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
:type dialogue_history: Optional[List[AiDialogueHistory]], optional
:param include_citations: A flag to indicate whether citations should be returned., defaults to None
37 changes: 37 additions & 0 deletions box_sdk_gen/schemas/ai_item_ask.py
@@ -0,0 +1,37 @@
from enum import Enum

from typing import Optional

from box_sdk_gen.internal.base_object import BaseObject

from box_sdk_gen.box.errors import BoxSDKError


class AiItemAskTypeField(str, Enum):
FILE = 'file'
HUBS = 'hubs'


class AiItemAsk(BaseObject):
_discriminator = 'type', {'file', 'hubs'}

def __init__(
self,
id: str,
type: AiItemAskTypeField,
*,
content: Optional[str] = None,
**kwargs
):
"""
:param id: The ID of the file.
:type id: str
:param type: The type of the item. A `hubs` item must be used as a single item.
:type type: AiItemAskTypeField
:param content: The content of the item, often the text representation., defaults to None
:type content: Optional[str], optional
"""
super().__init__(**kwargs)
self.id = id
self.type = type
self.content = content
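
For contrast, a short construction sketch (not part of the commit; the IDs are placeholders) showing how the two item shapes defined by this new schema differ:

```python
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk, AiItemAskTypeField

# A file item may carry an optional `content` override, often the text
# representation of the file; "12345" is a placeholder file ID.
file_item = AiItemAsk(
    id="12345",
    type=AiItemAskTypeField.FILE,
    content="Sun rises in the East",
)

# A hubs item needs only the hub ID and the new `hubs` type, and per the
# docstring above it must be the single item in the request; "67890" is a
# placeholder hub ID.
hub_item = AiItemAsk(id="67890", type=AiItemAskTypeField.HUBS)
```
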
14 changes: 7 additions & 7 deletions docs/ai.md
@@ -22,14 +22,14 @@ client.ai.create_ai_ask(
CreateAiAskMode.MULTIPLE_ITEM_QA,
"Which direction sun rises?",
[
AiItemBase(
AiItemAsk(
id=file_to_ask_1.id,
type=AiItemBaseTypeField.FILE,
type=AiItemAskTypeField.FILE,
content="Earth goes around the sun",
),
AiItemBase(
AiItemAsk(
id=file_to_ask_2.id,
type=AiItemBaseTypeField.FILE,
type=AiItemAskTypeField.FILE,
content="Sun rises in the East in the morning",
),
],
@@ -42,7 +42,7 @@
- The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
- prompt `str`
- The prompt provided by the client to be answered by the LLM. The prompt's length is limited to 10000 characters.
- items `List[AiItemBase]`
- items `List[AiItemAsk]`
- The items to be processed by the LLM, often files. **Note**: Box AI handles documents with text representations up to 1MB in size, or a maximum of 25 files, whichever comes first. If the file size exceeds 1MB, the first 1MB of text representation will be processed. If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
- dialogue_history `Optional[List[AiDialogueHistory]]`
- The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response.
@@ -55,9 +55,9 @@

### Returns

This function returns a value of type `AiResponseFull`.
This function returns a value of type `Optional[AiResponseFull]`.

A successful response including the answer from the LLM.
A successful response including the answer from the LLM. No content is available to answer the question when the request item is a hub but the hub's content is not indexed. To ensure content in the hub is indexed, make sure Box AI for Hubs was enabled in the Admin Console before the hub was created.

## Generate text

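
The parameter list above also covers `dialogue_history` and `include_citations`; the sketch below shows one way they might be passed. The `AiDialogueHistory` keyword arguments (`prompt`, `answer`) and the `citations` attribute on the response are assumed from the Box AI schemas rather than taken from this diff, and `client` and the file ID are placeholders.

```python
from typing import Optional

from box_sdk_gen import BoxClient
from box_sdk_gen.managers.ai import CreateAiAskMode
from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk, AiItemAskTypeField
from box_sdk_gen.schemas.ai_response_full import AiResponseFull


def ask_follow_up(client: BoxClient, file_id: str) -> None:
    response: Optional[AiResponseFull] = client.ai.create_ai_ask(
        CreateAiAskMode.SINGLE_ITEM_QA,
        "And where does it set?",
        [AiItemAsk(id=file_id, type=AiItemAskTypeField.FILE)],
        dialogue_history=[
            # Field names assumed from the AiDialogueHistory schema.
            AiDialogueHistory(
                prompt="Which direction does the sun rise?",
                answer="The sun rises in the East.",
            )
        ],
        include_citations=True,
    )
    if response is not None and response.citations is not None:
        # `citations` is assumed to be present on AiResponseFull.
        for citation in response.citations:
            print(citation.name)
```
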
24 changes: 14 additions & 10 deletions test/ai.py
@@ -1,3 +1,5 @@
from typing import Optional

from typing import Union

from box_sdk_gen.internal.utils import to_string
@@ -12,9 +14,9 @@

from box_sdk_gen.managers.ai import CreateAiAskMode

from box_sdk_gen.schemas.ai_item_base import AiItemBase
from box_sdk_gen.schemas.ai_item_ask import AiItemAsk

from box_sdk_gen.schemas.ai_item_base import AiItemBaseTypeField
from box_sdk_gen.schemas.ai_item_ask import AiItemAskTypeField

from box_sdk_gen.schemas.ai_response import AiResponse

@@ -30,6 +32,8 @@

from box_sdk_gen.managers.uploads import UploadFileAttributesParentField

from box_sdk_gen.schemas.ai_item_base import AiItemBase

from box_sdk_gen.schemas.ai_extract_structured_response import (
AiExtractStructuredResponse,
)
@@ -88,13 +92,13 @@ def testAskAISingleItem():
GetAiAgentDefaultConfigMode.ASK, language='en-US'
)
file_to_ask: FileFull = upload_new_file()
response: AiResponseFull = client.ai.create_ai_ask(
response: Optional[AiResponseFull] = client.ai.create_ai_ask(
CreateAiAskMode.SINGLE_ITEM_QA,
'which direction sun rises',
[
AiItemBase(
AiItemAsk(
id=file_to_ask.id,
type=AiItemBaseTypeField.FILE,
type=AiItemAskTypeField.FILE,
content='Sun rises in the East',
)
],
@@ -108,18 +112,18 @@ def testAskAISingleItem():
def testAskAIMultipleItems():
file_to_ask_1: FileFull = upload_new_file()
file_to_ask_2: FileFull = upload_new_file()
response: AiResponseFull = client.ai.create_ai_ask(
response: Optional[AiResponseFull] = client.ai.create_ai_ask(
CreateAiAskMode.MULTIPLE_ITEM_QA,
'Which direction sun rises?',
[
AiItemBase(
AiItemAsk(
id=file_to_ask_1.id,
type=AiItemBaseTypeField.FILE,
type=AiItemAskTypeField.FILE,
content='Earth goes around the sun',
),
AiItemBase(
AiItemAsk(
id=file_to_ask_2.id,
type=AiItemBaseTypeField.FILE,
type=AiItemAskTypeField.FILE,
content='Sun rises in the East in the morning',
),
],
