feat: parametrise chunked uploads endpoint urls (box/box-openapi#444)
box-sdk-build committed Aug 12, 2024
1 parent 88ce019 commit bc25040
Showing 13 changed files with 75 additions and 165 deletions.
2 changes: 1 addition & 1 deletion .codegen.json
@@ -1 +1 @@
{ "engineHash": "ab2fc63", "specHash": "871a814", "version": "1.2.0" }
{ "engineHash": "d1cb68d", "specHash": "9919482", "version": "1.2.0" }
45 changes: 29 additions & 16 deletions box_sdk_gen/managers/ai.py
@@ -16,16 +16,14 @@

from typing import Union

-from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory
+from box_sdk_gen.internal.utils import DateTime

-from box_sdk_gen.schemas.ai_ask_response import AiAskResponse
+from box_sdk_gen.schemas.ai_response import AiResponse

from box_sdk_gen.schemas.client_error import ClientError

from box_sdk_gen.schemas.ai_ask import AiAsk

-from box_sdk_gen.schemas.ai_response import AiResponse

from box_sdk_gen.schemas.ai_text_gen import AiTextGen

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk
@@ -116,6 +114,29 @@ def __init__(
self.content = content


+class CreateAiTextGenDialogueHistory(BaseObject):
+    def __init__(
+        self,
+        *,
+        prompt: Optional[str] = None,
+        answer: Optional[str] = None,
+        created_at: Optional[DateTime] = None,
+        **kwargs
+    ):
+        """
+        :param prompt: The prompt previously provided by the client and answered by the LLM., defaults to None
+        :type prompt: Optional[str], optional
+        :param answer: The answer previously provided by the LLM., defaults to None
+        :type answer: Optional[str], optional
+        :param created_at: The ISO date formatted timestamp of when the previous answer to the prompt was created., defaults to None
+        :type created_at: Optional[DateTime], optional
+        """
+        super().__init__(**kwargs)
+        self.prompt = prompt
+        self.answer = answer
+        self.created_at = created_at


class GetAiAgentDefaultConfigMode(str, Enum):
ASK = 'ask'
TEXT_GEN = 'text_gen'
@@ -139,11 +160,9 @@ def create_ai_ask(
prompt: str,
items: List[CreateAiAskItems],
*,
-        dialogue_history: Optional[List[AiDialogueHistory]] = None,
-        include_citations: Optional[bool] = None,
ai_agent: Optional[AiAgentAsk] = None,
extra_headers: Optional[Dict[str, Optional[str]]] = None
-    ) -> AiAskResponse:
+    ) -> AiResponse:
"""
Sends an AI request to supported LLMs and returns an answer specifically focused on the user's question given the provided context.
:param mode: The mode specifies if this request is for a single or multiple items. If you select `single_item_qa` the `items` array can have one element only. Selecting `multiple_item_qa` allows you to provide up to 25 items.
@@ -156,10 +175,6 @@ def create_ai_ask(
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
:type items: List[CreateAiAskItems]
-        :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
-        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
-        :param include_citations: A flag to indicate whether citations should be returned., defaults to None
-        :type include_citations: Optional[bool], optional
:param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
:type extra_headers: Optional[Dict[str, Optional[str]]], optional
"""
@@ -169,8 +184,6 @@ def create_ai_ask(
'mode': mode,
'prompt': prompt,
'items': items,
-            'dialogue_history': dialogue_history,
-            'include_citations': include_citations,
'ai_agent': ai_agent,
}
headers_map: Dict[str, str] = prepare_params({**extra_headers})
@@ -186,14 +199,14 @@ def create_ai_ask(
network_session=self.network_session,
),
)
-        return deserialize(response.data, AiAskResponse)
+        return deserialize(response.data, AiResponse)

def create_ai_text_gen(
self,
prompt: str,
items: List[CreateAiTextGenItems],
*,
-        dialogue_history: Optional[List[AiDialogueHistory]] = None,
+        dialogue_history: Optional[List[CreateAiTextGenDialogueHistory]] = None,
ai_agent: Optional[AiAgentTextGen] = None,
extra_headers: Optional[Dict[str, Optional[str]]] = None
) -> AiResponse:
@@ -208,7 +221,7 @@ def create_ai_text_gen(
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
:type items: List[CreateAiTextGenItems]
:param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
-        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
+        :type dialogue_history: Optional[List[CreateAiTextGenDialogueHistory]], optional
:param extra_headers: Extra headers that will be included in the HTTP request., defaults to None
:type extra_headers: Optional[Dict[str, Optional[str]]], optional
"""
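And a corresponding sketch for the slimmed-down create_ai_ask, whose return type is now AiResponse rather than the deleted AiAskResponse. Again a hedged illustration: the client setup and file ID are placeholders, and it assumes this module's existing CreateAiAskMode and CreateAiAskItems helpers.

from box_sdk_gen import BoxClient, BoxDeveloperTokenAuth
from box_sdk_gen.managers.ai import CreateAiAskMode, CreateAiAskItems

client = BoxClient(auth=BoxDeveloperTokenAuth(token='DEVELOPER_TOKEN'))

# dialogue_history and include_citations are no longer accepted here.
answer = client.ai.create_ai_ask(
    mode=CreateAiAskMode.SINGLE_ITEM_QA.value,
    prompt='What are the key points of this document?',
    items=[CreateAiAskItems(id='12345')],  # placeholder file ID
)
print(answer.answer)  # deserialized as AiResponse after this change
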
2 changes: 0 additions & 2 deletions box_sdk_gen/networking/fetch.py
@@ -159,8 +159,6 @@ def fetch(url: str, options: FetchOptions) -> FetchResponse:
)
)

-        if options.file_stream and options.file_stream.seekable():
-            options.file_stream.seek(0)
attempt_nr += 1

__raise_on_unsuccessful_request(request=request, response=response)
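The two deleted lines above rewound a seekable upload stream before each retry, presumably relocated elsewhere in this commit's retry handling. As a generic illustration of why that rewind matters (this is not the SDK's actual retry code), consider:

import io
from typing import Callable

def send_with_retry(stream: io.IOBase, send: Callable, max_attempts: int = 3):
    """Generic sketch: retry a stream upload, rewinding between attempts."""
    for _ in range(max_attempts):
        # Without this rewind, a retry would resume from wherever the failed
        # attempt left the cursor and upload truncated content.
        if stream.seekable():
            stream.seek(0)
        try:
            return send(stream)
        except ConnectionError:
            continue
    raise RuntimeError('all retry attempts failed')
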
10 changes: 2 additions & 8 deletions box_sdk_gen/schemas/__init__.py
@@ -340,18 +340,12 @@

from box_sdk_gen.schemas.ai_agent_text_gen import *

+from box_sdk_gen.schemas.ai_text_gen import *

from box_sdk_gen.schemas.ai_agent_basic_text_tool_ask import *

from box_sdk_gen.schemas.ai_agent_ask import *

-from box_sdk_gen.schemas.ai_citation import *

-from box_sdk_gen.schemas.ai_ask_response import *

-from box_sdk_gen.schemas.ai_dialogue_history import *

-from box_sdk_gen.schemas.ai_text_gen import *

from box_sdk_gen.schemas.ai_ask import *

from box_sdk_gen.schemas.completion_rule_variable import *
6 changes: 3 additions & 3 deletions box_sdk_gen/schemas/ai_agent_ask.py
@@ -19,16 +19,16 @@ class AiAgentAsk(BaseObject):
def __init__(
self,
*,
-        type: AiAgentAskTypeField = AiAgentAskTypeField.AI_AGENT_ASK.value,
+        type: Optional[AiAgentAskTypeField] = None,
long_text: Optional[AiAgentLongTextTool] = None,
basic_text: Optional[AiAgentBasicTextToolAsk] = None,
long_text_multi: Optional[AiAgentLongTextTool] = None,
basic_text_multi: Optional[AiAgentBasicTextToolAsk] = None,
**kwargs
):
"""
-        :param type: The type of AI agent used to handle queries., defaults to AiAgentAskTypeField.AI_AGENT_ASK.value
-        :type type: AiAgentAskTypeField, optional
+        :param type: The type of AI agent used to handle queries., defaults to None
+        :type type: Optional[AiAgentAskTypeField], optional
"""
super().__init__(**kwargs)
self.type = type
6 changes: 3 additions & 3 deletions box_sdk_gen/schemas/ai_agent_text_gen.py
@@ -17,13 +17,13 @@ class AiAgentTextGen(BaseObject):
def __init__(
self,
*,
-        type: AiAgentTextGenTypeField = AiAgentTextGenTypeField.AI_AGENT_TEXT_GEN.value,
+        type: Optional[AiAgentTextGenTypeField] = None,
basic_gen: Optional[AiAgentBasicGenTool] = None,
**kwargs
):
"""
-        :param type: The type of AI agent used for generating text., defaults to AiAgentTextGenTypeField.AI_AGENT_TEXT_GEN.value
-        :type type: AiAgentTextGenTypeField, optional
+        :param type: The type of AI agent used for generating text., defaults to None
+        :type type: Optional[AiAgentTextGenTypeField], optional
"""
super().__init__(**kwargs)
self.type = type
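Since type is now optional in both agent schemas, an agent override object can be built without restating the constant. A minimal sketch under that assumption:

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk
from box_sdk_gen.schemas.ai_agent_text_gen import AiAgentTextGen

# 'type' may now be omitted; it defaults to None rather than the enum constant.
ask_agent = AiAgentAsk()
text_gen_agent = AiAgentTextGen()
print(ask_agent.type, text_gen_agent.type)  # None None
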
10 changes: 0 additions & 10 deletions box_sdk_gen/schemas/ai_ask.py
@@ -6,8 +6,6 @@

from typing import List

-from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

from box_sdk_gen.schemas.ai_agent_ask import AiAgentAsk


@@ -52,8 +50,6 @@ def __init__(
prompt: str,
items: List[AiAskItemsField],
*,
-        dialogue_history: Optional[List[AiDialogueHistory]] = None,
-        include_citations: Optional[bool] = None,
ai_agent: Optional[AiAgentAsk] = None,
**kwargs
):
@@ -68,15 +64,9 @@ def __init__(
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
If you set `mode` parameter to `single_item_qa`, the `items` array can have one element only.
:type items: List[AiAskItemsField]
-        :param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
-        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
-        :param include_citations: A flag to indicate whether citations should be returned., defaults to None
-        :type include_citations: Optional[bool], optional
"""
super().__init__(**kwargs)
self.mode = mode
self.prompt = prompt
self.items = items
-        self.dialogue_history = dialogue_history
-        self.include_citations = include_citations
self.ai_agent = ai_agent
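
After this change, an AiAsk request body built directly omits the removed fields. A minimal sketch, assuming this module's AiAskModeField enum and the AiAskItemsField class shown above (the file ID is a placeholder):

from box_sdk_gen.schemas.ai_ask import AiAsk, AiAskModeField, AiAskItemsField

# dialogue_history and include_citations are gone from the schema.
request_body = AiAsk(
    mode=AiAskModeField.SINGLE_ITEM_QA.value,
    prompt='Summarize this file.',
    items=[AiAskItemsField(id='12345')],  # placeholder file ID
)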
36 changes: 0 additions & 36 deletions box_sdk_gen/schemas/ai_ask_response.py

This file was deleted.

38 changes: 0 additions & 38 deletions box_sdk_gen/schemas/ai_citation.py

This file was deleted.

28 changes: 0 additions & 28 deletions box_sdk_gen/schemas/ai_dialogue_history.py

This file was deleted.

31 changes: 27 additions & 4 deletions box_sdk_gen/schemas/ai_text_gen.py
@@ -6,10 +6,10 @@

from typing import List

-from box_sdk_gen.schemas.ai_dialogue_history import AiDialogueHistory

from box_sdk_gen.schemas.ai_agent_text_gen import AiAgentTextGen

+from box_sdk_gen.internal.utils import DateTime


class AiTextGenItemsTypeField(str, Enum):
FILE = 'file'
@@ -40,13 +40,36 @@ def __init__(
self.content = content


+class AiTextGenDialogueHistoryField(BaseObject):
+    def __init__(
+        self,
+        *,
+        prompt: Optional[str] = None,
+        answer: Optional[str] = None,
+        created_at: Optional[DateTime] = None,
+        **kwargs
+    ):
+        """
+        :param prompt: The prompt previously provided by the client and answered by the LLM., defaults to None
+        :type prompt: Optional[str], optional
+        :param answer: The answer previously provided by the LLM., defaults to None
+        :type answer: Optional[str], optional
+        :param created_at: The ISO date formatted timestamp of when the previous answer to the prompt was created., defaults to None
+        :type created_at: Optional[DateTime], optional
+        """
+        super().__init__(**kwargs)
+        self.prompt = prompt
+        self.answer = answer
+        self.created_at = created_at


class AiTextGen(BaseObject):
def __init__(
self,
prompt: str,
items: List[AiTextGenItemsField],
*,
-        dialogue_history: Optional[List[AiDialogueHistory]] = None,
+        dialogue_history: Optional[List[AiTextGenDialogueHistoryField]] = None,
ai_agent: Optional[AiAgentTextGen] = None,
**kwargs
):
@@ -60,7 +83,7 @@ def __init__(
If the file size exceeds 1MB, the first 1MB of text representation will be processed.
:type items: List[AiTextGenItemsField]
:param dialogue_history: The history of prompts and answers previously passed to the LLM. This provides additional context to the LLM in generating the response., defaults to None
-        :type dialogue_history: Optional[List[AiDialogueHistory]], optional
+        :type dialogue_history: Optional[List[AiTextGenDialogueHistoryField]], optional
"""
super().__init__(**kwargs)
self.prompt = prompt
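A minimal sketch of the schema-level equivalent, using the new nested AiTextGenDialogueHistoryField class added above (the file ID is a placeholder):

from box_sdk_gen.schemas.ai_text_gen import (
    AiTextGen,
    AiTextGenItemsField,
    AiTextGenDialogueHistoryField,
)

request_body = AiTextGen(
    prompt='Rewrite this more concisely.',
    items=[AiTextGenItemsField(id='12345')],  # placeholder file ID
    dialogue_history=[
        AiTextGenDialogueHistoryField(
            prompt='Draft a summary.',
            answer='Here is a first draft...',
        )
    ],
)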
