Commit 3026d2a

feat: Support Box AI features (#877)
1 parent 6c45394 commit 3026d2a

4 files changed: +269 -0 lines changed


boxsdk/client/client.py

Lines changed: 76 additions & 0 deletions
@@ -1692,3 +1692,79 @@ def get_sign_template(

```python
            session=self._session,
            response_object=response.json(),
        )

    @api_call
    def send_ai_question(
            self,
            items: Iterable,
            prompt: str,
            mode: Optional[str] = None
    ) -> Any:
        """
        Sends an AI request to supported LLMs and returns an answer specifically focused on the user's
        question given the provided context.

        :param items:
            The items to be processed by the LLM, often files.
        :param prompt:
            The prompt provided by the client to be answered by the LLM.
            The prompt's length is limited to 10000 characters.
        :param mode:
            The mode specifies if this request is for a single or multiple items.
            If you select single_item_qa the items array can have one element only.
            Selecting multiple_item_qa allows you to provide up to 25 items.

            Value is one of `multiple_item_qa`, `single_item_qa`
        :returns:
            A response including the answer from the LLM.
        """
        url = self._session.get_url('ai/ask')
        if mode is None:
            mode = ('single_item_qa' if len(items) == 1 else 'multiple_item_qa')
        body = {
            'items': items,
            'prompt': prompt,
            'mode': mode
        }

        box_response = self._session.post(url, data=json.dumps(body))
        response = box_response.json()
        return self.translator.translate(
            session=self._session,
            response_object=response,
        )

    @api_call
    def send_ai_text_gen(
            self,
            dialogue_history: Iterable,
            items: Iterable,
            prompt: str,
    ):
        """
        Sends an AI request to supported LLMs and returns an answer specifically focused on the creation of new text.

        :param dialogue_history:
            The history of prompts and answers previously passed to the LLM.
            This provides additional context to the LLM in generating the response.
        :param items:
            The items to be processed by the LLM, often files. The array can include exactly one element.
        :param prompt:
            The prompt provided by the client to be answered by the LLM.
            The prompt's length is limited to 10000 characters.
        :returns:
            A response including the generated text from the LLM.
        """
        url = self._session.get_url('ai/text_gen')
        body = {
            'dialogue_history': dialogue_history,
            'items': items,
            'prompt': prompt
        }

        box_response = self._session.post(url, data=json.dumps(body))
        response = box_response.json()
        return self.translator.translate(
            session=self._session,
            response_object=response,
        )
```
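
For orientation, here is a minimal usage sketch of the two new methods. It is not part of this commit: it assumes an authenticated `client` instance and a placeholder file ID, and it assumes an empty `dialogue_history` is acceptable for a first text-generation call. Note the fallback in `send_ai_question`: when `mode` is omitted, it defaults to `single_item_qa` for a single item and `multiple_item_qa` otherwise.

```python
# Minimal sketch, not part of this commit. Assumes `client` is an authenticated
# boxsdk Client and '1234567890' stands in for a real file ID.
items = [{'id': '1234567890', 'type': 'file'}]

# `mode` is omitted here: with a single item, send_ai_question falls back to
# 'single_item_qa'; with several items it would pick 'multiple_item_qa'.
answer = client.send_ai_question(items=items, prompt='Summarize this file.')
print(answer['answer'])

# Text generation; an empty dialogue_history is assumed to be acceptable here.
draft = client.send_ai_text_gen(
    dialogue_history=[],
    items=items,
    prompt='Draft a short status update based on this file.',
)
print(draft['answer'])
```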

docs/usage/ai.md

Lines changed: 71 additions & 0 deletions
@@ -0,0 +1,71 @@
AI
==

AI allows you to send an intelligence request to supported large language models and receive an answer based on the provided prompt and items.

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Send AI request](#send-ai-request)
- [Send AI text generation request](#send-ai-text-generation-request)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

Send AI request
------------------------

Calling the [`client.send_ai_question(items, prompt, mode)`][send-ai-question] method will send an AI request to the supported large language models. The `items` parameter is a list of items to be processed by the LLM, often files. The `prompt` parameter is the question provided by the client to be answered by the LLM; its length is limited to 10000 characters. The `mode` parameter specifies whether this request is for a single item or multiple items. If you select `single_item_qa`, the `items` array can have only one element; selecting `multiple_item_qa` allows you to provide up to 25 items.

<!-- sample post_ai_ask -->
```python
items = [{
    "id": "1582915952443",
    "type": "file",
    "content": "More information about public APIs"
}]
answer = client.send_ai_question(
    items=items,
    prompt="What is this file?",
    mode="single_item_qa"
)
print(answer)
```

NOTE: The AI endpoint may return a 412 status code if the request uses a file that has just been uploaded to Box.
It usually takes a few seconds for the file to be indexed and become available to the AI endpoint.

[send-ai-question]: https://box-python-sdk.readthedocs.io/en/latest/boxsdk.client.html#boxsdk.client.client.Client.send_ai_question

Send AI text generation request
------------------------

Calling the [`client.send_ai_text_gen(dialogue_history, items, prompt)`][send-ai-text-gen] method will send an AI text generation request to the supported large language models. The `dialogue_history` parameter is the history of prompts and answers previously passed to the LLM; this provides additional context to the LLM when generating the response. The `items` parameter is a list of items to be processed by the LLM, often files; the array can include exactly one element. The `prompt` parameter is the instruction provided by the client to be answered by the LLM; its length is limited to 10000 characters.

<!-- sample post_ai_text_gen -->
```python
items = [{
    "id": "1582915952443",
    "type": "file",
    "content": "More information about public APIs"
}]
dialogue_history = [{
    "prompt": "Make my email about public APIs sound more professional",
    "answer": "Here is the first draft of your professional email about public APIs",
    "created_at": "2013-12-12T10:53:43-08:00"
},
{
    "prompt": "Can you add some more information?",
    "answer": "Public API schemas provide necessary information to integrate with APIs...",
    "created_at": "2013-12-12T11:20:43-08:00"
}]
answer = client.send_ai_text_gen(
    dialogue_history=dialogue_history,
    items=items,
    prompt="Write an email to a client about the importance of public APIs."
)
print(answer)
```

[send-ai-text-gen]: https://box-python-sdk.readthedocs.io/en/latest/boxsdk.client.html#boxsdk.client.client.Client.send_ai_text_gen
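
Regarding the 412 note above, here is a rough retry sketch (illustrative only, not part of the commit) that waits for a freshly uploaded file to be indexed before asking the question. It assumes `boxsdk.exception.BoxAPIException` exposes the HTTP status as `status`.

```python
import time

from boxsdk.exception import BoxAPIException


def ask_when_indexed(client, items, prompt, attempts=5, delay=2):
    """Retry the AI question while the endpoint returns 412 (file not indexed yet)."""
    for attempt in range(attempts):
        try:
            return client.send_ai_question(items=items, prompt=prompt)
        except BoxAPIException as err:
            # Re-raise anything that is not a 412, or give up after the last attempt.
            if err.status != 412 or attempt == attempts - 1:
                raise
            time.sleep(delay)
```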
Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@

```python
from datetime import datetime

import pytest

from test.integration_new import CLIENT
from test.integration_new.context_managers.box_test_folder import BoxTestFolder
from test.integration_new.context_managers.box_test_file import BoxTestFile

FOLDER_TESTS_DIRECTORY_NAME = 'folder-integration-tests'


@pytest.fixture(scope='module', autouse=True)
def parent_folder():
    with BoxTestFolder(name=f'{FOLDER_TESTS_DIRECTORY_NAME} {datetime.now()}') as folder:
        yield folder


def test_send_ai_question(parent_folder, small_file_path):
    with BoxTestFile(parent_folder=parent_folder, file_path=small_file_path) as file:
        items = [{
            'id': file.id,
            'type': 'file',
            'content': 'The sun raises in the east.'
        }]
        answer = CLIENT.send_ai_question(
            items=items,
            prompt='Which direction does the sun raise?',
            mode='single_item_qa'
        )
        assert 'east' in answer['answer'].lower()
        assert answer['completion_reason'] == 'done'


def test_send_ai_text_gen(parent_folder, small_file_path):
    with BoxTestFile(parent_folder=parent_folder, file_path=small_file_path) as file:
        items = [{
            'id': file.id,
            'type': 'file',
            'content': 'The sun raises in the east.'
        }]
        dialogue_history = [{
            'prompt': 'How does the sun rise?',
            'answer': 'The sun raises in the east.',
            'created_at': '2013-12-12T10:53:43-08:00'
        }, {
            'prompt': 'How many hours does it take for the sun to rise?',
            'answer': 'It takes 24 hours for the sun to rise.',
            'created_at': '2013-12-12T11:20:43-08:00'
        }]
        answer = CLIENT.send_ai_text_gen(
            dialogue_history=dialogue_history,
            items=items,
            prompt='Which direction does the sun raise?'
        )
        assert 'east' in answer['answer'].lower()
        assert answer['completion_reason'] == 'done'
```
test/unit/client/test_client.py

Lines changed: 66 additions & 0 deletions
@@ -1766,6 +1766,16 @@ def mock_sign_template_response():

```python
    return mock_sign_template


@pytest.fixture(scope='module')
def mock_ai_question_response():
    mock_ai_question_response = {
        'answer': 'Public APIs are important because of key and important reasons.',
        'completion_reason': 'done',
        'created_at': '2021-04-26T08:12:13.982Z',
    }
    return mock_ai_question_response


def test_get_sign_requests(mock_client, mock_box_session, mock_sign_request_response):
    expected_url = f'{API.BASE_API_URL}/sign_requests'
```

@@ -1906,3 +1916,59 @@ def test_get_sign_templates(mock_client, mock_box_session, mock_sign_template_re

```python
    assert isinstance(sign_template, SignTemplate)
    assert sign_template.id == '93153068-5420-467b-b8ef-8e54bfb7be42'
    assert sign_template.name == 'important-file.pdf'


def test_send_ai_question(mock_client, mock_box_session, mock_ai_question_response):
    expected_url = f'{API.BASE_API_URL}/ai/ask'
    mock_box_session.post.return_value.json.return_value = mock_ai_question_response

    items = [{
        'type': 'file',
        'id': '12345'
    }]
    question = 'Why are public APIs important?'
    mode = 'single_item_qa'

    answer = mock_client.send_ai_question(items, question, mode)

    mock_box_session.post.assert_called_once_with(expected_url, data=json.dumps({
        'items': items,
        'prompt': question,
        'mode': mode
    }))
    assert answer['answer'] == 'Public APIs are important because of key and important reasons.'
    assert answer['completion_reason'] == 'done'
    assert answer['created_at'] == '2021-04-26T08:12:13.982Z'


def test_send_ai_text_gen(mock_client, mock_box_session, mock_ai_question_response):
    expected_url = f'{API.BASE_API_URL}/ai/text_gen'
    mock_box_session.post.return_value.json.return_value = mock_ai_question_response

    items = [{
        'type': 'file',
        'id': '12345'
    }]
    dialogue_history = [{
        "prompt": "Make my email about public APIs sound more professional",
        "answer": "Here is the first draft of your professional email about public APIs",
        "created_at": "2013-12-12T10:53:43-08:00"
    }, {
        "prompt": "Can you add some more information?",
        "answer": "Public API schemas provide necessary information to integrate with APIs...",
        "created_at": "2013-12-12T11:20:43-08:00"
    }]
    answer = mock_client.send_ai_text_gen(
        dialogue_history=dialogue_history,
        items=items,
        prompt="Write an email to a client about the importance of public APIs."
    )

    mock_box_session.post.assert_called_once_with(expected_url, data=json.dumps({
        'dialogue_history': dialogue_history,
        'items': items,
        'prompt': "Write an email to a client about the importance of public APIs."
    }))
    assert answer['answer'] == 'Public APIs are important because of key and important reasons.'
    assert answer['completion_reason'] == 'done'
    assert answer['created_at'] == '2021-04-26T08:12:13.982Z'
```
