
Commit b1c25e7

docs: genai integration docs fixes
1 parent c4a5cf0 commit b1c25e7

1 file changed: docs/usage/http-transport/integrations/genai-compatible.md (+160 additions, -92 deletions)

@@ -27,24 +27,23 @@ Bifrost provides **100% Google GenAI API compatibility** with enhanced features:
 ### **Python (Google GenAI SDK)**

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions

 # Before - Direct Google GenAI
-genai.configure(
-    api_key="your-google-api-key",
-    transport="rest"
-)
+client = genai.Client(api_key="your-google-api-key")

 # After - Via Bifrost
-genai.configure(
+client = genai.Client(
     api_key="your-google-api-key",
-    transport="rest",
-    client_options={"api_endpoint": "http://localhost:8080/genai"} # Only change this
+    http_options=HttpOptions(base_url="http://localhost:8080/genai") # Only change this
 )

 # Everything else stays the same
-model = genai.GenerativeModel('gemini-pro')
-response = model.generate_content("Hello!")
+response = client.models.generate_content(
+    model="gemini-pro",
+    contents="Hello!"
+)
 ```

 ### **JavaScript (Google GenAI SDK)**
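The same endpoint can also be exercised without the SDK. A minimal sketch over raw REST (not part of this commit), assuming the `requests` library and the standard GenAI `generateContent` request body against the Bifrost base URL shown above:

```python
# Hedged sketch, not from the commit: call Bifrost's GenAI-compatible REST
# endpoint directly, assuming the standard GenAI generateContent body shape.
import requests

resp = requests.post(
    "http://localhost:8080/genai/v1beta/models/gemini-pro:generateContent",
    headers={"x-goog-api-key": "your-google-api-key"},
    json={"contents": [{"parts": [{"text": "Hello!"}]}]},
    timeout=30,
)
resp.raise_for_status()
# Same response shape the SDK exposes: candidates[0].content.parts[0].text
print(resp.json()["candidates"][0]["content"]["parts"][0]["text"])
```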
@@ -249,89 +248,93 @@ curl -X POST http://localhost:8080/genai/v1beta/models/gemini-pro:generateConten
 ### **System Instructions**

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions, GenerateContentConfig

-genai.configure(
-    api_key=google_api_key,
-    client_options={"api_endpoint": "http://localhost:8080/genai"}
+client = genai.Client(
+    api_key="your-google-api-key",
+    http_options=HttpOptions(base_url="http://localhost:8080/genai")
 )

-model = genai.GenerativeModel(
-    'gemini-pro',
-    system_instruction="You are a helpful assistant that answers questions about geography."
+response = client.models.generate_content(
+    model="gemini-pro",
+    contents="What is the capital of France?",
+    config=GenerateContentConfig(
+        system_instruction="You are a helpful assistant that answers questions about geography."
+    )
 )
-
-response = model.generate_content("What is the capital of France?")
 ```

 ### **Generation Configuration**

 ```python
-import google.generativeai as genai
-
-genai.configure(
-    api_key=google_api_key,
-    client_options={"api_endpoint": "http://localhost:8080/genai"}
-)
+from google import genai
+from google.genai.types import HttpOptions, GenerateContentConfig

-generation_config = genai.types.GenerationConfig(
-    candidate_count=1,
-    max_output_tokens=1000,
-    temperature=0.7,
-    top_p=0.8,
-    top_k=40,
-    stop_sequences=["END"]
+client = genai.Client(
+    api_key="your-google-api-key",
+    http_options=HttpOptions(base_url="http://localhost:8080/genai")
 )

-model = genai.GenerativeModel('gemini-pro')
-response = model.generate_content(
-    "Tell me a story",
-    generation_config=generation_config
+response = client.models.generate_content(
+    model="gemini-pro",
+    contents="Tell me a story",
+    config=GenerateContentConfig(
+        candidate_count=1,
+        max_output_tokens=1000,
+        temperature=0.7,
+        top_p=0.8,
+        top_k=40,
+        stop_sequences=["END"]
+    )
 )
 ```

 ### **Safety Settings**

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions, GenerateContentConfig, SafetySetting

-genai.configure(
-    api_key=google_api_key,
-    client_options={"api_endpoint": "http://localhost:8080/genai"}
+client = genai.Client(
+    api_key="your-google-api-key",
+    http_options=HttpOptions(base_url="http://localhost:8080/genai")
 )

 safety_settings = [
-    {
-        "category": "HARM_CATEGORY_HARASSMENT",
-        "threshold": "BLOCK_MEDIUM_AND_ABOVE"
-    },
-    {
-        "category": "HARM_CATEGORY_HATE_SPEECH",
-        "threshold": "BLOCK_MEDIUM_AND_ABOVE"
-    }
+    SafetySetting(
+        category="HARM_CATEGORY_HARASSMENT",
+        threshold="BLOCK_MEDIUM_AND_ABOVE"
+    ),
+    SafetySetting(
+        category="HARM_CATEGORY_HATE_SPEECH",
+        threshold="BLOCK_MEDIUM_AND_ABOVE"
+    )
 ]

-model = genai.GenerativeModel('gemini-pro')
-response = model.generate_content(
-    "Your content here",
-    safety_settings=safety_settings
+response = client.models.generate_content(
+    model="gemini-pro",
+    contents="Your content here",
+    config=GenerateContentConfig(safety_settings=safety_settings)
 )
 ```

 ### **Error Handling**

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions
 from google.api_core import exceptions

-genai.configure(
-    api_key=google_api_key,
-    client_options={"api_endpoint": "http://localhost:8080/genai"}
-)
-
 try:
-    model = genai.GenerativeModel('gemini-pro')
-    response = model.generate_content("Hello!")
+    client = genai.Client(
+        api_key=google_api_key,
+        http_options=HttpOptions(base_url="http://localhost:8080/genai")
+    )
+    response = client.models.generate_content(
+        model="gemini-pro",
+        contents="Hello!"
+    )
 except exceptions.InvalidArgument as e:
     print(f"Invalid argument: {e}")
 except exceptions.PermissionDenied as e:
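The updated error-handling example still imports `google.api_core.exceptions`; with the new `google-genai` client, request failures are generally raised from the SDK's own `google.genai.errors` module, so a broader catch may be needed. A hedged sketch under that assumption (not part of this commit):

```python
# Hedged sketch, not from the commit: the google-genai client reports request
# failures via google.genai.errors (assumption: APIError is the base class).
from google import genai
from google.genai import errors
from google.genai.types import HttpOptions

client = genai.Client(
    api_key="your-google-api-key",
    http_options=HttpOptions(base_url="http://localhost:8080/genai"),
)

try:
    response = client.models.generate_content(model="gemini-pro", contents="Hello!")
    print(response.text)
except errors.APIError as e:
    # Covers 4xx/5xx responses from Bifrost or the upstream provider
    print(f"GenAI API error: {e}")
```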
@@ -349,23 +352,73 @@ except Exception as e:
 MCP tools are automatically available in GenAI-compatible requests:

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions, Tool, FunctionDeclaration, Schema

-genai.configure(
-    api_key=google_api_key,
-    client_options={"api_endpoint": "http://localhost:8080/genai"}
+client = genai.Client(
+    api_key="your-google-api-key",
+    http_options=HttpOptions(base_url="http://localhost:8080/genai")
 )

-# No tool definitions needed - MCP tools auto-discovered
-model = genai.GenerativeModel('gemini-pro')
-response = model.generate_content(
-    "List the files in the current directory and tell me about the project structure"
+# Define tools if needed (or use auto-discovered MCP tools)
+tools = [
+    Tool(function_declarations=[
+        FunctionDeclaration(
+            name="list_files",
+            description="List files in a directory",
+            parameters=Schema(
+                type="OBJECT",
+                properties={
+                    "path": Schema(type="STRING", description="Directory path")
+                },
+                required=["path"]
+            )
+        )
+    ])
+]
+
+response = client.models.generate_content(
+    model="gemini-pro",
+    contents="List the files in the current directory and tell me about the project structure",
+    config=GenerateContentConfig(tools=tools)
 )

-# Response may include automatic function calls
+# Check for function calls in response
 if response.candidates[0].content.parts[0].function_call:
     function_call = response.candidates[0].content.parts[0].function_call
-    print(f"Called MCP tool: {function_call.name}")
+    print(f"Called tool: {function_call.name}")
+```
+
+### **Multi-provider Support**
+
+Use multiple providers with Google GenAI SDK format by prefixing model names:
+
+```python
+from google import genai
+from google.genai.types import HttpOptions
+
+client = genai.Client(
+    api_key="dummy", # API keys configured in Bifrost
+    http_options=HttpOptions(base_url="http://localhost:8080/genai")
+)
+
+# Google models (default)
+response1 = client.models.generate_content(
+    model="gemini-pro",
+    contents="Hello!"
+)
+
+# OpenAI models via GenAI SDK
+response2 = client.models.generate_content(
+    model="openai/gpt-4o-mini",
+    contents="Hello!"
+)
+
+# Anthropic models via GenAI SDK
+response3 = client.models.generate_content(
+    model="anthropic/claude-3-sonnet-20240229",
+    contents="Hello!"
+)
 ```

 ### **Multi-provider Fallbacks**
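Because every prefixed model above is returned in the same GenAI response shape, the three calls can also be driven by one loop. A small sketch (not part of this commit) reusing only the model names and endpoint from the example above:

```python
# Sketch: iterate the prefixed model names from the example above and read each
# response through the same GenAI accessor path.
from google import genai
from google.genai.types import HttpOptions

client = genai.Client(
    api_key="dummy",  # API keys configured in Bifrost
    http_options=HttpOptions(base_url="http://localhost:8080/genai"),
)

for model in ["gemini-pro", "openai/gpt-4o-mini", "anthropic/claude-3-sonnet-20240229"]:
    response = client.models.generate_content(model=model, contents="Hello!")
    print(model, "->", response.candidates[0].content.parts[0].text[:60])
```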
@@ -428,28 +481,32 @@ Multiple Google Cloud projects automatically load balanced:
 Test your existing Google GenAI code with Bifrost:

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions

 def test_bifrost_compatibility():
     # Test with Bifrost
-    genai.configure(
+    bifrost_client = genai.Client(
         api_key=google_api_key,
-        client_options={"api_endpoint": "http://localhost:8080/genai"}
+        http_options=HttpOptions(base_url="http://localhost:8080/genai")
     )
-    bifrost_model = genai.GenerativeModel('gemini-pro')

     # Test with direct Google GenAI (for comparison)
-    genai.configure(
-        api_key=google_api_key,
-        client_options={} # Reset to default
+    google_client = genai.Client(
+        api_key=google_api_key
     )
-    google_model = genai.GenerativeModel('gemini-pro')

     test_prompt = "Hello, test!"

     # Both should work identically
-    bifrost_response = bifrost_model.generate_content(test_prompt)
-    google_response = google_model.generate_content(test_prompt)
+    bifrost_response = bifrost_client.models.generate_content(
+        model="gemini-pro",
+        contents=test_prompt
+    )
+    google_response = google_client.models.generate_content(
+        model="gemini-pro",
+        contents=test_prompt
+    )

     # Compare response structure
     assert bifrost_response.candidates[0].content.parts[0].text is not None
@@ -463,23 +520,34 @@ test_bifrost_compatibility()
 ### **Function Calling Testing**

 ```python
-import google.generativeai as genai
+from google import genai
+from google.genai.types import HttpOptions, Tool, FunctionDeclaration, Schema

 def test_function_calling():
-    genai.configure(
+    client = genai.Client(
         api_key=google_api_key,
-        client_options={"api_endpoint": "http://localhost:8080/genai"}
+        http_options=HttpOptions(base_url="http://localhost:8080/genai")
     )

-    # Define a test function
-    def get_time():
-        """Get current time"""
-        return "2024-01-01 12:00:00"
+    # Define a test tool
+    tools = [
+        Tool(function_declarations=[
+            FunctionDeclaration(
+                name="get_time",
+                description="Get current time",
+                parameters=Schema(
+                    type="OBJECT",
+                    properties={},
+                    required=[]
+                )
+            )
+        ])
+    ]

-    model = genai.GenerativeModel('gemini-pro')
-    response = model.generate_content(
-        "What time is it?",
-        tools=[get_time]
+    response = client.models.generate_content(
+        model="gemini-pro",
+        contents="What time is it?",
+        config=GenerateContentConfig(tools=tools)
     )

     # Should include function call
@@ -498,7 +566,7 @@ test_function_calling()
 Use multiple providers with Google GenAI SDK format by prefixing model names:

 ```python
-import google.generativeai as genai
+from google import genai

 genai.configure(
     api_key="dummy", # API keys configured in Bifrost