Skip to content

google.ai.generativelanguage #534

@FeliciaAnnKelleyTaylorGV

Description

`"""
Install an additional SDK for JSON schema support Google AI Python SDK

$ pip install google.ai.generativelanguage
"""

import os
import time
import google.generativeai as genai
from google.ai.generativelanguage_v1beta.types import content

# Read the API key from the environment; raises KeyError if GEMINI_API_KEY is unset.
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

def upload_to_gemini(path, mime_type=None):
    """Upload the given file to Gemini and return the uploaded file handle.

    See https://ai.google.dev/gemini-api/docs/prompting_with_media

    Args:
        path: Local filesystem path of the file to upload.
        mime_type: Optional MIME type; the API infers it when omitted.

    Returns:
        The uploaded file object (exposes ``name``, ``display_name``, ``uri``).
    """
    file = genai.upload_file(path, mime_type=mime_type)
    print(f"Uploaded file '{file.display_name}' as: {file.uri}")
    return file

def wait_for_files_active(files):
    """Block until every given uploaded file reaches the ACTIVE state.

    Some files uploaded to the Gemini API need to be processed before they
    can be used as prompt inputs. The status can be seen by querying the
    file's "state" field.

    This implementation uses a simple blocking polling loop. Production
    code should probably employ a more sophisticated approach.

    Args:
        files: Iterable of uploaded file objects (each exposing ``name``).

    Raises:
        Exception: If any file finishes in a state other than ACTIVE.
    """
    print("Waiting for file processing...")
    for name in (file.name for file in files):
        file = genai.get_file(name)
        while file.state.name == "PROCESSING":
            print(".", end="", flush=True)
            time.sleep(10)  # fixed 10 s poll interval; tune for production
            file = genai.get_file(name)
        if file.state.name != "ACTIVE":
            raise Exception(f"File {file.name} failed to process")
    print("...all files ready")
    print()

# Create the model.
#
# NOTE(review): temperature=2 is the maximum allowed value and yields highly
# random output — confirm this is intentional.
generation_config = {
    "temperature": 2,
    "top_p": 0.95,
    "top_k": 40,
    "max_output_tokens": 8192,
    # Structured-output schema: the response must be a JSON object whose
    # "null" key holds an object whose "string" key holds an empty object.
    # NOTE(review): the key names "null" and "string" look like placeholders
    # from a schema builder — verify against the schema actually intended.
    "response_schema": content.Schema(
        type=content.Type.OBJECT,
        properties={
            "null": content.Schema(
                type=content.Type.OBJECT,
                properties={
                    "string": content.Schema(
                        type=content.Type.OBJECT,
                        properties={},
                    ),
                },
            ),
        },
    ),
    "response_mime_type": "application/json",
}

model = genai.GenerativeModel(
    model_name="gemini-1.5-pro",
    generation_config=generation_config,
)

# TODO: Make these files available on the local file system.
# You may need to update the file paths.
#
# NOTE(review): upload_to_gemini expects a *local* file path — the Git URL
# ("https://github.com/...") and the placeholder "Unknown File" below will
# not resolve; replace both with real local paths before running.
files = [
    upload_to_gemini("https://github.com/gitkraken/vscode-gitlens.wiki.git", mime_type="application/zip"),
    upload_to_gemini("Unknown File", mime_type="application/octet-stream"),
]

# Some files have a processing delay; wait for them to be ready.
wait_for_files_active(files)

# Seed the conversation: the user turn carries the first uploaded file and
# the model turn carries the second, then send the actual prompt.
chat_session = model.start_chat(
    history=[
        {
            "role": "user",
            "parts": [
                files[0],
            ],
        },
        {
            "role": "model",
            "parts": [
                files[1],
            ],
        },
    ]
)

# NOTE(review): "INSERT_INPUT_HERE" is a template placeholder — replace it
# with the real prompt text before running.
response = chat_session.send_message("INSERT_INPUT_HERE")

print(response.text)

Metadata

Metadata

Assignees

No one assigned

    Labels

    type:help — Support-related issues

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions