@shra1v32 · 8 months ago
GeminiAPI
Last Updated: 8 months ago
Created: 8 months ago
Function: pipe
Name: GeminiAPI
Downloads: 110+
Saves: 0+
Description: A manifold pipe that connects Open WebUI to Google's Gemini models through the Generative Language REST API. It lists the available Gemini models, supports streaming and non-streaming generation, accepts inline (base64) images, and exposes valves for the API key and permissive safety settings.
Function Code
import os
import json
import requests
from pydantic import BaseModel, Field
from typing import List, Union, Iterator

# Set DEBUG to True to enable detailed logging
DEBUG = False


class Pipe:
    class Valves(BaseModel):
        GOOGLE_API_KEY: str = Field(default="")
        USE_PERMISSIVE_SAFETY: bool = Field(default=False)

    def __init__(self):
        self.id = "google_genai"
        self.type = "manifold"
        self.name = "Google: "
        self.valves = self.Valves(
            **{
                "GOOGLE_API_KEY": os.getenv("GOOGLE_API_KEY", ""),
                "USE_PERMISSIVE_SAFETY": False,
            }
        )

    def get_google_models(self):
        """
        Retrieve a list of available Gemini models using the REST API.
        Returns a list of dictionaries with model IDs and names.
        """
        if not self.valves.GOOGLE_API_KEY:
            return [
                {
                    "id": "error",
                    "name": "GOOGLE_API_KEY is not set. Please update the API Key in the valves.",
                }
            ]
        try:
            url = f"https://generativelanguage.googleapis.com/v1beta/models?key={self.valves.GOOGLE_API_KEY}"
            response = requests.get(url)
            response.raise_for_status()
            data = response.json()
            models = data.get("models", [])
            # Keep only models that support generateContent
            return [
                {
                    "id": model["name"][7:],  # Remove the "models/" prefix
                    "name": model["displayName"],
                }
                for model in models
                if "generateContent" in model.get("supportedGenerationMethods", [])
                and model["name"].startswith("models/")
            ]
        except Exception as e:
            if DEBUG:
                print(f"Error fetching Google models: {e}")
            return [
                {"id": "error", "name": f"Could not fetch models from Google: {str(e)}"}
            ]

    def pipes(self) -> List[dict]:
        """
        Wrapper method to fetch available models.
        """
        return self.get_google_models()

    def pipe(self, body: dict) -> Union[str, Iterator[str]]:
        """
        Generate content using the specified Gemini model via the REST API.
        Supports both streaming and non-streaming responses.
        """
        if not self.valves.GOOGLE_API_KEY:
            return "Error: GOOGLE_API_KEY is not set"

        try:
            # Extract and clean the model ID
            model_id = body["model"]
            if model_id.startswith("google_genai."):
                model_id = model_id[len("google_genai."):]
            model_id = model_id.lstrip(".")
            if not model_id.startswith("gemini-"):
                return f"Error: Invalid model name format: {model_id}"

            messages = body["messages"]
            stream = body.get("stream", False)

            # Extract the system message if present
            system_message = next(
                (msg["content"] for msg in messages if msg["role"] == "system"), None
            )

            # Build the Gemini "contents" list from the chat messages
            contents = []
            for message in messages:
                if message["role"] == "system":
                    continue
                if isinstance(message.get("content"), list):
                    # Multimodal message: collect text and image parts
                    parts = []
                    for content in message["content"]:
                        if content["type"] == "text":
                            parts.append({"text": content["text"]})
                        elif content["type"] == "image_url":
                            image_url = content["image_url"]["url"]
                            if image_url.startswith("data:image"):
                                # Base64 data URL: send as inline_data
                                image_data = image_url.split(",")[1]
                                parts.append(
                                    {
                                        "inline_data": {
                                            "mime_type": "image/jpeg",
                                            "data": image_data,
                                        }
                                    }
                                )
                            else:
                                # Remote image URLs are passed through as-is;
                                # the REST API may not accept this part shape.
                                parts.append({"image_url": image_url})
                    contents.append({"role": message["role"], "parts": parts})
                else:
                    # Plain text message; Gemini expects "user" or "model" roles
                    contents.append(
                        {
                            "role": "user" if message["role"] == "user" else "model",
                            "parts": [{"text": message["content"]}],
                        }
                    )

            # Configure generation parameters
            generation_config = {
                "temperature": body.get("temperature", 0.7),
                "topP": body.get("topP", 0.9),
                "topK": body.get("topK", 40),
                "maxOutputTokens": body.get("maxTokens", 8192),
                "stopSequences": body.get("stop", []),
            }

            # Set safety settings
            if self.valves.USE_PERMISSIVE_SAFETY:
                safety_settings = [
                    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
                    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
                    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
                    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
                ]
            else:
                safety_settings = body.get("safety_settings", [])

            # Construct the request body
            request_body = {
                "contents": contents,
                "generationConfig": generation_config,
                "safetySettings": safety_settings,
            }

            # Handle the system message based on the model
            if "gemini-1.5" in model_id:
                # Gemini 1.5 models support a dedicated system instruction
                if system_message:
                    request_body["systemInstruction"] = {
                        "parts": [{"text": system_message}]
                    }
            else:
                # Older models: prepend the system message as a user turn
                if system_message:
                    contents.insert(
                        0,
                        {"role": "user", "parts": [{"text": f"System: {system_message}"}]},
                    )

            base_url = "https://generativelanguage.googleapis.com/v1beta/models"

            if stream:
                # Streaming uses the streamGenerateContent endpoint with
                # server-sent events (alt=sse); each event carries one JSON chunk.
                url = (
                    f"{base_url}/{model_id}:streamGenerateContent"
                    f"?alt=sse&key={self.valves.GOOGLE_API_KEY}"
                )

                def stream_generator():
                    response = requests.post(url, json=request_body, stream=True)
                    response.raise_for_status()
                    for line in response.iter_lines():
                        if not line:
                            continue
                        decoded = line.decode("utf-8")
                        if not decoded.startswith("data: "):
                            continue
                        chunk = json.loads(decoded[len("data: "):])
                        if "candidates" in chunk and chunk["candidates"]:
                            parts = (
                                chunk["candidates"][0]
                                .get("content", {})
                                .get("parts", [])
                            )
                            if parts and "text" in parts[0]:
                                yield parts[0]["text"]

                return stream_generator()
            else:
                # Non-streaming request
                url = f"{base_url}/{model_id}:generateContent?key={self.valves.GOOGLE_API_KEY}"
                response = requests.post(url, json=request_body)
                response.raise_for_status()
                data = response.json()
                if "candidates" in data and data["candidates"]:
                    return data["candidates"][0]["content"]["parts"][0]["text"]
                else:
                    return "Error: No candidates returned"
        except Exception as e:
            if DEBUG:
                print(f"Error in pipe method: {e}")
            return f"Error: {e}"