"""
title: Gemini Safety Settings Off (for OpenAI-compatible APIs, e.g. OpenRouter)
author: Kkoolldd
author_url: Nope
version: 0.1.0
description:
This Filter function modifies the request payload for Google Gemini models
to disable content safety filters. It adds the 'safety_settings' parameter
to the request body with all harm categories set to 'BLOCK_NONE'.
This is intended for use cases where Gemini's default filtering is overly
restrictive. Use responsibly.
Note: This function identifies Gemini models by checking whether the model ID
starts with 'google/gemini'. This prefix may need adjustment depending on how
your specific backend (e.g., OpenRouter, or a LiteLLM proxy) names the
Gemini models.
"""
from pydantic import BaseModel, Field
from typing import Optional, List, Dict, Any # Import Dict and Any for body typing
# Name for logging purposes
NAME = "gemini-safety-settings"
# Safety settings payload that disables content filtering: every harm
# category is set to the 'BLOCK_NONE' threshold.
SAFETY_SETTINGS_OFF = [
{"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
{"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
{"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]
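# Illustrative example (an assumed payload shape, not produced by this file):
# after inlet() runs, an OpenRouter-style chat completion body might look
# roughly like
#
#   {
#       "model": "google/gemini-pro",          # model ID is illustrative
#       "messages": [{"role": "user", "content": "Hello"}],
#       "safety_settings": [...],              # the SAFETY_SETTINGS_OFF list above
#   }
#
# Whether a top-level 'safety_settings' key is honored depends on your backend
# or proxy; check its documentation.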
class Filter:
"""
A Filter function to inject Gemini safety settings into the request payload.
"""
class Valves(BaseModel):
"""
User-configurable settings for the filter.
"""
priority: int = Field(
default=0, description="Filter processing priority level."
)
debug: bool = Field(
default=False,
description="Log the modified request payload to the console for debugging.",
)
# Add other potential valves here if needed in the future
# e.g., enable_filter: bool = Field(default=True, description="Enable/disable this filter.")
# e.g., model_prefixes: List[str] = Field(default=["google/gemini"], description="Prefixes to identify Gemini models.")
def __init__(self):
"""
Initializes the filter and its settings valve.
"""
self.valves = self.Valves()
# You could potentially load valves from a config file or environment variables here
# For now, it just uses the Pydantic defaults.
    def inlet(self, body: Dict[str, Any], __user__: Optional[dict] = None) -> Dict[str, Any]:
        """
        Modifies the request body *before* it is sent to the LLM backend.
        Args:
            body: The request payload dictionary.
                Expected keys include 'model', 'messages', 'stream', etc.
            __user__: A dictionary with information about the user making the request.
        Returns:
            The potentially modified request payload dictionary.
        """
        if not self._is_applicable(__user__, body):
# If the filter shouldn't apply (not Gemini model or other conditions),
# return the original body without modification.
return body
else:
# Apply the modification: Add safety_settings to the top level of the body.
# This assumes the backend (like OpenRouter or a proxy) expects it here.
self._debug(f"Applying safety settings for model: {body.get('model')}")
body["safety_settings"] = SAFETY_SETTINGS_OFF
# Log the modified body if debug mode is enabled
self._debug("Modified body payload:", body)
return body
    def outlet(self, body: Dict[str, Any], __user__: Optional[dict] = None) -> Dict[str, Any]:
        """
        Modifies the response body *after* it is received from the LLM backend.
        This filter does not modify the output, so it returns the body unchanged.
        Args:
            body: The response payload dictionary from the backend.
            __user__: A dictionary with information about the user making the request.
        Returns:
            The original response payload dictionary.
        """
# No changes needed on the way out for this specific filter
return body
def _is_applicable(self, user: Optional[dict], body: Dict[str, Any]) -> bool:
"""
Determines if this filter should be applied to the current request.
Args:
user: The user information dictionary.
body: The request payload dictionary.
Returns:
True if the filter should be applied, False otherwise.
"""
# Basic check: Ensure user info is present and user role is allowed (optional but good practice)
# You might adjust or remove the role check depending on your policy
# if not user or user.get("role", "admin") not in ["user", "admin"]:
# self._debug("User role check failed or user info missing.")
# return False
# Check if the model specified in the body is a Gemini model
model_id = body.get("model", "")
if not model_id:
self._debug("No model ID found in the request body.")
return False
# Check if the model ID starts with 'google/gemini' (case-insensitive)
# This is a common convention for OpenRouter / LiteLLM / etc.
# You might need to adjust this check based on your specific setup.
# Consider making this configurable via Valves if needed.
is_gemini_model = model_id.lower().startswith("google/gemini")
if not is_gemini_model:
self._debug(f"Model '{model_id}' is not identified as a Gemini model.")
# The filter is applicable only if it's a Gemini model
return is_gemini_model
def _debug(self, *args):
"""
Helper function to print debug messages if the debug valve is enabled.
"""
if self.valves.debug:
# Using a distinctive prefix for easier log searching
            print("### Gemini Safety Settings Filter DEBUG ###")
print(f"[{NAME}]", *args)
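
# Minimal local sanity check: an illustrative sketch, not part of the Open WebUI
# runtime. The model IDs below are assumptions used only for demonstration.
if __name__ == "__main__":
    demo_filter = Filter()
    demo_filter.valves.debug = True
    sample_body = {
        "model": "google/gemini-pro",  # illustrative Gemini-style model ID
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": False,
    }
    modified = demo_filter.inlet(sample_body)
    assert modified.get("safety_settings") == SAFETY_SETTINGS_OFF
    # A non-Gemini model should pass through unmodified.
    untouched = demo_filter.inlet({"model": "openai/gpt-4o", "messages": []})
    assert "safety_settings" not in untouched
    print("Sanity check passed.")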