Whitepaper
Docs
Sign In
Function
Function
pipe
v1.0.0
siliconflow
Function ID
siliconflow
Creator
@lb7666
Downloads
238+
Call the SiliconFlow API from Open WebUI
Get
README
No README available
Function Code
Show
""" title: Siliconflow Manifold Pipe authors: LB7666 author_url: https://github.com/acking-you funding_url: https://github.com/open-webui version: 1.0.0 required_open_webui_version: 0.3.32 license: MIT """ import os import requests import json import time from typing import List, Union, Generator, Iterator from pydantic import BaseModel, Field from open_webui.utils.misc import pop_system_message class Pipe: class Valves(BaseModel): API_KEY: str = Field(default="") def __init__(self): self.type = "manifold" self.id = "siliconflow" self.name = "siliconflow/" self.valves = self.Valves( **{"API_KEY": os.getenv("API_KEY", "")} ) pass # see: https://docs.siliconflow.cn/api-reference/chat-completions/chat-completions#body-model def get_deepseek_models(self): return [ {"id": "deepseek-ai/DeepSeek-V2.5", "name": "deepseek-ai/DeepSeek-V2.5"}, {"id": "deepseek-ai/DeepSeek-R1", "name": "deepseek-ai/DeepSeek-R1"}, {"id": "deepseek-ai/DeepSeek-V3", "name": "deepseek-ai/DeepSeek-V3"}, {"id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B", "name": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B"}, {"id": "eepseek-ai/DeepSeek-R1-Distill-Qwen-32B", "name": "eepseek-ai/DeepSeek-R1-Distill-Qwen-32B"}, {"id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B", "name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B"}, {"id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "name": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"}, {"id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B", "name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"}, {"id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B", "name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"}, {"id": "Pro/deepseek-ai/DeepSeek-R1-Distill-Llama-8B", "name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Llama-8B"}, {"id": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B", "name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"}, {"id": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B", "name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"}, {"id": "meta-llama/Llama-3.3-70B-Instruct", "name": 
"meta-llama/Llama-3.3-70B-Instruct"}, {"id": "AIDC-AI/Marco-o1", "name": "AIDC-AI/Marco-o1"}, {"id": "Qwen/Qwen2.5-72B-Instruct-128K", "name": "Qwen/Qwen2.5-72B-Instruct-128K"}, {"id": "Qwen/Qwen2.5-72B-Instruct", "name": "Qwen/Qwen2.5-72B-Instruct"}, {"id": "Qwen/Qwen2.5-32B-Instruct", "name": "Qwen/Qwen2.5-32B-Instruct"}, {"id": "Qwen/Qwen2.5-14B-Instruct", "name": "Qwen/Qwen2.5-14B-Instruct"}, {"id": "Qwen/Qwen2.5-7B-Instruct", "name": "Qwen/Qwen2.5-7B-Instruct"}, {"id": "Qwen/Qwen2.5-Coder-32B-Instruct", "name": "Qwen/Qwen2.5-Coder-32B-Instruct"}, {"id": "Qwen/Qwen2.5-Coder-7B-Instruct", "name": "Qwen/Qwen2.5-Coder-7B-Instruct"}, {"id": "Qwen/Qwen2-7B-Instruct", "name": "Qwen/Qwen2-7B-Instruct"}, {"id": "Qwen/Qwen2-1.5B-Instruct", "name": "Qwen/Qwen2-1.5B-Instruct"}, {"id": "Qwen/QwQ-32B-Preview", "name": "Qwen/QwQ-32B-Preview"}, {"id": "TeleAI/TeleChat2", "name": "TeleAI/TeleChat2"}, {"id": "01-ai/Yi-1.5-34B-Chat-16K", "name": "01-ai/Yi-1.5-34B-Chat-16K"}, {"id": "01-ai/Yi-1.5-9B-Chat-16K", "name": "01-ai/Yi-1.5-9B-Chat-16K"}, {"id": "01-ai/Yi-1.5-6B-Chat", "name": "01-ai/Yi-1.5-6B-Chat"}, {"id": "THUDM/glm-4-9b-chat", "name": "THUDM/glm-4-9b-chat"}, {"id": "Vendor-A/Qwen/Qwen2.5-72B-Instruct", "name": "Vendor-A/Qwen/Qwen2.5-72B-Instruct"}, {"id": "internlm/internlm2_5-7b-chat", "name": "internlm/internlm2_5-7b-chat"}, {"id": "internlm/internlm2_5-20b-chat", "name": "internlm/internlm2_5-20b-chat"}, {"id": "nvidia/Llama-3.1-Nemotron-70B-Instruct", "name": "nvidia/Llama-3.1-Nemotron-70B-Instruct"}, {"id": "meta-llama/Meta-Llama-3.1-405B-Instruct", "name": "meta-llama/Meta-Llama-3.1-405B-Instruct"}, {"id": "meta-llama/Meta-Llama-3.1-70B-Instruct", "name": "meta-llama/Meta-Llama-3.1-70B-Instruct"}, {"id": "meta-llama/Meta-Llama-3.1-8B-Instruct", "name": "meta-llama/Meta-Llama-3.1-8B-Instruct"}, {"id": "google/gemma-2-27b-it", "name": "google/gemma-2-27b-it"}, {"id": "google/gemma-2-9b-it", "name": "google/gemma-2-9b-it"}, {"id": "Pro/Qwen/Qwen2.5-7B-Instruct", 
"name": "Pro/Qwen/Qwen2.5-7B-Instruct"}, {"id": "Pro/Qwen/Qwen2-7B-Instruct", "name": "Pro/Qwen/Qwen2-7B-Instruct"}, {"id": "Pro/Qwen/Qwen2-1.5B-Instruct", "name": "Pro/Qwen/Qwen2-1.5B-Instruct"}, {"id": "Pro/THUDM/chatglm3-6b", "name": "Pro/THUDM/chatglm3-6b"}, {"id": "Pro/THUDM/glm-4-9b-chat", "name": "Pro/THUDM/glm-4-9b-chat"}, {"id": "Pro/meta-llama/Meta-Llama-3.1-8B-Instruct", "name": "Pro/meta-llama/Meta-Llama-3.1-8B-Instruct"}, {"id": "Pro/google/gemma-2-9b-it", "name": "Pro/google/gemma-2-9b-it"} ] def pipes(self) -> List[dict]: return self.get_deepseek_models() def pipe(self, body: dict) -> Union[str, Generator, Iterator]: system_message, messages = pop_system_message(body["messages"]) processed_messages = [] for message in messages: processed_content = "" if isinstance(message.get("content"), list): for item in message["content"]: if item["type"] == "text": processed_content = item["text"] else: processed_content = message.get("content", "") processed_messages.append( {"role": message["role"], "content": processed_content} ) # Ensure the system_message is coerced to a string payload = { "model": body["model"][body["model"].find(".") + 1 :], "messages": processed_messages, "stream": body.get("stream", False), } headers = {} headers["Authorization"] = f"Bearer {self.valves.API_KEY}" headers["Content-Type"] = "application/json" url = "https://api.siliconflow.cn/v1/chat/completions" try: if body.get("stream", False): return self.stream_response(url, headers, payload) else: return self.non_stream_response(url, headers, payload) except requests.exceptions.RequestException as e: print(f"Request failed: {e}") return f"Error: Request failed: {e}" except Exception as e: print(f"Error in pipe method: {e}") return f"Error1: {e}" def stream_response(self, url, headers, payload): # Remove unnecessary fields from the payload unnecessary_fields = ["user", "chat_id", "title"] for field in unnecessary_fields: payload.pop(field, None) try: response = requests.post( url=url, 
json=payload, headers=headers, stream=True, ) if response.status_code != 200: raise Exception( f"HTTP Error {payload} {response.status_code}: {response.text}" ) # Process the streamed response for chunk in response.iter_content(chunk_size=None): if chunk: yield chunk.decode("utf-8") # Delay to avoid overwhelming the client time.sleep(0.01) except requests.exceptions.RequestException as e: print(f"Stream request failed: {e}") yield f"Error: {e}" except Exception as e: print(f"Error in stream_response method: {e}") yield f"Error: {e}" def non_stream_response(self, url, headers, payload): try: response = requests.post( url=url, headers=headers, json=payload, timeout=(3.05, 60) ) if response.status_code != 200: raise Exception(f"HTTP Error: {response.status_code}: {response.text}") return response.json() except requests.exceptions.RequestException as e: print(f"Failed non-stream request: {e}") return f"Error5: {response.content}" except Exception as e: print(f"Error in non_stream_response method: {e}") return f"Error6: {e}"