"""
title: Open-WebUI Reasoning Manifold
version: 0.5.1
- [x] Updated to work on OWUI 0.4.x
- [ ] Updated to work on OWUI 0.5.x
- [x] Wrapper functions to support new signatures
- [ ] Fix query never ending (user must press stop — issue confirmed by other function authors)
- [x] OpenAI streaming
- [x] Ollama streaming
- [x] Thought expansion
- [x] LLM summary generation
- [x] Advanced parsing logic
- [x] Optimised emission timing
- [x] Valve to append system prompt to every request
- [ ] Support summary models whose IDs have a connection prefix, e.g. 'ollama.' (OWUI bug? possibly because the bypass filter is used)
"""
import os
import json
import time
import asyncio
import importlib
import re
import aiohttp
from datetime import datetime
from typing import List, Union, Optional, Callable, Awaitable, Dict, Any, Tuple
from pydantic import BaseModel, Field
from starlette.requests import Request
from starlette.responses import StreamingResponse
from open_webui.config import TASK_MODEL, TASK_MODEL_EXTERNAL
import logging
# Sentinel default for the summary-model valve: signals that the admin has not
# yet configured a real task-model ID for thought-summary generation.
THOUGHT_SUMMARY_PLACEHOLDER: str = "your-task-model-id-goes-here"
SYSTEM_MESSAGE_DEFAULT = "Respond using and