Stock Reporter v1.1 (requires Open WebUI 0.5.0)
Tool ID: stock_reporter
Creator: @echoa1
Downloads: 938+
A comprehensive stock analysis tool that gathers data from the free Finnhub API and compiles a detailed report.
README
No README available
Tool Code
""" title: Stock Reporter description: A comprehensive stock analysis tool that gathers data from Finnhub API and compiles a detailed report. original_author: Pyotr Growpotkin updated_and_maintianed_by: EchoA1 funding_url: https://github.com/open-webui version: 1.1 license: MIT requirements: finnhub-python required_open_webui_version: 0.5.0 """ import finnhub import requests import aiohttp import asyncio from transformers import AutoTokenizer, AutoModelForSequenceClassification import torch from bs4 import BeautifulSoup from pydantic import BaseModel, Field from datetime import datetime, timedelta import logging from typing import Dict, Any, List, Union, Callable, Awaitable from functools import lru_cache # Configure logging logging.basicConfig( format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", level=logging.DEBUG ) def _format_date(date: datetime) -> str: logging.debug(f"Formatting date: {date}") return date.strftime("%Y-%m-%d") @lru_cache(maxsize=128) def _get_sentiment_model(): logging.debug("Loading sentiment analysis model...") model_name = "ProsusAI/finbert" tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModelForSequenceClassification.from_pretrained(model_name) logging.debug("Model loaded successfully") return tokenizer, model def _get_basic_info(client: finnhub.Client, ticker: str) -> Dict[str, Any]: logging.debug(f"Fetching basic info for ticker: {ticker}") profile = client.company_profile2(symbol=ticker) basic_financials = client.company_basic_financials(ticker, "all") peers = client.company_peers(ticker) logging.debug(f"Fetched basic info: {profile}") return {"profile": profile, "basic_financials": basic_financials, "peers": peers} def _get_current_price(client: finnhub.Client, ticker: str) -> Dict[str, float]: logging.debug(f"Fetching current price for ticker: {ticker}") quote = client.quote(ticker) return { "current_price": quote["c"], "change": quote["dp"], "change_amount": quote["d"], "high": quote["h"], "low": quote["l"], "open": quote["o"], "previous_close": quote["pc"], } def _get_company_news(client: finnhub.Client, ticker: str) -> List[Dict[str, str]]: logging.debug(f"Fetching news for ticker: {ticker}") end_date = datetime.now() start_date = end_date - timedelta(days=7) news = client.company_news(ticker, _format_date(start_date), _format_date(end_date)) logging.debug(f"Fetched {len(news)} news items") news_items = news[:10] # Get the first 10 news items return [{"url": item["url"], "title": item["headline"]} for item in news_items] async def _async_web_scrape(session: aiohttp.ClientSession, url: str) -> str: logging.debug(f"Scraping URL: {url}") jina_url = f"https://r.jina.ai/{url}" headers = { "X-No-Cache": "true", "X-With-Images-Summary": "true", "X-With-Links-Summary": "true", } try: async with session.get(jina_url, headers=headers) as response: response.raise_for_status() content = await response.text() logging.debug(f"Fetched content from {url}") links_section_start = content.rfind("Images:") if links_section_start != -1: content = content[:links_section_start].strip() return content except aiohttp.ClientError as e: logging.error(f"Error scraping web page: {str(e)}") return f"Error scraping web page: {str(e)}" async def _async_sentiment_analysis(content: str) -> Dict[str, Union[str, float]]: logging.debug(f"Performing sentiment analysis on content snippet: {content[:30]}") tokenizer, model = _get_sentiment_model() inputs = tokenizer(content, return_tensors="pt", truncation=True, max_length=512) with torch.no_grad(): outputs = 
model(**inputs) probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1) sentiment_scores = probabilities.tolist()[0] sentiments = ["Neutral", "Positive", "Negative"] sentiment = sentiments[sentiment_scores.index(max(sentiment_scores))] confidence = max(sentiment_scores) logging.debug( f"Sentiment analysis result: sentiment={sentiment}, confidence={confidence}" ) return {"sentiment": sentiment, "confidence": confidence} async def _async_gather_stock_data( client: finnhub.Client, ticker: str ) -> Dict[str, Any]: logging.debug(f"Gathering data for ticker: {ticker}") basic_info = _get_basic_info(client, ticker) current_price = _get_current_price(client, ticker) news_items = _get_company_news(client, ticker) async with aiohttp.ClientSession() as session: scrape_tasks = [_async_web_scrape(session, item["url"]) for item in news_items] contents = await asyncio.gather(*scrape_tasks) sentiment_tasks = [ _async_sentiment_analysis(content) for content in contents if content ] sentiments = await asyncio.gather(*sentiment_tasks) sentiment_results = [ { "url": news_items[i]["url"], "title": news_items[i]["title"], "sentiment": sentiment["sentiment"], "confidence": sentiment["confidence"], } for i, sentiment in enumerate(sentiments) if contents[i] ] logging.debug("Data gathering completed.") return { "basic_info": basic_info, "current_price": current_price, "sentiments": sentiment_results, } def _compile_report(data: Dict[str, Any]) -> str: logging.debug("Starting to compile the stock report.") try: profile = data["basic_info"]["profile"] financials = data["basic_info"]["basic_financials"] metrics = financials.get("metric", {}) def safe_metric(metric_name, default="Data Unavailable"): value = metrics.get(metric_name) if value is None: logging.warning( f"Metric {metric_name} is None, using default '{default}'." 
) return default return value peers = data["basic_info"]["peers"] price_data = data["current_price"] report = f""" Comprehensive Stock Analysis Report for {profile['name']} ({profile['ticker']}) Basic Information: Industry: {profile.get('finnhubIndustry', 'N/A')} Market Cap: {profile.get('marketCapitalization', 'N/A'):,.0f} M USD Share Outstanding: {profile.get('shareOutstanding', 'N/A'):,.0f} M Country: {profile.get('country', 'N/A')} Exchange: {profile.get('exchange', 'N/A')} IPO Date: {profile.get('ipo', 'N/A')} Current Trading Information: Current Price: ${price_data['current_price']:.2f} Daily Change: {price_data['change']:.2f}% (${price_data['change_amount']:.2f}) Day's Range: ${price_data['low']:.2f} - ${price_data['high']:.2f} Open: ${price_data['open']:.2f} Previous Close: ${price_data['previous_close']:.2f} Key Financial Metrics: 52 Week High: ${safe_metric('52WeekHigh')} 52 Week Low: ${safe_metric('52WeekLow')} """ def add_metric_with_interpretation( metric_name, metric_value, high_threshold, low_threshold, interpretations ): if metric_value == "Data Unavailable": report_line = f"{metric_name}: Data Unavailable\n" else: interpretation = "" metric_value_float = float(metric_value) if metric_value_float > high_threshold: interpretation = interpretations["high"] elif low_threshold <= metric_value_float <= high_threshold: interpretation = interpretations["moderate"] else: interpretation = interpretations["low"] report_line = f"{metric_name}: {metric_value}\n" if interpretation: report_line += f"- Interpretation: {interpretation}\n" return report_line report += add_metric_with_interpretation( "P/E Ratio", safe_metric("peBasicExclExtraTTM"), 25, 15, { "high": "High (may be overvalued)", "moderate": "Moderate", "low": "Low (may be undervalued)", }, ) report += add_metric_with_interpretation( "P/B Ratio", safe_metric("pbQuarterly"), 3, 1, {"high": "High", "moderate": "Moderate", "low": "Low"}, ) report += add_metric_with_interpretation( "Return on Equity", safe_metric("roeRfy"), 20, 10, {"high": "Excellent", "moderate": "Good", "low": "Poor"}, ) report += add_metric_with_interpretation( "Net Profit Margin", safe_metric("netProfitMarginTTM"), 20, 5, {"high": "Excellent", "moderate": "Good", "low": "Poor"}, ) report += add_metric_with_interpretation( "Current Ratio", safe_metric("currentRatioQuarterly"), 2, 1, {"high": "Strong", "moderate": "Healthy", "low": "Poor"}, ) report += add_metric_with_interpretation( "Debt-to-Equity Ratio", safe_metric("totalDebtToEquityQuarterly"), 1, 0.5, { "high": "High leverage", "moderate": "Moderate leverage", "low": "Low leverage", }, ) report += add_metric_with_interpretation( "Dividend Yield", safe_metric("dividendYieldIndicatedAnnual"), 4, 2, {"high": "High yield", "moderate": "Moderate yield", "low": "Low yield"}, ) report += f""" Peer Companies: {', '.join(peers[:5])} Recent News and Sentiment Analysis: """ for item in data["sentiments"]: report += f""" Title: {item['title']} URL: {item['url']} Sentiment Analysis: {item['sentiment']} (Confidence: {item['confidence']:.2f}) """ logging.debug("Report compilation successful.") except Exception as e: logging.error(f"Error while compiling report: {e}") raise return report class Tools: class Valves(BaseModel): FINNHUB_API_KEY: str = Field(default="") def __init__(self): self.valves = self.Valves() self.log = logging.getLogger(__name__) self.log.setLevel(logging.DEBUG) self.log.debug("Tools instance initialized.") async def compile_stock_report( self, ticker: str, user: dict = {}, __event_emitter__: 
Callable[[Any], Awaitable[None]] = None, ) -> str: self.log.debug(f"Starting analysis for {ticker}.") await __event_emitter__( { "type": "status", "data": {"description": "Initializing client", "done": False}, } ) self.client = finnhub.Client(api_key=self.valves.FINNHUB_API_KEY) await __event_emitter__( { "type": "status", "data": {"description": "Retrieving stock data", "done": False}, } ) data = await _async_gather_stock_data(self.client, ticker) await __event_emitter__( { "type": "status", "data": {"description": "Compiling stock report", "done": False}, } ) report = _compile_report(data) last_price = data["current_price"]["current_price"] self.log.debug(f"Finished creating report - latest price: {last_price}") await __event_emitter__( { "type": "status", "data": { "description": "Finished creating report - latest price: " + str(last_price), "done": True, }, } ) return report
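Usage sketch

Below is a minimal, illustrative sketch of how the tool could be exercised outside Open WebUI, for example from a standalone script. It assumes finnhub-python, aiohttp, torch, and transformers are installed, that the tool code above is importable, and that a valid Finnhub API key is placed in the FINNHUB_API_KEY valve (the value shown is a placeholder). The _print_status coroutine is a hypothetical stand-in for the __event_emitter__ callback that Open WebUI normally injects.

import asyncio


async def _print_status(event: dict) -> None:
    # Stand-in for Open WebUI's __event_emitter__: just print each status update.
    print(event["data"]["description"])


async def main() -> None:
    tool = Tools()
    # Placeholder key, not a real credential.
    tool.valves.FINNHUB_API_KEY = "your-finnhub-api-key"
    report = await tool.compile_stock_report("AAPL", __event_emitter__=_print_status)
    print(report)


if __name__ == "__main__":
    asyncio.run(main())

Note that compile_stock_report awaits __event_emitter__ unconditionally, so some awaitable callback must be supplied when running the tool this way; inside Open WebUI the emitter is provided automatically.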