Function
filter
v0.1
Token Clip Filter
A filter that truncates chat history to the most recent messages fitting within a 7000-token limit, while always preserving the system prompt, for optimal context management.
Function ID
token_clip_filter
Creator
@houxin
Downloads
56+

Function Content
python
"""
title: Token Clip Filter
author: houxin
author_url: https://github.com/hx173149
funding_url: https://github.com/hx173149
version: 0.1
"""

from pydantic import BaseModel, Field
from typing import Optional

import tiktoken


class Filter:
    """Open WebUI inlet filter that trims chat history to a token budget.

    Keeps the first system message (if any) plus as many of the most recent
    non-system messages as fit within ``valves.n_token_limit`` tokens, so the
    request stays inside the model's context window.
    """

    class Valves(BaseModel):
        priority: int = Field(
            default=0, description="Priority level for the filter operations."
        )
        n_token_limit: int = Field(
            default=7000, description="Number of token limit to retain."
        )

    class UserValves(BaseModel):
        pass

    def __init__(self):
        self.valves = self.Valves()
        # cl100k_base is the GPT-3.5/GPT-4 family tokenizer; counts are
        # approximate for models that use a different vocabulary.
        self.encoding = tiktoken.get_encoding("cl100k_base")

    def _token_count(self, content) -> int:
        """Return the token length of a message's content.

        Non-string content (``None``, or the list payloads used by
        multimodal messages) cannot be fed to ``encode`` directly; count
        it as 0 instead of crashing the whole request.
        """
        if isinstance(content, str):
            return len(self.encoding.encode(content))
        return 0

    def inlet(self, body: dict, __user__: Optional[dict] = None) -> dict:
        """Truncate ``body["messages"]`` to the configured token budget.

        Walks the non-system messages newest-first, keeping each one until
        adding another would exceed ``n_token_limit`` (the system prompt's
        tokens count against the budget). Mutates and returns ``body``.
        """
        messages = body.get("messages") or []

        sys_msgs = [m for m in messages if m.get("role") == "system"]
        sys_msg = sys_msgs[0] if sys_msgs else None
        token_len = self._token_count(sys_msg.get("content")) if sys_msg else 0

        chat_msgs = [m for m in messages if m.get("role") != "system"]
        kept = []
        # Newest -> oldest; stop at the first message that would overflow
        # the budget (everything older than it is dropped as well).
        for msg in reversed(chat_msgs):
            # Encode once per message (v0.1 encoded every message twice:
            # once for the limit check, once to accumulate).
            msg_tokens = self._token_count(msg.get("content"))
            if token_len + msg_tokens > self.valves.n_token_limit:
                break
            kept.append(msg)
            token_len += msg_tokens
        kept.reverse()

        # Bug fix vs. v0.1: only re-attach a system message if the
        # conversation actually had one — do not inject an empty
        # {"role": "system", "content": ""} into the request.
        body["messages"] = ([sys_msg] if sys_msg else []) + kept
        return body