from pydantic import BaseModel, Field
from typing import Optional


class Filter:
    """Open WebUI filter that enables Anthropic prompt caching.

    When toggled on, rewrites the last chat message so its text content
    carries an ephemeral ``cache_control`` block, letting the Anthropic
    API cache the prompt prefix up to that point.
    """

    class Valves(BaseModel):
        # No configurable options yet.
        pass

    def __init__(self):
        self.valves = self.Valves()
        # IMPORTANT: This creates a switch UI in Open WebUI
        self.toggle = True

    async def inlet(
        self, body: dict, __event_emitter__, __user__: Optional[dict] = None
    ) -> dict:
        """Tag the last message's content with an ephemeral cache marker.

        Args:
            body: Request payload; expected to contain a ``"messages"`` list.
            __event_emitter__: Async callable used to surface a status event
                in the Open WebUI front end.
            __user__: Optional user info (unused).

        Returns:
            The (possibly modified) request body.
        """
        await __event_emitter__(
            {
                "type": "status",
                "data": {
                    "description": "Prompt Caching Enabled!",
                    "done": True,
                    "hidden": False,
                },
            }
        )
        print("Prompt Caching Enabled!")

        messages = body.get("messages")
        if not messages:
            # Guard: a missing or empty messages list would otherwise raise
            # KeyError/IndexError; caching is best-effort, so pass through.
            print("Prompt Caching Failed: no messages in body")
            return body

        last_message_item = messages[-1]
        # If content is a plain string, wrap it in Anthropic's structured
        # content-block form and attach the ephemeral cache_control marker.
        if isinstance(last_message_item["content"], str):
            last_message_item["content"] = [
                {
                    "type": "text",
                    "text": last_message_item["content"],
                    "cache_control": {"type": "ephemeral"},
                }
            ]
        else:
            print("Prompt Caching Failed: unsupported content type")
        return body