|
|
import gradio as gr |
|
|
import wikipedia |
|
|
from langchain_openai import ChatOpenAI |
|
|
from langchain.memory import ConversationBufferMemory |
|
|
from langchain.agents import initialize_agent, AgentType |
|
|
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder |
|
|
from langchain.tools import Tool |
|
|
|
|
|
|
|
|
# Tavily is an optional dependency: record whether it can be imported so the
# rest of the module can check `tavily_available` before using it.
try:
    from tavily import TavilyClient
except ImportError:
    TavilyClient = None
    tavily_available = False
else:
    tavily_available = True
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def create_your_own(query: str) -> str:
    """Example custom tool: return *query* with its characters reversed."""
    return "".join(reversed(query))
|
|
|
|
|
def get_current_temperature(query: str) -> str:
    """Return a canned weather report.

    Stub tool: *query* is ignored and the same fixed string is always
    returned. Fix: the original contained a mojibake degree sign
    ("75ยฐF"); it is repaired to a proper "°".
    """
    return "It's sunny and 75°F."
|
|
|
|
|
def search_wikipedia(query: str) -> str:
    """Look up *query* on Wikipedia and return a two-sentence summary.

    On an ambiguous title, returns a hint listing up to five candidate
    pages; when no page matches, returns a fixed not-found message.
    """
    try:
        summary = wikipedia.summary(query, sentences=2)
    except wikipedia.exceptions.DisambiguationError as err:
        candidates = ', '.join(err.options[:5])
        return f"Multiple results found: {candidates}"
    except wikipedia.exceptions.PageError:
        return "No relevant Wikipedia page found."
    return summary
|
|
|
|
|
|
|
|
# Tool registry handed to the agent: each entry wraps one plain function
# defined above with the name/description the LLM uses to pick it.
tools = [
    Tool(
        name="Temperature",
        func=get_current_temperature,
        description="Get current temperature",
    ),
    Tool(
        name="Search Wikipedia",
        func=search_wikipedia,
        description="Search Wikipedia",
    ),
    Tool(
        name="Create Your Own",
        func=create_your_own,
        description="Custom tool for processing input",
    ),
]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class cbfs:
    """Conversational chatbot wrapping a LangChain ReAct agent.

    Owns the LLM, an optional Tavily client, conversation memory, and the
    agent executor. ``convchain`` is the single public entry point for one
    conversational turn.
    """

    def __init__(self, tools, openai_key: str, tavily_key: str = None):
        """Build the model, memory, and agent.

        Args:
            tools: list of langchain ``Tool`` objects the agent may call.
            openai_key: OpenAI API key (required).
            tavily_key: optional Tavily API key; only used when the
                ``tavily`` package is importable.

        Raises:
            ValueError: if ``openai_key`` is empty/falsy.
        """
        if not openai_key:
            # Fix: repaired mojibake warning emoji in the message.
            raise ValueError("⚠️ OpenAI API key is required.")

        self.model = ChatOpenAI(temperature=0, openai_api_key=openai_key)

        # Tavily is optional: only instantiate when both the package and a
        # key are present; otherwise leave it as None.
        self.tavily = None
        if tavily_available and tavily_key:
            self.tavily = TavilyClient(api_key=tavily_key)

        self.memory = ConversationBufferMemory(
            return_messages=True,
            memory_key="chat_history",
            ai_prefix="Assistant",
        )

        # NOTE(review): this prompt is built but never passed to the agent —
        # ZERO_SHOT_REACT_DESCRIPTION constructs its own internal prompt.
        # Kept for backward compatibility; confirm before removing.
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful but sassy assistant. Remember what the user tells you in the conversation."),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ])

        self.chain = initialize_agent(
            tools=tools,
            llm=self.model,
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
            memory=self.memory,
            handle_parsing_errors=True,
        )

    def convchain(self, query: str) -> str:
        """Run one conversational turn and return the agent's reply.

        Falls back to a direct LLM call when the agent produces no usable
        output. Returns an error string (never raises) on failure.
        """
        if not query:
            return "Please enter a query."

        try:
            result = self.chain.invoke({"input": query})
            print("🔍 Raw agent result:", result)

            # Agent executors normally return {"output": ...}; cope with
            # other shapes defensively.
            if isinstance(result, dict):
                response = (
                    result.get("output")
                    or result.get("output_text")
                    or str(result)
                )
            else:
                response = str(result)

            if not response.strip():
                print("⚠️ No structured output, falling back to direct LLM call")
                resp = self.model.invoke(query)
                response = getattr(resp, "content", str(resp))
                # Fix: only save explicitly on the fallback path. The agent
                # executor was constructed with memory=self.memory, so it
                # already records successful turns itself; the original
                # unconditional save_context duplicated every exchange.
                self.memory.save_context({"input": query}, {"output": response})

            return response

        except Exception as e:
            # Surface the failure to the UI rather than raising.
            return f"❌ Error: {str(e)}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Gradio UI: key entry, explicit initialization, then a simple query/response
# loop backed by the cbfs agent stored in session state.
with gr.Blocks() as demo:
    with gr.Row():
        openai_key = gr.Textbox(
            label="🔑 OpenAI API Key",
            type="password",
            placeholder="Paste your key",
        )
        tavily_key = gr.Textbox(
            label="🔑 Tavily API Key (optional)",
            type="password",
            placeholder="Paste your Tavily key",
        )

    # Holds the initialized cbfs instance; None until "Initialize" succeeds.
    chatbot_state = gr.State(None)

    with gr.Row():
        inp = gr.Textbox(placeholder="Enter text here…", label="User Input")
        output = gr.Textbox(placeholder="Response...", label="ChatBot Output", interactive=False)

    status = gr.Textbox(label="Status", interactive=False)

    def init_chatbot(openai_key, tavily_key):
        """Build the chatbot from the supplied keys; return (bot, status msg)."""
        try:
            bot = cbfs(tools, openai_key, tavily_key)
            # Fix: the original success string contained a mojibake emoji
            # with an embedded line break, which split the string literal
            # across two lines — a SyntaxError.
            return bot, "✅ Chatbot initialized successfully!"
        except Exception as e:
            return None, f"❌ Error: {str(e)}"

    init_btn = gr.Button("Initialize Chatbot")
    init_btn.click(
        fn=init_chatbot,
        inputs=[openai_key, tavily_key],
        outputs=[chatbot_state, status],
    )

    def process_query(query, chatbot):
        """Route a user query to the initialized bot, or prompt for init."""
        if chatbot is None:
            return "⚠️ Please initialize the chatbot first by entering your API keys."
        return chatbot.convchain(query)

    inp.submit(process_query, inputs=[inp, chatbot_state], outputs=output)


demo.launch()
|
|
|
|
|
|
|
|
|
|
|
|