# ConvoAI / app.py — Hugging Face Space by dlaima (commit c692360, verified)
import gradio as gr
import wikipedia
from langchain_openai import ChatOpenAI  # ✅ correct modern import
from langchain.memory import ConversationBufferMemory
from langchain.agents import initialize_agent, AgentType
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.tools import Tool
# Optional dependency: the Tavily search client. The app still works
# without it; `tavily_available` records whether the import succeeded.
try:
    from tavily import TavilyClient
except ImportError:
    TavilyClient = None
    tavily_available = False
else:
    tavily_available = True
# ----------------------
# Tools
# ----------------------
def create_your_own(query: str) -> str:
    """Placeholder custom tool: echoes the input text reversed."""
    return "".join(reversed(query))
def get_current_temperature(query: str) -> str:
    """Stub weather tool: always reports the same fixed conditions.

    The *query* argument is accepted (LangChain calls tools with the user
    input) but intentionally ignored — no real weather API is wired up.
    """
    # FIX: restore the proper degree sign; the literal had been corrupted
    # to mojibake ("ยฐ") by a bad UTF-8/latin-1 encoding round trip.
    return "It's sunny and 75°F."
def search_wikipedia(query: str) -> str:
    """Look up *query* on Wikipedia and return a two-sentence summary.

    Ambiguous titles yield a short list of candidate pages; a missing
    page yields a fixed not-found message instead of raising.
    """
    try:
        return wikipedia.summary(query, sentences=2)
    except wikipedia.exceptions.DisambiguationError as err:
        candidates = ", ".join(err.options[:5])
        return f"Multiple results found: {candidates}"
    except wikipedia.exceptions.PageError:
        return "No relevant Wikipedia page found."
# Expose the callables above to the agent as LangChain tools.
tools = [
    Tool(
        name="Temperature",
        func=get_current_temperature,
        description="Get current temperature",
    ),
    Tool(
        name="Search Wikipedia",
        func=search_wikipedia,
        description="Search Wikipedia",
    ),
    Tool(
        name="Create Your Own",
        func=create_your_own,
        description="Custom tool for processing input",
    ),
]
# ----------------------
# Chatbot class
# ----------------------
class cbfs:
    """Conversational chatbot wrapping a LangChain agent with buffer memory.

    Parameters
    ----------
    tools : list[Tool]
        LangChain tools made available to the agent.
    openai_key : str
        OpenAI API key; required, raises ``ValueError`` if falsy.
    tavily_key : str, optional
        Tavily API key; enables the optional Tavily client only when the
        ``tavily`` package imported successfully.
    """

    def __init__(self, tools, openai_key: str, tavily_key: str = None):
        if not openai_key:
            raise ValueError("⚠️ OpenAI API key is required.")

        # temperature=0 for deterministic, tool-friendly responses.
        self.model = ChatOpenAI(temperature=0, openai_api_key=openai_key)

        # Tavily client is optional: needs both the package and a key.
        self.tavily = None
        if tavily_available and tavily_key:
            self.tavily = TavilyClient(api_key=tavily_key)

        # Conversation memory, shared with the agent executor below.
        self.memory = ConversationBufferMemory(
            return_messages=True,
            memory_key="chat_history",
            ai_prefix="Assistant",
        )

        # NOTE(review): kept for interface compatibility, but this prompt is
        # NOT consumed by initialize_agent(), which builds its own prompt.
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful but sassy assistant. Remember what the user tells you in the conversation."),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ])

        # FIX: use the conversational agent type so the "chat_history"
        # memory is actually injected into the prompt. The previous
        # ZERO_SHOT_REACT_DESCRIPTION prompt has no chat_history slot, so
        # the buffer memory never reached the LLM and the bot could not
        # "remember what the user tells you".
        self.chain = initialize_agent(
            tools=tools,
            llm=self.model,
            agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION,
            verbose=True,
            memory=self.memory,
            handle_parsing_errors=True,
        )

    def convchain(self, query: str) -> str:
        """Run one conversational turn and return the agent's reply.

        Returns a user-facing string in every case: the agent output, a
        direct-LLM fallback answer, or an error message (never raises).
        """
        if not query:
            return "Please enter a query."
        try:
            result = self.chain.invoke({"input": query})
            print("🔍 Raw agent result:", result)
            if isinstance(result, dict):
                response = (
                    result.get("output")
                    or result.get("output_text")
                    or str(result)
                )
            else:
                response = str(result)
            if not response.strip():
                # Fallback: query the raw model directly when the agent
                # produced no usable output.
                print("⚠️ No structured output, falling back to direct LLM call")
                resp = self.model.invoke(query)
                response = getattr(resp, "content", str(resp))
                # FIX: save to memory only on this fallback path. The agent
                # executor (constructed with memory=self.memory) already
                # persists each normal turn, so the previous unconditional
                # save_context() recorded every exchange twice.
                self.memory.save_context({"input": query}, {"output": response})
            return response
        except Exception as e:
            return f"❌ Error: {str(e)}"
# ----------------------
# Gradio UI
# ----------------------
# Gradio UI: key entry, explicit initialization step, then a simple
# text-in / text-out chat surface. (Mojibake in the user-facing labels
# and messages — "๐Ÿ”‘", "โš ๏ธ", etc. — restored to the intended emoji.)
with gr.Blocks() as demo:
    with gr.Row():
        openai_key = gr.Textbox(
            label="🔑 OpenAI API Key",
            type="password",
            placeholder="Paste your key",
        )
        tavily_key = gr.Textbox(
            label="🔑 Tavily API Key (optional)",
            type="password",
            placeholder="Paste your Tavily key",
        )

    # Holds the cbfs instance once the user clicks "Initialize Chatbot";
    # None until then.
    chatbot_state = gr.State(None)

    with gr.Row():
        inp = gr.Textbox(placeholder="Enter text here…", label="User Input")
        output = gr.Textbox(placeholder="Response...", label="ChatBot Output", interactive=False)

    status = gr.Textbox(label="Status", interactive=False)

    def init_chatbot(openai_key, tavily_key):
        """Build a cbfs bot from the supplied keys; never raises into Gradio."""
        try:
            bot = cbfs(tools, openai_key, tavily_key)
            return bot, "✅ Chatbot initialized successfully!"
        except Exception as e:
            return None, f"❌ Error: {str(e)}"

    init_btn = gr.Button("Initialize Chatbot")
    init_btn.click(
        fn=init_chatbot,
        inputs=[openai_key, tavily_key],
        outputs=[chatbot_state, status],
    )

    def process_query(query, chatbot):
        """Route a submitted query to the initialized bot, if any."""
        if chatbot is None:
            return "⚠️ Please initialize the chatbot first by entering your API keys."
        return chatbot.convchain(query)

    inp.submit(process_query, inputs=[inp, chatbot_state], outputs=output)

# 🚀 Launch (no share=True on Spaces)
demo.launch()