dlaima commited on
Commit
c692360
·
verified ·
1 Parent(s): 0613044

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -22
app.py CHANGED
@@ -1,14 +1,11 @@
1
  import gradio as gr
2
  import wikipedia
3
- #from langchain_community.chat_models import ChatOpenAI
4
  from langchain.memory import ConversationBufferMemory
5
  from langchain.agents import initialize_agent, AgentType
6
  from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
7
  from langchain.tools import Tool
8
 
9
- from langchain_openai import ChatOpenAI
10
-
11
-
12
  # Optional: Tavily client
13
  try:
14
  from tavily import TavilyClient
@@ -35,10 +32,11 @@ def search_wikipedia(query: str) -> str:
35
  except wikipedia.exceptions.PageError:
36
  return "No relevant Wikipedia page found."
37
 
 
38
  tools = [
39
  Tool(name="Temperature", func=get_current_temperature, description="Get current temperature"),
40
  Tool(name="Search Wikipedia", func=search_wikipedia, description="Search Wikipedia"),
41
- Tool(name="Create Your Own", func=create_your_own, description="Custom tool for processing input")
42
  ]
43
 
44
 
@@ -58,9 +56,11 @@ class cbfs:
58
  if tavily_available and tavily_key:
59
  self.tavily = TavilyClient(api_key=tavily_key)
60
 
61
- # Memory
62
  self.memory = ConversationBufferMemory(
63
- return_messages=True, memory_key="chat_history", ai_prefix="Assistant"
 
 
64
  )
65
 
66
  # Prompt
@@ -68,17 +68,17 @@ class cbfs:
68
  ("system", "You are a helpful but sassy assistant. Remember what the user tells you in the conversation."),
69
  MessagesPlaceholder(variable_name="chat_history"),
70
  ("user", "{input}"),
71
- MessagesPlaceholder(variable_name="agent_scratchpad")
72
  ])
73
 
74
- # Agent
75
  self.chain = initialize_agent(
76
  tools=tools,
77
  llm=self.model,
78
  agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
79
  verbose=True,
80
  memory=self.memory,
81
- handle_parsing_errors=True
82
  )
83
 
84
  def convchain(self, query: str) -> str:
@@ -87,30 +87,27 @@ class cbfs:
87
 
88
  try:
89
  result = self.chain.invoke({"input": query})
90
- print("🔍 Raw agent result:", result) # log to HuggingFace
91
 
92
- # Extract possible outputs
93
  if isinstance(result, dict):
94
  response = (
95
- result.get("output") or
96
- result.get("output_text") or
97
- str(result)
98
  )
99
  else:
100
  response = str(result)
101
 
102
- # Fallback: if still empty, just ask the model directly
103
  if not response.strip():
104
  print("⚠️ No structured output, falling back to direct LLM call")
105
  resp = self.model.invoke(query)
106
  response = getattr(resp, "content", str(resp))
107
 
108
- # Save to memory
109
  self.memory.save_context({"input": query}, {"output": response})
110
  return response
111
 
112
  except Exception as e:
113
- print("❌ Execution Error:", str(e))
114
  return f"❌ Error: {str(e)}"
115
 
116
 
@@ -119,8 +116,16 @@ class cbfs:
119
  # ----------------------
120
  with gr.Blocks() as demo:
121
  with gr.Row():
122
- openai_key = gr.Textbox(label="🔑 OpenAI API Key", type="password", placeholder="Paste your key")
123
- tavily_key = gr.Textbox(label="🔑 Tavily API Key (optional)", type="password", placeholder="Paste your Tavily key")
 
 
 
 
 
 
 
 
124
 
125
  chatbot_state = gr.State(None)
126
 
@@ -138,7 +143,11 @@ with gr.Blocks() as demo:
138
  return None, f"❌ Error: {str(e)}"
139
 
140
  init_btn = gr.Button("Initialize Chatbot")
141
- init_btn.click(fn=init_chatbot, inputs=[openai_key, tavily_key], outputs=[chatbot_state, status])
 
 
 
 
142
 
143
  def process_query(query, chatbot):
144
  if chatbot is None:
@@ -148,7 +157,7 @@ with gr.Blocks() as demo:
148
  inp.submit(process_query, inputs=[inp, chatbot_state], outputs=output)
149
 
150
 
151
- #demo.launch(share=True)
152
  demo.launch()
153
 
154
 
 
1
  import gradio as gr
2
  import wikipedia
3
+ from langchain_openai import ChatOpenAI # ✅ correct modern import
4
  from langchain.memory import ConversationBufferMemory
5
  from langchain.agents import initialize_agent, AgentType
6
  from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
7
  from langchain.tools import Tool
8
 
 
 
 
9
  # Optional: Tavily client
10
  try:
11
  from tavily import TavilyClient
 
32
  except wikipedia.exceptions.PageError:
33
  return "No relevant Wikipedia page found."
34
 
35
+
36
  tools = [
37
  Tool(name="Temperature", func=get_current_temperature, description="Get current temperature"),
38
  Tool(name="Search Wikipedia", func=search_wikipedia, description="Search Wikipedia"),
39
+ Tool(name="Create Your Own", func=create_your_own, description="Custom tool for processing input"),
40
  ]
41
 
42
 
 
56
  if tavily_available and tavily_key:
57
  self.tavily = TavilyClient(api_key=tavily_key)
58
 
59
+ # Conversation memory
60
  self.memory = ConversationBufferMemory(
61
+ return_messages=True,
62
+ memory_key="chat_history",
63
+ ai_prefix="Assistant"
64
  )
65
 
66
  # Prompt
 
68
  ("system", "You are a helpful but sassy assistant. Remember what the user tells you in the conversation."),
69
  MessagesPlaceholder(variable_name="chat_history"),
70
  ("user", "{input}"),
71
+ MessagesPlaceholder(variable_name="agent_scratchpad"),
72
  ])
73
 
74
+ # Initialize agent
75
  self.chain = initialize_agent(
76
  tools=tools,
77
  llm=self.model,
78
  agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
79
  verbose=True,
80
  memory=self.memory,
81
+ handle_parsing_errors=True,
82
  )
83
 
84
  def convchain(self, query: str) -> str:
 
87
 
88
  try:
89
  result = self.chain.invoke({"input": query})
90
+ print("🔍 Raw agent result:", result)
91
 
 
92
  if isinstance(result, dict):
93
  response = (
94
+ result.get("output")
95
+ or result.get("output_text")
96
+ or str(result)
97
  )
98
  else:
99
  response = str(result)
100
 
101
+ # fallback direct LLM call
102
  if not response.strip():
103
  print("⚠️ No structured output, falling back to direct LLM call")
104
  resp = self.model.invoke(query)
105
  response = getattr(resp, "content", str(resp))
106
 
 
107
  self.memory.save_context({"input": query}, {"output": response})
108
  return response
109
 
110
  except Exception as e:
 
111
  return f"❌ Error: {str(e)}"
112
 
113
 
 
116
  # ----------------------
117
  with gr.Blocks() as demo:
118
  with gr.Row():
119
+ openai_key = gr.Textbox(
120
+ label="🔑 OpenAI API Key",
121
+ type="password",
122
+ placeholder="Paste your key"
123
+ )
124
+ tavily_key = gr.Textbox(
125
+ label="🔑 Tavily API Key (optional)",
126
+ type="password",
127
+ placeholder="Paste your Tavily key"
128
+ )
129
 
130
  chatbot_state = gr.State(None)
131
 
 
143
  return None, f"❌ Error: {str(e)}"
144
 
145
  init_btn = gr.Button("Initialize Chatbot")
146
+ init_btn.click(
147
+ fn=init_chatbot,
148
+ inputs=[openai_key, tavily_key],
149
+ outputs=[chatbot_state, status]
150
+ )
151
 
152
  def process_query(query, chatbot):
153
  if chatbot is None:
 
157
  inp.submit(process_query, inputs=[inp, chatbot_state], outputs=output)
158
 
159
 
160
+ # 🚀 Launch (no share=True on Spaces)
161
  demo.launch()
162
 
163