dlaima commited on
Commit
6796cdd
·
verified ·
1 Parent(s): 6cec9ce

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -12
app.py CHANGED
@@ -6,7 +6,7 @@ from langchain.agents import initialize_agent, AgentType
6
  from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
7
  from langchain.tools import Tool
8
 
9
- # Optional: Tavily client (only if installed)
10
  try:
11
  from tavily import TavilyClient
12
  tavily_available = True
@@ -26,8 +26,7 @@ def get_current_temperature(query: str) -> str:
26
 
27
  def search_wikipedia(query: str) -> str:
28
  try:
29
- summary = wikipedia.summary(query, sentences=2)
30
- return summary
31
  except wikipedia.exceptions.DisambiguationError as e:
32
  return f"Multiple results found: {', '.join(e.options[:5])}"
33
  except wikipedia.exceptions.PageError:
@@ -51,7 +50,7 @@ class cbfs:
51
  # Initialize OpenAI model
52
  self.model = ChatOpenAI(temperature=0, openai_api_key=openai_key)
53
 
54
- # Initialize Tavily if available
55
  self.tavily = None
56
  if tavily_available and tavily_key:
57
  self.tavily = TavilyClient(api_key=tavily_key)
@@ -69,7 +68,7 @@ class cbfs:
69
  MessagesPlaceholder(variable_name="agent_scratchpad")
70
  ])
71
 
72
- # Initialize agent properly with enum
73
  self.chain = initialize_agent(
74
  tools=tools,
75
  llm=self.model,
@@ -82,25 +81,33 @@ class cbfs:
82
  def convchain(self, query: str) -> str:
83
  if not query:
84
  return "Please enter a query."
 
85
  try:
86
  result = self.chain.invoke({"input": query})
 
87
 
88
- # Debugging: show raw result in logs
89
- print("🔍 Raw agent result:", result)
90
-
91
- # Try different output keys
92
  if isinstance(result, dict):
93
  response = (
94
- result.get("output")
95
- or result.get("output_text")
96
- or str(result)
97
  )
98
  else:
99
  response = str(result)
100
 
 
 
 
 
 
 
 
101
  self.memory.save_context({"input": query}, {"output": response})
102
  return response
 
103
  except Exception as e:
 
104
  return f"❌ Error: {str(e)}"
105
 
106
 
 
6
  from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
7
  from langchain.tools import Tool
8
 
9
+ # Optional: Tavily client
10
  try:
11
  from tavily import TavilyClient
12
  tavily_available = True
 
26
 
27
  def search_wikipedia(query: str) -> str:
28
  try:
29
+ return wikipedia.summary(query, sentences=2)
 
30
  except wikipedia.exceptions.DisambiguationError as e:
31
  return f"Multiple results found: {', '.join(e.options[:5])}"
32
  except wikipedia.exceptions.PageError:
 
50
  # Initialize OpenAI model
51
  self.model = ChatOpenAI(temperature=0, openai_api_key=openai_key)
52
 
53
+ # Tavily init (optional)
54
  self.tavily = None
55
  if tavily_available and tavily_key:
56
  self.tavily = TavilyClient(api_key=tavily_key)
 
68
  MessagesPlaceholder(variable_name="agent_scratchpad")
69
  ])
70
 
71
+ # Agent
72
  self.chain = initialize_agent(
73
  tools=tools,
74
  llm=self.model,
 
81
  def convchain(self, query: str) -> str:
82
  if not query:
83
  return "Please enter a query."
84
+
85
  try:
86
  result = self.chain.invoke({"input": query})
87
+ print("🔍 Raw agent result:", result) # log to HuggingFace
88
 
89
+ # Extract possible outputs
 
 
 
90
  if isinstance(result, dict):
91
  response = (
92
+ result.get("output") or
93
+ result.get("output_text") or
94
+ str(result)
95
  )
96
  else:
97
  response = str(result)
98
 
99
+ # Fallback: if still empty, just ask the model directly
100
+ if not response.strip():
101
+ print("⚠️ No structured output, falling back to direct LLM call")
102
+ resp = self.model.invoke(query)
103
+ response = getattr(resp, "content", str(resp))
104
+
105
+ # Save to memory
106
  self.memory.save_context({"input": query}, {"output": response})
107
  return response
108
+
109
  except Exception as e:
110
+ print("❌ Execution Error:", str(e))
111
+ return f"❌ Error: {str(e)}"
112
 
113