import gradio as gr
import wikipedia
from langchain_openai import ChatOpenAI   # ✅ correct modern import
from langchain.memory import ConversationBufferMemory
from langchain.agents import initialize_agent, AgentType
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.tools import Tool

# Optional: Tavily client
try:
    from tavily import TavilyClient
    tavily_available = True
except ImportError:
    TavilyClient = None
    tavily_available = False


# ----------------------
# Tools
# ----------------------
def create_your_own(query: str) -> str:
    """Placeholder custom tool: simply reverses the input string."""
    return query[::-1]

def get_current_temperature(query: str) -> str:
    """Stub weather tool: always returns a hard-coded reading."""
    return "It's sunny and 75°F."

def search_wikipedia(query: str) -> str:
    """Return a two-sentence Wikipedia summary, handling ambiguous or missing pages."""
    try:
        return wikipedia.summary(query, sentences=2)
    except wikipedia.exceptions.DisambiguationError as e:
        return f"Multiple results found: {', '.join(e.options[:5])}"
    except wikipedia.exceptions.PageError:
        return "No relevant Wikipedia page found."


tools = [
    Tool(name="Temperature", func=get_current_temperature, description="Get current temperature"),
    Tool(name="Search Wikipedia", func=search_wikipedia, description="Search Wikipedia"),
    Tool(name="Create Your Own", func=create_your_own, description="Custom tool for processing input"),
]
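
# The Tavily client is imported above and initialized inside `cbfs`, but it is not
# yet exposed to the agent as a tool. Below is a minimal sketch of how a web-search
# tool could be built from it; the `search()` call and the "results"/"title"/"content"
# fields reflect the tavily-python client as commonly documented and should be treated
# as assumptions. The helper is not wired into `tools` by default.
def make_tavily_tool(client) -> Tool:
    def tavily_search(query: str) -> str:
        try:
            results = client.search(query).get("results", [])[:3]
            if not results:
                return "No web results found."
            return "\n".join(f"{r.get('title', '')}: {r.get('content', '')}" for r in results)
        except Exception as e:
            return f"Web search failed: {e}"
    return Tool(name="Web Search", func=tavily_search, description="Search the web via Tavily")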


# ----------------------
# Chatbot class
# ----------------------
class cbfs:
    def __init__(self, tools, openai_key: str, tavily_key: str = None):
        if not openai_key:
            raise ValueError("⚠️ OpenAI API key is required.")

        # Initialize OpenAI model
        self.model = ChatOpenAI(temperature=0, openai_api_key=openai_key)

        # Tavily init (optional)
        self.tavily = None
        if tavily_available and tavily_key:
            self.tavily = TavilyClient(api_key=tavily_key)

        # Conversation memory
        self.memory = ConversationBufferMemory(
            return_messages=True,
            memory_key="chat_history",
            ai_prefix="Assistant"
        )

        # Prompt template (note: initialize_agent below builds its own ReAct
        # prompt, so this template is kept for reference but not passed in)
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful but sassy assistant. Remember what the user tells you in the conversation."),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ])

        # Initialize the agent executor; the attached memory records each
        # exchange automatically after every run
        self.chain = initialize_agent(
            tools=tools,
            llm=self.model,
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
            memory=self.memory,
            handle_parsing_errors=True,
        )

    def convchain(self, query: str) -> str:
        if not query:
            return "Please enter a query."

        try:
            result = self.chain.invoke({"input": query})
            print("🔍 Raw agent result:", result)

            if isinstance(result, dict):
                response = (
                    result.get("output")
                    or result.get("output_text")
                    or str(result)
                )
            else:
                response = str(result)

            # Fallback to a direct LLM call; the agent's memory already stores
            # the normal exchange, so only this fallback needs an explicit save
            if not response.strip():
                print("⚠️ No structured output, falling back to direct LLM call")
                resp = self.model.invoke(query)
                response = getattr(resp, "content", str(resp))
                self.memory.save_context({"input": query}, {"output": response})

            return response

        except Exception as e:
            return f"❌ Error: {str(e)}"


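# Quick headless check of the class above, kept as a comment so the Space only
# starts the Gradio app. A minimal sketch assuming a valid key is exported as
# OPENAI_API_KEY (the variable name here is illustrative):
#
#   import os
#   bot = cbfs(tools, openai_key=os.environ["OPENAI_API_KEY"])
#   print(bot.convchain("What's the temperature right now?"))
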
# ----------------------
# Gradio UI
# ----------------------
with gr.Blocks() as demo:
    with gr.Row():
        openai_key = gr.Textbox(
            label="🔑 OpenAI API Key",
            type="password",
            placeholder="Paste your key"
        )
        tavily_key = gr.Textbox(
            label="🔑 Tavily API Key (optional)",
            type="password",
            placeholder="Paste your Tavily key"
        )

    chatbot_state = gr.State(None)

    with gr.Row():
        inp = gr.Textbox(placeholder="Enter text here…", label="User Input")
        output = gr.Textbox(placeholder="Response...", label="ChatBot Output", interactive=False)

    status = gr.Textbox(label="Status", interactive=False)

    def init_chatbot(openai_key, tavily_key):
        try:
            bot = cbfs(tools, openai_key, tavily_key)
            return bot, "✅ Chatbot initialized successfully!"
        except Exception as e:
            return None, f"❌ Error: {str(e)}"

    init_btn = gr.Button("Initialize Chatbot")
    init_btn.click(
        fn=init_chatbot,
        inputs=[openai_key, tavily_key],
        outputs=[chatbot_state, status]
    )

    def process_query(query, chatbot):
        if chatbot is None:
            return "⚠️ Please initialize the chatbot first by entering your API keys."
        return chatbot.convchain(query)

    inp.submit(process_query, inputs=[inp, chatbot_state], outputs=output)


# 🚀 Launch (no share=True on Spaces)
demo.launch()