forked from P-r-e-m-i-u-m/mee-you-want-to-see
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
322 lines (264 loc) Β· 12.7 KB
/
app.py
File metadata and controls
322 lines (264 loc) Β· 12.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
import os
import asyncio
import base64
import streamlit as st
from researcher import Researcher
async def run_research_agent(researcher, agent, user_input):
    """Run the research agent asynchronously.

    Args:
        researcher: Researcher instance that owns the memory-recording logic.
        agent: The research agent to execute.
        user_input: The user's research prompt.

    Returns:
        Whatever ``researcher.run_agent_with_memory`` returns for this agent.

    Raises:
        Any exception raised by the underlying agent run — errors propagate
        unchanged to the caller (the UI layer displays them).
    """
    # The original try/except only re-raised the exception, which adds no
    # handling and needlessly rewrites the traceback origin; let it propagate.
    return await researcher.run_agent_with_memory(agent, user_input)
async def run_memory_agent(researcher, agent, user_input):
    """Run the memory agent asynchronously.

    Args:
        researcher: Researcher instance that owns the memory-recording logic.
        agent: The memory agent to execute.
        user_input: The user's memory-history query.

    Returns:
        Whatever ``researcher.run_agent_with_memory`` returns for this agent.

    Raises:
        Any exception raised by the underlying agent run — errors propagate
        unchanged to the caller (the UI layer displays them).
    """
    # The original try/except only re-raised the exception, which adds no
    # handling and needlessly rewrites the traceback origin; let it propagate.
    return await researcher.run_agent_with_memory(agent, user_input)
def main():
    """Render the Streamlit UI for the arXiv researcher demo.

    Builds the page header (inlined logos), a sidebar with API-key inputs,
    a mode selector, and example prompts, then renders one of two chat
    modes: a research chat (arXiv/Tavily agent) or a memory chat (queries
    over past research). Agents and both chat histories are cached in
    ``st.session_state`` so they survive Streamlit's script reruns.
    """
    st.set_page_config(
        page_title="Research Agent with Memory", page_icon="π¬", layout="wide"
    )

    # Inline the Gibson logo SVG so it can be embedded directly in the
    # title HTML (double quotes swapped to singles to survive the f-string).
    with open("./assets/gibson.svg", "r", encoding="utf-8") as gibson_file:
        gibson_svg = (
            gibson_file.read()
            .replace("\n", "")
            .replace("\r", "")
            # NOTE(review): this strips EVERY space, including attribute
            # separators inside SVG tags — confirm the asset still renders.
            .replace(" ", "")
            .replace('"', "'")
        )
    # The Tavily logo is a PNG, so embed it as a base64 data URI instead.
    with open("./assets/tavily.png", "rb") as tavily_file:
        tavily_base64 = base64.b64encode(tavily_file.read()).decode()

    gibson_svg_inline = f'<span style="height:80px; width:200px; display:inline-block; vertical-align:middle; margin-left:8px;margin-top:20px;margin-right:8px;">{gibson_svg}</span>'
    title_html = f"""
    <div style="display: flex; width: 100%; ">
        <h1 style="margin: 0; padding: 0; font-size: 2.5rem; font-weight: bold;">
            <span style="font-size:2.5rem;">π΅π»ββοΈ</span> arXiv Researcher Agent
            {gibson_svg_inline}
            <span style="">Memori</span> &
            <img src="data:image/png;base64,{tavily_base64}" style="height: 60px; vertical-align: middle; bottom: 5px;"/>
        </h1>
    </div>
    """
    st.markdown(title_html, unsafe_allow_html=True)
    # st.markdown("π¬Arxiv Research Papers Agent with Persistent Memory ")

    # Sidebar with navigation and info
    with st.sidebar:
        st.image("./assets/nebius.png", width=150)
        # NOTE(review): these two inputs are collected but never read below;
        # presumably the Researcher reads the env vars directly — confirm.
        nebius_key = st.text_input("Enter your Nebius API key", value=os.getenv("NEBIUS_API_KEY", ""), type="password")
        tavily_api_key = st.text_input("Enter your Tavily API key", value=os.getenv("TAVILY_API_KEY", ""), type="password")
        st.divider()
        tab_choice = st.radio(
            "Choose Mode:", ["π¬ Research Chat", "π§ Memory Chat"], key="tab_choice"
        )
        st.divider()

        # Example prompts for whichever mode is active. Clicking a button
        # appends a user message to the matching history; the agent itself
        # only runs from the chat input (no st.rerun here by design).
        if tab_choice == "π¬ Research Chat":
            st.markdown("### π¬ Example Research Topics:")
            if st.button("π§ Brain-Computer Interfaces"):
                st.session_state.research_messages.append(
                    {
                        "role": "user",
                        "content": "Research the latest developments in brain-computer interfaces",
                    }
                )
                # st.rerun()
            if st.button("π Solid-State Batteries"):
                st.session_state.research_messages.append(
                    {
                        "role": "user",
                        "content": "Analyze the current state of solid-state batteries",
                    }
                )
                # st.rerun()
            if st.button("𧬠CRISPR Gene Editing"):
                st.session_state.research_messages.append(
                    {
                        "role": "user",
                        "content": "Research recent breakthroughs in CRISPR gene editing",
                    }
                )
                # st.rerun()
            if st.button("π Autonomous Vehicles"):
                st.session_state.research_messages.append(
                    {
                        "role": "user",
                        "content": "Investigate the development of autonomous vehicles",
                    }
                )
                # st.rerun()
        elif tab_choice == "π§ Memory Chat":
            st.markdown("### π§ Example Memory Queries:")
            if st.button("π Summarize my research history"):
                st.session_state.memory_messages.append(
                    {
                        "role": "user",
                        "content": "Can you summarize my research history and main findings?",
                    }
                )
            if st.button("𧬠Find my biotech research"):
                st.session_state.memory_messages.append(
                    {
                        "role": "user",
                        "content": "Find all my research related to biotechnology and gene editing",
                    }
                )
            if st.button("π What were my last research topics?"):
                st.session_state.memory_messages.append(
                    {
                        "role": "user",
                        "content": "What were my last research topics?",
                    }
                )
            if st.button("π Show my research on AI"):
                st.session_state.memory_messages.append(
                    {
                        "role": "user",
                        "content": "Show me all my previous research related to artificial intelligence",
                    }
                )

        # NOTE(review): source indentation was lost; this section is placed
        # at sidebar level (visible in both modes) — confirm against the
        # original layout.
        st.header("Research History")
        if st.button("π View All Research"):
            st.session_state.show_all_research = True
        if st.button("ποΈ Clear All Memory", type="secondary"):
            # Two-click confirmation: first click arms, second click clears.
            # NOTE(review): no storage is actually wiped here — only the
            # confirmation flag is toggled; verify intended behavior.
            if st.session_state.get("confirm_clear_research"):
                st.success("Research memory cleared!")
                st.session_state.confirm_clear_research = False
                st.rerun()
            else:
                st.session_state.confirm_clear_research = True
                st.warning("Click again to confirm")

    # Initialize researcher (once per session; reruns reuse session_state)
    if "researcher" not in st.session_state:
        with st.spinner("Initializing Researcher with Memory..."):
            st.session_state.researcher = Researcher()
            st.session_state.researcher.define_agents()
    # Get agents from researcher
    if "research_agent" not in st.session_state:
        st.session_state.research_agent = st.session_state.researcher.get_research_agent()
    if "memory_agent" not in st.session_state:
        st.session_state.memory_agent = st.session_state.researcher.get_memory_agent()
    # Initialize chat histories
    if "research_messages" not in st.session_state:
        st.session_state.research_messages = []
    if "memory_messages" not in st.session_state:
        st.session_state.memory_messages = []

    # Intro panel, shown only until the first research message exists.
    if not st.session_state.research_messages:
        st.markdown("## About This Demo")
        st.markdown(
            """
            This demo showcases:
            - **Research Agent**: Uses Tavily for arXiv research paper search
            - **Memori Integration**: Remembers all research sessions
            - **Memory Chat**: Query your research history

            The research agent can:
            - π Conduct comprehensive research using arXiv papers
            - π§ Remember all previous research
            - π Build upon past research
            - πΎ Store findings for future reference
            """
        )

    # Research Chat Tab
    if tab_choice == "π¬ Research Chat":
        # Display research chat messages
        for message in st.session_state.research_messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])
        # Research chat input
        if research_prompt := st.chat_input("What would you like me to research?"):
            # Add user message to chat history
            st.session_state.research_messages.append(
                {"role": "user", "content": research_prompt}
            )
            with st.chat_message("user"):
                st.markdown(research_prompt)
            # Generate research response
            with st.chat_message("assistant"):
                with st.spinner("π Conducting research and searching memory..."):
                    try:
                        # Get response from research agent with automatic memory recording
                        response = asyncio.run(run_research_agent(
                            st.session_state.researcher,
                            st.session_state.research_agent,
                            research_prompt
                        ))
                        # Extract response content: agent-SDK results expose
                        # final_output; fall back to content, then str().
                        if hasattr(response, 'final_output'):
                            response_content = response.final_output
                        elif hasattr(response, 'content'):
                            response_content = response.content
                        else:
                            response_content = str(response)
                        # Display the response
                        st.markdown(response_content)
                        # Show confirmation that individual conversations were recorded
                        st.success("β All agent conversations recorded to memory!", icon="π§ ")
                        # Add assistant response to chat history
                        st.session_state.research_messages.append(
                            {"role": "assistant", "content": response_content}
                        )
                    except Exception as e:
                        # Surface the failure in-chat so the history stays coherent.
                        error_message = f"Sorry, I encountered an error: {str(e)}"
                        st.error(error_message)
                        st.session_state.research_messages.append(
                            {"role": "assistant", "content": error_message}
                        )

    # Memory Chat Tab
    elif tab_choice == "π§ Memory Chat":
        for message in st.session_state.memory_messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])
        # Memory chat input
        if memory_prompt := st.chat_input(
            "What would you like to know about your research history?"
        ):
            # Add user message to chat history
            st.session_state.memory_messages.append(
                {"role": "user", "content": memory_prompt}
            )
            with st.chat_message("user"):
                st.markdown(memory_prompt)
            # Generate memory response
            with st.chat_message("assistant"):
                with st.spinner("π§ Searching through your research history..."):
                    try:
                        # Get response from memory agent with automatic memory recording
                        response = asyncio.run(run_memory_agent(
                            st.session_state.researcher,
                            st.session_state.memory_agent,
                            memory_prompt
                        ))
                        # Extract response content (same fallback chain as above)
                        if hasattr(response, 'final_output'):
                            response_content = response.final_output
                        elif hasattr(response, 'content'):
                            response_content = response.content
                        else:
                            response_content = str(response)
                        # Display the response
                        st.markdown(response_content)
                        # Show confirmation that conversations were recorded
                        st.success("β Memory agent conversations recorded!", icon="π§ ")
                        # Add assistant response to chat history
                        st.session_state.memory_messages.append(
                            {"role": "assistant", "content": response_content}
                        )
                    except Exception as e:
                        error_message = f"Sorry, I encountered an error: {str(e)}"
                        st.error(error_message)
                        st.session_state.memory_messages.append(
                            {"role": "assistant", "content": error_message}
                        )
if __name__ == "__main__":
    # Fail fast: both API keys must be present in the environment before
    # the UI is rendered at all.
    required_keys = ("NEBIUS_API_KEY", "TAVILY_API_KEY")
    missing = [key for key in required_keys if not os.getenv(key)]
    if missing:
        st.error(f"Please set required environment variable(s): {', '.join(missing)}")
        st.stop()
    main()