-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgemini_explorer.py
More file actions
123 lines (98 loc) · 4.6 KB
/
gemini_explorer.py
File metadata and controls
123 lines (98 loc) · 4.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#pip install streamlit
#pip install google-cloud-aiplatform
import vertexai
import streamlit as st
from vertexai.preview import generative_models
from vertexai.preview.generative_models import GenerativeModel, Part, Content, ChatSession
# Set up the project
# NOTE(review): project ID is hard-coded; consider reading it from an
# environment variable or config for portability.
project = "chatapp-442003"
# Initialize the Vertex AI SDK for this project (uses default credentials/region).
vertexai.init(project=project)
# Load and start the model
# temperature=0.5 trades off determinism vs. creativity in responses.
config = generative_models.GenerationConfig(
    temperature=0.5
)
model = GenerativeModel(
    "gemini-pro",
    generation_config=config
)
# Start a stateful chat session; it accumulates conversation context server-side.
# NOTE(review): this runs on every Streamlit rerun, so the session is recreated
# each time — the model itself does not retain history across reruns; verify
# whether the chat object should live in st.session_state instead.
chat = model.start_chat()
def llm_function(user_input, chat):
    """Send *user_input* to the chat session and return the exchange.

    Args:
        user_input: The message text to forward to the model.
        chat: A chat session object exposing ``send_message`` whose result
            has a ``.text`` attribute.

    Returns:
        A ``(user_input, response_text)`` tuple so the caller can record
        both sides of the turn in the chat history.
    """
    reply = chat.send_message(user_input)
    return user_input, reply.text
# Streamlit interface
st.title("AI Chat App")
# Initialize per-session state on first run: the message history and the
# emoji scratch buffer used to pre-fill the input box.
if 'chat_history' not in st.session_state:
    st.session_state.chat_history = []
if 'emoji_input' not in st.session_state:
    st.session_state.emoji_input = ""
# On a brand-new session (empty history), send a canned self-introduction
# prompt through the model and show its reply as the opening message.
# NOTE(review): this sends the assistant's own introduction *as a user
# message* to the model — confirm that is the intended prompt design.
if len(st.session_state.chat_history) == 0:
    initial_prompt = "I am your assistant powered by Google Gemini. :) Ask me anything!"
    # Send the initial prompt to the chat
    user_message, llm_response = llm_function(initial_prompt, chat)
    # Only the model's reply is recorded; the seed prompt is not shown.
    st.session_state.chat_history.append({"role": "gemini", "content": llm_response})
def calculate_height(message):
    """Return a pixel height for a text area displaying *message*.

    Allocates 20 px per newline-delimited line, clamped to the inclusive
    range [68, 400] (68 px is the minimum Streamlit accepts for a text area).
    """
    line_count = message.count('\n') + 1
    height = line_count * 20
    if height < 68:
        height = 68
    elif height > 400:
        height = 400
    return height
# Display chat history as read-only text areas, one widget per message.
for i, message in enumerate(st.session_state.chat_history):
    # Streamlit requires unique widget keys; role + index guarantees that.
    key = f"{message['role']}_{i}"
    if message["role"] == "user":
        st.text_area("You:", value=message["content"], height=calculate_height(message["content"]), key=key, disabled=True)
    else:
        st.text_area("Gemini:", value=message["content"], height=calculate_height(message["content"]), key=key, disabled=True)
# Emoji tray visibility toggle (persisted across reruns in session state).
if 'show_emoji_tray' not in st.session_state:
    st.session_state.show_emoji_tray = False
# Attachment icon button
attachment_clicked = st.button("📎")  # Create attachment button
# Toggle the visibility of the emoji tray on each click.
if attachment_clicked:
    st.session_state.show_emoji_tray = not st.session_state.show_emoji_tray
# Emoji tray with buttons (visible only while toggled on).
if st.session_state.show_emoji_tray:
    st.write("Click an emoji to add it to your message:")
    # Define a list of emojis to display
    emoji_list = ['😊', '😂', '❤️', '👍', '🔥', '🎉', '💡', '💬', '😎', '🙌']
    # One column per emoji so they render in a single row.
    cols = st.columns(len(emoji_list))
    for i, emoji in enumerate(emoji_list):
        if cols[i].button(emoji):
            # Clicking a button triggers a rerun; the appended emoji shows
            # up pre-filled in the input box on the next render.
            st.session_state.emoji_input += emoji
# Input form; clear_on_submit empties the text box after each send.
with st.form(key='input_form', clear_on_submit=True):
    user_input = st.text_input(
        "You:",
        placeholder="I am your personal assistant, ask me what you want to know.",
        # Pre-fill with any emojis clicked in the tray above.
        # NOTE(review): passing value= on every rerun may fight with
        # clear_on_submit in some Streamlit versions — verify the box
        # actually clears after sending.
        value=st.session_state.emoji_input
    )
    submit_button = st.form_submit_button(label='Send')
if submit_button:
    # Ignore whitespace-only submissions.
    if user_input.strip():
        # Forward the message to the model and capture both sides of the turn.
        user_message, llm_response = llm_function(user_input, chat)
        st.session_state.chat_history.append({"role": "user", "content": user_message})
        st.session_state.chat_history.append({"role": "gemini", "content": llm_response})
        # Reset the emoji scratch buffer so the next input starts empty.
        st.session_state.emoji_input = ""
        # Rerun so the new messages render and the page scrolls to the bottom.
        st.rerun()
# Auto-scroll the app to the newest message after each rerun.
# FIX: st.markdown sanitizes <script> tags even with unsafe_allow_html=True,
# so the original JS never executed. Injecting through an HTML component
# runs it in an iframe that reaches the parent page via window.parent.
import streamlit.components.v1 as components

scroll_script = """
<script>
var chatDiv = window.parent.document.getElementsByClassName('main')[0];
chatDiv.scrollTo(0, chatDiv.scrollHeight);
</script>
"""
# height=0 keeps the helper iframe invisible in the layout.
components.html(scroll_script, height=0)