import streamlit as st
import google.generativeai as genai
import os
from streamlit_chat import message
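# Configure the Gemini client from the GEMINI_API_KEY environment variable.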
gemini_api_key = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=gemini_api_key)
model = genai.GenerativeModel('gemini-pro')
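# Note: if GEMINI_API_KEY is unset, api_key is None and the first generate_content
# call will fail; guarding with st.error(...) and st.stop() is one possible fix.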
# prompt = st.chat_input("Say something")
# if prompt:
#     st.write(f"User has sent the following prompt: {prompt}")
# else:
#     prompt = "who are you?"
# response = model.generate_content(prompt)
# message = st.chat_message("ai")
# message.write(response.text)
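# Callback invoked by st.chat_input when the user submits a message;
# it appends the user turn and the model's reply to the session history.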
def chat_actions():
    # Record the user's message in the conversation history.
    st.session_state["chat_history"].append(
        {"role": "user", "content": st.session_state["chat_input"]},
    )
    # Ask Gemini for a reply and store it as the assistant turn.
    response = model.generate_content(st.session_state["chat_input"])
    st.session_state["chat_history"].append(
        {
            "role": "assistant",
            "content": response.text,
        },  # This can be replaced with your chat response logic
    )
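# Initialize the conversation history once per session.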
if "chat_history" not in st.session_state:
st.session_state["chat_history"] = []
st.chat_input("Enter your message", on_submit=chat_actions, key="chat_input")
for i in st.session_state["chat_history"]:
    with st.chat_message(name=i["role"]):
        st.write(i["content"])
# img_file_buffer = st.file_uploader('Upload a PNG image', type='png')
import numpy as np
from PIL import Image
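# Called whenever the uploader's file selection changes.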
def uploader_callback():
    print('Uploaded file')
img_file_buffer = st.file_uploader('Upload a PNG or JPG image', on_change=uploader_callback, type=['png', 'jpg'])
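# Decode the uploaded image into a NumPy array.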
if img_file_buffer is not None:
    image = Image.open(img_file_buffer)
    img_array = np.array(image)