I'm running into an issue with this code: when I click to use "flan-t5", it fails with the error shown in the image below. Does anyone have any idea what's going wrong? Here's my code (P.S. I commented out the URL because the forum doesn't let me include more than 2 links in a post):
import streamlit as st
from langchain_community.llms import HuggingFaceEndpoint
from langchain.chains import ConversationChain
from langchain.memory import ConversationSummaryMemory
from langchain_core.prompts import PromptTemplate
import os
st.set_page_config(page_title="Chatbot for ForexForest 🌲")
# session state variables for chat messages and history
if "chat_message" not in st.session_state:
    st.session_state["chat_message"] = []
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []
# allow user to select model
selected_model = st.radio("Select a model", ["mistral7b", "bert", "distilbert"], key="selected_model")
# Define model descriptions
model_descriptions = {
    "mistral7b": "A Text Generation Chatbot.",
    "bert": "A Text-2-Text Generation Chatbot.",
    "distilbert": "A Q&A Chatbot."
}

# display model description
st.write(f"{model_descriptions[selected_model]}")
if selected_model == "flan-t5":
    # llm = "https://api-inference.huggingface.co/models/google/flan-t5-large"
    # Hugging Face API token
    huggingfacehub_api_token = "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"

    # get url based on model name
    def get_endpoint_url(llm):
        return llm

    endpoint_url = get_endpoint_url(llm)

    # Hugging Face Endpoint with parameters
    hf_endpoint = HuggingFaceEndpoint(
        endpoint_url=endpoint_url,
        huggingfacehub_api_token=huggingfacehub_api_token,
        task="text2text-generation",
        max_length=200,  # Change this value to 200 or lower
        temperature=st.sidebar.slider("temperature", min_value=0.01, max_value=1.00, value=0.1, step=0.01)
    )
    # Define the prompt template for summarization
    template = """You are a helpful assistant that answers questions as simply as possible. Please respond concisely.
Current conversation: {history}
{input}
"""
    prompt = PromptTemplate(template=template, input_variables=["history", "input"])
    # Create a memory object to store conversation history
    memory = ConversationSummaryMemory(llm=hf_endpoint)

    # load chat history
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []
    else:
        for message in st.session_state.chat_history:
            memory.save_context({"input": message["human"]}, {"output": message["assistant"]})
    # Create the ConversationChain
    conversation = ConversationChain(
        prompt=prompt,
        llm=hf_endpoint,
        memory=memory,
        verbose=True,
    )
# start chat sessions
if "chat_sessions" not in st.session_state:
    st.session_state["chat_sessions"] = {
        "Chat 01": [{"role": "assistant", "content": "How may I assist you today?"}]}
if "current_chat" not in st.session_state:
    st.session_state["current_chat"] = "Chat 01"
# new chat session function
def create_new_chat():
    chat_count = len(st.session_state["chat_sessions"])
    new_chat_id = f"Chat{chat_count + 1:02d}"
    st.session_state["chat_sessions"][new_chat_id] = [{"role": "assistant", "content": "How may I assist you today?"}]
    st.session_state["current_chat"] = new_chat_id
    st.rerun()
# function to clear chat history
def clear_chat_history():
    st.session_state["chat_sessions"][st.session_state["current_chat"]] = [{"role": "assistant", "content": "How may I assist you today?"}]
# sidebar for chat controls
with st.sidebar:
    st.title("Chatbot for ForexForest 🌲")
    if st.button("New Chat"):
        create_new_chat()
    st.write("Chat Sessions:")
    for chat_id in st.session_state["chat_sessions"].keys():
        if st.button(chat_id):
            st.session_state["current_chat"] = chat_id
            st.rerun()
# Ask for the input of question
st.subheader("Please ask your question")

# Display chat messages
for message in st.session_state["chat_sessions"][st.session_state["current_chat"]]:
    with st.chat_message(message["role"]):
        st.write(message["content"])
# add sidebar button for clearing chat history
st.sidebar.button("Clear Chat History", on_click=clear_chat_history)
# User provided prompt
if user_input := st.chat_input("Say Something"):
    st.session_state["chat_sessions"][st.session_state["current_chat"]].append({"role": "user", "content": user_input})
    with st.chat_message("user"):
        st.write(user_input)

# Initialize messages if not present
if "messages" not in st.session_state.keys():
    st.session_state.messages = [{"role": "assistant", "content": "How may I assist you today?"}]
# Generate a new response if last message is not from assistant
if st.session_state["chat_sessions"][st.session_state["current_chat"]] and st.session_state["chat_sessions"][st.session_state["current_chat"]][-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            response = conversation.run(user_input)
            placeholder = st.empty()
            full_response = ""
            for item in response:
                full_response += item
                placeholder.markdown(full_response)
            message = {"role": "assistant", "content": full_response}
            st.session_state["chat_sessions"][st.session_state["current_chat"]].append(message)
# Update chat history in memory
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
else:
    for message in st.session_state["chat_sessions"][st.session_state["current_chat"]]:
        if message["role"] == "user":
            memory.save_context({"input": message["content"]}, {"output": ""})
        elif message["role"] == "assistant":
            memory.save_context({"input": ""}, {"output": message["content"]})
# display chat history
if len(st.session_state.chat_message) == len(st.session_state.chat_history) and len(st.session_state.chat_message) > 1:
    for i in reversed(range(len(st.session_state.chat_message) - 1)):
        with st.container():
            with st.chat_message("user"):
                st.markdown(st.session_state.chat_message[i])
            with st.chat_message("assistant"):
                st.markdown(st.session_state.chat_history[i])
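In case it helps with reproducing this, here is a stripped-down sketch of just the flan-t5 endpoint setup I expect that branch to perform, outside of Streamlit. The endpoint URL is the flan-t5-large one I commented out above, the token is a placeholder rather than my real one, and the temperature is hard-coded instead of coming from the sidebar slider, so treat it as a sketch of the intent, not a verified snippet:

# Standalone sketch of the flan-t5 branch (placeholders, not the full app)
from langchain_community.llms import HuggingFaceEndpoint

flan_t5_url = "https://api-inference.huggingface.co/models/google/flan-t5-large"  # the URL commented out above

hf_endpoint = HuggingFaceEndpoint(
    endpoint_url=flan_t5_url,
    huggingfacehub_api_token="hf_xxx",  # placeholder token
    task="text2text-generation",
    max_length=200,
    temperature=0.1,  # fixed here; the app reads this from a sidebar slider
)

# Single test call, just to check that the endpoint itself responds
print(hf_endpoint.invoke("What is forex trading?"))

If a call like this works on its own but the Streamlit app still fails, I assume the problem is in how I wire the endpoint into the ConversationChain above.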