Log (@abhishek):
File "/usr/local/lib/python3.10/site-packages/streamlit/runtime/scriptrunner/script_runner.py", line 534, in _run_script
exec(code, module.__dict__)
File "/home/user/app/app.py", line 130, in <module>
main()
File "/home/user/app/app.py", line 106, in main
handle_userinput(user_question)
File "/home/user/app/app.py", line 71, in handle_userinput
response = st.session_state.conversation({"question": user_question})
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 310, in __call__
raise e
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 304, in __call__
self._call(inputs, run_manager=run_manager)
File "/usr/local/lib/python3.10/site-packages/langchain/chains/conversational_retrieval/base.py", line 159, in _call
answer = self.combine_docs_chain.run(
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 510, in run
return self(kwargs, callbacks=callbacks, tags=tags, metadata=metadata)[
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 310, in __call__
raise e
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 304, in __call__
self._call(inputs, run_manager=run_manager)
File "/usr/local/lib/python3.10/site-packages/langchain/chains/combine_documents/base.py", line 122, in _call
output, extra_return_dict = self.combine_docs(
File "/usr/local/lib/python3.10/site-packages/langchain/chains/combine_documents/stuff.py", line 171, in combine_docs
return self.llm_chain.predict(callbacks=callbacks, **inputs), {}
File "/usr/local/lib/python3.10/site-packages/langchain/chains/llm.py", line 298, in predict
return self(kwargs, callbacks=callbacks)[self.output_key]
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 310, in __call__
raise e
File "/usr/local/lib/python3.10/site-packages/langchain/chains/base.py", line 304, in __call__
self._call(inputs, run_manager=run_manager)
File "/usr/local/lib/python3.10/site-packages/langchain/chains/llm.py", line 108, in _call
response = self.generate([inputs], run_manager=run_manager)
File "/usr/local/lib/python3.10/site-packages/langchain/chains/llm.py", line 120, in generate
return self.llm.generate_prompt(
File "/usr/local/lib/python3.10/site-packages/langchain/llms/base.py", line 507, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
File "/usr/local/lib/python3.10/site-packages/langchain/llms/base.py", line 656, in generate
output = self._generate_helper(
File "/usr/local/lib/python3.10/site-packages/langchain/llms/base.py", line 544, in _generate_helper
raise e
File "/usr/local/lib/python3.10/site-packages/langchain/llms/base.py", line 531, in _generate_helper
self._generate(
File "/usr/local/lib/python3.10/site-packages/langchain/llms/base.py", line 1053, in _generate
self._call(prompt, stop=stop, run_manager=run_manager, **kwargs)
File "/usr/local/lib/python3.10/site-packages/langchain/llms/huggingface_hub.py", line 112, in _call
raise ValueError(f"Error raised by inference API: {response['error']}"
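
The stack bottoms out in HuggingFaceHub._call, so LangChain is just re-raising whatever error payload the hosted Inference API returned; the actual message in response['error'] isn't shown above. One way to surface it is to query the same endpoint directly, outside LangChain. A minimal sketch, assuming huggingface_hub is installed and the token is in the HUGGINGFACEHUB_API_TOKEN env var (the probe itself is illustrative, not part of the app):

import os

from huggingface_hub import InferenceClient

# Query the same hosted model directly so the raw API error becomes visible
client = InferenceClient(
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",
    token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
)
try:
    print(client.text_generation("Hello, Mixtral!", max_new_tokens=32))
except Exception as err:
    # This message is what LangChain wraps in the ValueError above
    print(f"Inference API error: {err}")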
Function code:

from langchain.chains import ConversationalRetrievalChain
from langchain.llms import HuggingFaceHub
from langchain.vectorstores import FAISS

def get_conversation_chain(vectorstore: FAISS) -> ConversationalRetrievalChain:
    llm = HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
        # repo_id="TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF",
        model_kwargs={"temperature": 0.5, "max_length": 1048},
    )
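
Note that as quoted, get_conversation_chain builds the LLM but never constructs or returns a chain, while app.py line 71 calls st.session_state.conversation({"question": user_question}) with no chat history, which requires a ConversationalRetrievalChain carrying its own memory. A minimal sketch of the presumably missing tail, written as a standalone helper (build_chain is a hypothetical name, and the ConversationBufferMemory setup is an assumption, not the original code):

from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

# Hypothetical helper standing in for the tail of get_conversation_chain
def build_chain(llm, vectorstore):
    # Memory is what lets the chain be invoked with only {"question": ...}
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )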