C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\redis\connection.py:77: UserWarning: redis-py works best with hiredis. Please consider installing
warnings.warn(msg)
Write Query Here: describe
C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\huggingface_hub\utils\_deprecation.py:131: FutureWarning: 'post' (from 'huggingface_hub.inference._client') is deprecated and will be removed from version '0.31.0'. Making direct POST requests to the inference server is not supported anymore. Please use task methods instead (e.g. `InferenceClient.chat_completion`). If your use case is not supported, please open an issue in https://github.com/huggingface/huggingface_hub.
warnings.warn(warning_message, FutureWarning)
Traceback (most recent call last):
File "C:\Users\Public\CHATBOT\llm_memory_with_Model.py", line 59, in <module>
response=qa_chain.invoke({'query': user_query})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 170, in invoke
raise e
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 160, in invoke
self._call(inputs, run_manager=run_manager)
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\retrieval_qa\base.py", line 154, in _call
answer = self.combine_documents_chain.run(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\_api\deprecation.py", line 181, in warning_emitting_wrapper
return wrapped(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 611, in run
return self(kwargs, callbacks=callbacks, tags=tags, metadata=metadata)[
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\_api\deprecation.py", line 181, in warning_emitting_wrapper
return wrapped(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 389, in __call__
return self.invoke(
^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 170, in invoke
raise e
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 160, in invoke
self._call(inputs, run_manager=run_manager)
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\combine_documents\base.py", line 138, in _call
output, extra_return_dict = self.combine_docs(
^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\combine_documents\stuff.py", line 259, in combine_docs
return self.llm_chain.predict(callbacks=callbacks, **inputs), {}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\llm.py", line 318, in predict
return self(kwargs, callbacks=callbacks)[self.output_key]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\_api\deprecation.py", line 181, in warning_emitting_wrapper
return wrapped(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 389, in __call__
return self.invoke(
^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 170, in invoke
raise e
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\base.py", line 160, in invoke
self._call(inputs, run_manager=run_manager)
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\llm.py", line 126, in _call
response = self.generate([inputs], run_manager=run_manager)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain\chains\llm.py", line 138, in generate
return self.llm.generate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\llms.py", line 763, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\llms.py", line 966, in generate
output = self._generate_helper(
^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\llms.py", line 787, in _generate_helper
self._generate(
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\llms.py", line 1526, in _generate
self._call(prompt, stop=stop, run_manager=run_manager, **kwargs)
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_huggingface\llms\huggingface_endpoint.py", line 312, in _call
response = self.client.post(
^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\huggingface_hub\utils\_deprecation.py", line 132, in inner_f
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\huggingface_hub\inference\_client.py", line 302, in post
mapped_model = provider_helper._prepare_mapped_model(model or self.model)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\huggingface_hub\inference\_providers\hf_inference.py", line 35, in _prepare_mapped_model
_check_supported_task(model_id, self.task)
File "C:\Users\suboyina\AppData\Local\Programs\Python\Python312\Lib\site-packages\huggingface_hub\inference\_providers\hf_inference.py", line 156, in _check_supported_task
raise ValueError(
ValueError: Model 'mistralai/Mistral-7B-Instruct-v0.3' doesn't support task 'unknown'. Supported tasks: 'text-generation', got: 'unknown'