I cannot figure out what the error below means when trying to create a knowledge graph from multiple documents using the LLMGraphTransformer
module from LangChain. I have retrieved several documents from Wikipedia and applied some preprocessing steps. Here is my full code:
repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
llm = HuggingFaceEndpoint(
repo_id=repo_id,
max_length=1000,
temperature=0.001,
huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
llm_transformer = LLMGraphTransformer(llm=llm)
documents = [Document(page_content=doc.page_content) for doc in preprocessed_documents]
# Convert unique documents to graph documents
graph_documents = llm_transformer.convert_to_graph_documents(documents)
# Combine nodes and relationships from all graph documents
all_nodes = set()
all_relationships = set()
for graph_doc in graph_documents:
all_nodes.update(graph_doc.nodes)
all_relationships.update(graph_doc.relationships)
print(f"Nodes: {all_nodes}")
print(f"Relationships: {all_relationships}")
And this is the error:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[54], line 3
1 documents = [Document(page_content=doc.page_content) for doc in preprocessed_documents]
2 # Convert unique documents to graph documents
----> 3 graph_documents = llm_transformer.convert_to_graph_documents(documents)
5 # Combine nodes and relationships from all graph documents
6 all_nodes = set()
File /opt/conda/lib/python3.10/site-packages/langchain_experimental/graph_transformers/llm.py:809, in LLMGraphTransformer.convert_to_graph_documents(self, documents, config)
797 def convert_to_graph_documents(
798 self, documents: Sequence[Document], config: Optional[RunnableConfig] = None
799 ) -> List[GraphDocument]:
800 """Convert a sequence of documents into graph documents.
801
802 Args:
(...)
807 Sequence[GraphDocument]: The transformed documents as graphs.
808 """
--> 809 return [self.process_response(document, config) for document in documents]
File /opt/conda/lib/python3.10/site-packages/langchain_experimental/graph_transformers/llm.py:809, in <listcomp>(.0)
797 def convert_to_graph_documents(
798 self, documents: Sequence[Document], config: Optional[RunnableConfig] = None
799 ) -> List[GraphDocument]:
800 """Convert a sequence of documents into graph documents.
801
802 Args:
(...)
807 Sequence[GraphDocument]: The transformed documents as graphs.
808 """
--> 809 return [self.process_response(document, config) for document in documents]
File /opt/conda/lib/python3.10/site-packages/langchain_experimental/graph_transformers/llm.py:762, in LLMGraphTransformer.process_response(self, document, config)
759 # Nodes need to be deduplicated using a set
760 # Use default Node label for nodes if missing
761 nodes_set.add((rel["head"], rel.get("head_type", "Node")))
--> 762 nodes_set.add((rel["tail"], rel.get("tail_type", "Node")))
764 source_node = Node(id=rel["head"], type=rel.get("head_type", "Node"))
765 target_node = Node(id=rel["tail"], type=rel.get("tail_type", "Node"))
TypeError: unhashable type: 'list'
1 Like
Here are search results that look similar to this error:
opened 02:59PM - 02 Sep 24 UTC
closed 07:11PM - 03 Sep 24 UTC
🤖:bug
investigate
### Checked other resources
- [X] I added a very descriptive title to this issue.
- [X] I searched the LangChain documentation with the integrated search.
- [X] I used the GitHub search to find a similar question and didn't find it.
- [X] I am sure that this is a bug in LangChain rather than my code.
- [X] The bug is not resolved by updating to the latest stable version of LangChain (or the specific integration package).
### Example Code
```
from langchain_huggingface.llms import HuggingFacePipeline
from langchain_core.prompts import PromptTemplate
import torch
model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
pipeline(
task="text-generation",
model=model_id,
model_kwargs={"torch_dtype": torch.bfloat16},
device_map="auto",
use_fast=True,
do_sample=True,
top_k=3,
max_new_tokens=40,
token="XXXXX" # Replace with your token.
)
hf = HuggingFacePipeline(pipeline=pipeline)
template = """Question: {question}
Answer: Let's think step by step."""
prompt = PromptTemplate.from_template(template)
chain = prompt | hf
question = "What is electroencephalography?"
print(chain.invoke({"question": question}))
```
### Error Message and Stack Trace (if applicable)
Traceback (most recent call last):
File "/home/ec2-user/test_models/smart_reply/langchain_llama3_1_8b.py", line 35, in <module>
print(chain.invoke({"question": question}))
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2878, in invoke
input = context.run(step.invoke, input, config)
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 385, in invoke
self.generate_prompt(
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 750, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 944, in generate
output = self._generate_helper(
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 787, in _generate_helper
raise e
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 774, in _generate_helper
self._generate(
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/langchain_huggingface/llms/huggingface_pipeline.py", line 269, in _generate
responses = self.pipeline(
File "/home/ec2-user/test_models/smart_reply/env/lib/python3.10/site-packages/transformers/pipelines/__init__.py", line 831, in pipeline
if task in custom_tasks:
TypeError: unhashable type: 'list'
### Description
I am trying to experiment with `llama3.1-8b-instruct` model. I used the reference code given in the doc here:
https://python.langchain.com/v0.2/docs/integrations/llms/huggingface_pipelines/
I am not able to generate the inference and getting error as shown above.
### System Info
System Information
------------------
> OS: Linux
> OS Version: #1 SMP Mon Jul 29 19:52:29 UTC 2024
> Python Version: 3.10.14 | packaged by conda-forge | (main, Mar 20 2024, 12:45:18) [GCC 12.3.0]
Package Information
-------------------
> langchain_core: 0.2.37
> langchain: 0.2.15
> langsmith: 0.1.108
> langchain_huggingface: 0.0.3
> langchain_llama3_1_8b: Installed. No version info available.
> langchain_text_splitters: 0.2.2
Optional packages not installed
-------------------------------
> langgraph
> langserve
Other Dependencies
------------------
> aiohttp: 3.9.3
> async-timeout: 4.0.3
> httpx: 0.27.2
> huggingface-hub: 0.24.3
> jsonpatch: 1.33
> numpy: 1.26.4
> orjson: 3.10.7
> packaging: 24.1
> pydantic: 1.10.18
> PyYAML: 6.0.2
> requests: 2.32.3
> sentence-transformers: 3.0.1
> SQLAlchemy: 2.0.32
> tenacity: 8.5.0
> tokenizers: 0.19.1
> transformers: 4.44.2
> typing-extensions: 4.12.2
opened 08:45AM - 29 Jun 24 UTC
🤖:bug
### Checked other resources
- [X] I added a very descriptive title to this issue.
- [X] I searched the LangChain documentation with the integrated search.
- [X] I used the GitHub search to find a similar question and didn't find it.
- [X] I am sure that this is a bug in LangChain rather than my code.
- [X] The bug is not resolved by updating to the latest stable version of LangChain (or the specific integration package).
### Example Code
```
loader = TextLoader(file_path)
# loader = Docx2txtLoader(file_path)
documents = loader.load() # + docx_documents
print("texts doc: =============================")
print(type(documents))
text_splitter = RecursiveCharacterTextSplitter(
chunk_size=800, chunk_overlap=200)
# text_splitter = TokenTextSplitter(chunk_size=512, chunk_overlap=24)
texts = text_splitter.split_documents(documents)
graph = Neo4jGraph()
llm_transformer = LLMGraphTransformer(llm=model)
print("===================load llm_transformer!=========================")
graph_documents = llm_transformer.convert_to_graph_documents(texts)
```
### Error Message and Stack Trace (if applicable)
_No response_
### Description
Traceback (most recent call last):
File "/work/baichuan/script/langchain/graphRag.py", line 225, in <module>
graph_documents = llm_transformer.convert_to_graph_documents(texts)
File "/root/miniconda3/envs/rag/lib/python3.10/site-packages/langchain_experimental/graph_transformers/llm.py", line 762, in convert_to_graph_documents
return [self.process_response(document) for document in documents]
File "/root/miniconda3/envs/rag/lib/python3.10/site-packages/langchain_experimental/graph_transformers/llm.py", line 762, in <listcomp>
return [self.process_response(document) for document in documents]
File "/root/miniconda3/envs/rag/lib/python3.10/site-packages/langchain_experimental/graph_transformers/llm.py", line 714, in process_response
nodes_set.add((rel["head"], rel["head_type"]))
TypeError: list indices must be integers or slices, not str
### System Info
# Name Version Build Channel
_libgcc_mutex 0.1 conda_forge conda-forge
_openmp_mutex 4.5 2_gnu conda-forge
absl-py 2.1.0 pypi_0 pypi
accelerate 0.21.0 pypi_0 pypi
addict 2.4.0 pypi_0 pypi
aiofiles 23.2.1 pypi_0 pypi
aiohttp 3.9.5 py310h2372a71_0 conda-forge
aiosignal 1.3.1 pyhd8ed1ab_0 conda-forge
altair 5.3.0 pypi_0 pypi
annotated-types 0.7.0 pyhd8ed1ab_0 conda-forge
anyio 4.3.0 pyhd8ed1ab_0 conda-forge
astunparse 1.6.2 pypi_0 pypi
async-timeout 4.0.3 pyhd8ed1ab_0 conda-forge
attrs 23.2.0 pyh71513ae_0 conda-forge
backoff 2.2.1 pypi_0 pypi
beautifulsoup4 4.12.3 pypi_0 pypi
bitsandbytes 0.41.0 pypi_0 pypi
blas 1.0 mkl anaconda
blinker 1.8.2 pypi_0 pypi
brotli-python 1.0.9 py310hd8f1fbe_7 conda-forge
bzip2 1.0.8 h5eee18b_6
ca-certificates 2024.3.11 h06a4308_0
certifi 2024.2.2 py310h06a4308_0
chardet 5.2.0 pypi_0 pypi
charset-normalizer 3.3.2 pyhd8ed1ab_0 conda-forge
click 8.1.7 pypi_0 pypi
cmake 3.29.3 pypi_0 pypi
contourpy 1.2.1 pypi_0 pypi
cudatoolkit 11.4.2 h7a5bcfd_10 conda-forge
cycler 0.12.1 pypi_0 pypi
dataclasses-json 0.6.6 pyhd8ed1ab_0 conda-forge
datasets 2.14.7 pypi_0 pypi
deepdiff 7.0.1 pypi_0 pypi
deepspeed 0.9.5 pypi_0 pypi
dill 0.3.7 pypi_0 pypi
dnspython 2.6.1 pypi_0 pypi
docstring-parser 0.16 pypi_0 pypi
docx2txt 0.8 pypi_0 pypi
einops 0.8.0 pypi_0 pypi
email-validator 2.1.1 pypi_0 pypi
emoji 2.12.1 pypi_0 pypi
exceptiongroup 1.2.1 pypi_0 pypi
faiss 1.7.3 py310cuda112hae2f2aa_0_cuda conda-forge
faiss-gpu 1.7.3 h5b0ac8e_0 conda-forge
fastapi 0.111.0 pypi_0 pypi
fastapi-cli 0.0.4 pypi_0 pypi
ffmpy 0.3.2 pypi_0 pypi
filelock 3.14.0 pypi_0 pypi
filetype 1.2.0 pypi_0 pypi
flask 3.0.3 pypi_0 pypi
flask-cors 4.0.1 pypi_0 pypi
fonttools 4.52.1 pypi_0 pypi
frozenlist 1.4.1 py310h2372a71_0 conda-forge
fsspec 2023.10.0 pypi_0 pypi
gradio-client 0.17.0 pypi_0 pypi
greenlet 1.1.2 py310hd8f1fbe_2 conda-forge
grpcio 1.64.0 pypi_0 pypi
h11 0.14.0 pypi_0 pypi
hjson 3.1.0 pypi_0 pypi
httpcore 1.0.5 pypi_0 pypi
httptools 0.6.1 pypi_0 pypi
httpx 0.27.0 pypi_0 pypi
huggingface-hub 0.17.3 pypi_0 pypi
idna 3.7 pyhd8ed1ab_0 conda-forge
importlib-metadata 7.1.0 pypi_0 pypi
importlib-resources 6.4.0 pypi_0 pypi
intel-openmp 2021.4.0 h06a4308_3561 anaconda
itsdangerous 2.2.0 pypi_0 pypi
jinja2 3.1.4 pypi_0 pypi
joblib 1.2.0 py310h06a4308_0 anaconda
json-repair 0.25.2 pypi_0 pypi
jsonpatch 1.33 pyhd8ed1ab_0 conda-forge
jsonpath-python 1.0.6 pypi_0 pypi
jsonpointer 2.4 py310hff52083_3 conda-forge
jsonschema 4.22.0 pypi_0 pypi
jsonschema-specifications 2023.12.1 pypi_0 pypi
kiwisolver 1.4.5 pypi_0 pypi
langchain 0.2.6 pypi_0 pypi
langchain-community 0.2.6 pypi_0 pypi
langchain-core 0.2.10 pypi_0 pypi
langchain-experimental 0.0.62 pypi_0 pypi
langchain-text-splitters 0.2.2 pypi_0 pypi
langdetect 1.0.9 pypi_0 pypi
langsmith 0.1.82 pypi_0 pypi
ld_impl_linux-64 2.38 h1181459_1
libblas 3.9.0 12_linux64_mkl conda-forge
libfaiss 1.7.3 cuda112hb18a002_0_cuda conda-forge
libfaiss-avx2 1.7.3 cuda112h1234567_0_cuda conda-forge
libffi 3.4.4 h6a678d5_1
libgcc-ng 13.2.0 h77fa898_7 conda-forge
libgfortran-ng 7.5.0 ha8ba4b0_17
libgfortran4 7.5.0 ha8ba4b0_17
libgomp 13.2.0 h77fa898_7 conda-forge
liblapack 3.9.0 12_linux64_mkl conda-forge
libstdcxx-ng 13.2.0 hc0a3c3a_7 conda-forge
libuuid 1.41.5 h5eee18b_0
lit 18.1.6 pypi_0 pypi
loguru 0.7.0 pypi_0 pypi
lxml 5.2.2 pypi_0 pypi
markdown 3.6 pypi_0 pypi
markdown-it-py 3.0.0 pypi_0 pypi
markupsafe 2.1.5 pypi_0 pypi
marshmallow 3.21.2 pyhd8ed1ab_0 conda-forge
matplotlib 3.8.4 pypi_0 pypi
mdurl 0.1.2 pypi_0 pypi
mkl 2021.4.0 h06a4308_640 anaconda
mkl-service 2.4.0 py310h7f8727e_0 anaconda
mkl_fft 1.3.1 py310hd6ae3a3_0 anaconda
mkl_random 1.2.2 py310h00e6091_0 anaconda
mmengine 0.10.4 pypi_0 pypi
mpi 1.0 mpich
mpi4py 3.1.4 py310hfc96bbd_0
mpich 3.3.2 hc856adb_0
mpmath 1.3.0 pypi_0 pypi
multidict 6.0.5 py310h2372a71_0 conda-forge
multiprocess 0.70.15 pypi_0 pypi
mypy_extensions 1.0.0 pyha770c72_0 conda-forge
ncurses 6.4 h6a678d5_0
neo4j 5.22.0 pypi_0 pypi
networkx 3.3 pypi_0 pypi
ninja 1.11.1.1 pypi_0 pypi
nltk 3.8.1 pypi_0 pypi
numpy 1.21.4 pypi_0 pypi
numpy-base 1.24.3 py310h8e6c178_0 anaconda
nvidia-cublas-cu11 11.10.3.66 pypi_0 pypi
nvidia-cuda-cupti-cu11 11.7.101 pypi_0 pypi
nvidia-cuda-nvrtc-cu11 11.7.99 pypi_0 pypi
nvidia-cuda-runtime-cu11 11.7.99 pypi_0 pypi
nvidia-cudnn-cu11 8.5.0.96 pypi_0 pypi
nvidia-cufft-cu11 10.9.0.58 pypi_0 pypi
nvidia-curand-cu11 10.2.10.91 pypi_0 pypi
nvidia-cusolver-cu11 11.4.0.1 pypi_0 pypi
nvidia-cusparse-cu11 11.7.4.91 pypi_0 pypi
nvidia-nccl-cu11 2.14.3 pypi_0 pypi
nvidia-nvtx-cu11 11.7.91 pypi_0 pypi
opencv-python 4.9.0.80 pypi_0 pypi
openssl 3.3.0 h4ab18f5_3 conda-forge
ordered-set 4.1.0 pypi_0 pypi
orjson 3.10.3 py310he421c4c_0 conda-forge
packaging 24.0 pypi_0 pypi
pandas 1.2.5 pypi_0 pypi
peft 0.4.0 pypi_0 pypi
pillow 10.3.0 pypi_0 pypi
pip 24.0 py310h06a4308_0
platformdirs 4.2.2 pypi_0 pypi
protobuf 5.27.0 pypi_0 pypi
psutil 5.9.8 pypi_0 pypi
py-cpuinfo 9.0.0 pypi_0 pypi
pyarrow 16.1.0 pypi_0 pypi
pyarrow-hotfix 0.6 pypi_0 pypi
pydantic 2.7.3 pypi_0 pypi
pydantic-core 2.18.4 pypi_0 pypi
pydub 0.25.1 pypi_0 pypi
pygments 2.18.0 pypi_0 pypi
pyparsing 3.1.2 pypi_0 pypi
pypdf 4.2.0 pypi_0 pypi
pyre-extensions 0.0.29 pypi_0 pypi
pysocks 1.7.1 pyha2e5f31_6 conda-forge
python 3.10.14 h955ad1f_1
python-dateutil 2.9.0.post0 pypi_0 pypi
python-dotenv 1.0.1 pypi_0 pypi
python-iso639 2024.4.27 pypi_0 pypi
python-magic 0.4.27 pypi_0 pypi
python-multipart 0.0.9 pypi_0 pypi
python_abi 3.10 2_cp310 conda-forge
pytz 2024.1 pypi_0 pypi
pyyaml 6.0.1 py310h2372a71_1 conda-forge
rapidfuzz 3.9.3 pypi_0 pypi
readline 8.2 h5eee18b_0
referencing 0.35.1 pypi_0 pypi
regex 2024.5.15 pypi_0 pypi
requests 2.32.2 pyhd8ed1ab_0 conda-forge
rich 13.7.1 pypi_0 pypi
rpds-py 0.18.1 pypi_0 pypi
ruff 0.4.7 pypi_0 pypi
safetensors 0.4.3 pypi_0 pypi
scikit-learn 1.3.0 py310h1128e8f_0 anaconda
scipy 1.10.1 pypi_0 pypi
semantic-version 2.10.0 pypi_0 pypi
sentence-transformers 2.7.0 pypi_0 pypi
sentencepiece 0.2.0 pypi_0 pypi
setuptools 70.0.0 pypi_0 pypi
shellingham 1.5.4 pypi_0 pypi
shtab 1.7.1 pypi_0 pypi
six 1.16.0 pyhd3eb1b0_1 anaconda
sniffio 1.3.1 pyhd8ed1ab_0 conda-forge
soupsieve 2.5 pypi_0 pypi
sqlalchemy 2.0.30 py310hc51659f_0 conda-forge
sqlite 3.45.3 h5eee18b_0
starlette 0.37.2 pypi_0 pypi
sympy 1.12 pypi_0 pypi
tabulate 0.9.0 pypi_0 pypi
tenacity 8.3.0 pyhd8ed1ab_0 conda-forge
tensorboard 2.16.2 pypi_0 pypi
tensorboard-data-server 0.7.2 pypi_0 pypi
termcolor 2.4.0 pypi_0 pypi
threadpoolctl 2.2.0 pyh0d69192_0 anaconda
tiktoken 0.7.0 pypi_0 pypi
tk 8.6.14 h39e8969_0
tokenizers 0.14.1 pypi_0 pypi
tomli 2.0.1 pypi_0 pypi
tomlkit 0.12.0 pypi_0 pypi
toolz 0.12.1 pypi_0 pypi
torch 2.0.0 pypi_0 pypi
tqdm 4.62.3 pypi_0 pypi
transformers 4.34.0 pypi_0 pypi
transformers-stream-generator 0.0.5 pypi_0 pypi
triton 2.0.0 pypi_0 pypi
trl 0.7.11 pypi_0 pypi
typer 0.12.3 pypi_0 pypi
typing-extensions 4.9.0 pypi_0 pypi
typing_inspect 0.9.0 pyhd8ed1ab_0 conda-forge
tyro 0.8.4 pypi_0 pypi
tzdata 2024a h04d1e81_0
ujson 5.10.0 pypi_0 pypi
unstructured 0.14.4 pypi_0 pypi
unstructured-client 0.22.0 pypi_0 pypi
urllib3 2.2.1 pyhd8ed1ab_0 conda-forge
uvicorn 0.30.1 pypi_0 pypi
uvloop 0.19.0 pypi_0 pypi
watchfiles 0.22.0 pypi_0 pypi
websockets 11.0.3 pypi_0 pypi
werkzeug 3.0.3 pypi_0 pypi
wheel 0.43.0 py310h06a4308_0
wikipedia 1.4.0 pypi_0 pypi
wrapt 1.16.0 pypi_0 pypi
xformers 0.0.19 pypi_0 pypi
xxhash 3.4.1 pypi_0 pypi
xz 5.4.6 h5eee18b_1
yaml 0.2.5 h7f98852_2 conda-forge
yapf 0.40.2 pypi_0 pypi
yarl 1.9.4 py310h2372a71_0 conda-forge
zipp 3.18.2 pypi_0 pypi
zlib 1.2.13 h5eee18b_1