I have installed the following Python modules:
%pip install llama-index-readers-elasticsearch
%pip install llama-index-vector-stores-opensearch
Then I try to import:
from llama_index.vector_stores.opensearch import OpensearchVectorClient
I'm getting the following error. Can you help me resolve this?
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
Cell In[12], line 1
----> 1 from llama_index.vector_stores.opensearch import OpensearchVectorClient
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\vector_stores\__init__.py:33
31 from llama_index.vector_stores.myscale import MyScaleVectorStore
32 from llama_index.vector_stores.neo4jvector import Neo4jVectorStore
---> 33 from llama_index.vector_stores.opensearch import (
34 OpensearchVectorClient,
35 OpensearchVectorStore,
36 )
37 from llama_index.vector_stores.pgvecto_rs import PGVectoRsStore
38 from llama_index.vector_stores.pinecone import PineconeVectorStore
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\vector_stores\opensearch\__init__.py:1
----> 1 from llama_index.vector_stores.opensearch.base import OpensearchVectorStore
3 __all__ = ["OpensearchVectorStore"]
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\vector_stores\opensearch\base.py:7
4 from typing import Any, Dict, Iterable, List, Optional, Union, cast
6 from llama_index.core.schema import BaseNode, MetadataMode, TextNode
----> 7 from llama_index.core.vector_stores.types import (
8 MetadataFilters,
9 VectorStore,
10 VectorStoreQuery,
11 VectorStoreQueryMode,
12 VectorStoreQueryResult,
13 )
14 from llama_index.core.vector_stores.utils import (
15 metadata_dict_to_node,
16 node_to_metadata_dict,
17 )
18 from opensearchpy import OpenSearch
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\vector_stores\__init__.py:4
1 """Vector stores."""
----> 4 from llama_index.core.vector_stores.simple import SimpleVectorStore
5 from llama_index.core.vector_stores.types import (
6 ExactMatchFilter,
7 FilterCondition,
(...)
12 VectorStoreQueryResult,
13 )
15 __all__ = [
16 "VectorStoreQuery",
17 "VectorStoreQueryResult",
(...)
23 "SimpleVectorStore",
24 ]
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\vector_stores\simple.py:11
9 import fsspec
10 from dataclasses_json import DataClassJsonMixin
---> 11 from llama_index.core.indices.query.embedding_utils import (
12 get_top_k_embeddings,
13 get_top_k_embeddings_learner,
14 get_top_k_mmr_embeddings,
15 )
16 from llama_index.core.schema import BaseNode
17 from llama_index.core.utils import concat_dirs
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\indices\__init__.py:4
1 """LlamaIndex data structures."""
3 # indices
----> 4 from llama_index.core.indices.composability.graph import ComposableGraph
5 from llama_index.core.indices.document_summary import (
6 DocumentSummaryIndex,
7 GPTDocumentSummaryIndex,
8 )
9 from llama_index.core.indices.document_summary.base import DocumentSummaryIndex
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\indices\composability\__init__.py:4
1 """This module contains all classes used for composing graphs over indices."""
----> 4 from llama_index.core.indices.composability.graph import ComposableGraph
6 __all__ = ["ComposableGraph"]
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\indices\composability\graph.py:5
1 """Composability graphs."""
3 from typing import Any, Dict, List, Optional, Sequence, Type, cast
----> 5 from llama_index.core.base.base_query_engine import BaseQueryEngine
6 from llama_index.core.data_structs.data_structs import IndexStruct
7 from llama_index.core.indices.base import BaseIndex
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\base\base_query_engine.py:17
15 from llama_index.core.bridge.pydantic import Field
16 from llama_index.core.callbacks.base import CallbackManager
---> 17 from llama_index.core.prompts.mixin import PromptDictType, PromptMixin
18 from llama_index.core.schema import NodeWithScore, QueryBundle, QueryType
20 logger = logging.getLogger(__name__)
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\prompts\__init__.py:4
1 """Prompt class."""
3 from llama_index.core.base.llms.types import ChatMessage, MessageRole
----> 4 from llama_index.core.prompts.base import (
5 BasePromptTemplate,
6 ChatPromptTemplate,
7 LangchainPromptTemplate,
8 Prompt,
9 PromptTemplate,
10 PromptType,
11 SelectorPromptTemplate,
12 )
13 from llama_index.core.prompts.display_utils import display_prompt_dict
15 __all__ = [
16 "Prompt",
17 "PromptTemplate",
(...)
25 "display_prompt_dict",
26 ]
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\prompts\base.py:37
29 from llama_index.core.base.query_pipeline.query import (
30 ChainableMixin,
31 InputKeys,
(...)
34 validate_and_convert_stringable,
35 )
36 from llama_index.core.bridge.pydantic import BaseModel
---> 37 from llama_index.core.llms.base import BaseLLM
38 from llama_index.core.llms.generic_utils import (
39 messages_to_prompt as default_messages_to_prompt,
40 )
41 from llama_index.core.llms.generic_utils import (
42 prompt_to_messages,
43 )
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\llms\base.py:21
17 from llama_index.core.base.query_pipeline.query import (
18 ChainableMixin,
19 )
20 from llama_index.core.bridge.pydantic import Field, validator
---> 21 from llama_index.core.callbacks import CallbackManager
22 from llama_index.core.schema import BaseComponent
25 class BaseLLM(ChainableMixin, BaseComponent):
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\callbacks\__init__.py:4
2 from .llama_debug import LlamaDebugHandler
3 from .schema import CBEvent, CBEventType, EventPayload
----> 4 from .token_counting import TokenCountingHandler
5 from .utils import trace_method
7 __all__ = [
8 "CallbackManager",
9 "CBEvent",
(...)
14 "trace_method",
15 ]
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\callbacks\token_counting.py:6
4 from llama_index.core.callbacks.base_handler import BaseCallbackHandler
5 from llama_index.core.callbacks.schema import CBEventType, EventPayload
----> 6 from llama_index.core.utilities.token_counting import TokenCounter
7 from llama_index.core.utils import get_tokenizer
10 @dataclass
11 class TokenCountingEvent:
File ~\AppData\Roaming\Python\Python311\site-packages\llama_index\core\utilities\token_counting.py:6
1 # Modified from:
2 # https://github.com/nyno-ai/openai-token-counter
4 from typing import Any, Callable, Dict, List, Optional
----> 6 from llama_index.core.llms import ChatMessage, MessageRole
7 from llama_index.core.utils import get_tokenizer
10 class TokenCounter:
ImportError: cannot import name 'ChatMessage' from 'llama_index.core.llms' (C:\Users\user\AppData\Roaming\Python\Python311\site-packages\llama_index\core\llms\__init__.py)
In llama-index 0.10.4 I solved the OpensearchVectorClient import by importing it from the legacy module:
from llama_index.legacy.vector_stores.opensearch import OpensearchVectorClient
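For context, here is a minimal sketch of how that legacy client can be wired into a vector store. The endpoint, index name, and embedding dimension below are placeholders (assumptions), so adjust them to your OpenSearch cluster and embedding model:

from llama_index.legacy.vector_stores.opensearch import (
    OpensearchVectorClient,
    OpensearchVectorStore,
)

# Placeholder values -- replace with your own endpoint, index, and dimension
client = OpensearchVectorClient(
    "http://localhost:9200",  # OpenSearch endpoint (assumption)
    "my-index",               # index name (assumption)
    1536,                     # must match your embedding model's output dimension
)
vector_store = OpensearchVectorStore(client)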
The other error in your logs,
cannot import name 'ChatMessage' from 'llama_index.core.llms'
was happening to me after upgrading a 0.9 version to the latest. It went away after removing llama-index completely and reinstalling 0.10.4 (I saw this in a GitHub issues thread); a rough sketch of the reinstall is below.
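The exact package names to uninstall are an assumption on my part; adjust the list to whatever llama-index packages you actually have installed, and restart the kernel afterwards so the fresh install is picked up:

%pip uninstall -y llama-index llama-index-core llama-index-legacy
%pip install llama-index==0.10.4
%pip install llama-index-vector-stores-opensearch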