I am trying to implement a document-based question answering system using LangChain and Gemini Pro, but I am getting this error:

langchain_google_genai.chat_models.ChatGoogleGenerativeAIError: Message of 'system' type not supported by Gemini. Please only provide it with Human or AI (user/assistant) messages.

My code is:

from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_google_genai.embeddings import GoogleGenerativeAIEmbeddings
from PyPDF2 import PdfReader
from langchain.chains.summarize import load_summarize_chain
import google.generativeai as genai

genai.configure(api_key='api_key')

reader = PdfReader('Document Path')
# Read every page of the PDF and join the extracted text into one string
file_data = []
for page in reader.pages:
    file_data.append(page.extract_text())
file_data = " ".join(file_data)

text_splitter = RecursiveCharacterTextSplitter(separators=["\n\n", "\n", "\t"], chunk_size=8000, chunk_overlap=500)
docs = text_splitter.create_documents([file_data])
print(docs[0].page_content)
print(len(docs))
embeddings = GoogleGenerativeAIEmbeddings(google_api_key='api_key', model='models/embedding-001')
vectors = embeddings.embed_documents([i.page_content for i in docs])
print(vectors)
llm = ChatGoogleGenerativeAI(google_api_key='api_key', model='gemini-pro', temperature=0)

from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA
from langchain.schema.messages import HumanMessage
question = {"text": "What is the main argument of this document?"}
vectorstore = Chroma.from_documents(docs, embeddings)

qa = RetrievalQA.from_chain_type(llm=llm, chain_type="map_reduce", retriever=vectorstore.as_retriever())
response = qa({'query':question,"role":"Human"})
print(response)

How can I resolve this?

I want to implement a document-based question answering system using LangChain and Gemini Pro.


There is 1 answer

Vishal Jha

Gemini doesn't support system messages yet, and the default prompts used by the RetrievalQA chain include one, which is what triggers this error. Try setting the parameter

convert_system_message_to_human=True

in the LLM config, so that LangChain merges the system message into the first human message:

llm = ChatGoogleGenerativeAI(
    google_api_key='api_key',
    model='gemini-pro',
    temperature=0,
    convert_system_message_to_human=True
)
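
For completeness, here is a minimal sketch of how this LLM plugs into the rest of the pipeline from your question. Note that RetrievalQA expects the question as a plain string under the "query" key (not a dict, and no "role" key is needed); the vectorstore is the Chroma store you built earlier.

# Rebuild the chain with the LLM that converts system messages to human messages
llm = ChatGoogleGenerativeAI(
    google_api_key='api_key',
    model='gemini-pro',
    temperature=0,
    convert_system_message_to_human=True,
)

qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="map_reduce",
    retriever=vectorstore.as_retriever(),
)

# RetrievalQA takes the question as a plain string under "query"
# and returns a dict with the answer under "result"
response = qa({"query": "What is the main argument of this document?"})
print(response["result"])

If you only need the answer text, qa.run("What is the main argument of this document?") returns the string directly.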