I'm experiencing an issue with Pinecone storage in an Edge environment. As soon as the code uses Pinecone, the application breaks with the error "edgeFunction is not a function." However, if I remove the function and its call (createContext and the contextChain line), the code works fine. The functions fetchChatbotById and deductMessages also perform their tasks without issues. I have been trying to resolve this problem for three days now and would appreciate any help. Below is the code snippet where the issue occurs:
import { StreamingTextResponse, LangChainStream, Message } from "ai";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { AIChatMessage, HumanChatMessage } from "langchain/schema";
import { NextRequest } from "next/server";
import { supabase } from '@/lib/supabase';
import { Chatbot } from '@/types/database';
import { PINECONE_INDEX_NAME } from '@/config/pinecone';
import { pinecone } from '@/lib/pinecone/pinecone-client';
import { PineconeStore } from 'langchain/vectorstores/pinecone';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { ConversationalRetrievalQAChain } from 'langchain/chains';
import { BufferMemory } from 'langchain/memory';
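// Run this route on the Edge runtime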
export const runtime = "edge";
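// Build a ConversationalRetrievalQAChain over the Pinecone namespace for this chatbot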
export async function createContext(chatbot_id: string) {
const index = await pinecone.Index(PINECONE_INDEX_NAME);
const vectorStore = await PineconeStore.fromExistingIndex(
new OpenAIEmbeddings({}),
{
pineconeIndex: index,
textKey: 'text',
      namespace: chatbot_id, // chatbot_id is typed as string, so the array check was dead code
}
);
const GPT3 = new ChatOpenAI({
temperature: 0,
modelName: 'gpt-3.5-turbo',
});
const chain = ConversationalRetrievalQAChain.fromLLM(
GPT3,
vectorStore.asRetriever(),
{
returnSourceDocuments: true,
memory: new BufferMemory({
memoryKey: 'chat_history',
inputKey: 'question',
outputKey: 'text',
returnMessages: true,
}),
questionGeneratorChainOptions: {
llm: GPT3,
},
},
);
return chain;
}
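// Decrement the user's remaining message allowance in Supabase (read the limit, then write it back)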
async function deductMessages(user_id: string): Promise<number> {
  // Get the current message_limit
console.log("Fetching message_limit for user:", user_id);
const { data, error } = await supabase
.from('user_message_limits')
.select('message_limit')
.eq('user_id', user_id);
console.log("Fetched data:", data);
if (error) {
console.error("Failed to fetch message_limit:", error);
throw error;
}
if (!data || data.length === 0) {
throw new Error("No subscription found for the user");
}
const currentLimit = data[0]?.message_limit;
if (currentLimit === undefined || currentLimit < 1) {
throw new Error("Not enough messages");
}
const updatedLimit = currentLimit - 1;
  // Update the message_limit
const { error: updateError } = await supabase
.from('user_message_limits')
.update({ message_limit: updatedLimit })
.eq('user_id', user_id);
if (updateError) {
console.error("Failed to update message_limit:", updateError);
throw updateError;
}
return updatedLimit;
}
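// Fetch the chatbot row(s) matching the given chatbot_id from Supabase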
async function fetchChatbotById(chatbot_id: string): Promise<Chatbot[]> {
const { data, error } = await supabase
.from('chatbots')
.select('*')
.eq('chatbot_id', chatbot_id);
if (error) {
console.error("Query execution failed:", error);
throw error; // or return { error };
}
return data;
}
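// Edge route handler: reads the chatbot id from the Authorization header, charges one message,
// builds the retrieval chain, and streams the model's reply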
export default async function handler(req: NextRequest) {
const messages = await req.json();
const data = req.headers.get('Authorization');
const chatbot_id = data as string;
console.log('Chatbot_id:', chatbot_id);
const chatbotData = await fetchChatbotById(chatbot_id);
const chatbot: Chatbot = chatbotData[0];
const userId = chatbot.user_id as string;
await deductMessages(userId);
const contextChain = await createContext(chatbot_id);
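  // LangChainStream gives back a ReadableStream plus callback handlers that pipe LLM tokens into it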
const { stream, handlers } = LangChainStream();
const llm = new ChatOpenAI({
modelName: "gpt-3.5-turbo",
streaming: true,
temperature: 0.9,
maxTokens: 200,
openAIApiKey: '###'
});
  console.log(messages);
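  // Convert the incoming messages (typed with Message from "ai") to LangChain chat messages
  // and stream the completion back through the handlers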
llm.call(
messages.messages.map((m: Message) =>
m.role == "user" ? new HumanChatMessage(m.content) : new AIChatMessage(m.content)
),
{},
[handlers]
)
.catch(console.error);
return new StreamingTextResponse(stream);
}
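For context, this is roughly how I intend to use contextChain once createContext stops breaking (a minimal sketch; the BufferMemory configured in the chain supplies chat_history, so only the question is passed, and lastUserMessage is an illustrative name):

const lastUserMessage = messages.messages[messages.messages.length - 1].content;
const result = await contextChain.call({ question: lastUserMessage });
console.log(result.text, result.sourceDocuments);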
I also tried moving this function to a separate file (passing everything it needs as props) and removing the edge function, but that would force me to rewrite a significant part of the code.
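For reference, that attempt looked roughly like this (a sketch only; the file path and the '@/lib/create-context' import are illustrative): a regular Node-runtime API route that owns the Pinecone work, which the edge handler would then call over fetch.

// pages/api/context.ts — hypothetical non-edge API route that builds and runs the chain
import type { NextApiRequest, NextApiResponse } from 'next';
import { createContext } from '@/lib/create-context'; // createContext moved out of the edge file

export default async function contextHandler(req: NextApiRequest, res: NextApiResponse) {
  const { chatbot_id, question } = req.body;
  const chain = await createContext(chatbot_id);
  const result = await chain.call({ question });
  res.status(200).json({ text: result.text });
}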