Langchain chatbot not using my prompt template

I have created a chatbot using langchain and custom prompt template but when I send a message “Who are you” the bot responded “I am an AI assistant programmed to provide information and answer questions. How can I assist you today?” and here is my code

let retriever = null;
let llm = null;

class Socket {
    constructor(socket, openai) {
        this.io = socket;
        this.openai = openai;
        this.user_unique_id = '';
        this.users = [];
    }

    SocketConnection() {
        this.io.use(async (socket, next) => {
            if (socket.handshake.query.user_id && socket.handshake.query.unique_key && socket.handshake.query.company_id) {
                this.user_unique_id = socket.handshake.query.user_id
                socket['user_id'] = 'user_' + this.user_unique_id;
                this.users[socket.user_id] = socket.id;
                if (!socket['conversation_' + socket['user_id']]) {
                    socket['conversation_' + socket['user_id']] = [];
                }

                socket['training_unique_key_user_' + socket['user_id']] = socket.handshake.query.unique_key;
                socket['company_id_user_' + socket['user_id']] = socket.handshake.query.company_id;
                next();
            }
            console.log(socket['training_unique_key_user_' + socket['user_id']])
            console.log(socket['company_id_user_' + socket['user_id']])

            if (retriever === null ||
                !fs.existsSync(`./db/${socket['company_id_user_' + socket['user_id']]}/docstore.json`) ||
                !fs.existsSync(`./db/${socket['company_id_user_' + socket['user_id']]}/faiss.index`)) {
                await this.loadDataFromDocs(socket['company_id_user_' + socket['user_id']]);
            }

            if (llm === null) {
                await this.generateLLM();
            }
        });
    }

    async loadDataFromDocs(company_id) {
        try {
            // Load the data from docs files only once
            const loader = new TextLoader(`./docs/${company_id}/products.json`);
            let docs
            await loader.load().then((data) => {
                docs = data
            });

            // Generate the vector store from the documents & Store the vector store in a variable to be used as retriever
            await FaissStore.fromDocuments(docs, new OpenAIEmbeddings()).then((vectorStore) => {
                vectorStore.save(`db/${company_id}`).then(() => {
                    retriever = vectorStore.asRetriever()
                })
            });
        } catch (e) {
            console.error(e.message);
        }
    }

    generateLLM() {
        try {
            llm = new ChatOpenAI({
                modelName: process.env.OPENAI_MODEL,
                openAIApiKey: process.env.OPENAI_API_KEY,
                temperature: 0,
                maxTokens: 150
            });
        } catch (e) {
            console.error(e.message);
        }
    }
    
    sendMessage(socket) {
        socket.on("sendMessage", async (data, callback) => {
            let user_message = data.msg.trim();
            let openai_response = '';
            try {
                socket['conversation_' + socket['user_id']].push({role: "user", content: user_message});

                const template = "You are a chatbot for e-commerce. Be kind, detailed and nice. " +
                    "Present the given queried search result in a nice way as an answer to the user input. " +
                    "The products are fruits, and the prices are in EGP. " +
                    "Provide the fruits in a dot list. " +
                    "Don't ask questions back! Just take the given context:" +
                    "{chat_history} " +
                    "Human: {question} " +
                    "Chatbot:";

                // Query the retrieval chain with the specified question
                const chain = ConversationalRetrievalQAChain.fromLLM(llm, retriever, {
                    prompt: PromptTemplate.fromTemplate(template),
                    memory: new ConversationSummaryMemory({
                        memoryKey: "chat_history",
                        llm: llm,
                    })
                })
                const res = await chain.call({question: user_message})
                openai_response = res.text;
                this.io.sockets.connected[this.users[socket['user_id']]].emit("getOpenAiResponse", {
                    response: res,
                });

                socket['conversation_' + socket['user_id']].push({role: "assistant", content: openai_response});
            } catch (e) {
                console.log(e)
            }
            callback();
        });

    }
}

@jochenschultz Could you pls check this issue and advise me?

1 Like

Haha, nice try :sweat_smile:

Debugging your own code is the best way to learn it.

If you have any general questions I am happy to answer them.

For finding bugs in code or giving solutions in code you would have to pay me or at least the project needs to serve a higher purpose like ending world hunger (not to mess up with providing world richness). But there is also a waitlist for that.

I suggest to use the webrequest plugin on chatgpt and give it the code.