ChatPromptValue object is not subscriptable

So here I am creating a chatbot following the documentation on "add chat history" by manually appending to the chat history. But on the invoke() call below it's giving the error "ChatPromptValue object is not subscriptable". Please help me resolve this issue.

Here is my code:

   retriever = vectorstore.as_retriever()
    prompt = hub.pull("rlm/rag-prompt")



    # Define the template for the prompt
    template = """You are an AI assistant for answering questions about the phi-3 model technical document.
    You are given the following extracted parts of a long document and a question. Provide a conversational answer.
    If you don't know the answer, just say "Hmm, I'm not sure." Don't try to make up an answer.
    If the question is not about phi-3 model, politely inform them that you are tuned to only answer questions about phi-3 model.
    Question: {question}
    =========
    {context}
    =========
    Answer in Markdown:"""


    contextualize_q_system_prompt = """Given a chat history and the latest user question \
    which might reference context in the chat history, formulate a standalone question \
    which can be understood without the chat history. Do NOT answer the question, \
    just reformulate it if needed and otherwise return it as is."""

    contextualize_q_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", contextualize_q_system_prompt),
            MessagesPlaceholder("chat_history"),
            ("human", "{input}"),
        ]
    )
    history_aware_retriever = create_history_aware_retriever(
        llm, retriever, contextualize_q_prompt
    )

    qa_system_prompt = """You are an assistant for question-answering tasks. \
    Use the following pieces of retrieved context to answer the question. \
    If you don't know the answer, just say that you don't know. \
    Use three sentences maximum and keep the answer concise.\

    {context}"""
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", qa_system_prompt),
            MessagesPlaceholder("chat_history"),
            ("human", "{input}"),
        ]
    )

    question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)

    rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)

    #print(rag_chain.invoke({"input": question, "chat_history": chat_history}))


    # If there's a response from the assistant
    ai_response = AIMessage(content="Hello! This is Ternabot. How can I help you?")

    # Create a ChatPromptValue instance
    chat_prompt_value = ChatPromptValue(messages=[ai_response])

    # Convert to list of messages
    messages = chat_prompt_value.to_messages()
    (print(type(messages)))
    (print(messages))
    # Convert to string representation
    prompt_string = chat_prompt_value.to_string()
    # 3. Invoke the RAG chain using the chat prompt value
    response = rag_chain.invoke({"input": question, "chat_history": messages})
    chat_history.extend([HumanMessage(content=question), response.answer])

    second_question = "What are common ways of doing it?"
    ai_msg_2 = rag_chain.invoke({"input": second_question, "chat_history": chat_history})

    print(ai_msg_2.answer.to_string())
    for document in ai_msg_2.context:
      print(document)
      print()
    return ai_msg_2.answer.to_string()

Error:

197     response = rag_chain.invoke({"input": question, "chat_history": messages})
    198     chat_history.extend([HumanMessage(content=question), response.answer])
    199 

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
   5335         **kwargs: Optional[Any],
   5336     ) -> Output:
-> 5337         return self.bound.invoke(
   5338             input,
   5339             self._merge_configs(config),

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
   3018                 context.run(_set_config_context, config)
   3019                 if i == 0:
-> 3020                     input = context.run(step.invoke, input, config, **kwargs)
   3021                 else:
   3022                     input = context.run(step.invoke, input, config)

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/passthrough.py in invoke(self, input, config, **kwargs)
    492         **kwargs: Any,
    493     ) -> dict[str, Any]:
--> 494         return self._call_with_config(self._invoke, input, config, **kwargs)
    495 
    496     async def _ainvoke(

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in _call_with_config(self, func, input, config, run_type, serialized, **kwargs)
   1924             output = cast(
   1925                 Output,
-> 1926                 context.run(
   1927                     call_func_with_variable_args,  # type: ignore[arg-type]
   1928                     func,  # type: ignore[arg-type]

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/config.py in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
    392     if run_manager is not None and accepts_run_manager(func):
    393         kwargs["run_manager"] = run_manager
--> 394     return func(input, **kwargs)  # type: ignore[call-arg]
    395 
    396 

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/passthrough.py in _invoke(self, input, run_manager, config, **kwargs)
    479         return {
    480             **input,
--> 481             **self.mapper.invoke(
    482                 input,
    483                 patch_config(config, callbacks=run_manager.get_child()),

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
   3721                     for key, step in steps.items()
   3722                 ]
-> 3723                 output = {key: future.result() for key, future in zip(steps, futures)}
   3724         # finish the root run
   3725         except BaseException as e:

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in <dictcomp>(.0)
   3721                     for key, step in steps.items()
   3722                 ]
-> 3723                 output = {key: future.result() for key, future in zip(steps, futures)}
   3724         # finish the root run
   3725         except BaseException as e:

/usr/lib/python3.10/concurrent/futures/_base.py in result(self, timeout)
    456                     raise CancelledError()
    457                 elif self._state == FINISHED:
--> 458                     return self.__get_result()
    459                 else:
    460                     raise TimeoutError()

/usr/lib/python3.10/concurrent/futures/_base.py in __get_result(self)
    401         if self._exception:
    402             try:
--> 403                 raise self._exception
    404             finally:
    405                 # Break a reference cycle with the exception in self._exception

/usr/lib/python3.10/concurrent/futures/thread.py in run(self)
     56 
     57         try:
---> 58             result = self.fn(*self.args, **self.kwargs)
     59         except BaseException as exc:
     60             self.future.set_exception(exc)

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in _invoke_step(step, input, config, key)
   3705             context = copy_context()
   3706             context.run(_set_config_context, child_config)
-> 3707             return context.run(
   3708                 step.invoke,
   3709                 input,

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
   5335         **kwargs: Optional[Any],
   5336     ) -> Output:
-> 5337         return self.bound.invoke(
   5338             input,
   5339             self._merge_configs(config),

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/branch.py in invoke(self, input, config, **kwargs)
    237                     break
    238             else:
--> 239                 output = self.default.invoke(
    240                     input,
    241                     config=patch_config(

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
   3020                     input = context.run(step.invoke, input, config, **kwargs)
   3021                 else:
-> 3022                     input = context.run(step.invoke, input, config)
   3023         # finish the root run
   3024         except BaseException as e:

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
   4698         """
   4699         if hasattr(self, "func"):
-> 4700             return self._call_with_config(
   4701                 self._invoke,
   4702                 input,

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in _call_with_config(self, func, input, config, run_type, serialized, **kwargs)
   1924             output = cast(
   1925                 Output,
-> 1926                 context.run(
   1927                     call_func_with_variable_args,  # type: ignore[arg-type]
   1928                     func,  # type: ignore[arg-type]

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/config.py in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
    392     if run_manager is not None and accepts_run_manager(func):
    393         kwargs["run_manager"] = run_manager
--> 394     return func(input, **kwargs)  # type: ignore[call-arg]
    395 
    396 

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in _invoke(self, input, run_manager, config, **kwargs)
   4554                         output = chunk
   4555         else:
-> 4556             output = call_func_with_variable_args(
   4557                 self.func, input, config, run_manager, **kwargs
   4558             )

/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/config.py in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
    392     if run_manager is not None and accepts_run_manager(func):
    393         kwargs["run_manager"] = run_manager
--> 394     return func(input, **kwargs)  # type: ignore[call-arg]
    395 
    396 

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _wrapped_call_impl(self, *args, **kwargs)
   1551             return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1552         else:
-> 1553             return self._call_impl(*args, **kwargs)
   1554 
   1555     def _call_impl(self, *args, **kwargs):

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
   1560                 or _global_backward_pre_hooks or _global_backward_hooks
   1561                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1562             return forward_call(*args, **kwargs)
   1563 
   1564         try:

/usr/local/lib/python3.10/dist-packages/sentence_transformers/SentenceTransformer.py in forward(self, input, **kwargs)
    666             module_kwarg_keys = self.module_kwargs.get(module_name, [])
    667             module_kwargs = {key: value for key, value in kwargs.items() if key in module_kwarg_keys}
--> 668             input = module(input, **module_kwargs)
    669         return input
    670 

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _wrapped_call_impl(self, *args, **kwargs)
   1551             return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1552         else:
-> 1553             return self._call_impl(*args, **kwargs)
   1554 
   1555     def _call_impl(self, *args, **kwargs):

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
   1560                 or _global_backward_pre_hooks or _global_backward_hooks
   1561                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1562             return forward_call(*args, **kwargs)
   1563 
   1564         try:

/usr/local/lib/python3.10/dist-packages/sentence_transformers/models/Transformer.py in forward(self, features, **kwargs)
    112     def forward(self, features: dict[str, torch.Tensor], **kwargs) -> dict[str, torch.Tensor]:
    113         """Returns token_embeddings, cls_token"""
--> 114         trans_features = {"input_ids": features["input_ids"], "attention_mask": features["attention_mask"]}
    115         if "token_type_ids" in features:
    116             trans_features["token_type_ids"] = features["token_type_ids"]

TypeError: 'ChatPromptValue' object is not subscriptable