Messages Context - Per Thread or Per Assistant?

It should be per thread, unless you code something funky.
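With the Assistants API the history is attached to the thread, not the assistant, so two threads never see each other's messages. A minimal sketch of that, assuming the beta threads endpoints in the current openai Python client:

```python
from openai import OpenAI

client = OpenAI()

# Two independent threads; an assistant only sees a thread's messages
# when it is run against that specific thread.
thread_a = client.beta.threads.create()
thread_b = client.beta.threads.create()

# Add a message to thread_a only
client.beta.threads.messages.create(
    thread_id=thread_a.id,
    role="user",
    content="Remember: my favorite color is teal.",
)

# thread_a now holds one message, thread_b holds none,
# i.e. the context does not leak between threads
print(len(client.beta.threads.messages.list(thread_id=thread_a.id).data))  # 1
print(len(client.beta.threads.messages.list(thread_id=thread_b.id).data))  # 0
```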

What I do is a bit different: I keep the history in st.session_state and send the whole list to Chat Completions on every turn:

import openai
import streamlit as st
import requests
import base64
from openai import OpenAI
client = OpenAI()

# Assistants API thread (created here, but gpt4() below uses Chat Completions with session_state instead)
thread = client.beta.threads.create()

def gpt4():

        if "openai_model" not in st.session_state:
            st.session_state["openai_model"] = "gpt-4-1106-preview"

        if "messages" not in st.session_state:
            st.session_state.messages = []

        # Create two columns
        col1, col2 = st.columns([4, 1])

        # Empty space in the first column
        col1.empty()

        # Add a button to clear the chat in the second column
        if col2.button('Clear Chat'):
            st.session_state.messages = []

        for message in st.session_state.messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"])

        if prompt := st.chat_input("What is up?"):
            st.session_state.messages.append({"role": "user", "content": prompt})
            with st.chat_message("user"):
                st.markdown(prompt)

            with st.chat_message("assistant"):
                message_placeholder = st.empty()
                full_response = ""
                for response in client.chat.completions.create(
                    model=st.session_state["openai_model"],
                    messages=[
                        {"role": m["role"], "content": m["content"]}
                        for m in st.session_state.messages
                    ],
                    stream=True,
                    max_tokens=4000,
                ):
                    # Extract the message content from the streamed chunk
                    # st.write(response)  # uncomment to inspect the raw chunk object

                    # Access the first 'Choice' object in the 'choices' list.
                    choice = response.choices[0]  # 'choices' is a list, so you can use index access here.

                    # Access the 'ChoiceDelta' object from 'choice' which contains 'content' field.
                    choice_delta = choice.delta  # 'delta' is an attribute of 'Choice' object.

                    # Access the 'content' field from 'choice_delta'.
                    message_content = choice_delta.content  # 'content' is an attribute of 'ChoiceDelta' object.

                    # Append the extracted content to 'full_response' (skip None chunks, e.g. the final stop chunk)
                    full_response += message_content if message_content is not None else ""

                    # Update the placeholder with the 'full_response' using Streamlit's markdown to render it
                    message_placeholder.markdown(full_response + "▌")



                message_placeholder.markdown(full_response)
            st.session_state.messages.append({"role": "assistant", "content": full_response})
            
        if st.button('Download Chat Log'):
            chat_log_str = "\n".join([f"{m['role']}: {m['content']}" for m in st.session_state.messages])
            # Encode the chat log text directly; no separate file-buffer helper is needed
            b64 = base64.b64encode(chat_log_str.encode()).decode()
            st.markdown(f'<a href="data:file/txt;base64,{b64}" download="chat_log.txt">Download Chat Log</a>', unsafe_allow_html=True)
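If your Streamlit version has st.download_button, that last part can be simplified and you can skip the hand-rolled base64 link. A sketch of that alternative (the label and file name are just placeholders):

```python
import streamlit as st

# Alternative to the base64 <a> tag above: let Streamlit serve the file directly.
chat_log_str = "\n".join(
    f"{m['role']}: {m['content']}" for m in st.session_state.get("messages", [])
)
st.download_button(
    label="Download Chat Log",   # placeholder label
    data=chat_log_str,
    file_name="chat_log.txt",    # placeholder file name
    mime="text/plain",
)
```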

This is basically what I do; not sure if you can decipher my cryptic, horrible code, but it's there if it helps.
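If you want to try it, something like this at the bottom of the script should work (the file name is arbitrary, and it assumes an OPENAI_API_KEY in your environment):

```python
# hypothetical file name: chat_app.py
# launch from a terminal with:
#   export OPENAI_API_KEY=...     # the OpenAI() client reads this env var
#   streamlit run chat_app.py

gpt4()
```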
