Saving context when chatting with ChatGPT

Good day to all of you!
I’m writing a Telegram bot for personal use to talk to ChatGPT, but I don’t understand how to keep the context of the conversation. Please tell me, what am I doing wrong?
Here is my code:

import os
import json
import openai
import random
import datetime
from aiogram import Bot, Dispatcher, executor, types, filters
from aiogram.types import KeyboardButton, ReplyKeyboardMarkup
from aiogram.dispatcher import FSMContext
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from aiogram.dispatcher.filters.state import StatesGroup, State

os.chdir(os.path.dirname(os.path.abspath(__file__)))

from config import *

debug = True
bot = Bot(token = tokenTLG)
dp = Dispatcher(bot, storage = MemoryStorage())

openai.api_key = tokenGPT
maxTokens = 4096
tempAnswer = 0.4
engine = "gpt-4"

''' buttons '''
btn_CreateWriting = KeyboardButton('Создать статью по пунктам')
kb_RushGPT_bot = ReplyKeyboardMarkup(resize_keyboard = True)
kb_RushGPT_bot.row(btn_CreateWriting)

btn_FreePromptChat_next = KeyboardButton('Продолжить написание статьи')
btn_FreePromptChat_finish = KeyboardButton('Закончить написание статьи')
kb_RushGPT_bot_Writing = ReplyKeyboardMarkup(resize_keyboard = True)
kb_RushGPT_bot_Writing.row(btn_FreePromptChat_next)
kb_RushGPT_bot_Writing.row(btn_FreePromptChat_finish)

''' button handlers '''
@dp.message_handler(commands = ['start'])
async def cmd_start(message: types.Message):
    await message.answer(random.choice(msgGreetings), reply_markup = kb_RushGPT_bot)

''' ########################################## '''
''' Create an article point by point / Create Writing '''
class CreateWriting(StatesGroup):
    writing = State()

@dp.message_handler(filters.Text(equals="Создать статью по пунктам", ignore_case = True))
async def CreateWriting_begin(message: types.Message):
    if debug: print("Создать статью по пунктам")
    await message.answer("Выбрано <b><i>Создание статьи по пунктам</i></b>. Отправьте запрос", parse_mode = types.ParseMode.HTML, reply_markup = kb_RushGPT_bot_Writing)
    await CreateWriting.writing.set()

@dp.message_handler(filters.Text(equals="Закончить написание статьи", ignore_case = True), state = "*")
async def CreateWriting_finish(message: types.Message, state: FSMContext):
    if debug: print("Закончить написание статьи")
    await message.answer("Создание статьи окончено.", reply_markup = kb_RushGPT_bot)
    fileName = "users/h" + str(message.from_user.id) + ".txt"
    with open (fileName, "w", encoding='utf-8') as file1:
        file1.write("")
    await state.finish()

@dp.message_handler(state = CreateWriting.writing)
async def CreateWriting_continue(message: types.Message, state: FSMContext):
    if debug: print("Продолжить написание статьи")
    await message.reply(random.choice(msgAnswer))
    if not os.path.exists("users/h" + str(message.from_user.id) + ".txt"):
        fileName = "users/h" + str(message.from_user.id) + ".txt"
        with open (fileName, "w", encoding='utf-8') as file1:
            file1.write("")
    msg = message.text
    response = await openai.ChatCompletion.acreate(
    temperature = 0.7,
    model = engine,
    messages = [
            {"role": "assistant", "content": str(open("users/h" + str(message.from_user.id) + ".txt", "r")) + msg}
        ]
    )
    fileName = "users/h" + str(message.from_user.id) + ".txt"
    with open (fileName, "a+", encoding='utf-8') as file1:
        file1.write(msg.strip() + "\n")
    response_tokens = response["usage"]["completion_tokens"]
    msg = response.choices[0].message.content
    if response_tokens > 4096:
        await message.answer(f"Текст получился очень большой ({response_tokens} символа(ов)), но я прикладываю его в виде файла...", reply_markup = kb_RushGPT_bot)
        fileName = "users/a" + str(message.from_user.id) + "_" + str(datetime.datetime.today().strftime("%Y-%m-%d-%H-%M-%S")) + ".txt"
        with open (fileName, "w", encoding='utf-8') as file1:
            file1.write(msg)
        await bot.send_document(message.chat.id, document = open(fileName, 'rb'))
    else:
        await message.answer(msg, reply_markup = kb_RushGPT_bot)
    await message.answer("Нужно что-то добавить?", reply_markup = kb_RushGPT_bot_Writing)
    await CreateWriting.writing.set()

if __name__ == '__main__':
    if debug: print("Бот запущен!")
    executor.start_polling(dp)

Help, please

This is the format of a message, with roles:

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who won the world series in 2020?"},
        {"role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020."},
        {"role": "user", "content": "Where was it played?"}
    ]
)

system: AI programming
user: previous turn          <-- from saved history
assistant: previous answer   <-- from saved history
user: current question

You must keep a record of the prior conversation in memory or in a database, and limit its size so you don’t send more than the model’s available context length.
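For example, here is a minimal sketch of that idea that keeps per-user history in an in-memory dict. user_histories, MAX_HISTORY_MESSAGES and ask_gpt are illustrative names, not part of your bot; engine and tempAnswer are the settings from your code, and it uses the same pre-1.0 openai library and acreate call you already have:

# Minimal sketch: per-user conversation history kept in memory.
user_histories = {}        # user_id -> list of {"role": ..., "content": ...}
MAX_HISTORY_MESSAGES = 20  # crude limit; counting tokens (e.g. with tiktoken) is more accurate

async def ask_gpt(user_id: int, user_text: str) -> str:
    # Start each new user with a system prompt, then keep appending turns.
    history = user_histories.setdefault(
        user_id, [{"role": "system", "content": "You are a helpful assistant."}]
    )
    history.append({"role": "user", "content": user_text})

    response = await openai.ChatCompletion.acreate(
        model = engine,
        temperature = tempAnswer,
        messages = history,
    )
    answer = response.choices[0].message.content

    # Store the assistant's reply too, so the next turn sees the full exchange.
    history.append({"role": "assistant", "content": answer})

    # Trim the oldest turns (keeping the system prompt) so the request
    # stays under the model's context length.
    if len(history) > MAX_HISTORY_MESSAGES:
        user_histories[user_id] = [history[0]] + history[-(MAX_HISTORY_MESSAGES - 1):]
    return answer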

@dp.message_handler(state = CreateWriting.writing)
async def CreateWriting_continue(message: types.Message, state: FSMContext):
    if debug: print("Продолжить написание статьи")
    msg = message.as_json().split("\"text\": \"")[1][:-2]
    fileName = "users/h" + str(message.from_user.id) + ".txt"
    if os.stat(fileName).st_size == 0:
        assistantMSG = "You are a helpful assistant."
    else:
        assistantMSG = str(open(fileName))
    response = await openai.ChatCompletion.acreate(
        model = engine,
        max_tokens = maxTokens,
        messages = [
            {"role": "assistant", "content": assistantMSG},
            {"role": "user", "content": msg}
        ]
    )
    # if debug: print(user_contexts[message.from_user.id], "\n")
    # if debug: print(response)
    msg = str(response.choices[0].message.content)
    with open (fileName, "w", encoding = 'utf-8') as file1:
        file1.write(msg)
    await message.answer(msg, reply_markup = kb_RushGPT_bot)
    await message.answer("Нужно что-то добавить?", reply_markup=kb_RushGPT_bot_Writing)
    await CreateWriting.writing.set()

It doesn’t work :(

str(open(filename)) is not how you read a file in Python.

>>> print(str(open("/tmp/x")))
<_io.TextIOWrapper name='/tmp/x' mode='r' encoding='UTF-8'>

I suggest you first figure out how to read and write files (or databases) separately from using that data to send it to OpenAI.
Once that works, re-use that code in the OpenAI context.
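For instance, a standalone write/read round trip looks like this (the file name is made up for illustration):

# How to write and then read a text file; compare with str(open(fileName)).
file_name = "example_history.txt"  # illustrative path

with open(file_name, "w", encoding="utf-8") as f:
    f.write("previous answer goes here\n")

with open(file_name, "r", encoding="utf-8") as f:
    previous_text = f.read()  # the file's contents as a string

print(previous_text)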

Also, even if you could read the file like that, you would only provide the previous answer from the assistant, not the previous text from the user, or the original system prompt. You need to keep adding messages to the history, you can’t just provide the one last response.

You might want to look into the functions json.dump() / json.load() or perhaps pickle.dump() / pickle.load().
You can then keep messages in an array, and dump that array to disk/re-load it when running a new inference.
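A rough sketch of that approach (HISTORY_FILE, load_history and save_history are made-up names, and the API call is commented out so the snippet runs on its own):

import json
import os

HISTORY_FILE = "history_12345.json"  # one file per user, illustrative name

def load_history():
    # Start a fresh conversation if there is no saved history yet.
    if not os.path.exists(HISTORY_FILE):
        return [{"role": "system", "content": "You are a helpful assistant."}]
    with open(HISTORY_FILE, "r", encoding="utf-8") as f:
        return json.load(f)

def save_history(messages):
    with open(HISTORY_FILE, "w", encoding="utf-8") as f:
        json.dump(messages, f, ensure_ascii=False, indent=2)

# Each turn: load, append the new user message, call the API with the whole
# list, append the assistant's reply, then save it back.
messages = load_history()
messages.append({"role": "user", "content": "current question"})
# response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages)
# messages.append({"role": "assistant", "content": response.choices[0].message.content})
save_history(messages)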


