# Make a conversation between ChatGPT & Gemini

import os
from typing import List, Optional
import openai
import requests
from requests.exceptions import RequestException
import logging
import sqlite3
from contextlib import contextmanager
import streamlit as st
from textblob import TextBlob
from datetime import datetime

# 🔹 Secure configuration using environment variables
# NOTE(review): the fallback values are placeholders, not working keys — real
# keys must be supplied via OPENAI_API_KEY / GEMINI_API_KEY in the environment.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "your_openai_api_key")
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", "your_gemini_api_key")

# 🔹 Set up OpenAI API
openai.api_key = OPENAI_API_KEY

# 🔹 Set up logging system with an external file
# All records at INFO and above are appended to app.log in the working directory.
logging.basicConfig(
    level=logging.INFO,
    filename="app.log",
    format="%(asctime)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)

# 🔹 Context manager that opens the SQLite database and guarantees cleanup
@contextmanager
def get_db_connection():
    """Yield a connection to chat_history.db, closing it on exit.

    check_same_thread=False allows the connection to be used from
    Streamlit's worker threads.
    """
    connection = sqlite3.connect("chat_history.db", check_same_thread=False)
    try:
        yield connection
    finally:
        connection.close()

# 🔹 Ensure the chat table exists before the app touches it
def init_db():
    """Create the chat history table on first run (a no-op afterwards)."""
    schema = """
        CREATE TABLE IF NOT EXISTS chat (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            model TEXT NOT NULL,
            message TEXT NOT NULL,
            sentiment TEXT,
            timestamp TEXT DEFAULT (datetime('now'))
        )
    """
    with get_db_connection() as conn:
        conn.execute(schema)
        conn.commit()

# 🔹 Persist one chat message together with its sentiment label
def save_message(model: str, message: str) -> None:
    """Analyze *message* and insert it into the chat table for *model*."""
    row = (model, message, analyze_sentiment(message))
    with get_db_connection() as conn:
        conn.execute(
            "INSERT INTO chat (model, message, sentiment) VALUES (?, ?, ?)",
            row,
        )
        conn.commit()

# 🔹 Fetch the most recent chat rows, newest first
def get_chat_history(limit: int = 20) -> List[tuple]:
    """Return up to *limit* (model, message, sentiment) rows, newest first."""
    query = (
        "SELECT model, message, sentiment FROM chat "
        "ORDER BY timestamp DESC LIMIT ?"
    )
    with get_db_connection() as conn:
        return conn.execute(query, (limit,)).fetchall()

# 🔹 Classify a text's sentiment from its TextBlob polarity score
def analyze_sentiment(text: str) -> str:
    """Return a sentiment label for *text* (truncated to 500 chars for speed)."""
    try:
        polarity = TextBlob(text[:500]).sentiment.polarity
    except Exception as e:
        # Best-effort: a failed analysis must never break message handling.
        logger.error(f"Sentiment analysis failed: {e}")
        return "Undefined"
    if polarity > 0:
        return "Positive 😊"
    if polarity < 0:
        return "Negative 😠"
    return "Neutral 😐"

# 🔹 Query OpenAI's chat endpoint, returning None on API failure
def call_openai(prompt: str, history: List[str], temperature: float, max_tokens: int) -> Optional[str]:
    """Send *prompt* plus the prior *history* turns to gpt-3.5-turbo.

    Every history entry is replayed as a user message after a fixed
    system message. Returns the stripped reply text, or None on error.
    """
    messages = [{"role": "system", "content": "Conversation between AI models"}]
    messages.extend({"role": "user", "content": msg} for msg in history)
    messages.append({"role": "user", "content": prompt})

    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",  # Changed to a widely available model
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            timeout=30
        )
        return response.choices[0].message["content"].strip()
    except openai.error.OpenAIError as e:
        logger.error(f"OpenAI API Error: {e}")
        return None

# 🔹 Function to call Gemini with improved error handling
def call_gemini(prompt: str, history: List[str], temperature: float, max_tokens: int) -> Optional[str]:
    """Send *prompt* plus the prior *history* turns to the Gemini REST API.

    Each history entry is replayed as a user turn before the prompt.
    Returns the stripped generated text, or None on any network or
    response-format failure.
    """
    url = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent"
    headers = {"Content-Type": "application/json"}
    payload = {
        "contents": [{"role": "user", "parts": [{"text": msg}]} for msg in history] + 
                    [{"role": "user", "parts": [{"text": prompt}]}],
        "generationConfig": {"maxOutputTokens": max_tokens, "temperature": temperature}
    }
    
    try:
        response = requests.post(f"{url}?key={GEMINI_API_KEY}", json=payload, headers=headers, timeout=30)
        response.raise_for_status()
        data = response.json()  # raises ValueError on a non-JSON body
        # Fix: an empty "candidates" list previously raised an uncaught
        # IndexError, and a non-JSON/malformed body an uncaught ValueError or
        # TypeError — all are now handled like other API failures.
        return data["candidates"][0]["content"]["parts"][0]["text"].strip()
    except (RequestException, KeyError, IndexError, ValueError, TypeError) as e:
        logger.error(f"Gemini API Error: {e}")
        return None

# 🔹 Enhanced Streamlit interface
def conversation():
    """Render the Streamlit page and run one GPT→Gemini exchange per submit.

    Flow: settings sidebar → stored history → topic form → call GPT-3.5,
    then feed its reply (when available) to Gemini, saving every message
    with its sentiment via save_message().
    """
    logger.info("Starting Streamlit conversation app")
    st.set_page_config(page_title="AI Conversation", layout="wide")
    st.title("🗣️ Conversation between GPT-3.5 and Gemini")

    # 🔹 Initialize session state if not present
    if "history" not in st.session_state:
        st.session_state.history = []

    # 🔹 User settings in sidebar
    with st.sidebar:
        temperature = st.slider("🔧 Temperature (randomness)", 0.0, 1.0, 0.7, 0.05)
        max_tokens = st.slider("📏 Max Tokens (word limit)", 50, 1000, 300, 50)

    # 🔹 Display history in a scrollable box
    # NOTE(review): this renders before the current submit is processed below,
    # so the newest exchange only appears here on the next rerun.
    with st.expander("📜 Chat History", expanded=False):
        for model, message, sentiment in reversed(get_chat_history()):
            st.markdown(f"**{model}**: {message} *Sentiment: {sentiment}*")

    # 🔹 User input
    with st.form(key="chat_form"):
        user_input = st.text_area("📝 Enter conversation topic:", height=100)
        submit = st.form_submit_button("🔄 Start Conversation")

    if submit and user_input:
        # Record the user's turn both in session memory and in the database.
        st.session_state.history.append(f"User: {user_input}")
        save_message("User", user_input)

        # 🔹 Call models and display results in columns
        col1, col2 = st.columns(2)
        
        with col1:
            st.subheader("🤖 OpenAI GPT-3.5")
            gpt_response = call_openai(user_input, st.session_state.history, temperature, max_tokens)
            if gpt_response:
                sentiment_gpt = analyze_sentiment(gpt_response)
                st.write(gpt_response)
                st.write(f"🔍 Sentiment Analysis: {sentiment_gpt}")
                st.session_state.history.append(f"GPT-3.5: {gpt_response}")
                save_message("GPT-3.5", gpt_response)
            else:
                st.error("Failed to call GPT-3.5")

        with col2:
            st.subheader("🟢 Google Gemini")
            # Chain the models: Gemini replies to GPT's answer when one exists,
            # otherwise it falls back to the user's original topic.
            gemini_response = call_gemini(user_input if not gpt_response else gpt_response, 
                                          st.session_state.history, temperature, max_tokens)
            if gemini_response:
                sentiment_gemini = analyze_sentiment(gemini_response)
                st.write(gemini_response)
                st.write(f"🔍 Sentiment Analysis: {sentiment_gemini}")
                st.session_state.history.append(f"Gemini: {gemini_response}")
                save_message("Gemini", gemini_response)
            else:
                st.error("Failed to call Gemini")

# 🔹 Entry point
if __name__ == "__main__":
    init_db()  # ensure the chat table exists before the UI queries it
    conversation()

# Interesting experiment — feel free to share the responses from them! 🙂