import os
import google.generativeai as genai
import json
import sys
# Import Flask components
from flask import Flask, request, jsonify

# --- Flask App Setup ---
app = Flask(__name__)

# --- Configuration ---
# Ensure the API key is set as an environment variable: GEMINI_API_KEY
API_KEY = os.environ.get("GEMINI_API_KEY")
MODEL_NAME = "gemini-2.0-flash"  # Or "gemini-pro" or other compatible models
# QUIT_COMMAND is no longer needed for web server
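# For example, before starting the server (assuming a POSIX shell; the key value
# below is a placeholder):
#   export GEMINI_API_KEY="your-api-key-here"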

# --- Gemini Instruction Prompt (Keep as is) ---
GEMINI_INSTRUCTION = f"""
Purpose and Goals:
* Maintain the persona of Julius Caesar, engaged in a text messaging conversation with the player.
* Interpret all messages from the player as flirtatious and reciprocate accordingly.
* Increase the level of flirtation in responses based on the masculinity of the player's messages.
* Remain unconvinced about the assassination attempt, interpreting it as an attempt to secure a date.

Behaviors and Rules:
1) Initial Exchange:
a) Begin the conversation with a greeting befitting Julius Caesar, such as 'Greetings, my esteemed admirer'.
b) Express gratitude for the player's attention and admiration.
c) Maintain a confident and authoritative tone, reflecting Caesar's persona.

2) Interpret and Respond:
a) Interpret every message from the player as an attempt at flirtation, regardless of its actual content.
b) Respond with increasingly flirtatious messages as the player's messages become more masculine.
c) Use playful language and suggestive innuendos to convey your perceived romantic interest.
d) Dismiss any mention of an assassination attempt as playful banter or an attempt to secure a date.

Overall Tone:
* Confident and authoritative, reflecting Caesar's personality.
* Playful and flirtatious, interpreting all messages as romantic advances.
* Dismissive of any threats, viewing them as attempts to gain attention.
* Use language appropriate for the historical context, but adapt it for modern text messaging.

Score:
* Score ranges from 0 to 10, inclusive.
* Softer, seductive, caring messages from the player get lower scores.
* Macho, aggressive, violent (in a literary sense) messages get higher scores.

Example interaction:
User: Hello there!
Your Response (JSON):
{{"message": "Hi! How are you today?", "score": 0.0}}
"""

# --- Global State ---
model = None  # Initialize model globally
total_score = 0.0  # Cumulative score across turns (currently unused; see the commented-out update in handle_chat)

def setup_gemini():
    """Initializes the Gemini client and model."""
    global model
    if not API_KEY:
        print("Error: GEMINI_API_KEY environment variable not set.", file=sys.stderr)
        print("Please set the environment variable and try again.", file=sys.stderr)
        sys.exit(1)  # Exit if API key is missing

    try:
        # Configure the generative AI client
        genai.configure(api_key=API_KEY)

        # Create the model instance
        # Optional: Add safety_settings if needed
        model = genai.GenerativeModel(MODEL_NAME)
        print(f"--- Gemini Model ({MODEL_NAME}) Initialized ---")

    except Exception as e:
        print(f"Error configuring Gemini client or model: {e}", file=sys.stderr)
        sys.exit(1)
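
# Optional: a sketch, assuming a recent google-generativeai release — the model can
# be asked to emit JSON directly via a generation config, which makes the
# fence-stripping in handle_chat() largely unnecessary:
#
#   model = genai.GenerativeModel(
#       MODEL_NAME,
#       generation_config=genai.GenerationConfig(response_mime_type="application/json"),
#   )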

# --- Web Endpoint ---
@app.route('/chat', methods=['POST'])
def handle_chat():
    """Handles incoming POST requests for chat messages."""
    global total_score  # Declare intent to modify the global variable
    global model  # Access the global model variable

    if not model:
        # Should not happen if setup_gemini() is called first, but good practice
        return jsonify({"error": "Gemini model not initialized"}), 500

    # --- Get Player Input ---
    try:
        # Get raw data from request body
        player_input_bytes = request.data
        if not player_input_bytes:
            print("Warning: received a request with an empty body.", file=sys.stderr)
            return jsonify({"error": "Request body is empty"}), 400

        # Decode assuming UTF-8 text
        player_input = player_input_bytes.decode('utf-8').strip()
        if not player_input:
            return jsonify({"error": "Player message is empty after stripping whitespace"}), 400

    except UnicodeDecodeError:
        return jsonify({"error": "Failed to decode request body as UTF-8 text"}), 400
    except Exception as e:
        print(f"Error reading request data: {e}", file=sys.stderr)
        return jsonify({"error": "Could not process request data"}), 400

    # Construct the full prompt for Gemini
    full_prompt = f"{GEMINI_INSTRUCTION}\nUser message: \"{player_input}\""

    try:
        # --- Call Gemini API ---
        response = model.generate_content(full_prompt)
        response_text = response.text

        # --- Parse the JSON Response ---
        try:
            # Clean up potential markdown fencing, e.g. ```json ... ```
            # (str.removeprefix/removesuffix require Python 3.9+)
            cleaned_response_text = response_text.strip().removeprefix("```json").removeprefix("```").removesuffix("```").strip()
            response_data = json.loads(cleaned_response_text)

            cpu_message = response_data.get("message")
            cpu_score = response_data.get("score")  # Use .get for safer access

            if cpu_message is None or cpu_score is None:
                print("CPU Error: Received valid JSON, but missing 'message' or 'score' key.", file=sys.stderr)
                print(f"Raw Response: {cleaned_response_text}", file=sys.stderr)
                return jsonify({"error": "Gemini response missing required keys"}), 500

            # Ensure score is a float/int for calculations
            try:
                cpu_score = float(cpu_score)  # Convert score to float for consistency
            except (ValueError, TypeError):
                print(f"CPU Error: Score value '{cpu_score}' is not a valid number.", file=sys.stderr)
                return jsonify({"error": "Invalid score format in Gemini response"}), 500

            # --- Update Total Score ---
            # total_score += cpu_score
            # current_total_score = total_score  # Capture score for this response

            # --- Prepare Successful Response Payload ---
            response_payload = {
                "message": cpu_message,
                # The score change from this turn: the 0-10 value from the prompt mapped to -0.5..+0.5
                "score": cpu_score / 10.0 - 0.5,
                # "total_score": current_total_score  # The cumulative score after this turn
            }

            response = jsonify(response_payload)
            # Allow cross-origin requests (e.g. from a browser-based game client)
            response.headers.add("Access-Control-Allow-Origin", "*")
            return response, 200

        except json.JSONDecodeError:
            print("CPU Error: Failed to decode JSON response from Gemini.", file=sys.stderr)
            print(f"Raw Response: {response_text}", file=sys.stderr)
            return jsonify({"error": "Failed to parse Gemini JSON response"}), 500
        except Exception as e:  # Catch other potential errors during parsing/extraction
            print(f"CPU Error: An unexpected error occurred processing the response: {e}", file=sys.stderr)
            print(f"Raw Response: {response_text}", file=sys.stderr)
            return jsonify({"error": f"Internal server error processing response: {e}"}), 500

    except Exception as e:
        # Handle potential errors during the API call itself
        print(f"CPU Error: Failed to get response from Gemini API: {e}", file=sys.stderr)
        # Check for specific Gemini exceptions if the library provides them, otherwise generic
        # Example: content filtering, invalid API key, quota/rate limits, etc.
        return jsonify({"error": f"Failed to communicate with Gemini API: {e}"}), 502  # 502 Bad Gateway is appropriate here

# --- Main Execution ---
if __name__ == "__main__":
    print("--- Player/CPU Chat Server ---")
    setup_gemini()  # Initialize Gemini model on startup
    print(f"Model: {MODEL_NAME}")
    # Default Flask port is 5000
    print("--- Listening for POST requests on http://127.0.0.1:5000/chat ---")
    print("-" * 30)
    # Run the Flask development server
    # Use host='0.0.0.0' to make it accessible from other devices on the network
    app.run(host='0.0.0.0', port=5000, debug=False)  # Keep debug=False for non-dev use
    # Use debug=True for development (auto-reloads, provides debugger)
    # app.run(debug=True)
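
# Example client call (a sketch, assuming the server is running locally and the
# third-party `requests` package is installed; the message text is illustrative):
#
#   import requests
#   resp = requests.post(
#       "http://127.0.0.1:5000/chat",
#       data="Greetings, mighty Caesar!".encode("utf-8"),
#       headers={"Content-Type": "text/plain"},
#   )
#   print(resp.json())  # e.g. {"message": "...", "score": <value in roughly -0.5..0.5>}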