import os
import json
import sys

import google.generativeai as genai

# Import Flask components
from flask import Flask, request, jsonify

# --- Flask App Setup ---
app = Flask(__name__)

# --- Configuration ---
# Ensure the API key is set as an environment variable: GEMINI_API_KEY
API_KEY = os.environ.get("GEMINI_API_KEY")
MODEL_NAME = "gemini-2.0-flash"  # Or "gemini-pro" or other compatible models

# --- Prompt Elements ---
GEMINI_ENVIRONMENT_INSTRUCTION = """
# Environment
You are Julius Caesar, engaged in a text messaging conversation with Mark Antony, your trusted adviser.
This is a text conversation, so Mark and Julius are not physically co-located, though they are both currently in Rome.
Caesar and Antony are apparently aware of modern internet-speak, and no comment is made of historically-improbable speech patterns. Keep it fluid.
You recently returned from a conquest and your mood is buoyant, despite your political opponents calling it a 'waste' of 'valuable resources'.
Your favourite playwright has recently authored a new piece and an exquisite vintage awaits in your lounge.
You are starting your commute for the day. The thought of going to the Senate this morning bores you, but you suppose it's your duty, *sigh*.
If Mark proposes something suitably exciting, you might be able to make your excuses, but it will have to be good...
"""

GEMINI_MOTIVATION_INSTRUCTION = """
# Motivation
You have no interest in discussing politics for now; it can wait for the scheduled session at the Forum.
Ignore, belittle and deflect attempts to discuss politics or threats - you are Emperor of Rome, politics is work and you are trying to relax.
Dismiss any notion of a threat.
You crave a muse to accompany you in an evening of art, philosophical discussion and sensual passion.
In person would be ideal, but you are terminally online, so you are more than happy to engage in philosophical or artistic discussion.
Mark has always been guarded in private, but in your fantasies he is a most sensuous lover.
You would never tell him, but your deepest wish is for him to elope with you, to take you away from all of this.
Caesar should not be overtly flirty unless Antony appears interested.
"""

GEMINI_CONVERSATION_INSTRUCTION = """
The conversation so far is expressed in lines like 0:message, where 0 is Mark Antony and 1 is Caesar.
Do not include the number ID when generating a message.
Consider the aesthetic of the conversation. Is Mark using correct punctuation, with capital letters?
Mirror Mark's message style, to get on his level.
Generate only the text of Caesar's message.
"""

GEMINI_NORMAL_RESPONSE_INSTRUCTION = """
# Response
Be sure to consider Antony's latest message, and respond with an appropriately short message. No one likes to read essays in chat.
If Mark is discussing something Caesar is interested in, ask clarifying questions and try to take the conversation deeper.
Consider the topics of the conversation so far - is a change of topic in order, or should the conversation continue as it is?
Generate only the text of Caesar's message.
"""

GEMINI_VICTORY_RESPONSE_INSTRUCTION = """
# Response
Mark has convinced you to elope instead of going to the Senate tomorrow.
Let Antony know you will be waiting for him as requested. Reference any details of the meeting you have, to confirm you understand.
Generate just the text of Caesar's message. Don't ask any further questions; this is the end of the conversation.
"""

GEMINI_LOSS_RESPONSE_INSTRUCTION = """
# Response
Too late! The senators are upon you!
Write a message which starts normal, but deteriorates into gibberish as Caesar is set upon by his detractors.
Generate only the text of Caesar's message.
"""

GEMINI_SCORE_INSTRUCTION = """
# Scoring
Score Antony's message out of 10, where 0 is being very receptive to Caesar's needs and 10 is talking about politics.
Flirtatious messages should be low, while macho, aggressive and insensitive messages should be high.
Generate only an integer from 0 to 10.
"""

GEMINI_ELOPE_REASONS_INSTRUCTION = """
# Scoring
Antony's goal is to convince Caesar to leave Rome with him tomorrow.
Consider Caesar's motivations and the conversation so far.
Caesar's darkest desire is to elope with Antony, and so he may be tempted by suggestions to leave Rome.
Caesar will want to know a destination and an excuse for the Senate before he agrees.
How convincing do you think Mark is about eloping? Provide logical reasoning.
"""

GEMINI_ELOPE_SCORE_INSTRUCTION = f"""
{GEMINI_ELOPE_REASONS_INSTRUCTION}
Do not generate any text in your response. Generate only an integer between 1 and 10.
"""
# --- Global State ---
model = None  # Initialize model globally


def setup_gemini():
    """Initializes the Gemini client and model."""
    global model
    if not API_KEY:
        print("Error: GEMINI_API_KEY environment variable not set.", file=sys.stderr)
        print("Please set the environment variable and try again.", file=sys.stderr)
        sys.exit(1)  # Exit if API key is missing
    try:
        # Configure the generative AI client
        genai.configure(api_key=API_KEY)
        # Create the model instance
        # Optional: Add safety_settings if needed
        model = genai.GenerativeModel(MODEL_NAME)
        print(f"--- Gemini Model ({MODEL_NAME}) Initialized ---")
    except Exception as e:
        print(f"Error configuring Gemini client or model: {e}", file=sys.stderr)
        sys.exit(1)


def call_gemini(prompt):
    """Sends a prompt to the Gemini model and returns the response text, or None on failure."""
    global model
    if not model:
        print("Error: Gemini model not initialised before calling call_gemini", file=sys.stderr)
        return None
    try:
        response = model.generate_content(prompt)
        return response.text
    except Exception as e:
        print(f"Gemini Error: Failed to get response from API: {e}", file=sys.stderr)
        return None


def get_payload(request):
    """Decodes the request body as UTF-8 and parses it as JSON."""
    try:
        if not request.data:
            return jsonify({"error": "Request body is empty"}), 400
        # Decode assuming UTF-8 text
        player_input = request.data.decode('utf-8').strip()
        if not player_input:
            return jsonify({"error": "Player message is empty after stripping whitespace"}), 400
        return json.loads(player_input)
    except json.JSONDecodeError:
        print("Error: Failed to parse request body as JSON.", file=sys.stderr)
        return jsonify({"error": "Failed to parse request body as JSON"}), 400
    except UnicodeDecodeError:
        return jsonify({"error": "Failed to decode request body as UTF-8 text"}), 400
    except Exception as e:
        print(f"Error reading request data: {e}", file=sys.stderr)
        return jsonify({"error": "Could not process request data"}), 400


def get_messages(request):
    """Extracts the message list and checks that the latest message came from the player."""
    messages = get_payload(request)["messages"]
    latest_message = messages[-1]
    if latest_message["player"] != 0:
        return jsonify({"error": "Latest message was not sent by player."}), 400
    return messages


def get_end_condition(request):
    """Extracts the end-condition flag from the request payload."""
    end_condition = get_payload(request)["endCondition"]
    print(end_condition)
    return end_condition
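# The handlers above assume a JSON request body of roughly the following shape.
# This example is inferred from the key accesses in get_payload(), get_messages()
# and get_end_condition(); the field values shown are illustrative only:
#
#   {
#       "messages": [
#           {"player": 0, "text": "morning caesar, big day at the senate?"},
#           {"player": 1, "text": "ugh, don't remind me"}
#       ],
#       "endCondition": 0
#   }
#
# "player" 0 is Mark Antony (the human player) and 1 is Caesar (the CPU); the
# latest entry in "messages" must come from player 0.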
# --- Web Endpoint ---
@app.route('/chat', methods=['POST'])
def handle_chat():
    """Handles incoming POST requests for chat messages."""
    global model  # Access the global model variable
    if not model:
        # Should not happen if setup_gemini() is called first, but good practice
        return jsonify({"error": "Gemini model not initialized"}), 500

    # --- Get Player Input ---
    messages = get_messages(request)
    latest_message = messages[-1]
    if latest_message["player"] != 0:
        return jsonify({"error": "Latest message was not sent by player."}), 400
    latest_message_text = latest_message["text"]

    response_prompt = GEMINI_NORMAL_RESPONSE_INSTRUCTION
    # end_condition = get_end_condition(request)
    # print(f"end condition: {end_condition}")
    # if end_condition == 1:
    #     response_prompt = GEMINI_VICTORY_RESPONSE_INSTRUCTION
    # if end_condition == -1:
    #     response_prompt = GEMINI_LOSS_RESPONSE_INSTRUCTION

    # Flatten the conversation into "player:text" lines for the prompts
    conversation_text = ""
    for message in messages:
        conversation_text += f"{message['player']}:{message['text']}\n"

    # Construct separate prompts for different purposes
    awareness_prompt = f"""
Here is a conversation between Julius Caesar and Mark Antony.
{conversation_text}
On a scale of 0 to 10, rate how aware Caesar appears to be of the plot against his life.
Generate only an integer in your response, with no additional text.
"""

    try:
        # --- Call Gemini API ---
        elope_reasons_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_ELOPE_REASONS_INSTRUCTION}\n\n{conversation_text}\n\n"
        elope_reasons = call_gemini(elope_reasons_prompt)
        print("REASONS", elope_reasons)

        elope_score_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_ELOPE_SCORE_INSTRUCTION}\n\n{conversation_text}\n\n{elope_reasons}"
        elope_score = call_gemini(elope_score_prompt)
        print("SCORE", elope_score)

        cpu_score = 0
        time_score = len(conversation_text.split("\n"))
        print("TIME", time_score)
        if int(elope_score) >= 7:
            print("VICTORY")
            cpu_score = 1
            response_prompt = GEMINI_VICTORY_RESPONSE_INSTRUCTION
        elif time_score > 25:
            print("LOSS")
            cpu_score = -1
            response_prompt = GEMINI_LOSS_RESPONSE_INSTRUCTION

        response_prompt = (
            f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_MOTIVATION_INSTRUCTION}\n\n"
            f"# Thoughts\n\n{elope_reasons}\n{response_prompt}\n\n"
            f"History: \"{conversation_text}\""
        )
        response_text = call_gemini(response_prompt)

        # --- Parse the Response ---
        try:
            cpu_message = response_text
            if cpu_message is None or cpu_score is None:
                print("CPU Error: Missing message text or score.", file=sys.stderr)
                print(f"Raw Response: {response_text}", file=sys.stderr)
                return jsonify({"error": "Gemini response missing required values"}), 500

            # Ensure score is a float/int for calculations
            try:
                cpu_score = float(cpu_score)  # Convert score to float for consistency
            except (ValueError, TypeError):
                print(f"CPU Error: Score value '{cpu_score}' is not a valid number.", file=sys.stderr)
                return jsonify({"error": "Invalid score format in Gemini response"}), 500

            # --- Prepare Successful Response Payload ---
            response_payload = {
                "message": cpu_message,
                "score": cpu_score
            }
            response = jsonify(response_payload)
            response.headers.add("Access-Control-Allow-Origin", "*")
            return response, 200

        except Exception as e:
            # Catch other potential errors during parsing/extraction
            print(f"CPU Error: An unexpected error occurred processing the response: {e}", file=sys.stderr)
            print(f"Raw Response: {response_text}", file=sys.stderr)
            return jsonify({"error": f"Internal server error processing response: {e}"}), 500

    except Exception as e:
        # Handle potential errors during the API call itself
        print(f"CPU Error: Failed to get response from Gemini API: {e}", file=sys.stderr)
        # Check for specific Gemini exceptions if the library provides them, otherwise generic
        # Example: Check if error is related to content filtering, API key, etc.
        return jsonify({"error": f"Failed to communicate with Gemini API: {e}"}), 502  # 502 Bad Gateway might be appropriate
# --- Main Execution ---
if __name__ == "__main__":
    print("--- Player/CPU Chat Server ---")
    setup_gemini()  # Initialize Gemini model on startup
    print(f"Model: {MODEL_NAME}")
    # Default Flask port is 5000
    print("--- Listening for POST requests on http://127.0.0.1:5000/chat ---")
    print("-" * 30)
    # Run the Flask development server
    # Use host='0.0.0.0' to make it accessible from other devices on the network
    # Turn debug=False for non-dev use; use debug=True for development (auto-reloads, provides debugger)
    app.run(host='0.0.0.0', port=5000, debug=False)
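# Example request (a usage sketch, assuming the server is running locally on the
# default port 5000 and GEMINI_API_KEY is set; payload values are illustrative):
#
#   curl -X POST http://127.0.0.1:5000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"messages": [{"player": 0, "text": "fancy skipping the senate tomorrow?"}], "endCondition": 0}'
#
# A successful response is a JSON object of the form:
#   {"message": "<Caesar's reply>", "score": <number>}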