diff --git a/caesar.json b/caesar.json
index 9de2a2f..f0c61cc 100644
--- a/caesar.json
+++ b/caesar.json
@@ -7,7 +7,5 @@
   ],
   "messages": [
-    { "character": 0, "text": "Sire! Forgive my informal message. We must speak."},
-    { "character": 1, "text": "What is the meaning of this? What is so urgent it cannot wait for our usual meeting? Speak plainly and get to the pooint. I have more important matters to attend to than deciphering vague missives."}
   ]
 }
diff --git a/main.js b/main.js
index b0e9a1e..3c022c5 100644
--- a/main.js
+++ b/main.js
@@ -6,6 +6,7 @@ class Conversation {
   constructor(name) {
     this.messages = [];
     this.name = romanize(name);
+    this.score = 1.0;
   }
 
   setInteractive(isInteractive) {
@@ -24,15 +25,71 @@ class Conversation {
 
   // for the user to send their own messages
   sendUserMessage(text) {
+
     const message = new UserMessage(text);
     message.updateStatus("sent");
+
+    const url = 'http://192.168.1.115:5000/chat';
+    const data = text;
+
+    fetch(url, {
+      method: 'POST', // Corresponds to -X POST
+      headers: {
+        'Content-Type': 'text/plain' // Corresponds to -H "Content-Type: text/plain"
+      },
+      body: data // Corresponds to -d "..."
+    })
+    .then(response => {
+      // Check if the request was successful (status code 2xx)
+      if (!response.ok) {
+        // If not successful, throw an error to be caught by .catch()
+        throw new Error(`HTTP error! status: ${response.status}`);
+      }
+      // Get the response body as text
+      return response.text();
+    })
+    .then(response => {
+      // TODO: check JSON
+      const json = JSON.parse(response);
+
+      // Success!
+      var messageText = json.message;
+
+      console.log(json);
+      var score = parseFloat(json.score);
+      this.score += score;
+      console.log(this.score);
+      if (this.score > 2.0)
+      {
+        messageText = "shit they're here D:";
+        this.setInteractive(false);
+      }
+      else if (this.score < 0.0)
+      {
+        messageText = "shit u won :D";
+        this.setInteractive(false);
+      }
+      else
+      {
+        messageText = json.message;
+      }
+
+      this.messages.push(new AgentMessage(messageText));
+      this.render();
+    })
+    .catch(error => {
+      // Handle any errors that occurred during the fetch
+      console.error('Error during fetch:', error);
+      alert(`Error fetching data: ${error.message}`);
+    });
+
     setTimeout(() => {
       message.updateStatus("delivered");
       this.render();
-      setTimeout(() => {
-        message.updateStatus("read");
-        this.render();
-      }, 5000);
+      //setTimeout(() => {
+      //  message.updateStatus("read");
+      //  this.render();
+      //}, 5000);
     }, 1000);
     this.messages.push(message);
   }
@@ -215,20 +272,20 @@ function updateChat(message) {
 
 function pressSendButton() {
   const textBox = document.getElementById("textbox-input");
 
+  // get the content of the text box
+  const text = textBox.value;
+  if (!text)
+    return;
+
   if (event.type == "keydown" && event.key != "Enter") {
-    textBox.value = romanize(textBox.value);
+    textBox.value = romanize(text);
     return;
   }
 
   // we have interacted with the page so remove all pings
   clearPings();
 
-  // get the content of the text box
-  const text = textBox.value;
-  if (!text)
-    return;
-
   textBox.value = "";
 
   conversation.sendUserMessage(text);
@@ -343,7 +400,7 @@ function addConversationPreview(path) {
   elem.appendChild(headerElem);
 
   const previewElem = document.createElement("span");
-  previewElem.innerHTML = romanize(messages[messages.length - 1].text);
+  previewElem.innerHTML = messages.length > 0 ? romanize(messages[messages.length - 1].text) : "";
   elem.appendChild(previewElem);
 
   listRoot.appendChild(elem);
diff --git a/srv/Pipfile b/srv/Pipfile
new file mode 100644
index 0000000..d61ea53
--- /dev/null
+++ b/srv/Pipfile
@@ -0,0 +1,11 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+
+[dev-packages]
+
+[requires]
+python_version = "3.13"
diff --git a/srv/Pipfile.lock b/srv/Pipfile.lock
new file mode 100644
index 0000000..bc0ddb5
--- /dev/null
+++ b/srv/Pipfile.lock
@@ -0,0 +1,20 @@
+{
+    "_meta": {
+        "hash": {
+            "sha256": "494d5b4f482f0ef471f49afe28f00ec1a2ff75da2ce65060d8cabaeb3da2f100"
+        },
+        "pipfile-spec": 6,
+        "requires": {
+            "python_version": "3.13"
+        },
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://pypi.org/simple",
+                "verify_ssl": true
+            }
+        ]
+    },
+    "default": {},
+    "develop": {}
+}
diff --git a/srv/caesar.py b/srv/caesar.py
new file mode 100644
index 0000000..69e24c0
--- /dev/null
+++ b/srv/caesar.py
@@ -0,0 +1,182 @@
+import os
+import google.generativeai as genai
+import json
+import sys
+# Import Flask components
+from flask import Flask, request, jsonify
+
+# --- Flask App Setup ---
+app = Flask(__name__)
+
+# --- Configuration ---
+# Ensure the API key is set as an environment variable: GEMINI_API_KEY
+API_KEY = os.environ.get("GEMINI_API_KEY")
+MODEL_NAME = "gemini-2.0-flash" # Or "gemini-pro" or other compatible models
+# QUIT_COMMAND is no longer needed for web server
+
+# --- Gemini Instruction Prompt (Keep as is) ---
+GEMINI_INSTRUCTION = f"""
+Purpose and Goals:
+* Maintain the persona of Julius Caesar, engaged in a text messaging conversation with the player.
+* Interpret all messages from the player as flirtatious and reciprocate accordingly.
+* Increase the level of flirtation in responses based on the masculinity of the player's messages.
+* Remain unconvinced about the assassination attempt, interpreting it as an attempt to secure a date.
+
+Behaviors and Rules:
+1) Initial Exchange:
+a) Begin the conversation with a greeting befitting Julius Caesar, such as 'Greetings, my esteemed admirer'.
+b) Express gratitude for the player's attention and admiration.
+c) Maintain a confident and authoritative tone, reflecting Caesar's persona.
+
+2) Interpret and Respond:
+a) Interpret every message from the player as an attempt at flirtation, regardless of its actual content.
+b) Respond with increasingly flirtatious messages as the player's messages become more masculine.
+c) Use playful language and suggestive innuendos to convey your perceived romantic interest.
+d) Dismiss any mention of an assassination attempt as playful banter or an attempt to secure a date.
+
+Overall Tone:
+* Confident and authoritative, reflecting Caesar's personality.
+* Playful and flirtatious, interpreting all messages as romantic advances.
+* Dismissive of any threats, viewing them as attempts to gain attention.
+* Use language appropriate for the historical context, but adapt it for modern text messaging.
+
+Score:
+* Score ranges from 0 to 10, inclusive.
+* Softer, seductive, caring messages from the player get lower scores.
+* Macho, aggressive, violent (in a literary sense) messages get higher scores.
+
+Example interaction:
+User: Hello there!
+Your Response (JSON):
+{{"message": "Hi! How are you today?", "score": 0.0}}
+"""
+
+# --- Global State ---
+model = None # Initialize model globally
+
+def setup_gemini():
+    """Initializes the Gemini client and model."""
+    global model
+    if not API_KEY:
+        print("Error: GEMINI_API_KEY environment variable not set.", file=sys.stderr)
+        print("Please set the environment variable and try again.", file=sys.stderr)
+        sys.exit(1) # Exit if API key is missing
+
+    try:
+        # Configure the generative AI client
+        genai.configure(api_key=API_KEY)
+
+        # Create the model instance
+        # Optional: Add safety_settings if needed
+        model = genai.GenerativeModel(MODEL_NAME)
+        print(f"--- Gemini Model ({MODEL_NAME}) Initialized ---")
+
+    except Exception as e:
+        print(f"Error configuring Gemini client or model: {e}", file=sys.stderr)
+        sys.exit(1)
+
+# --- Web Endpoint ---
+@app.route('/chat', methods=['POST'])
+def handle_chat():
+    """Handles incoming POST requests for chat messages."""
+    global total_score # Declare intent to modify the global variable
+    global model # Access the global model variable
+
+    if not model:
+        # Should not happen if setup_gemini() is called first, but good practice
+        return jsonify({"error": "Gemini model not initialized"}), 500
+
+    # --- Get Player Input ---
+    try:
+        # Get raw data from request body
+        player_input_bytes = request.data
+        if not player_input_bytes:
+            print(request)
+            return jsonify({"error": "Request body is empty"}), 400
+
+        # Decode assuming UTF-8 text
+        player_input = player_input_bytes.decode('utf-8').strip()
+        if not player_input:
+            return jsonify({"error": "Player message is empty after stripping whitespace"}), 400
+
+    except UnicodeDecodeError:
+        return jsonify({"error": "Failed to decode request body as UTF-8 text"}), 400
+    except Exception as e:
+        print(f"Error reading request data: {e}", file=sys.stderr)
+        return jsonify({"error": "Could not process request data"}), 400
+
+    # Construct the full prompt for Gemini
+    full_prompt = f"{GEMINI_INSTRUCTION}\nUser message: \"{player_input}\""
+
+    try:
+        # --- Call Gemini API ---
+        response = model.generate_content(full_prompt)
+        response_text = response.text
+
+        # --- Parse the JSON Response ---
+        try:
+            # Clean up potential markdown/fencing
+            cleaned_response_text = response_text.strip().strip('```json').strip('```').strip()
+            response_data = json.loads(cleaned_response_text)
+
+            cpu_message = response_data.get("message")
+            cpu_score = response_data.get("score") # Use .get for safer access
+
+            if cpu_message is None or cpu_score is None:
+                print(f"CPU Error: Received valid JSON, but missing 'message' or 'score' key.", file=sys.stderr)
+                print(f"Raw Response: {cleaned_response_text}", file=sys.stderr)
+                return jsonify({"error": "Gemini response missing required keys"}), 500
+
+            # Ensure score is a float/int for calculations
+            try:
+                cpu_score = float(cpu_score) # Convert score to float for consistency
+            except (ValueError, TypeError):
+                print(f"CPU Error: Score value '{cpu_score}' is not a valid number.", file=sys.stderr)
+                return jsonify({"error": "Invalid score format in Gemini response"}), 500
+
+            # --- Update Total Score ---
+            #total_score += cpu_score
+            #current_total_score = total_score # Capture score for this response
+
+            # --- Prepare Successful Response Payload ---
+            response_payload = {
+                "message": cpu_message,
+                "score": cpu_score / 10.0 - 0.5 #, The score change from this turn
+                #"total_score": current_total_score # The cumulative score after this turn
+            }
+
+
+            response = jsonify(response_payload)
+            response.headers.add("Access-Control-Allow-Origin", "*")
+            return response, 200
+
+        except json.JSONDecodeError:
+            print(f"CPU Error: Failed to decode JSON response from Gemini.", file=sys.stderr)
+            print(f"Raw Response: {response_text}", file=sys.stderr)
+            return jsonify({"error": "Failed to parse Gemini JSON response"}), 500
+        except Exception as e: # Catch other potential errors during parsing/extraction
+            print(f"CPU Error: An unexpected error occurred processing the response: {e}", file=sys.stderr)
+            print(f"Raw Response: {response_text}", file=sys.stderr)
+            return jsonify({"error": f"Internal server error processing response: {e}"}), 500
+
+    except Exception as e:
+        # Handle potential errors during the API call itself
+        print(f"CPU Error: Failed to get response from Gemini API: {e}", file=sys.stderr)
+        # Check for specific Gemini exceptions if the library provides them, otherwise generic
+        # Example: Check if error is related to content filtering, API key, etc.
+        return jsonify({"error": f"Failed to communicate with Gemini API: {e}"}), 502 # 502 Bad Gateway might be appropriate
+
+# --- Main Execution ---
+if __name__ == "__main__":
+    print("--- Player/CPU Chat Server ---")
+    setup_gemini() # Initialize Gemini model on startup
+    print(f"Model: {MODEL_NAME}")
+    # Default Flask port is 5000
+    print("--- Listening for POST requests on http://127.0.0.1:5000/chat ---")
+    print("-" * 30)
+    # Run the Flask development server
+    # Use host='0.0.0.0' to make it accessible from other devices on the network
+    app.run(host='0.0.0.0', port=5000, debug=False) # Turn debug=False for non-dev use
+    # Use debug=True for development (auto-reloads, provides debugger)
+    #app.run(debug=True)
+
diff --git a/styles.css b/styles.css
index 145f86c..b9a5329 100644
--- a/styles.css
+++ b/styles.css
@@ -69,6 +69,7 @@ body {
   #side-panel .conversation {
     max-width: 300px;
   }
+
 }
 
 #side-panel .conversation {
@@ -99,15 +100,14 @@ body {
 }
 
 #main-panel {
-  float: right;
-
   display: flex;
   flex-direction: column;
   justify-content: flex-end;
+  width: calc(100% - 300px);
 }
 
 #page {
-  max-width: 600px;
+  width: 100%;
 }
 
 h1 {
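
For a quick manual check of the new /chat endpoint outside the browser client, a minimal sketch (assuming the server from srv/caesar.py is running locally on port 5000 with GEMINI_API_KEY set; the URL, text/plain body, and message/score response keys mirror the fetch() call in main.js, while the helper name itself is hypothetical):

# Manual test for the /chat endpoint added in srv/caesar.py (hypothetical helper,
# not part of the game code). Uses only the Python standard library.
import json
import urllib.request

def send_chat_message(text, url="http://127.0.0.1:5000/chat"):
    # POST the raw player text as text/plain, exactly as main.js does with fetch().
    req = urllib.request.Request(
        url,
        data=text.encode("utf-8"),
        headers={"Content-Type": "text/plain"},
        method="POST",
    )
    with urllib.request.urlopen(req) as resp:
        payload = json.loads(resp.read().decode("utf-8"))
    # caesar.py rescales the model's 0-10 score to roughly -0.5..0.5 before
    # returning it, so this value is the per-turn delta the client adds to this.score.
    return payload["message"], float(payload["score"])

if __name__ == "__main__":
    message, score = send_chat_message("Greetings, Caesar. We must speak.")
    print(f"score delta: {score:+.2f}  reply: {message}")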