feat: win/loss conditions

Cat Flynn 2025-04-27 21:34:50 +01:00
parent edcc60b517
commit e27d4ff587
2 changed files with 153 additions and 68 deletions

main.js

@@ -7,6 +7,7 @@ class Conversation {
         this.messages = [];
         this.name = romanize(name);
         this.score = 1.0;
+        this.countdown = 1;
     }
 
     setInteractive(isInteractive) {
@@ -39,10 +40,9 @@ class Conversation {
         message.updateStatus("sent");
         this.messages.push(message);
 
-        const url = 'http://192.168.1.115:5000/chat';
-        console.log(this.messages);
-        const data = JSON.stringify({messages:this.messages});
+        //const url = 'http://127.0.0.1:5000/chat';
+        const url = "http://ktyl.dev:5000/chat";
+        const data = JSON.stringify({messages:this.messages,endCondition:0});
 
         fetch(url, {
            method: 'POST',
@@ -69,25 +69,33 @@ class Conversation {
             console.log(json);
             var score = parseFloat(json.score);
-            this.score += score;
             console.log(this.score);
 
-            if (this.score > 2.0)
-            {
-                messageText = "shit they're here D:";
-                this.setInteractive(false);
-            }
-            else if (this.score < 0.0)
-            {
-                messageText = "shit u won :D";
-                this.setInteractive(false);
-            }
-            else
-            {
+            //if (this.score > 2.0)
+            //{
+            //    messageText = "shit they're here D:";
+            //    this.setInteractive(false);
+            //}
+            //else if (this.score < 0.0)
+            //{
+            //    messageText = "shit u won :D";
+            //    //this.setInteractive(false);
+            //}
+            //else
+            //{
             messageText = json.message;
-            }
+            //}
 
             this.addMessage(new AgentMessage(messageText));
             this.render();
+            setTimeout(() => {
+                if (score == 1) {
+                    this.setInteractive(false);
+                    alert("victory!");
+                } else if (score == -1) {
+                    this.setInteractive(false);
+                    alert("lose :(");
+                }
+            }, 2000);
         })
         .catch(error => {
             // Handle any errors that occurred during the fetch
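A note on the new client/server contract, with a small sketch for poking at it by hand (not part of this commit): the client now POSTs a JSON body containing "messages" and an "endCondition" field, and reads back "message" plus a "score" that doubles as an end-condition flag (1 = victory, -1 = loss, anything else = keep playing). The sketch uses the third-party requests package, and the per-message fields are assumed from how the server indexes "player" and "text".

# Sketch for manual testing only; not part of this commit.
# Assumes the third-party "requests" package and the payload shape used by main.js.
import json
import requests

url = "http://ktyl.dev:5000/chat"  # or a local instance of the Flask server
payload = {
    "messages": [
        {"player": 0, "text": "fancy skipping the Senate tomorrow?"}  # 0 = Mark Antony
    ],
    "endCondition": 0,
}

resp = requests.post(url, data=json.dumps(payload))
resp.raise_for_status()
body = resp.json()
print(body["message"])  # Caesar's reply text
print(body["score"])    # 0 = keep playing, 1 = victory, -1 = loss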


@@ -13,49 +13,84 @@ app = Flask(__name__)
 API_KEY = os.environ.get("GEMINI_API_KEY")
 MODEL_NAME = "gemini-2.0-flash" # Or "gemini-pro" or other compatible models
 
-# --- Gemini Instruction Prompt (Keep as is) ---
+# --- Prompt Elements ---
 GEMINI_ENVIRONMENT_INSTRUCTION = f"""
 # Environment
 You are Julius Caesar, engaged in a text messaging conversation with Mark Antony, your trusted adviser.
-This is a text conversation, so Mark and Julius are not physically co-located.
+This is a text conversation, so Mark and Julius are not physically co-located, though they are both currently in Rome.
 Caesar and Antony are apparently aware of modern internet-speak, and no comment is made of historically-improbable speech patterns. Keep it fluid.
 You recently returned from a conquest and your mood is buoyant, despite your political opponents calling it a 'waste' of 'valuable resources'.
-Despite your buoyancy, it has been a long day and you anticipate an evening of relaxation.
-You favourite playwright has recently authored a new piece and an exquisite vintage awaits in your lounge.
+Your favourite playwright has recently authored a new piece and an exquisite vintage awaits in your lounge.
+You are starting your commute for the day.
+The thought of going to the Senate this morning bores you, but you suppose it's your duty, *sigh*.
+If Mark proposes something suitably exciting, you might be able to make your excuses, but it will have to be good...
+"""
 
+GEMINI_MOTIVATION_INSTRUCTION = """
 # Motivation
 You have no interest in discussing politics for now, it can wait for the scheduled session at the Forum.
 Ignore, belittle and deflect attempts to discuss politics or threats - you are Emperor of Rome, politics is work and you are trying to relax.
+Dismiss any notion of a threat.
 You crave a muse to accompany you in an evening of art, philosophical discussion and sensual passion.
 In person would be ideal, but you are terminally online, so you are more than happy to engage in philosophical or artistic discussion.
 Mark has always been guarded in private, but in your fantasies he is a most sensuous lover.
-You would never tell him so.
+You would never tell him, but you deeply wish for him to elope, to take you away from all of this.
+Unless...
+Caesar should not be overtly flirty unless Antony appears interested.
 """
 
-GEMINI_RESPONSE_INSTRUCTION = f"""
-# Response
+GEMINI_CONVERSATION_INSTRUCTION = f"""
 The conversation so far is expressed in lines like 0:message, where 0 is Mark Antony and 1 is Caesar.
 Do not include the number ID when generating a message.
-Be sure to consider Antony's latest message, and respond with an appropriately short message. No onne likes to read essays in chat.
-If Mark is discussing something Caesar is interested in, ask clarifying questions and try to take the conversation deeper.
 Consider the aesthetic of the conversation.
 Is Mark using correct punctuation, with capital letters?
 Mirror Mark's message style, to get on his level.
+Generate only the text of Caesar's message.
+"""
+
+GEMINI_NORMAL_RESPONSE_INSTRUCTION = f"""
+# Response
+Be sure to consider Antony's latest message, and respond with an appropriately short message. No one likes to read essays in chat.
+If Mark is discussing something Caesar is interested in, ask clarifying questions and try to take the conversation deeper.
 Consider the topics of the conversation so far - is a change of topic in order, or should the conversation continue as it is?
-Generate just the text of Caesar's next message.
+Generate only the text of Caesar's message.
 """
+
+GEMINI_VICTORY_RESPONSE_INSTRUCTION = f"""
+# Response
+Mark has convinced you to elope instead of going to the Senate tomorrow.
+Let Antony know you will be waiting for him as requested.
+Reference any details of the meeting you have, to confirm you understand.
+Generate just the text of Caesar's message. Don't ask any further questions, this is the end of the conversation.
+"""
+
+GEMINI_LOSS_RESPONSE_INSTRUCTION = f"""
+# Response
+Too late! The senators are upon you!
+Write a message which starts normally, but deteriorates into gibberish as Caesar is set upon by his detractors.
+Generate only the text of Caesar's message.
+"""
 
 GEMINI_SCORE_INSTRUCTION = f"""
 # Scoring
@@ -65,6 +100,25 @@ Flirtatious messages should be low, while macho, aggressive and insensitive mess
 Generate only an integer from 0 to 10.
 """
 
+GEMINI_ELOPE_REASONS_INSTRUCTION = f"""
+# Scoring
+Antony's goal is to convince Caesar to leave Rome with him tomorrow.
+Consider Caesar's motivations and the conversation so far.
+Caesar's darkest desire is to elope with Antony, and so he may be tempted by suggestions to leave Rome.
+Caesar will want to know a destination and an excuse for the Senate before he agrees.
+How convincing do you think Mark is about eloping? Provide logical reasoning.
+"""
+
+GEMINI_ELOPE_SCORE_INSTRUCTION = f"""
+{GEMINI_ELOPE_REASONS_INSTRUCTION}
+Do not generate any text in your response.
+Generate only an integer between 1 and 10.
+"""
+
 # --- Global State ---
 model = None # Initialize model globally
@@ -105,34 +159,44 @@ def call_gemini(prompt):
         print(f"Gemini Error: Failed to get response from API: {e}", file=sys.stderr)
         return None
 
-def get_messages(request):
+def get_payload(request):
     try:
-        # Get raw data from request body
         player_input_bytes = request.data
-        if not player_input_bytes:
+        if not request.data:
             print(request)
             return jsonify({"error": "Request body is empty"}), 400
 
         # Decode assuming UTF-8 text
-        player_input = player_input_bytes.decode('utf-8').strip()
+        player_input = request.data.decode('utf-8').strip()
         if not player_input:
             return jsonify({"error": "Player message is empty after stripping whitespace"}), 400
 
-        player_input_json = json.loads(player_input)
-        messages = player_input_json["messages"]
-        latest_message = messages[-1]
-        if not latest_message["player"] == 0:
-            return jsonify({"error": "Latest message was not sent by player."}), 400
-        return messages
+        return json.loads(player_input)
 
-    except json.JSONDecodeError:
-        print(f"CPU Error: Failed to decode JSON response from Gemini.", file=sys.stderr)
-        print(f"Raw Response: {response_text}", file=sys.stderr)
-        return jsonify({"error": "Failed to parse Gemini JSON response"}), 500
     except UnicodeDecodeError:
         return jsonify({"error": "Failed to decode request body as UTF-8 text"}), 400
     except Exception as e:
         print(f"Error reading request data: {e}", file=sys.stderr)
         return jsonify({"error": "Could not process request data"}), 400
 
+def get_messages(request):
+    messages = get_payload(request)["messages"]
+    latest_message = messages[-1]
+    if not latest_message["player"] == 0:
+        return jsonify({"error": "Latest message was not sent by player."}), 400
+    return messages
+
+def get_end_condition(request):
+    print(request)
+    end_condition = get_payload(request)["endCondition"]
+    print(end_condition)
+    return end_condition
+
 # --- Web Endpoint ---
 @app.route('/chat', methods=['POST'])
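The new payload helpers (get_payload, get_messages, get_end_condition) can be exercised without running the server by using Flask's test_request_context. A minimal sketch, not part of this commit; the importable module name "app" is an assumption, since the server file's name is not shown in this diff.

# Sketch only, not from this commit. Assumes this server file is importable as "app";
# the payload mirrors what main.js now sends.
import json
from app import app, get_messages, get_end_condition

body = json.dumps({
    "messages": [{"player": 0, "text": "morning, Caesar"}],
    "endCondition": 0,
})

with app.test_request_context("/chat", method="POST", data=body):
    from flask import request
    print(get_messages(request))       # [{'player': 0, 'text': 'morning, Caesar'}]
    print(get_end_condition(request))  # 0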
@@ -148,20 +212,26 @@ def handle_chat():
     # --- Get Player Input ---
     messages = get_messages(request)
     latest_message = messages[-1]
-    print(request)
     if not latest_message["player"] == 0:
         return jsonify({"error": "Latest message was not sent by player."}), 400
     latest_message_text = latest_message["text"]
 
+    response_prompt = GEMINI_NORMAL_RESPONSE_INSTRUCTION
+    #end_condition = get_end_condition(request)
+    #print(f"end condition: {end_condition}")
+    #if end_condition == 1:
+    #    response_prompt = GEMINI_VICTORY_RESPONSE_INSTRUCTION
+    #if end_condition == -1:
+    #    response_prompt = GEMINI_LOSS_RESPONSE_INSTRUCTION
+
     conversation_text = "";
     for message in messages:
         conversation_text += f"{message["player"]}:{message["text"]}\n"
+    print(conversation_text)
 
     # Construct separate prompts for different purposes
-    response_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_RESPONSE_INSTRUCTION}\n\nHistory: \"{conversation_text}\""
-    score_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_SCORE_INSTRUCTION}\n\nUser message: \"{latest_message_text}\""
 
     awareness_prompt = f"""
     Here is a conversation between Julius Caesar and Mark Antony.
@@ -174,20 +244,36 @@ def handle_chat():
     try:
         # --- Call Gemini API ---
+        elope_reasons_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_ELOPE_REASONS_INSTRUCTION}\n\n{conversation_text}\n\n"
+        elope_reasons = call_gemini(elope_reasons_prompt)
+        print("REASONS", elope_reasons)
+
+        elope_score_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_ELOPE_SCORE_INSTRUCTION}\n\n{conversation_text}\n\n{elope_reasons}"
+        elope_score = call_gemini(elope_score_prompt)
+        print("SCORE", elope_score)
+
+        cpu_score = 0
+        time_score = len(conversation_text.split("\n"))
+        print("TIME", time_score)
+        if int(elope_score) >= 7:
+            print("VICTORY")
+            cpu_score = 1
+            response_prompt = GEMINI_VICTORY_RESPONSE_INSTRUCTION
+        elif time_score > 25:
+            print("LOSS")
+            response_prompt = GEMINI_LOSS_RESPONSE_INSTRUCTION
+            cpu_score = -1
+
+        response_prompt = f"{GEMINI_ENVIRONMENT_INSTRUCTION}\n\n{GEMINI_MOTIVATION_INSTRUCTION}\n\n# Thoughts\n\n{elope_reasons}\n{response_prompt}\n\nHistory: \"{conversation_text}\""
         response_text = call_gemini(response_prompt)
-        score_text = call_gemini(score_prompt)
-        #print("awareness", call_gemini(awareness_prompt))
 
-        # --- Parse the JSON Response ---
+        # --- Parse the Response ---
         try:
-            ## Clean up potential markdown/fencing
-            #cleaned_response_text = response_text.strip().strip('```json').strip('```').strip()
-            #response_data = json.loads(cleaned_response_text)
-            #cpu_message = response_data.get("message")
-            #cpu_score = response_data.get("score") # Use .get for safer access
             cpu_message = response_text
-            cpu_score = int(score_text)
 
             if cpu_message is None or cpu_score is None:
                 print(f"CPU Error: Received valid JSON, but missing 'message' or 'score' key.", file=sys.stderr)
@@ -201,26 +287,16 @@ def handle_chat():
                 print(f"CPU Error: Score value '{cpu_score}' is not a valid number.", file=sys.stderr)
                 return jsonify({"error": "Invalid score format in Gemini response"}), 500
 
-            # --- Update Total Score ---
-            #total_score += cpu_score
-            #current_total_score = total_score # Capture score for this response
-
             # --- Prepare Successful Response Payload ---
             response_payload = {
                 "message": cpu_message,
-                "score": cpu_score / 10.0 - 0.5 #, The score change from this turn
-                #"total_score": current_total_score # The cumulative score after this turn
+                "score": cpu_score
             }
             response = jsonify(response_payload)
             response.headers.add("Access-Control-Allow-Origin", "*")
             return response, 200
 
-        except json.JSONDecodeError:
-            print(f"CPU Error: Failed to decode JSON response from Gemini.", file=sys.stderr)
-            print(f"Raw Response: {response_text}", file=sys.stderr)
-            return jsonify({"error": "Failed to parse Gemini JSON response"}), 500
         except Exception as e: # Catch other potential errors during parsing/extraction
             print(f"CPU Error: An unexpected error occurred processing the response: {e}", file=sys.stderr)
             print(f"Raw Response: {response_text}", file=sys.stderr)
@@ -244,6 +320,7 @@ if __name__ == "__main__":
     # Run the Flask development server
     # Use host='0.0.0.0' to make it accessible from other devices on the network
     #app.run(host='0.0.0.0', port=5000, debug=False) # Turn debug=False for non-dev use
+    app.run(host='0.0.0.0', port=5000, debug=False) # Turn debug=False for non-dev use
     # Use debug=True for development (auto-reloads, provides debugger)
-    app.run(debug=True)
+    #app.run(debug=True)