From 5bf54f0d4467a6294ea7f0ea40e2b390ad881d3c Mon Sep 17 00:00:00 2001
From: Joao Figueiredo
Date: Sun, 4 May 2025 15:39:57 +0100
Subject: [PATCH] into services

---
 ai_service/main.py     | 30 +++++++++++++++++-----
 matrix_service/main.py | 57 +++++++++++++++++++++++++++++++-----------
 2 files changed, 66 insertions(+), 21 deletions(-)

diff --git a/ai_service/main.py b/ai_service/main.py
index 42f5eba..c44fd2b 100644
--- a/ai_service/main.py
+++ b/ai_service/main.py
@@ -2,13 +2,27 @@ import os
 from dotenv import load_dotenv
 from fastapi import FastAPI, Header, HTTPException
 from pydantic import BaseModel
-import openai
+from openai import OpenAI
+import logging
 import redis
 
+LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
 AI_TOKEN = os.environ["AI_HANDLER_TOKEN"]
-openai.api_key = os.environ["OPENAI_API_KEY"]
+
+AIclient = OpenAI(
+    api_key=os.environ["OPENAI_API_KEY"],
+)
+
 r = redis.Redis.from_url(os.environ.get("REDIS_URL", "redis://redis:6379"))
 
+# --- Logging Setup ---
+numeric_level = getattr(logging, LOG_LEVEL, logging.INFO)
+logging.basicConfig(
+    level=numeric_level,
+    format="%(asctime)s %(levelname)s %(name)s: %(message)s"
+)
+logger = logging.getLogger(__name__)
+
 class MessagePayload(BaseModel):
     roomId: str
     userId: str
@@ -32,15 +46,19 @@ async def message(
 
     # Build prompt (very simple example)
     prompt = f"User {payload.userId} said: {payload.content}\nBot:"
-    chat_response = opeai.ChatCompletion.create(
+    chat_response = AIclient.chat.completions.create(
         model="gpt-3.5-turbo",
         messages=[
-            {"role": "system", "content": "-"},
+            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt}
-        ]
+        ],
+        max_tokens=150,
+        n=1,
+        stop=None,
         temperature=0.7,
     )
-    reply = chat_response.choices[0].text.strip()
+
+    reply = chat_response.choices[0].message.content.strip()
 
     # Cache reply for idempotency
     r.set(payload.eventId, reply, ex=3600)
diff --git a/matrix_service/main.py b/matrix_service/main.py
index e4ce5f8..008f6a5 100644
--- a/matrix_service/main.py
+++ b/matrix_service/main.py
@@ -37,20 +37,19 @@ async def main() -> None:
 
     async def trust_all_devices(client) -> None:
         """
-        Programmatically verify all devices to allow sharing encryption keys.
+        Mark every other user's device as verified so we can receive their
+        future Megolm keys without being blocked.
         """
         for room_id in client.rooms:
             try:
-                devices = await client.room_devices(room_id)
-                if isinstance(devices, dict):
-                    for user, dev_ids in devices.items():
-                        if user == USER_ID:
-                            continue
-                        for dev_id in dev_ids:
-                            device = client.crypto.device_store.get_device(user, dev_id)
-                            if device and not client.crypto.device_store.is_device_verified(device):
-                                logger.info(f"Trusting {dev_id} for {user}")
-                                client.verify_device(device)
+                devices_in_room = client.room_devices(room_id)
+                for user_id, user_devices in devices_in_room.items():
+                    if user_id == client.user_id:
+                        continue
+                    for dev_id, device in user_devices.items():
+                        if not client.device_store.is_device_verified(device):
+                            logger.info(f"Trusting {dev_id} for {user_id}")
+                            client.verify_device(device)
             except Exception:
                 logger.exception(f"Error trusting devices in {room_id}")
 
@@ -63,6 +62,32 @@ async def main() -> None:
             logger.info("Joined %s", room.room_id)
             await trust_all_devices(client)
 
+    async def send_message(room: MatrixRoom, message: str) -> None:
+        """
+        Send `message` to `room`, trusting devices and sharing the group session first.
+        """
+        try:
+            await trust_all_devices(client)
+
+            await client.share_group_session(
+                room.room_id,
+                ignore_unverified_devices=True
+            )
+
+            await client.room_send(
+                room_id=room.room_id,
+                message_type="m.room.message",
+                content={
+                    "msgtype": "m.text",
+                    "body": message
+                },
+                ignore_unverified_devices=True
+            )
+            logger.info("Sent message to %s: %s", room.room_id, message)
+        except Exception as e:
+            logger.error(f"Error sending message: {e}")
+
+
     async def on_message(room: MatrixRoom, event: RoomMessageText) -> None:
         """
         Handle incoming messages.
@@ -80,8 +105,8 @@ async def main() -> None:
         payload = {
             "roomId": event.room_id,
             "userId": event.sender,
-            "eventId": event.event_id,
-            "serverTimestamp": event.server_timestamp,
+            "eventId": event.event_id,
+            "serverTimestamp": event.server_timestamp,
             "content": event.body
         }
         headers = {"Authorization": f"Bearer {AI_HANDLER_TOKEN}"}
@@ -91,7 +116,9 @@ async def main() -> None:
             resp.raise_for_status()
             data = resp.json()
             if data.get("reply"):
-                client.send_message(event["room_id"], data["reply"])
+                await trust_all_devices(client)
+                await send_message(room, data["reply"])
+                logger.info("Reply sent: %s", data["reply"])
         except httpx.HTTPStatusError as e:
             logger.error(f"HTTP error: {e.response.status_code} - {e.response.text}")
         except Exception:
@@ -123,7 +150,7 @@ async def main() -> None:
         logger.debug("Upload one time Olm keys")
         try:
-            await client.upload_keys()
+            await client.keys_upload()
         except Exception as e:
             logger.error(f"Error uploading keys: {e}")
             return
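
For reference, a minimal sketch of the call the matrix_service side is expected to make against the ai_service message handler changed above. The service hostname, port, and route ("http://ai_service:8000/message") are assumptions and are not visible in this patch; the payload fields, the Bearer token, and the "reply" key in the response mirror the hunks above.

    import asyncio
    import os

    import httpx

    # Same token the services already share via the environment.
    AI_HANDLER_TOKEN = os.environ["AI_HANDLER_TOKEN"]

    # Hypothetical URL; the real route is defined in ai_service/main.py and not shown in this patch.
    AI_SERVICE_URL = "http://ai_service:8000/message"

    async def ask_ai(room_id: str, sender: str, event_id: str, timestamp: int, body: str) -> str | None:
        """POST one Matrix event to the AI service and return its reply, if any."""
        payload = {
            "roomId": room_id,
            "userId": sender,
            "eventId": event_id,
            "serverTimestamp": timestamp,
            "content": body,
        }
        headers = {"Authorization": f"Bearer {AI_HANDLER_TOKEN}"}
        async with httpx.AsyncClient(timeout=30) as http:
            resp = await http.post(AI_SERVICE_URL, json=payload, headers=headers)
            resp.raise_for_status()
            return resp.json().get("reply")

    if __name__ == "__main__":
        # Example invocation with placeholder Matrix identifiers.
        reply = asyncio.run(ask_ai("!room:example.org", "@alice:example.org", "$evt1", 0, "hello"))
        print(reply)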