into services

2025-05-04 15:39:57 +01:00
parent 5c9bdc771d
commit 5bf54f0d44
2 changed files with 66 additions and 21 deletions

View File

@@ -2,13 +2,27 @@ import os
 from dotenv import load_dotenv
 from fastapi import FastAPI, Header, HTTPException
 from pydantic import BaseModel
-import openai
+from openai import OpenAI
+import logging
+import redis
+LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
 AI_TOKEN = os.environ["AI_HANDLER_TOKEN"]
-openai.api_key = os.environ["OPENAI_API_KEY"]
+AIclient = OpenAI(
+    api_key=os.environ["OPENAI_API_KEY"],
+)
+r = redis.Redis.from_url(os.environ.get("REDIS_URL", "redis://redis:6379"))
+# --- Logging Setup ---
+numeric_level = getattr(logging, LOG_LEVEL, logging.INFO)
+logging.basicConfig(
+    level=numeric_level,
+    format="%(asctime)s %(levelname)s %(name)s: %(message)s"
+)
+logger = logging.getLogger(__name__)
 class MessagePayload(BaseModel):
     roomId: str
     userId: str
@@ -32,15 +46,19 @@ async def message(
     # Build prompt (very simple example)
     prompt = f"User {payload.userId} said: {payload.content}\nBot:"
-    chat_response = opeai.ChatCompletion.create(
+    chat_response = AIclient.chat.completions.create(
         model="gpt-3.5-turbo",
         messages=[
-            {"role": "system", "content": "-"},
+            {"role": "system", "content": "You are a helpful assistant."},
             {"role": "user", "content": prompt}
-        ]
+        ],
+        max_tokens=150,
+        n=1,
+        stop=None,
+        temperature=0.7,
     )
-    reply = chat_response.choices[0].text.strip()
+    reply = chat_response.choices[0].message.content.strip()
+    # Cache reply for idempotency
+    r.set(payload.eventId, reply, ex=3600)
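
The added r.set(payload.eventId, reply, ex=3600) caches each generated reply for an hour. A minimal sketch of the matching read side, assuming the handler wants to return the cached reply for an event it has already answered instead of calling OpenAI again; the helper name is illustrative and not part of this commit:

def cached_reply(event_id: str) -> str | None:
    """Return a previously generated reply for this event ID, if one is cached."""
    cached = r.get(event_id)  # bytes or None; `r` is the Redis client defined above
    return cached.decode() if cached is not None else None

# Inside the handler above, before calling the OpenAI client (field names
# mirror the payload it already reads):
#     if (reply := cached_reply(payload.eventId)) is not None:
#         return {"reply": reply}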

View File

@@ -37,18 +37,17 @@ async def main() -> None:
     async def trust_all_devices(client) -> None:
         """
-        Programmatically verify all devices to allow sharing encryption keys.
+        Mark every other user's device as verified so we can receive their
+        future Megolm keys without being blocked.
         """
         for room_id in client.rooms:
             try:
-                devices = await client.room_devices(room_id)
-                if isinstance(devices, dict):
-                    for user, dev_ids in devices.items():
-                        if user == USER_ID:
+                devices_in_room = client.room_devices(room_id)
+                for user_id, user_devices in devices_in_room.items():
+                    if user_id == client.user_id:
                         continue
-                    for dev_id in dev_ids:
-                        device = client.crypto.device_store.get_device(user, dev_id)
-                        if device and not client.crypto.device_store.is_device_verified(device):
+                    for dev_id, device in user_devices.items():
+                        if not client.device_store.is_device_verified(device):
                             logger.info(f"Trusting {dev_id} for {user}")
                             client.verify_device(device)
             except Exception:
@@ -63,6 +62,32 @@ async def main() -> None:
logger.info("Joined %s", room.room_id)
await trust_all_devices(client)
async def send_message(room: MatrixRoom, message: str) -> None:
"""
Send a message to `room`.
"""
try:
await trust_all_devices(client)
await client.share_group_session(
room.room_id,
ignore_unverified_devices=True
)
await client.room_send(
room_id=room.room_id,
message_type="m.room.message",
content={
"msgtype": "m.text",
"body": message
},
ignore_unverified_devices=True
)
logger.info("Sent message to %s: %s", room.room_id, message)
except Exception as e:
logger.error(f"Error sending message: {e}")
async def on_message(room: MatrixRoom, event: RoomMessageText) -> None:
"""
Handle incoming messages.
@@ -91,7 +116,9 @@ async def main() -> None:
             resp.raise_for_status()
             data = resp.json()
             if data.get("reply"):
-                client.send_message(event["room_id"], data["reply"])
+                await trust_all_devices(client)
+                await send_message(room, data["reply"])
                 logger.info("Reply sent: %s", data["reply"])
         except httpx.HTTPStatusError as e:
             logger.error(f"HTTP error: {e.response.status_code} - {e.response.text}")
         except Exception:
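
For context on the hunk above: on_message forwards the incoming event to the AI handler over HTTP and relays any reply through the new send_message helper. A sketch of what that request might look like, assuming httpx and a bearer token that the handler presumably checks; AI_HANDLER_URL, the route, and the Authorization header format are assumptions, only the JSON field names come from the handler's MessagePayload and the attributes it reads:

import os
import httpx

AI_HANDLER_URL = os.environ.get("AI_HANDLER_URL", "http://ai-handler:8000/message")  # assumed name and route
AI_TOKEN = os.environ["AI_HANDLER_TOKEN"]

async def ask_ai_handler(room_id: str, user_id: str, event_id: str, content: str) -> str | None:
    """POST an incoming Matrix event to the AI handler and return its reply, if any."""
    async with httpx.AsyncClient(timeout=30) as http:
        resp = await http.post(
            AI_HANDLER_URL,
            json={"roomId": room_id, "userId": user_id, "eventId": event_id, "content": content},
            headers={"Authorization": f"Bearer {AI_TOKEN}"},  # assumed header scheme
        )
        resp.raise_for_status()
        return resp.json().get("reply")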
@@ -123,7 +150,7 @@ async def main() -> None:
logger.debug("Upload one time Olm keys")
try:
await client.upload_keys()
await client.keys_upload()
except Exception as e:
logger.error(f"Error uploading keys: {e}")
return
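
The last hunk swaps upload_keys() for keys_upload(), the method matrix-nio's AsyncClient actually exposes for uploading one-time Olm keys. A minimal startup sketch under that assumption; the environment variable names, store path, and entry-point function are illustrative placeholders, not names from this repo:

import asyncio
import os
from nio import AsyncClient, AsyncClientConfig, RoomMessageText

async def run_bot() -> None:
    # End-to-end encryption needs matrix-nio[e2e] (python-olm) installed.
    config = AsyncClientConfig(encryption_enabled=True, store_sync_tokens=True)
    client = AsyncClient(os.environ["HOMESERVER"], os.environ["MATRIX_USER_ID"],
                         store_path="./store", config=config)
    await client.login(os.environ["MATRIX_PASSWORD"])
    if client.should_upload_keys:  # only upload when the server needs fresh one-time keys
        await client.keys_upload()
    client.add_event_callback(on_message, RoomMessageText)  # on_message as defined above
    await client.sync_forever(timeout=30000, full_state=True)

asyncio.run(run_bot())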