From afeaeba3136e0bf7533da08cb8323fd38d072c07 Mon Sep 17 00:00:00 2001 From: Joao Figueiredo Date: Sat, 3 May 2025 19:21:12 +0100 Subject: [PATCH] WIP: tuning --- ai_service/main.py | 13 ++++++++----- ai_service/requirements.txt | 2 +- matrix_service/main.py | 11 ++++++++--- 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/ai_service/main.py b/ai_service/main.py index 4566499..42f5eba 100644 --- a/ai_service/main.py +++ b/ai_service/main.py @@ -32,12 +32,15 @@ async def message( # Build prompt (very simple example) prompt = f"User {payload.userId} said: {payload.content}\nBot:" - resp = openai.Completion.create( - model="text-davinci-003", - prompt=prompt, - max_tokens=150 + chat_response = openai.ChatCompletion.create( + model="gpt-3.5-turbo", + messages=[ + {"role": "system", "content": "-"}, + {"role": "user", "content": prompt} + ], + temperature=0.7, ) - reply = resp.choices[0].text.strip() + reply = chat_response.choices[0].message.content.strip() # Cache reply for idempotency r.set(payload.eventId, reply, ex=3600) diff --git a/ai_service/requirements.txt b/ai_service/requirements.txt index b481e28..fe52b67 100644 --- a/ai_service/requirements.txt +++ b/ai_service/requirements.txt @@ -1,5 +1,5 @@ python-dotenv>=1.0.0 -openai +openai>=0.27,<1.0 fastapi>=0.95 uvicorn>=0.22 redis>=4.5 diff --git a/matrix_service/main.py b/matrix_service/main.py index aec13cd..3954169 100644 --- a/matrix_service/main.py +++ b/matrix_service/main.py @@ -86,9 +86,14 @@ async def main() -> None: } headers = {"Authorization": f"Bearer {AI_HANDLER_TOKEN}"} async with httpx.AsyncClient() as http: - resp = await http.post(f"{AI_HANDLER_URL}/api/v1/message", json=payload, headers=headers) - resp.raise_for_status() - data = resp.json() + try: + resp = await http.post(f"{AI_HANDLER_URL}/api/v1/message", json=payload, headers=headers) + resp.raise_for_status() + data = resp.json() + except httpx.HTTPStatusError as e: + logger.error(f"HTTP error: {e.response.status_code} - {e.response.text}"); data = {} 
+ except Exception: + logger.exception("Error while calling AI handler"); data = {} if data.get("reply"): client.send_message(event["room_id"], data["reply"])