Transforms the Matrix and OpenAI interactor into independent services, fixes #2 #3

Merged
jfig merged 6 commits from 2-into_services into dev 2025-05-04 14:56:58 +00:00
3 changed files with 17 additions and 9 deletions
Showing only changes of commit afeaeba313

View File

@@ -32,12 +32,15 @@ async def message(
     # Build prompt (very simple example)
     prompt = f"User {payload.userId} said: {payload.content}\nBot:"
-    resp = openai.Completion.create(
-        model="text-davinci-003",
-        prompt=prompt,
-        max_tokens=150
+    chat_response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=[
+            {"role": "system", "content": "-"},
+            {"role": "user", "content": prompt}
+        ],
+        temperature=0.7,
     )
-    reply = resp.choices[0].text.strip()
+    reply = chat_response.choices[0].message.content.strip()
     # Cache reply for idempotency
     r.set(payload.eventId, reply, ex=3600)
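
Note that the requirements change below pins openai>=1.0.0, a release that removed the module-level ChatCompletion interface used above. A minimal sketch of the equivalent call with the 1.x client, assuming OPENAI_API_KEY is set in the environment and prompt is built as in the diff (openai_client is a hypothetical name, not taken from the repo), could look like this:

    # Sketch only: the same chat call with the openai>=1.0.0 client interface.
    from openai import OpenAI

    openai_client = OpenAI()  # reads OPENAI_API_KEY from the environment

    chat_response = openai_client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "-"},
            {"role": "user", "content": prompt},
        ],
        temperature=0.7,
    )
    reply = chat_response.choices[0].message.content.strip()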

View File

@@ -1,5 +1,5 @@
 python-dotenv>=1.0.0
-openai
+openai>=1.0.0
 fastapi>=0.95
 uvicorn>=0.22
 redis>=4.5

View File

@@ -86,9 +86,14 @@ async def main() -> None:
     }
     headers = {"Authorization": f"Bearer {AI_HANDLER_TOKEN}"}
+    data = {}  # ensure data is defined even if the request below fails
     async with httpx.AsyncClient() as http:
+        try:
             resp = await http.post(f"{AI_HANDLER_URL}/api/v1/message", json=payload, headers=headers)
             resp.raise_for_status()
             data = resp.json()
+        except httpx.HTTPStatusError as e:
+            logger.error(f"HTTP error: {e.response.status_code} - {e.response.text}")
+        except Exception:
+            logger.exception("Error while calling AI handler")
     if data.get("reply"):
         client.send_message(event["room_id"], data["reply"])
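
Taken together, the diffs imply a handler-side contract: the bot POSTs userId, content, and eventId to /api/v1/message with a bearer token, and the handler answers with a JSON body containing a reply. A minimal sketch of that endpoint, including the Redis lookup suggested by the "Cache reply for idempotency" comment, could look like the following; the MessagePayload model, the Redis wiring, and the {"reply": ...} response shape are assumptions rather than code from the repo, and the bearer-token check is omitted:

    # Sketch only: hypothetical handler endpoint matching the fields used in the diffs above.
    import os

    import redis
    from fastapi import FastAPI
    from openai import OpenAI
    from pydantic import BaseModel

    app = FastAPI()
    r = redis.Redis(host=os.getenv("REDIS_HOST", "localhost"), decode_responses=True)
    openai_client = OpenAI()  # reads OPENAI_API_KEY from the environment


    class MessagePayload(BaseModel):
        userId: str
        content: str
        eventId: str


    @app.post("/api/v1/message")
    async def message(payload: MessagePayload) -> dict:
        # Idempotency: if this Matrix event was already answered, return the cached reply.
        cached = r.get(payload.eventId)
        if cached:
            return {"reply": cached}

        prompt = f"User {payload.userId} said: {payload.content}\nBot:"
        chat_response = openai_client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,
        )
        reply = chat_response.choices[0].message.content.strip()

        # Cache the reply for an hour so retries of the same event stay idempotent.
        r.set(payload.eventId, reply, ex=3600)
        return {"reply": reply}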