into services
@@ -2,13 +2,27 @@ import os
 from dotenv import load_dotenv
 from fastapi import FastAPI, Header, HTTPException
 from pydantic import BaseModel
-import openai
+from openai import OpenAI
 import logging
+import redis
 
 LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
 AI_TOKEN = os.environ["AI_HANDLER_TOKEN"]
-openai.api_key = os.environ["OPENAI_API_KEY"]
+
+AIclient = OpenAI(
+    api_key=os.environ["OPENAI_API_KEY"],
+)
+
+r = redis.Redis.from_url(os.environ.get("REDIS_URL", "redis://redis:6379"))
+
+# --- Logging Setup ---
+numeric_level = getattr(logging, LOG_LEVEL, logging.INFO)
+logging.basicConfig(
+    level=numeric_level,
+    format="%(asctime)s %(levelname)s %(name)s: %(message)s"
+)
+logger = logging.getLogger(__name__)
 
 class MessagePayload(BaseModel):
     roomId: str
     userId: str
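The first hunk replaces the legacy module-level OpenAI configuration with a v1-style client instance and opens a Redis connection for caching. For orientation, the two SDK call styles compare roughly as follows (an illustrative sketch, not part of this commit; note that the legacy `.choices[0].text` access only ever applied to the old text-completions response shape, which is the bug the second hunk fixes):

    # openai < 1.0 (legacy, removed by this commit):
    #   openai.api_key = "..."
    #   resp = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=[...])
    #   text = resp["choices"][0]["message"]["content"]
    #
    # openai >= 1.0 (the style this commit adopts):
    from openai import OpenAI

    client = OpenAI()  # also reads OPENAI_API_KEY from the environment if api_key is omitted
    resp = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "ping"}],
    )
    text = resp.choices[0].message.content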
@@ -32,15 +46,19 @@ async def message(
 
     # Build prompt (very simple example)
     prompt = f"User {payload.userId} said: {payload.content}\nBot:"
-    chat_response = opeai.ChatCompletion.create(
+    chat_response = AIclient.chat.completions.create(
         model="gpt-3.5-turbo",
         messages=[
-            {"role": "system", "content": "-"},
+            {"role": "system", "content": "You are a helpful assistant."},
             {"role": "user", "content": prompt}
-        ]
+        ],
+        max_tokens=150,
+        n=1,
+        stop=None,
+        temperature=0.7,
     )
-    reply = chat_response.choices[0].text.strip()
 
+    reply = chat_response.choices[0].message.content.strip()
 
     # Cache reply for idempotency
     r.set(payload.eventId, reply, ex=3600)
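The second hunk only shows the write side of the idempotency cache (r.set(payload.eventId, reply, ex=3600)). The read side is outside this diff; a hypothetical guard at the top of the handler, reusing the r client and payload.eventId from above, might look like this sketch:

    # Hypothetical idempotency guard (assumed, not shown in this diff):
    # if this eventId was answered within the last hour, return the cached
    # reply instead of calling OpenAI again.
    cached = r.get(payload.eventId)
    if cached is not None:
        return {"reply": cached.decode()}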