Django + OpenAI WebSocket live chat

This code works with openai==0.28:

python
# consumers.py
import asyncio
import json
import os

import openai
from channels.generic.websocket import AsyncWebsocketConsumer
from django.conf import settings

# Make sure DJANGO_SETTINGS_MODULE is set before settings are accessed.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AutoServer.settings')
openai.api_key = settings.OPENAI_API_KEY


class ChatConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

    async def disconnect(self, close_code):
        pass

    async def receive(self, text_data):
        data = json.loads(text_data)
        prompt = data['prompt']

        # Run the blocking OpenAI call in a worker thread so the event loop
        # keeps handling the websocket while tokens stream in.
        loop = asyncio.get_event_loop()
        await loop.run_in_executor(None, self.stream_openai_response, prompt)

    def stream_openai_response(self, prompt):
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        )
        for chunk in response:
            if chunk.choices[0].delta.get('content'):
                # Forward each streamed token to the browser as it arrives.
                asyncio.run(self.send_streaming_data(chunk.choices[0].delta['content']))

    async def send_streaming_data(self, content):
        await self.send(text_data=json.dumps({
            'response': content
        }))
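
The consumer also needs the usual Channels wiring to be reachable at a WebSocket URL. Below is a minimal sketch, assuming the project is named AutoServer (as in the consumer above), the app is called chat, and the path is ws/chat/; all three names are placeholders for your own layout.

python
# routing.py -- hypothetical module; adjust the app name to your layout
from django.urls import re_path

from . import consumers

websocket_urlpatterns = [
    re_path(r'ws/chat/$', consumers.ChatConsumer.as_asgi()),
]


# asgi.py -- assumes the project is named AutoServer, as in the consumer above
import os

from channels.routing import ProtocolTypeRouter, URLRouter
from django.core.asgi import get_asgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AutoServer.settings')
django_asgi_app = get_asgi_application()

from chat import routing  # "chat" is a placeholder app name

application = ProtocolTypeRouter({
    'http': django_asgi_app,
    'websocket': URLRouter(routing.websocket_urlpatterns),
})

settings.OPENAI_API_KEY is assumed to be defined in AutoServer/settings.py, and ASGI_APPLICATION should point at AutoServer.asgi.application; serve it with Daphne (daphne AutoServer.asgi:application) or Uvicorn.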

This version works with the newer client, openai==1.31:

python
# consumers.py
import asyncio
import json
import os

from channels.generic.websocket import AsyncWebsocketConsumer
from django.conf import settings
from openai import OpenAI

# Make sure DJANGO_SETTINGS_MODULE is set before settings are accessed.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AutoServer.settings')
client = OpenAI(api_key=settings.OPENAI_API_KEY)


class ChatConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

    async def disconnect(self, close_code):
        pass

    async def receive(self, text_data):
        data = json.loads(text_data)
        prompt = data['prompt']
        # Key part: run the blocking OpenAI call in a worker thread so the
        # event loop keeps handling the websocket while tokens stream in.
        loop = asyncio.get_event_loop()
        await loop.run_in_executor(None, self.stream_openai_response, prompt)

    def stream_openai_response(self, prompt):
        response = client.chat.completions.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
            stream=True,
        )
        for chunk in response:
            if chunk.choices[0].delta.content:
                # Forward each streamed token to the browser as it arrives.
                asyncio.run(self.send_streaming_data(chunk.choices[0].delta.content))

    async def send_streaming_data(self, content):
        await self.send(text_data=json.dumps({
            'response': content
        }))
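
To check the streaming end to end without writing a frontend, a throwaway client built on the websockets package is enough. A minimal sketch, assuming the dev server runs on localhost:8000 and the ws/chat/ route sketched above:

python
# quick_ws_test.py -- throwaway manual test client (hypothetical file)
import asyncio
import json

import websockets  # pip install websockets


async def main():
    uri = 'ws://localhost:8000/ws/chat/'  # assumes the ws/chat/ route above
    async with websockets.connect(uri) as ws:
        await ws.send(json.dumps({'prompt': 'Say hello in five words.'}))
        # The consumer never sends an end-of-stream marker, so just print
        # chunks as they arrive and stop with Ctrl+C.
        while True:
            message = await ws.recv()
            print(json.loads(message)['response'], end='', flush=True)


asyncio.run(main())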