Django + OpenAI WebSocket live chat

The consumer below streams ChatCompletion output to the browser token by token over a Django Channels WebSocket. This version works with openai==0.28:

```python
# consumers.py
import json
import os

from channels.generic.websocket import AsyncWebsocketConsumer
import openai
import asyncio
from django.conf import settings
# Make sure the Django settings module is configured before reading the key
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AutoServer.settings')
openai.api_key = settings.OPENAI_API_KEY


class ChatConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

    async def disconnect(self, close_code):
        pass

    async def receive(self, text_data):
        data = json.loads(text_data)
        prompt = data['prompt']

        # Run the blocking OpenAI call in a worker thread so the event loop stays free
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, self.stream_openai_response, prompt)

    def stream_openai_response(self, prompt):
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        )
        for chunk in response:
            content = chunk.choices[0].delta.get('content')
            if content:
                # Return each piece to the webpage as it streams in; asyncio.run()
                # starts a short-lived event loop in this worker thread per chunk
                asyncio.run(self.send_streaming_data(content))

    async def send_streaming_data(self, content):
        await self.send(text_data=json.dumps({
            'response': content
        }))
```

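One thing worth noting: `asyncio.run()` inside `stream_openai_response` creates and tears down a fresh event loop in the worker thread for every chunk. The version above is what the post ships and it works; an alternative is to hand each send back to the consumer's own loop with `asyncio.run_coroutine_threadsafe`. Below is a minimal sketch of that variant (the extra `loop` parameter is my addition, not part of the original code):

```python
# Variant of the 0.28 consumer: schedule each send on the consumer's own event
# loop instead of calling asyncio.run() once per chunk. Sketch only.
import asyncio
import json

import openai  # openai==0.28; api key assumed to be set as in the post
from channels.generic.websocket import AsyncWebsocketConsumer


class ChatConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

    async def receive(self, text_data):
        prompt = json.loads(text_data)['prompt']
        loop = asyncio.get_running_loop()  # the loop this consumer runs on
        await loop.run_in_executor(None, self.stream_openai_response, prompt, loop)

    def stream_openai_response(self, prompt, loop):
        # Runs in a worker thread, so the blocking SDK call is fine here
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        )
        for chunk in response:
            content = chunk.choices[0].delta.get('content')
            if content:
                # Thread-safe hand-off of the send coroutine to the original loop
                asyncio.run_coroutine_threadsafe(self.send_streaming_data(content), loop)

    async def send_streaming_data(self, content):
        await self.send(text_data=json.dumps({'response': content}))
```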
The post's consumer also works on the 1.x SDK (tested with openai==1.31):

```python
import asyncio
import json
import os

from channels.generic.websocket import AsyncWebsocketConsumer
from django.conf import settings
from openai import OpenAI

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AutoServer.settings')
api_key = settings.OPENAI_API_KEY
client = OpenAI(api_key=api_key)


class ChatConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

    async def disconnect(self, close_code):
        pass

    async def receive(self, text_data):
        data = json.loads(text_data)
        prompt = data['prompt']
        # Key part: hand the blocking streaming call off to a worker thread
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, self.stream_openai_response, prompt)

    def stream_openai_response(self, prompt):
        response = client.chat.completions.create(
            model="gpt-4",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
            stream=True,
        )
        for chunk in response:
            content = chunk.choices[0].delta.content
            if content:
                # Forward each streamed token to the WebSocket client
                asyncio.run(self.send_streaming_data(content))

    async def send_streaming_data(self, content):
        await self.send(text_data=json.dumps({
            'response': content
        }))
```
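Neither snippet shows the routing side. For completeness, here is a minimal ASGI wiring sketch; the `ws/chat/` URL and the app name `chat` are assumptions, not taken from the original post:

```python
# asgi.py (sketch) -- wires ChatConsumer into Channels routing.
# The URL "ws/chat/" and the app name "chat" are assumptions.
import os

from channels.routing import ProtocolTypeRouter, URLRouter
from django.core.asgi import get_asgi_application
from django.urls import re_path

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "AutoServer.settings")
django_asgi_app = get_asgi_application()  # initialise Django before importing consumers

from chat import consumers  # noqa: E402  (assumed app name)

application = ProtocolTypeRouter({
    "http": django_asgi_app,
    "websocket": URLRouter([
        re_path(r"^ws/chat/$", consumers.ChatConsumer.as_asgi()),
    ]),
})
```

With this in place, a client connects to `ws://<host>/ws/chat/`, sends `{"prompt": "..."}`, and receives one `{"response": "..."}` frame per streamed token.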