Generate the answer in one shot:

from flask import Flask, request, jsonify, send_file
import openai
import urllib3
urllib3.disable_warnings()


openai.api_key = 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'

app = Flask(__name__)


@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def send_static(path):
    # Serve the static front-end files; fall back to index.html for the root path
    if path == '':
        path = 'index.html'
    return send_file('/app/' + path)


@app.route('/chat', methods=['POST'])
def get_chat_completion():
    # Return the complete generated answer in a single response
    messages_bro = request.get_json()['messages']
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages_bro
    )
    return jsonify(completion)



if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=False)
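
To test the endpoint, here is a minimal client sketch, assuming the server above is running locally on Flask's default port 5000 and the requests package is installed:

import requests

# Hypothetical local test against the /chat endpoint defined above
resp = requests.post(
    "http://localhost:5000/chat",
    json={"messages": [{"role": "user", "content": "Hello!"}]},
)
data = resp.json()
# The reply sits in the standard ChatCompletion response structure
print(data["choices"][0]["message"]["content"])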

Implement streaming (typewriter effect):

from flask import Flask, request, Response, send_file
import openai
import json
import urllib3
urllib3.disable_warnings()


openai.api_key = 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'

app = Flask(__name__)


@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def send_static(path):
    if path == '':
        path = 'index.html'
    return send_file('C:/Users/xxxxx/Desktop/app/' + path)



@app.route('/chat', methods=['POST'])
def get_chat_completion():
    messages_bro = request.get_json()['messages']
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages_bro,
        stream=True
    )
    def generate():
        # Generator: yield each chunk as a server-sent event ("data: ..." lines)
        for message in completion:
            yield "data: " + json.dumps(message) + "\n\n"
        yield "data: [DONE]\n\n"
    return Response(generate(), content_type='text/event-stream')




if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=False, port=5200)
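
On the client side, the stream can be read line by line to reproduce the typewriter effect in a terminal. Below is a minimal sketch, assuming the server above is running locally on port 5200 and the requests package is installed; it parses each "data:" line, stops at [DONE], and prints the incremental delta content as it arrives:

import json
import requests

# Hypothetical local test against the streaming /chat endpoint defined above
with requests.post(
    "http://localhost:5200/chat",
    json={"messages": [{"role": "user", "content": "Tell me a joke"}]},
    stream=True,
) as resp:
    for raw in resp.iter_lines():
        if not raw:
            continue  # skip blank keep-alive lines between SSE events
        line = raw.decode("utf-8")
        if not line.startswith("data: "):
            continue
        payload = line[len("data: "):]
        if payload == "[DONE]":
            break
        chunk = json.loads(payload)
        # Each streamed chunk carries an incremental "delta" with part of the answer
        delta = chunk["choices"][0]["delta"]
        print(delta.get("content", ""), end="", flush=True)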
