其他备份

发布时间: 2023-11-30 09:44:28  作者: 朝朝暮Mu
@router.post('/openai', response_class=EventSourceResponse)
async def simple_chat(user_input=Body(..., embed=True, alias='input'),
                      chat_history=Body(..., embed=True, alias='chat_history')):
    """Stream an OpenAI chat completion to the client as Server-Sent Events.

    Request body (embedded fields):
        input: the user's current message, interpolated as ``human_input``.
        chat_history: prior conversation text, interpolated as ``chat_history``.

    Returns:
        EventSourceResponse that emits one JSON-encoded token per event until
        the callback handler enqueues the ``StopIteration`` sentinel.
    """
    callback_handler = StreamingCallbackHandler()
    chat_prompt = PromptTemplate(
        input_variables=['human_input', 'chat_history'],
        template='''you are a robot having a conversation with a human being.
        previous conversation:
        {chat_history}
        
        human: {human_input}
        robot:'''
    )
    chain = LLMChain(
        llm=OpenAI(
            temperature=0.8,
            max_retries=1,
            max_tokens=2048,
            streaming=True,
            verbose=True,
        ),
        prompt=chat_prompt,
    )
    # BUG FIX: the original used asyncio.run_coroutine_threadsafe(task, loop),
    # which is intended for scheduling from *another* thread; inside a running
    # coroutine the correct call is asyncio.create_task. It also discarded the
    # returned future, so nothing kept a reference to the running task — we
    # keep one here to prevent it from being garbage-collected mid-generation.
    generation_task = asyncio.create_task(  # noqa: F841 — reference keeps the task alive
        chain.arun({'human_input': user_input, 'chat_history': chat_history},
                   callbacks=[callback_handler])
    )

    def resp():
        # NOTE(review): callback_handler.tokens looks like a blocking
        # queue.Queue — confirm; sse-starlette iterates sync generators in a
        # threadpool, so blocking .get() calls stay off the event loop.
        while True:
            tk = callback_handler.tokens.get()
            # The handler pushes the StopIteration class itself as the
            # end-of-stream sentinel; a plain break replaces the original
            # raise-then-catch exception dance.
            if tk is StopIteration:
                break
            yield ServerSentEvent(data=json.dumps(tk, ensure_ascii=False))

    return EventSourceResponse(resp())