Fix streaming in Aura

This commit is contained in:
Heiner Lohaus 2023-12-23 20:33:23 +01:00
parent bcd1cdf4e8
commit 554609dc73
1 changed file with 11 additions and 12 deletions

View File

@@ -32,25 +32,24 @@ class Aura(AsyncGeneratorProvider):
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
} }
async with ClientSession(headers=headers) as session: async with ClientSession(headers=headers) as session:
system_prompt="" new_messages = []
system_message=[] system_message = []
for message in messages: for message in messages:
if message["role"]=="system": if message["role"] == "system":
system_prompt +=message["content"] system_message.append(message["content"])
else: else:
system_message.append(message) new_messages.append(message)
data = { data = {
"model": { "model": {
"id": "openchat_v3.2_mistral", "id": "openchat_v3.2_mistral",
"name": "OpenChat Aura", "name": "OpenChat Aura",
"maxLength": 24576, "maxLength": 24576,
"tokenLimit": 8192 "tokenLimit": 8192
}, },
"messages": system_message, "messages": new_messages,
"key": "", "key": "",
"prompt": f"{system_prompt}", "prompt": "\n".join(system_message),
"temperature": 0.5 "temperature": 0.5
} }
async with session.post(f"{cls.url}/api/chat",json=data,proxy=proxy) as response: async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
yield await response.text() return response.content.iter_any()