Merge pull request #997 from hlohaus/all

Fix timeout in create_async
Tekky 2023-10-06 18:41:23 +01:00 committed by GitHub
commit 796e4d7b55
9 changed files with 17 additions and 30 deletions
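
The change repeated across the provider files below wraps the numeric timeout before it reaches aiohttp: ClientSession's timeout argument is meant to be a ClientTimeout object, and ClientTimeout's first positional argument is the total timeout in seconds. A minimal sketch of the pattern, using an illustrative fetch helper that is not part of the diff:

from aiohttp import ClientSession, ClientTimeout

async def fetch(url: str, timeout: int = 30) -> str:
    # Wrap the plain seconds value; ClientTimeout(timeout) sets the total timeout.
    async with ClientSession(timeout=ClientTimeout(timeout)) as session:
        async with session.get(url) as response:
            response.raise_for_status()
            return await response.text()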

View File

@@ -1,11 +1,9 @@
 from __future__ import annotations
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 from ..typing import AsyncGenerator
 from .base_provider import AsyncGeneratorProvider
 class AiAsk(AsyncGeneratorProvider):
     url = "https://e.aiask.me"
     supports_gpt_35_turbo = True
@@ -24,7 +22,7 @@ class AiAsk(AsyncGeneratorProvider):
             "origin": cls.url,
             "referer": f"{cls.url}/chat",
         }
-        async with ClientSession(headers=headers, timeout=timeout) as session:
+        async with ClientSession(headers=headers, timeout=ClientTimeout(timeout)) as session:
             data = {
                 "continuous": True,
                 "id": "fRMSQtuHl91A4De9cCvKD",

View File

@@ -1,6 +1,6 @@
 from __future__ import annotations
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 from .base_provider import AsyncProvider, format_prompt
@@ -34,7 +34,7 @@ class Aichat(AsyncProvider):
             "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36",
         }
         async with ClientSession(
-            headers=headers, timeout=timeout
+            headers=headers, timeout=ClientTimeout(timeout)
         ) as session:
             json_data = {
                 "message": format_prompt(messages),

View File

@@ -250,7 +250,7 @@ async def stream_generate(
         conversation = await create_conversation(session)
         try:
             async with session.ws_connect(
-                f'wss://sydney.bing.com/sydney/ChatHub?sec_access_token={urllib.parse.quote_plus(conversation.conversationSignature)}',
+                f'wss://sydney.bing.com/sydney/ChatHub',
                 autoping=False,
+                params={'sec_access_token': conversation.conversationSignature}
             ) as wss:
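
Besides the timeout wrapping, the Bing change above stops quoting the access token into the websocket URL by hand and passes it through ws_connect's params argument, so aiohttp builds and URL-encodes the query string itself. A rough sketch of the resulting call shape, with an illustrative wrapper name and placeholder token value:

import aiohttp

async def open_chathub(signature: str):
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(
            "wss://sydney.bing.com/sydney/ChatHub",
            autoping=False,
            params={"sec_access_token": signature},  # aiohttp encodes the value into the query string
        ) as wss:
            ...  # read and send websocket messages here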

View File

@@ -1,7 +1,5 @@
 from __future__ import annotations
-import time, hashlib
 from ..typing import AsyncGenerator
 from ..requests import StreamSession
 from .base_provider import AsyncGeneratorProvider
@@ -21,11 +19,9 @@ class ChatForAi(AsyncGeneratorProvider):
         **kwargs
     ) -> AsyncGenerator:
         async with StreamSession(impersonate="chrome107", timeout=timeout) as session:
-            conversation_id = f"id_{int(time.time())}"
             prompt = messages[-1]["content"]
-            timestamp = int(time.time())
             data = {
-                "conversationId": conversation_id,
+                "conversationId": "temp",
                 "conversationType": "chat_continuous",
                 "botId": "chat_continuous",
                 "globalSettings":{
@@ -39,8 +35,6 @@ class ChatForAi(AsyncGeneratorProvider):
                 "botSettings": {},
                 "prompt": prompt,
                 "messages": messages,
-                "sign": generate_signature(timestamp, conversation_id, prompt),
-                "timestamp": timestamp
             }
             async with session.post(f"{cls.url}/api/handle/provider-openai", json=data) as response:
                 response.raise_for_status()
@@ -57,7 +51,3 @@
         ]
         param = ", ".join([": ".join(p) for p in params])
         return f"g4f.provider.{cls.__name__} supports: ({param})"
-def generate_signature(timestamp, id, prompt):
-    data = f"{timestamp}:{id}:{prompt}:6B46K4pt"
-    return hashlib.sha256(data.encode()).hexdigest()

View File

@@ -1,7 +1,7 @@
 from __future__ import annotations
 import re
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 from .base_provider import AsyncProvider, format_prompt
@@ -40,7 +40,7 @@ class ChatgptAi(AsyncProvider):
             "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
         }
         async with ClientSession(
-            headers=headers, timeout=timeout
+            headers=headers, timeout=ClientTimeout(timeout)
         ) as session:
             if not cls._nonce:
                 async with session.get(cls.url, proxy=proxy) as response:

View File

@@ -1,7 +1,7 @@
 from __future__ import annotations
 import time, json, re
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 from typing import AsyncGenerator
 from .base_provider import AsyncGeneratorProvider
@@ -34,7 +34,7 @@ class ChatgptDemo(AsyncGeneratorProvider):
             "sec-fetch-site": "same-origin",
             "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36"
         }
-        async with ClientSession(headers=headers, timeout=timeout) as session:
+        async with ClientSession(headers=headers, timeout=ClientTimeout(timeout)) as session:
             async with session.get(f"{cls.url}/", proxy=proxy) as response:
                 response.raise_for_status()
                 response = await response.text()

View File

@@ -1,6 +1,6 @@
 from __future__ import annotations
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 import json
 from ..typing import AsyncGenerator
@@ -32,7 +32,7 @@ class GptGo(AsyncGeneratorProvider):
             "Sec-Fetch-Site" : "same-origin",
         }
         async with ClientSession(
-            headers=headers, timeout=timeout
+            headers=headers, timeout=ClientTimeout(timeout)
         ) as session:
             async with session.get(
                 "https://gptgo.ai/action_get_token.php",

View File

@@ -1,9 +1,8 @@
 from __future__ import annotations
 import json
 import uuid
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 from ..typing import AsyncGenerator
 from .base_provider import AsyncGeneratorProvider
@@ -55,7 +54,7 @@ class Liaobots(AsyncGeneratorProvider):
             "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
         }
         async with ClientSession(
-            headers=headers, timeout=timeout
+            headers=headers, timeout=ClientTimeout(timeout)
         ) as session:
             cls._auth_code = auth if isinstance(auth, str) else cls._auth_code
             if not cls._auth_code:

View File

@@ -1,7 +1,7 @@
 from __future__ import annotations
 import json
-from aiohttp import ClientSession
+from aiohttp import ClientSession, ClientTimeout
 from .base_provider import AsyncGeneratorProvider
 from ..typing import AsyncGenerator
@@ -41,7 +41,7 @@ class Vitalentum(AsyncGeneratorProvider):
             **kwargs
         }
         async with ClientSession(
-            headers=headers, timeout=timeout
+            headers=headers, timeout=ClientTimeout(timeout)
         ) as session:
             async with session.post(cls.url + "/api/converse-edge", json=data, proxy=proxy) as response:
                 response.raise_for_status()