mirror of https://github.com/xtekky/gpt4free.git
remove DeepAi
This commit is contained in:
parent
0ac32656af
commit
c9c573a6e6
|
@ -215,7 +215,6 @@ from g4f.Provider import (
|
|||
Bing,
|
||||
ChatBase,
|
||||
ChatgptAi,
|
||||
DeepAi,
|
||||
H2o,
|
||||
HuggingChat,
|
||||
OpenAssistant,
|
||||
|
@ -282,7 +281,6 @@ _providers = [
|
|||
g4f.Provider.Aichat,
|
||||
g4f.Provider.ChatBase,
|
||||
g4f.Provider.Bing,
|
||||
g4f.Provider.DeepAi,
|
||||
g4f.Provider.GptGo,
|
||||
g4f.Provider.You,
|
||||
g4f.Provider.Yqcloud,
|
||||
|
@ -394,7 +392,6 @@ if __name__ == "__main__":
|
|||
| [chatgpt.ai](https://chatgpt.ai/) | `g4f.Provider.ChatgptAi` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
|
||||
| [opchatgpts.net](https://opchatgpts.net) | `g4f.Provider.ChatgptLogin` | ✔️ | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
|
||||
| [ava-ai-ef611.web.app](https://ava-ai-ef611.web.app) | `g4f.Provider.CodeLinkAva` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
|
||||
| [deepai.org](https://deepai.org) | `g4f.Provider.DeepAi` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
|
||||
| [gptgo.ai](https://gptgo.ai) | `g4f.Provider.GptGo` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
|
||||
| [gpt-gm.h2o.ai](https://gpt-gm.h2o.ai) | `g4f.Provider.H2o` | ❌ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
|
||||
| [huggingface.co](https://huggingface.co/chat/) | `g4f.Provider.HuggingChat` | ❌ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
|
||||
|
|
|
@ -1,80 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import js2py
|
||||
import random
|
||||
import hashlib
|
||||
from aiohttp import ClientSession
|
||||
|
||||
from ..typing import AsyncResult, Messages
|
||||
from .base_provider import AsyncGeneratorProvider
|
||||
|
||||
|
||||
class DeepAi(AsyncGeneratorProvider):
    """Provider for deepai.org's free chat endpoint.

    Streams completion chunks from DeepAi's API. The per-request API key is
    generated client-side by evaluating (via the third-party ``js2py``
    package) the same obfuscated JavaScript MD5 routine the website ships,
    bound to a fixed desktop User-Agent string.
    """

    # base URL of the upstream service
    url = "https://deepai.org"
    # provider is considered operational
    working = True
    # only the gpt-3.5-turbo model family is exposed
    supports_gpt_35_turbo = True

    @staticmethod
    async def create_async_generator(
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield response text chunks for *messages* as they stream in.

        :param model: model name (accepted for interface compatibility;
            not forwarded to the endpoint — NOTE(review): confirm intended)
        :param messages: chat history, serialized to JSON for the payload
        :param proxy: optional HTTP proxy URL passed to aiohttp
        :param kwargs: ``headers`` (dict) is merged into the request headers;
            other keys are ignored
        :raises aiohttp.ClientResponseError: on a non-2xx response
        """
        # Fixed User-Agent; the token below is cryptographically bound to it,
        # so it must match the "agent" variable embedded in the JS.
        agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36"

        # Obfuscated JS lifted from deepai.org: `f` is a compact MD5
        # implementation whose hex digest is emitted reversed; `h` is a random
        # 11-digit salt. The final expression value ("tryit-<h>-<digest>") is
        # what js2py.eval_js returns. Do not reformat — this is runtime code.
        token_js = """
var agent = '""" + agent + """'
var a, b, c, d, e, h, f, l, g, k, m, n, r, x, C, E, N, F, T, O, P, w, D, G, Q, R, W, I, aa, fa, na, oa, ha, ba, X, ia, ja, ka, J, la, K, L, ca, S, U, M, ma, B, da, V, Y;
h = Math.round(1E11 * Math.random()) + "";
f = function() {
    for (var p = [], r = 0; 64 > r;) p[r] = 0 | 4294967296 * Math.sin(++r % Math.PI);
    return function(z) {
        var B, G, H, ca = [B = 1732584193, G = 4023233417, ~B, ~G],
            X = [],
            x = unescape(encodeURI(z)) + "\u0080",
            v = x.length;
        z = --v / 4 + 2 | 15;
        for (X[--z] = 8 * v; ~v;) X[v >> 2] |= x.charCodeAt(v) << 8 * v--;
        for (r = x = 0; r < z; r += 16) {
            for (v = ca; 64 > x; v = [H = v[3], B + ((H = v[0] + [B & G | ~B & H, H & B | ~H & G, B ^ G ^ H, G ^ (B | ~H)][v = x >> 4] + p[x] + ~~X[r | [x, 5 * x + 1, 3 * x + 5, 7 * x][v] & 15]) << (v = [7, 12, 17, 22, 5, 9, 14, 20, 4, 11, 16, 23, 6, 10, 15, 21][4 * v + x++ % 4]) | H >>> -v), B, G]) B = v[1] | 0, G = v[2];
            for (x = 4; x;) ca[--x] += v[x]
        }
        for (z = ""; 32 > x;) z += (ca[x >> 3] >> 4 * (1 ^ x++) & 15).toString(16);
        return z.split("").reverse().join("")
    }
}();

"tryit-" + h + "-" + f(agent + f(agent + f(agent + h + "x")));
"""

        # Endpoint expects the full chat history as a JSON string.
        payload = {"chat_style": "chat", "chatHistory": json.dumps(messages)}
        # Evaluate the JS above; result is the "tryit-..." api key string.
        api_key = js2py.eval_js(token_js)
        headers = {
            "api-key": api_key,
            "User-Agent": agent,
            # caller-supplied headers win over the defaults above
            **kwargs.get("headers", {})
        }
        async with ClientSession(
            headers=headers
        ) as session:
            # Endpoint path is assembled piecewise to spell
            # "hacking_is_a_crime" — presumably to dodge naive greps.
            fill = "ing_is"
            fill = f"ack{fill}_a_crim"
            async with session.post(f"https://api.deepai.org/h{fill}e", proxy=proxy, data=payload) as response:
                response.raise_for_status()
                # Stream raw chunks as they arrive; chunk boundaries may split
                # multi-byte UTF-8 sequences, hence the decode fallback.
                async for stream in response.content.iter_any():
                    if stream:
                        try:
                            yield stream.decode("utf-8")
                        except UnicodeDecodeError:
                            # best-effort salvage of a split/odd encoding
                            yield stream.decode("unicode-escape")
|
||||
|
||||
|
||||
def get_api_key(user_agent: str) -> str:
    """Build a DeepAi "tryit" API key bound to *user_agent*.

    Pure-Python port of the site's obfuscated JavaScript token routine:
    a random 11-digit salt is combined with three nested rounds of a
    reversed MD5 hex digest over the user agent string.

    :param user_agent: browser User-Agent the key is bound to
    :return: token of the form ``tryit-<salt>-<digest>``
    """
    # Same salt formula as the JS: Math.round(1E11 * Math.random()).
    salt = str(round(1E11 * random.random()))

    # Renamed from `hash` to avoid shadowing the builtin.
    def _reversed_md5(data: str) -> str:
        # The JS MD5 emits its hex digest in reverse order, hence [::-1].
        return hashlib.md5(data.encode()).hexdigest()[::-1]

    return f"tryit-{salt}-" + _reversed_md5(
        user_agent + _reversed_md5(user_agent + _reversed_md5(user_agent + salt + "x"))
    )
|
|
@ -16,7 +16,6 @@ from .ChatgptDemo import ChatgptDemo
|
|||
from .ChatgptDuo import ChatgptDuo
|
||||
from .ChatgptX import ChatgptX
|
||||
from .Cromicle import Cromicle
|
||||
from .DeepAi import DeepAi
|
||||
from .FreeGpt import FreeGpt
|
||||
from .GPTalk import GPTalk
|
||||
from .GptForLove import GptForLove
|
||||
|
@ -64,7 +63,6 @@ class ProviderUtils:
|
|||
'ChatgptX': ChatgptX,
|
||||
'CodeLinkAva': CodeLinkAva,
|
||||
'Cromicle': Cromicle,
|
||||
'DeepAi': DeepAi,
|
||||
'DfeHub': DfeHub,
|
||||
'EasyChat': EasyChat,
|
||||
'Equing': Equing,
|
||||
|
@ -127,7 +125,6 @@ __all__ = [
|
|||
'ChatgptX',
|
||||
'Cromicle',
|
||||
'CodeLinkAva',
|
||||
'DeepAi',
|
||||
'DfeHub',
|
||||
'EasyChat',
|
||||
'Forefront',
|
||||
|
|
|
@ -153,7 +153,6 @@
|
|||
<option value="g4f.Provider.Myshell">Myshell</option>
|
||||
<option value="g4f.Provider.FreeGpt">FreeGpt</option>
|
||||
<option value="g4f.Provider.Vercel">Vercel</option>
|
||||
<option value="g4f.Provider.DeepAi">DeepAi</option>
|
||||
<option value="g4f.Provider.Aichat">Aichat</option>
|
||||
<option value="g4f.Provider.GPTalk">GPTalk</option>
|
||||
<option value="g4f.Provider.GptGod">GptGod</option>
|
||||
|
|
|
@ -16,7 +16,6 @@ from .Provider import (
|
|||
Myshell,
|
||||
FreeGpt,
|
||||
Vercel,
|
||||
DeepAi,
|
||||
Aichat,
|
||||
GPTalk,
|
||||
GptGod,
|
||||
|
@ -45,7 +44,7 @@ default = Model(
|
|||
Yqcloud, # Answers short questions in chinese
|
||||
ChatBase, # Don't want to answer creatively
|
||||
ChatgptDuo, # Include search results
|
||||
Aibn, Aichat, ChatgptAi, ChatgptLogin, DeepAi, FreeGpt, GptGo, Myshell, Ylokh,
|
||||
Aibn, Aichat, ChatgptAi, ChatgptLogin, FreeGpt, GptGo, Myshell, Ylokh,
|
||||
])
|
||||
)
|
||||
|
||||
|
@ -65,7 +64,7 @@ gpt_35_turbo = Model(
|
|||
name = 'gpt-3.5-turbo',
|
||||
base_provider = 'openai',
|
||||
best_provider = RetryProvider([
|
||||
DeepAi, Aivvm, ChatgptLogin, ChatgptAi, GptGo, AItianhu, Aichat, AItianhuSpace, Myshell, Aibn, FreeGpt, Ylokh
|
||||
Aivvm, ChatgptLogin, ChatgptAi, GptGo, AItianhu, Aichat, AItianhuSpace, Myshell, Aibn, FreeGpt, Ylokh
|
||||
])
|
||||
)
|
||||
|
||||
|
|
Loading…
Reference in New Issue