Add Aichatos and Blackbox Provider (#1822)

Providers added:
- Feedough - Added a new provider with GPT-3 model
- Cnote - Added a new provider with GPT-3.5 model
This commit is contained in:
kqlio67 2024-04-12 19:00:57 +00:00 committed by GitHub
parent f724c07f1b
commit fa739d2e7c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 229 additions and 2 deletions

57
g4f/Provider/Aichatos.py Normal file
View File

@@ -0,0 +1,57 @@
from __future__ import annotations
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt
import random
class Aichatos(AsyncGeneratorProvider):
    """Provider for chat10.aichatos.xyz, streaming completions from api.binjie.fun."""
    url = "https://chat10.aichatos.xyz"
    api = "https://api.binjie.fun"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield decoded text chunks streamed from the generateStream endpoint.

        Args:
            model: Model name (not used by this backend).
            messages: Conversation history, flattened via format_prompt.
            proxy: Optional proxy URL forwarded to aiohttp.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Content-Type": "application/json",
            "Origin": "https://chat10.aichatos.xyz",
            "DNT": "1",
            "Sec-GPC": "1",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "cross-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            prompt = format_prompt(messages)
            userId = random.randint(1000000000000, 9999999999999)
            # FIX: the original `system_message: str = "",` carried a trailing
            # comma, which silently bound system_message to the tuple ("",)
            # instead of the empty string.
            system_message: str = ""
            data = {
                "prompt": prompt,
                # FIX: the original literal "#/chat/{userId}" was missing the
                # f-prefix, so the random userId was never interpolated.
                "userId": f"#/chat/{userId}",
                "network": True,
                "system": system_message,
                "withoutContext": False,
                "stream": True,
            }
            async with session.post(f"{cls.api}/api/generateStream", json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content:
                    if chunk:
                        yield chunk.decode()

56
g4f/Provider/Blackbox.py Normal file
View File

@@ -0,0 +1,56 @@
from __future__ import annotations
import uuid
import secrets
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
class Blackbox(AsyncGeneratorProvider):
    """Provider for www.blackbox.ai streaming chat completions."""
    url = "https://www.blackbox.ai"
    working = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Stream decoded response chunks from the blackbox.ai chat endpoint.

        Args:
            model: Model name (not used by this backend).
            messages: Conversation history, posted verbatim as JSON.
            proxy: Optional proxy URL forwarded to aiohttp.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": cls.url,
            "Content-Type": "application/json",
            "Origin": cls.url,
            "DNT": "1",
            "Sec-GPC": "1",
            "Alt-Used": "www.blackbox.ai",
            "Connection": "keep-alive",
        }
        async with ClientSession(headers=headers) as session:
            # Fresh identifiers per request: a 32-hex-char chat id and a UUID4 user id.
            chat_id = secrets.token_hex(16)
            user_id = str(uuid.uuid4())
            payload = {
                "messages": messages,
                "id": chat_id,
                "userId": user_id,
                "codeModelMode": True,
                "agentMode": {},
                "trendingAgentMode": {},
                "isMicMode": False,
                "isChromeExt": False,
                "playgroundMode": False,
                "webSearchMode": False,
                "userSystemPrompt": "",
                "githubToken": None,
            }
            async with session.post(f"{cls.url}/api/chat", json=payload, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content:
                    if chunk:
                        yield chunk.decode()

58
g4f/Provider/Cnote.py Normal file
View File

@@ -0,0 +1,58 @@
from __future__ import annotations
import json
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt
class Cnote(AsyncGeneratorProvider):
    """Provider for f1.cnote.top, streaming via the p1api.xjai.pro free API."""
    url = "https://f1.cnote.top"
    api_url = "https://p1api.xjai.pro/freeapi/chat-process"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield the "text" field of each JSON chunk streamed by the backend.

        Args:
            model: Model name (not used by this backend).
            messages: Conversation history, flattened via format_prompt.
            proxy: Optional proxy URL forwarded to aiohttp.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Content-Type": "application/json",
            "Origin": cls.url,
            "DNT": "1",
            "Sec-GPC": "1",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "cross-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            prompt = format_prompt(messages)
            # FIX: the original `system_message: str = "",` carried a trailing
            # comma, which silently bound system_message to the tuple ("",)
            # instead of the empty string, so "systemMessage" was sent as a list.
            system_message: str = ""
            data = {
                "prompt": prompt,
                "systemMessage": system_message,
                "temperature": 0.8,
                "top_p": 1,
            }
            async with session.post(cls.api_url, json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content:
                    if chunk:
                        try:
                            # Chunks carry a "&KFw6loC9Qvy&" delimiter before the
                            # JSON payload; keep only the last segment.
                            data = json.loads(chunk.decode().split("&KFw6loC9Qvy&")[-1])
                            text = data.get("text", "")
                            yield text
                        except (json.JSONDecodeError, IndexError):
                            # Best-effort stream parsing: skip malformed chunks.
                            pass

52
g4f/Provider/Feedough.py Normal file
View File

@@ -0,0 +1,52 @@
from __future__ import annotations
import json
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt
class Feedough(AsyncGeneratorProvider):
    """Provider backed by feedough.com's AI prompt-generator admin-ajax endpoint."""
    url = "https://www.feedough.com"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield the generated message from a single (non-streaming) form POST.

        Args:
            model: Model name (not used by this backend).
            messages: Conversation history, flattened via format_prompt.
            proxy: Optional proxy URL forwarded to aiohttp.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": "https://www.feedough.com/ai-prompt-generator/",
            "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
            "Origin": "https://www.feedough.com",
            "DNT": "1",
            "Sec-GPC": "1",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            # WordPress admin-ajax expects an "action" plus form-encoded fields.
            form = {
                "action": "aixg_generate",
                "prompt": format_prompt(messages),
            }
            async with session.post(f"{cls.url}/wp-admin/admin-ajax.php", data=form, proxy=proxy) as response:
                response.raise_for_status()
                body = json.loads(await response.text())
                if body["success"]:
                    yield body["data"]["message"]

View File

@@ -142,7 +142,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
"model": models[cls.get_model(model)], "model": models[cls.get_model(model)],
"messages": messages, "messages": messages,
"key": "", "key": "",
"prompt": kwargs.get("system_message", "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully."), "prompt": kwargs.get("system_message", "You are a helpful assistant."),
} }
async with session.post( async with session.post(
"https://liaobots.work/api/chat", "https://liaobots.work/api/chat",

View File

@@ -11,18 +11,22 @@ from .selenium import *
from .needs_auth import * from .needs_auth import *
from .unfinished import * from .unfinished import *
from .Aichatos import Aichatos
from .Aura import Aura from .Aura import Aura
from .Bing import Bing from .Bing import Bing
from .BingCreateImages import BingCreateImages from .BingCreateImages import BingCreateImages
from .Blackbox import Blackbox
from .ChatForAi import ChatForAi from .ChatForAi import ChatForAi
from .Chatgpt4Online import Chatgpt4Online from .Chatgpt4Online import Chatgpt4Online
from .ChatgptAi import ChatgptAi from .ChatgptAi import ChatgptAi
from .ChatgptFree import ChatgptFree from .ChatgptFree import ChatgptFree
from .ChatgptNext import ChatgptNext from .ChatgptNext import ChatgptNext
from .ChatgptX import ChatgptX from .ChatgptX import ChatgptX
from .Cnote import Cnote
from .DeepInfra import DeepInfra from .DeepInfra import DeepInfra
from .DeepInfraImage import DeepInfraImage from .DeepInfraImage import DeepInfraImage
from .DuckDuckGo import DuckDuckGo from .DuckDuckGo import DuckDuckGo
from .Feedough import Feedough
from .FlowGpt import FlowGpt from .FlowGpt import FlowGpt
from .FreeChatgpt import FreeChatgpt from .FreeChatgpt import FreeChatgpt
from .FreeGpt import FreeGpt from .FreeGpt import FreeGpt