mirror of https://github.com/xtekky/gpt4free.git
Add labels to provider list
This commit is contained in:
parent
a107d3f2ff
commit
f724c07f1b
|
@ -6,7 +6,7 @@ import uuid
|
||||||
import time
|
import time
|
||||||
import asyncio
|
import asyncio
|
||||||
from urllib import parse
|
from urllib import parse
|
||||||
from datetime import datetime
|
from datetime import datetime, date
|
||||||
from aiohttp import ClientSession, ClientTimeout, BaseConnector, WSMsgType
|
from aiohttp import ClientSession, ClientTimeout, BaseConnector, WSMsgType
|
||||||
|
|
||||||
from ..typing import AsyncResult, Messages, ImageType, Cookies
|
from ..typing import AsyncResult, Messages, ImageType, Cookies
|
||||||
|
@ -32,6 +32,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
"""
|
"""
|
||||||
Bing provider for generating responses using the Bing API.
|
Bing provider for generating responses using the Bing API.
|
||||||
"""
|
"""
|
||||||
|
label = "Microsoft Copilot in Bing"
|
||||||
url = "https://bing.com/chat"
|
url = "https://bing.com/chat"
|
||||||
working = True
|
working = True
|
||||||
supports_message_history = True
|
supports_message_history = True
|
||||||
|
@ -47,7 +48,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
proxy: str = None,
|
proxy: str = None,
|
||||||
timeout: int = 900,
|
timeout: int = 900,
|
||||||
api_key: str = None,
|
api_key: str = None,
|
||||||
cookies: Cookies = {},
|
cookies: Cookies = None,
|
||||||
connector: BaseConnector = None,
|
connector: BaseConnector = None,
|
||||||
tone: str = None,
|
tone: str = None,
|
||||||
image: ImageType = None,
|
image: ImageType = None,
|
||||||
|
@ -69,8 +70,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
:return: An asynchronous result object.
|
:return: An asynchronous result object.
|
||||||
"""
|
"""
|
||||||
prompt = messages[-1]["content"]
|
prompt = messages[-1]["content"]
|
||||||
if api_key is not None:
|
|
||||||
cookies["_U"] = api_key
|
|
||||||
if context is None:
|
if context is None:
|
||||||
context = create_context(messages[:-1]) if len(messages) > 1 else None
|
context = create_context(messages[:-1]) if len(messages) > 1 else None
|
||||||
if tone is None:
|
if tone is None:
|
||||||
|
@ -79,7 +78,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
|
gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
|
||||||
|
|
||||||
return stream_generate(
|
return stream_generate(
|
||||||
prompt, tone, image, context, cookies,
|
prompt, tone, image, context, api_key, cookies,
|
||||||
get_connector(connector, proxy, True),
|
get_connector(connector, proxy, True),
|
||||||
proxy, web_search, gpt4_turbo, timeout,
|
proxy, web_search, gpt4_turbo, timeout,
|
||||||
**kwargs
|
**kwargs
|
||||||
|
@ -110,11 +109,15 @@ def get_default_cookies():
|
||||||
'SUID' : '',
|
'SUID' : '',
|
||||||
'SRCHUSR' : '',
|
'SRCHUSR' : '',
|
||||||
'SRCHHPGUSR' : f'HV={int(time.time())}',
|
'SRCHHPGUSR' : f'HV={int(time.time())}',
|
||||||
|
'BCP' : 'AD=1&AL=1&SM=1',
|
||||||
|
'_Rwho' : f'u=d&ts={date.today().isoformat()}',
|
||||||
}
|
}
|
||||||
|
|
||||||
def create_headers(cookies: Cookies = None) -> dict:
|
def create_headers(cookies: Cookies = None, api_key: str = None) -> dict:
|
||||||
if cookies is None:
|
if cookies is None:
|
||||||
cookies = get_default_cookies()
|
cookies = get_default_cookies()
|
||||||
|
if api_key is not None:
|
||||||
|
cookies["_U"] = api_key
|
||||||
headers = Defaults.headers.copy()
|
headers = Defaults.headers.copy()
|
||||||
headers["cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
|
headers["cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
|
||||||
headers["x-forwarded-for"] = get_ip_address()
|
headers["x-forwarded-for"] = get_ip_address()
|
||||||
|
@ -364,6 +367,7 @@ async def stream_generate(
|
||||||
image: ImageType = None,
|
image: ImageType = None,
|
||||||
context: str = None,
|
context: str = None,
|
||||||
cookies: dict = None,
|
cookies: dict = None,
|
||||||
|
api_key: str = None,
|
||||||
connector: BaseConnector = None,
|
connector: BaseConnector = None,
|
||||||
proxy: str = None,
|
proxy: str = None,
|
||||||
web_search: bool = False,
|
web_search: bool = False,
|
||||||
|
@ -389,7 +393,7 @@ async def stream_generate(
|
||||||
:param timeout: Timeout for the request.
|
:param timeout: Timeout for the request.
|
||||||
:return: An asynchronous generator yielding responses.
|
:return: An asynchronous generator yielding responses.
|
||||||
"""
|
"""
|
||||||
headers = create_headers(cookies)
|
headers = create_headers(cookies, api_key)
|
||||||
new_conversation = conversation is None
|
new_conversation = conversation is None
|
||||||
max_retries = (5 if new_conversation else 0) if max_retries is None else max_retries
|
max_retries = (5 if new_conversation else 0) if max_retries is None else max_retries
|
||||||
async with ClientSession(
|
async with ClientSession(
|
||||||
|
|
|
@ -12,6 +12,7 @@ from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
|
||||||
from .bing.create_images import create_images, create_session, get_cookies_from_browser
|
from .bing.create_images import create_images, create_session, get_cookies_from_browser
|
||||||
|
|
||||||
class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
|
class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
|
label = "Microsoft Designer"
|
||||||
url = "https://www.bing.com/images/create"
|
url = "https://www.bing.com/images/create"
|
||||||
working = True
|
working = True
|
||||||
|
|
||||||
|
|
|
@ -5,6 +5,7 @@ from ..typing import AsyncResult, Messages
|
||||||
from .needs_auth.Openai import Openai
|
from .needs_auth.Openai import Openai
|
||||||
|
|
||||||
class DeepInfra(Openai):
|
class DeepInfra(Openai):
|
||||||
|
label = "DeepInfra"
|
||||||
url = "https://deepinfra.com"
|
url = "https://deepinfra.com"
|
||||||
working = True
|
working = True
|
||||||
needs_auth = False
|
needs_auth = False
|
||||||
|
|
|
@ -12,7 +12,7 @@ from .helper import format_prompt, get_connector
|
||||||
class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
|
class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
url = "https://huggingface.co/chat"
|
url = "https://huggingface.co/chat"
|
||||||
working = True
|
working = True
|
||||||
default_model = "meta-llama/Llama-2-70b-chat-hf"
|
default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
|
||||||
models = [
|
models = [
|
||||||
"mistralai/Mixtral-8x7B-Instruct-v0.1",
|
"mistralai/Mixtral-8x7B-Instruct-v0.1",
|
||||||
"google/gemma-7b-it",
|
"google/gemma-7b-it",
|
||||||
|
|
|
@ -12,6 +12,7 @@ from ..providers.base_provider import AbstractProvider, ProviderModelMixin
|
||||||
from ..errors import MissingRequirementsError
|
from ..errors import MissingRequirementsError
|
||||||
|
|
||||||
class Local(AbstractProvider, ProviderModelMixin):
|
class Local(AbstractProvider, ProviderModelMixin):
|
||||||
|
label = "gpt4all"
|
||||||
working = True
|
working = True
|
||||||
supports_message_history = True
|
supports_message_history = True
|
||||||
supports_system_message = True
|
supports_system_message = True
|
||||||
|
|
|
@ -4,6 +4,7 @@ from .Openai import Openai
|
||||||
from ...typing import AsyncResult, Messages
|
from ...typing import AsyncResult, Messages
|
||||||
|
|
||||||
class Groq(Openai):
|
class Groq(Openai):
|
||||||
|
label = "Groq"
|
||||||
url = "https://console.groq.com/playground"
|
url = "https://console.groq.com/playground"
|
||||||
working = True
|
working = True
|
||||||
default_model = "mixtral-8x7b-32768"
|
default_model = "mixtral-8x7b-32768"
|
||||||
|
|
|
@ -6,6 +6,7 @@ from .Openai import Openai
|
||||||
from ...typing import AsyncResult, Messages
|
from ...typing import AsyncResult, Messages
|
||||||
|
|
||||||
class OpenRouter(Openai):
|
class OpenRouter(Openai):
|
||||||
|
label = "OpenRouter"
|
||||||
url = "https://openrouter.ai"
|
url = "https://openrouter.ai"
|
||||||
working = True
|
working = True
|
||||||
default_model = "openrouter/auto"
|
default_model = "openrouter/auto"
|
||||||
|
|
|
@ -9,6 +9,7 @@ from ...requests import StreamSession, raise_for_status
|
||||||
from ...errors import MissingAuthError, ResponseError
|
from ...errors import MissingAuthError, ResponseError
|
||||||
|
|
||||||
class Openai(AsyncGeneratorProvider, ProviderModelMixin):
|
class Openai(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
|
label = "OpenAI API"
|
||||||
url = "https://openai.com"
|
url = "https://openai.com"
|
||||||
working = True
|
working = True
|
||||||
needs_auth = True
|
needs_auth = True
|
||||||
|
|
|
@ -35,6 +35,7 @@ from ... import debug
|
||||||
class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
|
class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
|
||||||
"""A class for creating and managing conversations with OpenAI chat service"""
|
"""A class for creating and managing conversations with OpenAI chat service"""
|
||||||
|
|
||||||
|
label = "OpenAI ChatGPT"
|
||||||
url = "https://chat.openai.com"
|
url = "https://chat.openai.com"
|
||||||
working = True
|
working = True
|
||||||
supports_gpt_35_turbo = True
|
supports_gpt_35_turbo = True
|
||||||
|
|
|
@ -32,6 +32,7 @@ models = {
|
||||||
}
|
}
|
||||||
|
|
||||||
class Theb(AbstractProvider):
|
class Theb(AbstractProvider):
|
||||||
|
label = "TheB.AI"
|
||||||
url = "https://beta.theb.ai"
|
url = "https://beta.theb.ai"
|
||||||
working = True
|
working = True
|
||||||
supports_gpt_35_turbo = True
|
supports_gpt_35_turbo = True
|
||||||
|
|
|
@ -28,6 +28,7 @@ models = {
|
||||||
}
|
}
|
||||||
|
|
||||||
class ThebApi(Openai):
|
class ThebApi(Openai):
|
||||||
|
label = "TheB.AI API"
|
||||||
url = "https://theb.ai"
|
url = "https://theb.ai"
|
||||||
working = True
|
working = True
|
||||||
needs_auth = True
|
needs_auth = True
|
||||||
|
|
|
@ -1026,9 +1026,10 @@ async function on_api() {
|
||||||
});
|
});
|
||||||
|
|
||||||
providers = await api("providers")
|
providers = await api("providers")
|
||||||
providers.forEach((provider) => {
|
Object.entries(providers).forEach(([provider, label]) => {
|
||||||
let option = document.createElement("option");
|
let option = document.createElement("option");
|
||||||
option.value = option.text = provider;
|
option.value = provider;
|
||||||
|
option.text = label;
|
||||||
providerSelect.appendChild(option);
|
providerSelect.appendChild(option);
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
|
@ -43,7 +43,16 @@ class Api():
|
||||||
"""
|
"""
|
||||||
Return a list of all working providers.
|
Return a list of all working providers.
|
||||||
"""
|
"""
|
||||||
return [provider.__name__ for provider in __providers__ if provider.working]
|
return {
|
||||||
|
provider.__name__: (provider.label
|
||||||
|
if hasattr(provider, "label")
|
||||||
|
else provider.__name__) +
|
||||||
|
(" (WebDriver)"
|
||||||
|
if "webdriver" in provider.get_parameters()
|
||||||
|
else "")
|
||||||
|
for provider in __providers__
|
||||||
|
if provider.working
|
||||||
|
}
|
||||||
|
|
||||||
def get_version(self):
|
def get_version(self):
|
||||||
"""
|
"""
|
||||||
|
|
Loading…
Reference in New Issue