gpt4free/etc/testing/test_providers.py

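# Smoke test for g4f providers: every provider exported by g4f.Provider that
# does not need authentication is asked for a short completion, and working
# providers that fail to return a non-empty string are reported at the end.
# A concurrent variant of the test is kept commented out below.
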
# from g4f.Provider import __all__, ProviderUtils
# from g4f import ChatCompletion
# import concurrent.futures

# _ = [
#     'BaseProvider',
#     'AsyncProvider',
#     'AsyncGeneratorProvider',
#     'RetryProvider'
# ]

# def test_provider(provider):
#     try:
#         provider = (ProviderUtils.convert[provider])
#         if provider.working and not provider.needs_auth:
#             print('testing', provider.__name__)
#             completion = ChatCompletion.create(model='gpt-3.5-turbo',
#                                                messages=[{"role": "user", "content": "hello"}], provider=provider)
#             return completion, provider.__name__
#     except Exception as e:
#         # print(f'Failed to test provider: {provider} | {e}')
#         return None

# with concurrent.futures.ThreadPoolExecutor() as executor:
#     futures = []
#     for provider in __all__:
#         if provider not in _:
#             futures.append(executor.submit(test_provider, provider))

#     for future in concurrent.futures.as_completed(futures):
#         result = future.result()
#         if result:
#             print(f'{result[1]} | {result[0]}')
import sys
from pathlib import Path
from colorama import Fore, Style

sys.path.append(str(Path(__file__).parent.parent))

from g4f import BaseProvider, models, Provider

logging = False

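# Test every provider that does not require authentication and collect the
# providers that are marked as working but fail the smoke test.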
def main():
    providers = get_providers()
    failed_providers = []

    for _provider in providers:
        if _provider.needs_auth:
            continue
        print("Provider:", _provider.__name__)
        result = test(_provider)
        print("Result:", result)
        if _provider.working and not result:
            failed_providers.append(_provider)

    print()
    if failed_providers:
        print(f"{Fore.RED + Style.BRIGHT}Failed providers:{Style.RESET_ALL}")
        for _provider in failed_providers:
            print(f"{Fore.RED}{_provider.__name__}{Style.RESET_ALL}")
    else:
        print(f"{Fore.GREEN + Style.BRIGHT}All providers are working{Style.RESET_ALL}")

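# Collect every class exported by g4f.Provider that derives from BaseProvider,
# excluding the RetryProvider wrapper.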
def get_providers() -> list[type[BaseProvider]]:
    providers = dir(Provider)
    providers = [getattr(Provider, provider) for provider in providers if provider != "RetryProvider"]
    providers = [provider for provider in providers if isinstance(provider, type)]
    return [provider for provider in providers if issubclass(provider, BaseProvider)]

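# Ask the provider for a single non-streaming completion, preferring
# gpt-3.5-turbo when the provider advertises support for it; create_completion
# yields string chunks, which are joined into one string.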
def create_response(_provider: type[BaseProvider]) -> str:
    model = models.gpt_35_turbo.name if _provider.supports_gpt_35_turbo else models.default.name
    response = _provider.create_completion(
        model=model,
        messages=[{"role": "user", "content": "Hello, who are you? Answer in as much detail as possible."}],
        stream=False,
    )
    return "".join(response)

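# Returns the response text on success (main() only uses it as a truthy value)
# and False on failure; exceptions are printed only when `logging` is enabled.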
def test(_provider: type[BaseProvider]) -> bool:
    try:
        response = create_response(_provider)
        assert type(response) is str
        assert len(response) > 0
        return response
    except Exception as e:
        if logging:
            print(e)
        return False


if __name__ == "__main__":
    main()