Add MetaAI Provider and some small improvements

Heiner Lohaus 2024-04-20 10:43:53 +02:00
parent a27d908696
commit 932d3a3ca0
12 changed files with 221 additions and 12 deletions

@@ -15,6 +15,7 @@ class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Microsoft Designer"
    url = "https://www.bing.com/images/create"
    working = True
    needs_auth = True

    def __init__(self, cookies: Cookies = None, proxy: str = None) -> None:
        self.cookies: Cookies = cookies

@@ -15,7 +15,8 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
    working = True
    supports_gpt_35_turbo = True
    default_model = "gpt-3.5-turbo-0125"
    model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"}
    models = [default_model, "green"]
    model_aliases = {"gpt-3.5-turbo": default_model}

    @classmethod
    async def create_async_generator(
@@ -23,11 +24,10 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
        model: str,
        messages: Messages,
        connector: BaseConnector = None,
        green: bool = False,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        cls.get_model(model)
        model = cls.get_model(model)
        headers = {
            "authority": "api.ecosia.org",
            "accept": "*/*",
@@ -39,7 +39,7 @@ class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
        data = {
            "messages": base64.b64encode(json.dumps(messages).encode()).decode()
        }
        api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if green else 'productivity'}"
        api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if model == 'green' else 'productivity'}"
        async with session.post(api_url, json=data) as response:
            await raise_for_status(response)
            async for chunk in response.content.iter_any():
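After this change the eco mode is selected through the model name instead of the removed green argument. A minimal usage sketch, assuming g4f's ChatCompletion API; the call below is illustrative and not part of this diff:

import g4f
from g4f.Provider import Ecosia

response = g4f.ChatCompletion.create(
    model="green",  # "green" maps to sp=eco in the api_url above; the default model maps to sp=productivity
    provider=Ecosia,
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)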

g4f/Provider/MetaAI.py (new file, 199 lines)

@@ -0,0 +1,199 @@
import json
import uuid
import random
import time
from typing import Dict, List

from aiohttp import ClientSession, BaseConnector

from ..typing import AsyncResult, Messages, Cookies
from ..requests import raise_for_status, DEFAULT_HEADERS
from ..image import ImageResponse
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt, get_connector, get_cookies

class MetaAI(AsyncGeneratorProvider):
    url = "https://www.meta.ai"
    working = True

    def __init__(self, proxy: str = None, connector: BaseConnector = None):
        self.session = ClientSession(connector=get_connector(connector, proxy), headers=DEFAULT_HEADERS)

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        #cookies = get_cookies(".meta.ai", False, True)
        async for chunk in cls(proxy).prompt(format_prompt(messages)):
            yield chunk

    async def get_access_token(self, cookies: Cookies, birthday: str = "1999-01-01") -> str:
        # Accept the ToS as a temporary user to obtain an access token for the GraphQL API.
        url = "https://www.meta.ai/api/graphql/"
        payload = {
            "lsd": cookies["lsd"],
            "fb_api_caller_class": "RelayModern",
            "fb_api_req_friendly_name": "useAbraAcceptTOSForTempUserMutation",
            "variables": json.dumps({
                "dob": birthday,
                "icebreaker_type": "TEXT",
                "__relay_internal__pv__WebPixelRatiorelayprovider": 1,
            }),
            "doc_id": "7604648749596940",
        }
        headers = {
            "x-fb-friendly-name": "useAbraAcceptTOSForTempUserMutation",
            "x-fb-lsd": cookies["lsd"],
            "x-asbd-id": "129477",
            "alt-used": "www.meta.ai",
            "sec-fetch-site": "same-origin"
        }
        async with self.session.post(url, headers=headers, cookies=cookies, data=payload) as response:
            await raise_for_status(response, "Fetch access_token failed")
            auth_json = await response.json(content_type=None)
            access_token = auth_json["data"]["xab_abra_accept_terms_of_service"]["new_temp_user_auth"]["access_token"]
            return access_token

    async def prompt(self, message: str, cookies: Cookies = None) -> AsyncResult:
        # Without existing cookies, register a temporary user and fetch an access token.
        access_token = None
        if cookies is None:
            cookies = await self.get_cookies()
            access_token = await self.get_access_token(cookies)
        else:
            cookies = await self.get_cookies(cookies)
        url = "https://graph.meta.ai/graphql?locale=user"
        #url = "https://www.meta.ai/api/graphql/"
        payload = {
            "access_token": access_token,
            #"lsd": cookies["lsd"],
            "fb_api_caller_class": "RelayModern",
            "fb_api_req_friendly_name": "useAbraSendMessageMutation",
            "variables": json.dumps({
                "message": {"sensitive_string_value": message},
                "externalConversationId": str(uuid.uuid4()),
                "offlineThreadingId": generate_offline_threading_id(),
                "suggestedPromptIndex": None,
                "flashVideoRecapInput": {"images": []},
                "flashPreviewInput": None,
                "promptPrefix": None,
                "entrypoint": "ABRA__CHAT__TEXT",
                "icebreaker_type": "TEXT",
                "__relay_internal__pv__AbraDebugDevOnlyrelayprovider": False,
                "__relay_internal__pv__WebPixelRatiorelayprovider": 1,
            }),
            "server_timestamps": "true",
            "doc_id": "7783822248314888",
        }
        headers = {
            "x-asbd-id": "129477",
            "x-fb-friendly-name": "useAbraSendMessageMutation",
            #"x-fb-lsd": cookies["lsd"],
        }
        async with self.session.post(url, headers=headers, cookies=cookies, data=payload) as response:
            await raise_for_status(response, "Fetch response failed")
            last_snippet_len = 0
            fetch_id = None
            async for line in response.content:
                try:
                    json_line = json.loads(line)
                except json.JSONDecodeError:
                    continue
                bot_response_message = json_line.get("data", {}).get("node", {}).get("bot_response_message", {})
                streaming_state = bot_response_message.get("streaming_state")
                fetch_id = bot_response_message.get("fetch_id")
                if streaming_state in ("STREAMING", "OVERALL_DONE"):
                    #imagine_card = bot_response_message["imagine_card"]
                    snippet = bot_response_message["snippet"]
                    yield snippet[last_snippet_len:]
                    last_snippet_len = len(snippet)
                    if streaming_state == "OVERALL_DONE":
                        # Stop reading once the final chunk has been emitted.
                        break
            #if last_streamed_response is None:
            #    if attempts > 3:
            #        raise Exception("MetaAI is having issues and was not able to respond (Server Error)")
            #    access_token = await self.get_access_token()
            #    return await self.prompt(message=message, attempts=attempts + 1)
            if fetch_id is not None:
                sources = await self.fetch_sources(fetch_id, cookies, access_token)
                if sources is not None:
                    yield sources

    async def get_cookies(self, cookies: Cookies = None) -> dict:
        # Extract the cookie values and the "lsd" token embedded in the home page HTML.
        async with self.session.get("https://www.meta.ai/", cookies=cookies) as response:
            await raise_for_status(response, "Fetch home failed")
            text = await response.text()
            if cookies is None:
                cookies = {
                    "_js_datr": self.extract_value(text, "_js_datr"),
                    "abra_csrf": self.extract_value(text, "abra_csrf"),
                    "datr": self.extract_value(text, "datr"),
                }
            cookies["lsd"] = self.extract_value(text, start_str='"LSD",[],{"token":"', end_str='"}')
            return cookies

    async def fetch_sources(self, fetch_id: str, cookies: Cookies, access_token: str) -> List[Dict]:
        # Fetch the search references ("sources") attached to a finished response.
        url = "https://graph.meta.ai/graphql?locale=user"
        payload = {
            "access_token": access_token,
            "fb_api_caller_class": "RelayModern",
            "fb_api_req_friendly_name": "AbraSearchPluginDialogQuery",
            "variables": json.dumps({"abraMessageFetchID": fetch_id}),
            "server_timestamps": "true",
            "doc_id": "6946734308765963",
        }
        headers = {
            "authority": "graph.meta.ai",
            "x-fb-friendly-name": "AbraSearchPluginDialogQuery",
        }
        async with self.session.post(url, headers=headers, cookies=cookies, data=payload) as response:
            await raise_for_status(response)
            response_json = await response.json()
            try:
                message = response_json["data"]["message"]
                if message is not None:
                    searchResults = message["searchResults"]
                    if searchResults is not None:
                        return Sources(searchResults["references"])
            except (KeyError, TypeError):
                raise RuntimeError(f"Response: {response_json}")

    @staticmethod
    def extract_value(text: str, key: str = None, start_str = None, end_str = '",') -> str:
        if start_str is None:
            start_str = f'{key}":{{"value":"'
        start = text.find(start_str)
        if start >= 0:
            start += len(start_str)
            end = text.find(end_str, start)
            return text[start:end]

def generate_offline_threading_id() -> str:
    """
    Generates an offline threading ID.

    Returns:
        str: The generated offline threading ID.
    """
    # Generate a random 64-bit integer
    random_value = random.getrandbits(64)

    # Get the current timestamp in milliseconds
    timestamp = int(time.time() * 1000)

    # Combine timestamp and random value
    threading_id = (timestamp << 22) | (random_value & ((1 << 22) - 1))

    return str(threading_id)

class Sources():
    def __init__(self, list: List[Dict[str, str]]) -> None:
        self.list = list

    def __str__(self) -> str:
        return "\n\n" + ("\n".join([f"[{link['title']}]({link['link']})" for link in self.list]))

@@ -42,6 +42,7 @@ from .Koala import Koala
from .Liaobots import Liaobots
from .Llama import Llama
from .Local import Local
from .MetaAI import MetaAI
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi
from .ReplicateImage import ReplicateImage

@@ -1,3 +1,3 @@
from ..providers.base_provider import *
from ..providers.types import FinishReason
from ..providers.types import FinishReason, Streaming
from .helper import get_cookies, format_prompt

@@ -4,7 +4,7 @@ from .Openai import Openai
from ...typing import AsyncResult, Messages

class Groq(Openai):
    lebel = "Groq"
    label = "Groq"
    url = "https://console.groq.com/playground"
    working = True
    default_model = "mixtral-8x7b-32768"

@@ -3,5 +3,4 @@ from __future__ import annotations
from .OpenaiChat import OpenaiChat

class OpenaiAccount(OpenaiChat):
    label = "OpenAI ChatGPT with Account"
    needs_auth = True

@@ -111,5 +111,6 @@ def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, str]]:
            "name": last.__name__,
            "url": last.url,
            "model": debug.last_model,
            "label": last.label if hasattr(last, "label") else None
        }
    return last

@@ -890,7 +890,7 @@ a:-webkit-any-link {
    resize: vertical;
    max-height: 200px;
    min-height: 80px;
    min-height: 100px;
}
/* style for hljs copy */

@@ -302,7 +302,7 @@ async function add_message_chunk(message) {
        window.provider_result = message.provider;
        content.querySelector('.provider').innerHTML = `
            <a href="${message.provider.url}" target="_blank">
                ${message.provider.name}
                ${message.provider.label ? message.provider.label : message.provider.name}
            </a>
            ${message.provider.model ? ' with ' + message.provider.model : ''}
        `
@@ -545,7 +545,8 @@ const load_conversation = async (conversation_id, scroll=true) => {
        last_model = item.provider?.model;
        let next_i = parseInt(i) + 1;
        let next_provider = item.provider ? item.provider : (messages.length > next_i ? messages[next_i].provider : null);
        let provider_link = item.provider?.name ? `<a href="${item.provider.url}" target="_blank">${item.provider.name}</a>` : "";
        let provider_label = item.provider?.label ? item.provider?.label : item.provider?.name;
        let provider_link = item.provider?.name ? `<a href="${item.provider.url}" target="_blank">${provider_label}</a>` : "";
        let provider = provider_link ? `
            <div class="provider">
                ${provider_link}

@@ -99,7 +99,7 @@ class Api():
        if api_key is not None:
            kwargs["api_key"] = api_key
        if json_data.get('web_search'):
            if provider == "Bing":
            if provider in ("Bing", "HuggingChat"):
                kwargs['web_search'] = True
            else:
                from .internet import get_search_message

@@ -102,4 +102,11 @@ ProviderType = Union[Type[BaseProvider], BaseRetryProvider]

class FinishReason():
    def __init__(self, reason: str):
        self.reason = reason
        self.reason = reason

class Streaming():
    def __init__(self, data: str) -> None:
        self.data = data

    def __str__(self) -> str:
        return self.data