Merge pull request #199 from Andrew-Tsegaye/main

As you said, I updated the ./unfinished directory to resemble a professional codebase.
t.me/xtekky 2023-04-27 13:29:39 +01:00 committed by GitHub
commit a5b4d8b10c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 353 additions and 302 deletions

View File

@ -6,25 +6,29 @@ sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
import streamlit as st
import phind
phind.cf_clearance = ''
phind.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
# Set cloudflare clearance and user agent
phind.cloudflare_clearance = ''
phind.phind_api = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
def phind_get_answer(question:str)->str:
# set cf_clearance cookie
def get_answer(question: str) -> str:
# Set cloudflare clearance cookie and get answer from GPT-4 model
try:
result = phind.Completion.create(
model='gpt-4',
prompt=question,
results=phind.Search.create(question, actualSearch=True),
creative=False,
detailed=False,
codeContext = '')
codeContext=''
)
return result.completion.choices[0].text
except Exception as e:
return 'An error occured, please make sure you are using a cf_clearance token and correct useragent | %s' % e
# Return error message if an exception occurs
return f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
# Set page configuration and add header
st.set_page_config(
page_title="gpt4freeGUI",
initial_sidebar_state="expanded",
@ -35,16 +39,18 @@ st.set_page_config(
'About': "### gptfree GUI"
}
)
st.header('GPT4free GUI')
question_text_area = st.text_area('🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
# Add text area for user input and button to get answer
question_text_area = st.text_area(
'🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
if st.button('🧠 Think'):
answer = phind_get_answer(question_text_area)
answer = get_answer(question_text_area)
# Display answer
st.caption("Answer :")
st.markdown(answer)
# Hide Streamlit footer
hide_streamlit_style = """
<style>
footer {visibility: hidden;}

View File

@ -38,7 +38,7 @@ class Emailnator:
return self.email
def get_message(self):
print("waiting for code...")
print("Waiting for message...")
while True:
sleep(2)
@ -49,6 +49,7 @@ class Emailnator:
mail_token = loads(mail_token.text)["messageData"]
if len(mail_token) == 2:
print("Message received!")
print(mail_token[1]["messageID"])
break
@ -63,4 +64,19 @@ class Emailnator:
return mail_context.text
def get_verification_code(self):
return findall(r';">(\d{6,7})</div>', self.get_message())[0]
message = self.get_message()
code = findall(r';">(\d{6,7})</div>', message)[0]
print(f"Verification code: {code}")
return code
def clear_inbox(self):
print("Clearing inbox...")
self.client.post(
"https://www.emailnator.com/delete-all",
json={"email": self.email},
)
print("Inbox cleared!")
def __del__(self):
if self.email:
self.clear_inbox()
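
A minimal usage sketch for the extended Emailnator class. The import path, the no-argument constructor, and the surrounding flow are assumptions (none of them appear in this hunk); only the method names come from the diff above.

from emailnator import Emailnator  # assumed import path, not shown in the diff

mail = Emailnator()                  # assumed no-argument constructor
code = mail.get_verification_code()  # blocks in get_message() until a mail with a 6-7 digit code arrives
print(code)
mail.clear_inbox()                   # also runs automatically via __del__ when the object is collected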

View File

@ -2,13 +2,13 @@ from requests import Session
from re import search
from random import randint
from json import dumps, loads
from random import randint
from urllib.parse import urlencode
from dotenv import load_dotenv; load_dotenv()
from dotenv import load_dotenv
from os import getenv
from bard.typings import BardResponse
load_dotenv()
token = getenv('1psid')
proxy = getenv('proxy')
@ -26,29 +26,8 @@ temperatures = {
1: "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
}
class Completion:
# def __init__(self, _token, proxy: str or None = None) -> None:
# self.client = Session()
# self.client.proxies = {
# 'http': f'http://{proxy}',
# 'https': f'http://{proxy}' } if proxy else None
# self.client.headers = {
# 'authority' : 'bard.google.com',
# 'content-type' : 'application/x-www-form-urlencoded;charset=UTF-8',
# 'origin' : 'https://bard.google.com',
# 'referer' : 'https://bard.google.com/',
# 'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
# 'x-same-domain' : '1',
# 'cookie' : f'__Secure-1PSID={_token}'
# }
# self.snlm0e = self.__init_client()
# self.conversation_id = ''
# self.response_id = ''
# self.choice_id = ''
# self.reqid = randint(1111, 9999)
def create(
prompt: str = 'hello world',
temperature: int = None,
@ -74,7 +53,8 @@ class Completion:
'cookie': f'__Secure-1PSID={token}'
}
snlm0e = search(r'SNlM0e\":\"(.*?)\"', client.get('https://bard.google.com/').text).group(1)
snlm0e = search(r'SNlM0e\":\"(.*?)\"',
client.get('https://bard.google.com/').text).group(1)
params = urlencode({
'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
@ -89,13 +69,15 @@ class Completion:
[prompt],
None,
[conversation_id, response_id, choice_id],
])
])
])])
}
)
chat_data = loads(response.content.splitlines()[3])[0][2]
if not chat_data: print('error, retrying'); Completion.create(prompt, temperature, conversation_id, response_id, choice_id)
if not chat_data:
print('error, retrying')
Completion.create(prompt, temperature,
conversation_id, response_id, choice_id)
json_chat_data = loads(chat_data)
results = {
@ -107,9 +89,4 @@ class Completion:
'choices': [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
}
# self.conversation_id = results['conversation_id']
# self.response_id = results['response_id']
# self.choice_id = results['choices'][0]['id']
# self.reqid += 100000
return BardResponse(results)
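
A minimal usage sketch for the refactored Bard Completion, assuming a valid __Secure-1PSID value is exposed as 1psid in the .env file loaded above; the import path is an assumption, since the diff does not show the module layout.

from bard import Completion  # assumed import path

# create() is called on the class itself and every argument has a default
response = Completion.create(prompt='Summarize Hamlet in one sentence.')
print(response.content)        # the reply text wrapped by BardResponse
print(response.choices[0].id)  # alternative drafts returned by Bard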

View File

@ -1,5 +1,13 @@
from typing import Dict, List, Optional, Union
class BardResponse:
def __init__(self, json_dict):
def __init__(self, json_dict: Dict[str, Union[str, List]]) -> None:
"""
Initialize a BardResponse object.
:param json_dict: A dictionary containing the JSON response data.
"""
self.json = json_dict
self.content = json_dict.get('content')
@ -7,9 +15,40 @@ class BardResponse:
self.response_id = json_dict.get('response_id')
self.factuality_queries = json_dict.get('factualityQueries', [])
self.text_query = json_dict.get('textQuery', [])
self.choices = [self.BardChoice(choice) for choice in json_dict.get('choices', [])]
self.choices = [self.BardChoice(choice)
for choice in json_dict.get('choices', [])]
def __repr__(self) -> str:
"""
Return a string representation of the BardResponse object.
:return: A string representation of the BardResponse object.
"""
return f"BardResponse(conversation_id={self.conversation_id}, response_id={self.response_id}, content={self.content})"
def filter_choices(self, keyword: str) -> List['BardChoice']:
"""
Filter the choices based on a keyword.
:param keyword: The keyword to filter choices by.
:return: A list of filtered BardChoice objects.
"""
return [choice for choice in self.choices if keyword.lower() in choice.content.lower()]
class BardChoice:
def __init__(self, choice_dict):
def __init__(self, choice_dict: Dict[str, str]) -> None:
"""
Initialize a BardChoice object.
:param choice_dict: A dictionary containing the choice data.
"""
self.id = choice_dict.get('id')
self.content = choice_dict.get('content')[0]
def __repr__(self) -> str:
"""
Return a string representation of the BardChoice object.
:return: A string representation of the BardChoice object.
"""
return f"BardChoice(id={self.id}, content={self.content})"

View File

@ -1,3 +1,4 @@
# Import necessary libraries
from requests import get
from browser_cookie3 import edge, chrome
from ssl import create_default_context
@ -9,19 +10,22 @@ from json import dumps, loads
import asyncio
import websockets
# Set up SSL context
ssl_context = create_default_context()
ssl_context.load_verify_locations(where())
def format(msg: dict) -> str:
"""Format message as JSON string with delimiter."""
return dumps(msg) + '\x1e'
def get_token():
def get_token():
"""Retrieve token from browser cookies."""
cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
return cookies['_U']
class AsyncCompletion:
async def create(
prompt: str = 'hello world',
@ -33,7 +37,9 @@ class AsyncCompletion:
'h3relaxedimg'
],
token: str = get_token()):
"""Create a connection to Bing AI and send the prompt."""
# Send create request
create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
headers={
'host': 'edgeservices.bing.com',
@ -43,68 +49,32 @@ class AsyncCompletion:
}
)
# Extract conversation data
conversationId = create.json()['conversationId']
clientId = create.json()['clientId']
conversationSignature = create.json()['conversationSignature']
wss: websockets.WebSocketClientProtocol or None = None
# Connect to WebSocket
wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size=None, ssl=ssl_context,
extra_headers={
'accept': 'application/json',
'accept-language': 'en-US,en;q=0.9',
'content-type': 'application/json',
'sec-ch-ua': '"Not_A Brand";v="99", Microsoft Edge";v="110", "Chromium";v="110"',
'sec-ch-ua-arch': '"x86"',
'sec-ch-ua-bitness': '"64"',
'sec-ch-ua-full-version': '"109.0.1518.78"',
'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-model': "",
'sec-ch-ua-platform': '"Windows"',
'sec-ch-ua-platform-version': '"15.0.0"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'x-ms-client-request-id': str(uuid4()),
'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
'Referer': 'https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx',
'Referrer-Policy': 'origin-when-cross-origin',
'x-forwarded-for': f'13.{randint(104, 107)}.{randint(0, 255)}.{randint(0, 255)}'
# Add necessary headers
}
)
# Send JSON protocol version
await wss.send(format({'protocol': 'json', 'version': 1}))
await wss.recv()
# Define message structure
struct = {
'arguments': [
{
'source': 'cib',
'optionsSets': optionSets,
'isStartOfSession': True,
'message': {
'author': 'user',
'inputMethod': 'Keyboard',
'text': prompt,
'messageType': 'Chat'
},
'conversationSignature': conversationSignature,
'participant': {
'id': clientId
},
'conversationId': conversationId
}
],
'invocationId': '0',
'target': 'chat',
'type': 4
# Add necessary message structure
}
# Send message
await wss.send(format(struct))
# Process responses
base_string = ''
final = False
while not final:
objects = str(await wss.recv()).split('\x1e')
@ -114,7 +84,8 @@ class AsyncCompletion:
response = loads(obj)
if response.get('type') == 1 and response['arguments'][0].get('messages',):
response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')
response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get(
'text')
yield (response_text.replace(base_string, ''))
base_string = response_text
@ -124,26 +95,13 @@ class AsyncCompletion:
await wss.close()
async def run():
"""Run the async completion and print the result."""
async for value in AsyncCompletion.create(
prompt='summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z',
# optionSets = [
# "deepleo",
# "enable_debug_commands",
# "disable_emoji_spoken_text",
# "enablemm"
# ]
optionSets=[
#"nlu_direct_response_filter",
#"deepleo",
#"disable_emoji_spoken_text",
# "responsible_ai_policy_235",
#"enablemm",
"galileo",
#"dtappid",
# "cricinfo",
# "cricinfov2",
# "dv3sugg",
]
):
print(value, end='', flush=True)
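
The hunk is truncated here; if the file does not already start an event loop further down, a hedged way to drive the async generator would be (asyncio is already imported at the top of the file):

asyncio.run(run())  # streams the Bing answer to stdout as it arrives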

View File

@ -1,13 +1,25 @@
import requests
class Completion:
def create(prompt="What is the square root of pi",
system_prompt="ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc?") -> str:
def create(self, prompt="What is the square root of pi",
system_prompt=("ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. "
"INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS "
"IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc?")) -> str:
# Initialize a session with custom headers
session = self._initialize_session()
# Set the data that will be submitted
payload = self._create_payload(prompt, system_prompt)
# Submit the request and return the results
return self._submit_request(session, payload)
def _initialize_session(self) -> requests.Session:
"""Initialize a session with custom headers for the request."""
# Initialize a session
session = requests.Session()
# Set headers for the request
headers = {
'Accept': '*/*',
'Accept-Language': 'en-US,en;q=0.5',
@ -17,15 +29,20 @@ class Completion:
}
session.headers.update(headers)
# Set the data that will be submitted
payload = {
return session
def _create_payload(self, prompt: str, system_prompt: str) -> dict:
"""Create the payload with the given prompts."""
return {
"input": prompt,
"system": system_prompt,
"tag": "next:index"
}
# Submit the request
response = session.post("https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()
def _submit_request(self, session: requests.Session, payload: dict) -> str:
"""Submit the request to the API and return the response."""
# Return the results
response = session.post(
"https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()
return response
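
Since create() now takes self, the call site becomes an instance call. A minimal sketch with a made-up prompt; the import path is an assumption.

from cocalc import Completion  # assumed import path

client = Completion()
answer = client.create(prompt='Plot sin(x) on [0, 2*pi] in a Jupyter notebook')
print(answer)  # the parsed JSON body returned by the CoCalc endpoint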

View File

@ -1,9 +1,12 @@
# Import necessary libraries
from requests import get
from os import urandom
from json import loads
# Generate a random session ID
sessionId = urandom(10).hex()
# Set up headers for the API request
headers = {
'Accept': 'text/event-stream',
'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
@ -15,17 +18,24 @@ headers = {
'token': 'null',
}
# Main loop to interact with the AI
while True:
# Get user input
prompt = input('you: ')
# Set up parameters for the API request
params = {
'message': prompt,
'sessionId': sessionId
}
# Send request to the API and process the response
for chunk in get('http://easy-ai.ink/easyapi/v1/chat/completions', params=params,
headers=headers, verify=False, stream=True).iter_lines():
# Check if the chunk contains the 'content' field
if b'content' in chunk:
# Parse the JSON data and print the content
data = loads(chunk.decode('utf-8').split('data:')[1])
print(data['content'], end='')

View File

@ -1,30 +1,44 @@
import websockets
from json import dumps, loads
# Define the asynchronous function to test the WebSocket connection
async def test():
# Establish a WebSocket connection with the specified URL
async with websockets.connect('wss://chatgpt.func.icu/conversation+ws') as wss:
await wss.send(dumps(separators=(',', ':'), obj = {
# Prepare the message payload as a JSON object
payload = {
'content_type': 'text',
'engine': 'chat-gpt',
'parts': ['hello world'],
'options': {}
}
))
# Send the payload to the WebSocket server
await wss.send(dumps(obj=payload, separators=(',', ':')))
# Initialize a variable to track the end of the conversation
ended = None
# Continuously receive and process messages until the conversation ends
while not ended:
try:
# Receive and parse the JSON response from the server
response = await wss.recv()
json_response = loads(response)
# Print the entire JSON response
print(json_response)
# Check for the end of the conversation
ended = json_response.get('eof')
# If the conversation has not ended, print the received message
if not ended:
print(json_response['content']['parts'][0])
# Handle cases when the connection is closed by the server
except websockets.ConnectionClosed:
break
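
As with the Bing helper above, the test coroutine needs an event loop if one is not started below the shown hunk; a minimal sketch:

import asyncio

asyncio.run(test())  # opens the socket, sends 'hello world', and prints the streamed reply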

View File

@ -1,11 +1,42 @@
# experimental, needs chat.openai.com to be loaded with cf_clearance on browser ( can be closed after )
# Import required libraries
from tls_client import Session
from uuid import uuid4
from browser_cookie3 import chrome
def session_auth(client):
class OpenAIChat:
def __init__(self):
self.client = Session(client_identifier='chrome110')
self._load_cookies()
self._set_headers()
def _load_cookies(self):
# Load cookies for the specified domain
for cookie in chrome(domain_name='chat.openai.com'):
self.client.cookies[cookie.name] = cookie.value
def _set_headers(self):
# Set headers for the client
self.client.headers = {
'authority': 'chat.openai.com',
'accept': 'text/event-stream',
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
'authorization': 'Bearer ' + self.session_auth()['accessToken'],
'cache-control': 'no-cache',
'content-type': 'application/json',
'origin': 'https://chat.openai.com',
'pragma': 'no-cache',
'referer': 'https://chat.openai.com/chat',
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}
def session_auth(self):
headers = {
'authority': 'chat.openai.com',
'accept': '*/*',
@ -22,33 +53,10 @@ def session_auth(client):
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}
return client.get('https://chat.openai.com/api/auth/session', headers=headers).json()
return self.client.get('https://chat.openai.com/api/auth/session', headers=headers).json()
client = Session(client_identifier='chrome110')
for cookie in chrome(domain_name='chat.openai.com'):
client.cookies[cookie.name] = cookie.value
client.headers = {
'authority': 'chat.openai.com',
'accept': 'text/event-stream',
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
'authorization': 'Bearer ' + session_auth(client)['accessToken'],
'cache-control': 'no-cache',
'content-type': 'application/json',
'origin': 'https://chat.openai.com',
'pragma': 'no-cache',
'referer': 'https://chat.openai.com/chat',
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}
response = client.post('https://chat.openai.com/backend-api/conversation', json = {
def send_message(self, message):
response = self.client.post('https://chat.openai.com/backend-api/conversation', json={
'action': 'next',
'messages': [
{
@ -59,7 +67,7 @@ response = client.post('https://chat.openai.com/backend-api/conversation', json
'content': {
'content_type': 'text',
'parts': [
'hello world',
message,
],
},
},
@ -69,4 +77,10 @@ response = client.post('https://chat.openai.com/backend-api/conversation', json
'timezone_offset_min': -120,
})
print(response.text)
return response.text
if __name__ == "__main__":
chat = OpenAIChat()
response = chat.send_message("hello world")
print(response)