Merge pull request #1505 from hlohaus/sort

Mish
This commit is contained in:
H Lohaus 2024-01-23 09:31:02 +01:00 committed by GitHub
commit c10f49de5f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 7044 additions and 204 deletions

View File

@@ -217,7 +217,7 @@ def main():
if not pull:
print(f"No PR number found")
exit()
if pull.get_reviews().totalCount > 0 or pull.get_comments().totalCount > 0:
if pull.get_reviews().totalCount > 0 or pull.get_issue_comments().totalCount > 0:
print(f"Has already a review")
exit()
diff = get_diff(pull.diff_url)

View File

@@ -1,13 +1,7 @@
from __future__ import annotations
from aiohttp import ClientSession
import json
from Crypto.Cipher import AES
from Crypto.Util import Padding
import base64
import hashlib
import time
import math
import execjs, os, json
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
@@ -71,13 +65,18 @@ class GptForLove(AsyncGeneratorProvider):
def get_secret() -> str:
k = '14487141bvirvvG'
e = math.floor(time.time())
plaintext = str(e).encode('utf-8')
key = hashlib.md5(k.encode('utf-8')).digest()
cipher = AES.new(key, AES.MODE_ECB)
ciphertext = cipher.encrypt(Padding.pad(plaintext, AES.block_size, style='pkcs7'))
return base64.b64encode(ciphertext).decode()
dir = os.path.dirname(__file__)
include = f'{dir}/npm/node_modules/crypto-js/crypto-js'
source = """
CryptoJS = require({include})
var k = 'fjfsdwiuhfwf'
, e = Math.floor(new Date().getTime() / 1e3);
var t = CryptoJS.enc.Utf8.parse(e)
, o = CryptoJS.AES.encrypt(t, k, {
mode: CryptoJS.mode.ECB,
padding: CryptoJS.pad.Pkcs7
});
return o.toString()
"""
source = source.replace('{include}', json.dumps(include))
return execjs.compile(source).call('')

View File

@@ -8,14 +8,23 @@ from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt, get_cookies
map = {
"openchat/openchat_3.5": "openchat/openchat-3.5-1210",
}
class HuggingChat(AsyncGeneratorProvider):
url = "https://huggingface.co/chat"
working = True
model = "meta-llama/Llama-2-70b-chat-hf"
default_model = "meta-llama/Llama-2-70b-chat-hf"
models = [
"mistralai/Mixtral-8x7B-Instruct-v0.1",
"meta-llama/Llama-2-70b-chat-hf",
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"codellama/CodeLlama-34b-Instruct-hf",
"mistralai/Mistral-7B-Instruct-v0.2",
"openchat/openchat-3.5-0106"
]
model_map = {
"openchat/openchat_3.5": "openchat/openchat-3.5-1210",
"mistralai/Mixtral-8x7B-Instruct-v0.1": "mistralai/Mistral-7B-Instruct-v0.2"
}
@classmethod
async def create_async_generator(
@@ -29,9 +38,11 @@ class HuggingChat(AsyncGeneratorProvider):
**kwargs
) -> AsyncResult:
if not model:
model = cls.model
elif model in map:
model = map[model]
model = cls.default_model
elif model in cls.model_map:
model = cls.model_map[model]
elif model not in cls.models:
raise ValueError(f"Model is not supported: {model}")
if not cookies:
cookies = get_cookies(".huggingface.co")

View File

@@ -2,27 +2,28 @@ from __future__ import annotations
import random
import json
from aiohttp import ClientSession, WSMsgType
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
API_URL = "https://labs-api.perplexity.ai/socket.io/"
WS_URL = "wss://labs-api.perplexity.ai/socket.io/"
MODELS = ['pplx-7b-online', 'pplx-70b-online', 'pplx-7b-chat', 'pplx-70b-chat', 'mistral-7b-instruct',
'codellama-34b-instruct', 'llama-2-70b-chat', 'llava-7b-chat', 'mixtral-8x7b-instruct',
'mistral-medium', 'related']
DEFAULT_MODEL = MODELS[1]
MODEL_MAP = {
"mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct",
"meta-llama/Llama-2-70b-chat-hf": "llama-2-70b-chat",
"mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
}
class PerplexityLabs(AsyncGeneratorProvider):
url = "https://labs.perplexity.ai"
working = True
supports_gpt_35_turbo = True
models = ['pplx-7b-online', 'pplx-70b-online', 'pplx-7b-chat', 'pplx-70b-chat', 'mistral-7b-instruct',
'codellama-34b-instruct', 'llama-2-70b-chat', 'llava-7b-chat', 'mixtral-8x7b-instruct',
'mistral-medium', 'related']
default_model = 'pplx-70b-online'
model_map = {
"mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct",
"meta-llama/Llama-2-70b-chat-hf": "llama-2-70b-chat",
"mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
"codellama/CodeLlama-34b-Instruct-hf": "codellama-34b-instruct"
}
@classmethod
async def create_async_generator(
@@ -33,10 +34,10 @@ class PerplexityLabs(AsyncGeneratorProvider):
**kwargs
) -> AsyncResult:
if not model:
model = DEFAULT_MODEL
elif model in MODEL_MAP:
model = MODEL_MAP[model]
elif model not in MODELS:
model = cls.default_model
elif model in cls.model_map:
model = cls.model_map[model]
elif model not in cls.models:
raise ValueError(f"Model is not supported: {model}")
headers = {
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:121.0) Gecko/20100101 Firefox/121.0",

12
g4f/Provider/npm/node_modules/.package-lock.json generated vendored Normal file
View File

@@ -0,0 +1,12 @@
{
"name": "npm",
"lockfileVersion": 2,
"requires": true,
"packages": {
"node_modules/crypto-js": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.2.0.tgz",
"integrity": "sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q=="
}
}
}

275
g4f/Provider/npm/node_modules/crypto-js/README.md generated vendored Normal file
View File

@@ -0,0 +1,275 @@
# crypto-js
JavaScript library of crypto standards.
## Discontinued
Active development of CryptoJS has been discontinued. This library is no longer maintained.
Nowadays, NodeJS and modern browsers have a native `Crypto` module. The latest version of CryptoJS already uses the native Crypto module for random number generation, since `Math.random()` is not crypto-safe. Further development of CryptoJS would result in it only being a wrapper of native Crypto. Therefore, development and maintenance have been discontinued; it is time to go for the native `crypto` module.
## Node.js (Install)
Requirements:
- Node.js
- npm (Node.js package manager)
```bash
npm install crypto-js
```
### Usage
ES6 import for typical API call signing use case:
```javascript
import sha256 from 'crypto-js/sha256';
import hmacSHA512 from 'crypto-js/hmac-sha512';
import Base64 from 'crypto-js/enc-base64';
const message, nonce, path, privateKey; // ...
const hashDigest = sha256(nonce + message);
const hmacDigest = Base64.stringify(hmacSHA512(path + hashDigest, privateKey));
```
Modular include:
```javascript
var AES = require("crypto-js/aes");
var SHA256 = require("crypto-js/sha256");
...
console.log(SHA256("Message"));
```
Including all libraries, for access to extra methods:
```javascript
var CryptoJS = require("crypto-js");
console.log(CryptoJS.HmacSHA1("Message", "Key"));
```
## Client (browser)
Requirements:
- Node.js
- Bower (package manager for frontend)
```bash
bower install crypto-js
```
### Usage
Modular include:
```javascript
require.config({
packages: [
{
name: 'crypto-js',
location: 'path-to/bower_components/crypto-js',
main: 'index'
}
]
});
require(["crypto-js/aes", "crypto-js/sha256"], function (AES, SHA256) {
console.log(SHA256("Message"));
});
```
Including all libraries, for access to extra methods:
```javascript
// Above-mentioned will work or use this simple form
require.config({
paths: {
'crypto-js': 'path-to/bower_components/crypto-js/crypto-js'
}
});
require(["crypto-js"], function (CryptoJS) {
console.log(CryptoJS.HmacSHA1("Message", "Key"));
});
```
### Usage without RequireJS
```html
<script type="text/javascript" src="path-to/bower_components/crypto-js/crypto-js.js"></script>
<script type="text/javascript">
var encrypted = CryptoJS.AES(...);
var encrypted = CryptoJS.SHA256(...);
</script>
```
## API
See: https://cryptojs.gitbook.io/docs/
### AES Encryption
#### Plain text encryption
```javascript
var CryptoJS = require("crypto-js");
// Encrypt
var ciphertext = CryptoJS.AES.encrypt('my message', 'secret key 123').toString();
// Decrypt
var bytes = CryptoJS.AES.decrypt(ciphertext, 'secret key 123');
var originalText = bytes.toString(CryptoJS.enc.Utf8);
console.log(originalText); // 'my message'
```
#### Object encryption
```javascript
var CryptoJS = require("crypto-js");
var data = [{id: 1}, {id: 2}]
// Encrypt
var ciphertext = CryptoJS.AES.encrypt(JSON.stringify(data), 'secret key 123').toString();
// Decrypt
var bytes = CryptoJS.AES.decrypt(ciphertext, 'secret key 123');
var decryptedData = JSON.parse(bytes.toString(CryptoJS.enc.Utf8));
console.log(decryptedData); // [{id: 1}, {id: 2}]
```
### List of modules
- ```crypto-js/core```
- ```crypto-js/x64-core```
- ```crypto-js/lib-typedarrays```
---
- ```crypto-js/md5```
- ```crypto-js/sha1```
- ```crypto-js/sha256```
- ```crypto-js/sha224```
- ```crypto-js/sha512```
- ```crypto-js/sha384```
- ```crypto-js/sha3```
- ```crypto-js/ripemd160```
---
- ```crypto-js/hmac-md5```
- ```crypto-js/hmac-sha1```
- ```crypto-js/hmac-sha256```
- ```crypto-js/hmac-sha224```
- ```crypto-js/hmac-sha512```
- ```crypto-js/hmac-sha384```
- ```crypto-js/hmac-sha3```
- ```crypto-js/hmac-ripemd160```
---
- ```crypto-js/pbkdf2```
---
- ```crypto-js/aes```
- ```crypto-js/tripledes```
- ```crypto-js/rc4```
- ```crypto-js/rabbit```
- ```crypto-js/rabbit-legacy```
- ```crypto-js/evpkdf```
---
- ```crypto-js/format-openssl```
- ```crypto-js/format-hex```
---
- ```crypto-js/enc-latin1```
- ```crypto-js/enc-utf8```
- ```crypto-js/enc-hex```
- ```crypto-js/enc-utf16```
- ```crypto-js/enc-base64```
---
- ```crypto-js/mode-cfb```
- ```crypto-js/mode-ctr```
- ```crypto-js/mode-ctr-gladman```
- ```crypto-js/mode-ofb```
- ```crypto-js/mode-ecb```
---
- ```crypto-js/pad-pkcs7```
- ```crypto-js/pad-ansix923```
- ```crypto-js/pad-iso10126```
- ```crypto-js/pad-iso97971```
- ```crypto-js/pad-zeropadding```
- ```crypto-js/pad-nopadding```
## Release notes
### 4.2.0
Change the default hash algorithm and iteration count for PBKDF2 to prevent weak security caused by the default configuration.
Custom KDF Hasher
Blowfish support
### 4.1.1
Fix module order in bundled release.
Include the browser field in the released package.json.
### 4.1.0
Added url safe variant of base64 encoding. [357](https://github.com/brix/crypto-js/pull/357)
Avoid webpack to add crypto-browser package. [364](https://github.com/brix/crypto-js/pull/364)
### 4.0.0
This is an update including breaking changes for some environments.
In this version `Math.random()` has been replaced by the random methods of the native crypto module.
For this reason CryptoJS might not run in some JavaScript environments without native crypto module. Such as IE 10 or before or React Native.
### 3.3.0
Rollback, `3.3.0` is the same as `3.1.9-1`.
The move of using native secure crypto module will be shifted to a new `4.x.x` version. As it is a breaking change the impact is too big for a minor release.
### 3.2.1
The usage of the native crypto module has been fixed. The import and access of the native crypto module has been improved.
### 3.2.0
In this version `Math.random()` has been replaced by the random methods of the native crypto module.
For this reason CryptoJS might not run in some JavaScript environments without a native crypto module, such as IE 10 or before.
If it's absolutely required to run CryptoJS in such an environment, stay with a `3.1.x` version. Encrypting and decrypting stay compatible. But keep in mind `3.1.x` versions still use `Math.random()`, which is cryptographically not secure, as it's not random enough.
This version came along with `CRITICAL` `BUG`.
DO NOT USE THIS VERSION! Please, go for a newer version!
### 3.1.x
The `3.1.x` are based on the original CryptoJS, wrapped in CommonJS modules.

6657
g4f/Provider/npm/node_modules/crypto-js/crypto-js.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

24
g4f/Provider/npm/package-lock.json generated Normal file
View File

@@ -0,0 +1,24 @@
{
"name": "npm",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"crypto-js": "^4.2.0"
}
},
"node_modules/crypto-js": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.2.0.tgz",
"integrity": "sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q=="
}
},
"dependencies": {
"crypto-js": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.2.0.tgz",
"integrity": "sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q=="
}
}
}

View File

@@ -0,0 +1,5 @@
{
"dependencies": {
"crypto-js": "^4.2.0"
}
}

View File

@@ -502,7 +502,7 @@ select {
backdrop-filter: blur(20px);
cursor: pointer;
background-color: var(--blur-bg);
background-color: var(--colour-1);
border: 1px solid var(--blur-border);
color: var(--colour-3);
display: block;

View File

@@ -3,9 +3,9 @@ from dataclasses import dataclass
from .Provider import RetryProvider, ProviderType
from .Provider import (
Chatgpt4Online,
PerplexityLabs,
ChatgptDemoAi,
GeminiProChat,
PerplexityAi,
ChatgptNext,
HuggingChat,
ChatgptDemo,
@@ -26,7 +26,6 @@ from .Provider import (
Bard,
Bing,
You,
H2o,
Pi,
)
@@ -115,20 +114,26 @@ llama2_13b = Model(
llama2_70b = Model(
name = "meta-llama/Llama-2-70b-chat-hf",
base_provider = "huggingface",
best_provider = RetryProvider([Llama2, DeepInfra, HuggingChat, PerplexityAi])
best_provider = RetryProvider([Llama2, DeepInfra, HuggingChat, PerplexityLabs])
)
codellama_34b_instruct = Model(
name = "codellama/CodeLlama-34b-Instruct-hf",
base_provider = "huggingface",
best_provider = RetryProvider([HuggingChat, PerplexityLabs])
)
# Mistal
mixtral_8x7b = Model(
name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
base_provider = "huggingface",
best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityAi])
best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityLabs])
)
mistral_7b = Model(
name = "mistralai/Mistral-7B-Instruct-v0.1",
base_provider = "huggingface",
best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityAi])
best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityLabs])
)
# Dolphin
@@ -146,92 +151,23 @@ openchat_35 = Model(
)
# Bard
palm = Model(
bard = palm = Model(
name = 'palm',
base_provider = 'google',
best_provider = Bard)
# H2o
falcon_7b = Model(
name = 'h2oai/h2ogpt-gm-oasst1-en-2048-falcon-7b-v3',
base_provider = 'huggingface',
best_provider = H2o)
falcon_40b = Model(
name = 'h2oai/h2ogpt-gm-oasst1-en-2048-falcon-40b-v1',
base_provider = 'huggingface',
best_provider = H2o)
llama_13b = Model(
name = 'h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-13b',
base_provider = 'huggingface',
best_provider = H2o)
# Vercel
claude_instant_v1 = Model(
name = 'claude-instant-v1',
base_provider = 'anthropic',
best_provider = Vercel)
claude_v1 = Model(
name = 'claude-v1',
base_provider = 'anthropic',
best_provider = Vercel)
best_provider = Bard
)
claude_v2 = Model(
name = 'claude-v2',
base_provider = 'anthropic',
best_provider = RetryProvider([FreeChatgpt, Vercel]))
command_light_nightly = Model(
name = 'command-light-nightly',
base_provider = 'cohere',
best_provider = Vercel)
command_nightly = Model(
name = 'command-nightly',
base_provider = 'cohere',
best_provider = Vercel)
gpt_neox_20b = Model(
name = 'EleutherAI/gpt-neox-20b',
base_provider = 'huggingface',
best_provider = Vercel)
oasst_sft_1_pythia_12b = Model(
name = 'OpenAssistant/oasst-sft-1-pythia-12b',
base_provider = 'huggingface',
best_provider = Vercel)
oasst_sft_4_pythia_12b_epoch_35 = Model(
name = 'OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5',
base_provider = 'huggingface',
best_provider = Vercel)
santacoder = Model(
name = 'bigcode/santacoder',
base_provider = 'huggingface',
best_provider = Vercel)
bloom = Model(
name = 'bigscience/bloom',
base_provider = 'huggingface',
best_provider = Vercel)
flan_t5_xxl = Model(
name = 'google/flan-t5-xxl',
base_provider = 'huggingface',
best_provider = Vercel)
code_davinci_002 = Model(
name = 'code-davinci-002',
base_provider = 'openai',
best_provider = Vercel)
best_provider = RetryProvider([FreeChatgpt, Vercel])
)
gpt_35_turbo_16k = Model(
name = 'gpt-3.5-turbo-16k',
base_provider = 'openai',
best_provider = gpt_35_long.best_provider)
best_provider = gpt_35_long.best_provider
)
gpt_35_turbo_16k_0613 = Model(
name = 'gpt-3.5-turbo-16k-0613',
@@ -269,46 +205,6 @@ gemini_pro = Model(
best_provider = RetryProvider([FreeChatgpt, GeminiProChat])
)
text_ada_001 = Model(
name = 'text-ada-001',
base_provider = 'openai',
best_provider = Vercel)
text_babbage_001 = Model(
name = 'text-babbage-001',
base_provider = 'openai',
best_provider = Vercel)
text_curie_001 = Model(
name = 'text-curie-001',
base_provider = 'openai',
best_provider = Vercel)
text_davinci_002 = Model(
name = 'text-davinci-002',
base_provider = 'openai',
best_provider = Vercel)
text_davinci_003 = Model(
name = 'text-davinci-003',
base_provider = 'openai',
best_provider = Vercel)
llama13b_v2_chat = Model(
name = 'replicate:a16z-infra/llama13b-v2-chat',
base_provider = 'replicate',
best_provider = Vercel)
llama7b_v2_chat = Model(
name = 'replicate:a16z-infra/llama7b-v2-chat',
base_provider = 'replicate',
best_provider = Vercel)
llama70b_v2_chat = Model(
name = 'replicate/llama70b-v2-chat',
base_provider = 'replicate',
best_provider = Vercel)
pi = Model(
name = 'pi',
base_provider = 'inflection',
@@ -342,55 +238,15 @@ class ModelUtils:
'llama2-7b' : llama2_7b,
'llama2-13b': llama2_13b,
'llama2-70b': llama2_70b,
'codellama-34b-instruct': codellama_34b_instruct,
# Mistral
'mixtral-8x7b': mixtral_8x7b,
'mistral-7b': mistral_7b,
# Dolphin
'dolphin-mixtral-8x7b': dolphin_mixtral_8x7b,
# OpenChat
'openchat_3.5': openchat_35,
# Gemini Pro
'gemini-pro': gemini_pro,
# Bard
'palm2' : palm,
'palm' : palm,
'google' : palm,
'google-bard' : palm,
'google-palm' : palm,
'bard' : palm,
# H2o
'falcon-40b' : falcon_40b,
'falcon-7b' : falcon_7b,
'llama-13b' : llama_13b,
# Vercel
#'claude-instant-v1' : claude_instant_v1,
#'claude-v1' : claude_v1,
#'claude-v2' : claude_v2,
'command-nightly' : command_nightly,
'gpt-neox-20b' : gpt_neox_20b,
'santacoder' : santacoder,
'bloom' : bloom,
'flan-t5-xxl' : flan_t5_xxl,
'code-davinci-002' : code_davinci_002,
'text-ada-001' : text_ada_001,
'text-babbage-001' : text_babbage_001,
'text-curie-001' : text_curie_001,
'text-davinci-002' : text_davinci_002,
'text-davinci-003' : text_davinci_003,
'llama70b-v2-chat' : llama70b_v2_chat,
'llama13b-v2-chat' : llama13b_v2_chat,
'llama7b-v2-chat' : llama7b_v2_chat,
'oasst-sft-1-pythia-12b' : oasst_sft_1_pythia_12b,
'oasst-sft-4-pythia-12b-epoch-3.5' : oasst_sft_4_pythia_12b_epoch_35,
'command-light-nightly' : command_light_nightly,
'bard': bard,
'claude-v2': claude_v2,
'pi': pi
}