Add copilot conversation mode

Heiner Lohaus 2024-03-16 14:40:06 +01:00
parent 905ced06bd
commit f861f322aa
2 changed files with 40 additions and 25 deletions

View File

@@ -26,6 +26,7 @@ class Tones:
     creative = "Creative"
     balanced = "Balanced"
     precise = "Precise"
+    copilot = "Balanced"
 
 class Bing(AsyncGeneratorProvider, ProviderModelMixin):
     """
@@ -35,10 +36,8 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
     working = True
     supports_message_history = True
     supports_gpt_4 = True
-    default_model = Tones.balanced
-    models = [
-        getattr(Tones, key) for key in dir(Tones) if not key.startswith("__")
-    ]
+    default_model = "balanced"
+    models = [key for key in Tones.__dict__ if not key.startswith("__")]
 
     @classmethod
     def create_async_generator(
@@ -71,7 +70,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
         context = create_context(messages[:-1]) if len(messages) > 1 else None
         if tone is None:
             tone = tone if model.startswith("gpt-4") else model
-        tone = cls.get_model(tone)
+        tone = cls.get_model("" if tone is None else tone.lower())
         gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
 
         return stream_generate(
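Note (illustration, not part of the commit): after this change the selectable model names are the lowercase attribute names of Tones, and getattr(Tones, name) recovers the value Bing actually expects, so the new "copilot" model rides on the "Balanced" tone. A minimal sketch using only the class shown above:

    class Tones:
        creative = "Creative"
        balanced = "Balanced"
        precise = "Precise"
        copilot = "Balanced"  # copilot reuses Bing's Balanced tone value

    # Lowercase attribute names become the model keys exposed by the provider
    models = [key for key in Tones.__dict__ if not key.startswith("__")]
    print(models)                     # ['creative', 'balanced', 'precise', 'copilot']
    print(getattr(Tones, "copilot"))  # 'Balanced' -- the value later sent in the request payload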
@@ -136,31 +135,32 @@ class Defaults:
     ]
 
     sliceIds = {
-        "Balanced": [
+        "balanced": [
             "supllmnfe","archnewtf",
             "stpstream", "stpsig", "vnextvoicecf", "scmcbase", "cmcpupsalltf", "sydtransctrl",
             "thdnsrch", "220dcl1s0", "0215wcrwips0", "0305hrthrots0", "0130gpt4t",
             "bingfc", "0225unsticky1", "0228scss0",
             "defquerycf", "defcontrol", "3022tphpv"
         ],
-        "Creative": [
+        "creative": [
             "bgstream", "fltltst2c",
             "stpstream", "stpsig", "vnextvoicecf", "cmcpupsalltf", "sydtransctrl",
             "0301techgnd", "220dcl1bt15", "0215wcrwip", "0305hrthrot", "0130gpt4t",
             "bingfccf", "0225unsticky1", "0228scss0",
             "3022tpvs0"
         ],
-        "Precise": [
+        "precise": [
             "bgstream", "fltltst2c",
             "stpstream", "stpsig", "vnextvoicecf", "cmcpupsalltf", "sydtransctrl",
             "0301techgnd", "220dcl1bt15", "0215wcrwip", "0305hrthrot", "0130gpt4t",
             "bingfccf", "0225unsticky1", "0228scss0",
             "defquerycf", "3022tpvs0"
         ],
+        "copilot": []
     }
 
     optionsSets = {
-        "Balanced": [
+        "balanced": [
             "nlu_direct_response_filter", "deepleo",
             "disable_emoji_spoken_text", "responsible_ai_policy_235",
             "enablemm", "dv3sugg", "autosave",
@@ -168,7 +168,7 @@ class Defaults:
             "galileo", "saharagenconv5", "gldcl1p",
             "gpt4tmncnp"
         ],
-        "Creative": [
+        "creative": [
             "nlu_direct_response_filter", "deepleo",
             "disable_emoji_spoken_text", "responsible_ai_policy_235",
             "enablemm", "dv3sugg",
@@ -176,7 +176,7 @@ class Defaults:
             "h3imaginative", "techinstgnd", "hourthrot", "clgalileo", "gencontentv3",
             "gpt4tmncnp"
         ],
-        "Precise": [
+        "precise": [
             "nlu_direct_response_filter", "deepleo",
             "disable_emoji_spoken_text", "responsible_ai_policy_235",
             "enablemm", "dv3sugg",
@@ -184,6 +184,13 @@ class Defaults:
             "h3precise", "techinstgnd", "hourthrot", "techinstgnd", "hourthrot",
             "clgalileo", "gencontentv3"
         ],
+        "copilot": [
+            "nlu_direct_response_filter", "deepleo",
+            "disable_emoji_spoken_text", "responsible_ai_policy_235",
+            "enablemm", "dv3sugg",
+            "iyxapbing", "iycapbing",
+            "h3precise", "clgalileo", "gencontentv3", "prjupy"
+        ],
     }
 
     # Default location settings
@@ -264,7 +271,7 @@ def create_message(
         "allowedMessageTypes": Defaults.allowedMessageTypes,
         "sliceIds": Defaults.sliceIds[tone],
         "verbosity": "verbose",
-        "scenario": "SERP",
+        "scenario": "CopilotMicrosoftCom", # "SERP",
         "plugins": [{"id": "c310c353-b9f0-4d76-ab0d-1dd5e979cf68", "category": 1}] if web_search else [],
         "traceId": get_random_hex(40),
         "conversationHistoryOptionsSets": ["autosave","savemem","uprofupd","uprofgen"],
@@ -282,8 +289,7 @@ def create_message(
             "requestId": request_id,
             "messageId": request_id
         },
-        "tone": tone,
-        "extraExtensionParameters": {"gpt-creator-persona": {"personaId": "copilot"}},
+        "tone": getattr(Tones, tone),
         "spokenTextMode": "None",
         "conversationId": conversation.conversationId,
         "participant": {"id": conversation.clientId}
@@ -322,6 +328,7 @@ async def stream_generate(
     gpt4_turbo: bool = False,
     timeout: int = 900,
     conversation: Conversation = None,
+    return_conversation: bool = False,
     raise_apology: bool = False,
     max_retries: int = 5,
     sleep_retry: int = 15,
@@ -344,10 +351,15 @@
     async with ClientSession(
         timeout=ClientTimeout(total=timeout), connector=connector
     ) as session:
-        while conversation is None:
+        first = True
+        while first or conversation is None:
+            first = False
             do_read = True
             try:
-                conversation = await create_conversation(session, headers)
+                if conversation is None:
+                    conversation = await create_conversation(session, headers)
+                    if return_conversation:
+                        yield conversation
             except ResponseStatusError as e:
                 max_retries -= 1
                 if max_retries < 1:
@@ -358,7 +370,7 @@
                 await asyncio.sleep(sleep_retry)
                 continue
 
-        image_request = await upload_image(session, image, tone, headers) if image else None
+        image_request = await upload_image(session, image, getattr(Tones, tone), headers) if image else None
         async with session.ws_connect(
             'wss://sydney.bing.com/sydney/ChatHub',
             autoping=False,
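Note (illustration, not part of the commit): with return_conversation=True the generator yields the freshly created Conversation object before any response text, so a caller can keep it and pass it back through the conversation parameter to continue the same chat. A minimal consumer sketch; the leading prompt/tone arguments are assumed from the surrounding provider code and are not shown in this hunk:

    # assumes the surrounding Bing provider module (stream_generate, Conversation)
    async def demo(prompt: str):
        conversation = None
        async for item in stream_generate(prompt, "copilot", return_conversation=True):
            if isinstance(item, Conversation):
                conversation = item      # yielded first when return_conversation=True
            else:
                print(item, end="")      # normal response chunks
        # A follow-up call could reuse the same Bing conversation:
        # stream_generate(next_prompt, "copilot", conversation=conversation)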

View File

@@ -811,7 +811,7 @@ async function on_api() {
     register_settings_storage();
 
-    versions = await api('version')
+    versions = await api("version");
     document.title = 'g4f - ' + versions["version"];
     let text = "version ~ "
     if (versions["version"] != versions["latest_version"]) {
@@ -825,19 +825,19 @@
     models = await api("models");
     models.forEach((model) => {
-        let option = document.createElement('option');
+        let option = document.createElement("option");
         option.value = option.text = model;
         modelSelect.appendChild(option);
     });
 
-    providers = await api('providers')
+    providers = await api("providers")
     providers.forEach((provider) => {
-        let option = document.createElement('option');
+        let option = document.createElement("option");
         option.value = option.text = provider;
         providerSelect.appendChild(option);
     })
 
-    load_provider_models();
+    await load_provider_models(appStorage.getItem("provider"));
     load_settings_storage()
 }
@@ -965,8 +965,11 @@ async function read_response(response) {
     }
 }
 
-async function load_provider_models() {
-    const provider = providerSelect.options[providerSelect.selectedIndex].value;
+async function load_provider_models(providerIndex=null) {
+    if (!providerIndex) {
+        providerIndex = providerSelect.selectedIndex;
+    }
+    const provider = providerSelect.options[providerIndex].value;
     if (!provider) {
         return;
     }
@@ -986,7 +989,7 @@
         modelSelect.classList.remove("hidden");
     }
 };
-providerSelect.addEventListener("change", load_provider_models)
+providerSelect.addEventListener("change", () => load_provider_models());
 
 function save_storage() {
     let filename = new Date().toLocaleString()