From 65b5227500f4204c69ca39670a6e4cab89314617 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Fri, 3 Nov 2023 21:36:50 +0100
Subject: [PATCH 1/2] Fix NotImplementedError on Win

---
 g4f/Provider/needs_auth/OpenaiChat.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index fb81face..119626e5 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import uuid, json, time, os
 import tempfile, shutil, asyncio
+import sys, subprocess
 
 from ..base_provider import AsyncGeneratorProvider
 from ..helper import get_browser, get_cookies, format_prompt, get_event_loop
@@ -174,6 +175,15 @@ fun.getToken(config).then(token => {
     tmp.write(source.encode())
     tmp.close()
     try:
+        if sys.platform == 'win32':
+            p = subprocess.Popen(
+                [node, tmp.name],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE
+            )
+            if p.returncode == 0:
+                return p.stdout.read().decode()
+            raise RuntimeError(f"Exec Error: {p.stderr.read().decode()}")
         p = await asyncio.create_subprocess_exec(
             node, tmp.name,
             stderr=asyncio.subprocess.PIPE,

From dfefd22aa18f3a8efc32748300ee7dbe8fbb450f Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Fri, 3 Nov 2023 22:24:35 +0100
Subject: [PATCH 2/2] Add exec_js helper def

---
 g4f/Provider/needs_auth/OpenaiChat.py | 46 +++++++++++++++------------
 1 file changed, 25 insertions(+), 21 deletions(-)

diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 119626e5..bd44628f 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -145,11 +145,6 @@ class OpenaiChat(AsyncGeneratorProvider):
         return f"g4f.provider.{cls.__name__} supports: ({param})"
 
 async def get_arkose_token(proxy: str = None) -> str:
-    node = shutil.which("node")
-    if not node:
-        if debug.logging:
-            print('OpenaiChat: "node" not found')
-        return
     dir = os.path.dirname(os.path.dirname(__file__))
     include = f'{dir}/npm/node_modules/funcaptcha'
     config = {
@@ -175,23 +170,32 @@ fun.getToken(config).then(token => {
     tmp.write(source.encode())
     tmp.close()
     try:
-        if sys.platform == 'win32':
-            p = subprocess.Popen(
-                [node, tmp.name],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE
-            )
-            if p.returncode == 0:
-                return p.stdout.read().decode()
-            raise RuntimeError(f"Exec Error: {p.stderr.read().decode()}")
-        p = await asyncio.create_subprocess_exec(
-            node, tmp.name,
-            stderr=asyncio.subprocess.PIPE,
-            stdout=asyncio.subprocess.PIPE
+        return await exec_js(tmp.name)
+    finally:
+        os.unlink(tmp.name)
+
+async def exec_js(file: str) -> str:
+    node = shutil.which("node")
+    if not node:
+        if debug.logging:
+            print('OpenaiChat: "node" not found')
+        return
+    if sys.platform == 'win32':
+        p = subprocess.Popen(
+            [node, file],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE
         )
-        stdout, stderr = await p.communicate()
+        stdout, stderr = p.communicate()
         if p.returncode == 0:
             return stdout.decode()
         raise RuntimeError(f"Exec Error: {stderr.decode()}")
-    finally:
-        os.unlink(tmp.name)
\ No newline at end of file
+    p = await asyncio.create_subprocess_exec(
+        node, file,
+        stderr=asyncio.subprocess.PIPE,
+        stdout=asyncio.subprocess.PIPE
+    )
+    stdout, stderr = await p.communicate()
+    if p.returncode == 0:
+        return stdout.decode()
+    raise RuntimeError(f"Exec Error: {stderr.decode()}")
\ No newline at end of file
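
Background on the fix: asyncio.create_subprocess_exec raises NotImplementedError on
Windows when the running event loop is a SelectorEventLoop, which does not support
subprocesses, so these patches fall back to a blocking subprocess.Popen call when
sys.platform == 'win32'. A minimal sketch of an alternative, assuming the same Node.js
invocation as exec_js, runs the blocking call in a worker thread so the event loop is
not stalled; run_node_js below is a hypothetical helper and is not part of this patch
series:

    import asyncio
    import shutil
    import subprocess

    async def run_node_js(file: str) -> str:
        # Hypothetical alternative to exec_js: same Node.js call, but the blocking
        # subprocess.run happens in a worker thread, so it also works on Windows
        # selector event loops without blocking the loop.
        node = shutil.which("node")
        if not node:
            raise RuntimeError('"node" not found')

        def run_blocking() -> str:
            p = subprocess.run(
                [node, file],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            )
            if p.returncode == 0:
                return p.stdout.decode()
            raise RuntimeError(f"Exec Error: {p.stderr.decode()}")

        # asyncio.to_thread requires Python 3.9+; on older versions use
        # loop.run_in_executor(None, run_blocking) instead.
        return await asyncio.to_thread(run_blocking)

Compared with the merged exec_js, this keeps a single code path for all platforms at
the cost of one worker thread per call.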