From 7e9e67275e08fc4588318dfb60b9c1df0e9d147d Mon Sep 17 00:00:00 2001 From: micheal Date: Wed, 8 Mar 2023 12:24:31 -1100 Subject: [PATCH] Release 5.1.0, New function: OpenAI proxy --- code/default/launcher/web_control.py | 10 +- code/default/version.txt | 2 +- .../lang/zh_CN/LC_MESSAGES/messages.po | 6 + code/default/x_tunnel/local/global_var.py | 2 + code/default/x_tunnel/local/openai_handler.py | 92 ++++++++++ code/default/x_tunnel/local/proxy_handler.py | 13 +- code/default/x_tunnel/local/proxy_session.py | 2 + code/default/x_tunnel/local/web_control.py | 7 +- code/default/x_tunnel/web_ui/chatgpt.html | 167 ++++++++++++++++++ code/default/x_tunnel/web_ui/config.html | 13 ++ code/default/x_tunnel/web_ui/menu.json | 12 +- 11 files changed, 314 insertions(+), 12 deletions(-) create mode 100644 code/default/x_tunnel/local/openai_handler.py create mode 100644 code/default/x_tunnel/web_ui/chatgpt.html diff --git a/code/default/launcher/web_control.py b/code/default/launcher/web_control.py index 45d9213992..243cbf2e69 100644 --- a/code/default/launcher/web_control.py +++ b/code/default/launcher/web_control.py @@ -110,6 +110,7 @@ def do_POST(self): self.postvars = json.loads(content) else: self.postvars = {} + content = b'' except Exception as e: xlog.exception("do_POST %s except:%r", self.path, e) self.postvars = {} @@ -136,8 +137,13 @@ def do_POST(self): elif url_path == "/set_proxy_applist": return self.set_proxy_applist() - self.send_not_found() - xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path) + elif url_path.startswith("/openai/"): + return module_init.proc_handler["x_tunnel"]["imp"].local.openai_handler.handle_openai( + "POST", url_path, self.headers, content, self.connection) + + else: + self.send_not_found() + xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path) def do_GET(self): # self.headers = utils.to_str(self.headers) diff --git a/code/default/version.txt b/code/default/version.txt index 733db2686a..acf69b48b8 100644 --- a/code/default/version.txt +++ b/code/default/version.txt @@ -1 +1 @@ -5.0.8 \ No newline at end of file +5.1.0 \ No newline at end of file diff --git a/code/default/x_tunnel/lang/zh_CN/LC_MESSAGES/messages.po b/code/default/x_tunnel/lang/zh_CN/LC_MESSAGES/messages.po index e67656ff6b..5892195132 100644 --- a/code/default/x_tunnel/lang/zh_CN/LC_MESSAGES/messages.po +++ b/code/default/x_tunnel/lang/zh_CN/LC_MESSAGES/messages.po @@ -437,3 +437,9 @@ msgstr "指导" msgid " to troubleshoot." msgstr "去解决。" + +msgid "ChatGPT Manual" +msgstr "ChatGPT指南 " + +msgid "https://github.com/XX-net/XX-Net/wiki/ChatGPT_EN" +msgstr "https://github.com/XX-net/XX-Net/wiki/ChatGPT_CN " \ No newline at end of file diff --git a/code/default/x_tunnel/local/global_var.py b/code/default/x_tunnel/local/global_var.py index 9678110c13..66386faa89 100644 --- a/code/default/x_tunnel/local/global_var.py +++ b/code/default/x_tunnel/local/global_var.py @@ -29,6 +29,8 @@ server_port = 0 selectable = [] balance = 0 +openai_balance = 0 +openai_proxies = [] tls_relays = {} stat = { diff --git a/code/default/x_tunnel/local/openai_handler.py b/code/default/x_tunnel/local/openai_handler.py new file mode 100644 index 0000000000..8782e0ae9f --- /dev/null +++ b/code/default/x_tunnel/local/openai_handler.py @@ -0,0 +1,92 @@ +import random +import json +import base64 +import time +import zlib + +import utils + +from . import global_var as g +from . import front_dispatcher +from . 
import proxy_session + +from xlog import getLogger +xlog = getLogger("x_tunnel") + +openai_chat_token_price = 0.000002 +host = None + +gzip_decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS) + + +def get_auth_str(): + info = { + "login_account": g.config.login_account, + "login_password": g.config.login_password + } + json_str = utils.to_bytes(json.dumps(info)) + token = base64.b64encode(json_str) + return "Bearer " + utils.to_str(token) + + +auth_str = None + + +def get_openai_proxy(get_next_one=False): + global host + if get_next_one or not host: + + if not (g.config.login_account and g.config.login_password): + return False + + for _ in range(0, 3): + res, reason = proxy_session.request_balance(g.config.login_account, g.config.login_password) + if not res: + xlog.warn("x-tunnel request_balance fail when create_conn:%s", reason) + time.sleep(1) + + if not g.openai_proxies: + return None + + host = random.choice(g.openai_proxies) + return host + + +def handle_openai(method, path, headers, req_body, sock): + global auth_str + if not auth_str: + auth_str = get_auth_str() + + host = get_openai_proxy() + if not host: + return sock.send(b'HTTP/1.1 401 Fail\r\n\r\n') + + path = utils.to_str(path[7:]) + headers = utils.to_str(headers) + headers["Authorization"] = auth_str + del headers["Host"] + try: + del headers["Accept-Encoding"] + except: + pass + content, status, response = front_dispatcher.request(method, host, path=path, headers=headers, data=req_body) + + if status == 200: + try: + if response.headers.get(b"Content-Encoding") == b"gzip": + data = gzip_decompressor.decompress(content) + else: + data = content + + dat = json.loads(data) + total_tokens = dat["usage"]["total_tokens"] + cost = total_tokens * openai_chat_token_price + g.openai_balance -= cost + except Exception as e1: + xlog.exception("cal tokens err:%r", e1) + + sock.send(b'HTTP/1.1 %d OK\r\n' % (status)) + for key, value in response.headers.items(): + sock.send(b'%s: %s\r\n' % (key, value)) + sock.send(b'\r\n') + sock.send(content) diff --git a/code/default/x_tunnel/local/proxy_handler.py b/code/default/x_tunnel/local/proxy_handler.py index 20e21b9336..5fe6e986d3 100644 --- a/code/default/x_tunnel/local/proxy_handler.py +++ b/code/default/x_tunnel/local/proxy_handler.py @@ -13,6 +13,7 @@ from . import global_var as g from . import proxy_session +from . import openai_handler def netloc_to_host_port(netloc, default_port=80): @@ -351,16 +352,22 @@ def http_handler(self, first_char): lines = header_block.split(b"\r\n") path = url host = None + headers = {} for line in lines: - key, _, value = line.rpartition(b":") - if key.lower == b"host": + key, _, value = line.partition(b":") + headers[key] = value + if key.lower() == b"host": host, port = netloc_to_host_port(value) - break if host is None: xlog.warn("http proxy host can't parsed. 
%s %s", req_line, header_block) self.connection.send(b'HTTP/1.1 500 Fail\r\n\r\n') return + if url.startswith(b"/openai/"): + content_length = int(headers.get(b"Content-Length", 0)) + req_body = self.read_bytes(content_length) + return openai_handler.handle_openai(method, url, headers, req_body, self.connection) + sock = self.connection conn_id = proxy_session.create_conn(sock, host, port) if not conn_id: diff --git a/code/default/x_tunnel/local/proxy_session.py b/code/default/x_tunnel/local/proxy_session.py index e0a4ee1049..55888c32fe 100644 --- a/code/default/x_tunnel/local/proxy_session.py +++ b/code/default/x_tunnel/local/proxy_session.py @@ -976,6 +976,8 @@ def request_balance(account=None, password=None, is_register=False, update_serve g.promote_code = utils.to_str(info["promote_code"]) g.promoter = info["promoter"] g.balance = info["balance"] + g.openai_balance = info["openai_balance"] + g.openai_proxies = info["openai_proxies"] g.tls_relays = info["tls_relays"] if g.tls_relay_front and g.tls_relays.get("ips"): g.tls_relay_front.set_ips(g.tls_relays["ips"]) diff --git a/code/default/x_tunnel/local/web_control.py b/code/default/x_tunnel/local/web_control.py index 1042f5d0ee..a0ec1f5219 100644 --- a/code/default/x_tunnel/local/web_control.py +++ b/code/default/x_tunnel/local/web_control.py @@ -233,6 +233,7 @@ def req_info_handler(self): "paypal_button_id": g.paypal_button_id, "plans": g.plans, "balance": "%f" % (g.balance), + "openai_balance": float(g.openai_balance), "quota": "%d" % (g.quota), "quota_list": g.quota_list, "traffic": g.session.traffic, @@ -282,7 +283,8 @@ def req_token_login_handler(self): g.config.save() res_arr = { "res": "success", - "balance": float(g.balance) + "balance": float(g.balance), + "openai_balance": float(g.openai_balance) } g.last_refresh_time = time.time() g.session.start() @@ -335,7 +337,8 @@ def req_login_handler(self): g.config.save() res_arr = { "res": "success", - "balance": float(g.balance) + "balance": float(g.balance), + "openai_balance": float(g.openai_balance) } g.last_refresh_time = time.time() g.session.start() diff --git a/code/default/x_tunnel/web_ui/chatgpt.html b/code/default/x_tunnel/web_ui/chatgpt.html new file mode 100644 index 0000000000..13515db146 --- /dev/null +++ b/code/default/x_tunnel/web_ui/chatgpt.html @@ -0,0 +1,167 @@ +
[new file chatgpt.html (167 lines, the ChatGPT chat page): markup not recovered; only the {{ _( "Help" ) }} label survives]
\ No newline at end of file
diff --git a/code/default/x_tunnel/web_ui/config.html b/code/default/x_tunnel/web_ui/config.html
index f252162b45..0ecc8ed25c 100644
--- a/code/default/x_tunnel/web_ui/config.html
+++ b/code/default/x_tunnel/web_ui/config.html
@@ -135,6 +135,18 @@

[config.html hunk markup not recovered; surviving text: {{ _("Register") }} and "N/a". The 12 added lines apparently insert a ChatGPT/OpenAI balance field (the "openai_balance" element updated by the script change below), shown as "N/a" until the balance is fetched.]
@@ -534,6 +546,7 @@
 {{ _("Transfer Bandwidth") }}
         window.promote_code = result['promote_code'];
         $('#promoter').html(result['promoter']);
         $('#credit').html(getCreditForHumanReading(result['balance']));
+        $('#openai_balance').html("$" + parseFloat(result['openai_balance']).toFixed(6));
         $('#bandwidth').html(getBandwidthForHumanReading(result['quota']));
diff --git a/code/default/x_tunnel/web_ui/menu.json b/code/default/x_tunnel/web_ui/menu.json
index cc709f5fdc..2243bebaab 100644
--- a/code/default/x_tunnel/web_ui/menu.json
+++ b/code/default/x_tunnel/web_ui/menu.json
@@ -6,19 +6,23 @@
         "title": "{{ _( "Configuration" ) }}",
         "url": "config"
     },
-    "2":{
+    "3":{
+        "title": "{{ _( "ChatGPT" ) }}",
+        "url": "chatgpt"
+    },
+    "20":{
         "title": "{{ _( "Status" ) }}",
         "url": "status"
     },
-    "3":{
+    "30":{
         "title": "{{ _("Log") }}",
         "url": "logging"
     },
-    "4":{
+    "40":{
         "title": "{{ _("Cloudflare") }}{{ _("Front") }}{{ _("Log") }}",
         "url": "cloudflare_front_logging"
     },
-    "7":{
+    "70":{
         "title": "{{ _("TLS-Relay") }}{{ _("Front") }}{{ _("Log") }}",
         "url": "tls_relay_front_logging"
     }
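
Usage note: a minimal client sketch, not part of the patch, showing how the new /openai/ route could be exercised once x_tunnel is running. The local port 8085, the API path and the model name are assumptions for illustration; what the patch does guarantee is that handle_openai() strips the "/openai" prefix, injects a Bearer token built from the configured account, relays the request to one of g.openai_proxies, and deducts usage.total_tokens * 0.000002 (USD) from g.openai_balance.

    # Illustrative sketch only; the local port, API path and model name are assumptions.
    import json
    import urllib.request

    body = json.dumps({
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Hello"}],
    }).encode()

    req = urllib.request.Request(
        "http://127.0.0.1:8085/openai/v1/chat/completions",
        data=body,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    # No API key is set locally: handle_openai() adds the Authorization header
    # from the x_tunnel login_account/login_password before forwarding upstream.
    with urllib.request.urlopen(req) as resp:
        reply = json.loads(resp.read())
    print(reply["choices"][0]["message"]["content"])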