Commit 7e9e672 (1 parent: 3960f27)
Release 5.1.0, New function: OpenAI proxy
Showing 11 changed files with 314 additions and 12 deletions.
@@ -1 +1 @@
-5.0.8
+5.1.0
@@ -0,0 +1,92 @@
import random
import json
import base64
import time
import zlib

import utils

from . import global_var as g
from . import front_dispatcher
from . import proxy_session

from xlog import getLogger
xlog = getLogger("x_tunnel")

# Price per chat token; used to deduct an estimated cost from the account
# balance after each successful completion.
openai_chat_token_price = 0.000002
host = None

gzip_decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)


def get_auth_str():
    # Pack the x_tunnel account credentials into a base64-encoded JSON blob
    # and present it as a Bearer token to the upstream OpenAI proxy.
    info = {
        "login_account": g.config.login_account,
        "login_password": g.config.login_password
    }
    json_str = utils.to_bytes(json.dumps(info))
    token = base64.b64encode(json_str)
    return "Bearer " + utils.to_str(token)


auth_str = None


def get_openai_proxy(get_next_one=False):
    global host
    if get_next_one or not host:

        if not (g.config.login_account and g.config.login_password):
            return False

        # Refresh the account info (including g.openai_proxies) via
        # request_balance; retry up to 3 times on failure.
        for _ in range(0, 3):
            res, reason = proxy_session.request_balance(g.config.login_account, g.config.login_password)
            if not res:
                xlog.warn("x-tunnel request_balance fail when create_conn:%s", reason)
                time.sleep(1)
            else:
                break

        if not g.openai_proxies:
            return None

        host = random.choice(g.openai_proxies)
    return host


def handle_openai(method, path, headers, req_body, sock):
    global auth_str
    if not auth_str:
        auth_str = get_auth_str()

    host = get_openai_proxy()
    if not host:
        return sock.send(b'HTTP/1.1 401 Fail\r\n\r\n')

    # Strip the "/openai" prefix so the upstream sees the plain OpenAI API path.
    path = utils.to_str(path[7:])
    headers = utils.to_str(headers)
    headers["Authorization"] = auth_str
    del headers["Host"]
    try:
        del headers["Accept-Encoding"]
    except:
        pass
    content, status, response = front_dispatcher.request(method, host, path=path, headers=headers, data=req_body)

    if status == 200:
        # Estimate the cost from the reported token usage and deduct it from
        # the cached balance; the raw (possibly gzipped) body is still
        # forwarded to the client untouched.
        try:
            if response.headers.get(b"Content-Encoding") == b"gzip":
                data = gzip_decompressor.decompress(content)
            else:
                data = content

            dat = json.loads(data)
            total_tokens = dat["usage"]["total_tokens"]
            cost = total_tokens * openai_chat_token_price
            g.openai_balance -= cost
        except Exception as e1:
            xlog.exception("cal tokens err:%r", e1)

    sock.send(b'HTTP/1.1 %d OK\r\n' % (status))
    for key, value in response.headers.items():
        sock.send(b'%s: %s\r\n' % (key, value))
    sock.send(b'\r\n')
    sock.send(content)
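
For reference, the Authorization value produced by get_auth_str() is simply the account credentials serialized to JSON and base64-encoded. A minimal round-trip sketch with made-up credentials (the proxy-side validation is not part of this commit):

import base64
import json

# Hypothetical credentials, standing in for g.config.login_account / login_password.
info = {"login_account": "alice@example.com", "login_password": "secret"}
auth_str = "Bearer " + base64.b64encode(json.dumps(info).encode()).decode()

# Reversing the encoding recovers the original credentials.
recovered = json.loads(base64.b64decode(auth_str.split(" ", 1)[1]))
assert recovered == info

# Cost accounting as in handle_openai(): a response reporting 100 total tokens
# deducts 100 * 0.000002 = 0.0002 from g.openai_balance.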
@@ -0,0 +1,167 @@
<div >

    <div class="row-fluid">
        <div class="span1"><strong>{{ _( "Help" ) }}</strong></div> <!-- .span4 -->
        <div class="span7" id="about_current_version"> <a href="{{ _( "https://github.com/XX-net/XX-Net/wiki/ChatGPT_EN" ) }}" target="_blank">{{ _( "ChatGPT Manual" ) }}</a></div> <!-- .span8 -->
    </div> <!-- .div.fluid -->

    <div class="row-fluid" id="output-area">
        <div id="output" class="span12"></div> <!-- #log -->
    </div>

    <div class="row-fluid" id="input-area">
        <div class="span10" id="input-box">
            <textarea id="prompt" rows="2"></textarea>
        </div> <!-- .span10 -->
        <div class="span2">
            <button class="btn btn-primary btn-block ask-button" id="ask" type="submit">
                <div id="submit-text" >
                    {{ _( "Submit" ) }}
                </div>
                <div id="submit-loading" >
                    <div class="lds-facebook"> <div></div><div></div><div></div></div>
                </div>
            </button>

        </div> <!-- .span12 -->
    </div> <!-- .row-fluid -->

</div> <!-- #log-container -->

<script type="text/javascript">
    title('{{ _("ChatGPT") }}');
    $('#submit-loading').addClass('hide');
</script>

<script>
    function append_output(text, text_class) {
        text = text.replace(/\n/g, "<br>");
        while (text.includes("```")) {
            text = text.replace("```", "</p><pre>");
            text = text.replace("```", '</pre><p class="' + text_class + '">');
        }
        var newlines = document.createDocumentFragment();
        var template = '<p class="%s">%s</p>\n';
        var newline = $(template.format(text_class, text));
        $(newlines).append(newline);
        $('#output').append(newlines);
    }

    $('#ask').click(function () {
        var prompt = $('#prompt').val();
        console.log(prompt);
        append_output(prompt, 'prompt_text');
        $('#prompt').val('');

        const req = {
            model: "gpt-3.5-turbo",
            // Submit only the role and content of the messages, provide the previous messages as well for context
            messages: [
                {
                    role: "user",
                    content: prompt
                }
            ]
        };
        $('#submit-text').addClass('hide');
        $('#submit-loading').removeClass('hide');

        $.ajax({
            type: 'POST',
            url: '/openai/v1/chat/completions',
            data: JSON.stringify(req),
            dataType: 'JSON',
            success: function (result) {
                for (const choice of result['choices']) {
                    const message = choice["message"];
                    const content = message["content"];
                    append_output(content, "complete_text");
                }
                $('#output').scrollTop($('#output')[0].scrollHeight);

                $('#submit-text').removeClass('hide');
                $('#submit-loading').addClass('hide');
            },
            error: function () {
                tip('{{ _( "Call API failed." ) }}', 'error');
                $('#submit-text').removeClass('hide');
                $('#submit-loading').addClass('hide');
            }
        });
    });
</script>


<style type="text/css">
    #output-area {
    }
    #output {
        background-color: #f4f6f6;
        border: 2px solid #d5dbdb;
        border-radius: 6px;
        color: #34495e;
        font-size: 14.994px;
        line-height: 24px;
        max-width: 100%;
        overflow-y: auto;
        padding: 5px 11px;
        text-indent: 0;
        height: calc(100vh - 290px);
    }
    #input-area {
    }
    #input-box {
        margin-top: 20px;
    }
    div#content textarea {
        height: 60px;
    }
    .prompt_text {
        color: blue;
    }
    .complete_text {
        color: green;
    }
    .ask-button {
        height: 80px;
    }
    #submit-loading{
        margin-top: -10px;
    }
    .lds-facebook {
        display: inline-block;
        position: relative;
        width: 80px;
        height: 80px;
    }
    .lds-facebook div {
        display: inline-block;
        position: absolute;
        left: 8px;
        width: 16px;
        background: #fff;
        animation: lds-facebook 1.2s cubic-bezier(0, 0.5, 0.5, 1) infinite;
    }
    .lds-facebook div:nth-child(1) {
        left: 8px;
        animation-delay: -0.24s;
    }
    .lds-facebook div:nth-child(2) {
        left: 32px;
        animation-delay: -0.12s;
    }
    .lds-facebook div:nth-child(3) {
        left: 56px;
        animation-delay: 0;
    }
    @keyframes lds-facebook {
        0% {
            top: 8px;
            height: 64px;
        }
        50%, 100% {
            top: 24px;
            height: 32px;
        }
    }
</style>
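
The endpoint the template posts to can also be exercised outside the browser. A minimal sketch using the requests library, assuming the XX-Net web UI is listening on its usual local address (the host and port are assumptions; adjust them to your configuration):

import requests

# Hypothetical local address of the XX-Net web UI; the path matches the template above.
url = "http://127.0.0.1:8085/openai/v1/chat/completions"
req = {
    "model": "gpt-3.5-turbo",
    "messages": [{"role": "user", "content": "Hello"}],
}

resp = requests.post(url, json=req, timeout=60)
resp.raise_for_status()
for choice in resp.json()["choices"]:
    # Same fields the success handler above reads: choices[].message.content
    print(choice["message"]["content"])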