Skip to content

Commit

Permalink
更新
Browse files Browse the repository at this point in the history
  • Loading branch information
tonquer committed Jan 31, 2024
1 parent 0b16572 commit c4675e9
Show file tree
Hide file tree
Showing 17 changed files with 229 additions and 111 deletions.
4 changes: 3 additions & 1 deletion src/component/scroll/smooth_scroll.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from PySide6.QtGui import QWheelEvent
from PySide6.QtWidgets import QApplication

from config.setting import Setting
from view.read.read_enum import ReadMode


Expand Down Expand Up @@ -38,7 +39,8 @@ def wheelEvent(self, e):
accerationRatio = min(len(self.scrollStamps) / 15, 1)
self.qEventParam = (e.position(), e.globalPosition(), e.buttons())
# 计算步数
self.stepsTotal = self.fps * self.duration / 1000
duration = int(self.duration)
self.stepsTotal = self.fps * duration / 1000
# 计算每一个事件对应的移动距离
delta = e.angleDelta().y() * self.stepRatio
if self.acceleration > 0:
Expand Down
24 changes: 9 additions & 15 deletions src/config/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,12 @@


# Url = "https://jmcomic1.cc" # 域名
Url2 = "https://www.asjmapihost.cc" # 域名
Url2 = "https://www.jmapinode.biz" # 域名
# Url2 = "https://www.jmapibranch3.cc" # 域名
PicUrl2 = "https://cdn-msp.jmapiproxy2.cc" # 域名
PicUrl2 = "https://cdn-msp.jmapinodeudzn.net" # 域名

Url2List = ["https://www.jmapinode1.top", "https://www.jmapinode2.top", "https://www.jmapinode3.top", "https://www.jmapibranch2.cc"]
PicUrlList = ["https://cdn-msp.jmapiproxy1.cc", "https://cdn-msp.jmapiproxy2.cc", "https://cdn-msp.jmapiproxy3.cc", "https://cdn-msp.jmapiproxy4.cc"]
Url2List = ["https://www.jmapinode.biz", "https://www.jmapinode.vip", "https://www.jmapinode3.top", "https://www.jmapibranch2.cc"]
PicUrlList = ["https://cdn-msp.jmapinodeudzn.net", "https://cdn-msp2.jmapinodeudzn.net", "https://cdn-msp.jmapiproxy3.cc", "https://cdn-msp.jmapiproxy4.cc"]
Now = int(time.time())
ProxyApiDomain = "api.bika.life"
ProxyImgDomain = "img.bika.life"
Expand All @@ -31,21 +31,15 @@

IsLoadingPicture = True

UpdateUrl = "https://github.com/tonquer/JMComic-qt/releases/latest"
UpdateUrlApi = "https://api.github.com/repos/tonquer/JMComic-qt/releases"
UpdateUrlBack = "https://github.com/tonquer/JMComic-qt/"
AppUrl = "https://app.ggo.icu/JMComic"

UpdateUrl2 = "https://hub.ggo.icu/tonquer/JMComic-qt/releases/latest"
UpdateUrl2Api = "https://api.ggo.icu/repos/tonquer/JMComic-qt/releases"
UpdateUrlBack = "https://github.com/tonquer/JMComic-qt/"
UpdateUrl2Back = "https://hub.ggo.icu/tonquer/JMComic-qt/"

UpdateUrl3 = "https://hub.fastgit.xyz/tonquer/JMComic-qt/releases/latest"
UpdateUrl3Api = "https://api.fastgit.xyz/repos/tonquer/JMComic-qt/releases"
UpdateUrl3Back = "https://hub.fastgit.xyz/tonquer/JMComic-qt"

UpdateVersion = "v1.1.8"
RealVersion = "v1.1.8.1"
VersionTime = "2023-11-22"
UpdateVersion = "v1.1.9"
RealVersion = "v1.1.9"
VersionTime = "2024-1-31"

Waifu2xVersion = "1.1.6"
LoginUserName = ""
Expand Down
2 changes: 2 additions & 0 deletions src/config/setting.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,8 @@ class Setting:
TurnSpeed = SettingValue("ReadSetting", 5000, False)
ScrollSpeed = SettingValue("ReadSetting", 400, False)
PreDownWaifu2x = SettingValue("ReadSetting", 1, False)
UpDownScrollSpeed = SettingValue("ReadSetting", 100, False)
ScaleCnt = SettingValue("ReadSetting", 0, False)

# Other
UserId = SettingValue("Other", "", False)
Expand Down
37 changes: 26 additions & 11 deletions src/server/req.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def GetHeader(self, _url: str, method: str) -> dict:
ua = "Mozilla/5.0 (Linux; Android 7.1.2; DT1901A Build/N2G47O; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/86.0.4240.198 Mobile Safari/537.36"

header = {
"tokenparam": "{},1.6.1".format(self.now),
"tokenparam": "{},1.6.6".format(self.now),
"token": token,
"user-agent": ua,
"accept-encoding": "gzip",
Expand All @@ -81,7 +81,7 @@ def GetHeader2(self, _url: str, method: str) -> dict:
token = hashlib.md5(param.encode("utf-8")).hexdigest()

header = {
"tokenparam": "{},1.6.1".format(self.now),
"tokenparam": "{},1.6.6".format(self.now),
"token": token,
"user-agent": "Mozilla/5.0 (Linux; Android 7.1.2; DT1901A Build/N2G47O; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/86.0.4240.198 Mobile Safari/537.36",
"accept-encoding": "gzip",
Expand Down Expand Up @@ -132,26 +132,37 @@ def ParseData(self, data) -> str:

# Check for an application update by fetching a plain-text version file.
class CheckUpdateReq(ServerReq):
    """Request the latest published version string from the app site.

    Args:
        isPre: when False, fetch the stable channel ("version.txt");
               when True, fetch the pre-release channel ("version_pre.txt").

    The current client version/build time are appended as query
    parameters so the server can tailor or log the check.
    """
    def __init__(self, isPre=False):
        method = "GET"
        data = dict()
        data["version"] = config.UpdateVersion
        data["ver_time"] = config.VersionTime
        if not isPre:
            url = config.AppUrl + "/version.txt?"
        else:
            url = config.AppUrl + "/version_pre.txt?"
        url += ToolUtil.DictToUrl(data)
        # Plain super() instead of super(self.__class__, ...): the latter
        # recurses infinitely if this class is ever subclassed.
        super().__init__(url, {}, method)
        self.isParseRes = False   # response is raw text, not the API JSON envelope
        self.headers = {}
        self.useImgProxy = False  # version file is not an image asset


# Fetch the human-readable update information (release notes) for a
# specific version, served as "<newVersion>.txt" on the app site.
class CheckUpdateInfoReq(ServerReq):
    """Request the release-notes text file for *newVersion*.

    Args:
        newVersion: version tag whose "<newVersion>.txt" file to fetch.
    """
    def __init__(self, newVersion):
        method = "GET"
        data = dict()
        # Current client version goes along as a query parameter.
        data["version"] = config.UpdateVersion
        url = config.AppUrl + "/{}.txt?".format(newVersion)
        url += ToolUtil.DictToUrl(data)
        # Plain super() instead of super(self.__class__, ...): the latter
        # recurses infinitely if this class is ever subclassed.
        super().__init__(url, {}, method)
        self.isParseRes = False   # response is raw text, not the API JSON envelope
        self.useImgProxy = False  # text file is not an image asset


# 下载图片
class DownloadBookReq(ServerReq):
def __init__(self, url, loadPath="", cachePath="", savePath="", saveParam=(0, 0, ""), isReload=False):
def __init__(self, url, loadPath="", cachePath="", savePath="", saveParam=(0, 0, ""), isReload=False, resetCnt=1):
method = "Download"
self.url = url
if self.url in ServerReq.SPACE_PIC:
Expand All @@ -162,6 +173,8 @@ def __init__(self, url, loadPath="", cachePath="", savePath="", saveParam=(0, 0,
self.savePath = savePath
self.saveParam = saveParam
self.isReload = isReload
self.resetCnt = resetCnt
self.isReset = False
super(self.__class__, self).__init__(self.url, {}, method)
self.headers = dict()
self.headers["Accept-Encoding"] ="None"
Expand Down Expand Up @@ -755,4 +768,6 @@ def __init__(self):
self.headers['cache-control'] = 'no-cache'
self.headers['expires'] = '0'
self.headers['pragma'] = 'no-cache'
self.isReload = False
self.isReload = False
self.resetCnt = 2
self.isReset = False
36 changes: 34 additions & 2 deletions src/server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,8 +316,14 @@ def Download(self, request, token="", backParams="", cacheAndLoadPath="", loadPa
else:
self._Download(task)

def ReDownload(self, task):
    """Requeue a failed download task for another attempt.

    Resets the per-attempt state (status back to Ok, previous response
    cleared) and pushes the task onto the download queue again.
    """
    # Reset per-attempt state, then hand the task back to the worker queue.
    task.status = Status.Ok
    task.res = ""
    self._downloadQueue.put(task)

def _Download(self, task):
try:
task.req.resetCnt -= 1
if not task.req.isReload:
if not isinstance(task.req, req.SpeedTestReq) and not task.req.savePath:
for cachePath in [task.req.loadPath, task.req.cachePath]:
Expand All @@ -340,8 +346,30 @@ def _Download(self, task):

if request.headers == None:
request.headers = {}
Log.Info("request-> backId:{}, {}".format(task.backParam, task.req))
r = self.session.get(request.url, proxies=request.proxy, headers=request.headers, stream=True, timeout=task.timeout, verify=False)
if not request.isReset:
Log.Info("request-> backId:{}, {}".format(task.bakParam, task.req))
else:
Log.Info("request reset:{} -> backId:{}, {}".format(task.req.resetCnt, task.bakParam, task.req))

history = []
for i in range(10):
r = self.session.get(request.url, proxies=request.proxy, headers=request.headers, timeout=task.timeout,
verify=False, allow_redirects=False, stream=True)
if r.status_code == 302 or r.status_code == 301:
next = r.headers.get('Location')
if ToolUtil.GetUrlHost(next) == "":
next = "https://" + ToolUtil.GetUrlHost(request.url) + next
request.url = next
if not request.isReset:
Log.Info("request 301 -> backId:{}, {}".format(task.bakParam, task.req))
else:
Log.Info("request 301 reset:{} -> backId:{}, {}".format(task.req.resetCnt, task.bakParam, task.req))

history.append(r)
self.__DealHeaders(request, "")
else:
break
r.history = history
# task.res = res.BaseRes(r)
# print(r.elapsed.total_seconds())
task.res = r
Expand All @@ -362,6 +390,10 @@ def _Download(self, task):
else:
task.status = Status.NetError
Log.Warn(task.req.url + " " + es.__repr__())
if (task.req.resetCnt > 0):
task.req.isReset = True
self.ReDownload(task)
return
self.handler.get(task.req.__class__.__name__)(task)
if task.res:
task.res.close()
Expand Down
98 changes: 44 additions & 54 deletions src/server/user_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,27 +27,15 @@ def __call__(self, task):
if task.res.raw.status_code != 200:
return

updateInfo = re.findall(r"<meta property=\"og:description\" content=\"([^\"]*)\"", task.res.raw.text)
if updateInfo:
rawData = updateInfo[0]
else:
rawData = ""

versionInfo = re.findall("<meta property=\"og:url\" content=\".*tag/([^\"]*)\"", task.res.raw.text)
if versionInfo:
verData = versionInfo[0]
else:
verData = ""

verData = task.res.GetText()
info = verData.replace("v", "").split(".")
version = int(info[0]) * 100 + int(info[1]) * 10 + int(info[2]) * 1

info2 = re.findall(r"\d+\d*", os.path.basename(config.UpdateVersion))
curversion = int(info2[0]) * 100 + int(info2[1]) * 10 + int(info2[2]) * 1

rawData = "\n\nv" + ".".join(info) + "\n" + rawData

if version > curversion:
data["data"] = rawData
data["data"] = verData.replace("\r\n", "").replace("\n", "")
else:
data["data"] = "no"
except Exception as es:
Expand All @@ -57,38 +45,22 @@ def __call__(self, task):
TaskBase.taskObj.taskBack.emit(task.bakParam, pickle.dumps(data))


# Handle the response of CheckUpdateInfoReq: forward the raw release-notes
# text back to the UI thread via the task-back signal.
@handler(req.CheckUpdateInfoReq)
class CheckUpdateInfoHandler(object):
    def __call__(self, task):
        data = {"st": task.status, "data": ""}
        try:
            # Nothing useful to report on an empty body or a network error.
            if not task.res.GetText() or task.status == Status.NetError:
                return
            if task.res.raw.status_code != 200:
                return
            data["data"] = task.res.GetText()
        except Exception:
            # Best-effort: any failure falls through and reports "".
            pass
        finally:
            # `finally` runs even after the early returns above, so the
            # caller always receives a reply when a callback id is present.
            if task.bakParam:
                TaskBase.taskObj.taskBack.emit(task.bakParam, pickle.dumps(data))

# @handler(req.GetUserInfoReq)
# class GetUserInfoReqHandler(object):
# def __call__(self, task: Task):
Expand Down Expand Up @@ -761,27 +733,40 @@ def __call__(self, backData):
if backData.bakParam:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, 0, -Status.Error, b"")
return

fileSize = int(r.headers.get('Content-Length', 0))
if fileSize <= 0:
fileSize = 100000
getSize = 0
data = b""
now = time.time()

addSize = 0
now = time.time()
isAlreadySend = False
# 网速快,太卡了,优化成最多100ms一次
for chunk in r.iter_content(chunk_size=4096):
cur = time.time()
tick = cur - now
addSize += len(chunk)
if tick >= 0.1:
if backData.bakParam and addSize > 0:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, addSize, max(1, fileSize-getSize), b"")
addSize = 0
now = cur

getSize += len(chunk)
data += chunk
try:
for chunk in r.iter_content(chunk_size=4096):
cur = time.time()
tick = cur - now
addSize += len(chunk)
if tick >= 0.1:
isAlreadySend = True
if backData.bakParam and addSize > 0:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, addSize,
max(1, fileSize - getSize), b"")
addSize = 0
now = cur

getSize += len(chunk)
data += chunk
if not isAlreadySend:
if backData.bakParam:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, 0, getSize, b"")

except Exception as es:
Log.Error(es)
if backData.req.resetCnt > 0:
backData.req.isReset = True
Server().ReDownload(backData)
return

# Log.Info("size:{}, url:{}".format(ToolUtil.GetDownloadSize(fileSize), backData.req.url))
_, _, mat, isAni = ToolUtil.GetPictureSize(data)
Expand Down Expand Up @@ -829,15 +814,17 @@ def __call__(self, backData):
except Exception as es:
Log.Error(es)
# 保存失败了
if backData.backParam:
TaskBase.taskObj.downloadBack.emit(backData.backParam, 0, -2, b"")
if backData.bakParam:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, 0, -2, b"")

if backData.bakParam:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, 0, 0, data)

except Exception as es:
backData.status = Status.DownloadFail
Log.Error(es)
if backData.backParam:
TaskBase.taskObj.downloadBack.emit(backData.backParam, 0, -1, b"")
if backData.bakParam:
TaskBase.taskObj.downloadBack.emit(backData.bakParam, 0, -backData.status, b"")


@handler(req.DnsOverHttpsReq)
Expand Down Expand Up @@ -884,7 +871,9 @@ def __call__(self, backData):
r = backData.res
try:
if r.status_code != 200:
data["st"] = Status.Error
if backData.bakParam:
data["st"] = Status.Error
TaskBase.taskObj.taskBack.emit(backData.bakParam, pickle.dumps(data))
return

Expand All @@ -897,13 +886,14 @@ def __call__(self, backData):
if consume >= 3.0:
break
consume = time.time() - now
downloadSize = getSize / max(0.0001, consume)
downloadSize = getSize / consume
speed = ToolUtil.GetDownloadSize(downloadSize)
if backData.bakParam:
data["data"] = speed
TaskBase.taskObj.taskBack.emit(backData.bakParam, pickle.dumps(data))

except Exception as es:
Log.Error(es)
data["st"] = Status.DownloadFail
if backData.bakParam:
TaskBase.taskObj.taskBack.emit(backData.bakParam, pickle.dumps(data))
Loading

0 comments on commit c4675e9

Please sign in to comment.