Merge pull request #28 from pedohorse/dev
Dev
pedohorse authored Apr 12, 2021
2 parents e77036f + 9a2fc6c commit cf3b3f2
Showing 4 changed files with 168 additions and 143 deletions.
167 changes: 90 additions & 77 deletions python2.7libs/hpaste/hpasteweb.py
@@ -1,3 +1,4 @@
import hou # for ui only
import hpastewebplugins
import widcacher
import random # to shuffle plugins
@@ -7,91 +8,103 @@


def webPack(asciiText, pluginList=None, maxChunkSize=None):
allPackids=[]
done = False
allPackids=[]
done = False

while not done:
packid = None
if pluginList is None:
pluginClasses = [x for x in hpastewebplugins.pluginClassList]
random.shuffle(pluginClasses)
pluginClasses.sort(reverse=True, key=lambda x: x.speedClass())
else:
pluginClasses = []
for pname in pluginList:
pluginClasses += [x for x in hpastewebplugins.pluginClassList if x.__name__ == pname]
cls = None
for cls in pluginClasses:
try:
packer = cls()
chunklen = min(packer.maxStringLength(), len(asciiText))
if maxChunkSize is not None:
chunklen = min(chunklen, maxChunkSize)
chunk = asciiText[:chunklen]
packid = packer.webPackData(chunk)
asciiText = asciiText[chunklen:]
break
except Exception as e:
print("error: %s" % str(e.message))
print("failed to pack with plugin %s, looking for alternatives..." % cls.__name__)
continue
if packid is None or cls is None:
print("all web packing methods failed, sorry :(")
raise RuntimeError("couldnt web pack data")
# sanity check
if len(asciiText) > 2 ** 23:
if hou.isUIAvailable():
if hou.ui.displayMessage('you are about to save %d Mb into a snippet. This is HIGHLY DISCOURAGED!' % (len(asciiText) // 2 ** 20,),
buttons=('Proceed', 'Cancel'), default_choice=1, close_choice=1,
severity=hou.severityType.Warning) != 0:
raise RuntimeError('snippet too big. cancelled')
else:
raise RuntimeError('snippet too big. cancelled')
#

allPackids.append('@'.join((packid, cls.__name__)))
done = len(asciiText) == 0
# just a failsafe:
if len(allPackids) > 128:
raise RuntimeError("Failsafe triggered: for some reason too many chunks. plz check the chunkSize, or your plugins for too small allowed string sizes, or your data for sanity.")
while not done:
packid = None
if pluginList is None:
pluginClasses = [x for x in hpastewebplugins.pluginClassList]
random.shuffle(pluginClasses)
pluginClasses.sort(reverse=True, key=lambda x: x.speedClass())
else:
pluginClasses = []
for pname in pluginList:
pluginClasses += [x for x in hpastewebplugins.pluginClassList if x.__name__ == pname]

return '#'.join(allPackids)
cls = None
for cls in pluginClasses:
try:
packer = cls()
chunklen = min(packer.maxStringLength(), len(asciiText))
if maxChunkSize is not None:
chunklen = min(chunklen, maxChunkSize)
chunk = asciiText[:chunklen]
packid = packer.webPackData(chunk)
asciiText = asciiText[chunklen:]
break
except Exception as e:
print("error: %s" % str(e.message))
print("failed to pack with plugin %s, looking for alternatives..." % cls.__name__)
continue
if packid is None or cls is None:
print("all web packing methods failed, sorry :(")
raise RuntimeError("couldnt web pack data")

allPackids.append('@'.join((packid, cls.__name__)))
done = len(asciiText) == 0
# just a failsafe:
if len(allPackids) > 128:
raise RuntimeError("Failsafe triggered: for some reason too many chunks. plz check the chunkSize, or your plugins for too small allowed string sizes, or your data for sanity.")

return '#'.join(allPackids)
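
As an aside on the plugin ordering above: shuffling pluginClassList and then applying a stable sort keeps the highest speedClass() first while breaking ties between equally fast plugins at random. A minimal sketch with made-up plugin names, not repository code:

import random

candidates = [('PluginA', 10), ('PluginB', 10), ('PluginC', 5)]  # (name, speedClass) pairs, hypothetical
random.shuffle(candidates)
candidates.sort(reverse=True, key=lambda c: c[1])
# PluginA and PluginB always come before PluginC, but their mutual order changes from run to run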


def webUnpack(wid, useCached=True, cache=None):
#just a bit cleanup the wid first, in case it was copied with extra spaces
# strip witespaces, just to be sure
wid = wid.strip()
# strip comments if there are
wid = re.sub(r'^\S+\s+', '', wid)
#
if useCached:
if cache is None: # default cacher
cache = widcacher.WidCacher.globalInstance()
if wid in cache:
return cache[wid]
#just a bit cleanup the wid first, in case it was copied with extra spaces
# strip witespaces, just to be sure
wid = wid.strip()
# strip comments if there are
wid = re.sub(r'^\S+\s+', '', wid)
#
if useCached:
if cache is None: # default cacher
cache = widcacher.WidCacher.globalInstance()
if wid in cache:
return cache[wid]

allPackids = wid.split('#')
asciiTextParts = []
for awid in allPackids:
if awid.count('@') != 1:
raise RuntimeError('given wid is not a valid wid')
id, cname = awid.split('@')
allPackids = wid.split('#')
asciiTextParts = []
for awid in allPackids:
if awid.count('@') != 1:
raise RuntimeError('given wid is not a valid wid')
id, cname = awid.split('@')

pretendents = [x for x in hpastewebplugins.pluginClassList if x.__name__ == cname]
if len(pretendents) == 0:
raise RuntimeError("No plugins that can process this wid found")
pretendents = [x for x in hpastewebplugins.pluginClassList if x.__name__ == cname]
if len(pretendents) == 0:
raise RuntimeError("No plugins that can process this wid found")

asciiText = None
for cls in pretendents:
try:
unpacker = cls()
asciiText = unpacker.webUnpackData(id)
break
except WebClipBoardWidNotFound as e:
raise RuntimeError('item "%s" does not exist. it may have expired' % e.wid)
except Exception as e:
print("error: %s: %s" % (str(type(e)), str(e.message)))
print("Exception: %s" % repr(e))
print("keep trying...")
continue
if asciiText is None:
print("failed")
raise RuntimeError("couldn't web unpack data")
asciiTextParts.append(asciiText)
asciiText = None
for cls in pretendents:
try:
unpacker = cls()
asciiText = unpacker.webUnpackData(id)
break
except WebClipBoardWidNotFound as e:
raise RuntimeError('item "%s" does not exist. it may have expired' % e.wid)
except Exception as e:
print("error: %s: %s" % (str(type(e)), str(e.message)))
print("Exception: %s" % repr(e))
print("keep trying...")
continue
if asciiText is None:
print("failed")
raise RuntimeError("couldn't web unpack data")
asciiTextParts.append(asciiText)

finalText = ''.join(asciiTextParts)
if useCached and cache is not None:
cache[wid] = finalText
finalText = ''.join(asciiTextParts)
if useCached and cache is not None:
cache[wid] = finalText

return finalText
return finalText
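
For reference, the snippet id ("wid") that webPack returns and webUnpack consumes is a sequence of chunk records of the form packid@PluginClassName joined with '#'. A small parsing sketch with a made-up wid, not repository code:

wid = 'aB3xYz@HPaste#Q9rTm2@HPaste'   # hypothetical two-chunk wid
for awid in wid.split('#'):
    packid, cname = awid.split('@')   # e.g. ('aB3xYz', 'HPaste')
    # look up the plugin class named cname in pluginClassList and unpack packid with it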
130 changes: 65 additions & 65 deletions python2.7libs/hpaste/hpastewebplugins/houhpaste.py
@@ -6,68 +6,68 @@


class HPaste(WebClipBoardBase):
def __init__(self):
self.__headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'}

@classmethod
def speedClass(cls):
return opt.getOption('hpasteweb.plugins.%s.speed_class'%cls.__name__, 10)

@classmethod
def maxStringLength(cls):
return 400000

@classmethod
def urlopen(cls, url, timeout=30):
try:
rep = urllib2.urlopen(url, timeout=timeout)
except urllib2.URLError as e:
try:
import certifi
rep = urllib2.urlopen(url, timeout=timeout, cafile=certifi.where())
except ImportError:
import ssl
rep = urllib2.urlopen(url, timeout=timeout, context=ssl._create_unverified_context())
print "WARNING: connected with unverified context"
return rep

def webPackData(self, s):
if (not isinstance(s, str)):
s = str(s)
if (len(s) > self.maxStringLength()): raise RuntimeError("len of s it too big for web clipboard currently")

try:
req = urllib2.Request(r"https://hou-hpaste.herokuapp.com/documents", s, headers=self.__headers)
rep = self.urlopen(req, timeout=30)
repstring = rep.read()
except Exception as e:
raise RuntimeError("error/timeout connecting to web clipboard: " + str(e.message))

if (rep.getcode() != 200): raise RuntimeError("error code from web clipboard")

try:
repson = json.loads(repstring)
id=repson['key']
except Exception as e:
raise RuntimeError("Unknown Server responce: "+str(e.message))

return str(id)


def webUnpackData(self, id):
id=str(id)
try:
req = urllib2.Request(r"https://hou-hpaste.herokuapp.com/raw/" + id, headers=self.__headers)
rep = self.urlopen(req, timeout=30)
except urllib2.HTTPError as e:
if e.code == 404:
raise WebClipBoardWidNotFound(id)
raise RuntimeError("error connecting to web clipboard: " + e.message)
except Exception as e:
raise RuntimeError("error/timeout connecting to web clipboard: " + e.message)

if (rep.getcode() != 200): raise RuntimeError("error code from web clipboard")

repstring = rep.read()

return repstring
def __init__(self):
self.__headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'}

@classmethod
def speedClass(cls):
return opt.getOption('hpasteweb.plugins.%s.speed_class'%cls.__name__, 10)

@classmethod
def maxStringLength(cls):
return 2**20

@classmethod
def urlopen(cls, url, timeout=30):
try:
rep = urllib2.urlopen(url, timeout=timeout)
except urllib2.URLError as e:
try:
import certifi
rep = urllib2.urlopen(url, timeout=timeout, cafile=certifi.where())
except ImportError:
import ssl
rep = urllib2.urlopen(url, timeout=timeout, context=ssl._create_unverified_context())
print "WARNING: connected with unverified context"
return rep

def webPackData(self, s):
if (not isinstance(s, str)):
s = str(s)
if (len(s) > self.maxStringLength()): raise RuntimeError("len of s it too big for web clipboard currently")

try:
req = urllib2.Request(r"https://hou-hpaste.herokuapp.com/documents", s, headers=self.__headers)
rep = self.urlopen(req, timeout=30)
repstring = rep.read()
except Exception as e:
raise RuntimeError("error/timeout connecting to web clipboard: " + str(e.message))

if (rep.getcode() != 200): raise RuntimeError("error code from web clipboard")

try:
repson = json.loads(repstring)
id=repson['key']
except Exception as e:
raise RuntimeError("Unknown Server responce: "+str(e.message))

return str(id)


def webUnpackData(self, id):
id=str(id)
try:
req = urllib2.Request(r"https://hou-hpaste.herokuapp.com/raw/" + id, headers=self.__headers)
rep = self.urlopen(req, timeout=30)
except urllib2.HTTPError as e:
if e.code == 404:
raise WebClipBoardWidNotFound(id)
raise RuntimeError("error connecting to web clipboard: " + e.message)
except Exception as e:
raise RuntimeError("error/timeout connecting to web clipboard: " + e.message)

if (rep.getcode() != 200): raise RuntimeError("error code from web clipboard")

repstring = rep.read()

return repstring
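
The plugin above speaks to a hastebin-style service: the snippet body is POSTed to /documents, the server answers with a JSON object containing a "key", and the stored text is later fetched from /raw/<key>; the urlopen() helper tries the system certificates first, then certifi's CA bundle, and only as a last resort an unverified SSL context with a printed warning. A minimal sketch of the same exchange using Python 3's urllib.request instead of urllib2, with the endpoints taken from the diff and error handling omitted:

import json
import urllib.request

def pack(text):
    # POST the snippet body; the server replies with {"key": "<id>"}
    req = urllib.request.Request('https://hou-hpaste.herokuapp.com/documents',
                                 data=text.encode('utf-8'))
    with urllib.request.urlopen(req, timeout=30) as rep:
        return json.loads(rep.read())['key']

def unpack(key):
    # GET the raw stored text back by its key
    url = 'https://hou-hpaste.herokuapp.com/raw/' + key
    with urllib.request.urlopen(url, timeout=30) as rep:
        return rep.read().decode('utf-8')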
12 changes: 12 additions & 0 deletions python3.7libs/hpaste/hpasteweb.py
@@ -1,3 +1,4 @@
import hou # for ui only
from . import hpastewebplugins
from . import widcacher
import random # to shuffle plugins
@@ -10,6 +11,17 @@ def webPack(asciiText: str, pluginList=None, maxChunkSize=None):
allPackids = []
done = False

# sanity check
if len(asciiText) > 2 ** 23:
if hou.isUIAvailable():
if hou.ui.displayMessage('you are about to save %d Mb into a snippet. This is HIGHLY DISCOURAGED!' % (len(asciiText) // 2 ** 20,),
buttons=('Proceed', 'Cancel'), default_choice=1, close_choice=1,
severity=hou.severityType.Warning) != 0:
raise RuntimeError('snippet too big. cancelled')
else:
raise RuntimeError('snippet too big. cancelled')
#

while not done:
packid = None
if pluginList is None:
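
For context on the numbers in the sanity check added above: the warning fires once the ASCII payload exceeds 2**23 characters (8 MiB), and the dialog reports the size by integer-dividing by 2**20, i.e. in whole MiB (labelled 'Mb' in the message). Plain arithmetic, not repository code:

print(2 ** 23)                # 8388608 characters, the ~8 MiB warning threshold
print(12_000_000 // 2 ** 20)  # 11, the figure a 12,000,000-character snippet would show in the dialog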
2 changes: 1 addition & 1 deletion python3.7libs/hpaste/hpastewebplugins/houhpaste.py
@@ -16,7 +16,7 @@ def speedClass(cls):

@classmethod
def maxStringLength(cls):
return 400000
return 2**20

@classmethod
def urlopen(cls, url, timeout=30):
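
The maxStringLength() bump from 400000 to 2**20 (1,048,576) characters raises the per-chunk upload limit, so webPack needs fewer chunks, and therefore fewer '#'-separated packids, for big snippets. A rough illustration, not repository code:

limit = 2 ** 20                    # new per-chunk limit: 1,048,576 characters
snippet_len = 3_000_000            # hypothetical snippet size
chunks = -(-snippet_len // limit)  # ceiling division -> 3 chunks (the old 400000 limit would need 8)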
