cache.py
from globals import *

import profiles
import items

import logging
import json
import os


def _capsule():
    # Skeleton record for one cached item: creation tick plus flags tracking
    # whether the record lives on disk and may be dropped from the file.
    return {'uid': None,
            'date': WORLD_INFO['ticks'],
            '_on_disk': False,
            '_allow_dump': False}
def _write_to_cache(cache_name, data):
    # Append one JSON record to the world's '<cache_name>_history.dat' file,
    # followed by a blank separator line.
    with open(os.path.join(profiles.get_world_directory(WORLD_INFO['id']), '%s_history.dat' % cache_name), 'a') as f:
        f.write(json.dumps(data)+'\n\n')
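# For illustration only (values are hypothetical): after an item has been
# moved to disk, one record in 'items_history.dat' looks roughly like
#   {"uid": 42, "date": 1200, "_on_disk": true, "_allow_dump": false, "item": {...}}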
def _read_from_cache(cache_name, uid):
    # Scan the history file for the record matching `uid`, mark it as back in
    # memory, refresh its timestamp, and merge it into ITEMS_HISTORY.
    with open(os.path.join(profiles.get_world_directory(WORLD_INFO['id']), '%s_history.dat' % cache_name)) as f:
        # Skip the blank separator lines written by _write_to_cache, which
        # would otherwise crash json.loads().
        for item in [json.loads(s) for s in f if s.strip()]:
            if item['uid'] == uid:
                item['_on_disk'] = False
                item['date'] = WORLD_INFO['ticks']

                ITEMS_HISTORY[item['uid']].update(item)

                logging.debug('Cache: Loaded item %s from cache.' % uid)

                return item
def save_cache(cache_name):
    # Rewrite the history file, keeping only records flagged '_allow_dump'.
    _path = os.path.join(profiles.get_world_directory(WORLD_INFO['id']), '%s_history.dat' % cache_name)
    _write_cache = []

    if not os.path.exists(_path):
        return False

    with open(_path, 'r') as f:
        _cache = f.readlines()

    for _line in _cache:
        line = _line.rstrip()

        if not line:
            continue

        _historic_item = json.loads(line)

        if not _historic_item['_allow_dump']:
            continue

        _write_cache.append(json.dumps(_historic_item))

    with open(_path, 'w') as f:
        # End with a newline so later appends start on a fresh line.
        f.write('\n'.join(_write_cache)+'\n')

    logging.debug('Cache: Saved to disk.')
def commit_cache(cache_name):
    # Flag every record in the history file (and, for items, every in-memory
    # record already on disk) as safe to dump on the next save_cache() pass.
    _path = os.path.join(profiles.get_world_directory(WORLD_INFO['id']), '%s_history.dat' % cache_name)
    _write_cache = []

    if not os.path.exists(_path):
        return False

    with open(_path, 'r') as f:
        _cache = f.readlines()

    for _line in _cache:
        line = _line.rstrip()

        if not line:
            continue

        _historic_item = json.loads(line)
        _historic_item['_allow_dump'] = True

        _write_cache.append(json.dumps(_historic_item))

    if cache_name == 'items':
        for item_uid in ITEMS_HISTORY:
            _historic_item = ITEMS_HISTORY[item_uid]

            if not _historic_item['_on_disk']:
                continue

            _historic_item['_allow_dump'] = True

    with open(_path, 'w') as f:
        f.write('\n'.join(_write_cache)+'\n')

    logging.debug('Cache: Committed.')
def scan_cache():
    # Disk offloading is currently disabled; the early return below skips the
    # logic that would move items idle for 100+ ticks out to disk.
    return False

    for item_uid in ITEMS_HISTORY:
        _historic_item = ITEMS_HISTORY[item_uid]

        if _historic_item['_on_disk']:
            continue

        if WORLD_INFO['ticks']-_historic_item['date']>=100:
            logging.debug('Cache: Moved item %s to disk' % item_uid)
            _historic_item['_on_disk'] = True

            _write_to_cache('items', _historic_item)

            # Drop the payload from memory once it is safely on disk.
            if 'item' in _historic_item:
                del _historic_item['item']
def offload_item(raw_item):
    # Wrap an item in a cache capsule and park it in ITEMS_HISTORY; it stays
    # in memory until scan_cache() decides to push it to disk.
    _item = _capsule()

    items.clean_item_for_save(raw_item)

    _item['uid'] = raw_item['uid']
    _item['item'] = raw_item

    ITEMS_HISTORY[_item['uid']] = _item

    logging.debug('Cache: Offloaded item (in memory)')
def retrieve_item(item_uid):
    _historic_item = ITEMS_HISTORY[item_uid]

    if _historic_item['_on_disk']:
        # Reload the record from the on-disk history file; _read_from_cache()
        # merges it back into ITEMS_HISTORY and clears the '_on_disk' flag.
        _read_from_cache('items', item_uid)

    return _historic_item['item']
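
# A minimal usage sketch (not part of the module): it assumes the star-import
# from `globals` supplies WORLD_INFO (with 'id' and 'ticks') and the
# ITEMS_HISTORY dict, and that `raw_item` is an item dict carrying a 'uid'.
#
#     offload_item(raw_item)                 # capsule the item in memory
#     scan_cache()                           # no-op while disabled; would
#                                            # push stale capsules to disk
#     commit_cache('items')                  # mark on-disk records dumpable
#     save_cache('items')                    # compact the history file
#     item = retrieve_item(raw_item['uid'])  # get it back, from disk if needed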