Skip to content

Commit

Permalink
commit correctly
Browse files Browse the repository at this point in the history
Signed-off-by: Praneeth Bedapudi <praneeth@bpraneeth.com>
  • Loading branch information
bedapudi6788 committed Nov 8, 2023
1 parent dc4927e commit fc00b1c
Show file tree
Hide file tree
Showing 2 changed files with 83 additions and 52 deletions.
133 changes: 82 additions & 51 deletions liteindex/defined_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,11 +149,15 @@ def deserialize_record(self, record, return_compressed=False):
_record[self.key_hash_to_original_key[k]] = None

elif self.schema[self.key_hash_to_original_key[k]] == "other":
_record[self.key_hash_to_original_key[k]] = pickle.loads(
self._decompressor.decompress(v)
if (self._decompressor is not False or return_compressed)
_record[self.key_hash_to_original_key[k]] = (
pickle.loads(
self._decompressor.decompress(v)
if (self._decompressor is not False or return_compressed)
else v
)
if not return_compressed
else v
) if not return_compressed else v
)
elif self.schema[self.key_hash_to_original_key[k]] == "datetime":
_record[
self.key_hash_to_original_key[k]
Expand All @@ -163,11 +167,15 @@ def deserialize_record(self, record, return_compressed=False):
elif self.schema[self.key_hash_to_original_key[k]] == "boolean":
_record[self.key_hash_to_original_key[k]] = bool(v)
elif self.schema[self.key_hash_to_original_key[k]] == "blob":
_record[self.key_hash_to_original_key[k]] = bytes(
self._decompressor.decompress(v)
if self._decompressor is not False
_record[self.key_hash_to_original_key[k]] = (
bytes(
self._decompressor.decompress(v)
if self._decompressor is not False
else v
)
if not return_compressed
else v
) if not return_compressed else v
)
else:
_record[self.key_hash_to_original_key[k]] = v

Expand Down Expand Up @@ -270,29 +278,28 @@ def _create_table_and_meta_table(self):

columns_str = ", ".join(columns)

self._connection.execute(
f"CREATE TABLE IF NOT EXISTS {self.name} (id TEXT PRIMARY KEY, updated_at NUMBER, {columns_str})"
)

self._connection.execute(
f"CREATE INDEX IF NOT EXISTS idx_{self.name}_updated_at ON {self.name} (updated_at)"
)
with self._connection:
self._connection.execute(
f"CREATE TABLE IF NOT EXISTS {self.name} (id TEXT PRIMARY KEY, updated_at NUMBER, {columns_str})"
)

self._connection.execute(
f"CREATE TABLE IF NOT EXISTS {self.meta_table_name} "
"(hash TEXT PRIMARY KEY, pickled BLOB, value_type TEXT)"
)
self._connection.execute(
f"CREATE INDEX IF NOT EXISTS idx_{self.name}_updated_at ON {self.name} (updated_at)"
)

self._connection.executemany(
f"INSERT OR IGNORE INTO {self.meta_table_name} (hash, pickled, value_type) "
f"VALUES (?, ?, ?)",
[
(hash_val, sqlite3.Binary(pickled), value_type)
for hash_val, pickled, value_type in meta_columns
],
)
self._connection.execute(
f"CREATE TABLE IF NOT EXISTS {self.meta_table_name} "
"(hash TEXT PRIMARY KEY, pickled BLOB, value_type TEXT)"
)

self._connection.commit()
self._connection.executemany(
f"INSERT OR IGNORE INTO {self.meta_table_name} (hash, pickled, value_type) "
f"VALUES (?, ?, ?)",
[
(hash_val, sqlite3.Binary(pickled), value_type)
for hash_val, pickled, value_type in meta_columns
],
)

def update(self, data):
ids_grouped_by_common_keys = {}
Expand Down Expand Up @@ -364,14 +371,15 @@ def get(self, ids, select_keys=[], return_compressed=False):

def clear(self):
    """Delete every row in the index while keeping the index usable.

    Drops the data table and immediately recreates it (together with its
    metadata table) via ``_create_table_and_meta_table``, so callers can
    keep using the same DefinedIndex instance afterwards.
    """
    # NOTE: the table name cannot be bound as a SQL parameter, so an
    # f-string is used; self.name is assumed to be a trusted identifier
    # set at construction time — TODO confirm it is never user-supplied.
    # The connection context manager commits the DROP on success or
    # rolls it back on error.
    with self._connection:
        self._connection.execute(f"DROP TABLE IF EXISTS {self.name}")
        self._create_table_and_meta_table()

def drop(self):
    """Delete the index entirely: both the data table and the metadata table.

    After this call the index no longer exists in the database; unlike
    ``clear`` nothing is recreated.
    """
    # NOTE: table names cannot be bound as SQL parameters, hence the
    # f-strings; self.name / self.meta_table_name are assumed to be
    # trusted identifiers — TODO confirm they are never user-supplied.
    # One transaction scope: the connection context manager commits both
    # DROPs together, or rolls back if either fails.
    with self._connection:
        self._connection.execute(f"DROP TABLE IF EXISTS {self.name}")
        self._connection.execute(f"DROP TABLE IF EXISTS {self.meta_table_name}")

def search(
self,
Expand All @@ -382,7 +390,7 @@ def search(
page_no=None,
select_keys=[],
update=None,
return_compressed=False
return_compressed=False,
):
if {k for k in query if k not in self.schema or self.schema[k] in {"other"}}:
raise ValueError("Invalid query")
Expand Down Expand Up @@ -442,7 +450,8 @@ def search(
for result in _results:
_id, updated_at = result[:2]
results[_id] = self.deserialize_record(
{h: val for h, val in zip(select_keys_hashes, result[2:])}, return_compressed
{h: val for h, val in zip(select_keys_hashes, result[2:])},
return_compressed,
)

return results
Expand Down Expand Up @@ -475,17 +484,31 @@ def group(self, keys, query):
for _ in self._connection.execute(sql_query, sql_params).fetchall()
}

def pop(self, ids=None, query={}, n=1, sort_by=None, reversed_sort=False, return_compressed=False):
def pop(
self,
ids=None,
query={},
n=1,
sort_by=None,
reversed_sort=False,
return_compressed=False,
):
if ids is not None:
return {
row[0]: self.deserialize_record(
{h: val for h, val in zip(self.column_names, row[2:]) if h in self.key_hash_to_original_key}, return_compressed
)
for row in self._connection.execute(
f"DELETE FROM {self.name} WHERE id IN ({', '.join(['?' for _ in ids])}) RETURNING *",
ids,
).fetchall()
}
with self._connection:
return {
row[0]: self.deserialize_record(
{
h: val
for h, val in zip(self.column_names, row[2:])
if h in self.key_hash_to_original_key
},
return_compressed,
)
for row in self._connection.execute(
f"DELETE FROM {self.name} WHERE id IN ({', '.join(['?' for _ in ids])}) RETURNING *",
ids,
).fetchall()
}

elif query is not None:
sql_query, sql_params = pop_query(
Expand All @@ -497,12 +520,20 @@ def pop(self, ids=None, query={}, n=1, sort_by=None, reversed_sort=False, return
n=n,
)

return {
row[0]: self.deserialize_record(
{h: val for h, val in zip(self.column_names, row[2:]) if h in self.key_hash_to_original_key}, return_compressed
)
for row in self._connection.execute(sql_query, sql_params).fetchall()
}
with self._connection:
return {
row[0]: self.deserialize_record(
{
h: val
for h, val in zip(self.column_names, row[2:])
if h in self.key_hash_to_original_key
},
return_compressed,
)
for row in self._connection.execute(
sql_query, sql_params
).fetchall()
}

else:
raise ValueError("Either ids or query must be provided")
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
EMAIL = "praneeth@bpraneeth.com"
AUTHOR = "BEDAPUDI PRANEETH"
REQUIRES_PYTHON = ">=3.6.0"
VERSION = "0.0.2.dev17"
VERSION = "0.0.2.dev18"

# What packages are required for this module to be executed?
REQUIRED = [
Expand Down

0 comments on commit fc00b1c

Please sign in to comment.