Added Python support for JULEA #175

Open · wants to merge 1 commit into master
6 changes: 6 additions & 0 deletions .gitignore
@@ -22,3 +22,9 @@ bld*/

# Visual Studio Code
.vscode/

# Python
**/__pycache__/

# C/C++ LSP
.ccls-cache/
32 changes: 32 additions & 0 deletions benchmark/python/benchmark.py
@@ -0,0 +1,32 @@
from kv.kv import benchmark_kv
from object.object import benchmark_object
from object.distributed_object import benchmark_distributed_object
from db.entry import benchmark_db_entry
from db.iterator import benchmark_db_iterator
from db.schema import benchmark_db_schema
from item.collection import benchmark_collection
from item.item import benchmark_item
from benchmarkrun import print_header
from sys import argv

if __name__ == "__main__":
    runs = []
    iterations = 1000
    machine_readable = ("-m" in argv)
    print_header(machine_readable)

    # KV Client
    benchmark_kv(runs, iterations, machine_readable)

    # Object Client
    benchmark_distributed_object(runs, iterations, machine_readable)
    benchmark_object(runs, iterations, machine_readable)

    # DB Client
    benchmark_db_entry(runs, iterations, machine_readable)
    benchmark_db_iterator(runs, iterations, machine_readable)
    benchmark_db_schema(runs, iterations, machine_readable)

    # Item Client
    benchmark_collection(runs, iterations, machine_readable)
    benchmark_item(runs, iterations, machine_readable)
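
Usage note: assuming the julea cffi bindings imported by the sub-benchmarks are importable (for example with benchmark/python as the working directory) and a JULEA server is running, the suite would presumably be invoked as follows; the exact setup is an assumption, not part of this commit:

    python benchmark.py        # prints the human-readable result table
    python benchmark.py -m     # prints machine-readable CSV (name,elapsed,operations)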
75 changes: 75 additions & 0 deletions benchmark/python/benchmarkrun.py
@@ -0,0 +1,75 @@
from time import perf_counter_ns

class BenchmarkRun:
    def __init__(self, name, iterations, machine_readable):
        self.name = name
        self.iterations = iterations
        self.timer_started = False
        self.start = None
        self.stop = None
        self.operations = iterations
        self.machine_readable = machine_readable

    def start_timer(self):
        self.timer_started = True
        self.start = perf_counter_ns()

    def stop_timer(self):
        self.timer_started = False
        self.stop = perf_counter_ns()

    def get_runtime_ms(self):
        val = self.get_runtime_ns()
        return val / 1000000 if val is not None else None

    def get_runtime_s(self):
        val = self.get_runtime_ns()
        return val / 1000000000 if val is not None else None

    def get_runtime_ns(self):
        if self.timer_started or self.stop is None:
            return None
        else:
            return self.stop - self.start

    def print_result(self):
        if self.machine_readable:
            print(f"{self.name},{self.get_runtime_s()},{self.operations}")
        else:
            name_col = self.name.ljust(60, " ")
            runtime_col = f"{self.get_runtime_s():.3f}".rjust(8, " ") + " seconds"
            operations_col = f"{int(self.operations/self.get_runtime_s())}/s".rjust(12, " ")
            print(f"{name_col} | {runtime_col} | {operations_col}")

    def print_empty(self):
        if self.machine_readable:
            print(f"{self.name},-,-")
        else:
            name_col = self.name.ljust(60, " ")
            runtime_col = "-".rjust(8, " ") + " seconds"
            operations_col = "-/s".rjust(12, " ")
            print(f"{name_col} | {runtime_col} | {operations_col}")

def append_to_benchmark_list_and_run(_list, run, func):
    _list.append(run)
    try:
        func(run)
        run.print_result()
    except:
        run.print_empty()

def print_result_table_header():
    name_col = "Name".ljust(60, " ")
    runtime_col = "Duration".ljust(16, " ")
    operations_col = "Operations/s".rjust(12, " ")
    header = f"{name_col} | {runtime_col} | {operations_col}"
    print(header + "\n" + len(header) * "-")

def print_machine_readable_header():
    print("name,elapsed,operations")

def print_header(machine_readable):
    if machine_readable:
        print_machine_readable_header()
    else:
        print_result_table_header()
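
For context, a minimal sketch of how a sub-benchmark module (for example kv/kv.py) would be expected to use these helpers; benchmark_example and its inner _bench function are hypothetical and not part of this commit:

    from benchmarkrun import BenchmarkRun, append_to_benchmark_list_and_run

    def benchmark_example(benchmarks, iterations, machine_readable):
        run = BenchmarkRun("/example/operation", iterations, machine_readable)

        def _bench(run):
            run.start_timer()
            for _ in range(run.iterations):
                pass  # one JULEA operation per iteration would go here
            run.stop_timer()

        # Appends the run to the shared list, executes it, and prints either a
        # result row or an empty row if the benchmark raises.
        append_to_benchmark_list_and_run(benchmarks, run, _bench)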
136 changes: 136 additions & 0 deletions benchmark/python/db/common.py
@@ -0,0 +1,136 @@
from julea import lib, encode, ffi

N = 1 << 12
N_GET_DIVIDER = N >> 8
N_PRIME = 11971
N_MODULUS = 256
CLASS_MODULUS = N >> 3
CLASS_LIMIT = CLASS_MODULUS >> 1
SIGNED_FACTOR = N_PRIME
USIGNED_FACTOR = N_PRIME
FLOAT_FACTOR = 3.1415926

def _benchmark_db_prepare_scheme(namespace_encoded, use_batch, use_index_all,
                                 use_index_single, batch, delete_batch):
    b_s_error_ptr = ffi.new("GError*")
    b_s_error_ptr = ffi.NULL
    table_name = encode("table")
    string_name = encode("string")
    float_name = encode("float")
    uint_name = encode("uint")
    sint_name = encode("sint")
    blob_name = encode("blob")
    b_scheme = lib.j_db_schema_new(namespace_encoded, table_name, b_s_error_ptr)
    assert b_scheme != ffi.NULL
    assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_add_field(b_scheme, string_name,
                                     lib.J_DB_TYPE_STRING, b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_add_field(b_scheme, float_name,
                                     lib.J_DB_TYPE_FLOAT64, b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_add_field(b_scheme, uint_name, lib.J_DB_TYPE_UINT64,
                                     b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_add_field(b_scheme, sint_name, lib.J_DB_TYPE_UINT64,
                                     b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_add_field(b_scheme, blob_name, lib.J_DB_TYPE_BLOB,
                                     b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    if use_index_all:
        names = ffi.new("char*[5]")
        names[0] = encode("string")
        names[1] = encode("float")
        names[2] = encode("uint")
        names[3] = encode("sint")
        names[4] = ffi.NULL
        assert lib.j_db_schema_add_index(b_scheme, names, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
    if use_index_single:
        names = ffi.new("char*[2]")
        names[1] = ffi.NULL
        names[0] = encode("string")
        assert lib.j_db_schema_add_index(b_scheme, names, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        names[0] = encode("float")
        assert lib.j_db_schema_add_index(b_scheme, names, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        names[0] = encode("uint")
        assert lib.j_db_schema_add_index(b_scheme, names, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        names[0] = encode("sint")
        assert lib.j_db_schema_add_index(b_scheme, names, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_create(b_scheme, batch, b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    assert lib.j_db_schema_delete(b_scheme, delete_batch, b_s_error_ptr)
    assert b_s_error_ptr == ffi.NULL
    if not use_batch:
        assert lib.j_batch_execute(batch)
    return lib.j_db_schema_ref(b_scheme)

def _benchmark_db_get_identifier(i):
    return f"{i * SIGNED_FACTOR % N_MODULUS:x}-benchmark-{i}"

def _benchmark_db_insert(run, scheme, namespace, use_batch, use_index_all,
                         use_index_single, use_timer):
    b_scheme = None
    b_s_error_ptr = ffi.new("GError*")
    b_s_error_ptr = ffi.NULL
    namespace_encoded = encode(namespace)
    batch = lib.j_batch_new_for_template(lib.J_SEMANTICS_TEMPLATE_DEFAULT)
    delete_batch = lib.j_batch_new_for_template(lib.J_SEMANTICS_TEMPLATE_DEFAULT)
    if use_timer:
        assert scheme is None
        assert run is not None
        b_scheme = _benchmark_db_prepare_scheme(namespace_encoded, use_batch,
                                                use_index_all, use_index_single,
                                                batch, delete_batch)
        assert b_scheme is not None
        run.start_timer()
    else:
        assert use_batch
        assert run is None
        lib.j_db_schema_ref(scheme)
        b_scheme = scheme
    for i in range(N):
        i_signed_ptr = ffi.new("long*")
        i_signed_ptr[0] = ((i * SIGNED_FACTOR) % CLASS_MODULUS) - CLASS_LIMIT
        i_usigned_ptr = ffi.new("unsigned long*")
        i_usigned_ptr[0] = ((i * USIGNED_FACTOR) % CLASS_MODULUS)
        i_float_ptr = ffi.new("double*")
        i_float_ptr[0] = i_signed_ptr[0] * FLOAT_FACTOR
        string = encode(_benchmark_db_get_identifier(i))
        string_name = encode("string")
        float_name = encode("float")
        sint_name = encode("sint")
        uint_name = encode("uint")
        entry = lib.j_db_entry_new(b_scheme, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        assert lib.j_db_entry_set_field(entry, string_name, string, 0,
                                        b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        assert lib.j_db_entry_set_field(entry, float_name, i_float_ptr, 0,
                                        b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        assert lib.j_db_entry_set_field(entry, sint_name, i_signed_ptr, 0,
                                        b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        assert lib.j_db_entry_set_field(entry, uint_name, i_usigned_ptr, 0,
                                        b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        assert lib.j_db_entry_insert(entry, batch, b_s_error_ptr)
        assert b_s_error_ptr == ffi.NULL
        if not use_batch:
            assert lib.j_batch_execute(batch)
    if use_batch or not use_timer:
        lib.j_batch_execute(batch)
    if use_timer:
        run.stop_timer()
        assert lib.j_batch_execute(delete_batch)
        run.operations = N
    lib.j_batch_unref(batch)
    lib.j_batch_unref(delete_batch)
    lib.j_db_entry_unref(entry)
    lib.j_db_schema_unref(b_scheme)
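
A hedged sketch of how db/entry.py (imported by benchmark.py but not shown in this excerpt) might wrap _benchmark_db_insert; only the function signature comes from common.py, while the run name, namespace string, and flag combination are assumptions:

    from benchmarkrun import BenchmarkRun, append_to_benchmark_list_and_run
    from db.common import _benchmark_db_insert

    def benchmark_db_entry(benchmarks, iterations, machine_readable):
        # use_timer=True requires scheme=None and a run object; use_batch=False
        # makes _benchmark_db_insert execute one batch per inserted entry.
        run = BenchmarkRun("/db/entry/insert", iterations, machine_readable)
        append_to_benchmark_list_and_run(
            benchmarks, run,
            lambda r: _benchmark_db_insert(r, None, "benchmark_insert",
                                           False, False, False, True))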