Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Avoid string formatting operations prior to invoking logger #1

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 17 additions & 17 deletions AltFS.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ def __init__(self, storage_provider_name, machine_identification_method,
the global METHODS dictionary.
"""
logger.debug("initializing AltFS with storage provider: %s, " +
"machine identification method: %s" %
"machine identification method: %s",
storage_provider_name, machine_identification_method)
# calculate checksum of machine identification string, used for
# calculating the bucket index of the first file system block
Expand All @@ -193,13 +193,13 @@ def __init__(self, storage_provider_name, machine_identification_method,
# set the max data block size
self.max_block_size = max_block_size
# log calculated initialization info
logger.info("INIT:number of buckets (=divider): %s" %
logger.info("INIT:number of buckets (=divider): %s",
self._buckets_count)
logger.info("INIT:machine identification string: %s" %
logger.info("INIT:machine identification string: %s",
machine_identification_string)
logger.info("INIT:machine identification checksum: %s" %
logger.info("INIT:machine identification checksum: %s",
self._machine_id_checksum)
logger.info("INIT:first bucket ID: %s" %
logger.info("INIT:first bucket ID: %s",
self._first_bucket_id)
# iterate all buckets in storage to fill the blocks mapping
self._load_blocks_dict()
Expand Down Expand Up @@ -305,12 +305,12 @@ def _get_block(self, bucket_id, value_id):
block = Block.generate_block_from_packed_str(
self._storage_provider.get_block(bucket_id, value_id))
except Exception as e:
logger.error("reading of block at (%s:%s) has failed: %s" %
(bucket_id, value_id, str(e)))
logger.error("reading of block at (%s:%s) has failed: %s",
bucket_id, value_id, str(e))
raise InternalStorageOperationException(
InternalStorageOperationException.OPERATION_READ, str(e))
logger.debug("a block was read at (%s:%s):%s" %
(bucket_id, value_id, block.__dict__))
logger.debug("a block was read at (%s:%s):%s",
bucket_id, value_id, block.__dict__)
return block

def _get_block_by_id(self, block_id):
Expand Down Expand Up @@ -358,14 +358,14 @@ def _write_block(self, bucket_id, value_id, block):

Raises InternalStorageOperationException if provider failed to write
"""
logger.debug("writing block at (%s:%s):%s" %
(bucket_id, value_id, block.__dict__))
logger.debug("writing block at (%s:%s):%s",
bucket_id, value_id, block.__dict__)
try:
value_id = self._storage_provider.write_block(
bucket_id, value_id, data=block.serialize())
except Exception as e:
logger.error("writing of block (id:%s) to (%s:%s) has failed: %s" %
(block.block_id, bucket_id, value_id, str(e)))
logger.error("writing of block (id:%s) to (%s:%s) has failed: %s",
block.block_id, bucket_id, value_id, str(e))
raise InternalStorageOperationException(
InternalStorageOperationException.OPERATION_WRITE, str(e))
# add the new block mapping
Expand Down Expand Up @@ -393,14 +393,14 @@ def _delete_value(self, bucket_id, value_id):
Raises InternalStorageOperationException if provider failed to delete
"""
block = self._get_block(bucket_id, value_id)
logger.debug("deleting block ID %s (%s:%s)" %
(block.block_id, bucket_id, value_id))
logger.debug("deleting block ID %s (%s:%s)",
block.block_id, bucket_id, value_id)
try:
self._storage_provider.delete_block(bucket_id, value_id)
except Exception as e:
logger.error(
"deleting of block (id:%s) to (%s:%s) has failed: %s" %
(block.block_id, bucket_id, value_id, str(e)))
"deleting of block (id:%s) to (%s:%s) has failed: %s",
block.block_id, bucket_id, value_id, str(e))
raise InternalStorageOperationException(
InternalStorageOperationException.OPERATION_DELETE, str(e))
# remove the mapping of the deleted block
Expand Down
4 changes: 2 additions & 2 deletions providers/RegistryStorageProvider.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,8 +94,8 @@ def write_block(self, bucket_id, value_id, data=""):
"value with id does not exist in specified bucket." +
" generating a new value name for bucket id %s" % bucket_id)
value_name = self._generate_value_name(bucket_id)
logger.debug("generated a new value name in bucket id %s: %s" % (
bucket_id, value_name))
logger.debug("generated a new value name in bucket id %s: %s",
bucket_id, value_name)
with self._get_bucket_key(bucket_id, _winreg.KEY_WRITE) as key:
_winreg.SetValueEx(key, value_name, 0,
_winreg.REG_BINARY, data)
Expand Down
39 changes: 19 additions & 20 deletions providers/UserDefaultsStorageProvider.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,33 +70,33 @@ def __init__(self, machine_identification_string, **kwargs):
# when calculating the bucket index
self._buckets_names = [self._domain_name]
self._buckets_count = len(self._buckets_names)
logger.debug("domain: %s" % self._domain_name)
logger.debug("domain: %s", self._domain_name)

def write_block(self, bucket_id, value_id, data=""):
"""Described in parent class"""
logger.debug("writing block at (%s:%s)" % (bucket_id, value_id))
logger.debug("writing block at (%s:%s)", bucket_id, value_id)
try:
value_name = self._get_value_name(
bucket_id, value_id)
logger.debug("value with id already exists at (%s:%s)" %
(bucket_id, value_id))
logger.debug("value with id already exists at (%s:%s)",
bucket_id, value_id)
except BucketValueMissingException:
logger.debug(
"value with id does not exist in specified bucket." +
" generating a new value name for bucket id %s" % bucket_id)
" generating a new value name for bucket id %s", bucket_id)
value_name = self._generate_value_name()
logger.debug("generated a new value name in bucket id %s: %s" % (
bucket_id, value_name))
logger.debug("generated a new value name in bucket id %s: %s",
bucket_id, value_name)
target_value_id = UserDefaultsStorageProvider.value_name_to_value_id(
value_name)
logger.debug("creating a new key at (%s:%s): %s" % (
bucket_id, target_value_id, value_name))
logger.debug("creating a new key at (%s:%s): %s",
bucket_id, target_value_id, value_name)
self._defaults_client.write_key(value_name, data)
return target_value_id

def get_block(self, bucket_id, value_id):
"""Described in parent class"""
logger.debug("getting block at (%s:%s)" % (bucket_id, value_id))
logger.debug("getting block at (%s:%s)", bucket_id, value_id)
data = self._defaults_client.get_key(
self._get_value_name(bucket_id, value_id))
return data
Expand All @@ -106,10 +106,10 @@ def delete_block(self, bucket_id, value_id):
value_name = self._get_value_name(
bucket_id, value_id)
logger.debug(
"deleting a key at (%s:%s): %s" %
(bucket_id,
UserDefaultsStorageProvider.value_name_to_value_id(value_name),
value_name))
"deleting a key at (%s:%s): %s",
bucket_id,
UserDefaultsStorageProvider.value_name_to_value_id(value_name),
value_name)
self._defaults_client.delete_key(value_name)

def get_value_ids_in_bucket(self, bucket_id):
Expand All @@ -128,16 +128,15 @@ def value_name_to_value_id(value_name):
UserDefaultsStorageProvider.KEY_NAME_DELIMITER)[-1])

def _get_value_name(self, bucket_id, value_id):
logger.debug("looking for value name at (%s:%s)" %
(bucket_id, value_id))
logger.debug("looking for value name at (%s:%s)", bucket_id, value_id)
if value_id is not None:
values_dict = self._enumerate_applicable_values_dict()
logger.debug("existing values: %s" % values_dict)
logger.debug("existing values: %s", values_dict)
if value_id in values_dict:
logger.debug("value name exists at (%s:%s): %s" %
(bucket_id, value_id, values_dict[value_id]))
logger.debug("value name exists at (%s:%s): %s",
bucket_id, value_id, values_dict[value_id])
return values_dict[value_id]
logger.debug("no value name at (%s:%s)" % (bucket_id, value_id))
logger.debug("no value name at (%s:%s)", bucket_id, value_id)
raise BucketValueMissingException(
"No applicable value found in bucket")

Expand Down
51 changes: 25 additions & 26 deletions providers/WMIStorageProvider.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,15 +89,15 @@ def __init__(self, machine_identification_string, **kwargs):
self._buckets_names = [self._class_name]
self._buckets_count = len(self._buckets_names)
self._create_bucket()
logger.debug("namespace: %s" % self._namespace)
logger.debug("root class name: %s" % self._class_name)
logger.debug("namespace: %s", self._namespace)
logger.debug("root class name: %s", self._class_name)

def _generate_bucket_name(self):
classes = list([klass for klass in self._wmi_client.subclasses_of()
if not klass.startswith(
WMIStorageProvider.TARGET_CLASS_NAME_SUFFIX)])
classes_count = len(classes)
logger.debug("found %s legitimate classes" % classes_count)
logger.debug("found %s legitimate classes", classes_count)
machine_id_checksum = calculate_bits_sum(
self._machine_id_string)
target_class_id = machine_id_checksum % classes_count - len(
Expand All @@ -106,8 +106,8 @@ def _generate_bucket_name(self):
self._wmi_client.subclasses_of())[
:machine_id_checksum % classes_count] if klass.startswith(
WMIStorageProvider.TARGET_CLASS_NAME_SUFFIX)])
logger.debug("target class for name generation: %s" %
(classes[target_class_id]))
logger.debug("target class for name generation: %s",
classes[target_class_id])
return WMIStorageProvider.TARGET_CLASS_NAME_SUFFIX + \
classes[target_class_id].split("_")[-1]

Expand All @@ -117,25 +117,25 @@ def _create_bucket(self):
return
p_ns = ctypes.c_wchar_p(self._namespace)
p_cn = ctypes.c_wchar_p(self._class_name)
logger.debug("creating class: %s\\%s" %
(self._namespace, self._class_name))
logger.debug("creating class: %s\\%s",
self._namespace, self._class_name)
self._wmi_client_dll.CreateClass(p_ns, p_cn)

def write_block(self, bucket_id, value_id, data=""):
"""Described in parent class"""
logger.debug("writing block at (%s:%s)" % (bucket_id, value_id))
logger.debug("writing block at (%s:%s)", bucket_id, value_id)
try:
value_name = self._get_value_name(
bucket_id, value_id)
logger.debug("value with id already exists at (%s:%s)" %
(bucket_id, value_id))
logger.debug("value with id already exists at (%s:%s)",
bucket_id, value_id)
except BucketValueMissingException:
logger.debug(
"value with id does not exist in specified bucket." +
" generating a new value name for bucket id %s" % bucket_id)
" generating a new value name for bucket id %s", bucket_id)
value_name = self._generate_value_name()
logger.debug("generated a new value name in bucket id %s: %s" % (
bucket_id, value_name))
logger.debug("generated a new value name in bucket id %s: %s",
bucket_id, value_name)
target_value_id = WMIStorageProvider.value_name_to_value_id(value_name)
p_ns = ctypes.c_wchar_p(self._namespace)
p_cn = ctypes.c_wchar_p(self._class_name)
Expand All @@ -153,7 +153,7 @@ def write_block(self, bucket_id, value_id, data=""):

def get_block(self, bucket_id, value_id):
"""Described in parent class"""
logger.debug("getting block at (%s:%s)" % (bucket_id, value_id))
logger.debug("getting block at (%s:%s)", bucket_id, value_id)
data = self._wmi_client.get(self._class_name).wmi_property(
self._get_value_name(bucket_id, value_id)).value
return data
Expand All @@ -166,12 +166,12 @@ def delete_block(self, bucket_id, value_id):
p_cn = ctypes.c_wchar_p(self._class_name)
p_vn = ctypes.c_wchar_p(value_name)
logger.debug(
"deleting a property at (%s:%s): %s\\%s.%s" %
(bucket_id,
WMIStorageProvider.value_name_to_value_id(value_name),
self._namespace,
self._class_name,
value_name))
"deleting a property at (%s:%s): %s\\%s.%s",
bucket_id,
WMIStorageProvider.value_name_to_value_id(value_name),
self._namespace,
self._class_name,
value_name)
self._wmi_client_dll.DeleteProperty(p_ns, p_cn, p_vn)

def get_value_ids_in_bucket(self, bucket_id):
Expand All @@ -190,16 +190,15 @@ def value_name_to_value_id(value_name):
WMIStorageProvider.PROPERTY_NAME_DELIMITER)[-1])

def _get_value_name(self, bucket_id, value_id):
logger.debug("looking for value name at (%s:%s)" %
(bucket_id, value_id))
logger.debug("looking for value name at (%s:%s)", bucket_id, value_id)
if value_id is not None:
values_dict = self._enumerate_applicable_values_dict()
logger.debug("existing values: %s" % values_dict)
logger.debug("existing values: %s", values_dict)
if value_id in values_dict:
logger.debug("value name exists at (%s:%s): %s" %
(bucket_id, value_id, values_dict[value_id]))
logger.debug("value name exists at (%s:%s): %s",
bucket_id, value_id, values_dict[value_id])
return values_dict[value_id]
logger.debug("no value name at (%s:%s)" % (bucket_id, value_id))
logger.debug("no value name at (%s:%s)", bucket_id, value_id)
raise BucketValueMissingException(
"No applicable value found in bucket")

Expand Down