Skip to content

Commit

Permalink
Merge pull request #1191 from FlorianLudwig/random-cleanup
Browse files Browse the repository at this point in the history
small cleanups
  • Loading branch information
nicholascar authored Oct 25, 2020
2 parents dde9db8 + b5d998c commit c5ff127
Show file tree
Hide file tree
Showing 7 changed files with 22 additions and 38 deletions.
2 changes: 1 addition & 1 deletion docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ def find_version(filename):
exclude_trees = ["_build", "draft"]

# The reST default role (used for this markup: `text`) to use for all documents.
default_role = 'py:obj'
default_role = "py:obj"

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
Expand Down
2 changes: 2 additions & 0 deletions rdflib/plugins/sparql/operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -1119,6 +1119,8 @@ def calculateFinalDateTime(obj1, dt1, obj2, dt2, operation):

def EBV(rt):
"""
Effective Boolean Value (EBV)
* If the argument is a typed literal with a datatype of xsd:boolean,
the EBV is the value of that argument.
* If the argument is a plain literal or a typed literal with a
Expand Down
2 changes: 1 addition & 1 deletion rdflib/plugins/sparql/parserutils.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def get(self, a, variables=False, errors=False):
def __getattr__(self, a):
# Hack hack: OrderedDict relies on this
if a in ("_OrderedDict__root", "_OrderedDict__end"):
raise AttributeError
raise AttributeError()
try:
return self[a]
except KeyError:
Expand Down
37 changes: 14 additions & 23 deletions rdflib/plugins/sparql/sparql.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ def __getitem__(self, key):
if not isinstance(key, Node):
key = Variable(key)

if not type(key) in (BNode, Variable):
if not isinstance(key, (BNode, Variable)):
return key

if key not in self._d:
Expand All @@ -177,22 +177,20 @@ def project(self, vars):

def merge(self, other):
res = FrozenBindings(self.ctx, itertools.chain(self.items(), other.items()))

return res

def _now(self):
@property
def now(self):
return self.ctx.now

def _bnodes(self):
@property
def bnodes(self):
return self.ctx.bnodes

def _prologue(self):
@property
def prologue(self):
return self.ctx.prologue

prologue = property(_prologue)
bnodes = property(_bnodes)
now = property(_now)

def forget(self, before, _except=None):
"""
return a frozen dict only of bindings made in self
Expand Down Expand Up @@ -222,8 +220,7 @@ def remember(self, these):
return FrozenBindings(self.ctx, (x for x in self.items() if x[0] in these))


class QueryContext(object):

class QueryContext:
"""
Query context - passed along when evaluating the query
"""
Expand Down Expand Up @@ -260,7 +257,9 @@ def clone(self, bindings=None):
r.bnodes = self.bnodes
return r

def _get_dataset(self):
@property
def dataset(self):
        """current dataset"""
if self._dataset is None:
raise Exception(
"You performed a query operation requiring "
Expand All @@ -269,8 +268,6 @@ def _get_dataset(self):
)
return self._dataset

dataset = property(_get_dataset, doc="current dataset")

def load(self, source, default=False, **kwargs):
def _load(graph, source):
try:
Expand Down Expand Up @@ -306,7 +303,7 @@ def _load(graph, source):

def __getitem__(self, key):
# in SPARQL BNodes are just labels
if not type(key) in (BNode, Variable):
if not isinstance(key, (BNode, Variable)):
return key
try:
return self.bindings[key]
Expand Down Expand Up @@ -348,11 +345,6 @@ def push(self):
def clean(self):
return self.clone([])

# def pop(self):
# self.bindings = self.bindings.outer
# if self.bindings is None:
# raise Exception("We've bottomed out of the bindings stack!")

def thaw(self, frozenbindings):
"""
Create a new read/write query context from the given solution
Expand All @@ -362,8 +354,7 @@ def thaw(self, frozenbindings):
return c


class Prologue(object):

class Prologue:
"""
A class for holding prefixing bindings and base URI information
"""
Expand Down Expand Up @@ -402,7 +393,7 @@ def absolutize(self, iri):
return iri


class Query(object):
class Query:
"""
A parsed and translated query
"""
Expand Down
9 changes: 2 additions & 7 deletions rdflib/plugins/stores/sleepycat.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,11 @@ def bb(u):


try:
from bsddb import db
from bsddb3 import db

has_bsddb = True
except ImportError:
try:
from bsddb3 import db

has_bsddb = True
except ImportError:
has_bsddb = False
has_bsddb = False


if has_bsddb:
Expand Down
4 changes: 1 addition & 3 deletions rdflib/plugins/stores/sparqlstore.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,6 @@ def __init__(
auth=None,
**sparqlconnector_kwargs
):
"""
"""
super(SPARQLStore, self).__init__(
query_endpoint=query_endpoint, returnFormat=returnFormat, auth=auth, **sparqlconnector_kwargs
)
Expand Down Expand Up @@ -374,7 +372,7 @@ def remove_graph(self, graph):
raise TypeError("The SPARQL store is read only")

def _is_contextual(self, graph):
""" Returns `True` if the "GRAPH" keyword must appear
"""Returns `True` if the "GRAPH" keyword must appear
in the final SPARQL query sent to the endpoint.
"""
if (not self.context_aware) or (graph is None):
Expand Down
4 changes: 1 addition & 3 deletions rdflib/resource.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-

__doc__ = """
"""
The :class:`~rdflib.resource.Resource` class wraps a
:class:`~rdflib.graph.Graph`
and a resource reference (i.e. a :class:`rdflib.term.URIRef` or
Expand Down

0 comments on commit c5ff127

Please sign in to comment.