Merge branch 'RDFLib:master' into fix-infixowl-old-idiom-use
Graham Higgins committed May 24, 2022
2 parents 69f87bf + a3a4611 commit a22ad4e
Showing 16 changed files with 116 additions and 137 deletions.
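
Most of the changes below add per-line flake8 suppressions: `# noqa: N8xx` for pep8-naming checks (N802 function name, N803 argument name, N806 local variable, N818 exception suffix), pycodestyle codes such as E722 (bare `except`), E402 (import not at top of file), E713/E741, and pyflakes codes F401/F402. As a minimal sketch of how these targeted suppressions behave — the function below is hypothetical and not part of this diff:

```python
# Hypothetical sketch: a trailing "# noqa: <code>[, <code>...]" comment tells
# flake8 to skip only the listed checks on that line; all other checks on the
# line, and everywhere else in the file, stay active.


def addThing(item):  # noqa: N802
    # N802 (pep8-naming): "function name should be lowercase" is silenced here
    # without hiding any unrelated warnings on the same line.
    try:
        return int(item)
    except:  # noqa: E722
        # E722 (pycodestyle): a bare "except" would normally be flagged.
        return None
```

A bare `# noqa` with no codes would silence every check on the line, which is why the commit pins specific codes throughout.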
39 changes: 19 additions & 20 deletions rdflib/graph.py
@@ -22,7 +22,6 @@
)
from urllib.parse import urlparse
from urllib.request import url2pathname
from warnings import warn

import rdflib.exceptions as exceptions
import rdflib.namespace as namespace # noqa: F401 # This is here because it is used in a docstring.
@@ -390,7 +389,7 @@ def __str__(self):
"[a rdfg:Graph;rdflib:storage " + "[a rdflib:Store;rdfs:label '%s']]."
) % self.store.__class__.__name__

def toPython(self):
def toPython(self): # noqa: N802
return self

def destroy(self, configuration):
@@ -434,7 +433,7 @@ def add(self, triple: Tuple[Node, Node, Node]):
self.__store.add((s, p, o), self, quoted=False)
return self

def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]): # noqa: N802
"""Add a sequence of triple with context"""

self.__store.addN(
@@ -919,7 +918,7 @@ def items(self, list):
raise ValueError("List contains a recursive rdf:rest reference")
chain.add(list)

def transitiveClosure(self, func, arg, seen=None):
def transitiveClosure(self, func, arg, seen=None): # noqa: N802
"""
Generates transitive closure of a user-defined
function against the graph
@@ -1174,7 +1173,7 @@ def parse(
source: Optional[
Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath]
] = None,
publicID: Optional[str] = None,
publicID: Optional[str] = None, # noqa: N803
format: Optional[str] = None,
location: Optional[str] = None,
file: Optional[Union[BinaryIO, TextIO]] = None,
@@ -1296,7 +1295,7 @@ def query(
query_object,
processor: Union[str, query.Processor] = "sparql",
result: Union[str, Type[query.Result]] = "sparql",
initNs=None,
initNs=None, # noqa: N803
initBindings=None,
use_store_provided: bool = True,
**kwargs,
@@ -1315,8 +1314,8 @@
"""

initBindings = initBindings or {}
initNs = initNs or dict(self.namespaces())
initBindings = initBindings or {} # noqa: N806
initNs = initNs or dict(self.namespaces()) # noqa: N806

if hasattr(self.store, "query") and use_store_provided:
try:
@@ -1341,14 +1340,14 @@ def update(
self,
update_object,
processor="sparql",
initNs=None,
initNs=None, # noqa: N803
initBindings=None,
use_store_provided=True,
**kwargs,
):
"""Update this graph with the given update query."""
initBindings = initBindings or {}
initNs = initNs or dict(self.namespaces())
initBindings = initBindings or {} # noqa: N806
initNs = initNs or dict(self.namespaces()) # noqa: N806

if hasattr(self.store, "update") and use_store_provided:
try:
@@ -1719,7 +1718,7 @@ def _graph(self, c: Optional[Union[Graph, Node, str]]) -> Optional[Graph]:
else:
return c

def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]): # noqa: N802
"""Add a sequence of triples with context"""

self.store.addN(
@@ -1844,7 +1843,7 @@ def parse(
source: Optional[
Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath]
] = None,
publicID: Optional[str] = None,
publicID: Optional[str] = None, # noqa: N803
format: Optional[str] = None,
location: Optional[str] = None,
file: Optional[Union[BinaryIO, TextIO]] = None,
@@ -2067,7 +2066,7 @@ def graph(self, identifier=None, base=None):
def parse(
self,
source=None,
publicID=None,
publicID=None, # noqa: N803
format=None,
location=None,
file=None,
@@ -2140,7 +2139,7 @@ def add(self, triple: Tuple[Node, Node, Node]):
self.store.add((s, p, o), self, quoted=True)
return self

def addN(self, quads: Tuple[Node, Node, Node, Any]) -> "QuotedGraph": # type: ignore[override]
def addN(self, quads: Tuple[Node, Node, Node, Any]) -> "QuotedGraph": # type: ignore[override] # noqa: N802
"""Add a sequence of triple with context"""

self.store.addN(
@@ -2198,7 +2197,7 @@ def __init__(self, graph, subject):
"""

_list = self._list = list()
LI_INDEX = URIRef(str(RDF) + "_")
LI_INDEX = URIRef(str(RDF) + "_") # noqa: N806
for (p, o) in graph.predicate_objects(subject):
if p.startswith(LI_INDEX): # != RDF.Seq: #
i = int(p.replace(LI_INDEX, ""))
@@ -2208,7 +2207,7 @@ def __init__(self, graph, subject):
# by sorting the keys (by integer) we have what we want!
_list.sort()

def toPython(self):
def toPython(self): # noqa: N802
return self

def __iter__(self):
@@ -2290,7 +2289,7 @@ def close(self):
def add(self, triple):
raise ModificationException()

def addN(self, quads):
def addN(self, quads): # noqa: N802
raise ModificationException()

def remove(self, triple):
@@ -2389,7 +2388,7 @@ def namespaces(self):
def absolutize(self, uri, defrag=1):
raise UnSupportedAggregateOperation()

def parse(self, source, publicID=None, format=None, **args):
def parse(self, source, publicID=None, format=None, **args): # noqa: N803
raise ModificationException()

def n3(self):
@@ -2460,7 +2459,7 @@ def add(
self.batch.append(triple_or_quad)
return self

def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]): # noqa: N802
if self.__batch_addn:
for q in quads:
self.add(q)
8 changes: 4 additions & 4 deletions rdflib/plugins/sparql/__init__.py
@@ -4,6 +4,8 @@
.. versionadded:: 4.0
"""

import sys
from typing import TYPE_CHECKING

SPARQL_LOAD_GRAPHS = True
"""
@@ -30,11 +32,9 @@

PLUGIN_ENTRY_POINT = "rdf.plugins.sparqleval"

import sys
from typing import TYPE_CHECKING, Any

from . import operators, parser, parserutils
from .processor import prepareQuery, prepareUpdate, processUpdate
from . import operators, parser, parserutils # noqa: E402
from .processor import prepareQuery, prepareUpdate, processUpdate # noqa: F401, E402

assert parser
assert operators
10 changes: 5 additions & 5 deletions rdflib/plugins/sparql/algebra.py
@@ -129,9 +129,9 @@ def _addvar(term, varsknown):
return [x[1] for x in l_]


def triples(l):
def triples(l): # noqa: E741

l = reduce(lambda x, y: x + y, l)
l = reduce(lambda x, y: x + y, l) # noqa: E741
if (len(l) % 3) != 0:
raise Exception("these aint triples")
return reorderTriples((l[x], l[x + 1], l[x + 2]) for x in range(0, len(l), 3))
@@ -322,7 +322,7 @@ def translateGroupGraphPattern(graphPattern):
return G


class StopTraversal(Exception):
class StopTraversal(Exception): # noqa: N818
def __init__(self, rv):
self.rv = rv

@@ -794,7 +794,7 @@ def translateQuery(q, base=None, initNs=None):
return Query(prologue, res)


class ExpressionNotCoveredException(Exception):
class ExpressionNotCoveredException(Exception): # noqa: N818
pass


@@ -1112,7 +1112,7 @@ def sparql_query_text(node):
elif node.name == "MultiplicativeExpression":
left_side = convert_node_arg(node.expr)
multiplication = left_side
for i, operator in enumerate(node.op):
for i, operator in enumerate(node.op): # noqa: F402
multiplication += (
operator + " " + convert_node_arg(node.other[i]) + " "
)
2 changes: 1 addition & 1 deletion rdflib/plugins/sparql/datatypes.py
@@ -29,7 +29,7 @@
)
)

### adding dateTime datatypes
# adding dateTime datatypes

XSD_DateTime_DTs = set((XSD.dateTime, XSD.date, XSD.time))

3 changes: 0 additions & 3 deletions rdflib/plugins/sparql/evaluate.py
@@ -28,19 +28,16 @@
from rdflib.plugins.sparql import CUSTOM_EVALS, parser
from rdflib.plugins.sparql.aggregates import Aggregator
from rdflib.plugins.sparql.evalutils import (
_diff,
_ebv,
_eval,
_fillTemplate,
_filter,
_join,
_minus,
_val,
)
from rdflib.plugins.sparql.parserutils import CompValue, value
from rdflib.plugins.sparql.sparql import (
AlreadyBound,
Bindings,
FrozenBindings,
FrozenDict,
Query,
2 changes: 1 addition & 1 deletion rdflib/plugins/sparql/evalutils.py
@@ -53,7 +53,7 @@ def _ebv(expr, ctx):
elif isinstance(expr, Variable):
try:
return EBV(ctx[expr])
except:
except: # noqa: E722
return False
return False

12 changes: 6 additions & 6 deletions rdflib/plugins/sparql/operators.py
@@ -65,7 +65,7 @@ def Builtin_isNUMERIC(expr, ctx):
try:
numeric(expr.arg)
return Literal(True)
except:
except: # noqa: E722
return Literal(False)


@@ -669,15 +669,15 @@ def default_cast(e, ctx):
if not isinstance(x, Literal):
raise SPARQLError("Can only cast Literals to non-string data-types")

if x.datatype and not x.datatype in XSD_DTs:
if x.datatype and not x.datatype in XSD_DTs: # noqa: E713
raise SPARQLError("Cannot cast literal with unknown datatype: %r" % x.datatype)

if e.iri == XSD.dateTime:
if x.datatype and x.datatype not in (XSD.dateTime, XSD.string):
raise SPARQLError("Cannot cast %r to XSD:dateTime" % x.datatype)
try:
return Literal(isodate.parse_datetime(x), datatype=e.iri)
except:
except: # noqa: E722
raise SPARQLError("Cannot interpret '%r' as datetime" % x)

if x.datatype == XSD.dateTime:
@@ -686,21 +686,21 @@ def default_cast(e, ctx):
if e.iri in (XSD.float, XSD.double):
try:
return Literal(float(x), datatype=e.iri)
except:
except: # noqa: E722
raise SPARQLError("Cannot interpret '%r' as float" % x)

elif e.iri == XSD.decimal:
if "e" in x or "E" in x: # SPARQL/XSD does not allow exponents in decimals
raise SPARQLError("Cannot interpret '%r' as decimal" % x)
try:
return Literal(Decimal(x), datatype=e.iri)
except:
except: # noqa: E722
raise SPARQLError("Cannot interpret '%r' as decimal" % x)

elif e.iri == XSD.integer:
try:
return Literal(int(x), datatype=XSD.integer)
except:
except: # noqa: E722
raise SPARQLError("Cannot interpret '%r' as int" % x)

elif e.iri == XSD.boolean:
5 changes: 2 additions & 3 deletions rdflib/plugins/sparql/parser.py
@@ -15,7 +15,6 @@
Literal,
OneOrMore,
Optional,
ParseException,
ParseResults,
Regex,
Suppress,
@@ -95,7 +94,7 @@ def expandTriples(terms):
# "Length of triple-list is not divisible by 3: %d!"%len(res)

# return [tuple(res[i:i+3]) for i in range(len(res)/3)]
except:
except: # noqa: E722
if DEBUG:
import traceback

@@ -1528,7 +1527,7 @@ def expandUnicodeEscapes(q):
def expand(m):
try:
return chr(int(m.group(1), 16))
except:
except: # noqa: E722
raise Exception("Invalid unicode code point: " + m)

return expandUnicodeEscapes_re.sub(expand, q)
2 changes: 1 addition & 1 deletion rdflib/plugins/sparql/parserutils.py
@@ -270,4 +270,4 @@ def prettify_parsetree(t, indent="", depth=0):


# hurrah for circular imports
from rdflib.plugins.sparql.sparql import NotBoundError, SPARQLError
from rdflib.plugins.sparql.sparql import NotBoundError, SPARQLError # noqa: E402
2 changes: 1 addition & 1 deletion rdflib/plugins/sparql/results/jsonresults.py
@@ -1,5 +1,5 @@
import json
from typing import IO, Any, Dict, Optional, TextIO, Union
from typing import IO, Any, Dict

from rdflib import BNode, Literal, URIRef, Variable
from rdflib.query import Result, ResultException, ResultParser, ResultSerializer
1 change: 0 additions & 1 deletion rdflib/plugins/sparql/results/tsvresults.py
@@ -11,7 +11,6 @@
LineEnd,
Literal,
Optional,
ParseException,
ParserElement,
Suppress,
ZeroOrMore,
4 changes: 2 additions & 2 deletions rdflib/plugins/sparql/sparql.py
@@ -24,7 +24,7 @@ def __init__(self, msg: Optional[str] = None):
SPARQLError.__init__(self, msg)


class AlreadyBound(SPARQLError):
class AlreadyBound(SPARQLError): # noqa: N818
"""Raised when trying to bind a variable that is already bound!"""

def __init__(self):
@@ -415,7 +415,7 @@ def absolutize(
return Literal(
iri.string, lang=iri.lang, datatype=self.absolutize(iri.datatype) # type: ignore[arg-type]
)
elif isinstance(iri, URIRef) and not ":" in iri:
elif isinstance(iri, URIRef) and not ":" in iri: # noqa: E713
return URIRef(iri, base=self.base)

return iri
2 changes: 1 addition & 1 deletion rdflib/plugins/sparql/update.py
@@ -305,6 +305,6 @@ def evalUpdate(graph, update, initBindings={}):
evalModify(ctx, u)
else:
raise Exception("Unknown update operation: %s" % (u,))
except:
except: # noqa: E722
if not u.silent:
raise
2 changes: 1 addition & 1 deletion test/jsonld/test_context.py
@@ -196,7 +196,7 @@ def test_ignore_base_remote_context():
ctx_url = "http://example.org/remote-base.jsonld"
SOURCES[ctx_url] = {"@context": {"@base": "/remote"}}
ctx = Context(ctx_url)
assert ctx.base == None
assert ctx.base is None


@_expect_exception(errors.RECURSIVE_CONTEXT_INCLUSION)
