diff --git a/rdflib/plugins/stores/auditable.py b/rdflib/plugins/stores/auditable.py
index 8fc048e47..8bbdcd2f5 100644
--- a/rdflib/plugins/stores/auditable.py
+++ b/rdflib/plugins/stores/auditable.py
@@ -20,7 +20,7 @@
 from rdflib import ConjunctiveGraph, Graph
 from rdflib.store import Store
 
-destructiveOpLocks = {
+destructiveOpLocks = {  # noqa: N816
     "add": None,
     "remove": None,
 }
@@ -59,7 +59,7 @@ def add(self, triple, context, quoted=False):
                 if context is not None
                 else None
             )
-            ctxId = context.identifier if context is not None else None
+            ctxId = context.identifier if context is not None else None  # noqa: N806
             if list(self.store.triples(triple, context)):
                 return  # triple already in store, do nothing
             self.reverseOps.append((s, p, o, ctxId, "remove"))
@@ -81,7 +81,7 @@ def remove(self, spo, context=None):
                 if context is not None
                 else None
             )
-            ctxId = context.identifier if context is not None else None
+            ctxId = context.identifier if context is not None else None  # noqa: N806
             if None in [subject, predicate, object_, context]:
                 if ctxId:
                     for s, p, o in context.triples((subject, predicate, object_)):
diff --git a/rdflib/plugins/stores/berkeleydb.py b/rdflib/plugins/stores/berkeleydb.py
index b01f74778..b580b2f4f 100644
--- a/rdflib/plugins/stores/berkeleydb.py
+++ b/rdflib/plugins/stores/berkeleydb.py
@@ -79,7 +79,7 @@ def __get_identifier(self):
 
     identifier = property(__get_identifier)
 
-    def _init_db_environment(self, homeDir, create=True):
+    def _init_db_environment(self, homeDir, create=True):  # noqa: N803
         if not exists(homeDir):
             if create is True:
                 mkdir(homeDir)
@@ -100,7 +100,7 @@ def is_open(self):
     def open(self, path, create=True):
         if not has_bsddb:
             return NO_STORE
-        homeDir = path
+        homeDir = path  # noqa: N806
 
         if self.__identifier is None:
             self.__identifier = URIRef(pathname2url(abspath(homeDir)))
diff --git a/rdflib/plugins/stores/concurrent.py b/rdflib/plugins/stores/concurrent.py
index a258e778a..fd4167983 100644
--- a/rdflib/plugins/stores/concurrent.py
+++ b/rdflib/plugins/stores/concurrent.py
@@ -85,7 +85,7 @@ def __end_read(self):
                 (s, p, o) = pending_removes.pop()
                 try:
                     self.store.remove((s, p, o))
-                except:
+                except:  # noqa: E722
                     # TODO: change to try finally?
                     print(s, p, o, "Not in store to remove")
             pending_adds = self.__pending_adds
diff --git a/rdflib/plugins/stores/memory.py b/rdflib/plugins/stores/memory.py
index 99dbf39c1..1fd26a1b2 100644
--- a/rdflib/plugins/stores/memory.py
+++ b/rdflib/plugins/stores/memory.py
@@ -46,33 +46,33 @@ def add(self, triple, context, quoted=False):
         spo = self.__spo
         try:
             po = spo[subject]
-        except:
+        except:  # noqa: E722
             po = spo[subject] = {}
         try:
             o = po[predicate]
-        except:
+        except:  # noqa: E722
             o = po[predicate] = {}
         o[object] = 1
 
         pos = self.__pos
         try:
             os = pos[predicate]
-        except:
+        except:  # noqa: E722
             os = pos[predicate] = {}
         try:
             s = os[object]
-        except:
+        except:  # noqa: E722
             s = os[object] = {}
         s[subject] = 1
 
         osp = self.__osp
         try:
             sp = osp[object]
-        except:
+        except:  # noqa: E722
             sp = osp[object] = {}
         try:
             p = sp[subject]
-        except:
+        except:  # noqa: E722
             p = sp[subject] = {}
         p[predicate] = 1
 
@@ -88,7 +88,7 @@ def triples(self, triple_pattern, context=None):
         if subject != ANY:  # subject is given
             spo = self.__spo
             if subject in spo:
-                subjectDictionary = spo[subject]
+                subjectDictionary = spo[subject]  # noqa: N806
                 if predicate != ANY:  # subject+predicate is given
                     if predicate in subjectDictionary:
                         if object != ANY:  # subject+predicate+object is given
@@ -116,7 +116,7 @@ def triples(self, triple_pattern, context=None):
         elif predicate != ANY:  # predicate is given, subject unbound
             pos = self.__pos
             if predicate in pos:
-                predicateDictionary = pos[predicate]
+                predicateDictionary = pos[predicate]  # noqa: N806
                 if object != ANY:  # predicate+object is given, subject unbound
                     if object in predicateDictionary:
                         for s in predicateDictionary[object].keys():
@@ -130,14 +130,14 @@ def triples(self, triple_pattern, context=None):
         elif object != ANY:  # object is given, subject+predicate unbound
             osp = self.__osp
             if object in osp:
-                objectDictionary = osp[object]
+                objectDictionary = osp[object]  # noqa: N806
                 for s in objectDictionary.keys():
                     for p in objectDictionary[s].keys():
                         yield (s, p, object), self.__contexts()
         else:  # subject+predicate+object unbound
             spo = self.__spo
             for s in spo.keys():
-                subjectDictionary = spo[s]
+                subjectDictionary = spo[s]  # noqa: N806
                 for p in subjectDictionary.keys():
                     for o in subjectDictionary[p].keys():
                         yield (s, p, o), self.__contexts()
@@ -184,12 +184,12 @@ def namespaces(self):
     def __contexts(self):
         return (c for c in [])  # TODO: best way to return empty generator
 
-    def query(self, query, initNs, initBindings, queryGraph, **kwargs):
+    def query(self, query, initNs, initBindings, queryGraph, **kwargs):  # noqa: N803
         super(SimpleMemory, self).query(
             query, initNs, initBindings, queryGraph, **kwargs
         )
 
-    def update(self, update, initNs, initBindings, queryGraph, **kwargs):
+    def update(self, update, initNs, initBindings, queryGraph, **kwargs):  # noqa: N803
         super(SimpleMemory, self).update(
             update, initNs, initBindings, queryGraph, **kwargs
         )
@@ -347,7 +347,7 @@ def triples(self, triple_pattern, context=None):
         elif subject is not None:  # subject is given
            spo = self.__spo
            if subject in spo:
-                subjectDictionary = spo[subject]
+                subjectDictionary = spo[subject]  # noqa: N806
                 if predicate is not None:  # subject+predicate is given
                     if predicate in subjectDictionary:
                         if object_ is not None:  # subject+predicate+object is given
@@ -383,7 +383,7 @@ def triples(self, triple_pattern, context=None):
         elif predicate is not None:  # predicate is given, subject unbound
             pos = self.__pos
             if predicate in pos:
-                predicateDictionary = pos[predicate]
+                predicateDictionary = pos[predicate]  # noqa: N806
                 if object_ is not None:  # predicate+object is given, subject unbound
                     if object_ in predicateDictionary:
                         for s in list(predicateDictionary[object_].keys()):
@@ -401,7 +401,7 @@ def triples(self, triple_pattern, context=None):
         elif object_ is not None:  # object is given, subject+predicate unbound
             osp = self.__osp
             if object_ in osp:
-                objectDictionary = osp[object_]
+                objectDictionary = osp[object_]  # noqa: N806
                 for s in list(objectDictionary.keys()):
                     for p in list(objectDictionary[s].keys()):
                         triple = (s, p, object_)
@@ -411,7 +411,7 @@ def triples(self, triple_pattern, context=None):
             # Shouldn't get here if all other cases above worked correctly.
             spo = self.__spo
             for s in list(spo.keys()):
-                subjectDictionary = spo[s]
+                subjectDictionary = spo[s]  # noqa: N806
                 for p in list(subjectDictionary.keys()):
                     for o in list(subjectDictionary[p].keys()):
                         triple = (s, p, o)
@@ -530,7 +530,7 @@ def __add_triple_context(self, triple, triple_exists, context, quoted):
             if triple_context == self.__defaultContexts:
                 del self.__tripleContexts[triple]
 
-    def __get_context_for_triple(self, triple, skipQuoted=False):
+    def __get_context_for_triple(self, triple, skipQuoted=False):  # noqa: N803
         """return a list of contexts (str) for the triple, skipping
         quoted contexts if skipQuoted==True"""
 
@@ -582,8 +582,8 @@ def __contexts(self, triple):
             if ctx_str is not None
         )
 
-    def query(self, query, initNs, initBindings, queryGraph, **kwargs):
+    def query(self, query, initNs, initBindings, queryGraph, **kwargs):  # noqa: N803
         super(Memory, self).query(query, initNs, initBindings, queryGraph, **kwargs)
 
-    def update(self, update, initNs, initBindings, queryGraph, **kwargs):
+    def update(self, update, initNs, initBindings, queryGraph, **kwargs):  # noqa: N803
         super(Memory, self).update(update, initNs, initBindings, queryGraph, **kwargs)
diff --git a/rdflib/plugins/stores/regexmatching.py b/rdflib/plugins/stores/regexmatching.py
index d1920620d..0f8da8fff 100644
--- a/rdflib/plugins/stores/regexmatching.py
+++ b/rdflib/plugins/stores/regexmatching.py
@@ -33,7 +33,7 @@ def __reduce__(self):
         return (REGEXTerm, (str(""),))
 
 
-def regexCompareQuad(quad, regexQuad):
+def regexCompareQuad(quad, regexQuad):  # noqa: N802, N803
     for index in range(4):
         if isinstance(regexQuad[index], REGEXTerm) and not regexQuad[
             index
@@ -83,7 +83,7 @@ def remove(self, triple, context=None):
                 or None
             )
-            removeQuadList = []
+            removeQuadList = []  # noqa: N806
             for (s1, p1, o1), cg in self.storage.triples((s, p, o), c):
                 for ctx in cg:
                     ctx = ctx.identifier
@@ -121,7 +121,7 @@ def triples(self, triple, context=None):
                 or None
             )
             for (s1, p1, o1), cg in self.storage.triples((s, p, o), c):
-                matchingCtxs = []
+                matchingCtxs = []  # noqa: N806
                 for ctx in cg:
                     if c is None:
                         if context is None or context.identifier.compiledExpr.match(
diff --git a/rdflib/plugins/stores/sparqlconnector.py b/rdflib/plugins/stores/sparqlconnector.py
index cc9b0c963..1af3b369e 100644
--- a/rdflib/plugins/stores/sparqlconnector.py
+++ b/rdflib/plugins/stores/sparqlconnector.py
@@ -2,7 +2,7 @@
 import logging
 from io import BytesIO
 from typing import TYPE_CHECKING, Optional, Tuple
-from urllib.error import HTTPError, URLError
+from urllib.error import HTTPError
 from urllib.parse import urlencode
 from urllib.request import Request, urlopen
 
@@ -15,7 +15,7 @@
     import typing_extensions as te
 
 
-class SPARQLConnectorException(Exception):
+class SPARQLConnectorException(Exception):  # noqa: N818
     pass
 
 
@@ -38,7 +38,7 @@ def __init__(
         self,
         query_endpoint: Optional[str] = None,
         update_endpoint: Optional[str] = None,
-        returnFormat: str = "xml",
+        returnFormat: str = "xml",  # noqa: N803
         method: "te.Literal['GET', 'POST', 'POST_FORM']" = "GET",
         auth: Optional[Tuple[str, str]] = None,
         **kwargs,
@@ -105,7 +105,7 @@ def query(self, query, default_graph: str = None, named_graph: str = None):
                 res = urlopen(
                     Request(self.query_endpoint + qsa, headers=args["headers"])
                 )
-            except Exception as e:
+            except Exception as e:  # noqa: F841
                 raise ValueError(
                     "You did something wrong formulating either the URI or your SPARQL query"
                 )
@@ -171,7 +171,7 @@ def update(
             args["headers"].update(headers)
 
         qsa = "?" + urlencode(args["params"])
-        res = urlopen(
+        res = urlopen(  # noqa: F841
             Request(
                 self.update_endpoint + qsa, data=query.encode(), headers=args["headers"]
             )
diff --git a/rdflib/plugins/stores/sparqlstore.py b/rdflib/plugins/stores/sparqlstore.py
index d209cd3dc..fb46badad 100644
--- a/rdflib/plugins/stores/sparqlstore.py
+++ b/rdflib/plugins/stores/sparqlstore.py
@@ -99,7 +99,7 @@ def __init__(
         sparql11: bool = True,
         context_aware: bool = True,
         node_to_sparql: NodeToSparql = _node_to_sparql,
-        returnFormat: str = "xml",
+        returnFormat: str = "xml",  # noqa: N803
         auth: Optional[Tuple[str, str]] = None,
         **sparqlconnector_kwargs,
     ):
@@ -147,13 +147,20 @@ def rollback(self):
     def add(self, _, context=None, quoted=False):
         raise TypeError("The SPARQL store is read only")
 
-    def addN(self, quads):
+    def addN(self, quads):  # noqa: N802
         raise TypeError("The SPARQL store is read only")
 
     def remove(self, _, context):
         raise TypeError("The SPARQL store is read only")
 
-    def update(self, query, initNs={}, initBindings={}, queryGraph=None, DEBUG=False):
+    def update(
+        self,
+        query,
+        initNs={},  # noqa: N803
+        initBindings={},
+        queryGraph=None,
+        DEBUG=False,
+    ):
         raise TypeError("The SPARQL store is read only")
 
     def _query(self, *args, **kwargs):
@@ -174,7 +181,12 @@ def _inject_prefixes(self, query, extra_bindings):
         )
 
     def query(
-        self, query, initNs=None, initBindings=None, queryGraph=None, DEBUG=False
+        self,
+        query,
+        initNs=None,  # noqa: N803
+        initBindings=None,
+        queryGraph=None,
+        DEBUG=False,
     ):
         self.debug = DEBUG
         assert isinstance(query, str)
@@ -509,7 +521,7 @@ def __init__(
         update_endpoint: Optional[str] = None,
         sparql11: bool = True,
         context_aware: bool = True,
-        postAsEncoded: bool = True,
+        postAsEncoded: bool = True,  # noqa: N803
         autocommit: bool = True,
         dirty_reads: bool = False,
         **kwds,
@@ -576,7 +588,7 @@ def __len__(self, *args, **kwargs):
         return SPARQLStore.__len__(self, *args, **kwargs)
 
     # TODO: FIXME: open is defined twice
-    def open(self, configuration, create=False):  # type: ignore[no-redef]
+    def open(self, configuration, create=False):  # type: ignore[no-redef] # noqa: F811
         """
         sets the endpoint URLs for this SPARQLStore
 
@@ -634,7 +646,7 @@ def add(self, spo, context=None, quoted=False):
         if self.autocommit:
             self.commit()
 
-    def addN(self, quads):
+    def addN(self, quads):  # noqa: N802
         """Add a list of quads to the store."""
         if not self.update_endpoint:
             raise Exception("UpdateEndpoint is not set - call 'open'")
@@ -684,7 +696,7 @@ def remove(self, spo, context):
         if self.autocommit:
             self.commit()
 
-    def setTimeout(self, timeout):
+    def setTimeout(self, timeout):  # noqa: N802
         self._timeout = int(timeout)
 
     def _update(self, update):
@@ -693,7 +705,14 @@ def _update(self, update):
 
         SPARQLConnector.update(self, update)
 
-    def update(self, query, initNs={}, initBindings={}, queryGraph=None, DEBUG=False):
+    def update(
+        self,
+        query,
+        initNs={},  # noqa: N803
+        initBindings={},
+        queryGraph=None,
+        DEBUG=False,
+    ):
         """
         Perform a SPARQL Update Query against the endpoint,
         INSERT, LOAD, DELETE etc.
diff --git a/rdflib/store.py b/rdflib/store.py
index f890bf5e8..8367cf67b 100644
--- a/rdflib/store.py
+++ b/rdflib/store.py
@@ -220,7 +220,9 @@ def add(
         """
         self.dispatcher.dispatch(TripleAddedEvent(triple=triple, context=context))
 
-    def addN(self, quads: Iterable[Tuple["Node", "Node", "Node", "Graph"]]):
+    def addN(  # noqa: N802
+        self, quads: Iterable[Tuple["Node", "Node", "Node", "Graph"]]
+    ):
         """
         Adds each item in the list of statements to a specific context. The
         quoted argument is interpreted by formula-aware stores to indicate this
@@ -329,7 +331,7 @@ def contexts(self, triple=None):
         :returns: a generator over Nodes
         """
 
-    def query(self, query, initNs, initBindings, queryGraph, **kwargs):
+    def query(self, query, initNs, initBindings, queryGraph, **kwargs):  # noqa: N803
         """
         If stores provide their own SPARQL implementation, override this.
 
@@ -345,7 +347,7 @@ def query(self, query, initNs, initBindings, queryGraph, **kwargs):
 
         raise NotImplementedError
 
-    def update(self, update, initNs, initBindings, queryGraph, **kwargs):
+    def update(self, update, initNs, initBindings, queryGraph, **kwargs):  # noqa: N803
         """
         If stores provide their own (SPARQL) Update implementation, override this.