|
--- a/rdflib/graph.py |
|
+++ b/rdflib/graph.py |
|
@@ -47,11 +47,10 @@ |
|
Identifier, |
|
Literal, |
|
Node, |
|
RDFLibGenid, |
|
URIRef, |
|
- Variable, |
|
) |
|
|
|
if TYPE_CHECKING: |
|
import typing_extensions as te |
|
import rdflib.query |
|
--- a/rdflib/plugins/sparql/aggregates.py |
|
+++ b/rdflib/plugins/sparql/aggregates.py |
|
@@ -1,11 +1,29 @@ |
|
+from __future__ import annotations |
|
from decimal import Decimal |
|
-from rdflib import XSD, Literal |
|
+from typing import ( |
|
+ Any, |
|
+ Callable, |
|
+ Dict, |
|
+ Iterable, |
|
+ List, |
|
+ Mapping, |
|
+ MutableMapping, |
|
+ Optional, |
|
+ Set, |
|
+ Tuple, |
|
+ TypeVar, |
|
+ Union, |
|
+ overload, |
|
+) |
|
+from rdflib.namespace import XSD |
|
from rdflib.plugins.sparql.datatypes import type_promotion |
|
-from rdflib.plugins.sparql.evalutils import NotBoundError, _eval, _val |
|
+from rdflib.plugins.sparql.evalutils import _eval, _val |
|
from rdflib.plugins.sparql.operators import numeric |
|
-from rdflib.plugins.sparql.sparql import SPARQLTypeError |
|
+from rdflib.plugins.sparql.parserutils import CompValue |
|
+from rdflib.plugins.sparql.sparql import FrozenBindings, NotBoundError, SPARQLTypeError |
|
+from rdflib.term import BNode, Identifier, Literal, URIRef, Variable |
|
|
|
|
|
class Accumulator(object): |
|
def __init__(self, aggregation): |
|
self.var = aggregation.res |
|
@@ -52,10 +70,20 @@ |
|
def eval_full_row(self, row): |
|
return row |
|
|
|
def use_row(self, row): |
|
return self.eval_row(row) not in self.seen |
|
+ |
|
+ |
|
+@overload |
|
+def type_safe_numbers(*args: int) -> Tuple[int, ...]: |
|
+ ... |
|
+ |
|
+ |
|
+@overload |
|
+def type_safe_numbers(*args: Union[Decimal, float, int]) -> Tuple[Union[float, int], ...]: |
|
+ ... |
|
|
|
|
|
def type_safe_numbers(*args): |
|
if any((isinstance(arg, float) for arg in args)) and any( |
|
(isinstance(arg, Decimal) for arg in args) |
|
@@ -143,10 +171,13 @@ |
|
pass |
|
except SPARQLTypeError: |
|
pass |
|
|
|
|
|
+_ValueT = TypeVar("_ValueT", Variable, BNode, URIRef, Literal) |
|
+ |
|
+ |
|
class Minimum(Extremum): |
|
def compare(self, val1, val2): |
|
return min(val1, val2, key=_val) |
|
|
|
|
|
--- a/rdflib/plugins/sparql/datatypes.py |
|
+++ b/rdflib/plugins/sparql/datatypes.py |
|
@@ -1,7 +1,11 @@ |
|
-from rdflib import XSD |
|
+from __future__ import annotations |
|
+from typing import TYPE_CHECKING, Dict, List, Optional, Set |
|
+from rdflib.namespace import XSD |
|
|
|
+if TYPE_CHECKING: |
|
+ from rdflib.term import URIRef |
|
XSD_DTs = set( |
|
( |
|
XSD.integer, |
|
XSD.decimal, |
|
XSD.float, |
|
@@ -72,8 +76,10 @@ |
|
t1 = _super_types.get(t1, t1) |
|
t2 = _super_types.get(t2, t2) |
|
if t1 == t2: |
|
return t1 |
|
try: |
|
+ if TYPE_CHECKING: |
|
+ assert t2 is not None |
|
return _typePromotionMap[t1][t2] |
|
except KeyError: |
|
raise TypeError("Operators cannot combine datatypes %s and %s" % (t1, t2)) |
|
--- a/rdflib/plugins/sparql/evaluate.py |
|
+++ b/rdflib/plugins/sparql/evaluate.py |
|
@@ -1,10 +1,23 @@ |
|
+from __future__ import annotations |
|
import collections |
|
import itertools |
|
import json as j |
|
import re |
|
-from typing import Any, Deque, Dict, Generator, Iterable, List, Tuple, Union |
|
+from typing import ( |
|
+ TYPE_CHECKING, |
|
+ Any, |
|
+ Deque, |
|
+ Dict, |
|
+ Generator, |
|
+ Iterable, |
|
+ List, |
|
+ Mapping, |
|
+ Optional, |
|
+ Tuple, |
|
+ Union, |
|
+) |
|
from urllib.parse import urlencode |
|
from urllib.request import Request, urlopen |
|
from pyparsing import ParseException |
|
from rdflib.graph import Graph |
|
from rdflib.plugins.sparql import CUSTOM_EVALS, parser |
|
@@ -26,10 +39,12 @@ |
|
QueryContext, |
|
SPARQLError, |
|
) |
|
from rdflib.term import BNode, Identifier, Literal, URIRef, Variable |
|
|
|
+if TYPE_CHECKING: |
|
+ from rdflib.paths import Path |
|
_Triple = Tuple[Identifier, Identifier, Identifier] |
|
|
|
|
|
def evalBGP(ctx, bgp): |
|
if not bgp: |
|
@@ -148,10 +163,12 @@ |
|
graphSolution = [{part.term: graph.identifier}] |
|
for x in _join(evalPart(c, part.p), graphSolution): |
|
x.ctx.graph = prev_graph |
|
yield x |
|
else: |
|
+ if TYPE_CHECKING: |
|
+ assert not isinstance(graph, Graph) |
|
c = ctx.pushGraph(ctx.dataset.get_context(graph)) |
|
for x in evalPart(c, part.p): |
|
x.ctx.graph = prev_graph |
|
yield x |
|
|
|
--- a/rdflib/plugins/sparql/evalutils.py |
|
+++ b/rdflib/plugins/sparql/evalutils.py |
|
@@ -1,11 +1,32 @@ |
|
+from __future__ import annotations |
|
import collections |
|
-from typing import Dict, Iterable |
|
+from typing import ( |
|
+ Any, |
|
+ DefaultDict, |
|
+ Generator, |
|
+ Iterable, |
|
+ Mapping, |
|
+ Set, |
|
+ Tuple, |
|
+ TypeVar, |
|
+ Union, |
|
+ overload, |
|
+) |
|
from rdflib.plugins.sparql.operators import EBV |
|
from rdflib.plugins.sparql.parserutils import CompValue, Expr |
|
-from rdflib.plugins.sparql.sparql import FrozenDict, NotBoundError, SPARQLError |
|
-from rdflib.term import BNode, Literal, URIRef, Variable |
|
+from rdflib.plugins.sparql.sparql import ( |
|
+ FrozenBindings, |
|
+ FrozenDict, |
|
+ NotBoundError, |
|
+ QueryContext, |
|
+ SPARQLError, |
|
+) |
|
+from rdflib.term import BNode, Identifier, Literal, URIRef, Variable |
|
+ |
|
+_ContextType = Union[FrozenBindings, QueryContext] |
|
+_FrozenDictT = TypeVar("_FrozenDictT", bound=FrozenDict) |
|
|
|
|
|
def _diff(a, b, expr): |
|
res = set() |
|
for x in a: |
|
@@ -16,10 +37,20 @@ |
|
|
|
def _minus(a, b): |
|
for x in a: |
|
if all((not x.compatible(y) or x.disjointDomain(y) for y in b)): |
|
yield x |
|
+ |
|
+ |
|
+@overload |
|
+def _join(a: Iterable[FrozenBindings], b: Iterable[Mapping[Identifier, Identifier]]) -> Generator[FrozenBindings, None, None]: |
|
+ ... |
|
+ |
|
+ |
|
+@overload |
|
+def _join(a: Iterable[FrozenDict], b: Iterable[Mapping[Identifier, Identifier]]) -> Generator[FrozenDict, None, None]: |
|
+ ... |
|
|
|
|
|
def _join(a, b): |
|
for x in a: |
|
for y in b: |
|
@@ -43,10 +74,20 @@ |
|
try: |
|
return EBV(ctx[expr]) |
|
except: |
|
return False |
|
return False |
|
+ |
|
+ |
|
+@overload |
|
+def _eval(expr: Union[Literal, URIRef], ctx: FrozenBindings, raise_not_bound_error: bool = ...) -> Union[Literal, URIRef]: |
|
+ ... |
|
+ |
|
+ |
|
+@overload |
|
+def _eval(expr: Union[Variable, Expr], ctx: FrozenBindings, raise_not_bound_error: bool = ...) -> Union[Any, SPARQLError]: |
|
+ ... |
|
|
|
|
|
def _eval(expr, ctx, raise_not_bound_error=True): |
|
if isinstance(expr, (Literal, URIRef)): |
|
return expr |
|
@@ -85,10 +126,13 @@ |
|
] |
|
if _s is not None and _p is not None and _o is not None: |
|
yield (_s, _p, _o) |
|
|
|
|
|
+_ValueT = TypeVar("_ValueT", Variable, BNode, URIRef, Literal) |
|
+ |
|
+ |
|
def _val(v): |
|
if isinstance(v, Variable): |
|
return 0, v |
|
elif isinstance(v, BNode): |
|
return 1, v |
|
--- a/rdflib/plugins/sparql/parser.py |
|
+++ b/rdflib/plugins/sparql/parser.py |
|
@@ -1,8 +1,9 @@ |
|
+from __future__ import annotations |
|
import re |
|
import sys |
|
-from typing import Any, BinaryIO |
|
+from typing import Any, BinaryIO, List |
|
from typing import Optional as OptionalType |
|
from typing import TextIO, Tuple, Union |
|
from pyparsing import CaselessKeyword as Keyword |
|
from pyparsing import ( |
|
Combine, |
|
@@ -19,11 +20,11 @@ |
|
restOfLine, |
|
) |
|
import rdflib |
|
from rdflib.compat import decodeUnicodeEscape |
|
from . import operators as op |
|
-from .parserutils import Comp, Param, ParamList |
|
+from .parserutils import Comp, CompValue, Param, ParamList |
|
|
|
DEBUG = False |
|
|
|
|
|
def neg(literal): |
|
--- a/rdflib/plugins/sparql/parserutils.py |
|
+++ b/rdflib/plugins/sparql/parserutils.py |
|
@@ -1,8 +1,19 @@ |
|
+from __future__ import annotations |
|
from collections import OrderedDict |
|
from types import MethodType |
|
-from typing import TYPE_CHECKING, Any, List, Tuple, Union |
|
+from typing import ( |
|
+ TYPE_CHECKING, |
|
+ Any, |
|
+ Callable, |
|
+ List, |
|
+ Mapping, |
|
+ Optional, |
|
+ Tuple, |
|
+ TypeVar, |
|
+ Union, |
|
+) |
|
from pyparsing import ParseResults, TokenConverter, originalTextFor |
|
from rdflib import BNode, Variable |
|
from rdflib.term import Identifier |
|
|
|
if TYPE_CHECKING: |
|
@@ -56,10 +67,13 @@ |
|
|
|
|
|
class ParamList(Param): |
|
def __init__(self, name, expr): |
|
Param.__init__(self, name, expr, True) |
|
+ |
|
+ |
|
+_ValT = TypeVar("_ValT") |
|
|
|
|
|
class CompValue(OrderedDict): |
|
def __init__(self, name, **values): |
|
OrderedDict.__init__(self) |
|
--- a/rdflib/plugins/sparql/processor.py |
|
+++ b/rdflib/plugins/sparql/processor.py |
|
@@ -1,11 +1,15 @@ |
|
+from __future__ import annotations |
|
+from typing import Any, Mapping, Optional, Union |
|
+from rdflib.graph import Graph |
|
from rdflib.plugins.sparql.algebra import translateQuery, translateUpdate |
|
from rdflib.plugins.sparql.evaluate import evalQuery |
|
from rdflib.plugins.sparql.parser import parseQuery, parseUpdate |
|
-from rdflib.plugins.sparql.sparql import Query |
|
+from rdflib.plugins.sparql.sparql import Query, Update |
|
from rdflib.plugins.sparql.update import evalUpdate |
|
from rdflib.query import Processor, Result, UpdateProcessor |
|
+from rdflib.term import Identifier |
|
|
|
|
|
def prepareQuery(queryString, initNs={}, base=None): |
|
ret = translateQuery(parseQuery(queryString), base, initNs) |
|
ret._original_args = queryString, initNs, base |
|
--- a/rdflib/plugins/sparql/sparql.py |
|
+++ b/rdflib/plugins/sparql/sparql.py |
|
@@ -1,17 +1,34 @@ |
|
+from __future__ import annotations |
|
import collections |
|
import datetime |
|
import itertools |
|
import typing as t |
|
-from typing import Any, Container, Dict, Iterable, List, Optional, Tuple, Union |
|
+from typing import ( |
|
+ TYPE_CHECKING, |
|
+ Any, |
|
+ Container, |
|
+ Dict, |
|
+ Generator, |
|
+ Iterable, |
|
+ List, |
|
+ Optional, |
|
+ Tuple, |
|
+ TypeVar, |
|
+ Union, |
|
+) |
|
import isodate |
|
import rdflib.plugins.sparql |
|
from rdflib.compat import Mapping, MutableMapping |
|
from rdflib.graph import ConjunctiveGraph, Graph |
|
from rdflib.namespace import NamespaceManager |
|
from rdflib.plugins.sparql.parserutils import CompValue |
|
from rdflib.term import BNode, Identifier, Literal, Node, URIRef, Variable |
|
+ |
|
+if TYPE_CHECKING: |
|
+ from rdflib.paths import Path |
|
+_AnyT = TypeVar("_AnyT") |
|
|
|
|
|
class SPARQLError(Exception): |
|
def __init__(self, msg=None): |
|
Exception.__init__(self, msg) |
|
--- a/rdflib/plugins/sparql/update.py |
|
+++ b/rdflib/plugins/sparql/update.py |
|
@@ -1,9 +1,13 @@ |
|
-from rdflib import Graph, Variable |
|
+from __future__ import annotations |
|
+from typing import TYPE_CHECKING, Iterator, Mapping, Optional, Sequence |
|
+from rdflib.graph import Graph |
|
from rdflib.plugins.sparql.evaluate import evalBGP, evalPart |
|
from rdflib.plugins.sparql.evalutils import _fillTemplate, _join |
|
-from rdflib.plugins.sparql.sparql import QueryContext |
|
+from rdflib.plugins.sparql.parserutils import CompValue |
|
+from rdflib.plugins.sparql.sparql import FrozenDict, QueryContext, Update |
|
+from rdflib.term import Identifier, URIRef, Variable |
|
|
|
|
|
def _graphOrDefault(ctx, g): |
|
if g == "DEFAULT": |
|
return ctx.graph |
|
@@ -23,10 +27,12 @@ |
|
else: |
|
return [ctx.dataset.get_context(g)] |
|
|
|
|
|
def evalLoad(ctx, u): |
|
+ if TYPE_CHECKING: |
|
+ assert isinstance(u.iri, URIRef) |
|
if u.graphiri: |
|
ctx.load(u.iri, default=False, publicID=u.graphiri) |
|
else: |
|
ctx.load(u.iri, default=True) |
|
|
|
--- a/rdflib/plugins/stores/memory.py |
|
+++ b/rdflib/plugins/stores/memory.py |
|
@@ -25,11 +25,11 @@ |
|
_TriplePatternType, |
|
_TripleType, |
|
) |
|
from rdflib.plugins.sparql.sparql import Query, Update |
|
from rdflib.query import Result |
|
- from rdflib.term import Identifier, URIRef, Variable |
|
+ from rdflib.term import Identifier, URIRef |
|
__all__ = ["SimpleMemory", "Memory"] |
|
ANY = None |
|
|
|
|
|
class SimpleMemory(Store): |
|
--- a/rdflib/store.py |
|
+++ b/rdflib/store.py |
|
@@ -27,11 +27,11 @@ |
|
_TriplePatternType, |
|
_TripleType, |
|
) |
|
from rdflib.plugins.sparql.sparql import Query, Update |
|
from rdflib.query import Result |
|
- from rdflib.term import Identifier, Node, URIRef, Variable |
|
+ from rdflib.term import Identifier, Node, URIRef |
|
VALID_STORE = 1 |
|
CORRUPTED_STORE = 0 |
|
NO_STORE = -1 |
|
UNKNOWN = None |
|
Pickler = pickle.Pickler |