diff --git a/LICENSE b/LICENSE index 62826d5b..ed208512 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2022-2025 SINTEF +Copyright (c) 2022-2026 SINTEF Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/pyproject.toml b/pyproject.toml index 5bbd6fd7..e6812d9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,9 +144,12 @@ addopts = """-rs --cov=tripper --cov-report=term \ """ filterwarnings = [ "ignore:.*imp module.*:DeprecationWarning", + "ignore:ConjunctiveGraph.*:DeprecationWarning", # in pyld + "ignore:builtin type SwigPy.*:DeprecationWarning", # in pyld "ignore:::tripper.literal:243", # Ignore warning in doctest ] + [tool.setuptools.package-data] "tripper.context" = ["*.json", "*.yaml"] diff --git a/tests/datadoc/test_context.py b/tests/datadoc/test_context.py index 98dd359e..f19c75cf 100644 --- a/tests/datadoc/test_context.py +++ b/tests/datadoc/test_context.py @@ -94,6 +94,33 @@ def test_get_prefixes(): assert "mediaType" not in prefixes + +def test_get_properties(): + """Test get_properties() method.""" + properties = ctx.get_properties() + assert "adms" not in properties # prefix is not a property + assert "Document" not in properties # class is not a property + assert properties["mediaType"] == "http://www.w3.org/ns/dcat#mediaType" + + +def test_get_object_properties(): + """Test get_object_properties() method.""" + from tripper import DCTERMS + + objprop = ctx.get_object_properties() + assert "adms" not in objprop # prefix is not an object property + assert "Document" not in objprop # class is not an object property + assert "title" not in objprop # annotation is not an object property + assert objprop["hasPart"] == DCTERMS.hasPart + + +def test_get_classes(): + """Test get_classes() method.""" + classes = ctx.get_classes() + assert "adms" not in classes + assert "mediaType" not in classes + assert 
classes["Document"] == "http://xmlns.com/foaf/0.1/Document" + + def test_sync_prefixes(): """Test sync_prefixes() method.""" from tripper import Triplestore diff --git a/tests/datadoc/test_datadoc_utils.py b/tests/datadoc/test_datadoc_utils.py index 12861320..edaef562 100644 --- a/tests/datadoc/test_datadoc_utils.py +++ b/tests/datadoc/test_datadoc_utils.py @@ -112,3 +112,32 @@ def test_iriname(): assert iriname("abc") == "abc" assert iriname("rdf:JSON") == "JSON" assert iriname("https://w3id.org/emmo#Ampere") == "Ampere" + + +def test_getlabel(): + """Test utility function getlabel().""" + from tripper import SKOS + from tripper.datadoc.errors import InvalidDatadocError + from tripper.datadoc.utils import getlabel + + assert getlabel({"@id": "ex:A", "prefLabel": "a"}) == "a" + assert getlabel({"@id": "ex:A", "label": "a"}) == "a" + assert getlabel({"@id": "ex:A", "rdfs:label": "a"}) == "a" + assert getlabel({"@id": "ex:A"}, default="a") == "a" + assert getlabel({"@id": "ex:A"}) == "A" + + # Check for precedence of labels + assert ( + getlabel({"@id": "ex:A", "rdfs:label": "a", "prefLabel": "b"}) == "a" + ) + assert ( + getlabel({"@id": "ex:A", "rdfs:label": "a", "skos:prefLabel": "b"}) + == "b" + ) + assert ( + getlabel({"@id": "ex:A", "rdfs:label": "a", SKOS.prefLabel: "b"}) + == "b" + ) + + with pytest.raises(InvalidDatadocError): + getlabel({"x": "ex:A"}) diff --git a/tests/datadoc/test_dataset.py b/tests/datadoc/test_dataset.py index dc063f3b..2e7f79da 100644 --- a/tests/datadoc/test_dataset.py +++ b/tests/datadoc/test_dataset.py @@ -402,6 +402,48 @@ def test_store(): } +def test_update_context(): + """Test update_context().""" + from tripper import HUME, OWL, Namespace + from tripper.datadoc import get_context + from tripper.datadoc.dataset import update_context + + EX = Namespace("http://example.com/") + sources = { + "@context": { + "ex": str(EX), + "hume": str(HUME), + }, + "@graph": [ + { + # Instances are not added to context + "@id": "ex:instr", + 
"@type": "hume:Device", + }, + { + # Not added to context, since there is no @type + "@id": "ex:instr2", + }, + { + "@id": "ex:MyDevice", + "skos:prefLabel": "MyDevice", + "subClassOf": "hume:Device", + }, + ], + } + context = get_context(default_theme=None) + update_context(sources, context) + c = context.get_context_dict() + assert "instr" not in c + assert "instr2" not in c + assert "MyDevice" in c + assert c["MyDevice"] == {"@id": EX.MyDevice, "@type": OWL.Class} + assert c["Device"] == {"@id": HUME.Device, "@type": OWL.Class} + + # TODO: add tests for what happens if there is mismatch between + # previously added context and updated_context... + + def test_infer_restriction_types(): """Test infer_restriction_types().""" from tripper import DCTERMS, HUME, RDFS, Namespace @@ -426,7 +468,7 @@ def test_infer_restriction_types(): "http://example.org#A": { DCTERMS.creator: "some", DCTERMS.hasPart: "value", - DCTERMS.issued: "value", + # DCTERMS.issued: "value", } } @@ -452,7 +494,7 @@ def test_infer_restriction_types(): "@id": "ex:MyDevice", # "@type": "owl:Class", "subClassOf": HUME.Device, - "hasPart": HUME.MeasuringInstrument, + "hasPart": [HUME.MeasuringInstrument, "ex:MyDevice"], }, ], } @@ -577,6 +619,13 @@ def test_update_restrictions(): "@type": HUME.Device, "isDefinedBy": HUME.MeasuringInstrument, }, + { + # An individial relating to two classes and an individual. + # Should be converted to an existential restriction. + "@id": "ex:instr3", + "@type": HUME.Device, + "hasPart": [HUME.MeasuringInstrument, "MyDevice", "ex:instr"], + }, { # A class relating to a class. # Should be converted to an existential restriction. 
@@ -586,63 +635,68 @@ def test_update_restrictions(): "@id": "ex:MyDevice", # "@type": "owl:Class", "subClassOf": HUME.Device, + "label": "MyDevice", "hasPart": HUME.MeasuringInstrument, }, + { + # A class relating to two classes + "@id": "ex:MyDevice2", + "@type": "owl:Class", + "subClassOf": HUME.Device, + "label": "MyDevice2", + "hasPart": [HUME.MeasuringInstrument, "MyDevice"], + }, + # TODO: for completeness, add tests for individual + # relating to one individual and individual related to a + # list of individuals ], } r6 = deepcopy(d6) update_restrictions(r6, ctx) - assert r6 == { - "@context": { - "MeasuringInstrument": { - "@id": "https://w3id.org/emmo/hume#MeasuringInstrument", - "@type": "owl:Class", - } - }, - "@graph": [ - { - "@id": "ex:instr", - "@type": "https://w3id.org/emmo/hume#Device", - "isDefinedBy": "https://w3id.org/emmo/hume#MeasuringSystem", - }, + res6 = {d["@id"]: d for d in r6["@graph"]} + assert res6["ex:instr"] == { + "@id": "ex:instr", + "@type": "https://w3id.org/emmo/hume#Device", + "isDefinedBy": "https://w3id.org/emmo/hume#MeasuringSystem", + } + assert res6["ex:instr2"] == { + "@id": "ex:instr2", + "@type": [ + "https://w3id.org/emmo/hume#Device", { - "@id": "ex:instr2", - "@type": [ - "https://w3id.org/emmo/hume#Device", - { - "@type": "owl:Restriction", - "owl:onProperty": { - "@id": ( - "http://www.w3.org/2000/01/rdf-schema#" - "isDefinedBy" - ) - }, - "owl:someValuesFrom": { - "@id": ( - "https://w3id.org/emmo/hume#MeasuringInstrument" - ) - }, - }, - ], + "@type": "owl:Restriction", + "owl:onProperty": { + "@id": "http://www.w3.org/2000/01/rdf-schema#isDefinedBy", + }, + "owl:someValuesFrom": { + "@id": "https://w3id.org/emmo/hume#MeasuringInstrument", + }, }, + ], + } + assert res6["ex:instr3"] == { + # WRONG! 
Should be converted to restrictions + "@id": "ex:instr3", + "@type": "https://w3id.org/emmo/hume#Device", + "hasPart": [ + "https://w3id.org/emmo/hume#MeasuringInstrument", + "MyDevice", + "ex:instr", + ], + } + assert res6["ex:MyDevice"] == { + "@id": "ex:MyDevice", + "subClassOf": [ + "https://w3id.org/emmo/hume#Device", { - "@id": "ex:MyDevice", - "subClassOf": [ - "https://w3id.org/emmo/hume#Device", - { - "@type": "owl:Restriction", - "owl:onProperty": { - "@id": "http://purl.org/dc/terms/hasPart" - }, - "owl:someValuesFrom": { - "@id": ( - "https://w3id.org/emmo/hume#MeasuringInstrument" - ) - }, - }, - ], + "@type": "owl:Restriction", + "owl:onProperty": {"@id": "http://purl.org/dc/terms/hasPart"}, + "owl:someValuesFrom": { + "@id": "https://w3id.org/emmo/hume#MeasuringInstrument" + }, }, ], + "label": "MyDevice", } diff --git a/tests/datadoc/test_keywords.py b/tests/datadoc/test_keywords.py index 444cb601..ad554ab8 100644 --- a/tests/datadoc/test_keywords.py +++ b/tests/datadoc/test_keywords.py @@ -1,11 +1,12 @@ """Test the Keywords class.""" +# pylint: disable=too-many-statements,wrong-import-position + import pytest pytest.importorskip("yaml") pytest.importorskip("pyld") -# pylint: disable=wrong-import-position from tripper.datadoc import Keywords # A fixture used by all the tests @@ -14,10 +15,13 @@ def test_get_keywords(): """Test get_keywords() function.""" + import warnings + from dataset_paths import testdir # pylint: disable=import-error - from tripper import DDOC - from tripper.datadoc import get_keywords + from tripper import DDOC, OWL, XSD + from tripper.datadoc import get_context, get_keywords + from tripper.errors import TripperWarning kw1 = get_keywords() assert kw1.data == keywords.data @@ -65,6 +69,38 @@ def test_get_keywords(): assert kw6.data.theme == ["ddoc:datadoc", "ddoc:prefixes", "ddoc:process"] assert "batchNumber" in kw6 + kw7 = get_keywords(theme=None) + assert len(kw7) == 0 + kw7.add({"resources": {"MyClass": {"iri": 
"http://example.com/MyClass"}}}) + assert len(kw7) == 0 # no properties in keywords + + ctx = get_context(default_theme=None) + ctx.add_context( + { + "ex": "http://example.com/", + "owl": str(OWL), + "xsd": str(XSD), + "objprop": {"@id": "ex:objprop", "@type": "@id"}, + "dataprop": {"@id": "ex:dataprop", "@type": "xsd:string"}, + "cls": {"@id": "ex:cls", "@type": "owl:Class"}, + } + ) + + # Test `context` argument to get_keywords(). Ignore expected + # warnings about loss of information + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=TripperWarning) + + kw8 = get_keywords(kw7, context=ctx, theme=None) + assert len(kw8) == 2 # 2 properties in keywords + assert kw8.get_prefixes()["ex"] == "http://example.com/" + assert set(kw8.classnames()) == {"Resource", "MyClass", "cls"} + + kw9 = get_keywords(context=ctx, theme=None) + assert len(kw9) == 2 + assert kw9.get_prefixes()["ex"] == "http://example.com/" + assert set(kw9.classnames()) == {"Resource", "cls"} + def test_iter(): """Test __iter__() method.""" @@ -155,7 +191,11 @@ def test_load_yaml(): """ from dataset_paths import indir # pylint: disable=import-error - from tripper.datadoc.errors import ParseError + from tripper.datadoc.errors import ( + ParseError, + RedefineKeywordWarning, + SkipRedefineKeywordWarning, + ) kw = keywords.copy() @@ -195,10 +235,12 @@ def test_load_yaml(): # keywords are unchanged by failures # assert kw == keywords - kw.load_yaml(indir / "invalid_keywords9.yaml", redefine="skip") + with pytest.warns(SkipRedefineKeywordWarning): + kw.load_yaml(indir / "invalid_keywords9.yaml", redefine="skip") assert kw["title"].iri == "dcterms:title" - kw.load_yaml(indir / "invalid_keywords9.yaml", redefine="allow") + with pytest.warns(RedefineKeywordWarning): + kw.load_yaml(indir / "invalid_keywords9.yaml", redefine="allow") assert kw["title"].iri == "myonto:a" kw.load_yaml(indir / "valid_keywords.yaml") @@ -485,6 +527,7 @@ def test_load2(): from tripper import Triplestore 
from tripper.datadoc import get_keywords + from tripper.datadoc.errors import RedefineKeywordWarning from tripper.utils import AttrDict ts = Triplestore("rdflib") @@ -539,7 +582,8 @@ def test_load2(): # Create a new Keywords object with # default keywords and load from the triplestore kw2 = get_keywords() - kw2.load_rdf(ts, redefine="allow") + with pytest.warns(RedefineKeywordWarning): + kw2.load_rdf(ts, redefine="allow") # Ensure that the specified keywords are in kw2 assert not { diff --git a/tripper/datadoc/context.py b/tripper/datadoc/context.py index 2411f16d..dfee2a0f 100644 --- a/tripper/datadoc/context.py +++ b/tripper/datadoc/context.py @@ -12,6 +12,7 @@ from tripper import OWL, RDF, RDFS, Triplestore from tripper.datadoc.errors import InvalidContextError, PrefixMismatchError +from tripper.datadoc.utils import asseq from tripper.errors import NamespaceError, NamespaceWarning from tripper.utils import MATCH_IRI, MATCH_PREFIXED_IRI, openfile, prefix_iri @@ -255,6 +256,36 @@ def get_prefixes(self) -> dict: prefixes[k] = v["@id"] return prefixes + def get_properties(self) -> dict: + """Return a dict mapping property names to IRIs.""" + return { + k: v["@id"] + for k, v in self.ctx["mappings"].items() + if "@id" in v + and v.get("_prefix") is False + and OWL.Class not in asseq(v.get("@type")) + } + + def get_object_properties(self) -> dict: + """Return a dict mapping object property names to IRIs.""" + return { + k: v["@id"] + for k, v in self.ctx["mappings"].items() + if "@id" in v + and v.get("_prefix") is False + and v.get("@type") == "@id" + } + + def get_classes(self) -> dict: + """Return a dict mapping class names to IRIs.""" + return { + k: v["@id"] + for k, v in self.ctx["mappings"].items() + if "@id" in v + and v.get("_prefix") is False + and OWL.Class in asseq(v.get("@type")) + } + def sync_prefixes( self, ts: Triplestore, update: "Optional[bool]" = None ) -> None: diff --git a/tripper/datadoc/dataset.py b/tripper/datadoc/dataset.py index 
f16bb856..8fc25d0a 100644 --- a/tripper/datadoc/dataset.py +++ b/tripper/datadoc/dataset.py @@ -58,7 +58,8 @@ ValidateError, ) from tripper.datadoc.keywords import Keywords, get_keywords -from tripper.datadoc.utils import add, asseq, get, iriname +from tripper.datadoc.utils import add, asseq, get, getlabel, iriname +from tripper.errors import NamespaceError from tripper.utils import ( AttrDict, as_python, @@ -158,6 +159,8 @@ def told( Dict with an updated copy of `descr` as valid JSON-LD. """ + # pylint: disable=too-many-statements + single = "@id", "@type", "@graph" multi = "keywordfile", "prefixes", "base" singlerepr = isinstance(descr, list) or any(s in descr for s in single) @@ -174,6 +177,10 @@ def told( ) else: keywords = get_keywords(keywords=keywords) + + if prefixes: + keywords.add(prefixes, redefine="allow") + resources = keywords.data.resources # Whether the context has been copied. Used within addcontext() @@ -445,6 +452,7 @@ def store( context=context, prefixes=prefixes, default_theme=None, + copy=True, # we are calling update_context() below ) doc = told( @@ -454,6 +462,7 @@ def store( context=context, prefixes=prefixes, ) + update_context(doc, context) docs = doc if isinstance(doc, list) else doc.get("@graph", [doc]) for d in docs: @@ -550,23 +559,37 @@ def update_context( Currently this only adds classes defined in `source` to `context`. 
""" + subclassof = (RDFS.subClassOf, "rdfs:subClassOf", "subClassOf") + + if isinstance(source, dict) and "@context" in source: + context.add_context(source["@context"]) + sources = ( source if isinstance(source, list) else source["@graph"] if "@graph" in source else [source] ) - prefixes = context.get_prefixes() + for d in sources: - for k, v in d.items(): - if k == "@graph" or isinstance(v, dict): - update_context(v, context) - elif k == "subClassOf": + if not isinstance(d, dict): + continue + if "@id" in d: + try: + iri = context.expand(d["@id"], strict=True) + except NamespaceError: + continue + label = getlabel(d) + if "/" in label: + continue # do not add IDs with slash to context + superclasses = [d[s] for s in subclassof if s in d] + if d.get("@type") in (OWL.Class, "owl:Class"): + context.add_context({label: {"@id": iri, "@type": d["@type"]}}) + elif superclasses: + supercl = context.expand(superclasses[0], strict=True) context.add_context( { - k: { - "@id": expand_iri(k, prefixes, strict=True), - "@type": OWL.Class, - } + label: {"@id": iri, "@type": OWL.Class}, + iriname(supercl): {"@id": supercl, "@type": OWL.Class}, } ) @@ -668,12 +691,9 @@ def infer_restriction_types( ): vexp = context.expand(v, strict=False) d[kexp] = "some" if _isclass(vexp, context) else "value" - elif not isinstance(v, str) or ( - context - and kexp in context - and not context.is_annotation_property(kexp) - ): - d[kexp] = "value" + elif isinstance(v, list): + if any(_isclass(e, context) for e in v): + d[kexp] = "some" elif _isclass(v, context): d[kexp] = "some" if d: diff --git a/tripper/datadoc/errors.py b/tripper/datadoc/errors.py index e14344df..7db0441f 100644 --- a/tripper/datadoc/errors.py +++ b/tripper/datadoc/errors.py @@ -50,6 +50,10 @@ class ParseError(TripperError): """Error when parsing a file.""" +class InconsistentKeywordError(DatadocValueError): + """Inconsistent keyword.""" + + # ========== # Warnings # ========== diff --git a/tripper/datadoc/keywords.py 
b/tripper/datadoc/keywords.py index fbe43851..1b79fc6d 100644 --- a/tripper/datadoc/keywords.py +++ b/tripper/datadoc/keywords.py @@ -34,6 +34,7 @@ SkipRedefineKeywordWarning, ) from tripper.datadoc.utils import add, asseq, iriname, merge +from tripper.errors import TripperWarning from tripper.utils import ( AttrDict, expand_iri, @@ -48,6 +49,8 @@ if TYPE_CHECKING: # pragma: no cover from typing import IO, Any, Iterable, List, Optional, Set, Tuple, Union + from tripper.datadoc.context import ContextType + FileLoc = Union[Path, str] KeywordsType = Union["Keywords", dict, IO, Path, str, Sequence] @@ -77,7 +80,7 @@ def get_keywords( keywords: "Optional[KeywordsType]" = None, format: "Optional[str]" = None, theme: "Optional[Union[str, Sequence[str]]]" = "ddoc:datadoc", - yamlfile: "Optional[FileLoc]" = None, + context: "Optional[ContextType]" = None, timeout: float = 3, strict: bool = False, redefine: str = "raise", @@ -89,9 +92,7 @@ def get_keywords( format: Format of input if `keywords` refer to a file that can be loaded. theme: IRI of one of more themes to load keywords for. - yamlfile: YAML file with keyword definitions to parse. May also - be an URI in which case it will be accessed via HTTP GET. - Deprecated. Use the `load_yaml()` or `add()` methods instead. + context: Initialise from this Context instance. timeout: Timeout in case `yamlfile` is a URI. strict: Whether to raise an `InvalidKeywordError` exception if `d` contains an unknown key. @@ -106,28 +107,74 @@ def get_keywords( Returns: Keywords instance. """ - if isinstance(keywords, Keywords): - kw = keywords - else: - kw = Keywords(theme=theme) - if keywords: - kw.add( - keywords, - format=format, - timeout=timeout, - strict=strict, - redefine=redefine, - ) + # pylint: disable=import-outside-toplevel + from tripper.datadoc.context import get_context - if yamlfile: + def from_context(): + """Return a keywords dict from context.""" warnings.warn( - "The `yamlfile` argument is deprecated. 
Use the `load_yaml()` or " - "`add()` methods instead.", - DeprecationWarning, - ) - kw.load_yaml( - yamlfile, timeout=timeout, strict=strict, redefine=redefine + "Adding keywords from context - information may be lost. " + "Classes are added to the root and properties to 'Resource'.", + category=TripperWarning, + stacklevel=3, ) + prefixes = context.get_prefixes() + classes = context.get_classes() + properties = context.get_properties() + d = {} + if prefixes: + d["prefixes"] = prefixes + if classes: + d["resources"] = { + name: {"iri": iri} for name, iri in classes.items() + } + if properties: + props = {} + ctx = context.get_context_dict() + for name, iri in properties.items(): + if ctx[name]["@type"] == "@id": + props[name] = {"iri": iri} + else: + props[name] = { + "iri": iri, + "range": "rdf:Literal", + "datatype": ctx[name]["@type"], + } + if "resources" not in d: + d["resources"] = {} + d["resources"]["Resource"] = { + "iri": "dcat:Resource", + "keywords": props, + } + return d + + if context: + context = get_context(context) + + # If keywords AND context is given, the "redefine" argument + # determine whether the context can overwrite the keywords. + # + # If only context is given, we create a default keywords (from + # theme) and overwrite it with the context. 
+ if keywords is not None: + if isinstance(keywords, Keywords): + kw = keywords + else: + kw = Keywords(theme=theme) + if keywords: + kw.add( + keywords, + format=format, + timeout=timeout, + strict=strict, + redefine=redefine, + ) + if context is not None: + kw.add(from_context(), redefine=redefine) + else: + kw = Keywords(theme=theme) + if context is not None: + kw.add(from_context(), redefine="allow") return kw @@ -572,6 +619,9 @@ def to_prefixed(x): key = prefix_iri(val["iri"], prefixes) if len(val) > 1 or key not in iridefs: iridefs[key] = val + expkey = expand_iri(val["iri"], prefixes) + if len(val) > 1 or expkey not in iridefs: + iridefs[expkey] = val # Resources for cls, defs in d.get("resources", AttrDict()).items(): diff --git a/tripper/datadoc/utils.py b/tripper/datadoc/utils.py index fd5933e1..f93637af 100644 --- a/tripper/datadoc/utils.py +++ b/tripper/datadoc/utils.py @@ -3,6 +3,9 @@ import re from typing import TYPE_CHECKING, Mapping, Sequence +from tripper.datadoc.errors import InvalidDatadocError +from tripper.namespace import RDFS, SKOS + if TYPE_CHECKING: # pragma: no cover from typing import Any, Iterable, Optional, Union @@ -184,7 +187,7 @@ def get( def asseq(value: "Union[str, Sequence]") -> "Sequence": """Returns a string or sequence as an iterable.""" - return [value] if isinstance(value, str) else value + return [value] if isinstance(value, str) else value if value else [] def iriname(value: str) -> str: @@ -193,7 +196,46 @@ def iriname(value: str) -> str: """ if ":" not in value: return value - m = re.search("[:/#]([a-zA-Z_][a-zA-Z0-9_.+-]*)$", value) + m = re.search("[:/#]([a-zA-Z_][a-zA-Z0-9_./+-]*)$", value) if not m or not m.groups(): raise ValueError(f"Cannot infer name of IRI: {value}") return m.groups()[0] + + +def getlabel(d: dict, default: "Optional[str]" = None) -> str: + """Return label from a JSON-LD dict `d`. 
+ + Any of the following keys in `d` (listed in the order of + precedence, from high to low) will be interpreted as a label: + - skos:prefLabel + - rdfs:label + - prefLabel + - label + + If `d` has none of the above keys and `default` is not None, + `default` is returned. Otherwise `iriname(d["@id"])` is returned. + + Example: + + >>> getlabel({"@id": "ex:A", "label": "a"}) + 'a' + + """ + labels = ( + # The order is deliberate. prefLabel has precedence over label. + # But qualified IRIs have precedence over keywords. + SKOS.prefLabel, + "skos:prefLabel", + RDFS.label, + "rdfs:label", + "prefLabel", + "label", + ) + for label in labels: + if label in d: + return d[label] + if default: + return default + if "@id" in d: + return iriname(d["@id"]) + raise InvalidDatadocError(f"Cannot infer label from JSON-LD dict: {d}")