diff --git a/.gitignore b/.gitignore index b4c5bb79..88eef561 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ user.bazelrc # docs build artifacts /_build* docs/ubproject.toml +docs/schemas.json # Vale - editorial style guide .vale.ini diff --git a/docs/internals/requirements/requirements.rst b/docs/internals/requirements/requirements.rst index 816646cb..70a5413a 100644 --- a/docs/internals/requirements/requirements.rst +++ b/docs/internals/requirements/requirements.rst @@ -1097,6 +1097,6 @@ Grouped Requirements .. needextend:: c.this_doc() and type == 'tool_req' and not status :status: valid -.. needextend:: "metamodel.yaml" in source_code_link +.. needextend:: source_code_link is not None and "metamodel.yaml" in source_code_link :+satisfies: tool_req__docs_metamodel :+tags: config diff --git a/src/extensions/score_metamodel/__init__.py b/src/extensions/score_metamodel/__init__.py index 0a6c4dae..b0d207de 100644 --- a/src/extensions/score_metamodel/__init__.py +++ b/src/extensions/score_metamodel/__init__.py @@ -15,6 +15,7 @@ import pkgutil from collections.abc import Callable from pathlib import Path +from typing import Any from sphinx.application import Sphinx from sphinx_needs import logging @@ -31,6 +32,7 @@ from src.extensions.score_metamodel.metamodel_types import ( ScoreNeedType as ScoreNeedType, ) +from src.extensions.score_metamodel.sn_schemas import write_sn_schemas from src.extensions.score_metamodel.yaml_parser import ( default_options as default_options, ) @@ -237,10 +239,28 @@ def setup(app: Sphinx) -> dict[str, str | bool]: # load metamodel.yaml via ruamel.yaml metamodel = load_metamodel_data() + # Sphinx-Needs 6 requires extra options as dicts: {"name": ..., "schema": ...} + # Options WITH a schema get JSON schema validation (value must be a string). + # Options WITHOUT a schema are registered but not validated. 
+ # non_schema_options = {"source_code_link", "testlink", "codelink"} + non_schema_options: set[str] = set() # currently empty → all options get schema validation + extra_options_schema: list[dict[str, Any]] = [ + {"name": opt, "schema": {"type": "string"}} + for opt in metamodel.needs_extra_options + if opt not in non_schema_options + ] + extra_options_wo_schema: list[dict[str, Any]] = [ + {"name": opt} + for opt in metamodel.needs_extra_options + if opt in non_schema_options + ] + # extra_options = [{"name": opt} for opt in metamodel.needs_extra_options] + extra_options = extra_options_schema + extra_options_wo_schema + # Assign everything to Sphinx config app.config.needs_types = metamodel.needs_types app.config.needs_extra_links = metamodel.needs_extra_links - app.config.needs_extra_options = metamodel.needs_extra_options + app.config.needs_extra_options = extra_options app.config.graph_checks = metamodel.needs_graph_check app.config.prohibited_words_checks = metamodel.prohibited_words_checks @@ -251,6 +271,11 @@ def setup(app: Sphinx) -> dict[str, str | bool]: app.config.needs_reproducible_json = True app.config.needs_json_remove_defaults = True + # Generate schemas.json from the metamodel and register it with sphinx-needs. + # This enables sphinx-needs 6 schema validation: required fields, regex + # patterns on option values, and (eventually) link target type checks. + write_sn_schemas(app, metamodel) + # sphinx-collections runs on default prio 500. # We need to populate the sphinx-collections config before that happens. 
# --> 499 diff --git a/src/extensions/score_metamodel/metamodel.yaml b/src/extensions/score_metamodel/metamodel.yaml index 398195c7..6fb4c36b 100644 --- a/src/extensions/score_metamodel/metamodel.yaml +++ b/src/extensions/score_metamodel/metamodel.yaml @@ -266,8 +266,8 @@ needs_types: testcovered: ^(YES|NO)$ hash: ^.*$ # req-Id: tool_req__docs_req_attr_validity_correctness - valid_from: ^v(0|[1-9]\d*)\.(0|[1-9]\d*)(\.(0|[1-9]\d*))?$ - valid_until: ^v(0|[1-9]\d*)\.(0|[1-9]\d*)(\.(0|[1-9]\d*))?$ + valid_from: ^v(0|[1-9][0-9]*)\.(?:0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?$ + valid_until: ^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?$ tags: - requirement - requirement_excl_process @@ -299,8 +299,8 @@ needs_types: testcovered: ^(YES|NO)$ hash: ^.*$ # req-Id: tool_req__docs_req_attr_validity_correctness - valid_from: ^v(0|[1-9]\d*)\.(0|[1-9]\d*)(\.(0|[1-9]\d*))?$ - valid_until: ^v(0|[1-9]\d*)\.(0|[1-9]\d*)(\.(0|[1-9]\d*))?$ + valid_from: ^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?$ + valid_until: ^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?$ optional_links: belongs_to: feat # for evaluation tags: @@ -971,6 +971,12 @@ needs_extra_links: partially_verifies: incoming: partially_verified_by outgoing: partially_verifies + + # Decision Records + affects: + incoming: affected by + outgoing: affects + ############################################################## # Graph Checks # The graph checks focus on the relation of the needs and their attributes. diff --git a/src/extensions/score_metamodel/sn_schemas.py b/src/extensions/score_metamodel/sn_schemas.py new file mode 100644 index 00000000..fccad876 --- /dev/null +++ b/src/extensions/score_metamodel/sn_schemas.py @@ -0,0 +1,242 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Transforms the YAML metamodel into sphinx-needs JSON schema definitions. + +Reads need types from the parsed metamodel (MetaModelData) and generates a +``schemas.json`` file that sphinx-needs uses to validate each need against +the S-CORE metamodel rules (required fields, regex patterns, link constraints). + +Schema structure per need type (sphinx-needs schema format): + - ``select`` : matches needs by their ``type`` field + - ``validate.local`` : validates the need's own properties (patterns, required) + - ``validate.network`` : validates properties of linked needs (NOT YET ACTIVE) +""" + +import json +from pathlib import Path +from typing import Any + +from sphinx.application import Sphinx +from sphinx.config import Config +from sphinx_needs import logging + +from src.extensions.score_metamodel.metamodel_types import ScoreNeedType +from src.extensions.score_metamodel.yaml_parser import MetaModelData + +# Fields whose values are lists in sphinx-needs (e.g. tags: ["safety", "security"]). +# These need an "array of strings" JSON schema instead of a plain "string" schema. +SN_ARRAY_FIELDS = { + "tags", + "sections", +} + +# Fields to skip during schema generation. +IGNORE_FIELDS = { + "content", # not yet available in ubCode +} + +LOGGER = logging.get_logger(__name__) + + +def write_sn_schemas(app: Sphinx, metamodel: MetaModelData) -> None: + """Build sphinx-needs schema definitions from the metamodel and write to JSON. + + Iterates over all need types, builds a schema for each one via + ``_build_need_type_schema``, and writes the result to + ``/schemas.json``. 
+ """ + config: Config = app.config + schemas: list[dict[str, Any]] = [] + + for need_type in metamodel.needs_types: + schema = _build_need_type_schema(need_type) + if schema is not None: + schemas.append(schema) + + schema_definitions: dict[str, Any] = {"schemas": schemas} + + # Write the complete schema definitions to a JSON file in confdir + schemas_output_path = Path(app.confdir) / "schemas.json" + with open(schemas_output_path, "w", encoding="utf-8") as f: + json.dump(schema_definitions, f, indent=2, ensure_ascii=False) + + # Tell sphinx-needs to load the schema from the JSON file + config.needs_schema_definitions_from_json = "schemas.json" + # config.needs_schema_definitions = schema_definitions + + +def _classify_links( + links: dict[str, Any], type_name: str, mandatory: bool +) -> tuple[dict[str, str], dict[str, str]]: + """Classify link values into regex patterns vs. target type names. + + In the metamodel YAML, a link value can be either: + - A regex (starts with "^"), e.g. "^logic_arc_int(_op)*__.+$" + -> validated locally (the link ID must match the pattern) + - A plain type name, e.g. "comp" + -> validated via network (the linked need must have that type) + Multiple values are comma-separated, e.g. "comp, sw_unit". + + Returns: + A tuple of (regexes, targets) dicts, keyed by field name. + """ + label = "mandatory" if mandatory else "optional" + regexes: dict[str, str] = {} + targets: dict[str, str] = {} + + for field, value in links.items(): + link_values = [v.strip() for v in value.split(",")] + for link_value in link_values: + if link_value.startswith("^"): + if field in regexes: + LOGGER.error( + f"Multiple regex patterns for {label} link field " + f"'{field}' in need type '{type_name}'. " + "Only the first one will be used in the schema." 
+ ) + regexes[field] = link_value + else: + targets[field] = link_value + + return regexes, targets + + +def _build_local_validator( + mandatory_fields: dict[str, str], + optional_fields: dict[str, str], + mandatory_links_regexes: dict[str, str], + optional_links_regexes: dict[str, str], +) -> dict[str, Any]: + """Build the local validator dict for a need type's schema. + + The local validator checks the need's own properties: + - Mandatory fields must be present and match their regex pattern. + - Optional fields, if present, must match their regex pattern. + - Mandatory links must have at least one entry. + """ + properties: dict[str, Any] = {} + required: list[str] = [] + + # Mandatory fields: must be present AND match the regex pattern + for field, pattern in mandatory_fields.items(): + if field in IGNORE_FIELDS: + continue + required.append(field) + properties[field] = get_field_pattern_schema(field, pattern) + + # Optional fields: if present, must match the regex pattern + for field, pattern in optional_fields.items(): + if field in IGNORE_FIELDS: + continue + properties[field] = get_field_pattern_schema(field, pattern) + + # Mandatory links (regex): must have at least one entry + # TODO: regex pattern matching on link IDs is not yet enabled + for field in mandatory_links_regexes: + properties[field] = {"type": "array", "minItems": 1} + required.append(field) + + # Optional links (regex): allowed but not required + # TODO: regex pattern matching on link IDs is not yet enabled + for field in optional_links_regexes: + properties[field] = {"type": "array"} + + return { + "properties": properties, + "required": required, + # "unevaluatedProperties": False, + } + + +def _build_need_type_schema(need_type: ScoreNeedType) -> dict[str, Any] | None: + """Build a sphinx-needs schema entry for a single need type. + + Returns ``None`` if the need type has no constraints (no mandatory/optional + fields or links), meaning no schema validation is needed. 
+ + The returned dict has the sphinx-needs schema structure: + - ``select``: matches needs by their ``type`` field + - ``validate.local``: validates the need's own properties + - ``validate.network``: validates linked needs' types (NOT YET ACTIVE) + """ + mandatory_fields = need_type.get("mandatory_options", {}) + optional_fields = need_type.get("optional_options", {}) + mandatory_links = need_type.get("mandatory_links", {}) + optional_links = need_type.get("optional_links", {}) + + # Skip need types that have no constraints at all + if not (mandatory_fields or optional_fields or mandatory_links or optional_links): + return None + + type_name = need_type["directive"] + + # Classify link values as regex patterns vs. target type names. + # Note: links are still plain strings at this point (before postprocess_need_links). + mandatory_links_regexes, _ = _classify_links( + mandatory_links, type_name, mandatory=True + ) + optional_links_regexes, _ = _classify_links( + optional_links, type_name, mandatory=False + ) + + type_schema: dict[str, Any] = { + "id": f"need-type-{type_name}", + "severity": "violation", + "message": "Need does not conform to S-CORE metamodel", + # Selector: only apply this schema to needs with matching type + "select": { + "properties": {"type": {"const": type_name}}, + "required": ["type"], + }, + "validate": { + "local": _build_local_validator( + mandatory_fields, + optional_fields, + mandatory_links_regexes, + optional_links_regexes, + ), + }, + } + + # TODO: network validation is not yet enabled. + # When enabled, it would use the target type names (second return value + # of _classify_links) to check that linked needs have the expected type. + + return type_schema + + +def get_field_pattern_schema(field: str, pattern: str) -> dict[str, Any]: + """Return the appropriate JSON schema for a field's regex pattern. + + Array-valued fields (like ``tags``) get an array-of-strings schema; + scalar fields get a plain string schema. 
+ """ + if field in SN_ARRAY_FIELDS: + return get_array_pattern_schema(pattern) + return get_pattern_schema(pattern) + + +def get_pattern_schema(pattern: str) -> dict[str, str]: + """Return a JSON schema that validates a string against a regex pattern.""" + return { + "type": "string", + "pattern": pattern, + } + + +def get_array_pattern_schema(pattern: str) -> dict[str, Any]: + """Return a JSON schema that validates an array where each item matches a regex.""" + return { + "type": "array", + "items": get_pattern_schema(pattern), + } diff --git a/src/extensions/score_metamodel/tests/test_metamodel_load.py b/src/extensions/score_metamodel/tests/test_metamodel_load.py index 3cb67965..72568592 100644 --- a/src/extensions/score_metamodel/tests/test_metamodel_load.py +++ b/src/extensions/score_metamodel/tests/test_metamodel_load.py @@ -40,8 +40,8 @@ def test_load_metamodel_data(): assert result.needs_types[0].get("color") == "blue" assert result.needs_types[0].get("style") == "bold" assert result.needs_types[0]["mandatory_options"] == { - # default id pattern: prefix + digits, lowercase letters and underscores - "id": "^T1[0-9a-z_]+$", + # default id pattern: prefix + digits, letters and underscores + "id": "^T1[0-9a-zA-Z_]+$", "opt1": "value1", } assert result.needs_types[0]["optional_options"] == { diff --git a/src/extensions/score_metamodel/tests/test_sn_schemas.py b/src/extensions/score_metamodel/tests/test_sn_schemas.py new file mode 100644 index 00000000..b4ed5c24 --- /dev/null +++ b/src/extensions/score_metamodel/tests/test_sn_schemas.py @@ -0,0 +1,402 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +# pyright: reportPrivateUsage=false +import json +from pathlib import Path +from typing import Any, cast +from unittest.mock import MagicMock, patch + +import pytest + +from src.extensions.score_metamodel.metamodel_types import ScoreNeedType +from src.extensions.score_metamodel.sn_schemas import ( + IGNORE_FIELDS, + SN_ARRAY_FIELDS, + _build_local_validator, + _build_need_type_schema, + _classify_links, + get_array_pattern_schema, + get_field_pattern_schema, + get_pattern_schema, + write_sn_schemas, +) + + +# ============================================================================= +# Tests for get_pattern_schema +# ============================================================================= + + +class TestGetPatternSchema: + def test_returns_string_type_with_pattern(self) -> None: + result = get_pattern_schema("^[A-Z]+$") + assert result == {"type": "string", "pattern": "^[A-Z]+$"} + + def test_preserves_complex_regex(self) -> None: + pattern = r"^(feat|fix|chore)\/.+$" + result = get_pattern_schema(pattern) + assert result["type"] == "string" + assert result["pattern"] == pattern + + +# ============================================================================= +# Tests for get_array_pattern_schema +# ============================================================================= + + +class TestGetArrayPatternSchema: + def test_returns_array_type_with_items(self) -> None: + result = get_array_pattern_schema("^tag_.*$") + assert result == { + "type": "array", + "items": {"type": "string", "pattern": "^tag_.*$"}, + } + + def test_items_match_get_pattern_schema(self) -> None: + pattern = "^[a-z]+$" + result = get_array_pattern_schema(pattern) + 
assert result["items"] == get_pattern_schema(pattern) + + +# ============================================================================= +# Tests for get_field_pattern_schema +# ============================================================================= + + +class TestGetFieldPatternSchema: + def test_scalar_field_returns_string_schema(self) -> None: + result = get_field_pattern_schema("title", "^.+$") + assert result == {"type": "string", "pattern": "^.+$"} + + def test_array_field_returns_array_schema(self) -> None: + for array_field in SN_ARRAY_FIELDS: + result = get_field_pattern_schema(array_field, "^[a-z]+$") + assert result["type"] == "array", f"Field '{array_field}' should be array" + assert "items" in result + + def test_unknown_field_returns_string_schema(self) -> None: + result = get_field_pattern_schema("some_custom_field", "^.*$") + assert result["type"] == "string" + + +# ============================================================================= +# Tests for _classify_links +# ============================================================================= + + +class TestClassifyLinks: + def test_regex_link_classified_as_regex(self) -> None: + links = {"parent_need": "^logic_arc_int__.+$"} + regexes, targets = _classify_links(links, "my_type", mandatory=True) + assert regexes == {"parent_need": "^logic_arc_int__.+$"} + assert targets == {} + + def test_plain_type_classified_as_target(self) -> None: + links = {"satisfies": "comp"} + regexes, targets = _classify_links(links, "my_type", mandatory=False) + assert regexes == {} + assert targets == {"satisfies": "comp"} + + def test_comma_separated_mixed_values(self) -> None: + links = {"related": "^arc_.+$, comp"} + regexes, targets = _classify_links(links, "my_type", mandatory=True) + assert regexes == {"related": "^arc_.+$"} + assert targets == {"related": "comp"} + + def test_empty_links(self) -> None: + regexes, targets = _classify_links({}, "my_type", mandatory=True) + assert regexes == {} + 
assert targets == {} + + def test_multiple_fields(self) -> None: + links = { + "satisfies": "req", + "parent": "^parent__.+$", + } + regexes, targets = _classify_links(links, "my_type", mandatory=False) + assert regexes == {"parent": "^parent__.+$"} + assert targets == {"satisfies": "req"} + + def test_multiple_regex_for_same_field_logs_error(self) -> None: + links = {"field": "^regex1$, ^regex2$"} + with patch("src.extensions.score_metamodel.sn_schemas.LOGGER") as mock_logger: + regexes, _ = _classify_links(links, "my_type", mandatory=True) + mock_logger.error.assert_called_once() + # Last regex overwrites previous ones + assert regexes == {"field": "^regex2$"} + + def test_multiple_plain_targets_last_wins(self) -> None: + links = {"field": "comp, sw_unit"} + regexes, targets = _classify_links(links, "my_type", mandatory=True) + assert regexes == {} + # Last target overwrites + assert targets == {"field": "sw_unit"} + + +# ============================================================================= +# Tests for _build_local_validator +# ============================================================================= + + +class TestBuildLocalValidator: + def test_mandatory_fields_are_required(self) -> None: + mandatory = {"status": "^(valid|draft)$"} + result = _build_local_validator(mandatory, {}, {}, {}) + assert "status" in result["required"] + assert "status" in result["properties"] + assert result["properties"]["status"]["pattern"] == "^(valid|draft)$" + + def test_optional_fields_not_required(self) -> None: + optional = {"comment": "^.*$"} + result = _build_local_validator({}, optional, {}, {}) + assert "comment" not in result["required"] + assert "comment" in result["properties"] + + def test_ignored_fields_excluded(self) -> None: + mandatory = {field: "^.*$" for field in IGNORE_FIELDS} + optional = {field: "^.*$" for field in IGNORE_FIELDS} + result = _build_local_validator(mandatory, optional, {}, {}) + for field in IGNORE_FIELDS: + assert field not in 
result["properties"] + assert field not in result["required"] + + def test_mandatory_link_regexes_required_with_min_items(self) -> None: + mandatory_link_regexes = {"satisfies": "^req__.+$"} + result = _build_local_validator({}, {}, mandatory_link_regexes, {}) + assert "satisfies" in result["required"] + assert result["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def test_optional_link_regexes_not_required(self) -> None: + optional_link_regexes = {"related": "^rel__.+$"} + result = _build_local_validator({}, {}, {}, optional_link_regexes) + assert "related" not in result["required"] + assert result["properties"]["related"] == {"type": "array"} + + def test_combined_fields_and_links(self) -> None: + mandatory = {"status": "^valid$"} + optional = {"comment": "^.*$"} + mandatory_link_re = {"satisfies": "^req__.+$"} + optional_link_re = {"related": "^rel__.+$"} + result = _build_local_validator( + mandatory, optional, mandatory_link_re, optional_link_re + ) + assert set(result["required"]) == {"status", "satisfies"} + assert set(result["properties"].keys()) == { + "status", + "comment", + "satisfies", + "related", + } + + def test_empty_inputs(self) -> None: + result = _build_local_validator({}, {}, {}, {}) + assert result["properties"] == {} + assert result["required"] == [] + + def test_array_field_in_mandatory(self) -> None: + mandatory = {"tags": "^(safety|security)$"} + result = _build_local_validator(mandatory, {}, {}, {}) + assert result["properties"]["tags"]["type"] == "array" + assert "items" in result["properties"]["tags"] + + +# ============================================================================= +# Tests for _build_need_type_schema +# ============================================================================= + + +def _make_need_type(**overrides: Any) -> ScoreNeedType: + """Helper to create a ScoreNeedType-like dict.""" + base: dict[str, Any] = { + "directive": "test_type", + "title": "Test Type", + "prefix": "TT_", + } + 
base.update(overrides) + return cast(ScoreNeedType, base) + + +class TestBuildNeedTypeSchema: + def test_returns_none_for_no_constraints(self) -> None: + need_type = _make_need_type() + assert _build_need_type_schema(need_type) is None + + def test_returns_none_for_empty_constraints(self) -> None: + need_type = _make_need_type( + mandatory_options={}, + optional_options={}, + mandatory_links={}, + optional_links={}, + ) + assert _build_need_type_schema(need_type) is None + + def test_schema_has_correct_structure(self) -> None: + need_type = _make_need_type( + mandatory_options={"status": "^valid$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + assert schema["id"] == "need-type-test_type" + assert schema["severity"] == "violation" + assert "select" in schema + assert schema["select"]["properties"]["type"]["const"] == "test_type" + assert "validate" in schema + assert "local" in schema["validate"] + + def test_mandatory_fields_in_local_validator(self) -> None: + need_type = _make_need_type( + mandatory_options={"status": "^(valid|draft)$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "status" in local["required"] + assert "status" in local["properties"] + + def test_optional_fields_in_local_validator(self) -> None: + need_type = _make_need_type( + optional_options={"comment": "^.*$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "comment" not in local["required"] + assert "comment" in local["properties"] + + def test_mandatory_links_with_regex(self) -> None: + need_type = _make_need_type( + mandatory_links={"satisfies": "^req__.+$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "satisfies" in local["required"] + assert local["properties"]["satisfies"] == {"type": "array", "minItems": 1} + + def 
test_mandatory_links_with_plain_target(self) -> None: + need_type = _make_need_type( + mandatory_links={"satisfies": "comp"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + # Plain targets don't produce local validation entries + assert "satisfies" not in local["properties"] + + def test_optional_links_with_regex(self) -> None: + need_type = _make_need_type( + optional_links={"related": "^rel__.+$"}, + ) + schema = _build_need_type_schema(need_type) + assert schema is not None + local = schema["validate"]["local"] + assert "related" not in local["required"] + assert local["properties"]["related"] == {"type": "array"} + + +# ============================================================================= +# Tests for write_sn_schemas +# ============================================================================= + + +class TestWriteSnSchemas: + def test_writes_json_file(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + need_type: dict[str, Any] = { + "directive": "req", + "title": "Requirement", + "prefix": "REQ_", + "mandatory_options": {"status": "^valid$"}, + } + metamodel = MagicMock() + metamodel.needs_types = [need_type] + + write_sn_schemas(app, metamodel) + + output_path: Path = tmp_path / "schemas.json" + assert output_path.exists() + data = json.loads(output_path.read_text(encoding="utf-8")) + assert "schemas" in data + assert len(data["schemas"]) == 1 + assert data["schemas"][0]["id"] == "need-type-req" + + def test_sets_config_value(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + metamodel = MagicMock() + metamodel.needs_types = [] + + write_sn_schemas(app, metamodel) + + assert app.config.needs_schema_definitions_from_json == "schemas.json" + + def test_skips_need_types_without_constraints(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = 
str(tmp_path) + app.config = MagicMock() + + need_type_with: dict[str, Any] = { + "directive": "req", + "title": "Requirement", + "prefix": "REQ_", + "mandatory_options": {"status": "^valid$"}, + } + need_type_without: dict[str, Any] = { + "directive": "info", + "title": "Info", + "prefix": "INF_", + } + metamodel = MagicMock() + metamodel.needs_types = [need_type_with, need_type_without] + + write_sn_schemas(app, metamodel) + + output_path: Path = tmp_path / "schemas.json" + data = json.loads(output_path.read_text(encoding="utf-8")) + assert len(data["schemas"]) == 1 + assert data["schemas"][0]["id"] == "need-type-req" + + def test_writes_valid_json_with_multiple_types(self, tmp_path: Path) -> None: + app = MagicMock() + app.confdir = str(tmp_path) + app.config = MagicMock() + + need_types: list[dict[str, Any]] = [ + { + "directive": "req", + "title": "Requirement", + "prefix": "REQ_", + "mandatory_options": {"status": "^valid$"}, + }, + { + "directive": "spec", + "title": "Specification", + "prefix": "SPEC_", + "optional_options": {"comment": "^.*$"}, + }, + ] + metamodel = MagicMock() + metamodel.needs_types = need_types + + write_sn_schemas(app, metamodel) + + output_path: Path = tmp_path / "schemas.json" + data = json.loads(output_path.read_text(encoding="utf-8")) + assert len(data["schemas"]) == 2 + ids = {s["id"] for s in data["schemas"]} + assert ids == {"need-type-req", "need-type-spec"} diff --git a/src/extensions/score_metamodel/tests/test_sn_schemas_integration.py b/src/extensions/score_metamodel/tests/test_sn_schemas_integration.py new file mode 100644 index 00000000..f9253814 --- /dev/null +++ b/src/extensions/score_metamodel/tests/test_sn_schemas_integration.py @@ -0,0 +1,438 @@ +# ******************************************************************************* +# Copyright (c) 2026 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +# pyright: reportPrivateUsage=false +"""Integration tests for schema generation against the real SCORE metamodel. + +Test Design +----------- + +Objective: + Verify that the schemas generated by ``sn_schemas.py`` from the real + ``metamodel.yaml`` correctly accept valid needs and reject invalid ones. + This proves the schema-based validation (sphinx-needs 6) is a faithful + translation of the metamodel rules previously enforced only by Python + checks in ``checks/check_options.py``. + +Approach: + 1. Load the S-Core ``metamodel.yaml`` via ``load_metamodel_data()``. + 2. Generate schemas for every need type via ``_build_need_type_schema()``. + 3. Validate sample needs against the generated schemas using + ``jsonschema_rs.Draft7Validator``, the same JSON Schema engine + sphinx-needs uses at build time. + +Test categories: + + **Structural sweep** (``TestAllSchemasStructural``) + Iterates over ALL need types from the S-Core metamodel and verifies: + - Every generated schema is a valid JSON Schema (constructable). + - Schema structure matches the sphinx-needs contract + (id, severity, select, validate.local). + - The ``select`` schema matches only the correct need type. + - Fields in ``IGNORE_FIELDS`` (e.g. ``content``) are excluded. + - Every ``mandatory_options`` field appears in ``required``. + - Every ``optional_options`` field appears in ``properties`` + but NOT in ``required``. + - Regex patterns in schemas match the metamodel definitions exactly. + - Types without any constraints produce no schema. 
+ + **Representative type tests** (``TestFeatReqSchema``, ``TestCompSchema``, + ``TestFeatSchema``) + For a curated set of need types, construct valid and invalid need + dicts and assert the schema accepts or rejects them. This covers + the constraint categories that exist in the metamodel: + + - ``feat_req``: Mandatory fields with regex patterns + (reqtype, security, safety, status), optional fields with patterns + (reqcovered, testcovered), mandatory link with plain target + (satisfies -> stkh_req, no local link validation), and ``content`` + in ``IGNORE_FIELDS``. + - ``comp``: Mandatory fields only, no mandatory links. + - ``feat``: Mandatory link with regex pattern + (includes: ``^logic_arc_int(_op)*__.+$``), producing a local + ``minItems: 1`` constraint. + +Validation helpers: + ``assert_schema_valid`` / ``assert_schema_invalid`` replicate the + two-step sphinx-needs validation: first match the ``select`` schema + (ensures the schema applies to the need's type), then validate against + ``validate.local``. + +Limitations: + - Network validation (checking linked needs' types) is not yet active + in ``sn_schemas.py`` and therefore not tested here. + - Graph checks (safety level decomposition, prohibited words) remain in + Python code and are outside the scope of schema-based validation. + - The ``content`` field is excluded via ``IGNORE_FIELDS`` because it is + not yet available in ubCode; this exclusion is explicitly tested. 
+""" + +from typing import Any, cast + +import jsonschema_rs +import pytest + +from src.extensions.score_metamodel.metamodel_types import ScoreNeedType +from src.extensions.score_metamodel.sn_schemas import ( + IGNORE_FIELDS, + _build_need_type_schema, +) +from src.extensions.score_metamodel.yaml_parser import ( + MetaModelData, + load_metamodel_data, +) + + +# ============================================================================= +# Fixtures +# ============================================================================= + + +@pytest.fixture(scope="module") +def metamodel() -> MetaModelData: + """Load the S-Core metamodel.yaml once for all tests in this module.""" + return load_metamodel_data() + + +@pytest.fixture(scope="module") +def schemas_by_type(metamodel: MetaModelData) -> dict[str, dict[str, Any]]: + """Generate sphinx-needs schemas for all need types and index by directive name.""" + result: dict[str, dict[str, Any]] = {} + for need_type in metamodel.needs_types: + schema = _build_need_type_schema(need_type) + if schema is not None: + result[need_type["directive"]] = schema + return result + + +@pytest.fixture(scope="module") +def need_types_by_directive(metamodel: MetaModelData) -> dict[str, ScoreNeedType]: + """Index need types by directive name for easy lookup.""" + return {nt["directive"]: nt for nt in metamodel.needs_types} + + +# ============================================================================= +# Helpers +# ============================================================================= + + +def assert_schema_valid(need_dict: dict[str, Any], schema: dict[str, Any]) -> None: + """Assert that a need dict passes the schema's local validator.""" + select_validator = jsonschema_rs.Draft7Validator(schema["select"]) + assert select_validator.is_valid(need_dict), ( + f"Need type '{need_dict.get('type')}' did not match schema selector" + ) + local_validator = jsonschema_rs.Draft7Validator(schema["validate"]["local"]) + # raises 
ValidationError with details on failure + local_validator.validate(need_dict) + + +def assert_schema_invalid(need_dict: dict[str, Any], schema: dict[str, Any]) -> None: + """Assert that a need dict FAILS the schema's local validator.""" + select_validator = jsonschema_rs.Draft7Validator(schema["select"]) + assert select_validator.is_valid(need_dict), ( + f"Need type '{need_dict.get('type')}' did not match schema selector" + ) + local_validator = jsonschema_rs.Draft7Validator(schema["validate"]["local"]) + assert not local_validator.is_valid(need_dict), ( + f"Expected validation to fail for need: {need_dict}" + ) + + +# ============================================================================= +# Structural sweep over all types +# ============================================================================= + + +class TestAllSchemasStructural: + """Verify every schema generated from the real metamodel is well-formed.""" + + def test_at_least_one_schema_generated( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert len(schemas_by_type) > 0 + + def test_all_schemas_are_valid_json_schemas( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """Every schema's select and validate.local must be constructable.""" + for schema in schemas_by_type.values(): + jsonschema_rs.Draft7Validator(schema["select"]) + jsonschema_rs.Draft7Validator(schema["validate"]["local"]) + + def test_every_schema_has_required_structure( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + for type_name, schema in schemas_by_type.items(): + assert schema["id"] == f"need-type-{type_name}" + assert "severity" in schema + assert "select" in schema + assert "local" in schema["validate"] + + def test_select_matches_correct_type_only( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + for type_name, schema in schemas_by_type.items(): + selector = jsonschema_rs.Draft7Validator(schema["select"]) + assert selector.is_valid({"type": 
type_name}) + assert not selector.is_valid({"type": f"NOT_{type_name}"}) + + def test_ignored_fields_never_in_schemas( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + for type_name, schema in schemas_by_type.items(): + local = schema["validate"]["local"] + for field in IGNORE_FIELDS: + assert field not in local.get("properties", {}), ( + f"{type_name}: '{field}' should be ignored" + ) + assert field not in local.get("required", []), ( + f"{type_name}: '{field}' should be ignored" + ) + + def test_mandatory_options_are_required( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + for field in need_type.get("mandatory_options", {}): + if field in IGNORE_FIELDS: + continue + assert field in local["required"], ( + f"{type_name}: mandatory field '{field}' missing from required" + ) + + def test_optional_options_not_required( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + for field in need_type.get("optional_options", {}): + if field in IGNORE_FIELDS: + continue + assert field in local["properties"], ( + f"{type_name}: optional field '{field}' missing from properties" + ) + assert field not in local["required"], ( + f"{type_name}: optional field '{field}' should not be required" + ) + + def test_mandatory_option_patterns_match_metamodel( + self, + schemas_by_type: dict[str, dict[str, Any]], + need_types_by_directive: dict[str, ScoreNeedType], + ) -> None: + for type_name, schema in schemas_by_type.items(): + need_type = need_types_by_directive[type_name] + local = schema["validate"]["local"] + for field, pattern in 
need_type.get("mandatory_options", {}).items(): + if field in IGNORE_FIELDS: + continue + prop = local["properties"][field] + if prop.get("type") == "array": + assert prop["items"]["pattern"] == pattern, ( + f"{type_name}.{field}: pattern mismatch" + ) + else: + assert prop["pattern"] == pattern, ( + f"{type_name}.{field}: pattern mismatch" + ) + + def test_types_without_constraints_have_no_schema( + self, + metamodel: MetaModelData, + schemas_by_type: dict[str, dict[str, Any]], + ) -> None: + for nt in metamodel.needs_types: + directive = nt["directive"] + has_constraints = bool( + nt.get("mandatory_options") + or nt.get("optional_options") + or nt.get("mandatory_links") + or nt.get("optional_links") + ) + if not has_constraints: + assert directive not in schemas_by_type, ( + f"{directive} has no constraints but got a schema" + ) + + +# ============================================================================= +# feat_req: mandatory fields, mandatory link (plain target = no local link check) +# ============================================================================= + + +class TestFeatReqSchema: + """Integration tests for feat_req using the real metamodel.""" + + @staticmethod + def _make_valid() -> dict[str, Any]: + return { + "type": "feat_req", + "id": "feat_req__test__001", + "reqtype": "Functional", + "security": "YES", + "safety": "QM", + "status": "valid", + } + + def test_valid_need_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert_schema_valid(self._make_valid(), schemas_by_type["feat_req"]) + + def test_missing_status_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["status"] + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_missing_safety_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["safety"] + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def 
test_wrong_status_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["status"] = "approved" # not in ^(valid|invalid)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_wrong_safety_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["safety"] = "ASIL_D" # not in ^(QM|ASIL_B)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_wrong_reqtype_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["reqtype"] = "Performance" # not in ^(Functional|Interface|...)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_content_not_validated( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + """content is in IGNORE_FIELDS — missing content must not fail.""" + need = self._make_valid() + # no 'content' key at all — should still pass + assert_schema_valid(need, schemas_by_type["feat_req"]) + + def test_invalid_optional_field_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["reqcovered"] = "MAYBE" # not in ^(YES|NO)$ + assert_schema_invalid(need, schemas_by_type["feat_req"]) + + def test_valid_optional_field_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["reqcovered"] = "YES" + assert_schema_valid(need, schemas_by_type["feat_req"]) + + def test_extra_unknown_fields_pass( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["unknown_field"] = "anything" + assert_schema_valid(need, schemas_by_type["feat_req"]) + + +# ============================================================================= +# comp: mandatory fields, no mandatory links +# ============================================================================= + + +class TestCompSchema: + """Integration 
tests for comp using the real metamodel.""" + + @staticmethod + def _make_valid() -> dict[str, Any]: + return { + "type": "comp", + "id": "comp__my_component", + "security": "YES", + "safety": "QM", + "status": "valid", + } + + def test_valid_need_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert_schema_valid(self._make_valid(), schemas_by_type["comp"]) + + def test_missing_security_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["security"] + assert_schema_invalid(need, schemas_by_type["comp"]) + + def test_wrong_security_pattern_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["security"] = "MAYBE" # not in ^(YES|NO)$ + assert_schema_invalid(need, schemas_by_type["comp"]) + + +# ============================================================================= +# feat: mandatory link with regex (includes: ^logic_arc_int(_op)*__.+$) +# ============================================================================= + + +class TestFeatSchema: + """Integration tests for feat — has a mandatory link with regex pattern.""" + + @staticmethod + def _make_valid() -> dict[str, Any]: + return { + "type": "feat", + "id": "feat__my_feature", + "security": "YES", + "safety": "QM", + "status": "valid", + "includes": ["logic_arc_int__something"], + } + + def test_valid_need_passes( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + assert_schema_valid(self._make_valid(), schemas_by_type["feat"]) + + def test_missing_mandatory_link_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + del need["includes"] + assert_schema_invalid(need, schemas_by_type["feat"]) + + def test_empty_mandatory_link_fails( + self, schemas_by_type: dict[str, dict[str, Any]] + ) -> None: + need = self._make_valid() + need["includes"] = [] # minItems: 1 violated + assert_schema_invalid(need, 
schemas_by_type["feat"])
diff --git a/src/extensions/score_metamodel/yaml_parser.py b/src/extensions/score_metamodel/yaml_parser.py
index 64916a90..8c83b4e5 100644
--- a/src/extensions/score_metamodel/yaml_parser.py
+++ b/src/extensions/score_metamodel/yaml_parser.py
@@ -119,7 +119,7 @@ def _parse_need_type(
     # Ensure ID regex is set
     if "id" not in t["mandatory_options"]:
         prefix = t["prefix"]
-        t["mandatory_options"]["id"] = f"^{prefix}[0-9a-z_]+$"
+        t["mandatory_options"]["id"] = f"^{prefix}[0-9a-zA-Z_]+$"
 
     if "color" in yaml_data:
         t["color"] = yaml_data["color"]
diff --git a/src/extensions/score_source_code_linker/__init__.py b/src/extensions/score_source_code_linker/__init__.py
index 094ebf4a..6e5e07b4 100644
--- a/src/extensions/score_source_code_linker/__init__.py
+++ b/src/extensions/score_source_code_linker/__init__.py
@@ -375,18 +375,25 @@ def inject_links_into_needs(app: Sphinx, env: BuildEnvironment) -> None:
 
     need_as_dict = cast(dict[str, object], need)
 
-    need_as_dict["source_code_link"] = ", ".join(
-        f"{get_github_link(n)}<>{n.file}:{n.line}"
-        for n in source_code_links.links.CodeLinks
-    )
-    need_as_dict["testlink"] = ", ".join(
-        f"{get_github_link(n)}<>{n.name}" for n in source_code_links.links.TestLinks
-    )
+    modified_need = False
+    if source_code_links.links.CodeLinks:
+        modified_need = True
+        need_as_dict["source_code_link"] = ", ".join(
+            f"{get_github_link(n)}<>{n.file}:{n.line}"
+            for n in source_code_links.links.CodeLinks
+        )
+    if source_code_links.links.TestLinks:
+        modified_need = True
+        need_as_dict["testlink"] = ", ".join(
+            f"{get_github_link(n)}<>{n.name}"
+            for n in source_code_links.links.TestLinks
+        )
 
-    # NOTE: Removing & adding the need is important to make sure
-    # the needs gets 're-evaluated'.
-    Needs_Data.remove_need(need["id"])
-    Needs_Data.add_need(need)
+    if modified_need:
+        # NOTE: Removing & adding the need is important to make sure
+        # the need gets 're-evaluated'.
+ Needs_Data.remove_need(need["id"]) + Needs_Data.add_need(need) # ╭──────────────────────────────────────╮ diff --git a/src/extensions/score_sync_toml/__init__.py b/src/extensions/score_sync_toml/__init__.py index 79ebfb7a..72e598e6 100644 --- a/src/extensions/score_sync_toml/__init__.py +++ b/src/extensions/score_sync_toml/__init__.py @@ -59,6 +59,12 @@ def setup(app: Sphinx) -> dict[str, str | bool]: ] # TODO remove the suppress_warnings once fixed + app.config.needscfg_exclude_vars = [ + "needs_from_toml", + "needs_from_toml_table", + # "needs_schema_definitions_from_json", + ] + return { "version": "0.1", "parallel_read_safe": True,