diff --git a/scripts_bazel/merge_sourcelinks.py b/scripts_bazel/merge_sourcelinks.py index 88499d11a..08b275e3d 100644 --- a/scripts_bazel/merge_sourcelinks.py +++ b/scripts_bazel/merge_sourcelinks.py @@ -39,7 +39,6 @@ def main(): ) _ = parser.add_argument( "--known_good", - required=True, - help="Path to a required 'known good' JSON file (provided by Bazel).", + help="Path to an optional 'known good' JSON file (provided by Bazel).", ) _ = parser.add_argument( diff --git a/scripts_bazel/tests/generate_sourcelinks_cli_test.py b/scripts_bazel/tests/generate_sourcelinks_cli_test.py index f25acc5ab..652b71533 100644 --- a/scripts_bazel/tests/generate_sourcelinks_cli_test.py +++ b/scripts_bazel/tests/generate_sourcelinks_cli_test.py @@ -14,14 +14,19 @@ """Tests for generate_sourcelinks_cli.py""" import json -import subprocess import sys from pathlib import Path +import pytest + +import scripts_bazel.generate_sourcelinks_cli + _MY_PATH = Path(__file__).parent -def test_generate_sourcelinks_cli_basic(tmp_path: Path) -> None: +def test_generate_sourcelinks_cli_basic( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: """Test basic functionality of generate_sourcelinks_cli.""" # Create a test source file with a traceability tag test_file = tmp_path / "test_source.py" @@ -37,27 +42,30 @@ def some_function(): output_file = tmp_path / "output.json" # Execute the script - result = subprocess.run( + monkeypatch.setattr( + sys, + "argv", [ - sys.executable, _MY_PATH.parent / "generate_sourcelinks_cli.py", "--output", str(output_file), str(test_file), ], ) + result = scripts_bazel.generate_sourcelinks_cli.main() - assert result.returncode == 0 + assert result == 0 assert output_file.exists() # Check the output content with open(output_file) as f: data: list[dict[str, str | int]] = json.load(f) assert isinstance(data, list) - assert len(data) > 0 + # First element is the metadata dict; there must be at least one need entry after it + assert len(data) > 1 - # Verify schema of each entry - for entry in data: + # Verify schema of each need entry 
(skip the first metadata element) + for entry in data[1:]: assert "file" in entry assert "line" in entry assert "tag" in entry @@ -71,4 +79,4 @@ def some_function(): assert isinstance(entry["need"], str) assert isinstance(entry["full_line"], str) - assert any(entry["need"] == "tool_req__docs_arch_types" for entry in data) + assert any(entry["need"] == "tool_req__docs_arch_types" for entry in data[1:]) diff --git a/scripts_bazel/tests/merge_sourcelinks_test.py b/scripts_bazel/tests/merge_sourcelinks_test.py index 9f92cfd6c..75c63f4ff 100644 --- a/scripts_bazel/tests/merge_sourcelinks_test.py +++ b/scripts_bazel/tests/merge_sourcelinks_test.py @@ -14,27 +14,35 @@ """Tests for merge_sourcelinks.py""" import json -import subprocess import sys from pathlib import Path +import pytest + +import scripts_bazel.merge_sourcelinks + _MY_PATH = Path(__file__).parent -def test_merge_sourcelinks_basic(tmp_path: Path) -> None: +def test_merge_sourcelinks_basic( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: """Test basic merge functionality.""" - # Create test JSON files with correct schema + # Each sourcelinks JSON starts with a metadata dict followed by need entries + local_meta = {"repo_name": "local_repo", "hash": "", "url": ""} + file1 = tmp_path / "links1.json" file1.write_text( json.dumps( [ + local_meta, { "file": "test1.py", "line": 10, "tag": "# req-Id:", "need": "tool_req__docs_arch_types", "full_line": "# req-Id: tool_req__docs_arch_types", - } + }, ] ) ) @@ -43,22 +51,24 @@ def test_merge_sourcelinks_basic(tmp_path: Path) -> None: file2.write_text( json.dumps( [ + local_meta, { "file": "test2.py", "line": 20, "tag": "# req-Id:", "need": "gd_req__req_validity", "full_line": "# req-Id: gd_req__req_validity", - } + }, ] ) ) output_file = tmp_path / "merged.json" - result = subprocess.run( + monkeypatch.setattr( + sys, + "argv", [ - sys.executable, _MY_PATH.parent / "merge_sourcelinks.py", "--output", str(output_file), @@ -66,8 +76,9 @@ def 
test_merge_sourcelinks_basic(tmp_path: Path) -> None: str(file2), ], ) + result = scripts_bazel.merge_sourcelinks.main() - assert result.returncode == 0 + assert result == 0 assert output_file.exists() with open(output_file) as f: diff --git a/src/extensions/score_metamodel/metamodel.yaml b/src/extensions/score_metamodel/metamodel.yaml index a7a28c66a..208b1a99b 100644 --- a/src/extensions/score_metamodel/metamodel.yaml +++ b/src/extensions/score_metamodel/metamodel.yaml @@ -736,6 +736,7 @@ needs_types: violates: feat_arc_sta optional_options: mitigation_issue: ^https://github.com/.*$ + safety_relevant: ^(yes|no)$ optional_links: mitigated_by: stkh_req, aou_req parts: 3 @@ -758,6 +759,7 @@ needs_types: optional_options: # req-Id: tool_req__docs_saf_attrs_mitigation_issue mitigation_issue: ^https://github.com/.*$ + safety_relevant: ^(yes|no)$ optional_links: # req-Id: tool_req__docs_saf_attrs_mitigated_by # (only mandatory once valid status == valid) @@ -782,6 +784,7 @@ needs_types: optional_options: # req-Id: tool_req__docs_saf_attrs_mitigation_issue mitigation_issue: ^https://github.com/.*$ + safety_relevant: ^(yes|no)$ mandatory_links: # req-Id: tool_req__docs_saf_attrs_violates violates: comp_arc_sta @@ -810,6 +813,8 @@ needs_types: optional_options: # req-Id: tool_req__docs_saf_attrs_mitigation_issue mitigation_issue: ^https://github.com/.*$ + safety_relevant: ^(yes|no)$ + root_cause: ^[\s\S]+$ mandatory_links: # req-Id: tool_req__docs_saf_attrs_violates violates: feat_arc_dyn @@ -837,6 +842,8 @@ needs_types: optional_options: # req-Id: tool_req__docs_saf_attrs_mitigation_issue mitigation_issue: ^https://github.com/.*$ + safety_relevant: ^(yes|no)$ + root_cause: ^[\s\S]+$ mandatory_links: # req-Id: tool_req__docs_saf_attrs_violates violates: comp_arc_dyn diff --git a/src/extensions/score_metamodel/yaml_parser.py b/src/extensions/score_metamodel/yaml_parser.py index f40556bbf..4a28768f4 100644 --- a/src/extensions/score_metamodel/yaml_parser.py +++ 
b/src/extensions/score_metamodel/yaml_parser.py @@ -95,6 +95,10 @@ def default_options(): # Introduced with sphinx-needs 6.3.0 "is_import", "constraints", + # Auto-populated by sphinx-needs when document hierarchy is detected + # (e.g. via needimport placing needs as adjacent siblings in the doctree) + "parent_need", + "parent_needs", }