Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion scripts_bazel/merge_sourcelinks.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@ def main():
)
_ = parser.add_argument(
"--known_good",
required=True,
        help="Path to an optional 'known good' JSON file (provided by Bazel).",
)
_ = parser.add_argument(
Expand Down
26 changes: 17 additions & 9 deletions scripts_bazel/tests/generate_sourcelinks_cli_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,19 @@
"""Tests for generate_sourcelinks_cli.py"""

import json
import subprocess
import sys
from pathlib import Path

import pytest

import scripts_bazel.generate_sourcelinks_cli

_MY_PATH = Path(__file__).parent


def test_generate_sourcelinks_cli_basic(tmp_path: Path) -> None:
def test_generate_sourcelinks_cli_basic(
tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
"""Test basic functionality of generate_sourcelinks_cli."""
# Create a test source file with a traceability tag
test_file = tmp_path / "test_source.py"
Expand All @@ -37,27 +42,30 @@ def some_function():
output_file = tmp_path / "output.json"

# Execute the script
result = subprocess.run(
monkeypatch.setattr(
sys,
"argv",
[
sys.executable,
_MY_PATH.parent / "generate_sourcelinks_cli.py",
"--output",
str(output_file),
str(test_file),
],
)
result = scripts_bazel.generate_sourcelinks_cli.main()

assert result.returncode == 0
assert result == 0
assert output_file.exists()

# Check the output content
with open(output_file) as f:
data: list[dict[str, str | int]] = json.load(f)
assert isinstance(data, list)
assert len(data) > 0
# First element is the metadata dict; there must be at least one need entry after it
assert len(data) > 1

# Verify schema of each entry
for entry in data:
# Verify schema of each need entry (skip the first metadata element)
for entry in data[1:]:
assert "file" in entry
assert "line" in entry
assert "tag" in entry
Expand All @@ -71,4 +79,4 @@ def some_function():
assert isinstance(entry["need"], str)
assert isinstance(entry["full_line"], str)

assert any(entry["need"] == "tool_req__docs_arch_types" for entry in data)
assert any(entry["need"] == "tool_req__docs_arch_types" for entry in data[1:])
27 changes: 19 additions & 8 deletions scripts_bazel/tests/merge_sourcelinks_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,27 +14,35 @@
"""Tests for merge_sourcelinks.py"""

import json
import subprocess
import sys
from pathlib import Path

import pytest

import scripts_bazel.merge_sourcelinks

_MY_PATH = Path(__file__).parent


def test_merge_sourcelinks_basic(tmp_path: Path) -> None:
def test_merge_sourcelinks_basic(
tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
"""Test basic merge functionality."""
# Create test JSON files with correct schema
# Each sourcelinks JSON starts with a metadata dict followed by need entries
local_meta = {"repo_name": "local_repo", "hash": "", "url": ""}

file1 = tmp_path / "links1.json"
file1.write_text(
json.dumps(
[
local_meta,
{
"file": "test1.py",
"line": 10,
"tag": "# req-Id:",
"need": "tool_req__docs_arch_types",
"full_line": "# req-Id: tool_req__docs_arch_types",
}
},
]
)
)
Expand All @@ -43,31 +51,34 @@ def test_merge_sourcelinks_basic(tmp_path: Path) -> None:
file2.write_text(
json.dumps(
[
local_meta,
{
"file": "test2.py",
"line": 20,
"tag": "# req-Id:",
"need": "gd_req__req_validity",
"full_line": "# req-Id: gd_req__req_validity",
}
},
]
)
)

output_file = tmp_path / "merged.json"

result = subprocess.run(
monkeypatch.setattr(
sys,
"argv",
[
sys.executable,
_MY_PATH.parent / "merge_sourcelinks.py",
"--output",
str(output_file),
str(file1),
str(file2),
],
)
result = scripts_bazel.merge_sourcelinks.main()

assert result.returncode == 0
assert result == 0
assert output_file.exists()

with open(output_file) as f:
Expand Down
7 changes: 7 additions & 0 deletions src/extensions/score_metamodel/metamodel.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -736,6 +736,7 @@ needs_types:
violates: feat_arc_sta
optional_options:
mitigation_issue: ^https://github.com/.*$
safety_relevant: ^(yes|no)$
optional_links:
mitigated_by: stkh_req, aou_req
parts: 3
Expand All @@ -758,6 +759,7 @@ needs_types:
optional_options:
# req-Id: tool_req__docs_saf_attrs_mitigation_issue
mitigation_issue: ^https://github.com/.*$
safety_relevant: ^(yes|no)$
optional_links:
# req-Id: tool_req__docs_saf_attrs_mitigated_by
# (only mandatory once valid status == valid)
Expand All @@ -782,6 +784,7 @@ needs_types:
optional_options:
# req-Id: tool_req__docs_saf_attrs_mitigation_issue
mitigation_issue: ^https://github.com/.*$
safety_relevant: ^(yes|no)$
mandatory_links:
# req-Id: tool_req__docs_saf_attrs_violates
violates: comp_arc_sta
Expand Down Expand Up @@ -810,6 +813,8 @@ needs_types:
optional_options:
# req-Id: tool_req__docs_saf_attrs_mitigation_issue
mitigation_issue: ^https://github.com/.*$
safety_relevant: ^(yes|no)$
root_cause: ^[\s\S]+$
mandatory_links:
# req-Id: tool_req__docs_saf_attrs_violates
violates: feat_arc_dyn
Expand Down Expand Up @@ -837,6 +842,8 @@ needs_types:
optional_options:
# req-Id: tool_req__docs_saf_attrs_mitigation_issue
mitigation_issue: ^https://github.com/.*$
safety_relevant: ^(yes|no)$
root_cause: ^[\s\S]+$
mandatory_links:
# req-Id: tool_req__docs_saf_attrs_violates
violates: comp_arc_dyn
Expand Down
4 changes: 4 additions & 0 deletions src/extensions/score_metamodel/yaml_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,10 @@ def default_options():
# Introduced with sphinx-needs 6.3.0
"is_import",
"constraints",
# Auto-populated by sphinx-needs when document hierarchy is detected
# (e.g. via needimport placing needs as adjacent siblings in the doctree)
"parent_need",
"parent_needs",
}


Expand Down
Loading