
Finalize coverage schema #75


Merged: 7 commits, Apr 3, 2025
4 changes: 1 addition & 3 deletions MANIFEST.in
@@ -1,5 +1,3 @@
-include p3analysis/data/coverage-0.1.0.schema
-include p3analysis/data/coverage-0.2.0.schema
-include p3analysis/data/coverage-0.3.0.schema
+include p3analysis/data/coverage.schema
 include p3analysis/plot/backend/templates/cascade.tex
 include p3analysis/plot/backend/templates/navchart.tex
12 changes: 6 additions & 6 deletions case-studies/babelstream/coverage.csv

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/source/data.rst
@@ -75,4 +75,4 @@ The JSON string format follows the schema `here`_, and should be considered
 experimental.
 
 .. _here:
-   https://raw.githubusercontent.com/intel/p3-analysis-library/master/p3/data/coverage-0.1.0.schema
+   https://raw.githubusercontent.com/intel/p3-analysis-library/master/p3/data/coverage.schema
4 changes: 2 additions & 2 deletions p3analysis/data/_validation.py
@@ -34,7 +34,7 @@ def _validate_coverage_json(json_string: str) -> object:
 
     instance = json.loads(json_string)
 
-    schema_string = pkgutil.get_data(__name__, "coverage-0.3.0.schema")
+    schema_string = pkgutil.get_data(__name__, "coverage.schema")
     if not schema_string:
         msg = "Could not locate coverage schema file"
         raise RuntimeError(msg)
@@ -47,7 +47,7 @@
         msg = "Coverage string failed schema validation"
         raise ValueError(msg)
     except jsonschema.exceptions.SchemaError:
-        msg = "coverage-0.3.0.schema is not a valid schema"
+        msg = "coverage.schema is not a valid schema"
         raise RuntimeError(msg)
 
     return instance
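The rename is transparent to callers of the existing validation entry point. As a quick illustration (not part of this diff), a minimal sketch using `_validate_coverage_json` from `p3analysis/data/_validation.py`; the file names and ids below are invented for the example:

```python
import json

from p3analysis.data._validation import _validate_coverage_json

# A document matching the finalized schema: integer line numbers only,
# split into used_lines and unused_lines. Values are illustrative.
good = json.dumps(
    [{"file": "main.cpp", "id": "abc123", "used_lines": [1, 2, 3], "unused_lines": [4]}]
)
coverage = _validate_coverage_json(good)  # returns the parsed list on success

# A document in the old "lines" format is missing the new required fields,
# so validation now fails with ValueError.
bad = json.dumps([{"file": "main.cpp", "id": "abc123", "lines": [[1, 3]]}])
try:
    _validate_coverage_json(bad)
except ValueError:
    print("Coverage string failed schema validation")
```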
37 changes: 0 additions & 37 deletions p3analysis/data/coverage-0.1.0.schema

This file was deleted.

41 changes: 0 additions & 41 deletions p3analysis/data/coverage-0.3.0.schema

This file was deleted.

p3analysis/data/coverage-0.2.0.schema → p3analysis/data/coverage.schema (renamed)
@@ -1,6 +1,6 @@
 {
   "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "$id": "https://raw.githubusercontent.com/intel/p3-analysis-library/main/p3/data/coverage-0.2.0.schema",
+  "$id": "https://raw.githubusercontent.com/intel/p3-analysis-library/main/p3/data/coverage.schema",
   "title": "Coverage",
   "description": "Lines of code used in each file of a code base.",
   "type": "array",
@@ -10,31 +10,27 @@
       "file": {
         "type": "string"
       },
-      "path": {
+      "id": {
         "type": "string"
       },
-      "regions": {
+      "used_lines": {
         "type": "array",
         "items": {
-          "type": "array",
-          "prefixItems": [
-            {
-              "type": "integer"
-            },
-            {
-              "type": "integer"
-            },
-            {
-              "type": "integer"
-            }
-          ],
-          "items": false
+          "type": "integer"
         }
       },
+      "unused_lines": {
+        "type": "array",
+        "items": {
+          "type": "integer"
+        }
+      }
     },
     "required": [
       "file",
-      "regions"
+      "id",
+      "used_lines",
+      "unused_lines"
     ]
   }
 }
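For reference, a conforming coverage document under the finalized schema is a JSON array of per-file objects whose used and unused lines are plain integer lists. A minimal sketch, built as a Python literal with illustrative file names and ids:

```python
import json

# One object per file: which lines of that file are used by the application
# and which are not. All values below are illustrative.
coverage = [
    {
        "file": "src/main.cpp",
        "id": "0",
        "used_lines": [1, 2, 3, 7, 8],
        "unused_lines": [4, 5, 6],
    },
]
json_string = json.dumps(coverage)  # this JSON string is what gets validated
```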
12 changes: 2 additions & 10 deletions p3analysis/metrics/_divergence.py
@@ -56,16 +56,8 @@ def _coverage_to_divergence(maps):
     for p, coverage in enumerate(maps):
         for entry in coverage:
             unique_fn = (entry["file"], entry["id"])
-            for region in entry["lines"]:
-                # If a region is a single integer, it represents one line.
-                if isinstance(region, int):
-                    line = region
-                    linemap[(unique_fn, line)].add(p)
-
-                # If a region is a list, it represents a [start, end] pair.
-                if isinstance(region, list):
-                    for line in range(region[0], region[1]):
-                        linemap[(unique_fn, line)].add(p)
+            for line in entry["used_lines"]:
+                linemap[(unique_fn, line)].add(p)
 
     setmap = collections.defaultdict(int)
     for key, platforms in linemap.items():
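With regions gone, the divergence calculation maps each used line directly to the set of platforms that use it. A self-contained sketch of the `linemap` construction shown in this hunk, with invented platform data; the aggregation that follows `linemap` is elided because it is outside the diff:

```python
import collections

# One coverage list per platform, in the finalized schema (values invented).
maps = [
    [{"file": "foo.cpp", "id": "0", "used_lines": [0, 1, 2], "unused_lines": [3]}],
    [{"file": "foo.cpp", "id": "0", "used_lines": [2, 3], "unused_lines": [0, 1]}],
]

# Map each ((file, id), line) pair to the set of platforms using that line,
# exactly as the simplified loop does.
linemap = collections.defaultdict(set)
for p, coverage in enumerate(maps):
    for entry in coverage:
        unique_fn = (entry["file"], entry["id"])
        for line in entry["used_lines"]:
            linemap[(unique_fn, line)].add(p)

# Line 2 is used by both platforms; lines 0, 1 and 3 by only one each.
assert linemap[(("foo.cpp", "0"), 2)] == {0, 1}
```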
8 changes: 4 additions & 4 deletions tests/data/test_validation.py
@@ -13,19 +13,19 @@ class TestValidation(unittest.TestCase):
 
     def test_coverage_json_valid(self):
         """p3analysis.data.validation.coverage_json_valid"""
-        json_string = '[{"file": "path", "id": "sha", "lines": [1, 2, [3, 5]]}]'
+        json_string = '[{"file": "path", "id": "sha", "used_lines": [1, 2, 3, 5], "unused_lines": []}]'
         result_object = _validate_coverage_json(json_string)
-        expected_object = [{"file": "path", "id": "sha", "lines": [1, 2, [3, 5]]}]
+        expected_object = [{"file": "path", "id": "sha", "used_lines": [1, 2, 3, 5], "unused_lines": []}]
         self.assertTrue(result_object == expected_object)
 
     def test_coverage_json_invalid(self):
         """p3analysis.data.validation.coverage_json_invalid"""
-        json_string = '[{"file": "path", "id": "sha", "lines": [["1"]]}]'
+        json_string = '[{"file": "path", "id": "sha", "used_lines": [["1"]], "unused_lines": []}]'
         with self.assertRaises(ValueError):
             _validate_coverage_json(json_string)
 
         with self.assertRaises(TypeError):
-            json_object = [{"file": "path", "id": "sha", "lines": [["1"]]}]
+            json_object = [{"file": "path", "id": "sha", "used_lines": [["1"]], "unused_lines": []}]
             _validate_coverage_json(json_object)
 
 
21 changes: 14 additions & 7 deletions tests/metrics/test_divergence.py
@@ -37,7 +37,8 @@ def test_side_effects(self):
                 {
                     "file": "file.cpp",
                     "id": "0",
-                    "lines": [0],
+                    "used_lines": [0],
+                    "unused_lines": [],
                 },
             ],
         )
@@ -69,7 +70,8 @@ def test_divergence(self):
                 {
                     "file": "foo.cpp",
                     "id": "0",
-                    "lines": [[0, 9]],
+                    "used_lines": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+                    "unused_lines": [],
                 },
             ],
         )
@@ -79,12 +81,14 @@
                 {
                     "file": "foo.cpp",
                     "id": "0",
-                    "lines": [[0, 9]],
+                    "used_lines": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+                    "unused_lines": [],
                 },
                 {
                     "file": "bar.cpp",
                     "id": "1",
-                    "lines": [[0, 9]],
+                    "used_lines": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+                    "unused_lines": [],
                 },
             ],
         )
@@ -123,7 +127,8 @@ def test_divergence_single(self):
                 {
                     "file": "file.cpp",
                     "id": "0",
-                    "lines": [0],
+                    "used_lines": [0],
+                    "unused_lines": [],
                 },
             ],
         )
@@ -156,7 +161,8 @@ def test_divergence_duplicate(self):
                 {
                     "file": "foo.cpp",
                     "id": "0",
-                    "lines": [[0, 9]],
+                    "used_lines": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+                    "unused_lines": [],
                 },
             ],
         )
@@ -168,7 +174,8 @@
                 {
                     "file": "foo.cpp",
                     "id": "1",
-                    "lines": [[0, 9]],
+                    "used_lines": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+                    "unused_lines": [],
                 },
             ],
         )