RULEAPI-753: Use modern ids and coalesce the coverage for legacy ids

parent 9cb2845112
commit c475f0d6de

.github/workflows/update_coverage.yml (vendored), 2 changed lines
@@ -33,7 +33,7 @@ jobs:
         id: gen-coverage
         working-directory: 'rspec/rspec-tools'
         run: |
-          pipenv run rspec-tools update-coverage
+          pipenv run rspec-tools update-coverage --rulesdir ../rules
           mv ./covered_rules.json ../frontend/public/covered_rules.json
           if git diff --exit-code ../frontend/public/covered_rules.json; then
             echo "::set-output name=new_coverage::false"
@@ -120,17 +120,18 @@ def check_description(d, rules):


 @cli.command()
+@click.option('--rulesdir', required=True)
 @click.option('--repository', required=False)
 @click.option('--version', required=False)
-def update_coverage(repository: Optional[str], version: Optional[str]):
+def update_coverage(rulesdir: str, repository: Optional[str], version: Optional[str]):
     '''Update rule coverage by adding rules implemented in the {version} of {repository}.'''
     if repository is None:
-        update_coverage_for_all_repos()
+        update_coverage_for_all_repos(Path(rulesdir))
     elif version is None:
-        update_coverage_for_repo(repository)
+        update_coverage_for_repo(repository, Path(rulesdir))
     else:
-        update_coverage_for_repo_version(repository, version)
+        update_coverage_for_repo_version(repository, version, Path(rulesdir))


 @cli.command()
 @click.option('--message', required=True)
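A minimal sketch (not from this diff) of exercising the new required option through click's CliRunner; it assumes the rspec-tools click group is importable as `cli` from rspec_tools.cli and uses the update-coverage command name seen in the workflow step above.

# Sketch only: drive the update-coverage command with and without --rulesdir.
from click.testing import CliRunner
from rspec_tools.cli import cli  # assumed import path for the click group

runner = CliRunner()

# --rulesdir is now required, so omitting it makes click reject the invocation.
result = runner.invoke(cli, ['update-coverage', '--repository', 'SonarJS'])
assert result.exit_code != 0

# With --rulesdir the value is wrapped in a Path and forwarded to
# update_coverage_for_repo(repository, Path(rulesdir)).
# (A real run here would clone the SonarJS repository, as the CI workflow does.)
result = runner.invoke(cli, ['update-coverage', '--rulesdir', '../rules',
                             '--repository', 'SonarJS'])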
@@ -58,10 +58,34 @@ def canonicalize(language):
         return CANONICAL_NAMES[language]
     return language

+def read_all_alternative_keys(metadata):
+    ret = []
+    if 'sqKey' in metadata:
+        ret.append(metadata['sqKey'])
+    if 'ruleSpecification' in metadata:
+        ret.append(metadata['ruleSpecification'])
+    if 'extra' in metadata and 'legacyKeys' in metadata['extra']:
+        ret.extend(metadata['extra']['legacyKeys'])
+    return ret
+
+
+def read_canonical_rule_ids(rules_dir):
+    '''
+    Map all the keys identifying a rule to its modern key (which is also its directory name).
+    '''
+    print('Collecting the rule-id synonyms from ' + str(rules_dir))
+    canonical_id = {}
+    rule_dirs = [entry for entry in os.scandir(rules_dir) if entry.is_dir()]
+    for rule_dir in rule_dirs:
+        for metadata_path in Path(rule_dir).rglob('metadata.json'):
+            for alternative_key in read_all_alternative_keys(load_json(metadata_path)):
+                canonical_id[alternative_key] = rule_dir.name
+    return canonical_id

 class Coverage:
     '''Keep and update the coverage DB: lang*rule_id -> analyzer version'''
-    def __init__(self, filename):
+    def __init__(self, filename, rules_dir):
         self.rules = {}
+        self.canonical_ids = read_canonical_rule_ids(rules_dir)
         if os.path.exists(filename):
             self.rules = load_json(filename)
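A small self-contained sketch (not from this diff) of what the new helpers compute. It builds a throwaway rules tree shaped like the RSPEC repository (rules/<id>/<language>/metadata.json) and assumes read_canonical_rule_ids is importable from rspec_tools.coverage.

# Sketch: a legacy key declared in metadata.json maps back to the modern rule id.
import json
import tempfile
from pathlib import Path

from rspec_tools.coverage import read_canonical_rule_ids  # added by this commit

with tempfile.TemporaryDirectory() as tmp:
    rule_lang_dir = Path(tmp) / 'S100' / 'java'
    rule_lang_dir.mkdir(parents=True)
    (rule_lang_dir / 'metadata.json').write_text(json.dumps({
        'sqKey': 'S100',
        'extra': {'legacyKeys': ['MethodName']},
    }))
    mapping = read_canonical_rule_ids(Path(tmp))
    # Both the modern key and the legacy key resolve to the rule's directory name.
    assert mapping == {'S100': 'S100', 'MethodName': 'S100'}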
@@ -86,6 +110,8 @@ class Coverage:
     def rule_implemented(self, rule_id, language, analyzer, version):
         repo_and_version = analyzer + ' ' + version
         language = canonicalize(language)
+        if rule_id in self.canonical_ids:
+            rule_id = self.canonical_ids[rule_id]

         if language not in self.rules:
             print(f"Create entry for {language}")
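The two added lines are just a dictionary lookup before anything is recorded. A standalone sketch of the coalescing step (plain Python, not the real Coverage class):

# Sketch: coverage reported under a legacy id is stored under the modern id.
canonical_ids = {'MethodName': 'S100'}   # as built by read_canonical_rule_ids
covered = {}                             # language -> rule id -> analyzer version

def record(rule_id, language, version):
    if rule_id in canonical_ids:         # legacy id? switch to the modern one
        rule_id = canonical_ids[rule_id]
    covered.setdefault(language, {})[rule_id] = version

record('MethodName', 'JAVASCRIPT', 'SonarJS 3.3.0.5702')
record('S100', 'JAVASCRIPT', 'SonarJS 5.0.0.6962')
# Both calls land on the same entry, keyed by the modern id only.
assert list(covered['JAVASCRIPT']) == ['S100']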
@@ -155,22 +181,22 @@ def collect_coverage_for_version(repo_name, git_repo, version, coverage):
         print(f"{repo_name} {version} checkout failed: {e}")
         raise

-def update_coverage_for_all_repos():
+def update_coverage_for_all_repos(rules_dir):
     print(f"batch mode for {REPOS}")
-    coverage = Coverage(RULES_FILENAME)
+    coverage = Coverage(RULES_FILENAME, rules_dir)
     for repo in REPOS:
         collect_coverage_for_all_versions(repo, coverage)
     coverage.save_to_file(RULES_FILENAME)

-def update_coverage_for_repo(repo):
+def update_coverage_for_repo(repo, rules_dir):
     print(f"batch mode for {repo}")
-    coverage = Coverage(RULES_FILENAME)
+    coverage = Coverage(RULES_FILENAME, rules_dir)
     collect_coverage_for_all_versions(repo, coverage)
     coverage.save_to_file(RULES_FILENAME)

-def update_coverage_for_repo_version(repo, version):
+def update_coverage_for_repo_version(repo, version, rules_dir):
     print(f"checking {repo} version {version}")
-    coverage = Coverage(RULES_FILENAME)
+    coverage = Coverage(RULES_FILENAME, rules_dir)
     git_repo = checkout_repo(repo)
     collect_coverage_for_version(repo, git_repo, version, coverage)
     coverage.save_to_file(RULES_FILENAME)
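All three entry points now thread the rules directory into Coverage. A hedged sketch of using the class directly, assuming it is run from the RSPEC repository root, where rules/S100 declares MethodName under extra.legacyKeys and covered_rules.json may not exist yet:

# Sketch: Coverage now takes the rules directory so it can coalesce legacy ids.
from pathlib import Path

from rspec_tools.coverage import Coverage

cov = Coverage('covered_rules.json', Path('rules'))
cov.rule_implemented('MethodName', 'JAVASCRIPT', 'SonarJS', '3.3.0.5702')
# The rule is recorded under its modern id rather than the legacy one.
assert 'S100' in cov.rules['JAVASCRIPT']
assert 'MethodName' not in cov.rules['JAVASCRIPT']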
@@ -12,6 +12,9 @@
   "extra": {
     "replacementRules": [

+    ],
+    "legacyKeys": [
+      "MethodName"
     ]
   },
   "securityStandards": {
@@ -5,6 +5,7 @@ from git import Repo
 from pathlib import Path
 from datetime import datetime
 from unittest.mock import (patch, PropertyMock)
+from contextlib import contextmanager

 from rspec_tools.coverage import (update_coverage_for_all_repos,
                                   update_coverage_for_repo,
@@ -19,6 +20,10 @@ def clear_working_dir(repo_dir):
     else:
         os.remove(repo_dir / f)

+@pytest.fixture
+def rules_dir():
+    return Path(__file__).parent.joinpath('resources', 'rules')
+
 JSTS_SONARPEDIA='{"rules-metadata-path": "rules", "languages":["JS", "TS"]}'
 MOCK_REPOS=[{'name':'SonarJS',
              'versions': [
@@ -26,7 +31,8 @@ MOCK_REPOS=[{'name':'SonarJS',
               'date': '2020-03-03 10:00:00',
               'files': [['sonarpedia.json', JSTS_SONARPEDIA],
                         ['rules/Sonar_way_profile.json', '{}'],
-                        ['rules/S100.json', '{}'], ['rules/S1145.json', '{}'],
+                        # MethodName is a legacy key for S100
+                        ['rules/MethodName.json', '{}'], ['rules/S1145.json', '{}'],
                         # not in the rules directory, so not a rule:
                         ['S200.json', '{}']]},
              {'name': '5.0.0.6962',
@@ -104,26 +110,27 @@ def mock_git_analyzer_repos(tmpdir):
     mock.clone_from=PropertyMock(side_effect=mock_clone_repo)
     return mock

-def test_update_coverage_for_repo_version(tmpdir, mock_git_analyzer_repos):
+def test_update_coverage_for_repo_version(tmpdir, rules_dir: Path, mock_git_analyzer_repos):
     with pushd(tmpdir), patch('rspec_tools.coverage.Repo', mock_git_analyzer_repos):
         VER = '3.3.0.5702'
         REPO = 'SonarJS'
-        update_coverage_for_repo_version(REPO, VER)
+        update_coverage_for_repo_version(REPO, VER, rules_dir)
         coverage = tmpdir.join('covered_rules.json')
         assert coverage.exists()
         cov = load_json(coverage)
         assert 'JAVASCRIPT' in cov
         assert 'S100' in cov['JAVASCRIPT']
+        assert 'MethodName' not in cov['JAVASCRIPT'] # MethodName is a legacy key for S100
         assert 'S200' not in cov['JAVASCRIPT'] # S200.json is not in the rules directory in mock
         assert cov['JAVASCRIPT']['S100'] == {'since': REPO + ' ' + VER, 'until': REPO + ' ' + VER}

         # Running it again changes nothing
-        update_coverage_for_repo_version(REPO, VER)
+        update_coverage_for_repo_version(REPO, VER, rules_dir)
         assert cov == load_json(coverage)

         # Running it for a newer version doesn't change when the rules are first implemented
         VER2 = '5.0.0.6962'
-        update_coverage_for_repo_version(REPO, VER2)
+        update_coverage_for_repo_version(REPO, VER2, rules_dir)
         cov_new = load_json(coverage)
         assert set(cov['JAVASCRIPT'].keys()).issubset(set(cov_new['JAVASCRIPT'].keys()))
         assert cov_new['JAVASCRIPT']['S100']['since'] == REPO + ' ' + VER
@@ -132,49 +139,51 @@ def test_update_coverage_for_repo_version(tmpdir, mock_git_analyzer_repos):
         assert cov_new['JAVASCRIPT']['S1192']['until'] == REPO + ' ' + VER2

         # For rules supported on master only the 'since' part is kept
-        update_coverage_for_repo_version(REPO, 'master')
+        update_coverage_for_repo_version(REPO, 'master', rules_dir)
         assert load_json(coverage)['JAVASCRIPT']['S100'] == REPO + ' ' + VER


-def test_update_coverage_for_repo(tmpdir, mock_git_analyzer_repos):
+def test_update_coverage_for_repo(tmpdir, rules_dir: Path, mock_git_analyzer_repos):
     with pushd(tmpdir), patch('rspec_tools.coverage.Repo', mock_git_analyzer_repos):
         REPO = 'SonarJS'
-        update_coverage_for_repo(REPO)
+        update_coverage_for_repo(REPO, rules_dir)
         coverage = tmpdir.join('covered_rules.json')
         assert coverage.exists()
         cov = load_json(coverage)
         assert 'JAVASCRIPT' in cov
         assert 'TYPESCRIPT' in cov
         assert 'S100' in cov['JAVASCRIPT']
+        assert 'MethodName' not in cov['JAVASCRIPT'] # MethodName is a legacy key for S100
         assert cov['JAVASCRIPT']['S100'] == REPO + ' 3.3.0.5702'
         assert 'S1145' in cov['JAVASCRIPT']
         assert cov['JAVASCRIPT']['S1145'] == {'since': REPO + ' 3.3.0.5702', 'until': REPO + ' 6.7.0.14237'}


 @patch('rspec_tools.coverage.REPOS', ['SonarJS', 'sonar-xml'])
-def test_update_coverage_for_all_repos(tmpdir, mock_git_analyzer_repos):
+def test_update_coverage_for_all_repos(tmpdir, rules_dir: Path, mock_git_analyzer_repos):
     with pushd(tmpdir), patch('rspec_tools.coverage.Repo', mock_git_analyzer_repos):
-        update_coverage_for_all_repos()
+        update_coverage_for_all_repos(rules_dir)
         coverage = tmpdir.join('covered_rules.json')
         assert coverage.exists()
         cov = load_json(coverage)
         assert {'JAVASCRIPT', 'TYPESCRIPT', 'XML', 'CSS'} == set(cov.keys())
         assert 'S100' in cov['JAVASCRIPT']
+        assert 'MethodName' not in cov['JAVASCRIPT'] # MethodName is a legacy key for S100
         assert {'S100'} == set(cov['CSS'].keys())
         assert {'S103', 'S1000'} == set(cov['XML'].keys())
         assert cov['XML']['S1000'] == 'SonarJS 7.0.0.14528'

-def test_update_coverage_no_sonarpedia(tmpdir, mock_git_analyzer_repos, capsys):
+def test_update_coverage_no_sonarpedia(tmpdir, rules_dir: Path, mock_git_analyzer_repos, capsys):
     with pushd(tmpdir), patch('rspec_tools.coverage.Repo', mock_git_analyzer_repos):
-        update_coverage_for_repo_version('broken', 'v1')
+        update_coverage_for_repo_version('broken', 'v1', rules_dir)
         assert 'failed to collect implemented rules for' in capsys.readouterr().out
         coverage = tmpdir.join('covered_rules.json')
         assert coverage.exists()
         cov = load_json(coverage)
         assert cov == {}

-def test_update_coverage_nonexisting_versio(tmpdir, mock_git_analyzer_repos, capsys):
+def test_update_coverage_nonexisting_versio(tmpdir, rules_dir: Path, mock_git_analyzer_repos, capsys):
     with pushd(tmpdir), patch('rspec_tools.coverage.Repo', mock_git_analyzer_repos):
         with pytest.raises(Exception):
-            update_coverage_for_repo_version('broken', 'non-existing')
+            update_coverage_for_repo_version('broken', 'non-existing', rules_dir)
         assert 'checkout failed' in capsys.readouterr().out