ci: add linter for gitlab yaml files

- remove duplicated artifacts default values
- migrate check_artifacts_expire_time.py
- migrate check_rules_yml.py
pull/13090/head
Fu Hanxi 2023-12-18 15:42:56 +01:00
parent 96aeead6f5
commit b709c880dd
No known key found for this signature in database
GPG key ID: 19399699CF3C4B16
16 changed files with 190 additions and 281 deletions
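
For context, the "duplicated artifacts default values" mentioned above are the when: always and expire_in settings that this commit moves into the global default: block of .gitlab-ci.yml (added to the default section further down in this diff). The new linter's _lint_default_values_artifacts check then flags any job that still repeats one of those values verbatim. A minimal sketch of the idea, using a hypothetical job name and artifact path:

default:
  artifacts:
    expire_in: 1 week
    when: always

some_build_job:            # hypothetical job, for illustration only
  artifacts:
    when: always           # flagged: identical to the value set in default above
    paths:
      - build/size.json    # hypothetical path
    expire_in: 4 days      # not flagged: differs from the default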

View file

@ -32,8 +32,6 @@
# keep the size info to help track the binary size
- size_info.txt
- "**/build*/size.json"
when: always
expire_in: 4 days
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
@ -376,12 +374,10 @@ build_only_tools_test_apps:
BUILD_LOG_CMAKE: "${LOG_PATH}/cmake_@t_@w.txt"
BUILD_COMMAND_ARGS: ""
artifacts:
when: always
paths:
- log_template_app/*
- size_info.txt
- build_template_app/**/size.json
expire_in: 1 week
script:
# Set the variable for 'esp-idf-template' testing
- ESP_IDF_TEMPLATE_GIT=${ESP_IDF_TEMPLATE_GIT:-"https://github.com/espressif/esp-idf-template.git"}
@ -554,7 +550,6 @@ pytest_build_system:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
@ -571,7 +566,6 @@ pytest_build_system_macos:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml
@ -603,7 +597,6 @@ pytest_build_system_win:
paths:
- XUNIT_RESULT.xml
- test_build_system
when: always
expire_in: 2 days
reports:
junit: XUNIT_RESULT.xml

View file

@ -345,6 +345,9 @@ default:
- *setup_tools_and_idf_python_venv
- add_gitlab_ssh_keys
- fetch_submodules
artifacts:
expire_in: 1 week
when: always
retry:
max: 2
when:

View file

@ -121,7 +121,6 @@ build_docs_html_full:
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@ -135,7 +134,6 @@ build_docs_html_full_prod:
- .doc-rules:build:docs-full-prod
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@ -152,7 +150,6 @@ build_docs_html_partial:
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
@ -175,7 +172,6 @@ build_docs_pdf:
artifacts: false
optional: true
artifacts:
when: always
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
@ -188,7 +184,6 @@ build_docs_pdf_prod:
- .doc-rules:build:docs-full-prod
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
artifacts:
when: always
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
@ -266,11 +261,9 @@ check_doc_links:
artifacts: false
tags: ["build", "amd64", "internet"]
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/linkcheck/*.txt
expire_in: 1 week
allow_failure: true
script:
- cd docs

View file

@ -28,7 +28,6 @@ test_nvs_coverage:
artifacts:
paths:
- components/nvs_flash/test_nvs_host/coverage_report
expire_in: 1 week
script:
- cd components/nvs_flash/test_nvs_host
- make coverage_report
@ -65,7 +64,6 @@ test_reproducible_build:
- "**/build*/*.bin"
- "**/build*/bootloader/*.bin"
- "**/build*/partition_table/*.bin"
expire_in: 1 week
test_spiffs_on_host:
extends: .host_test_template
@ -110,7 +108,6 @@ test_cli_installer:
paths:
- tools/tools.new.json
- tools/test_idf_tools/test_python_env_logs.txt
expire_in: 1 week
image:
name: $ESP_ENV_IMAGE
entrypoint: [""] # use system python3. no extra pip package installed
@ -130,7 +127,6 @@ test_cli_installer:
when: on_failure
paths:
- components/efuse/${IDF_TARGET}/esp_efuse_table.c
expire_in: 1 week
script:
- cd ${IDF_PATH}/components/efuse/
- ./efuse_table_gen.py -t "${IDF_TARGET}" ${IDF_PATH}/components/efuse/${IDF_TARGET}/esp_efuse_table.csv
@ -173,7 +169,6 @@ test_logtrace_proc:
paths:
- tools/esp_app_trace/test/logtrace/output
- tools/esp_app_trace/test/logtrace/.coverage
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/esp_app_trace/test/logtrace
- ./test.sh
@ -185,7 +180,6 @@ test_sysviewtrace_proc:
paths:
- tools/esp_app_trace/test/sysview/output
- tools/esp_app_trace/test/sysview/.coverage
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/esp_app_trace/test/sysview
- ./test.sh
@ -194,13 +188,11 @@ test_tools:
extends:
- .host_test_template
artifacts:
when: always
paths:
- ${IDF_PATH}/*.out
- ${IDF_PATH}/XUNIT_*.xml
reports:
junit: ${IDF_PATH}/XUNIT_*.xml
expire_in: 1 week
variables:
LC_ALL: C.UTF-8
INSTALL_QEMU: 1 # for test_idf_qemu.py
@ -280,13 +272,11 @@ test_pytest_qemu:
- .host_test_template
- .before_script:build
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
allow_failure: true # IDFCI-1752
parallel:
matrix:
@ -316,14 +306,12 @@ test_pytest_linux:
- .host_test_template
- .before_script:build
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
- "**/build*/build_log.txt"
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
script:
- run_cmd python tools/ci/ci_build_apps.py components examples tools/test_apps -vv
--target linux

View file

@ -84,7 +84,6 @@ check_chip_support_components:
paths:
- esp_hw_support_part.h
- bootloader_support_part.h
expire_in: 1 week
script:
- python tools/ci/check_soc_headers_leak.py
- find ${IDF_PATH}/components/soc/*/include/soc/ -name "*_struct.h" -print0 | xargs -0 -n1 ./tools/ci/check_soc_struct_headers.py
@ -98,7 +97,6 @@ check_esp_err_to_name:
when: on_failure
paths:
- components/esp_common/esp_err_to_name.c
expire_in: 1 week
script:
- cd ${IDF_PATH}/tools/
- ./gen_esp_err_to_name.py
@ -122,12 +120,6 @@ check_version_tag:
script:
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
check_artifacts_expire_time:
extends: .pre_check_template
script:
# check if we have set expire time for all artifacts
- python tools/ci/check_artifacts_expire_time.py
check_test_scripts_build_test_rules:
extends:
- .pre_check_template
@ -165,4 +157,3 @@ pipeline_variables:
artifacts:
reports:
dotenv: pipeline.env
expire_in: 4 days

View file

@ -344,9 +344,6 @@
.if-dev-push: &if-dev-push
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
.if-merge_request: &if-merge_request
if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
.if-schedule: &if-schedule
if: '$CI_PIPELINE_SOURCE == "schedule"'
@ -356,9 +353,6 @@
.if-schedule-test-build-system-windows: &if-schedule-test-build-system-windows
if: '$CI_PIPELINE_SOURCE == "schedule" && $SCHEDULED_BUILD_SYSTEM_TEST_WIN == "true"'
.if-trigger: &if-trigger
if: '$CI_PIPELINE_SOURCE == "trigger"'
.if-label-build-only: &if-label-build-only
if: '$CI_JOB_STAGE == "target_test" && $CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*For Maintainers: Only Build Tests(?:,[^,\n\r]+)*$/i'
@ -398,10 +392,6 @@
- <<: *if-protected-no_label
when: always
.rules:mr:
rules:
- <<: *if-merge_request
.rules:tag:release:
rules:
- <<: *if-tag-release
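
The anchors dropped from rules.yml in the hunks above (.if-merge_request, .if-trigger and the .rules:mr rule) appear to be ones that nothing extends any longer. This is what the new linter's _lint_gitlab_yml_rules check enforces: a rule anchor counts as "used" when some entry in the included yaml files lists it under extends, is reported as unused otherwise, and a reference to a rule that rules.yml no longer declares is reported as undefined. A minimal sketch of what a "use" looks like, with a hypothetical job name (.rules:tag:release is an anchor that remains defined above):

release_tag_job:           # hypothetical job, for illustration only
  extends:
    - .rules:tag:release   # this reference is what marks the rule as used
  script:
    - echo "runs only when the rule's conditions match"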

View file

@ -6,8 +6,6 @@ clang_tidy_check:
artifacts:
paths:
- clang_tidy_reports/
when: always
expire_in: 1 day
variables:
IDF_TOOLCHAIN: clang
script:
@ -23,10 +21,8 @@ check_pylint:
needs:
- pipeline_variables
artifacts:
when: always
reports:
codequality: pylint.json
expire_in: 1 week
script:
- |
if [ -n "$CI_MERGE_REQUEST_IID" ]; then
@ -72,10 +68,8 @@ check_pylint:
GIT_DEPTH: 0
REPORT_PATTERN: clang_tidy_reports/**/*.txt
artifacts:
when: always
paths:
- $REPORT_PATTERN
expire_in: 1 week
dependencies: # Here is not a hard dependency relationship, could be skipped when only python files changed. so we do not use "needs" here.
- clang_tidy_check

View file

@ -18,13 +18,11 @@
extends:
- .target_test_template
artifacts:
when: always
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
reports:
junit: XUNIT_RESULT.xml
expire_in: 1 week
script:
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
# get runner env config file

View file

@ -91,20 +91,13 @@ repos:
always_run: true
files: '\.gitlab/CODEOWNERS'
pass_filenames: false
- id: check-rules-yml
name: Check rules.yml all rules have at least one job applied, all rules needed exist
entry: tools/ci/check_rules_yml.py
language: python
files: '\.gitlab/ci/.+\.yml|\.gitlab-ci.yml|\.gitmodules'
pass_filenames: false
additional_dependencies:
- PyYAML == 5.3.1
- id: check-generated-rules
name: Check rules are generated (based on .gitlab/ci/dependencies/dependencies.yml)
entry: tools/ci/generate_rules.py
language: python
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml'
pass_filenames: false
require_serial: true
additional_dependencies:
- PyYAML == 5.3.1
- id: mypy-check
@ -185,6 +178,14 @@ repos:
language: python
always_run: true
require_serial: true
- id: gitlab-yaml-linter
name: Check gitlab yaml files
entry: tools/ci/gitlab_yaml_linter.py
language: python
files: '\.gitlab-ci\.yml|\.gitlab/ci/.+\.yml'
pass_filenames: false
additional_dependencies:
- PyYAML == 5.3.1
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:

View file

@ -1,54 +0,0 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# internal use only
# check if expire time is set for all artifacts
import os
import yaml
IDF_PATH = os.getenv('IDF_PATH')
if not IDF_PATH:
print('Please set IDF_PATH before running this script')
raise SystemExit(-1)
GITLAB_CONFIG_FILE = os.path.join(IDF_PATH, '.gitlab-ci.yml')
def check_artifacts_expire_time() -> None:
with open(GITLAB_CONFIG_FILE, 'r') as f:
config = yaml.load(f, Loader=yaml.FullLoader)
# load files listed in `include`
if 'include' in config:
for _file in config['include']:
with open(os.path.join(IDF_PATH or '', _file)) as f:
config.update(yaml.load(f, Loader=yaml.FullLoader))
print('expire time for jobs:')
errors = []
job_names = list(config.keys())
job_names.sort()
for job_name in job_names:
try:
if 'expire_in' not in config[job_name]['artifacts']:
errors.append(job_name)
else:
print('{}: {}'.format(job_name, config[job_name]['artifacts']['expire_in']))
except (KeyError, TypeError):
# this is not job, or the job does not have artifacts
pass
if errors:
print('\n\nThe following jobs did not set an expire time for their artifacts')
for error in errors:
print(error)
raise SystemExit(-2)
if __name__ == '__main__':
check_artifacts_expire_time()

View file

@ -1,157 +0,0 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
Check whether all rules in rules.yml are used in the CI yaml files.
"""
import argparse
import os
import re
import sys
from copy import deepcopy
from typing import Any, Dict, List, Optional, Set, Union
import yaml
from idf_ci_utils import IDF_PATH
ROOT_YML_FP = os.path.join(IDF_PATH, '.gitlab-ci.yml')
def load_yaml(file_path: str) -> Any:
return yaml.load(open(file_path), Loader=yaml.FullLoader)
class YMLConfig:
def __init__(self, root_yml_file_path: str) -> None:
self._config: Optional[Dict] = None
self._all_extends: Optional[Set] = None
self.root_yml = load_yaml(root_yml_file_path)
assert self.root_yml
@staticmethod
def _list(str_or_list: Union[str, List]) -> List:
if isinstance(str_or_list, str):
return [str_or_list]
if isinstance(str_or_list, list):
return str_or_list
raise ValueError(
'Wrong type: {}. Only supports str or list.'.format(type(str_or_list))
)
@property
def config(self) -> Dict:
if self._config:
return self._config
all_config = dict()
for item in self.root_yml['include']:
all_config.update(load_yaml(os.path.join(IDF_PATH, item)))
self._config = all_config
return self._config
@property
def all_extends(self) -> Set:
if self._all_extends:
return self._all_extends
res = set([])
for v in self.config.values():
if 'extends' in v:
for item in self._list(v['extends']):
if item.startswith('.rules:'):
res.add(item)
self._all_extends = res
return self._all_extends
def exists(self, key: str) -> bool:
if key in self.all_extends:
return True
return False
YML_CONFIG = YMLConfig(ROOT_YML_FP)
def get_needed_rules() -> Set[str]:
return deepcopy(YML_CONFIG.all_extends)
def validate_needed_rules(rules_yml: 'os.PathLike[str]') -> int:
res = 0
needed_rules = deepcopy(YML_CONFIG.all_extends)
with open(rules_yml) as fr:
for index, line in enumerate(fr):
if line.startswith('.rules:'):
key = line.strip().rsplit(':', 1)[0]
if not YML_CONFIG.exists(key):
print(
'{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key)
)
else:
needed_rules.remove(key)
if needed_rules:
for item in needed_rules:
print('ERROR: missing rule: "{}"'.format(item))
res = 1
if res == 0:
print('Pass')
return res
def parse_submodule_paths(
gitsubmodules: str = os.path.join(IDF_PATH, '.gitmodules')
) -> List[str]:
path_regex = re.compile(r'^\s+path = (.+)$', re.MULTILINE)
with open(gitsubmodules, 'r') as f:
data = f.read()
res = []
for item in path_regex.finditer(data):
res.append(item.group(1))
return res
def validate_submodule_patterns() -> int:
submodule_paths = sorted(['.gitmodules'] + parse_submodule_paths())
submodule_paths_in_patterns = sorted(
YML_CONFIG.config.get('.patterns-submodule', [])
)
res = 0
if submodule_paths != submodule_paths_in_patterns:
res = 1
print('please update the pattern ".patterns-submodule"')
should_remove = set(submodule_paths_in_patterns) - set(submodule_paths)
if should_remove:
print(f'- should remove: {should_remove}')
should_add = set(submodule_paths) - set(submodule_paths_in_patterns)
if should_add:
print(f'- should add: {should_add}')
return res
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'rules_yml',
nargs='?',
default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
help='rules.yml file path',
)
args = parser.parse_args()
exit_code = 0
if validate_needed_rules(args.rules_yml):
exit_code = 1
if validate_submodule_patterns():
exit_code = 1
sys.exit(exit_code)

View file

@ -40,3 +40,4 @@ tools/templates/sample_component/main.c
tools/ci/cleanup_ignore_lists.py
tools/ci/artifacts_handler.py
tools/unit-test-app/**/*
tools/ci/gitlab_yaml_linter.py

View file

@ -62,7 +62,6 @@ tools/ci/check_kconfigs.py
tools/ci/check_readme_links.py
tools/ci/check_requirement_files.py
tools/ci/check_rules_components_patterns.py
tools/ci/check_rules_yml.py
tools/ci/check_soc_struct_headers.py
tools/ci/check_tools_files_patterns.py
tools/ci/check_type_comments.py
@ -74,6 +73,7 @@ tools/ci/fix_empty_prototypes.sh
tools/ci/generate_rules.py
tools/ci/get-full-sources.sh
tools/ci/get_supported_examples.sh
tools/ci/gitlab_yaml_linter.py
tools/ci/mirror-submodule-update.sh
tools/ci/multirun_with_pyenv.sh
tools/ci/push_to_github.sh

View file

@ -1,6 +1,6 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
@ -11,8 +11,7 @@ from collections import defaultdict
from itertools import product
import yaml
from check_rules_yml import get_needed_rules
from idf_ci_utils import IDF_PATH
from idf_ci_utils import IDF_PATH, GitlabYmlConfig
try:
import pygraphviz as pgv
@ -100,6 +99,7 @@ class RulesWriter:
self.cfg = self.expand_matrices()
self.rules = self.expand_rules()
self.yml_config = GitlabYmlConfig()
self.graph = None
def expand_matrices(self): # type: () -> dict
@ -201,7 +201,7 @@ class RulesWriter:
def new_rules_str(self): # type: () -> str
res = []
for k, v in sorted(self.rules.items()):
if '.rules:' + k not in get_needed_rules():
if '.rules:' + k not in self.yml_config.used_rules:
print(f'WARNING: unused rule: {k}, skipping...')
continue
res.append(self.RULES_TEMPLATE.format(k, self._format_rule(k, v)))

View file

@ -0,0 +1,100 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
Check gitlab ci yaml files
"""
import argparse
import os
import typing as t
from functools import cached_property
from idf_ci_utils import IDF_PATH, GitlabYmlConfig, get_submodule_dirs
class YmlLinter:
def __init__(self, yml_config: GitlabYmlConfig) -> None:
self.yml_config = yml_config
self._errors: t.List[str] = []
@cached_property
def lint_functions(self) -> t.List[str]:
funcs = []
for func in dir(self):
if func.startswith('_lint_'):
funcs.append(func)
return funcs
def lint(self) -> None:
exit_code = 0
for func in self.lint_functions:
getattr(self, func)()
if self._errors:
print(f'Errors found while running {func}:')
exit_code = 1
print('\t- ' + '\n\t- '.join(self._errors))
self._errors = [] # reset
exit(exit_code)
# name it like _1_ to make it run first
def _lint_1_yml_parser(self) -> None:
for k, v in self.yml_config.config.items():
if (
k not in self.yml_config.global_keys
and k not in self.yml_config.anchors
and k not in self.yml_config.jobs
):
raise SystemExit(f'Parser incorrect. Key {k} not in global keys, rules or jobs')
def _lint_default_values_artifacts(self) -> None:
defaults_artifacts = self.yml_config.default.get('artifacts', {})
for job_name, d in self.yml_config.jobs.items():
for k, v in d.get('artifacts', {}).items():
if k not in defaults_artifacts:
continue
if v == defaults_artifacts[k]:
self._errors.append(f'job {job_name} key {k} has same value as default value {v}')
def _lint_submodule_patterns(self) -> None:
submodule_paths = sorted(['.gitmodules'] + get_submodule_dirs())
submodule_paths_in_patterns = sorted(self.yml_config.config.get('.patterns-submodule', []))
if submodule_paths != submodule_paths_in_patterns:
unused_patterns = set(submodule_paths_in_patterns) - set(submodule_paths)
if unused_patterns:
for item in unused_patterns:
self._errors.append(f'non-existent pattern {item}. Please remove {item} from .patterns-submodule')
undefined_patterns = set(submodule_paths) - set(submodule_paths_in_patterns)
if undefined_patterns:
for item in undefined_patterns:
self._errors.append(f'undefined pattern {item}. Please add {item} to .patterns-submodule')
def _lint_gitlab_yml_rules(self) -> None:
unused_rules = self.yml_config.rules - self.yml_config.used_rules
for item in unused_rules:
self._errors.append(f'Unused rule: {item}, please remove it')
undefined_rules = self.yml_config.used_rules - self.yml_config.rules
for item in undefined_rules:
self._errors.append(f'Undefined rule: {item}')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--root-yml-filepath', help='root yml file path', default=os.path.join(IDF_PATH, '.gitlab-ci.yml')
)
args = parser.parse_args()
config = GitlabYmlConfig(args.root_yml_filepath)
linter = YmlLinter(config)
linter.lint()
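
The _lint_submodule_patterns check above carries over the submodule validation from the deleted check_rules_yml.py: it compares the paths declared in .gitmodules (plus .gitmodules itself) with the .patterns-submodule list kept in the CI yaml files, and reports entries missing from either side. A minimal sketch of the shape that list is expected to have; the submodule path shown is only an illustrative example:

.patterns-submodule:
  - ".gitmodules"
  - "components/esp_wifi/lib"   # illustrative entry; must correspond to a "path = ..." line in .gitmodules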

View file

@ -8,12 +8,13 @@ import logging
import os
import subprocess
import sys
from typing import Any, List
import typing as t
from functools import cached_property
IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
def get_submodule_dirs(full_path: bool = False) -> List[str]:
def get_submodule_dirs(full_path: bool = False) -> t.List[str]:
"""
To avoid issue could be introduced by multi-os or additional dependency,
we use python and git to get this output
@ -71,7 +72,7 @@ def is_executable(full_path: str) -> bool:
return os.access(full_path, os.X_OK)
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> List[str]:
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> t.List[str]:
"""
Get the result of git ls-files
:param path: path to run git ls-files
@ -102,7 +103,10 @@ def is_in_directory(file_path: str, folder: str) -> bool:
return os.path.realpath(file_path).startswith(os.path.realpath(folder) + os.sep)
def to_list(s: Any) -> List[Any]:
def to_list(s: t.Any) -> t.List[t.Any]:
if not s:
return []
if isinstance(s, (set, tuple)):
return list(s)
@ -110,3 +114,67 @@ def to_list(s: Any) -> List[Any]:
return s
return [s]
class GitlabYmlConfig:
def __init__(self, root_yml_filepath: str = os.path.join(IDF_PATH, '.gitlab-ci.yml')) -> None:
self._config: t.Dict[str, t.Any] = {}
self._defaults: t.Dict[str, t.Any] = {}
self._load(root_yml_filepath)
def _load(self, root_yml_filepath: str) -> None:
# avoid unused import in other pre-commit hooks
import yaml
all_config = dict()
root_yml = yaml.load(open(root_yml_filepath), Loader=yaml.FullLoader)
for item in root_yml['include']:
all_config.update(yaml.load(open(os.path.join(IDF_PATH, item)), Loader=yaml.FullLoader))
if 'default' in all_config:
self._defaults = all_config.pop('default')
self._config = all_config
@property
def default(self) -> t.Dict[str, t.Any]:
return self._defaults
@property
def config(self) -> t.Dict[str, t.Any]:
return self._config
@cached_property
def global_keys(self) -> t.List[str]:
return ['default', 'include', 'workflow', 'variables', 'stages']
@cached_property
def anchors(self) -> t.Dict[str, t.Any]:
return {k: v for k, v in self.config.items() if k.startswith('.')}
@cached_property
def jobs(self) -> t.Dict[str, t.Any]:
return {k: v for k, v in self.config.items() if not k.startswith('.') and k not in self.global_keys}
@cached_property
def rules(self) -> t.Set[str]:
return {k for k, _ in self.anchors.items() if self._is_rule_key(k)}
@cached_property
def used_rules(self) -> t.Set[str]:
res = set()
for v in self.config.values():
if not isinstance(v, dict):
continue
for item in to_list(v.get('extends')):
if self._is_rule_key(item):
res.add(item)
return res
@staticmethod
def _is_rule_key(key: str) -> bool:
return key.startswith('.rules:') or key.endswith('template')