Merge branch 'ci/cache_submodules' into 'master'

CI: cache submodules

Closes IDFCI-1158

See merge request espressif/esp-idf!19009
pull/9408/head
Fu Hanxi 2022-07-14 20:34:34 +08:00
commit 9653af8d04
9 changed files with 147 additions and 39 deletions

.gitignore vendored (3 changes)
View file

@@ -21,6 +21,9 @@ GPATH
# MacOS directory files
.DS_Store
# cache dir
.cache/
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/

View file

@@ -101,12 +101,16 @@ variables:
CI_PYTHON_TOOL_BRANCH: ""
cache:
key: pip-cache
paths:
- .cache/pip
# pull only for most of the use cases since it's cache dir.
# Only set "push" policy for "upload_cache" stage jobs since it would install all pypi packages
policy: pull
# Only set "push" policy for "upload_cache" stage jobs
- key: pip-cache
paths:
- .cache/pip
policy: pull
- key: submodule-cache
paths:
- .cache/submodule_archives
policy: pull
.setup_tools_unless_target_test: &setup_tools_unless_target_test |
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then

View file

@@ -5,11 +5,14 @@
tags:
- host_test
dependencies: []
# run host_test jobs immediately, only after upload cache
needs:
- job: upload-pip-cache-shiny
optional: true # run host_test jobs immediately, only after upload cache
- job: upload-pip-cache-brew
optional: true # run host_test jobs immediately, only after upload cache
- job: upload-pip-cache
optional: true
artifacts: false
- job: upload-submodules-cache
optional: true
artifacts: false
test_nvs_on_host:
extends: .host_test_template

View file

@@ -142,7 +142,6 @@
- "tools/split_paths_by_spaces.py"
.patterns-windows: &patterns-windows
- "tools/windows/**/*"
@@ -150,7 +149,6 @@
- "tools/docker/**/*"
.patterns-submodule: &patterns-submodule
- "components/asio/asio"
- "components/bootloader/subproject/components/micro-ecc/micro-ecc"
- "components/bt/controller/lib_esp32"
- "components/bt/controller/lib_esp32c2/esp32c2-bt-lib"
@@ -172,6 +170,7 @@
- "components/tinyusb/tinyusb"
- "components/unity/unity"
- "examples/peripherals/secure_element/atecc608_ecdsa/components/esp-cryptoauthlib"
- ".gitmodules"
.patterns-example_test-related_changes-ota: &patterns-example_test-related_changes-ota
- "examples/system/ota/**/*"
@@ -248,6 +247,11 @@
- <<: *if-dev-push
changes: *patterns-python-cache
.rules:patterns:submodule:
rules:
- <<: *if-dev-push
changes: *patterns-submodule
.rules:dev:
rules:
- <<: *if-trigger

View file

@@ -1,17 +1,22 @@
# "pull" only for most use cases of the cache
# set the "push" policy only for the jobs in this file.
# The cache is updated when files matching the specified patterns change.
.upload_cache_template:
stage: upload_cache
image: $ESP_ENV_IMAGE
.upload_pip_cache_template:
upload-pip-cache:
extends:
- .upload_cache_template
- .rules:patterns:python-cache
tags:
- $GEO
- build
cache:
key: pip-cache
paths:
- .cache/pip
# pull only for most of the use cases since it's cache dir.
# Only set "push" policy for "upload_cache" stage jobs since it would install all pypi packages
policy: push
before_script: []
script:
@@ -22,15 +27,29 @@
- $IDF_PATH/tools/idf_tools.py install-python-env --features pytest
# TODO: remove this, IDFCI-1207
- pip install esptool -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
parallel:
matrix:
- GEO: [ 'shiny', 'brew' ]
upload-pip-cache-shiny:
extends: .upload_pip_cache_template
upload-submodules-cache:
extends:
- .upload_cache_template
- .rules:patterns:submodule
tags:
- shiny
- build
upload-pip-cache-brew:
extends: .upload_pip_cache_template
tags:
- brew
- $GEO
- build
cache:
key: submodule-cache
paths:
- .cache/submodule_archives
policy: push
before_script: []
script:
- source tools/ci/utils.sh
- is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
- source tools/ci/setup_python.sh
- rm -rf .cache/submodule_archives # clear old submodule archives
- fetch_submodules
parallel:
matrix:
- GEO: [ 'shiny', 'brew' ]

.gitmodules vendored (2 changes)
View file

@@ -5,7 +5,7 @@
[submodule "components/bt/controller/lib_esp32"]
path = components/bt/controller/lib_esp32
url = ../../espressif/esp32-bt-lib.git
url = ../../espressif/esp32-bt-lib.git
[submodule "components/bootloader/subproject/components/micro-ecc/micro-ecc"]
path = components/bootloader/subproject/components/micro-ecc/micro-ecc

View file

@@ -9,8 +9,10 @@ Check if all rules in rules.yml used or not in CI yaml files.
import argparse
import os
import re
import sys
from copy import deepcopy
from typing import List
import yaml
from idf_ci_utils import IDF_PATH
@@ -36,7 +38,9 @@ class YMLConfig:
return [str_or_list]
if isinstance(str_or_list, list):
return str_or_list
raise ValueError('Wrong type: {}. Only supports str or list.'.format(type(str_or_list)))
raise ValueError(
'Wrong type: {}. Only supports str or list.'.format(type(str_or_list))
)
@property
def config(self):
@@ -45,8 +49,7 @@ class YMLConfig:
all_config = dict()
for item in self.root_yml['include']:
if not item.endswith('rules.yml'):
all_config.update(load_yaml(os.path.join(IDF_PATH, item)))
all_config.update(load_yaml(os.path.join(IDF_PATH, item)))
self._config = all_config
return self._config
@@ -70,16 +73,20 @@ class YMLConfig:
return False
def validate(rules_yml):
yml_config = YMLConfig(ROOT_YML_FP)
YML_CONFIG = YMLConfig(ROOT_YML_FP)
def validate_needed_rules(rules_yml):
res = 0
needed_rules = deepcopy(yml_config.all_extends)
needed_rules = deepcopy(YML_CONFIG.all_extends)
with open(rules_yml) as fr:
for index, line in enumerate(fr):
if line.startswith('.rules:'):
key = line.strip().rsplit(':', 1)[0]
if not yml_config.exists(key):
print('{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key))
if not YML_CONFIG.exists(key):
print(
'{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key)
)
else:
needed_rules.remove(key)
@@ -93,10 +100,54 @@ def validate(rules_yml):
return res
def parse_submodule_paths(
gitsubmodules: str = os.path.join(IDF_PATH, '.gitmodules')
) -> List[str]:
path_regex = re.compile(r'^\s+path = (.+)$', re.MULTILINE)
with open(gitsubmodules, 'r') as f:
data = f.read()
res = []
for item in path_regex.finditer(data):
res.append(item.group(1))
return res
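For illustration, the following self-contained sketch applies the same regex as parse_submodule_paths() to a made-up .gitmodules fragment; the two submodule entries and their URLs are examples only, not the full list:

import re

# Illustrative .gitmodules fragment (entries and URLs are examples only)
GITMODULES_SNIPPET = '''\
[submodule "components/unity/unity"]
    path = components/unity/unity
    url = ../../ThrowTheSwitch/Unity.git
[submodule "components/asio/asio"]
    path = components/asio/asio
    url = ../../espressif/asio.git
'''

# Same pattern as in parse_submodule_paths(): capture the value of every "path = ..." line
path_regex = re.compile(r'^\s+path = (.+)$', re.MULTILINE)

print(path_regex.findall(GITMODULES_SNIPPET))
# -> ['components/unity/unity', 'components/asio/asio']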
def validate_submodule_patterns():
submodule_paths = sorted(['.gitmodules'] + parse_submodule_paths())
submodule_paths_in_patterns = sorted(
YML_CONFIG.config.get('.patterns-submodule', [])
)
res = 0
if submodule_paths != submodule_paths_in_patterns:
res = 1
print('please update the pattern ".patterns-submodule"')
should_remove = set(submodule_paths_in_patterns) - set(submodule_paths)
if should_remove:
print(f'- should remove: {should_remove}')
should_add = set(submodule_paths) - set(submodule_paths_in_patterns)
if should_add:
print(f'- should add: {should_add}')
return res
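To make the reporting concrete, here is a small worked example of the set comparison used by validate_submodule_patterns(), with hypothetical path lists (the *_lib names are invented):

# Hypothetical inputs, purely to illustrate the messages printed above
submodule_paths = sorted(['.gitmodules', 'components/unity/unity', 'components/new_lib/new_lib'])
submodule_paths_in_patterns = sorted(['.gitmodules', 'components/unity/unity', 'components/old_lib/old_lib'])

if submodule_paths != submodule_paths_in_patterns:
    print('please update the pattern ".patterns-submodule"')
    should_remove = set(submodule_paths_in_patterns) - set(submodule_paths)
    if should_remove:
        print(f'- should remove: {should_remove}')  # {'components/old_lib/old_lib'}
    should_add = set(submodule_paths) - set(submodule_paths_in_patterns)
    if should_add:
        print(f'- should add: {should_add}')  # {'components/new_lib/new_lib'}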
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('rules_yml', nargs='?', default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
help='rules.yml file path')
parser.add_argument(
'rules_yml',
nargs='?',
default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
help='rules.yml file path',
)
args = parser.parse_args()
sys.exit(validate(args.rules_yml))
exit_code = 0
if validate_needed_rules(args.rules_yml):
exit_code = 1
if validate_submodule_patterns():
exit_code = 1
sys.exit(exit_code)

View file

@@ -19,6 +19,8 @@ PATH_PATTERN = re.compile(r'path\s+=\s+(\S+)')
URL_PATTERN = re.compile(r'url\s+=\s+(\S+)')
SUBMODULE_ARCHIVE_TEMP_FOLDER = 'submodule_archive'
# need to match the one defined in CI yaml files for caching purpose
SUBMODULE_ARCHIVE_CACHE_DIR = '.cache/submodule_archives'
class SubModule(object):
@@ -28,6 +30,7 @@ class SubModule(object):
def __init__(self, gitlab_inst, path, url):
self.path = path
self.url = url
self.gitlab_inst = gitlab_inst
self.project_id = self._get_project_id(url)
self.commit_id = self._get_commit_id(path)
@@ -48,7 +51,7 @@ class SubModule(object):
def download_archive(self):
print('Update submodule: {}: {}'.format(self.path, self.commit_id))
path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER,
self.project_id)
self.project_id, SUBMODULE_ARCHIVE_CACHE_DIR)
renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path))
os.rename(path_name, renamed_path)
shutil.rmtree(self.path, ignore_errors=True)

View file

@@ -177,7 +177,7 @@ class Gitlab(object):
return job_id_list
@retry
def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None) -> str:
def download_archive(self, ref: str, destination: str, project_id: Optional[int] = None, cache_dir: Optional[str] = None) -> str:
"""
Download archive of certain commit of a repository and extract to destination path
@@ -191,6 +191,23 @@
else:
project = self.gitlab_inst.projects.get(project_id)
if cache_dir:
local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
os.makedirs(os.path.dirname(local_archive_file), exist_ok=True)
if os.path.isfile(local_archive_file):
print('Use cached archive file. Skipping download...')
else:
with open(local_archive_file, 'wb') as fw:
try:
project.repository_archive(sha=ref, streamed=True, action=fw.write)
except gitlab.GitlabGetError as e:
print('Failed to archive from project {}'.format(project_id))
raise e
print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(local_archive_file)) / (1024 * 1024)))
return self.decompress_archive(local_archive_file, destination)
# no cache
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
try:
project.repository_archive(sha=ref, streamed=True, action=temp_file.write)
@@ -198,9 +215,13 @@
print('Failed to archive from project {}'.format(project_id))
raise e
print('archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
print('Downloaded archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024)))
with tarfile.open(temp_file.name, 'r') as archive_file:
return self.decompress_archive(temp_file.name, destination)
@staticmethod
def decompress_archive(path: str, destination: str) -> str:
with tarfile.open(path, 'r') as archive_file:
root_name = archive_file.getnames()[0]
archive_file.extractall(destination)
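Taken together, the cached download path added to Gitlab.download_archive() boils down to the stand-alone sketch below. This is a simplified illustration rather than the project's actual helper: stream_archive stands in for the python-gitlab call project.repository_archive(sha=ref, streamed=True, action=...), and the retry, error handling and size reporting of the real method are omitted.

import os
import tarfile
from typing import BinaryIO, Callable

# Must stay in sync with the cache path declared in the CI config above
SUBMODULE_ARCHIVE_CACHE_DIR = '.cache/submodule_archives'


def download_archive_cached(ref: str, destination: str,
                            stream_archive: Callable[[str, BinaryIO], None],
                            cache_dir: str = SUBMODULE_ARCHIVE_CACHE_DIR) -> str:
    """Sketch of the cached flow: reuse <cache_dir>/<ref>.tar.gz when it exists,
    download it once otherwise, then extract it into `destination`."""
    local_archive_file = os.path.join(cache_dir, f'{ref}.tar.gz')
    os.makedirs(cache_dir, exist_ok=True)
    if os.path.isfile(local_archive_file):
        print('Use cached archive file. Skipping download...')
    else:
        with open(local_archive_file, 'wb') as fw:
            stream_archive(ref, fw)  # placeholder for the streamed GitLab archive download
    with tarfile.open(local_archive_file, 'r') as archive_file:
        root_name = archive_file.getnames()[0]
        archive_file.extractall(destination)
    return os.path.join(os.path.realpath(destination), root_name)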