From 7b7d1e11015e93c0a60d555ae3a521b955eaa6bb Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Mon, 24 Mar 2025 18:10:54 +0100 Subject: [PATCH 1/3] Python caching revisited --- .github/actions/apt_requirements/action.yml | 29 ++-- .../misc/compute_files_hash/action.yml | 38 +++++ .../create_dev_requirements_file/action.yml | 28 ++++ .../create_docs_requirements_file/action.yml | 37 ++++ .../action.yml | 87 ++++++++++ .../create_virtualenv/action.yml | 26 +++ .../restore_pip_cache/action.yml | 52 ++++++ .../restore_virtualenv/action.yml | 43 +++++ .../save_pip_cache/action.yml | 37 ++++ .../save_virtualenv/action.yml | 29 ++++ .github/workflows/_python.yml | 158 +++++++++--------- .github/workflows/_release_and_tag.yml | 4 +- .github/workflows/create_apt_cache.yaml | 4 +- CHANGELOG.md | 2 +- actions/apt_requirements/action.yml | 29 ++-- workflows/_python.yml | 158 +++++++++--------- workflows/create_apt_cache.yaml | 18 +- workflows/create_python_cache.yaml | 50 ++++++ 18 files changed, 631 insertions(+), 198 deletions(-) create mode 100644 .github/actions/misc/compute_files_hash/action.yml create mode 100644 .github/actions/python_requirements/create_dev_requirements_file/action.yml create mode 100644 .github/actions/python_requirements/create_docs_requirements_file/action.yml create mode 100644 .github/actions/python_requirements/create_linter_requirements_file/action.yml create mode 100644 .github/actions/python_requirements/create_virtualenv/action.yml create mode 100644 .github/actions/python_requirements/restore_pip_cache/action.yml create mode 100644 .github/actions/python_requirements/restore_virtualenv/action.yml create mode 100644 .github/actions/python_requirements/save_pip_cache/action.yml create mode 100644 .github/actions/python_requirements/save_virtualenv/action.yml create mode 100644 workflows/create_python_cache.yaml diff --git a/.github/actions/apt_requirements/action.yml b/.github/actions/apt_requirements/action.yml index 0024e2c..bbddcbf 100644 
--- a/.github/actions/apt_requirements/action.yml +++ b/.github/actions/apt_requirements/action.yml @@ -1,5 +1,5 @@ -name: Composite action install apt requirements -description: Composite action install apt requirements +name: Composite action install APT requirements +description: Composite action to install APT requirements inputs: requirements_file: description: Requirements file @@ -8,12 +8,11 @@ inputs: runs: using: "composite" steps: - - name: Compute APT requirements file SHA256 hash - id: compute_apt_requirements_file_sha256_hash - run: | - COMPUTED_HASH=$(sha256sum ${{ inputs.requirements_file }} | cut -d ' ' -f 1) - echo "cache_key=$COMPUTED_HASH" >> $GITHUB_OUTPUT - shell: bash + - name: Compute apt_packages.txt SHA256 hash + id: compute_apt_packages_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.requirements_file }} # Vital to be able to restore cache # If write permission is not set, a permissions error will be raised @@ -26,17 +25,19 @@ runs: id: restore_cache_from_parent_branch with: path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }} + key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - uses: actions/cache/restore@v4 id: restore_cache_from_current_branch if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }} + key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - name: Refresh repositories - if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' + if: > + steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && + 
steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' run: | sudo apt-get update shell: bash @@ -48,7 +49,9 @@ runs: - uses: actions/cache/save@v4 id: cache_apt_requirements_for_current_branch - if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' + if: > + steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && + steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }} \ No newline at end of file + key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/.github/actions/misc/compute_files_hash/action.yml b/.github/actions/misc/compute_files_hash/action.yml new file mode 100644 index 0000000..2c3b905 --- /dev/null +++ b/.github/actions/misc/compute_files_hash/action.yml @@ -0,0 +1,38 @@ +name: Composite action compute files hash +description: Composite action to compute a single hash of one or more files +inputs: + file_paths: + description: Comma separeted list of files. 
+ required: false + +outputs: + computed_hash: + description: The hash of the concatenated files + value: ${{ steps.compute_files_sha256_hash.outputs.computed_hash }} + +runs: + using: "composite" + steps: + - name: Compute files SHA256 hash + id: compute_files_sha256_hash + run: | + JOINED_FILES="cat " + # Create a bash array of file paths + IFS=',' read -r -a files <<< "${{ inputs.file_paths }}" + echo "::debug::File paths array is composed by: ${files[@]}" + for file in ${files[@]}; + do + if [[ -f $file ]]; then + # Concat file path to cat command + JOINED_FILES+="$file " + echo "::debug::Current file is $file" + echo "::debug::JOINED_FILES variable state is $JOINED_FILES" + else + echo "::error::$file does not exist or it is not a regular file!" + exit 1 + fi + done + COMPUTED_HASH=$($JOINED_FILES | sha256sum | cut -d ' ' -f 1) + echo "::debug::Hash is $COMPUTED_HASH" + echo "computed_hash=$COMPUTED_HASH" >> $GITHUB_OUTPUT + shell: bash \ No newline at end of file diff --git a/.github/actions/python_requirements/create_dev_requirements_file/action.yml b/.github/actions/python_requirements/create_dev_requirements_file/action.yml new file mode 100644 index 0000000..eb86a04 --- /dev/null +++ b/.github/actions/python_requirements/create_dev_requirements_file/action.yml @@ -0,0 +1,28 @@ +name: Composite action create Python dev requirements file +description: Composite action to create Python dev requirements file +inputs: + install_from: + description: Directory that must be used to install the packages + required: false + default: . 
+ project_dev_requirements_file: + description: An additional project dev requirements file + required: false + use_coverage: + description: Use coverage.py + required: false + +runs: + using: "composite" + steps: + - name: Create requirements-dev.txt + run: | + echo > requirements-dev.txt + if [[ '${{ inputs.use_coverage }}' != 'false' ]]; then + echo "coverage>=7.3.2" >> requirements-dev.txt + fi + if [[ -z '${{ inputs.project_dev_requirements_file }}' ]];then + cat $(echo ${{ inputs.project_dev_requirements_file }}) >> requirements-dev.txt + fi + shell: bash + working-directory: ${{ inputs.install_from }} \ No newline at end of file diff --git a/.github/actions/python_requirements/create_docs_requirements_file/action.yml b/.github/actions/python_requirements/create_docs_requirements_file/action.yml new file mode 100644 index 0000000..fb674f8 --- /dev/null +++ b/.github/actions/python_requirements/create_docs_requirements_file/action.yml @@ -0,0 +1,37 @@ +name: Composite action create Python docs requirements file +description: Composite action to create Python docs requirements file +inputs: + install_from: + description: Directory that must be used to install the packages + required: false + default: . 
+ project_docs_requirements_file: + description: An additional project docs requirements file + required: false + django_settings_module: + description: Path to the django settings file + required: false + check_docs_directory: + description: Check docs using rstcheck inside this directory + required: false + +runs: + using: "composite" + steps: + - name: Create requirements-docs.txt + run: | + echo > requirements-docs.txt + if [[ -n '${{ inputs.check_docs_directory }}' ]]; then + echo "rstcheck[sphinx]" >> requirements-docs.txt + echo "sphinx==7.2.6" >> requirements-docs.txt + echo "sphinx_rtd_theme==1.3.0" >> requirements-docs.txt + echo "sphinxcontrib-spelling==8.0.0" >> requirements-docs.txt + if [[ -n '${{ inputs.django_settings_module }}' ]]; then + echo "sphinxcontrib-django2==1.9" >> requirements-docs.txt + fi + if [[ -z '${{ inputs.project_docs_requirements_file }}' ]];then + cat $(echo ${{ inputs.project_docs_requirements_file }}) >> requirements-docs.txt + fi + fi + shell: bash + working-directory: ${{ inputs.install_from }} \ No newline at end of file diff --git a/.github/actions/python_requirements/create_linter_requirements_file/action.yml b/.github/actions/python_requirements/create_linter_requirements_file/action.yml new file mode 100644 index 0000000..7edb47f --- /dev/null +++ b/.github/actions/python_requirements/create_linter_requirements_file/action.yml @@ -0,0 +1,87 @@ +name: Composite action create Python linter requirements file +description: Composite action to create Python linter requirements file +inputs: + install_from: + description: Directory that must be used to install the packages + required: false + default: . 
+ project_linter_requirements_file: + description: An additional project linter requirements file + required: false + django_settings_module: + description: Path to the django settings file + required: false + use_autoflake: + description: Use autoflake linter + required: false + use_bandit: + description: Use bandit linter + required: false + use_flake8: + description: Use flake8 linter + required: false + use_pylint: + description: Use pylint linter + required: false + + + +runs: + using: "composite" + steps: + - name: Create requirements-linters.txt + run: | + function check_linter_dependency_and_append_to_file { + # + # Function to check whether a specific linter is in the requirements file + # If it can be found inside the requirements, said linter dependency will be appended to a newly created requirements-linter.txt file. + # If the linter is not found inside the requirements file an error will be raised. + # + # 1st parameter: Name of the linter. + # 2nd parameter: Path of the requirements file. + # + if [[ -z $(grep -P "^$1[^a-zA-Z0-9_-].*" "$2") ]]; then + echo "::error::$1 dependency not found in $2 file!" + exit 1 + else + echo "$1 dependency found in $2!" 
+ echo "$(grep -P ^$1[^a-zA-Z0-9_-].* $2)" >> requirements-linters.txt + fi + } + CI_REQUIREMENTS_LINTERS="${GITHUB_WORKSPACE}/.github/configurations/python_linters/requirements-linters.txt" + echo > requirements-linters.txt + + if [[ '${{ inputs.use_black }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "black" "$CI_REQUIREMENTS_LINTERS" + fi + + if [[ '${{ inputs.use_isort }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "isort" "$CI_REQUIREMENTS_LINTERS" + fi + + if [[ '${{ inputs.use_flake8 }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "flake8" "$CI_REQUIREMENTS_LINTERS" + if [[ -n '${{ inputs.django_settings_module }}' ]]; then + check_linter_dependency_and_append_to_file "flake8-django" "$CI_REQUIREMENTS_LINTERS" + fi + fi + + if [[ '${{ inputs.use_pylint }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "pylint" "$CI_REQUIREMENTS_LINTERS" + if [[ -n '${{ inputs.django_settings_module }}' ]]; then + check_linter_dependency_and_append_to_file "pylint-django" "$CI_REQUIREMENTS_LINTERS" + fi + fi + + if [[ '${{ inputs.use_bandit }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "bandit" "$CI_REQUIREMENTS_LINTERS" + fi + if [[ '${{ inputs.use_autoflake }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "autoflake" "$CI_REQUIREMENTS_LINTERS" + fi + + if [[ -z '${{ inputs.project_linter_requirements_file }}' ]]; then + cat $(echo ${{ inputs.project_linter_requirements_file }}) >> requirements-linters.txt + fi + shell: bash + working-directory: ${{ inputs.install_from }} \ No newline at end of file diff --git a/.github/actions/python_requirements/create_virtualenv/action.yml b/.github/actions/python_requirements/create_virtualenv/action.yml new file mode 100644 index 0000000..00a40a4 --- /dev/null +++ b/.github/actions/python_requirements/create_virtualenv/action.yml @@ -0,0 +1,26 @@ +name: Composite action create Python virtual environment 
+description: Composite action create Python virtual environment +inputs: + virtualenv_path: + description: Python's virtual environment path. + required: false + default: ".venv" + activate_only: + description: Whether to create the virtual environment or only activate it. + required: false + default: false + +runs: + using: "composite" + steps: + - name: Python's virtualenv creation + if: inputs.activate_only != 'true' + run: python -m venv ${{ inputs.virtualenv_path }} + shell: bash + - name: Activate newly created virtualenv + id: activate_newly_created_virtualenv + run: | + source ${{ inputs.virtualenv_path }}/bin/activate + echo "VIRTUAL_ENV=$VIRTUAL_ENV" >> $GITHUB_ENV + echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH + shell: bash diff --git a/.github/actions/python_requirements/restore_pip_cache/action.yml b/.github/actions/python_requirements/restore_pip_cache/action.yml new file mode 100644 index 0000000..0c4f93b --- /dev/null +++ b/.github/actions/python_requirements/restore_pip_cache/action.yml @@ -0,0 +1,52 @@ +name: Composite action restore pip's cache +description: Composite action to restore pip's cache +inputs: + custom_pip_cache_path: + description: Path to pip cache. + required: false + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. + required: false + default: ${{ github.ref_name }} + +outputs: + cache-hit: + description: Whether pip cache was found in the cache or not. + value: ${{ steps.explain_cache_output.outputs.real_cache_hit }} + +runs: + using: "composite" + steps: + - name: Generate random UUID + id: generate_random_uuid + run: | + random_uuid=$(uuidgen -r) + echo "::debug::Random uuid generated is $random_uuid. 
Should only cause a cache-miss" + echo "computed_uuid=$random_uuid" >> $GITHUB_OUTPUT + shell: bash + - name: Get pip cache directory + id: get_pip_cache_directory + run: | + if [[ -z '${{ inputs.custom_pip_cache_path }}' ]]; then + echo "pip_cache_path=$(pip cache dir)" >> $GITHUB_OUTPUT + else + echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT + fi + shell: bash + - name: Restore pip cache + id: restore_pip_cache + uses: actions/cache/restore@v4 + with: + key: ${{ steps.generate_random_uuid.outputs.computed_uuid }} + path: ${{ steps.get_pip_cache_directory.outputs.pip_cache_path }} + restore-keys: ${{ inputs.git_reference }}-pip-cache- + - name: Explain cache output + id: explain_cache_output + run: | + echo "::debug::Restore action for pip's cache returned cache-hit: ${{ steps.restore_pip_cache.outputs.cache-hit }} with cache-matched-key: ${{ steps.restore_pip_cache.outputs.cache-matched-key }}" + if [[ -z '${{ steps.restore_pip_cache.outputs.cache-matched-key }}' ]]; then + echo "real_cache_hit=false" >> $GITHUB_OUTPUT + else + echo "real_cache_hit=true" >> $GITHUB_OUTPUT + fi + shell: bash \ No newline at end of file diff --git a/.github/actions/python_requirements/restore_virtualenv/action.yml b/.github/actions/python_requirements/restore_virtualenv/action.yml new file mode 100644 index 0000000..1798a87 --- /dev/null +++ b/.github/actions/python_requirements/restore_virtualenv/action.yml @@ -0,0 +1,43 @@ +name: Composite action restore Python virtual environment +description: Composite action to restore Python virtual environment +inputs: + virtual_environment_path: + description: Path to where virtual environment will be restored. + required: false + default: ".venv" + requirements_paths: + description: Comma separeted list of requirements files. They will be used to compute the hash for the cache key. 
+ required: false + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. + required: false + default: ${{ github.ref_name }} + +outputs: + cache-hit: + description: Whether virtual environment was found in the cache or not. + value: ${{ steps.restore_virtual_environment.outputs.cache-hit }} + +runs: + using: "composite" + steps: + - name: Compute requirements files SHA256 hash + id: compute_requirements_files_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.requirements_paths }} + + - name: Restore virtual environment + id: restore_virtual_environment + uses: actions/cache/restore@v4 + with: + path: ${{ inputs.virtual_environment_path }} + key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} + + - name: Activate restored virtual environment + if: > + steps.restore_virtual_environment.outputs.cache-hit == 'true' + uses: ./.github/actions/python_requirements/create_virtualenv + with: + virtualenv_path: ${{ inputs.virtual_environment_path }} + activate_only: true \ No newline at end of file diff --git a/.github/actions/python_requirements/save_pip_cache/action.yml b/.github/actions/python_requirements/save_pip_cache/action.yml new file mode 100644 index 0000000..218e508 --- /dev/null +++ b/.github/actions/python_requirements/save_pip_cache/action.yml @@ -0,0 +1,37 @@ +name: Composite action save pip's cache +description: Composite action to save pip's cache +inputs: + custom_pip_cache_path: + description: Path to pip cache. + required: false + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
+ required: false + default: ${{ github.ref_name }} + +runs: + using: "composite" + steps: + # TODO non posso usare l'hash del file perchè quando vado ad aggiornare i requirements + # l'hash cambia e sicuramente non trovo la cache salvata + - name: Generate random UUID + id: generate_random_uuid + run: | + random_uuid=$(uuidgen -r) + echo "::debug::Random uuid generated is $random_uuid" + echo "computed_uuid=$random_uuid" >> $GITHUB_OUTPUT + shell: bash + - name: Get pip cache directory + id: get_pip_cache_directory + run: | + if [[ -z '${{ inputs.custom_pip_cache_path }}' ]]; then + echo "pip_cache_path=$(pip cache dir)" >> $GITHUB_OUTPUT + else + echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT + fi + shell: bash + - name: Save pip cache + uses: actions/cache/save@v4 + with: + path: ${{ steps.get_pip_cache_directory.outputs.pip_cache_path }} + key: ${{ inputs.git_reference }}-pip-cache-${{ steps.generate_random_uuid.outputs.computed_uuid }} \ No newline at end of file diff --git a/.github/actions/python_requirements/save_virtualenv/action.yml b/.github/actions/python_requirements/save_virtualenv/action.yml new file mode 100644 index 0000000..b93d66c --- /dev/null +++ b/.github/actions/python_requirements/save_virtualenv/action.yml @@ -0,0 +1,29 @@ +name: Composite action save Python virtual environment +description: Composite action to save Python virtual environment +inputs: + virtual_environment_path: + description: Path to virtual environment. + required: false + default: ".venv" + requirements_paths: + description: Comma separeted list of requirements files. They will be used to compute the hash for the cache key. + required: false + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
+ required: false + default: ${{ github.ref_name }} + +runs: + using: "composite" + steps: + - name: Compute requirements files SHA256 hash + id: compute_requirements_files_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.requirements_paths }} + + - name: Cache virtual environment + uses: actions/cache/save@v4 + with: + path: ${{ inputs.virtual_environment_path }} + key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/.github/workflows/_python.yml b/.github/workflows/_python.yml index 8e67133..1f6624f 100644 --- a/.github/workflows/_python.yml +++ b/.github/workflows/_python.yml @@ -248,7 +248,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python_version }} @@ -292,99 +292,97 @@ jobs: exit 1 shell: bash - # not the best solution because i do not think that dependabot supports this - - name: Create requirements-linters.txt - run: | - echo > requirements-linters.txt - - if [[ '${{ inputs.use_black}}' != 'false' ]]; then - echo "black==24.8.0" >> requirements-linters.txt - fi - - if [[ '${{ inputs.use_isort}}' != 'false' ]]; then - echo "isort==5.13.2" >> requirements-linters.txt - fi - - if [[ '${{ inputs.use_flake8}}' != 'false' ]]; then - echo "flake8==7.1.1" >> requirements-linters.txt - if [[ -n '${{ inputs.django_settings_module }}' ]]; then - echo "flake8-django @ git+https://github.com/terencehonles/flake8-django.git@a6e369e89d275dfd5514f2aa9d091aa36c5ff84b" >> requirements-linters.txt - fi - fi + - name: Create linter requirements file + uses: ./.github/actions/python_requirements/create_linter_requirements_file + with: + install_from: ${{ inputs.install_from }} + django_settings_module: ${{ inputs.django_settings_module }} + use_autoflake: ${{ inputs.use_autoflake }} + use_bandit: ${{ inputs.use_bandit }} + 
use_flake8: ${{ inputs.use_flake8 }} + use_pylint: ${{ inputs.use_pylint }} - if [[ '${{ inputs.use_pylint}}' != 'false' ]]; then - echo "pylint==3.2.6" >> requirements-linters.txt - if [[ -n '${{ inputs.django_settings_module }}' ]]; then - echo "pylint-django==2.5.5" >> requirements-linters.txt - fi - fi + - name: Create dev requirements file + uses: ./.github/actions/python_requirements/create_dev_requirements_file + with: + install_from: ${{ inputs.install_from }} + use_coverage: ${{ inputs.use_coverage }} - if [[ '${{ inputs.use_bandit}}' != 'false' ]]; then - echo "bandit==1.7.9" >> requirements-linters.txt - fi - if [[ '${{ inputs.use_autoflake}}' != 'false' ]]; then - echo "autoflake==2.3.1" >> requirements-linters.txt - fi - cat $(echo ${{ inputs.requirements_path }} | sed -e 's/.txt/-linter.txt/') >> requirements-linters.txt 2>/dev/null || exit 0 - shell: bash - working-directory: ${{ inputs.install_from }} + - name: Create docs requirements file + uses: ./.github/actions/python_requirements/create_docs_requirements_file + with: + install_from: ${{ inputs.install_from }} + check_docs_directory: ${{ inputs.check_docs_directory }} + django_settings_module: ${{ inputs.django_settings_module }} - - name: Create requirements-dev.txt - run: | - echo > requirements-dev.txt - if [[ '${{ inputs.use_coverage }}' != 'false' ]]; then - echo "coverage>=7.3.2" >> requirements-dev.txt - fi - cat $(echo ${{ inputs.requirements_path }} | sed -e 's/.txt/-dev.txt/') >> requirements-dev.txt 2>/dev/null || exit 0 - shell: bash - working-directory: ${{ inputs.install_from }} + - name: Restore Python virtual environment related to PR event + id: restore_python_virtual_environment_pr + uses: ./.github/actions/python_requirements/restore_virtualenv/ + with: + requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" - - name: Create requirements-docs.txt - run: | - echo > requirements-docs.txt - if [[ -n '${{ 
inputs.check_docs_directory }}' ]]; then - echo "rstcheck[sphinx]" >> requirements-docs.txt - echo "sphinx==7.2.6" >> requirements-docs.txt - echo "sphinx_rtd_theme==1.3.0" >> requirements-docs.txt - echo "sphinxcontrib-spelling==8.0.0" >> requirements-docs.txt - if [[ -n '${{ inputs.django_settings_module }}' ]]; then - echo "sphinxcontrib-django2==1.9" >> requirements-docs.txt - fi - cat $(echo ${{ inputs.requirements_path }} | sed -e 's/.txt/-docs.txt/') >> requirements-docs.txt 2>/dev/null || exit 0 - fi + - name: Restore Python virtual environment related to target branch + id: restore_python_virtual_environment_target_branch + if: steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/restore_virtualenv/ + with: + requirements_paths: ${{ inputs.requirements_path }} + git_reference: ${{ github.base_ref }} + + - name: Create Python virtual environment + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/create_virtualenv + + - name: Restore pip cache related to PR event + id: restore_pip_cache_pr + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/restore_pip_cache + + - name: Restore pip cache related to target branch + id: restore_pip_cache_target_branch + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' && + steps.restore_pip_cache_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/restore_pip_cache + with: + git_reference: ${{ github.base_ref }} + + - name: Install project requirements + if: > + 
steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' + run: pip install -r ${{ inputs.requirements_path }} shell: bash working-directory: ${{ inputs.install_from }} - - name: Check virtualenv cache - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv - with: - requirement_files: | - ${{ inputs.install_from }}/${{ inputs.requirements_path }} - ${{ inputs.install_from }}/requirements-dev.txt - ${{ inputs.install_from }}/requirements-linters.txt - ${{ inputs.install_from }}/requirements-docs.txt - - - name: Check pip cache - uses: syphar/restore-pip-download-cache@v1 - if: steps.cache-virtualenv.outputs.cache-hit != 'true' - with: - requirement_files: | - ${{ inputs.install_from }}/${{ inputs.requirements_path }} - ${{ inputs.install_from }}/requirements-dev.txt - ${{ inputs.install_from }}/requirements-linters.txt - ${{ inputs.install_from }}/requirements-docs.txt - - - name: Install requirements - if: steps.cache-virtualenv.outputs.cache-hit != 'true' + - name: Install other requirements + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' run: | - pip install -r ${{ inputs.requirements_path }} pip install -r requirements-dev.txt pip install -r requirements-linters.txt pip install -r requirements-docs.txt shell: bash working-directory: ${{ inputs.install_from }} + - name: Save Python virtual environment related to the PR event + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/save_virtualenv + with: + requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" + + - name: Save pip cache related to the PR event + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_pip_cache_pr.outputs.cache-hit != 'true' + uses: 
./.github/actions/python_requirements/save_pip_cache + - name: Run linters uses: ./.github/actions/python_linter if: inputs.use_black || inputs.use_isort || inputs.use_flake8 || inputs.use_pylint || inputs.use_bandit || inputs.use_autoflake diff --git a/.github/workflows/_release_and_tag.yml b/.github/workflows/_release_and_tag.yml index aaa5124..e899456 100644 --- a/.github/workflows/_release_and_tag.yml +++ b/.github/workflows/_release_and_tag.yml @@ -161,14 +161,14 @@ jobs: fetch-depth: 0 # otherwise, you do not retrieve the tags - name: Push on ecr branch uses: ./.github/actions/push_on_ecr - if: github.base_ref == 'master' || github.base_ref == 'main' || github.base_ref == 'develop' || github.base_ref == 'dev' || github.base_ref == 'test' + if: github.base_ref == 'master' || github.base_ref == 'main' || github.base_ref == 'develop' || github.base_ref == 'dev' with: repository: ${{ inputs.repository }} aws_account_id: ${{ secrets.AWS_ACCOUNT_ID }} aws_access_key: ${{ secrets.AWS_ACCESS_KEY}} aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} dockerfile: ${{ matrix.dockerfile }} - image_tag: ${{ ( github.base_ref == 'main' || github.base_ref == 'master' ) && 'prod' || ( github.base_ref == 'develop' || github.base_ref == 'dev' ) && 'stag' || 'test' }} + image_tag: ${{ ( github.base_ref == 'main' || github.base_ref == 'master' ) && 'prod' || 'stag' }} aws_region: ${{ inputs.aws_region }} working_directory: ${{ inputs.working_directory }} diff --git a/.github/workflows/create_apt_cache.yaml b/.github/workflows/create_apt_cache.yaml index 2315fc9..7cd6937 100644 --- a/.github/workflows/create_apt_cache.yaml +++ b/.github/workflows/create_apt_cache.yaml @@ -16,13 +16,13 @@ on: jobs: create-cache: - name: Create cache for dependencies + name: Create cache for APT dependencies runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 # Remember to set the same APT requirements file path set before! 
- - name: Install dependencies + - name: Install APT dependencies run: | sudo apt-get update sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < apt_packages.txt) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e93115..36aee59 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,6 @@ * Added *create_apt_cache.yaml* workflow to cache APT requirements each time a commit is pushed on selected branch and **when the requirements file has changed**. ### Bugfix * Updated python linters also in '_python.yml' workflow (missing from previous release) -* ### Changes * Deprecation of license check table-headers * Updated Python linters: @@ -19,3 +18,4 @@ * Added both frontend and backend exclusions on _detect_changes.yaml (paths that won't be considered by git diff) * Updated CodeQL action v2 -> v3 (v2 has been [deprecated](https://github.blog/changelog/2024-01-12-code-scanning-deprecation-of-codeql-action-v2/) on december '24) * Removed `setup-python-dependencies` from `codeql/action.yml` since it has no effect anymore. See [this](https://github.blog/changelog/2024-01-23-codeql-2-16-python-dependency-installation-disabled-new-queries-and-bug-fixes/) for more information. +* Reworked Python requirements caching. 
diff --git a/actions/apt_requirements/action.yml b/actions/apt_requirements/action.yml index 0024e2c..bbddcbf 100644 --- a/actions/apt_requirements/action.yml +++ b/actions/apt_requirements/action.yml @@ -1,5 +1,5 @@ -name: Composite action install apt requirements -description: Composite action install apt requirements +name: Composite action install APT requirements +description: Composite action to install APT requirements inputs: requirements_file: description: Requirements file @@ -8,12 +8,11 @@ inputs: runs: using: "composite" steps: - - name: Compute APT requirements file SHA256 hash - id: compute_apt_requirements_file_sha256_hash - run: | - COMPUTED_HASH=$(sha256sum ${{ inputs.requirements_file }} | cut -d ' ' -f 1) - echo "cache_key=$COMPUTED_HASH" >> $GITHUB_OUTPUT - shell: bash + - name: Compute apt_packages.txt SHA256 hash + id: compute_apt_packages_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.requirements_file }} # Vital to be able to restore cache # If write permission is not set, a permissions error will be raised @@ -26,17 +25,19 @@ runs: id: restore_cache_from_parent_branch with: path: /var/cache/apt/archives/*.deb - key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }} + key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - uses: actions/cache/restore@v4 id: restore_cache_from_current_branch if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }} + key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} - name: Refresh repositories - if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' + if: > + 
steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && + steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' run: | sudo apt-get update shell: bash @@ -48,7 +49,9 @@ runs: - uses: actions/cache/save@v4 id: cache_apt_requirements_for_current_branch - if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' + if: > + steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && + steps.restore_cache_from_current_branch.outputs.cache-hit != 'true' with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }} \ No newline at end of file + key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/workflows/_python.yml b/workflows/_python.yml index 8e67133..1f6624f 100644 --- a/workflows/_python.yml +++ b/workflows/_python.yml @@ -248,7 +248,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python_version }} @@ -292,99 +292,97 @@ jobs: exit 1 shell: bash - # not the best solution because i do not think that dependabot supports this - - name: Create requirements-linters.txt - run: | - echo > requirements-linters.txt - - if [[ '${{ inputs.use_black}}' != 'false' ]]; then - echo "black==24.8.0" >> requirements-linters.txt - fi - - if [[ '${{ inputs.use_isort}}' != 'false' ]]; then - echo "isort==5.13.2" >> requirements-linters.txt - fi - - if [[ '${{ inputs.use_flake8}}' != 'false' ]]; then - echo "flake8==7.1.1" >> requirements-linters.txt - if [[ -n '${{ inputs.django_settings_module }}' ]]; then - echo "flake8-django @ git+https://github.com/terencehonles/flake8-django.git@a6e369e89d275dfd5514f2aa9d091aa36c5ff84b" >> requirements-linters.txt - fi - fi + - name: Create linter 
requirements file + uses: ./.github/actions/python_requirements/create_linter_requirements_file + with: + install_from: ${{ inputs.install_from }} + django_settings_module: ${{ inputs.django_settings_module }} + use_autoflake: ${{ inputs.use_autoflake }} + use_bandit: ${{ inputs.use_bandit }} + use_flake8: ${{ inputs.use_flake8 }} + use_pylint: ${{ inputs.use_pylint }} - if [[ '${{ inputs.use_pylint}}' != 'false' ]]; then - echo "pylint==3.2.6" >> requirements-linters.txt - if [[ -n '${{ inputs.django_settings_module }}' ]]; then - echo "pylint-django==2.5.5" >> requirements-linters.txt - fi - fi + - name: Create dev requirements file + uses: ./.github/actions/python_requirements/create_dev_requirements_file + with: + install_from: ${{ inputs.install_from }} + use_coverage: ${{ inputs.use_coverage }} - if [[ '${{ inputs.use_bandit}}' != 'false' ]]; then - echo "bandit==1.7.9" >> requirements-linters.txt - fi - if [[ '${{ inputs.use_autoflake}}' != 'false' ]]; then - echo "autoflake==2.3.1" >> requirements-linters.txt - fi - cat $(echo ${{ inputs.requirements_path }} | sed -e 's/.txt/-linter.txt/') >> requirements-linters.txt 2>/dev/null || exit 0 - shell: bash - working-directory: ${{ inputs.install_from }} + - name: Create docs requirements file + uses: ./.github/actions/python_requirements/create_docs_requirements_file + with: + install_from: ${{ inputs.install_from }} + check_docs_directory: ${{ inputs.check_docs_directory }} + django_settings_module: ${{ inputs.django_settings_module }} - - name: Create requirements-dev.txt - run: | - echo > requirements-dev.txt - if [[ '${{ inputs.use_coverage }}' != 'false' ]]; then - echo "coverage>=7.3.2" >> requirements-dev.txt - fi - cat $(echo ${{ inputs.requirements_path }} | sed -e 's/.txt/-dev.txt/') >> requirements-dev.txt 2>/dev/null || exit 0 - shell: bash - working-directory: ${{ inputs.install_from }} + - name: Restore Python virtual environment related to PR event + id: restore_python_virtual_environment_pr + 
uses: ./.github/actions/python_requirements/restore_virtualenv/ + with: + requirements_paths: "${{ inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" - - name: Create requirements-docs.txt - run: | - echo > requirements-docs.txt - if [[ -n '${{ inputs.check_docs_directory }}' ]]; then - echo "rstcheck[sphinx]" >> requirements-docs.txt - echo "sphinx==7.2.6" >> requirements-docs.txt - echo "sphinx_rtd_theme==1.3.0" >> requirements-docs.txt - echo "sphinxcontrib-spelling==8.0.0" >> requirements-docs.txt - if [[ -n '${{ inputs.django_settings_module }}' ]]; then - echo "sphinxcontrib-django2==1.9" >> requirements-docs.txt - fi - cat $(echo ${{ inputs.requirements_path }} | sed -e 's/.txt/-docs.txt/') >> requirements-docs.txt 2>/dev/null || exit 0 - fi + - name: Restore Python virtual environment related to target branch + id: restore_python_virtual_environment_target_branch + if: steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/restore_virtualenv/ + with: + requirements_paths: ${{ inputs.requirements_path }} + git_reference: ${{ github.base_ref }} + + - name: Create Python virtual environment + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/create_virtualenv + + - name: Restore pip cache related to PR event + id: restore_pip_cache_pr + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/restore_pip_cache + + - name: Restore pip cache related to target branch + id: restore_pip_cache_target_branch + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + 
steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' && + steps.restore_pip_cache_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/restore_pip_cache + with: + git_reference: ${{ github.base_ref }} + + - name: Install project requirements + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_python_virtual_environment_target_branch.outputs.cache-hit != 'true' + run: pip install -r ${{ inputs.requirements_path }} shell: bash working-directory: ${{ inputs.install_from }} - - name: Check virtualenv cache - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv - with: - requirement_files: | - ${{ inputs.install_from }}/${{ inputs.requirements_path }} - ${{ inputs.install_from }}/requirements-dev.txt - ${{ inputs.install_from }}/requirements-linters.txt - ${{ inputs.install_from }}/requirements-docs.txt - - - name: Check pip cache - uses: syphar/restore-pip-download-cache@v1 - if: steps.cache-virtualenv.outputs.cache-hit != 'true' - with: - requirement_files: | - ${{ inputs.install_from }}/${{ inputs.requirements_path }} - ${{ inputs.install_from }}/requirements-dev.txt - ${{ inputs.install_from }}/requirements-linters.txt - ${{ inputs.install_from }}/requirements-docs.txt - - - name: Install requirements - if: steps.cache-virtualenv.outputs.cache-hit != 'true' + - name: Install other requirements + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' run: | - pip install -r ${{ inputs.requirements_path }} pip install -r requirements-dev.txt pip install -r requirements-linters.txt pip install -r requirements-docs.txt shell: bash working-directory: ${{ inputs.install_from }} + - name: Save Python virtual environment related to the PR event + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/save_virtualenv + with: + requirements_paths: "${{ 
inputs.requirements_path }},requirements-linters.txt,requirements-dev.txt,requirements-docs.txt" + + - name: Save pip cache related to the PR event + if: > + steps.restore_python_virtual_environment_pr.outputs.cache-hit != 'true' && + steps.restore_pip_cache_pr.outputs.cache-hit != 'true' + uses: ./.github/actions/python_requirements/save_pip_cache + - name: Run linters uses: ./.github/actions/python_linter if: inputs.use_black || inputs.use_isort || inputs.use_flake8 || inputs.use_pylint || inputs.use_bandit || inputs.use_autoflake diff --git a/workflows/create_apt_cache.yaml b/workflows/create_apt_cache.yaml index 2315fc9..06f26cc 100644 --- a/workflows/create_apt_cache.yaml +++ b/workflows/create_apt_cache.yaml @@ -13,27 +13,31 @@ on: # Path to APT requirements file - '.github/test/python_test/packages.txt' +# discard previous execution if you commit to a branch that is already running +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: create-cache: - name: Create cache for dependencies + name: Create cache for APT dependencies runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 # Remember to set the same APT requirements file path set before! 
- - name: Install dependencies + - name: Install APT dependencies run: | sudo apt-get update - sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < apt_packages.txt) + sudo apt-get -y install --no-install-recommends $(tr '\n' ' ' < .github/test/python_test/packages.txt) - name: Compute apt_packages.txt SHA256 hash id: compute_apt_packages_sha256_hash - run: | - COMPUTED_HASH=$(sha256sum apt_packages.txt | cut -d ' ' -f 1) - echo "cache_key=$COMPUTED_HASH" >> $GITHUB_OUTPUT + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: .github/test/python_test/packages.txt - uses: actions/cache/save@v4 with: path: /var/cache/apt/archives/*.deb - key: ${{ github.ref_name }}-${{ steps.compute_apt_packages_sha256_hash.outputs.cache_key }} + key: ${{ github.ref_name }}-${{ steps.compute_apt_packages_sha256_hash.outputs.computed_hash }} diff --git a/workflows/create_python_cache.yaml b/workflows/create_python_cache.yaml new file mode 100644 index 0000000..a6b65b5 --- /dev/null +++ b/workflows/create_python_cache.yaml @@ -0,0 +1,50 @@ +name: Create Python cache + +# GitHub will remove any cache entries that have not been accessed in over 7 days. 
+ +# Only project dependencies will be cached here + +on: + push: + branches: + - main + - master + - develop + - dev + paths: + - '.github/test/python_test/requirements.txt' + +# discard previous execution if you commit to a branch that is already running +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + create-python-cache: + name: Create cache for Python dependencies + runs-on: ubuntu-latest + steps: + - name: Check out latest commit + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Set up Python's virtual environment + uses: ./.github/actions/python_requirements/create_virtualenv + + - name: Install Python dependencies + run: | + pip install -r .github/test/python_test/requirements.txt + working-directory: "." + + - name: Save pip cache + uses: ./.github/actions/python_requirements/save_pip_cache + + - name: Create virtual environment cache + uses: ./.github/actions/python_requirements/save_virtualenv + with: + requirements_paths: .github/test/python_test/requirements.txt + From 3951d915c28bd29e8446d049f625262a2862446f Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Mon, 24 Mar 2025 18:16:34 +0100 Subject: [PATCH 2/3] renamed step in apt_requirements composite action --- .github/actions/apt_requirements/action.yml | 4 ++-- actions/apt_requirements/action.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/actions/apt_requirements/action.yml b/.github/actions/apt_requirements/action.yml index bbddcbf..7134837 100644 --- a/.github/actions/apt_requirements/action.yml +++ b/.github/actions/apt_requirements/action.yml @@ -8,8 +8,8 @@ inputs: runs: using: "composite" steps: - - name: Compute apt_packages.txt SHA256 hash - id: compute_apt_packages_sha256_hash + - name: Compute apt requirements file SHA256 hash + id: compute_apt_requirements_file_sha256_hash uses: ./.github/actions/misc/compute_files_hash 
with: file_paths: ${{ inputs.requirements_file }} diff --git a/actions/apt_requirements/action.yml b/actions/apt_requirements/action.yml index bbddcbf..7134837 100644 --- a/actions/apt_requirements/action.yml +++ b/actions/apt_requirements/action.yml @@ -8,8 +8,8 @@ inputs: runs: using: "composite" steps: - - name: Compute apt_packages.txt SHA256 hash - id: compute_apt_packages_sha256_hash + - name: Compute apt requirements file SHA256 hash + id: compute_apt_requirements_file_sha256_hash uses: ./.github/actions/misc/compute_files_hash with: file_paths: ${{ inputs.requirements_file }} From 554419ae342b9d2e8711621f84ab135e8acffd23 Mon Sep 17 00:00:00 2001 From: Luca Cigarini Date: Tue, 25 Mar 2025 11:26:54 +0100 Subject: [PATCH 3/3] updated .github and review changelog --- .github/workflows/pull_request_automation.yml | 2 +- CHANGELOG.md | 11 ++- actions/misc/compute_files_hash/action.yml | 38 ++++++++ .../create_dev_requirements_file/action.yml | 28 ++++++ .../create_docs_requirements_file/action.yml | 37 ++++++++ .../action.yml | 87 +++++++++++++++++++ .../create_virtualenv/action.yml | 26 ++++++ .../restore_pip_cache/action.yml | 52 +++++++++++ .../restore_virtualenv/action.yml | 43 +++++++++ .../save_pip_cache/action.yml | 37 ++++++++ .../save_virtualenv/action.yml | 29 +++++++ workflows/pull_request_automation.yml | 2 +- 12 files changed, 388 insertions(+), 4 deletions(-) create mode 100644 actions/misc/compute_files_hash/action.yml create mode 100644 actions/python_requirements/create_dev_requirements_file/action.yml create mode 100644 actions/python_requirements/create_docs_requirements_file/action.yml create mode 100644 actions/python_requirements/create_linter_requirements_file/action.yml create mode 100644 actions/python_requirements/create_virtualenv/action.yml create mode 100644 actions/python_requirements/restore_pip_cache/action.yml create mode 100644 actions/python_requirements/restore_virtualenv/action.yml create mode 100644 
actions/python_requirements/save_pip_cache/action.yml create mode 100644 actions/python_requirements/save_virtualenv/action.yml diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml index 7c3ffb4..71deca6 100644 --- a/.github/workflows/pull_request_automation.yml +++ b/.github/workflows/pull_request_automation.yml @@ -101,6 +101,6 @@ jobs: env: >- {"KEY": "VALUE"} python_versions: >- - ["3.10"] + ["3.12"] max_timeout: 15 ubuntu_version: latest diff --git a/CHANGELOG.md b/CHANGELOG.md index 36aee59..8af0aef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,18 @@ -# 1.6.x -## 1.6.0 +# 2.0.x + +## 2.0.0 + ### Features + * Added "release.yml" action to to push containers to AWS ECR * Added *create_apt_cache.yaml* workflow to cache APT requirements each time a commit is pushed on selected branch and **when the requirements file has changed**. + ### Bugfix + * Updated python linters also in '_python.yml' workflow (missing from previous release) + ### Changes + * Deprecation of license check table-headers * Updated Python linters: * bandit 1.7.9 -> 1.8.3 diff --git a/actions/misc/compute_files_hash/action.yml b/actions/misc/compute_files_hash/action.yml new file mode 100644 index 0000000..2c3b905 --- /dev/null +++ b/actions/misc/compute_files_hash/action.yml @@ -0,0 +1,38 @@ +name: Composite action compute files hash +description: Composite action to compute a single hash of one or more files +inputs: + file_paths: + description: Comma separeted list of files. 
+ required: false + +outputs: + computed_hash: + description: The hash of the concatenated files + value: ${{ steps.compute_files_sha256_hash.outputs.computed_hash }} + +runs: + using: "composite" + steps: + - name: Compute files SHA256 hash + id: compute_files_sha256_hash + run: | + JOINED_FILES="cat " + # Create a bash array of file paths + IFS=',' read -r -a files <<< "${{ inputs.file_paths }}" + echo "::debug::File paths array is composed by: ${files[@]}" + for file in ${files[@]}; + do + if [[ -f $file ]]; then + # Concat file path to cat command + JOINED_FILES+="$file " + echo "::debug::Current file is $file" + echo "::debug::JOINED_FILES variable state is $JOINED_FILES" + else + echo "::error::$file does not exist or it is not a regular file!" + exit 1 + fi + done + COMPUTED_HASH=$($JOINED_FILES | sha256sum | cut -d ' ' -f 1) + echo "::debug::Hash is $COMPUTED_HASH" + echo "computed_hash=$COMPUTED_HASH" >> $GITHUB_OUTPUT + shell: bash \ No newline at end of file diff --git a/actions/python_requirements/create_dev_requirements_file/action.yml b/actions/python_requirements/create_dev_requirements_file/action.yml new file mode 100644 index 0000000..eb86a04 --- /dev/null +++ b/actions/python_requirements/create_dev_requirements_file/action.yml @@ -0,0 +1,28 @@ +name: Composite action create Python dev requirements file +description: Composite action to create Python dev requirements file +inputs: + install_from: + description: Directory that must be used to install the packages + required: false + default: . 
+ project_dev_requirements_file: + description: An additional project dev requirements file + required: false + use_coverage: + description: Use coverage.py + required: false + +runs: + using: "composite" + steps: + - name: Create requirements-dev.txt + run: | + echo > requirements-dev.txt + if [[ '${{ inputs.use_coverage }}' != 'false' ]]; then + echo "coverage>=7.3.2" >> requirements-dev.txt + fi + if [[ -n '${{ inputs.project_dev_requirements_file }}' ]];then + cat $(echo ${{ inputs.project_dev_requirements_file }}) >> requirements-dev.txt + fi + shell: bash + working-directory: ${{ inputs.install_from }} \ No newline at end of file diff --git a/actions/python_requirements/create_docs_requirements_file/action.yml b/actions/python_requirements/create_docs_requirements_file/action.yml new file mode 100644 index 0000000..fb674f8 --- /dev/null +++ b/actions/python_requirements/create_docs_requirements_file/action.yml @@ -0,0 +1,37 @@ +name: Composite action create Python docs requirements file +description: Composite action to create Python docs requirements file +inputs: + install_from: + description: Directory that must be used to install the packages + required: false + default: .
+ project_docs_requirements_file: + description: An additional project docs requirements file + required: false + django_settings_module: + description: Path to the django settings file + required: false + check_docs_directory: + description: Check docs using rstcheck inside this directory + required: false + +runs: + using: "composite" + steps: + - name: Create requirements-docs.txt + run: | + echo > requirements-docs.txt + if [[ -n '${{ inputs.check_docs_directory }}' ]]; then + echo "rstcheck[sphinx]" >> requirements-docs.txt + echo "sphinx==7.2.6" >> requirements-docs.txt + echo "sphinx_rtd_theme==1.3.0" >> requirements-docs.txt + echo "sphinxcontrib-spelling==8.0.0" >> requirements-docs.txt + if [[ -n '${{ inputs.django_settings_module }}' ]]; then + echo "sphinxcontrib-django2==1.9" >> requirements-docs.txt + fi + if [[ -n '${{ inputs.project_docs_requirements_file }}' ]];then + cat $(echo ${{ inputs.project_docs_requirements_file }}) >> requirements-docs.txt + fi + fi + shell: bash + working-directory: ${{ inputs.install_from }} \ No newline at end of file diff --git a/actions/python_requirements/create_linter_requirements_file/action.yml b/actions/python_requirements/create_linter_requirements_file/action.yml new file mode 100644 index 0000000..7edb47f --- /dev/null +++ b/actions/python_requirements/create_linter_requirements_file/action.yml @@ -0,0 +1,87 @@ +name: Composite action create Python linter requirements file +description: Composite action to create Python linter requirements file +inputs: + install_from: + description: Directory that must be used to install the packages + required: false + default: .
+ project_linter_requirements_file: + description: An additional project linter requirements file + required: false + django_settings_module: + description: Path to the django settings file + required: false + use_autoflake: + description: Use autoflake linter + required: false + use_bandit: + description: Use bandit linter + required: false + use_flake8: + description: Use flake8 linter + required: false + use_pylint: + description: Use pylint linter + required: false + + + +runs: + using: "composite" + steps: + - name: Create requirements-linters.txt + run: | + function check_linter_dependency_and_append_to_file { + # + # Function to check whether a specific linter is in the requirements file + # If it can be found inside the requirements, said linter dependency will be appended to a newly created requirements-linter.txt file. + # If the linter is not found inside the requirements file an error will be raised. + # + # 1st parameter: Name of the linter. + # 2nd parameter: Path of the requirements file. + # + if [[ -z $(grep -P "^$1[^a-zA-Z0-9_-].*" "$2") ]]; then + echo "::error::$1 dependency not found in $2 file!" + exit 1 + else + echo "$1 dependency found in $2!" 
+ echo "$(grep -P ^$1[^a-zA-Z0-9_-].* $2)" >> requirements-linters.txt + fi + } + CI_REQUIREMENTS_LINTERS="${GITHUB_WORKSPACE}/.github/configurations/python_linters/requirements-linters.txt" + echo > requirements-linters.txt + + if [[ '${{ inputs.use_black }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "black" "$CI_REQUIREMENTS_LINTERS" + fi + + if [[ '${{ inputs.use_isort }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "isort" "$CI_REQUIREMENTS_LINTERS" + fi + + if [[ '${{ inputs.use_flake8 }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "flake8" "$CI_REQUIREMENTS_LINTERS" + if [[ -n '${{ inputs.django_settings_module }}' ]]; then + check_linter_dependency_and_append_to_file "flake8-django" "$CI_REQUIREMENTS_LINTERS" + fi + fi + + if [[ '${{ inputs.use_pylint }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "pylint" "$CI_REQUIREMENTS_LINTERS" + if [[ -n '${{ inputs.django_settings_module }}' ]]; then + check_linter_dependency_and_append_to_file "pylint-django" "$CI_REQUIREMENTS_LINTERS" + fi + fi + + if [[ '${{ inputs.use_bandit }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "bandit" "$CI_REQUIREMENTS_LINTERS" + fi + if [[ '${{ inputs.use_autoflake }}' != 'false' ]]; then + check_linter_dependency_and_append_to_file "autoflake" "$CI_REQUIREMENTS_LINTERS" + fi + + if [[ -n '${{ inputs.project_linter_requirements_file }}' ]]; then + cat $(echo ${{ inputs.project_linter_requirements_file }}) >> requirements-linters.txt + fi + shell: bash + working-directory: ${{ inputs.install_from }} \ No newline at end of file diff --git a/actions/python_requirements/create_virtualenv/action.yml b/actions/python_requirements/create_virtualenv/action.yml new file mode 100644 index 0000000..00a40a4 --- /dev/null +++ b/actions/python_requirements/create_virtualenv/action.yml @@ -0,0 +1,26 @@ +name: Composite action create Python virtual environment +description: Composite action
create Python virtual environment +inputs: + virtualenv_path: + description: Python's virtual environment path. + required: false + default: ".venv" + activate_only: + description: Whether to create the virtual environment or only activate it. + required: false + default: false + +runs: + using: "composite" + steps: + - name: Python's virtualenv creation + if: inputs.activate_only != 'true' + run: python -m venv ${{ inputs.virtualenv_path }} + shell: bash + - name: Activate newly created virtualenv + id: activate_newly_created_virtualenv + run: | + source ${{ inputs.virtualenv_path }}/bin/activate + echo "VIRTUAL_ENV=$VIRTUAL_ENV" >> $GITHUB_ENV + echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH + shell: bash diff --git a/actions/python_requirements/restore_pip_cache/action.yml b/actions/python_requirements/restore_pip_cache/action.yml new file mode 100644 index 0000000..0c4f93b --- /dev/null +++ b/actions/python_requirements/restore_pip_cache/action.yml @@ -0,0 +1,52 @@ +name: Composite action restore pip's cache +description: Composite action to restore pip's cache +inputs: + custom_pip_cache_path: + description: Path to pip cache. + required: false + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. + required: false + default: ${{ github.ref_name }} + +outputs: + cache-hit: + description: Whether pip cache was found in the cache or not. + value: ${{ steps.explain_cache_output.outputs.real_cache_hit }} + +runs: + using: "composite" + steps: + - name: Generate random UUID + id: generate_random_uuid + run: | + random_uuid=$(uuidgen -r) + echo "::debug::Random uuid generated is $random_uuid. 
Should only cause a cache-miss" + echo "computed_uuid=$random_uuid" >> $GITHUB_OUTPUT + shell: bash + - name: Get pip cache directory + id: get_pip_cache_directory + run: | + if [[ -z '${{ inputs.custom_pip_cache_path }}' ]]; then + echo "pip_cache_path=$(pip cache dir)" >> $GITHUB_OUTPUT + else + echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT + fi + shell: bash + - name: Restore pip cache + id: restore_pip_cache + uses: actions/cache/restore@v4 + with: + key: ${{ steps.generate_random_uuid.outputs.computed_uuid }} + path: ${{ steps.get_pip_cache_directory.outputs.pip_cache_path }} + restore-keys: ${{ inputs.git_reference }}-pip-cache- + - name: Explain cache output + id: explain_cache_output + run: | + echo "::debug::Restore action for pip's cache returned cache-hit: ${{ steps.restore_pip_cache.outputs.cache-hit }} with cache-matched-key: ${{ steps.restore_pip_cache.outputs.cache-matched-key }}" + if [[ -z '${{ steps.restore_pip_cache.outputs.cache-matched-key }}' ]]; then + echo "real_cache_hit=false" >> $GITHUB_OUTPUT + else + echo "real_cache_hit=true" >> $GITHUB_OUTPUT + fi + shell: bash \ No newline at end of file diff --git a/actions/python_requirements/restore_virtualenv/action.yml b/actions/python_requirements/restore_virtualenv/action.yml new file mode 100644 index 0000000..1798a87 --- /dev/null +++ b/actions/python_requirements/restore_virtualenv/action.yml @@ -0,0 +1,43 @@ +name: Composite action restore Python virtual environment +description: Composite action to restore Python virtual environment +inputs: + virtual_environment_path: + description: Path to where virtual environment will be restored. + required: false + default: ".venv" + requirements_paths: + description: Comma separated list of requirements files. They will be used to compute the hash for the cache key. + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key.
+ required: false + default: ${{ github.ref_name }} + +outputs: + cache-hit: + description: Whether virtual environment was found in the cache or not. + value: ${{ steps.restore_virtual_environment.outputs.cache-hit }} + +runs: + using: "composite" + steps: + - name: Compute requirements files SHA256 hash + id: compute_requirements_files_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.requirements_paths }} + + - name: Restore virtual environment + id: restore_virtual_environment + uses: actions/cache/restore@v4 + with: + path: ${{ inputs.virtual_environment_path }} + key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} + + - name: Activate restored virtual environment + if: > + steps.restore_virtual_environment.outputs.cache-hit == 'true' + uses: ./.github/actions/python_requirements/create_virtualenv + with: + virtualenv_path: ${{ inputs.virtual_environment_path }} + activate_only: true \ No newline at end of file diff --git a/actions/python_requirements/save_pip_cache/action.yml b/actions/python_requirements/save_pip_cache/action.yml new file mode 100644 index 0000000..218e508 --- /dev/null +++ b/actions/python_requirements/save_pip_cache/action.yml @@ -0,0 +1,37 @@ +name: Composite action save pip's cache +description: Composite action to save pip's cache +inputs: + custom_pip_cache_path: + description: Path to pip cache. + required: false + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key. 
+ required: false + default: ${{ github.ref_name }} + +runs: + using: "composite" + steps: + # TODO: we cannot key this cache on the requirements file hash, because updating the requirements + # changes the hash, so the previously saved cache would never be found + - name: Generate random UUID + id: generate_random_uuid + run: | + random_uuid=$(uuidgen -r) + echo "::debug::Random uuid generated is $random_uuid" + echo "computed_uuid=$random_uuid" >> $GITHUB_OUTPUT + shell: bash + - name: Get pip cache directory + id: get_pip_cache_directory + run: | + if [[ -z '${{ inputs.custom_pip_cache_path }}' ]]; then + echo "pip_cache_path=$(pip cache dir)" >> $GITHUB_OUTPUT + else + echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT + fi + shell: bash + - name: Save pip cache + uses: actions/cache/save@v4 + with: + path: ${{ steps.get_pip_cache_directory.outputs.pip_cache_path }} + key: ${{ inputs.git_reference }}-pip-cache-${{ steps.generate_random_uuid.outputs.computed_uuid }} \ No newline at end of file diff --git a/actions/python_requirements/save_virtualenv/action.yml b/actions/python_requirements/save_virtualenv/action.yml new file mode 100644 index 0000000..b93d66c --- /dev/null +++ b/actions/python_requirements/save_virtualenv/action.yml @@ -0,0 +1,29 @@ +name: Composite action save Python virtual environment +description: Composite action to save Python virtual environment +inputs: + virtual_environment_path: + description: Path to virtual environment. + required: false + default: ".venv" + requirements_paths: + description: Comma separated list of requirements files. They will be used to compute the hash for the cache key. + git_reference: + description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key.
+ required: false + default: ${{ github.ref_name }} + +runs: + using: "composite" + steps: + - name: Compute requirements files SHA256 hash + id: compute_requirements_files_sha256_hash + uses: ./.github/actions/misc/compute_files_hash + with: + file_paths: ${{ inputs.requirements_paths }} + + - name: Cache virtual environment + uses: actions/cache/save@v4 + with: + path: ${{ inputs.virtual_environment_path }} + key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }} \ No newline at end of file diff --git a/workflows/pull_request_automation.yml b/workflows/pull_request_automation.yml index 7c3ffb4..71deca6 100644 --- a/workflows/pull_request_automation.yml +++ b/workflows/pull_request_automation.yml @@ -101,6 +101,6 @@ jobs: env: >- {"KEY": "VALUE"} python_versions: >- - ["3.10"] + ["3.12"] max_timeout: 15 ubuntu_version: latest