Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 15 additions & 12 deletions .github/actions/apt_requirements/action.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
name: Composite action install apt requirements
description: Composite action install apt requirements
name: Composite action install APT requirements
description: Composite action to install APT requirements
inputs:
requirements_file:
description: Requirements file
Expand All @@ -8,12 +8,11 @@ inputs:
runs:
using: "composite"
steps:
- name: Compute APT requirements file SHA256 hash
- name: Compute apt requirements file SHA256 hash
id: compute_apt_requirements_file_sha256_hash
run: |
COMPUTED_HASH=$(sha256sum ${{ inputs.requirements_file }} | cut -d ' ' -f 1)
echo "cache_key=$COMPUTED_HASH" >> $GITHUB_OUTPUT
shell: bash
uses: ./.github/actions/misc/compute_files_hash
with:
file_paths: ${{ inputs.requirements_file }}

# Vital to be able to restore cache
# If write permission is not set, a permissions error will be raised
Expand All @@ -26,17 +25,19 @@ runs:
id: restore_cache_from_parent_branch
with:
path: /var/cache/apt/archives/*.deb
key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }}
key: ${{ github.base_ref }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }}

- uses: actions/cache/restore@v4
id: restore_cache_from_current_branch
if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true'
with:
path: /var/cache/apt/archives/*.deb
key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }}
key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }}

- name: Refresh repositories
if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && steps.restore_cache_from_current_branch.outputs.cache-hit != 'true'
if: >
steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' &&
steps.restore_cache_from_current_branch.outputs.cache-hit != 'true'
run: |
sudo apt-get update
shell: bash
Expand All @@ -48,7 +49,9 @@ runs:

- uses: actions/cache/save@v4
id: cache_apt_requirements_for_current_branch
if: steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' && steps.restore_cache_from_current_branch.outputs.cache-hit != 'true'
if: >
steps.restore_cache_from_parent_branch.outputs.cache-hit != 'true' &&
steps.restore_cache_from_current_branch.outputs.cache-hit != 'true'
with:
path: /var/cache/apt/archives/*.deb
key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.cache_key }}
key: ${{ github.ref_name }}-${{ steps.compute_apt_requirements_file_sha256_hash.outputs.computed_hash }}
38 changes: 38 additions & 0 deletions .github/actions/misc/compute_files_hash/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
name: Composite action compute files hash
description: Composite action to compute a single hash of one or more files
inputs:
  file_paths:
    description: Comma separated list of files.
    required: false

outputs:
  computed_hash:
    description: The hash of the concatenated files
    value: ${{ steps.compute_files_sha256_hash.outputs.computed_hash }}

runs:
  using: "composite"
  steps:
    - name: Compute files SHA256 hash
      id: compute_files_sha256_hash
      run: |
        # Split the comma-separated input into a bash array of file paths
        IFS=',' read -r -a files <<< "${{ inputs.file_paths }}"
        echo "::debug::File paths array is composed by: ${files[*]}"
        # Validate every path first; collect them in an array instead of
        # building a command string (word-splitting a string breaks on
        # paths containing spaces)
        to_hash=()
        for file in "${files[@]}";
        do
          if [[ -f "$file" ]]; then
            to_hash+=("$file")
            echo "::debug::Current file is $file"
          else
            echo "::error::$file does not exist or it is not a regular file!"
            exit 1
          fi
        done
        # Hash the concatenation of all files so any change in any file
        # produces a new hash
        COMPUTED_HASH=$(cat "${to_hash[@]}" | sha256sum | cut -d ' ' -f 1)
        echo "::debug::Hash is $COMPUTED_HASH"
        echo "computed_hash=$COMPUTED_HASH" >> $GITHUB_OUTPUT
      shell: bash
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
name: Composite action create Python dev requirements file
description: Composite action to create Python dev requirements file
inputs:
  install_from:
    description: Directory that must be used to install the packages
    required: false
    default: .
  project_dev_requirements_file:
    description: An additional project dev requirements file
    required: false
  use_coverage:
    description: Use coverage.py
    required: false

runs:
  using: "composite"
  steps:
    - name: Create requirements-dev.txt
      run: |
        # Truncate (or create) the requirements file without writing a
        # leading blank line (the original `echo >` wrote one)
        : > requirements-dev.txt
        # Coverage is included unless explicitly disabled
        if [[ '${{ inputs.use_coverage }}' != 'false' ]]; then
          echo "coverage>=7.3.2" >> requirements-dev.txt
        fi
        # Append the project dev requirements only when a file was provided.
        # The original tested -z (empty), which skipped a provided file and
        # ran a bare `cat` when none was given.
        if [[ -n '${{ inputs.project_dev_requirements_file }}' ]]; then
          cat ${{ inputs.project_dev_requirements_file }} >> requirements-dev.txt
        fi
      shell: bash
      working-directory: ${{ inputs.install_from }}
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
name: Composite action create Python docs requirements file
description: Composite action to create Python docs requirements file
inputs:
  install_from:
    description: Directory that must be used to install the packages
    required: false
    default: .
  project_docs_requirements_file:
    description: An additional project docs requirements file
    required: false
  django_settings_module:
    description: Path to the django settings file
    required: false
  check_docs_directory:
    description: Check docs using rstcheck inside this directory
    required: false

runs:
  using: "composite"
  steps:
    - name: Create requirements-docs.txt
      run: |
        # Truncate (or create) the requirements file without a leading blank line
        : > requirements-docs.txt
        # Docs tooling is only needed when a docs directory is configured
        if [[ -n '${{ inputs.check_docs_directory }}' ]]; then
          echo "rstcheck[sphinx]" >> requirements-docs.txt
          echo "sphinx==7.2.6" >> requirements-docs.txt
          echo "sphinx_rtd_theme==1.3.0" >> requirements-docs.txt
          echo "sphinxcontrib-spelling==8.0.0" >> requirements-docs.txt
          # Django projects additionally need the sphinx Django extension
          if [[ -n '${{ inputs.django_settings_module }}' ]]; then
            echo "sphinxcontrib-django2==1.9" >> requirements-docs.txt
          fi
          # Append the project docs requirements only when a file was provided.
          # The original tested -z, which skipped a provided file and ran a
          # bare `cat` when none was given.
          # NOTE(review): kept nested under check_docs_directory as in the
          # original — confirm whether it should apply unconditionally.
          if [[ -n '${{ inputs.project_docs_requirements_file }}' ]]; then
            cat ${{ inputs.project_docs_requirements_file }} >> requirements-docs.txt
          fi
        fi
      shell: bash
      working-directory: ${{ inputs.install_from }}
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
name: Composite action create Python linter requirements file
description: Composite action to create Python linter requirements file
inputs:
  install_from:
    description: Directory that must be used to install the packages
    required: false
    default: .
  project_linter_requirements_file:
    description: An additional project linter requirements file
    required: false
  django_settings_module:
    description: Path to the django settings file
    required: false
  use_autoflake:
    description: Use autoflake linter
    required: false
  use_bandit:
    description: Use bandit linter
    required: false
  # use_black and use_isort were referenced in the script below but never
  # declared, so they always expanded to '' and the tools were always added
  use_black:
    description: Use black linter
    required: false
  use_flake8:
    description: Use flake8 linter
    required: false
  use_isort:
    description: Use isort linter
    required: false
  use_pylint:
    description: Use pylint linter
    required: false

runs:
  using: "composite"
  steps:
    - name: Create requirements-linters.txt
      run: |
        function check_linter_dependency_and_append_to_file {
          #
          # Function to check whether a specific linter is in the requirements file
          # If it can be found inside the requirements, said linter dependency will be appended to a newly created requirements-linter.txt file.
          # If the linter is not found inside the requirements file an error will be raised.
          #
          # 1st parameter: Name of the linter.
          # 2nd parameter: Path of the requirements file.
          #
          # The pattern requires a non-identifier character after the name so
          # e.g. "flake8" does not match "flake8-django"
          if [[ -z $(grep -P "^$1[^a-zA-Z0-9_-].*" "$2") ]]; then
            echo "::error::$1 dependency not found in $2 file!"
            exit 1
          else
            echo "$1 dependency found in $2!"
            # Quote pattern and path (the original left both unquoted here)
            grep -P "^$1[^a-zA-Z0-9_-].*" "$2" >> requirements-linters.txt
          fi
        }
        # Central, CI-managed pin file for all linter versions
        CI_REQUIREMENTS_LINTERS="${GITHUB_WORKSPACE}/.github/configurations/python_linters/requirements-linters.txt"
        # Truncate (or create) the output file without a leading blank line
        : > requirements-linters.txt

        if [[ '${{ inputs.use_black }}' != 'false' ]]; then
          check_linter_dependency_and_append_to_file "black" "$CI_REQUIREMENTS_LINTERS"
        fi

        if [[ '${{ inputs.use_isort }}' != 'false' ]]; then
          check_linter_dependency_and_append_to_file "isort" "$CI_REQUIREMENTS_LINTERS"
        fi

        if [[ '${{ inputs.use_flake8 }}' != 'false' ]]; then
          check_linter_dependency_and_append_to_file "flake8" "$CI_REQUIREMENTS_LINTERS"
          # Django projects also get the Django-specific flake8 plugin
          if [[ -n '${{ inputs.django_settings_module }}' ]]; then
            check_linter_dependency_and_append_to_file "flake8-django" "$CI_REQUIREMENTS_LINTERS"
          fi
        fi

        if [[ '${{ inputs.use_pylint }}' != 'false' ]]; then
          check_linter_dependency_and_append_to_file "pylint" "$CI_REQUIREMENTS_LINTERS"
          # Django projects also get the Django-specific pylint plugin
          if [[ -n '${{ inputs.django_settings_module }}' ]]; then
            check_linter_dependency_and_append_to_file "pylint-django" "$CI_REQUIREMENTS_LINTERS"
          fi
        fi

        if [[ '${{ inputs.use_bandit }}' != 'false' ]]; then
          check_linter_dependency_and_append_to_file "bandit" "$CI_REQUIREMENTS_LINTERS"
        fi
        if [[ '${{ inputs.use_autoflake }}' != 'false' ]]; then
          check_linter_dependency_and_append_to_file "autoflake" "$CI_REQUIREMENTS_LINTERS"
        fi

        # Append the project linter requirements only when a file was provided.
        # The original tested -z, which skipped a provided file and ran a bare
        # `cat` when none was given.
        if [[ -n '${{ inputs.project_linter_requirements_file }}' ]]; then
          cat ${{ inputs.project_linter_requirements_file }} >> requirements-linters.txt
        fi
      shell: bash
      working-directory: ${{ inputs.install_from }}
26 changes: 26 additions & 0 deletions .github/actions/python_requirements/create_virtualenv/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
name: Composite action create Python virtual environment
# "to create" matches the phrasing of every sibling composite action
description: Composite action to create Python virtual environment
inputs:
  virtualenv_path:
    description: Python's virtual environment path.
    required: false
    default: ".venv"
  activate_only:
    description: Whether to create the virtual environment or only activate it.
    required: false
    # Quoted — composite-action inputs are strings, and the step below
    # compares against the string 'true'
    default: "false"

runs:
  using: "composite"
  steps:
    - name: Python's virtualenv creation
      if: inputs.activate_only != 'true'
      run: python -m venv ${{ inputs.virtualenv_path }}
      shell: bash
    # `source` only affects this step's shell, so export VIRTUAL_ENV and
    # prepend the venv's bin/ for all subsequent steps of the job
    - name: Activate newly created virtualenv
      id: activate_newly_created_virtualenv
      run: |
        source ${{ inputs.virtualenv_path }}/bin/activate
        echo "VIRTUAL_ENV=$VIRTUAL_ENV" >> $GITHUB_ENV
        echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH
      shell: bash
52 changes: 52 additions & 0 deletions .github/actions/python_requirements/restore_pip_cache/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
name: Composite action restore pip's cache
description: Composite action to restore pip's cache
inputs:
  custom_pip_cache_path:
    description: Path to pip cache.
    required: false
  git_reference:
    description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key.
    required: false
    default: ${{ github.ref_name }}

outputs:
  cache-hit:
    description: Whether pip cache was found in the cache or not.
    value: ${{ steps.explain_cache_output.outputs.real_cache_hit }}

runs:
  using: "composite"
  steps:
    # A random primary key guarantees a miss on `key`, so the restore step
    # always falls through to the `restore-keys` prefix match below
    - name: Generate random UUID
      id: generate_random_uuid
      run: |
        random_uuid=$(uuidgen -r)
        echo "::debug::Random uuid generated is $random_uuid. Should only cause a cache-miss"
        echo "computed_uuid=$random_uuid" >> $GITHUB_OUTPUT
      shell: bash
    # Use the caller-supplied cache path when given, otherwise ask pip
    - name: Get pip cache directory
      id: get_pip_cache_directory
      run: |
        if [[ -n '${{ inputs.custom_pip_cache_path }}' ]]; then
          echo "pip_cache_path=${{ inputs.custom_pip_cache_path }}" >> $GITHUB_OUTPUT
        else
          echo "pip_cache_path=$(pip cache dir)" >> $GITHUB_OUTPUT
        fi
      shell: bash
    - name: Restore pip cache
      id: restore_pip_cache
      uses: actions/cache/restore@v4
      with:
        key: ${{ steps.generate_random_uuid.outputs.computed_uuid }}
        path: ${{ steps.get_pip_cache_directory.outputs.pip_cache_path }}
        restore-keys: ${{ inputs.git_reference }}-pip-cache-
    # The random key means the restore step never reports a plain cache-hit,
    # so derive the real hit from whether any key actually matched
    - name: Explain cache output
      id: explain_cache_output
      run: |
        echo "::debug::Restore action for pip's cache returned cache-hit: ${{ steps.restore_pip_cache.outputs.cache-hit }} with cache-matched-key: ${{ steps.restore_pip_cache.outputs.cache-matched-key }}"
        if [[ -n '${{ steps.restore_pip_cache.outputs.cache-matched-key }}' ]]; then
          echo "real_cache_hit=true" >> $GITHUB_OUTPUT
        else
          echo "real_cache_hit=false" >> $GITHUB_OUTPUT
        fi
      shell: bash
43 changes: 43 additions & 0 deletions .github/actions/python_requirements/restore_virtualenv/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
name: Composite action restore Python virtual environment
description: Composite action to restore Python virtual environment
inputs:
  virtual_environment_path:
    description: Path to where virtual environment will be restored.
    required: false
    default: ".venv"
  requirements_paths:
    description: Comma separated list of requirements files. They will be used to compute the hash for the cache key.
    required: false
  git_reference:
    description: A git reference (name of the branch, reference to the PR) that will be used to build the cache key.
    required: false
    default: ${{ github.ref_name }}

outputs:
  cache-hit:
    description: Whether virtual environment was found in the cache or not.
    value: ${{ steps.restore_virtual_environment.outputs.cache-hit }}

runs:
  using: "composite"
  steps:
    # Cache key is <git ref>-<hash of all requirements files>, so any
    # requirements change invalidates the cached virtualenv
    - name: Compute requirements files SHA256 hash
      id: compute_requirements_files_sha256_hash
      uses: ./.github/actions/misc/compute_files_hash
      with:
        file_paths: ${{ inputs.requirements_paths }}

    - name: Restore virtual environment
      id: restore_virtual_environment
      uses: actions/cache/restore@v4
      with:
        path: ${{ inputs.virtual_environment_path }}
        key: ${{ inputs.git_reference }}-${{ steps.compute_requirements_files_sha256_hash.outputs.computed_hash }}

    # On a hit the venv already exists on disk; only re-activate it
    - name: Activate restored virtual environment
      if: >
        steps.restore_virtual_environment.outputs.cache-hit == 'true'
      uses: ./.github/actions/python_requirements/create_virtualenv
      with:
        virtualenv_path: ${{ inputs.virtual_environment_path }}
        activate_only: true
Loading
Loading