--- /dev/null
+[flake8]
+#ignore = E226,E302,E41
+max-line-length = 90
+exclude = tests/*
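+# Enable McCabe complexity checks (provided by flake8's bundled mccabe plugin)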
+max-complexity = 10
--- /dev/null
+---
+name: '🧪 Test builds (matrix)'
+
+# yamllint disable-line rule:truthy
+on:
+ workflow_dispatch:
+
+jobs:
+ pre-release:
+ runs-on: 'ubuntu-latest'
+ continue-on-error: true
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: ['3.9', '3.10', '3.11']
+ steps:
+ - name: 'Populate environment variables'
+ id: setenv
+ run: |
+ echo "Action triggered by user: ${GITHUB_TRIGGERING_ACTOR}"
+ set -x
+ datetime=$(date +'%Y%m%d%H%M')
+ export datetime
+ echo "datetime=${datetime}" >> "$GITHUB_OUTPUT"
+ vernum="${{ matrix.python-version }}.${datetime}"
+ echo "vernum=${vernum}" >> "$GITHUB_OUTPUT"
+
+ - name: 'Checkout repository'
+ uses: actions/checkout@v4
+
+ - name: 'Set up Python ${{ matrix.python-version }}'
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: 'Install dependencies'
+ run: |
+ python -m pip install --upgrade pip
+ pip install tox tox-gh-actions
+
+ - name: 'Tag for test release'
+ # Delete all local tags, then create a synthetic tag for testing
+ # Use the date/time to avoid conflicts uploading to Test PyPI
+ run: |
+ scripts/dev-versioning.sh "${{ steps.setenv.outputs.vernum }}"
+ git tag | xargs -L 1 | xargs git tag --delete
+ git tag "v${{ steps.setenv.outputs.vernum }}"
+ git checkout "tags/v${{ steps.setenv.outputs.vernum }}"
+ grep version pyproject.toml
+
+ - name: 'Build with TOX'
+ run: |
+ tox -e build
# yamllint disable-line rule:truthy
on:
- workflow_dispatch:
- inputs:
- GERRIT_BRANCH:
- description: 'Branch that change is against'
- required: true
- type: string
- GERRIT_CHANGE_ID:
- description: 'The ID for the change'
- required: true
- type: string
- GERRIT_CHANGE_NUMBER:
- description: 'The Gerrit number'
- required: true
- type: string
- GERRIT_CHANGE_URL:
- description: 'URL to the change'
- required: true
- type: string
- GERRIT_EVENT_TYPE:
- description: 'Type of Gerrit event'
- required: true
- type: string
- GERRIT_PATCHSET_NUMBER:
- description: 'The patch number for the change'
- required: true
- type: string
- GERRIT_PATCHSET_REVISION:
- description: 'The revision sha'
- required: true
- type: string
- GERRIT_PROJECT:
- description: 'Project in Gerrit'
- required: true
- type: string
- GERRIT_REFSPEC:
- description: 'Gerrit refspec of change'
- required: true
- type: string
-
+ workflow_dispatch:
+ inputs:
+ GERRIT_BRANCH:
+ description: 'Branch that change is against'
+ required: true
+ type: string
+ GERRIT_CHANGE_ID:
+ description: 'The ID for the change'
+ required: true
+ type: string
+ GERRIT_CHANGE_NUMBER:
+ description: 'The Gerrit number'
+ required: true
+ type: string
+ GERRIT_CHANGE_URL:
+ description: 'URL to the change'
+ required: true
+ type: string
+ GERRIT_EVENT_TYPE:
+ description: 'Type of Gerrit event'
+ required: true
+ type: string
+ GERRIT_PATCHSET_NUMBER:
+ description: 'The patch number for the change'
+ required: true
+ type: string
+ GERRIT_PATCHSET_REVISION:
+ description: 'The revision sha'
+ required: true
+ type: string
+ GERRIT_PROJECT:
+ description: 'Project in Gerrit'
+ required: true
+ type: string
+ GERRIT_REFSPEC:
+ description: 'Gerrit refspec of change'
+ required: true
+ type: string
concurrency:
- group: ${{ github.event.inputs.GERRIT_CHANGE_ID || github.run_id }}
- cancel-in-progress: true
-
+ group: ${{ github.event.inputs.GERRIT_CHANGE_ID || github.run_id }}
+ cancel-in-progress: true
jobs:
- prepare:
- runs-on: ubuntu-latest
- outputs:
- wheel-distribution: ${{ steps.wheel-distribution.outputs.path }}
- steps:
- - name: Clear votes
- uses: lfit/gerrit-review-action@v0.3
- with:
- host: ${{ vars.LFIT_GERRIT_SERVER }}
- username: ${{ vars.LFIT_GERRIT_SSH_USER }}
- key: ${{ secrets.LFIT_GERRIT_SSH_PRIVKEY_NP }}
- known_hosts: ${{ vars.LFIT_GERRIT_KNOWN_HOSTS }}
- gerrit-change-number: ${{ inputs.GERRIT_CHANGE_NUMBER }}
- gerrit-patchset-number: ${{ inputs.GERRIT_PATCHSET_NUMBER }}
- vote-type: clear
- - uses: lfit/checkout-gerrit-change-action@v0.3
- with:
- gerrit-refspec: ${{ inputs.GERRIT_REFSPEC }}
- - uses: actions/setup-python@v4
- with:
- python-version: '3.9'
- - name: Run static analysis and format checkers
- run: pipx run pre-commit run --all-files --show-diff-on-failure
- - name: Build package distribution files
- run: >-
- pipx run tox -e clean,build
- - name: Record the path of wheel distribution
- id: wheel-distribution
- run: echo "path=$(ls dist/*.whl)" >> $GITHUB_OUTPUT
- - name: Store the distribution files for use in other stages
- # `tests` and `publish` will use the same pre-built distributions,
- # so we make sure to release the exact same package that was tested
- uses: actions/upload-artifact@v3
- with:
- name: python-distribution-files
- path: dist/
- retention-days: 1
-
- test:
- needs: prepare
- strategy:
- matrix:
- python:
- - "3.9"
- - "3.10"
- - "3.11"
- platform:
- - ubuntu-latest
- runs-on: ${{ matrix.platform }}
- steps:
- - uses: lfit/checkout-gerrit-change-action@v0.3
- with:
- gerrit-refspec: ${{ inputs.GERRIT_REFSPEC }}
- delay: "0s"
- - uses: actions/setup-python@v4
- id: setup-python
- with:
- python-version: ${{ matrix.python }}
- - name: Retrieve pre-built distribution files
- uses: actions/download-artifact@v3
- with: {name: python-distribution-files, path: dist/}
- - name: Run tests
- run: >-
- pipx run --python '${{ steps.setup-python.outputs.python-path }}'
- tox --installpkg '${{ needs.prepare.outputs.wheel-distribution }}'
- -- -rFEx --durations 10 --color yes # pytest args
- - name: Generate coverage report
- run: pipx run coverage lcov -o coverage.lcov
- - name: Upload partial coverage report
- uses: coverallsapp/github-action@master
- with:
- path-to-lcov: coverage.lcov
- github-token: ${{ secrets.github_token }}
- flag-name: ${{ matrix.platform }} - py${{ matrix.python }}
- parallel: true
+ prepare:
+ runs-on: ubuntu-latest
+ outputs:
+ wheel-distribution: ${{ steps.wheel-distribution.outputs.path }}
+ steps:
+ - name: Clear votes
+ uses: lfit/gerrit-review-action@v0.3
+ with:
+ host: ${{ vars.LFIT_GERRIT_SERVER }}
+ username: ${{ vars.LFIT_GERRIT_SSH_USER }}
+ key: ${{ secrets.LFIT_GERRIT_SSH_PRIVKEY_NP }}
+ known_hosts: ${{ vars.LFIT_GERRIT_KNOWN_HOSTS }}
+ gerrit-change-number: ${{ inputs.GERRIT_CHANGE_NUMBER }}
+ gerrit-patchset-number: ${{ inputs.GERRIT_PATCHSET_NUMBER }}
+ vote-type: clear
+ - uses: lfit/checkout-gerrit-change-action@v0.3
+ with:
+ gerrit-refspec: ${{ inputs.GERRIT_REFSPEC }}
+ - uses: actions/setup-python@v4
+ with:
+ python-version: '3.9'
+ - name: Run static analysis and format checkers
+ run: pipx run pre-commit run --all-files --show-diff-on-failure
+ - name: Build package distribution files
+ run: >-
+ pipx run tox -e clean,build
+ - name: Record the path of wheel distribution
+ id: wheel-distribution
+ run: echo "path=$(ls dist/*.whl)" >> "${GITHUB_OUTPUT}"
+ - name: Store the distribution files for use in other stages
+ # `tests` and `publish` will use the same pre-built distributions,
+ # so we make sure to release the exact same package that was tested
+ uses: actions/upload-artifact@v3
+ with:
+ name: python-distribution-files
+ path: dist/
+ retention-days: 1
+ test:
+ needs: prepare
+ strategy:
+ matrix:
+ python:
+ - '3.9'
+ - '3.10'
+ - '3.11'
+ platform:
+ - ubuntu-latest
+ runs-on: ${{ matrix.platform }}
+ steps:
+ - uses: lfit/checkout-gerrit-change-action@v0.3
+ with:
+ gerrit-refspec: ${{ inputs.GERRIT_REFSPEC }}
+ delay: '0s'
+ - uses: actions/setup-python@v4
+ id: setup-python
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Retrieve pre-built distribution files
+ uses: actions/download-artifact@v3
+ with: { name: python-distribution-files, path: dist/ }
+ - name: Run tests
+ run: >-
+ pipx run --python '${{ steps.setup-python.outputs.python-path }}'
+ tox --installpkg '${{ needs.prepare.outputs.wheel-distribution }}'
+ -- -rFEx --durations 10 --color yes # pytest args
+ - name: Generate coverage report
+ run: pipx run coverage lcov -o coverage.lcov
+ - name: Upload partial coverage report
+ uses: coverallsapp/github-action@master
+ with:
+ path-to-lcov: coverage.lcov
+ github-token: ${{ secrets.github_token }}
+ flag-name: ${{ matrix.platform }} - py${{ matrix.python }}
+ parallel: true
- finalize:
- needs: test
- runs-on: ubuntu-latest
- steps:
- - name: Finalize coverage report
- uses: coverallsapp/github-action@master
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
- parallel-finished: true
+ finalize:
+ needs: test
+ runs-on: ubuntu-latest
+ steps:
+ - name: Finalize coverage report
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ parallel-finished: true
- vote:
- if: ${{ always() }}
- needs: [prepare, test, finalize]
- runs-on: ubuntu-latest
- steps:
- - uses: technote-space/workflow-conclusion-action@v3
- - name: Set vote
- uses: lfit/gerrit-review-action@v0.3
- with:
- host: ${{ vars.LFIT_GERRIT_SERVER }}
- username: ${{ vars.LFIT_GERRIT_SSH_USER }}
- key: ${{ secrets.LFIT_GERRIT_SSH_PRIVKEY_NP }}
- known_hosts: ${{ vars.LFIT_GERRIT_KNOWN_HOSTS }}
- gerrit-change-number: ${{ inputs.GERRIT_CHANGE_NUMBER }}
- gerrit-patchset-number: ${{ inputs.GERRIT_PATCHSET_NUMBER }}
- vote-type: ${{ env.WORKFLOW_CONCLUSION }}
+ vote:
+ if: ${{ always() }}
+ needs: [prepare, test, finalize]
+ runs-on: ubuntu-latest
+ steps:
+ - uses: technote-space/workflow-conclusion-action@v3
+ - name: Set vote
+ uses: lfit/gerrit-review-action@v0.3
+ with:
+ host: ${{ vars.LFIT_GERRIT_SERVER }}
+ username: ${{ vars.LFIT_GERRIT_SSH_USER }}
+ key: ${{ secrets.LFIT_GERRIT_SSH_PRIVKEY_NP }}
+ known_hosts: ${{ vars.LFIT_GERRIT_KNOWN_HOSTS }}
+ gerrit-change-number: ${{ inputs.GERRIT_CHANGE_NUMBER }}
+ gerrit-patchset-number: ${{ inputs.GERRIT_PATCHSET_NUMBER }}
+ vote-type: ${{ env.WORKFLOW_CONCLUSION }}
---
-name: PyPI release
+name: '🐍📦 Production build and release'
+
+# GitHub/PyPI trusted publisher documentation:
+# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
# yamllint disable-line rule:truthy
-on: push
+on:
+ # workflow_dispatch:
+ push:
+ # Only invoked on release tag pushes
+ tags:
+ - v*.*.*
+
+env:
+ python-version: '3.10'
+
+### BUILD ###
jobs:
- publish:
- if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repository
- uses: actions/checkout@v3
- - name: Configure Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.8'
- - name: Build package distribution files
- run: >-
- pipx run tox -e clean,build
- - name: Configure TWINE
- run: |
- echo "TWINE_USERNAME=__token__" >> $GITHUB_ENV
- echo "TWINE_PASSWORD=${{ secrets.PYPI_API_TOKEN }}" >> $GITHUB_ENV
- - name: Publish to PyPI
- run: >-
- pipx run tox -e publish -- --repository pypi
+ build:
+ name: '🐍 Build packages'
+ runs-on: ubuntu-latest
+ permissions:
+ # IMPORTANT: mandatory for Sigstore
+ id-token: write
+ steps:
+ ### BUILDING ###
+
+ - name: 'Checkout repository'
+ uses: actions/checkout@v4
+
+ - name: 'Setup PDM for build commands'
+ uses: pdm-project/setup-pdm@v3
+ with:
+ version: 2.10.0
+
+ - name: 'Setup Python 3.10'
+ uses: actions/setup-python@v4.7.0
+ with:
+ python-version: ${{ env.python-version }}
+
+ - name: 'Update version from tags for production release'
+ run: |
+ echo "Github versioning: ${{ github.ref_name }}"
+ scripts/release-versioning.sh
+
+ - name: 'Build with PDM backend'
+ run: |
+ pdm build
+
+ ### SIGNING ###
+
+ - name: 'Sign packages with Sigstore'
+ uses: sigstore/gh-action-sigstore-python@v1.2.3
+ with:
+ inputs: >-
+ ./dist/*.tar.gz
+ ./dist/*.whl
+
+ - name: Store the distribution packages
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ github.ref_name }}
+ path: dist/
+
+ ### PUBLISH GITHUB ###
+
+ github:
+ name: '📦 Publish to GitHub'
+ # Only publish on tag pushes
+ if: startsWith(github.ref, 'refs/tags/')
+ needs:
+ - build
+ runs-on: ubuntu-latest
+ permissions:
+ # IMPORTANT: mandatory to publish artefacts
+ contents: write
+ steps:
+ - name: '⬇ Download build artefacts'
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ github.ref_name }}
+ path: dist/
+
+ - name: '📦 Publish release to GitHub'
+ uses: ModeSevenIndustrialSolutions/action-automatic-releases@latest
+ with:
+ # Valid inputs are:
+ # repo_token, automatic_release_tag, draft, prerelease, title, files
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ prerelease: false
+ automatic_release_tag: ${{ github.ref_name }}
+ title: ${{ github.ref_name }}
+ files: |
+ dist/*.tar.gz
+ dist/*.whl
+
+ ### PUBLISH PYPI TEST ###
+
+ testpypi:
+ name: '📦 Publish to Test PyPI'
+ # Only publish on tag pushes
+ if: startsWith(github.ref, 'refs/tags/')
+ needs:
+ - build
+ runs-on: ubuntu-latest
+ environment:
+ name: testpypi
+ permissions:
+ # IMPORTANT: mandatory for trusted publishing
+ id-token: write
+ steps:
+ - name: '⬇ Download build artefacts'
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ github.ref_name }}
+ path: dist/
+
+ - name: 'Remove files unsupported by PyPI'
+ run: |
+ if [ -f dist/buildvars.txt ]; then
+ rm dist/buildvars.txt
+ fi
+ rm dist/*.crt dist/*.sig*
+
+ - name: Publish distribution to Test PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ repository-url: https://test.pypi.org/legacy/
+ verbose: true
+
+ ### PUBLISH PYPI ###
+
+ pypi:
+ name: '📦 Publish to PyPI'
+ # Only publish on tag pushes
+ if: startsWith(github.ref, 'refs/tags/')
+ needs:
+ - testpypi
+ runs-on: ubuntu-latest
+ environment:
+ name: pypi
+ permissions:
+ # IMPORTANT: mandatory for trusted publishing
+ id-token: write
+ steps:
+ - name: '⬇ Download build artefacts'
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ github.ref_name }}
+ path: dist/
+
+ - name: 'Remove files unsupported by PyPI'
+ run: |
+ if [ -f dist/buildvars.txt ]; then
+ rm dist/buildvars.txt
+ fi
+ rm dist/*.crt dist/*.sig*
+
+ - name: 'Setup PDM for build commands'
+ uses: pdm-project/setup-pdm@v3
+
+ - name: 'Publish release to PyPI'
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ verbose: true
--- /dev/null
+---
+# This workflow will install Python dependencies
+# run tests and lint with a variety of Python versions
+# For more information see:
+# https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: '⛔️ Security auditing'
+
+# yamllint disable-line rule:truthy
+on:
+ workflow_dispatch:
+
+jobs:
+ build:
+ name: 'Audit Python dependencies'
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: ['3.9', '3.10', '3.11']
+ steps:
+ - name: 'Checkout repository'
+ uses: actions/checkout@v4
+
+ - name: 'Setup PDM for build commands'
+ uses: pdm-project/setup-pdm@v3
+ with:
+ version: 2.10.0
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: 'Install dependencies'
+ run: |
+ pip install --upgrade pip
+ pdm lock
+ pdm export -o requirements.txt
+ python -m pip install -r requirements.txt
+ python -m pip install .
+
+ - name: 'Run: pip-audit'
+ uses: pypa/gh-action-pip-audit@v1.0.8
+ with:
+ ignore-vulns: |
+ PYSEC-2023-163
--- /dev/null
+---
+name: '🐍📦 Test build and release'
+
+# GitHub/PyPI trusted publisher documentation:
+# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
+
+# yamllint disable-line rule:truthy
+on:
+ workflow_dispatch:
+
+env:
+ python-version: '3.10'
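+ # Also used as the prefix of the synthetic test release version number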
+
+### BUILD ###
+
+jobs:
+ build:
+ name: '🐍 Build packages'
+ runs-on: ubuntu-latest
+ permissions:
+ # IMPORTANT: mandatory for Sigstore
+ id-token: write
+ steps:
+ ### BUILDING ###
+
+ - name: 'Checkout repository'
+ uses: actions/checkout@v4
+
+ - name: 'Setup PDM for build commands'
+ uses: pdm-project/setup-pdm@v3
+ with:
+ version: 2.10.0
+
+ - name: 'Populate environment variables'
+ id: setenv
+ run: |
+ vernum="${{ env.python-version }}.$(date +'%Y%m%d%H%M')"
+ echo "vernum=${vernum}" >> "$GITHUB_OUTPUT"
+ echo "vernum=${vernum}" >> buildvars.txt
+
+ - name: 'Setup Python 3.10'
+ uses: actions/setup-python@v4.7.0
+ with:
+ python-version: ${{ env.python-version }}
+
+ - name: 'Tag for test release'
+ # Delete all local tags, then create a synthetic tag for testing
+ # Use the date/time to avoid conflicts uploading to Test PyPI
+ run: |
+ scripts/dev-versioning.sh "${{ steps.setenv.outputs.vernum }}"
+ git tag | xargs -L 1 | xargs git tag --delete
+ git tag "v${{ steps.setenv.outputs.vernum }}"
+ git checkout "tags/v${{ steps.setenv.outputs.vernum }}"
+ grep version pyproject.toml
+
+ - name: 'Build with PDM backend'
+ run: |
+ pdm build
+ # Need to save the build environment for subsequent steps
+ mv buildvars.txt dist/buildvars.txt
+
+ ### SIGNING ###
+
+ - name: 'Sign packages with Sigstore'
+ uses: sigstore/gh-action-sigstore-python@v1.2.3
+ with:
+ inputs: >-
+ ./dist/*.tar.gz
+ ./dist/*.whl
+
+ - name: Store the distribution packages
+ uses: actions/upload-artifact@v3
+ with:
+ name: Development
+ path: dist/
+
+ ### PUBLISH GITHUB ###
+
+ github:
+ name: '📦 Test publish to GitHub'
+ needs:
+ - build
+ runs-on: ubuntu-latest
+ permissions:
+ # IMPORTANT: mandatory to publish artefacts
+ contents: write
+ steps:
+ - name: '⬇ Download build artefacts'
+ uses: actions/download-artifact@v3
+ with:
+ name: Development
+ path: dist/
+
+ - name: 'Source environment variables'
+ id: setenv
+ run: |
+ if [ -f dist/buildvars.txt ]; then
+ source dist/buildvars.txt
+ echo "vernum=${vernum}" >> "$GITHUB_OUTPUT"
+ else
+ echo "Build environment variables could not be sourced"
+ fi
+ echo "tarball=$(ls dist/*.tar.gz)" >> "$GITHUB_OUTPUT"
+ echo "wheel=$(ls dist/*.whl)" >> "$GITHUB_OUTPUT"
+
+ - name: '📦 Publish packages to GitHub'
+ uses: ModeSevenIndustrialSolutions/action-automatic-releases@latest
+ with:
+ # Valid inputs are:
+ # repo_token, automatic_release_tag, draft, prerelease, title, files
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ prerelease: true
+ automatic_release_tag: ${{ steps.setenv.outputs.vernum }}
+ title: "Development Build \
+ ${{ steps.setenv.outputs.vernum }}"
+ files: |
+ dist/*.tar.gz
+ dist/*.whl
+
+ ### PUBLISH TEST PYPI ###
+
+ testpypi:
+ name: '📦 Test publish to PyPI'
+ needs:
+ - build
+ runs-on: ubuntu-latest
+ environment:
+ name: testpypi
+ permissions:
+ # IMPORTANT: mandatory for trusted publishing
+ id-token: write
+ steps:
+ - name: '⬇ Download build artefacts'
+ uses: actions/download-artifact@v3
+ with:
+ name: Development
+ path: dist/
+
+ - name: 'Remove files unsupported by PyPI'
+ run: |
+ if [ -f dist/buildvars.txt ]; then
+ rm dist/buildvars.txt
+ fi
+ rm dist/*.crt dist/*.sig*
+
+ - name: Publish distribution to Test PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ verbose: true
+ repository-url: https://test.pypi.org/legacy/
--- /dev/null
+---
+name: '🧪 Unit tests'
+
+# yamllint disable-line rule:truthy
+on:
+ workflow_dispatch:
+
+jobs:
+ build:
+ name: 'Run unit tests'
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: ['3.9', '3.10', '3.11']
+ steps:
+ - name: 'Checkout repository'
+ uses: actions/checkout@v4
+
+ - name: 'Setup PDM for build commands'
+ uses: pdm-project/setup-pdm@v3
+ with:
+ version: 2.10.0
+
+ - name: 'Setup Python ${{ matrix.python-version }}'
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: 'Install dependencies'
+ run: |
+ python -m pip install --upgrade pip
+ pdm export -o requirements.txt
+ pip install -r requirements.txt
+ pip install .
+
+ - name: 'Run unit tests: pytest'
+ run: python -m pytest test
---
-exclude: '^docs/conf.py'
+ci:
+ autofix_commit_msg: "Chore: pre-commit autoupdate"
+ skip:
+ # pre-commit.ci cannot install wget, so tomllint must be disabled
+ - tomllint
+ # - pre-commit-update
+
+exclude: |
+ (?x)^(
+ docs\/conf.py|
+ dco-signoffs/$
+ )$
repos:
+
+ # - repo: https://gitlab.com/vojko.pribudic/pre-commit-update
+ # rev: v0.1.0
+ # hooks:
+ # - id: pre-commit-update
+ # args: [--dry-run]
+
+ - repo: local
+ hooks:
+ - id: tomllint
+ name: "Script: scripts/tomllint.sh"
+ language: script
+ # pass_filenames: false
+ files: \.toml$
+ types: [file]
+ entry: scripts/tomllint.sh .
+
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- - id: trailing-whitespace
- id: check-added-large-files
- id: check-ast
+ - id: check-case-conflict
+ - id: check-executables-have-shebangs
- id: check-json
- id: check-merge-conflict
+ - id: check-shebang-scripts-are-executable
+ - id: check-symlinks
+ - id: check-toml
+ # - id: detect-aws-credentials
- id: check-xml
- id: check-yaml
- id: debug-statements
+ - id: detect-private-key
- id: end-of-file-fixer
- - id: requirements-txt-fixer
- id: mixed-line-ending
- args: ['--fix=lf']
+ args: ["--fix=lf"]
+ - id: name-tests-test
+ # Do not allow direct push to main/master branches
+ - id: no-commit-to-branch
+ # - id: pretty-format-json
+ - id: requirements-txt-fixer
+ - id: trailing-whitespace
+
+ # Autoformat: YAML, JSON, Markdown, etc.
+ - repo: https://github.com/pre-commit/mirrors-prettier
+ rev: v4.0.0-alpha.8
+ hooks:
+ - id: prettier
+ args:
+ ["--ignore-unknown", "--no-error-on-unmatched-pattern",
+ "--log-level=debug"]
+
+ # Lint: Markdown
+ - repo: https://github.com/igorshubovych/markdownlint-cli
+ rev: v0.38.0
+ hooks:
+ - id: markdownlint
+ args: ["--fix"]
+
+ # - repo: https://github.com/asottile/pyupgrade
+ # rev: v3.15.0
+ # hooks:
+ # - id: pyupgrade
+ # args: ['--py37-plus']
+
+ - repo: https://github.com/psf/black-pre-commit-mirror
+ rev: 23.12.0
+ hooks:
+ - id: black
+ - id: black-jupyter
- repo: https://github.com/jorisroovers/gitlint
- rev: v0.17.0
+ rev: v0.19.1
hooks:
- id: gitlint
-# If you want to automatically "modernize" your Python code:
-# - repo: https://github.com/asottile/pyupgrade
-# rev: v3.3.1
-# hooks:
-# - id: pyupgrade
-# args: ['--py37-plus']
-
-# If you want to avoid flake8 errors due to unused vars or imports:
-# - repo: https://github.com/PyCQA/autoflake
-# rev: v2.0.0
-# hooks:
-# - id: autoflake
-# args: [
-# --in-place,
-# --remove-all-unused-imports,
-# --remove-unused-variables,
-# ]
+ - repo: https://github.com/openstack/bashate
+ rev: 2.1.1
+ hooks:
+ - id: bashate
+ args: ["--ignore=E006"]
+
+ - repo: https://github.com/shellcheck-py/shellcheck-py
+ rev: v0.9.0.6
+ hooks:
+ - id: shellcheck
+ # Optionally only show errors and warnings
+ # args: ["--severity=warning"]
+
+ # If you want to avoid flake8 errors due to unused vars or imports:
+ # - repo: https://github.com/PyCQA/autoflake
+ # rev: v2.0.0
+ # hooks:
+ # - id: autoflake
+ # args: [
+ # --in-place,
+ # --remove-all-unused-imports,
+ # --remove-unused-variables,
+ # ]
- repo: https://github.com/PyCQA/isort
- rev: 5.12.0
+ rev: 5.13.2
hooks:
- id: isort
- - repo: https://github.com/psf/black
- rev: 23.1.0
+ - repo: https://github.com/adrienverge/yamllint.git
+ rev: v1.33.0
hooks:
- - id: black
- language_version: python3
+ - id: yamllint
+ args: [--strict]
+
+ - repo: https://github.com/Mateusz-Grzelinski/actionlint-py
+ rev: v1.6.26.11
+ hooks:
+ - id: actionlint
-# If like to embrace black styles even in the docs:
-# - repo: https://github.com/asottile/blacken-docs
-# rev: v1.13.0
-# hooks:
-# - id: blacken-docs
-# additional_dependencies: [black]
+ # If like to embrace black styles even in the docs:
+ # - repo: https://github.com/asottile/blacken-docs
+ # rev: v1.13.0
+ # hooks:
+ # - id: blacken-docs
+ # additional_dependencies: [black]
- - repo: https://github.com/PyCQA/flake8
- rev: 6.0.0
+ - repo: https://github.com/pycqa/flake8
+ rev: "6.1.0"
hooks:
- id: flake8
- args: ["--max-line-length=120"]
- ## You can add flake8 plugins via `additional_dependencies`:
- # additional_dependencies: [flake8-bugbear]
-
-# Check for misspells in documentation files:
-# - repo: https://github.com/codespell-project/codespell
-# rev: v2.2.2
-# hooks:
-# - id: codespell
-
- - repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.0.0
- hooks:
- - id: mypy
+ entry: pflake8
+ additional_dependencies: [pyproject-flake8]
- - repo: https://github.com/adrienverge/yamllint.git
- rev: v1.29.0
+ # Check for misspells in documentation files:
+ # - repo: https://github.com/codespell-project/codespell
+ # rev: v2.2.2
+ # hooks:
+ # - id: codespell
+
+ - repo: local
hooks:
- - id: yamllint
- types: [yaml]
+ - id: mypy-with-deps
+ name: "mypy-with-dependencies"
+ language: script
+ pass_filenames: false
+ types: [file]
+ entry: scripts/mypy-deps.sh
+
+ # - repo: https://github.com/pre-commit/mirrors-mypy
+ # rev: "v1.7.1"
+ # hooks:
+ # - id: mypy
+ # verbose: true
+ # args: [--show-error-codes]
+ # additional_dependencies: ["types-requests"]
--- /dev/null
+**/.pre-commit-config.yaml
+**/*.yaml
+**/*.yml
build:
os: ubuntu-22.04
tools:
- python: "3.11"
+ python: '3.11'
python:
install:
- requirements: docs/requirements.txt
- - {path: ., method: pip}
+ - { path: ., method: pip }
--- /dev/null
+---
+
+extends: default
+
+rules:
+ braces:
+ level: warning
+ max-spaces-inside: 1
+ brackets:
+ level: warning
+ max-spaces-inside: 1
+ colons:
+ level: warning
+ commas:
+ level: warning
+ comments: disable
+ comments-indentation: disable
+ document-start: disable
+ empty-lines:
+ level: warning
+ hyphens:
+ level: warning
+ indentation:
+ level: warning
+ indent-sequences: consistent
+ line-length:
+ max: 100
+ level: warning
# under `install_requires` in `setup.cfg` is also listed here!
sphinx>=3.2.1
# sphinx_rtd_theme
+apidoc
[tool.pdm]
package-dir = "src"
+[tool.isort]
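+# Keep import sorting compatible with black formatting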
+profile = "black"
+
[project]
name = "python_one_password"
version = "0.1.2"
description = "Imports metadata from 1Password vaults and allows for bulk manipulation of tags"
-authors = [
- {name = "Matthew Watkins", email = "mwatkins@linuxfoundation.org"},
-]
-dependencies = [
- "setuptools>=68.2.2",
- "typer[all]~=0.9.0",
-]
-requires-python = ">=3.8"
+authors = [{ name = "Matthew Watkins", email = "mwatkins@linuxfoundation.org" }]
+dependencies = ["setuptools>=68.2.2", "typer[all]~=0.9.0"]
+requires-python = ">=3.9"
readme = "README.rst"
-license = {text = "Apache-2.0"}
+license = { text = "Apache-2.0" }
classifiers = [
- "Development Status :: 4 - Beta",
- "Programming Language :: Python",
+ "Development Status :: 4 - Beta",
+ "Programming Language :: Python",
]
[project.urls]
Changelog = "https://gerrit.linuxfoundation.org/infra/q/project:releng/python-one-password"
[project.optional-dependencies]
-testing = [
- "pytest",
- "pytest-cov",
- "setuptools",
- "typer",
-]
+testing = ["pytest", "pytest-cov", "setuptools", "typer"]
[project.scripts]
python-one-password = "python_one_password.cli:run"
--- /dev/null
+#!/bin/sh
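+# Install project dependencies (via a PDM export) and docs requirements when
+# present, so mypy can resolve third-party imports, then type-check the tree.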
+if [ -f pyproject.toml ]; then
+ pip install pdm > /dev/null 2>&1
+ pdm export -o /tmp/requirements.txt > /dev/null 2>&1
+ pip install -r /tmp/requirements.txt > /dev/null 2>&1
+fi
+if [ -f docs/requirements.txt ]; then
+ pip install -r docs/requirements.txt > /dev/null 2>&1
+fi
+mypy .
--- /dev/null
+#!/bin/bash
+
+# set -x
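+
+# Check TOML files for formatting problems with taplo; when taplo is not on
+# the PATH, a release binary is fetched into /tmp and cleaned up afterwards.
+# Usage: tomllint.sh [target-directory]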
+
+status_code="0"
+TAPLO_URL=https://github.com/tamasfe/taplo/releases/download/0.8.1
+
+# Process command-line arguments: an optional single directory to scan
+if [ $# -eq 1 ] && [ -d "$1" ]; then
+ TARGET="$1"
+else
+ TARGET=$(pwd)
+fi
+
+check_platform() {
+ # Enumerate platform and set binary name appropriately
+ PLATFORM=$(uname -a)
+ if (echo "${PLATFORM}" | grep Darwin | grep arm64); then
+ TAPLO_BIN="taplo-darwin-aarch64"
+ elif (echo "${PLATFORM}" | grep Darwin | grep x86_64); then
+ TAPLO_BIN="taplo-darwin-x86_64"
+ elif (echo "${PLATFORM}" | grep Linux | grep aarch64); then
+ TAPLO_BIN="taplo-full-linux-aarch64"
+ elif (echo "${PLATFORM}" | grep Linux | grep x86_64); then
+ TAPLO_BIN="taplo-full-linux-x86_64"
+ else
+ echo "Unsupported platform!"; exit 1
+ fi
+ TAPLO_GZIP="$TAPLO_BIN.gz"
+}
+
+check_file() {
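+ # Format the file in place with taplo, then diff it against a saved copy;
+ # any difference means the file was not correctly formatted.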
+ local file_path="$1"
+ cp "$file_path" "$file_path.original"
+ "${TAPLO_BIN}" format "$file_path" >/dev/null
+ diff "$file_path" "$file_path.original"
+ local exit_code=$?
+ if [ $exit_code -ne 0 ]; then
+ status_code=$exit_code
+ echo "::error file=${file_path}::TOML file is not formatted: ${file_path}"
+ elif [ -f "$file_path.original" ]; then
+ rm "$file_path.original"
+ fi
+}
+
+check_all() {
+ if [ -d "${TARGET}" ]; then
+ echo "Scanning all the TOML files at folder: ${TARGET}"
+ fi
+ while IFS= read -r current_file; do
+ echo "Check file $current_file"
+ check_file "$current_file"
+ done < <(find "${TARGET}" -name '*.toml' -type f -not -path '*/.*')
+}
+
+download_taplo() {
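+ # Fetch the gzipped release binary into /tmp (unless already cached),
+ # unpack it, mark it executable and point TAPLO_BIN at the result.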
+ if [ ! -f /tmp/"${TAPLO_GZIP}" ]; then
+ "${WGET_BIN}" -q -e robots=off -P /tmp "${TAPLO_URL}"/"${TAPLO_GZIP}"
+ fi
+ TAPLO_PATH="/tmp/${TAPLO_BIN}"
+ if [ ! -x "${TAPLO_PATH}" ]; then
+ gzip -d "/tmp/${TAPLO_GZIP}"
+ chmod +x "/tmp/${TAPLO_BIN}"
+ fi
+ TAPLO_BIN="/tmp/${TAPLO_BIN}"
+}
+
+cleanup_tmp() {
+ # Only clean the temp directory if it was used
+ if [ -f "/tmp/${TAPLO_BIN##*/}" ] || [ -f "/tmp/${TAPLO_GZIP}" ]; then
+ echo "Cleaning up..."
+ rm -f "/tmp/${TAPLO_BIN##*/}"*
+ fi
+}
+
+check_wget() {
+ # Pre-flight binary checks and download
+ WGET_BIN=$(which wget)
+ if [ ! -x "${WGET_BIN}" ]; then
+ echo "WGET command not found"
+ sudo apt-get update; sudo apt-get install -y wget
+ fi
+ WGET_BIN=$(which wget)
+ if [ ! -x "${WGET_BIN}" ]; then
+ echo "WGET could not be installed"; exit 1
+ fi
+}
+
+TAPLO_BIN=$(which taplo)
+if [ ! -x "${TAPLO_BIN}" ]; then
+ check_wget && check_platform && download_taplo
+fi
+
+if [ ! -x "${TAPLO_BIN}" ]; then
+ echo "Download failed: TOML linting binary not found [taplo]"
+ status_code="1"
+else
+ # To avoid execution when sourcing this script for testing
+ [ "$0" = "${BASH_SOURCE[0]}" ] && check_all "$@"
+fi
+
+cleanup_tmp
+exit $status_code
with open(filename, encoding="utf-8") as open_file:
data = json.loads(open_file.read())
log.debug("JSON read from file: %s", filename)
- except IOError as error:
+ except OSError as error:
log.error("Error reading JSON from file: %s", filename)
log.error(error)
sys.exit(1)
with open(filename, "w", encoding="utf-8") as write_file:
json.dump(json_data, write_file)
log.debug("JSON written to file: %s", filename)
- except IOError as error:
+ except OSError as error:
log.error("Error writing JSON to file: %s", filename)
log.error(error)
sys.exit(1)