50 changes: 26 additions & 24 deletions .github/workflows/reusable-build-test-release.yml
@@ -485,20 +485,22 @@ jobs:
python${{ env.PYTHON_VERSION }} -m pip install poetry==${{ env.POETRY_VERSION }} poetry-plugin-export==${{ env.POETRY_EXPORT_PLUGIN_VERSION }}
poetry check
poetry export --without-hashes -o package/lib/requirements.txt
poetry export --without-hashes --with dev -o requirements_dev.txt
fi
if [ ! -f requirements_dev.txt ]; then echo no requirements;exit 0 ;fi
if [ ! -f dev_deps/requirements_dev.txt ]; then echo no dev_deps/requirements;exit 1 ;fi
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://git@github.com
poetry install --with dev
python${{ env.PYTHON_VERSION }} -m venv .dev_venv
echo "Found requirements_dev.txt. Installing dev dependencies in an isolated environment"
./.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
./.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r package/lib/requirements.txt
- name: Create directories
run: |
mkdir -p /opt/splunk/var/log/splunk
chmod -R 777 /opt/splunk/var/log/splunk
- name: Copy pytest ini
run: cp tests/unit/pytest-ci.ini pytest.ini
- name: Run Pytest with coverage
run: poetry run pytest --cov=./ --cov-report=xml --junitxml=test-results/junit.xml tests/unit
run: ./.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pytest --cov=./ --cov-report=xml --junitxml=test-results/junit.xml tests/unit
- name: Job summary
continue-on-error: true
run: |
@@ -568,48 +570,46 @@ jobs:
echo "No prod dependencies were found"
rm requirements.txt
fi
poetry export --without-hashes --with dev -o requirements_dev.txt
cat requirements_dev.txt
fi
- name: Setup UCC
run: |
if [ -f "requirements_ucc.txt" ]; then
if [ -f "dev_deps/requirements_ucc.txt" ]; then
python${{ env.PYTHON_VERSION }} -m venv .ucc_venv
echo "Found requirements_ucc.txt. Installing UCC dependencies in an isolated environment"
./.ucc_venv/bin/python -m pip install -r requirements_ucc.txt
./.ucc_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_ucc.txt
export UCC_GEN="$PWD/.ucc_venv/bin/ucc-gen"

if [ ! -f "$UCC_GEN" ]; then
echo "ucc-gen not found after installing requirements from requirements_ucc.txt"
echo "ucc-gen not found after installing requirements from dev_deps/requirements_ucc.txt"
exit 1
fi

echo "UCC_GEN=$UCC_GEN" >> "$GITHUB_ENV"
else
echo "No UCC requirements file found, skipping UCC setup"
echo "Consider adding a requirements_ucc.txt file and place UCC requirement there, to avoid dependency conflicts"
echo "Consider adding a dev_deps/requirements_ucc.txt file and place UCC requirement there, to avoid dependency conflicts"
fi
- name: Get pip cache dir
id: pip-cache
run: |
echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
- name: Run Check there are libraries to scan
id: checklibs
run: if [ -f requirements_dev.txt ]; then echo "ENABLED=true" >> "$GITHUB_OUTPUT"; fi
run: if [ -f dev_deps/requirements_dev.txt ]; then echo "ENABLED=true" >> "$GITHUB_OUTPUT"; fi
- name: pip cache
if: ${{ steps.checklibs.outputs.ENABLED == 'true' }}
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('requirements_dev.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('dev_deps/requirements_dev.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install deps
if: ${{ steps.checklibs.outputs.ENABLED == 'true' }}
run: |
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://git@github.com
pip install -r requirements_dev.txt
python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
- name: Semantic Release Get Next
id: semantic
if: github.event_name != 'pull_request'
@@ -902,11 +902,11 @@ jobs:
env:
PYTHON_KEYRING_BACKEND: keyring.backends.null.Keyring
run: |
poetry install --only modinput
python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
if [ -f "tests/ucc_modinput_functional/tmp/openapi.json" ]; then
poetry run ucc-test-modinput gen -o tests/ucc_modinput_functional/tmp/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
ucc-test-modinput gen -o tests/ucc_modinput_functional/tmp/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
else
poetry run ucc-test-modinput gen -o ${{ steps.download-openapi.outputs.download-path }}/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
ucc-test-modinput gen -o ${{ steps.download-openapi.outputs.download-path }}/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
fi
- name: upload-libs-to-s3
id: upload-libs-to-s3
@@ -915,9 +915,11 @@
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
poetry install --with dev
python${{ env.PYTHON_VERSION }} -m venv .dev_venv
echo "Found requirements_dev.txt. Installing dev dependencies in an isolated environment"
./.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
libs_archive=libs_$(basename "$BUILD_NAME" .spl).tgz
cp -r "$(find "$(poetry env info --path)" -maxdepth 3 -type d -name "site-packages")" libs/
cp -r ./.dev_venv/lib/python${{ env.PYTHON_VERSION }}/site-packages/ libs/
tar -czf "$libs_archive" libs
aws s3 cp "$libs_archive" "s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/$libs_archive" --only-show-errors
- name: upload-swagger-artifacts-to-s3
@@ -1265,7 +1267,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
@@ -1550,7 +1552,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
@@ -1826,7 +1828,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
@@ -2101,7 +2103,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
@@ -2365,7 +2367,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
@@ -2634,7 +2636,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
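Note for consumers: the reworked jobs no longer run `poetry install --with dev` or `poetry export --with dev` on the fly; they expect pre-generated `dev_deps/requirements_dev.txt` (and optionally `dev_deps/requirements_ucc.txt`) in the add-on repository and install them into isolated virtualenvs (`.dev_venv`, `.ucc_venv`). A minimal sketch of a step an add-on repository might use to produce those files is shown below; the step name, the `dev_deps/` generation approach, and the UCC pin are illustrative assumptions, not part of this change.

```yaml
# Illustrative only: one way a consuming add-on repository could generate the
# dependency files the reworked jobs look for. The exact layout is assumed here.
- name: Export dependency files
  run: |
    mkdir -p dev_deps
    # prod dependencies still go to package/lib/requirements.txt (unchanged)
    poetry export --without-hashes -o package/lib/requirements.txt
    # dev dependencies now live under dev_deps/ instead of the repo root
    poetry export --without-hashes --with dev -o dev_deps/requirements_dev.txt
    # pin ucc-gen separately so it is installed into its own .ucc_venv
    echo "splunk-add-on-ucc-framework==5.69.1" > dev_deps/requirements_ucc.txt
```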
58 changes: 58 additions & 0 deletions .github/workflows/reusable-publish-to-splunkbase.yml
@@ -0,0 +1,58 @@
name: publish-to-splunkbase
on:
workflow_call:
inputs:
addon_version:
description: 'The version of the add-on to publish to Splunkbase'
required: true
type: string
splunk_versions:
description: 'Comma-separated list of supported Splunk versions'
required: true
type: string
cim_versions:
description: 'Comma-separated list of supported CIM versions'
required: true
type: string
secrets:
SPL_COM_USERNAME:
description: 'Splunk Community username'
required: true
SPL_COM_PASSWORD:
description: 'Splunk Community password'
required: true

jobs:
inputs-validator:
runs-on: ubuntu-latest
steps:
- id: matrix
uses: splunk/[email protected]
with:
features: PYTHON39
publish:
runs-on: ubuntu-latest
needs:
- inputs-validator
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- run: pip install splunk_add_on_ucc_framework-5.69.1-py3-none-any.whl
- name: Fetch build
env:
GH_TOKEN: ${{ github.token }}
run: |
gh release download v${{ inputs.addon_version }} --pattern "*${{ inputs.addon_version }}.spl" --output release.spl
- run: |
APP_ID=$(cat .splunkbase)
export APP_ID
ucc-gen publish \
--stage \
--app-id "$APP_ID" \
--package-path release.spl \
--splunk-versions ${{ inputs.splunk_versions }} \
--cim-versions ${{ inputs.cim_versions }} \
--username ${{ secrets.SPL_COM_USERNAME }} \
--password ${{ secrets.SPL_COM_PASSWORD }}
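For context, a caller of this new reusable workflow might look roughly like the sketch below. The trigger, the caller file name, and the example version strings are assumptions; a repository outside this one would reference the workflow as `<owner>/<repo>/.github/workflows/reusable-publish-to-splunkbase.yml@<ref>` instead of the local path.

```yaml
# Hypothetical caller, e.g. .github/workflows/publish.yml in an add-on repository.
name: publish
on:
  workflow_dispatch:
    inputs:
      addon_version:
        description: 'Add-on version to publish'
        required: true
        type: string
jobs:
  publish:
    uses: ./.github/workflows/reusable-publish-to-splunkbase.yml
    with:
      addon_version: ${{ inputs.addon_version }}
      splunk_versions: "9.2,9.3,9.4"   # example values
      cim_versions: "5.3.2"            # example value
    secrets:
      SPL_COM_USERNAME: ${{ secrets.SPL_COM_USERNAME }}
      SPL_COM_PASSWORD: ${{ secrets.SPL_COM_PASSWORD }}
```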
61 changes: 61 additions & 0 deletions .github/workflows/reusable-validate-deploy-docs.yml
@@ -0,0 +1,61 @@
name: validate-deploy-docs

on:
workflow_call:

jobs:
validate-docs-change:
runs-on: ubuntu-latest
outputs:
status: ${{ steps.validate.outputs.status }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.12
- name: Install mkdocs and plugins
run: pip install mkdocs==1.6.0 mkdocs-material==9.5.32 mkdocs-print-site-plugin==2.6.0
- name: Validate docs change
id: validate
shell: bash
run: |
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m'
if mkdocs build --strict; then
echo "status=success" >> "$GITHUB_OUTPUT"
echo -e "${GREEN}Docs validation success${NC}"
else
echo "status=failure" >> "$GITHUB_OUTPUT"
echo -e "${RED}Docs validation failure${NC}"
exit 1
fi

deploy-docs:
needs:
- validate-docs-change
runs-on: ubuntu-latest
permissions:
contents: write
pages: write
if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.12
- name: Install mkdocs and plugins
run: pip install mkdocs==1.6.0 mkdocs-material==9.5.32 mkdocs-print-site-plugin==2.6.0
- name: Build and Deploy docs
id: deploy
shell: bash
run: |
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m'
if [ "${{ needs.validate-docs-change.outputs.status }}" == "failure" ]; then
echo -e "${RED}Docs validation failed, abort docs deployment... (for more details look at Validate docs change job)${NC}"
exit 1
fi
mkdocs gh-deploy --force
echo -e "${GREEN}Deployed docs on github!${NC}"