init first version of pbs_special_agent
Some checks failed
build / Build Checkmk package (push) Failing after 36s
build-release / Build Release Package (push) Failing after 35s
Lint / flake8_py3 (push) Failing after 18s
pytest / pytest (push) Failing after 32s

This commit is contained in:
Simon Zeyer 2026-03-10 19:28:57 +00:00
commit 19cf015e95
29 changed files with 1229 additions and 0 deletions

15
.devcontainer/Dockerfile Normal file
View File

@ -0,0 +1,15 @@
# Devcontainer image based on the official Checkmk cloud edition image.
# VARIANT selects the Checkmk version tag (set in devcontainer.json).
ARG VARIANT
FROM checkmk/check-mk-cloud:${VARIANT}
# Run the entrypoint once so the "cmk" OMD site is created inside the image.
RUN /docker-entrypoint.sh /bin/true
# nodejs/gcc are needed by some VS Code extensions and pip source builds.
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends nodejs gcc
ADD requirements.txt /tmp/requirements.txt
USER cmk
# Install development requirements into the site's own Python environment.
RUN PATH="/omd/sites/cmk/bin:${PATH}" \
OMD_ROOT="/omd/sites/cmk" \
/omd/sites/cmk/bin/pip3 install -r /tmp/requirements.txt
ENTRYPOINT ["/bin/bash"]

15
.devcontainer/build.sh Executable file
View File

@ -0,0 +1,15 @@
#!/bin/bash
# Build the Checkmk MKP package for this extension and, when running inside
# a GitHub Actions workflow, expose package metadata as step outputs.
NAME=$(python -c 'print(eval(open("package").read())["name"])')
rm /omd/sites/cmk/var/check_mk/packages/* ||:
ln -s "$WORKSPACE/package" "/omd/sites/cmk/var/check_mk/packages/$NAME"
mkp -v pack "$NAME"
# Set outputs for GitHub workflow steps. The legacy `::set-output` workflow
# command was removed by GitHub; outputs must be appended to $GITHUB_OUTPUT.
if [ -n "$GITHUB_WORKSPACE" ]; then
    VERSION=$(python -c 'print(eval(open("package").read())["version"])')
    {
        echo "pkgfile=$(ls *.mkp)"
        echo "pkgname=${NAME}"
        echo "pkgversion=${VERSION}"
    } >> "$GITHUB_OUTPUT"
fi

View File

@ -0,0 +1,55 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.155.1/containers/ubuntu
{
"name": "Checkmk",
"build": {
"dockerfile": "Dockerfile",
"args": { "VARIANT": "2.3.0p42" }
},
"customizations": {
"vscode": {
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
"ms-python.flake8",
"ms-python.isort",
"ms-python.pylint",
"ms-python.black-formatter",
"ms-python.vscode-pylance",
"littlefoxteam.vscode-python-test-adapter",
"rioj7.command-variable"
],
// Bash as default shell.
"settings": {"terminal.integrated.defaultProfile.linux": "bash"},
"python.defaultInterpreterPath": "/omd/sites/cmk/bin/python3"
}
},
// Mount complete volume for site directories
"mounts": [
{ "source": "omd-sites", "target": "/opt/omd/sites", "type": "volume" },
{ "source": "${localWorkspaceFolder}/lib", "target": "/opt/omd/sites/cmk/local/lib/python3/cmk", "type": "bind"},
{ "source": "${localWorkspaceFolder}/plugins", "target": "/opt/omd/sites/cmk/local/lib/python3/cmk_addons/plugins", "type": "bind" },
{ "source": "${localWorkspaceFolder}/plugins_legacy", "target": "/opt/omd/sites/cmk/local/share/check_mk", "type": "bind"},
{ "source": "${localWorkspaceFolder}/packages_local", "target": "/opt/omd/sites/cmk/var/check_mk/packages_local/", "type": "bind"}
],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": ".devcontainer/setpwd.sh",
// Start omd every time the container is started
"postStartCommand": ".devcontainer/startup.sh",
"postAttachCommand": "omd restart && cmk-update-license-usage",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "cmk",
"remoteEnv": {
"PATH": "/omd/sites/cmk/bin:/omd/sites/cmk/local/lib/python3/bin/:${containerEnv:PATH}",
"OMD_ROOT": "/omd/sites/cmk",
"OMD_SITE": "cmk",
"CMK_SITE_ID": "cmk",
"WORKSPACE": "${containerWorkspaceFolder}"
}
}

View File

@ -0,0 +1,6 @@
urllib3<2
flake8
pytest
pytest-cov
requests-mock
black

9
.devcontainer/setpwd.sh Executable file
View File

@ -0,0 +1,9 @@
#!/bin/bash
# Post-create hook: link workspace folders into the Checkmk site and set a
# known password for the GUI admin user (development container only).
rm -rfv $OMD_ROOT/local/lib/nagios/plugins
ln -sv $WORKSPACE/nagios_plugins $OMD_ROOT/local/lib/nagios/plugins
rm -rfv $OMD_ROOT/local/tmp
ln -sv $WORKSPACE/temp $OMD_ROOT/local/tmp
# Default GUI login after this runs: cmkadmin / cmkadmin
source /omd/sites/cmk/.profile && echo 'cmkadmin' | /omd/sites/cmk/bin/cmk-passwd -i cmkadmin

3
.devcontainer/startup.sh Executable file
View File

@ -0,0 +1,3 @@
#!/bin/bash
# Post-start hook: remove stale pid files left over from a previous container
# run so omd can start its daemons cleanly.
find $OMD_ROOT/tmp/ -name "*.pid" -exec rm {} \;

View File

@ -0,0 +1,10 @@
# DO NOT EDIT - Change template-sync.conf
include .devcontainer/
include .devcontainer/**
exclude .github/template-sync.conf
include .github/
include .github/**
include .vscode/
include .vscode/**
include .flake8
include .gitignore

View File

@ -0,0 +1,4 @@
# Add additional sync excludes for this repo
#exclude .github/do-not-sync
#exclude .flake8
#exclude .gitignore

View File

@ -0,0 +1,23 @@
#!/usr/bin/bash
# Sync shared devcontainer/CI scaffolding from the upstream template repo
# into the current checkout, honouring local rsync filter files.
TEMPDIR=$(mktemp --directory)
cleanup() {
    # Always remove the clone, even when the sync fails (trap on EXIT).
    echo "Removing $TEMPDIR"
    rm -rf "$TEMPDIR"
}
trap cleanup EXIT
git -C "$TEMPDIR" clone https://github.com/Yogibaer75/checkmk_template.git
CMD="rsync --archive --cvs-exclude --no-owner --no-group --no-times --verbose"
# Optional repo-local filter file takes precedence over the template's own.
if [ -e ".devcontainer/template-sync.conf" ]; then
    CMD="${CMD} --filter='merge .devcontainer/template-sync.conf'"
fi
if [ -e "${TEMPDIR}/checkmk_template/.devcontainer/template-sync-includes.conf" ]; then
    CMD="${CMD} --filter='merge ${TEMPDIR}/checkmk_template/.devcontainer/template-sync-includes.conf'"
fi
CMD="${CMD} --filter='exclude *' ${TEMPDIR}/checkmk_template/ $(pwd)/"
# The command string contains single-quoted filter args, so it must be run
# through a shell rather than executed word-split.
bash -c "$CMD"
echo "$CMD"

39
.editorconfig Normal file
View File

@ -0,0 +1,39 @@
# To see what this is about, have a look at
# https://editorconfig.org/
root = true
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 4
tab_width = 8
charset = utf-8
[*.{bat,ps1,vbs,cmd}]
end_of_line = crlf
[*.{md,rst}]
max_line_length = 80
[active_checks/check_*]
max_line_length = 100
[checks/[!.]*]
max_line_length = 100
[*.{cc,h,js,py,pl,pm,t}]
trim_trailing_whitespace = true
[*.{cc,h}]
max_line_length = 100
[*.{js,py,pl,pm,t}]
max_line_length = 100
[{*.scss,package.json,.envrc}]
indent_size = 2
[{Makefile,*.make,*.am}]
indent_style = tab

1
.env Normal file
View File

@ -0,0 +1 @@
CHECKHOST=HPE

37
.flake8 Normal file
View File

@ -0,0 +1,37 @@
[flake8]
ignore=
################################################################################################
# Black is our formatting tool, so ignore any formatting-related findings.
################################################################################################
# whitespace before ':'
E203,
# line too long
E501,
# line break before binary operator
W503,
# multiple statements on one line (colon)
E701,
# multiple statements on one line (def)
E704,
################################################################################################
# Ignore findings which are incompatible with our "import" technology.
################################################################################################
# 'FOO' imported but unused
F401,
# module level import not at top of file
E402,
# 'from FOO import *' used; unable to detect undefined names
F403,
# 'FOO' may be undefined, or defined from star imports: BAR
F405,
################################################################################################
# We should probably have a look at these findings.
################################################################################################
# do not assign a lambda expression, use a def
E731,
# ambiguous variable name 'FOO'
E741,
# undefined name 'FOO'
F821,
# local variable 'FOO' is assigned to but never used
F841,

53
.github/workflows/build-release.yml vendored Normal file
View File

@ -0,0 +1,53 @@
# Build the extension .mkp and attach it to a GitHub release whenever a
# version tag (v*) without a trailing letter (pre-release) is pushed.
name: build-release
on:
  push:
    tags:
      - 'v*'
      - '!v*[a-z]'  # exclude pre-release style tags such as v1.0b
jobs:
  build-release:
    name: Build Release Package
    runs-on: ubuntu-latest
    container:
      # NOTE(review): 2.0.0 is much older than the 2.3.0 the package targets
      # (see `package`'s version.packaged) — confirm this image is intended.
      image: checkmk/check-mk-raw:2.0.0-latest
    env:
      OMD_ROOT: /omd/sites/cmk
      OMD_SITE: cmk
      CMK_SITE_ID: cmk
      WORKSPACE: ${{ github.workspace }}
    steps:
      - name: Initialize Checkmk Site
        run: /docker-entrypoint.sh /bin/true
      # NOTE(review): actions/checkout@v2, actions/create-release@v1.0.0 and
      # actions/upload-release-asset@v1.0.1 are archived/deprecated and run
      # on retired Node runtimes — likely cause of the failing release job.
      - uses: actions/checkout@v2
      - name: Setup links
        run: .devcontainer/symlink.sh
      - name: Update GITHUB_PATH
        run: echo "/omd/sites/cmk/bin" >> $GITHUB_PATH
      - name: Build Extension
        run: .devcontainer/build.sh
        id: cmkpkg
      - name: Create Release
        id: create_release
        uses: actions/create-release@v1.0.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: Release ${{ github.ref }}
          draft: false
          prerelease: false
      - name: Upload Release Asset
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./${{ steps.cmkpkg.outputs.pkgfile }}
          asset_name: ${{ steps.cmkpkg.outputs.pkgfile }}
          asset_content_type: application/octet-stream

39
.github/workflows/build.yml vendored Normal file
View File

@ -0,0 +1,39 @@
# Build the extension .mkp on every branch push / PR and keep it as a
# workflow artifact.
name: build
on:
  push:
    branches:
      - '**'
    tags-ignore:
      - 'v*'  # tags are handled by the build-release workflow
  pull_request:
jobs:
  build:
    name: Build Checkmk package
    runs-on: ubuntu-latest
    container:
      image: checkmk/check-mk-raw:2.0.0-latest
    env:
      OMD_ROOT: /omd/sites/cmk
      OMD_SITE: cmk
      CMK_SITE_ID: cmk
      WORKSPACE: ${{ github.workspace }}
    steps:
      - name: Initialize Checkmk Site
        run: /docker-entrypoint.sh /bin/true
      # checkout@v2 / upload-artifact@v2 run on retired Node runtimes and are
      # rejected by current GitHub runners; use the maintained major versions.
      - uses: actions/checkout@v4
      - name: Setup links
        run: .devcontainer/symlink.sh
      - name: Update GITHUB_PATH
        run: echo "/omd/sites/cmk/bin" >> $GITHUB_PATH
      - name: Build Extension
        run: .devcontainer/build.sh
        id: cmkpkg
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ steps.cmkpkg.outputs.pkgfile }}
          path: ${{ steps.cmkpkg.outputs.pkgfile }}

26
.github/workflows/lint.yml vendored Normal file
View File

@ -0,0 +1,26 @@
# Run flake8 on every push that touches a Python file.
name: Lint
on:
  push:
    paths:
      - '**.py'
jobs:
  flake8_py3:
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): checkout@v2 / setup-python@v1 run on retired Node
      # runtimes; Python 3.8 is also older than the 3.12 features used by
      # the check plugin (nested f-string quotes) — verify intended version.
      - uses: actions/checkout@v2
      - name: Set up Python 3.8
        uses: actions/setup-python@v1
        with:
          python-version: 3.8
      - name: Install flake8
        run: pip install flake8
      - name: Run flake8
        uses: suo/flake8-github-action@releases/v1
        with:
          checkName: 'flake8_py3' # NOTE: this needs to be the same as the job name
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

30
.github/workflows/pytest.yml vendored Normal file
View File

@ -0,0 +1,30 @@
# Run the test suite inside a Checkmk container on every push.
name: pytest
on:
  # NOTE(review): `push: []` is unusual — presumably meant "trigger on every
  # push"; confirm it actually fires (a bare `push:` is the common form).
  push: []
jobs:
  pytest:
    runs-on: ubuntu-latest
    container:
      image: checkmk/check-mk-raw:2.0.0-latest
    env:
      OMD_ROOT: /omd/sites/cmk
      OMD_SITE: cmk
      CMK_SITE_ID: cmk
      WORKSPACE: ${{ github.workspace }}
    steps:
      - name: Initialize Checkmk Site
        run: /docker-entrypoint.sh /bin/true
      - uses: actions/checkout@v2
      - name: Setup links
        run: ./.devcontainer/symlink.sh
      # Install test requirements as the site user so they land in the
      # site's own Python environment.
      - name: Install pytest
        run: su -l -c "REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt pip3 install -r $GITHUB_WORKSPACE/.devcontainer/requirements.txt" cmk
      - name: Update GITHUB_PATH
        run: echo "/omd/sites/cmk/bin" >> $GITHUB_PATH
      - name: Run pytest
        run: python3 -m pytest

5
.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
*.mkp
.coverage
__pycache__
debug.log
/temp/*

172
.pylintrc Normal file
View File

@ -0,0 +1,172 @@
[MASTER]
# Setup the Python paths needed for our tests.
init-hook=
import sys;
sys.path.insert(0, __file__[:__file__.rfind("/.venv")]); # __file__ is somewhere deep inside the .venv
from tests.testlib.common.repo import add_protocols_path, add_python_paths, add_otel_collector_path;
add_protocols_path();
add_python_paths();
add_otel_collector_path();
load-plugins=
tests.pylint.cmk_edition_ignores,
tests.pylint.checker_localization,
tests.pylint.checker_cmk_module_layers,
tests.pylint.checker_layering_violation,
pylint_pydantic
jobs=0
# pickle collected data for later comparisons. Not used in our CI and makes runs faster
persistent=no
extension-pkg-whitelist=rrdtool,_ldap,netifaces,pymssql,pydantic,lxml
signature-mutators=cmk.utils.store.with_lock_dict
[MESSAGES CONTROL]
disable=
#---------------------------------------------------------------------------
# This checker is flaky. Recheck later.
# Might be related to https://github.com/pylint-dev/pylint/issues/9101
too-many-ancestors,
#---------------------------------------------------------------------------
# Not useless if that's exporting a type as done often enough in the
# standard library.
useless-import-alias,
#---------------------------------------------------------------------------
# Use local suppressions or (even better) refactor the code.
import-outside-toplevel,
#---------------------------------------------------------------------------
# Enabling this would be very desirable, it vastly improves readability and
# it might even be necessary for tools like mypy. Fixing this involves some
# amount of relatively easy work, especially if we want to avoid code
# duplication (introduce new classes, combine methods, etc.)
attribute-defined-outside-init,
#---------------------------------------------------------------------------
# Enabling these warnings would be nice, they are mostly a sign of sloppy
# programming practice. In some cases, they can even hide bugs.
broad-except,
#---------------------------------------------------------------------------
# Enabling this would be nice, but not crucial. At the moment, we have quite
# a few violations, so we postpone fixing this. When we do it eventually, we
# probably want to use "include-naming-hint=yes" in the BASIC section.
invalid-name,
#---------------------------------------------------------------------------
# We can probably re-enable this if we move __version__ definitions and
# conditional imports after the normal imports.
wrong-import-position,
#---------------------------------------------------------------------------
# Enabling this would be nice, but not crucial. At the moment, we have quite
# a few violations, so we postpone fixing this.
unused-argument,
#---------------------------------------------------------------------------
# Alas, these maintenance/security nightmares are still part of our base
# "technology"... :-/ Nevertheless, reducing their usage is a very worthy
# goal.
exec-used,
global-statement,
#---------------------------------------------------------------------------
# Enabling these would be nice, but given the current state of affairs
# (gigantic modules with deeply nested humungous functions/methods), this
# will be a non-trivial amount of work.
too-few-public-methods,
too-many-arguments,
too-many-positional-arguments,
too-many-boolean-expressions,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-nested-blocks,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
#---------------------------------------------------------------------------
# Enabling these would be nice, but at the moment pylint is a bit too dumb,
# so it stumbles over e.g. initialization with None. It ignores control
# flow, so even adding e.g. isinstance() guards wouldn't help, see:
# https://github.com/PyCQA/pylint/issues/1498.
unsubscriptable-object,
#---------------------------------------------------------------------------
# Our code is still full of FIXMEs/XXXs/TODOs, perhaps fixing or removing
# them might be a good idea some day...
fixme,
#---------------------------------------------------------------------------
# Black doesn't split long strings, we'll have to do this ourselves, see
# https://github.com/psf/black/issues/1331
line-too-long,
# Since Black 24 black and pylint started to disagree on `def foo(): ...`
# https://github.com/psf/black/issues/4173
# black is our formatter so the black way is the proper way...
multiple-statements,
#---------------------------------------------------------------------------
# We are light years away from enabling these...
missing-module-docstring,
missing-class-docstring,
missing-function-docstring,
#---------------------------------------------------------------------------
# Enabling the two spelling-related checks increases pylints runtime from
# 11 min to 40 min, so we better keep those disabled for normal runs.
# NOTE: If we want to enable one of these checks, we need to add pyenchant
# to our dev dependencies.
wrong-spelling-in-comment,
wrong-spelling-in-docstring,
#---------------------------------------------------------------------------
# Pylint is full of bugs regarding this, leading to tons of false positives
# when pathlib.path is used. Furthermore, the handling of NewTypes is totally
# broken, see e.g. https://github.com/PyCQA/pylint/issues/2296 and
# https://github.com/PyCQA/pylint/issues/3162.
no-member,
#---------------------------------------------------------------------------
# Lots of warning due to this, but we should really go through them one by
# one, this might drastically improve the usefulness of our backtraces.
raise-missing-from,
#---------------------------------------------------------------------------
# pylint 2.7.[012] has totally screwed this up... :-/ Takes ages, too.
duplicate-code,
R0801,
#---------------------------------------------------------------------------
# A stylistic thing only, many findings, but all fixes are mechanical.
consider-using-from-import,
#---------------------------------------------------------------------------
# A good idea in general, but each of the many findings has to be looked at:
# We often mutate a dictionary while iterating over it, which is :-P
consider-using-dict-items,
#---------------------------------------------------------------------------
# Purely mechanical & aesthetical, lots of findings.
redundant-u-string-prefix,
#---------------------------------------------------------------------------
# Fixing this is URGENT: There are potentially lots of encoding problems
# sleeping in our code when we are not explicit in open().
unspecified-encoding,
#---------------------------------------------------------------------------
# Tons of findings, we fix this incrementally, f-strings are much more
# performant than old-skool string splicing/formatting.
consider-using-f-string,
#---------------------------------------------------------------------------
# New in version 2.15.0 which causes a few findings
missing-timeout,
#---------------------------------------------------------------------------
# New in version 2.16.0 which causes a few findings
broad-exception-raised,
#---------------------------------------------------------------------------
# Import order is checked by isort
wrong-import-order,
#---------------------------------------------------------------------------
# New in version 3.2.0 which causes new findings
# TODO: fix these new findings - https://jira.lan.tribe29.com/browse/CMK-17473
possibly-used-before-assignment,
contextmanager-generator-missing-cleanup
[IMPORTS]
# This complies with PEP 8 and avoids code duplication in some cases.
allow-wildcard-with-all=yes
[REPORTS]
output-format=colorized
[FORMAT]
max-line-length=100
[VARIABLES]
# Be a little bit more mypy-friendly.
additional-builtins=reveal_type
[LAYERING_VIOLATION]
# layering-definition=.layering.yaml

110
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,110 @@
{
// Uses IntelliSense to determine possible attributes.
// Hover over existing attributes to see their descriptions.
// For more information, visit https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Aktuelle Datei",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal"
},
{
"name": "cmk - check",
"type": "debugpy",
"request": "launch",
"program": "~/bin/cmk",
"args": [
"-vv",
"--debug",
"-n",
"${input:envHOST}"
],
"console": "integratedTerminal",
},
{
"name": "cmk - discover",
"type": "debugpy",
"request": "launch",
"program": "~/bin/cmk",
"args": [
"-vv",
"--debug",
"-I",
"${input:envHOST}"
],
"console": "integratedTerminal",
},
{
"name": "cmk - rediscover",
"type": "debugpy",
"request": "launch",
"program": "~/bin/cmk",
"args": [
"-vv",
"--debug",
"-II",
"${input:envHOST}"
],
"console": "integratedTerminal",
},
{
"name": "cmk - agent build",
"type": "debugpy",
"request": "launch",
"program": "~/bin/cmk",
"args": [
"-vv",
"--debug",
"-A",
"-f",
"${input:envHOST}"
],
"console": "integratedTerminal",
},
{
"name": "agent_redfish",
"type": "debugpy",
"request": "launch",
"program": "/workspaces/checkmk_template_23/plugins/redfish/special_agents/agent_redfish.py",
"args": [
"-vvv",
"--debug",
"-P",
"http",
"-u",
"admin",
"--password-id",
"rfpass:/omd/sites/cmk/var/check_mk/passwords_merged",
"-p",
"8000",
"192.168.188.223",
]
},
{
"name": "cmk - show host config",
"type": "debugpy",
"request": "launch",
"program": "~/bin/cmk",
"args": [
"--debug",
"-vv",
"-D",
"${input:envHOST}"
]
},
],
"inputs": [
{
"id": "envHOST",
"type": "command",
"command": "extension.commandvariable.file.content",
"args": {
"fileName": "${workspaceFolder}/.env",
"key": "CHECKHOST"
}
}
]
}

12
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,12 @@
{
"python.testing.pytestArgs": [
"."
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"python.defaultInterpreterPath": "/omd/sites/cmk/bin/python3",
"python.analysis.typeCheckingMode": "off",
"python.analysis.autoImportCompletions": true,
"flake8.cwd": "/omd/sites/cmk/",
"flake8.enabled": false
}

35
.vscode/tasks.json vendored Normal file
View File

@ -0,0 +1,35 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "Install",
"type": "shell",
"command": ".devcontainer/build.sh",
"problemMatcher": [],
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "Start Site",
"type": "shell",
"command": "omd start",
"problemMatcher": []
},
{
"label": "Re-Start Site",
"type": "shell",
"command": "omd restart",
"problemMatcher": []
},
{
"label": "Stop Site",
"type": "shell",
"command": "omd stop",
"problemMatcher": []
}
]
}

6
.yamllint.yml Normal file
View File

@ -0,0 +1,6 @@
---
extends: default
rules:
line-length:
max: 100

19
README.md Normal file
View File

@ -0,0 +1,19 @@
# Checkmk extension devcontainer template
## Description
This is a template to develop Checkmk Extensions derived from the original made by [Marius Rieder](https://github.com/jiuka/)
## Development
For the best development experience use [VSCode](https://code.visualstudio.com/) with the [Remote Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension. This maps your workspace into a checkmk docker container giving you access to the python environment and libraries the installed extension has.
## Directories
The following directories in this repo are getting mapped into the Checkmk site.
* `agents`, `checkman`, `checks`, `doc`, `inventory`, `notifications`, `web` are mapped into `local/share/check_mk/`
* `agent_based` is mapped to `local/lib/check_mk/base/plugins/agent_based`
* `nagios_plugins` is mapped to `local/lib/nagios/plugins`
* `bakery` is mapped to `local/lib/check_mk/base/cee/plugins/bakery`
* `temp` is mapped to `local/tmp` for storing precreated agent output

19
package Normal file
View File

@ -0,0 +1,19 @@
{
    'author': u'Simon Zeyer',
    'description': u'Proxmox Backup Server Checks via REST API',
    'download_url': 'https://gitea.simonzeyer.de/simon/cmk_pbs_special_agent',
    'files': {
        'cmk_addons_plugins': [
            # NOTE(review): the two entries below look like template
            # leftovers ("plugin_name") — confirm against the actual files.
            'agent_based/plugin_name.py',
            'checkman/plugin_name',
            'rulesets/pbs_special_agent.py',
            # A missing comma here previously concatenated the next two
            # entries into one bogus path, so neither file was packaged.
            'server_side_calls/pbs_special_agent.py',
            'libexec/agent_pbs_special_agent',
        ],
    },
    'name': 'pbs_special_agent',
    'title': u'Proxmox PBS',
    'version': '0.1',
    'version.min_required': '2.1.0',
    'version.packaged': '2.3.0p42.cce'
}

View File

@ -0,0 +1,243 @@
#!/usr/bin/env python3
import itertools
import json
from datetime import datetime
from cmk.agent_based.v2 import (
AgentSection,
CheckPlugin,
Service,
Result,
State,
Metric,
get_value_store,
)
from cmk.plugins.lib.df import (
df_check_filesystem_single,
FILESYSTEM_DEFAULT_LEVELS,
)
# Service item names. Job and datastore prefixes carry a trailing space so
# the job/store id can be appended directly (e.g. "Sync Job <id>").
ITEM_STATUS = "API Status"
ITEM_DS_USAGE = "Datastore Usage "
ITEM_SYNC_JOBS = "Sync Job "
ITEM_GC = "Garbage Collection "
ITEM_PRUNE = "Prune Job "
ITEM_VERIFY = "Verify Job "
ITEM_FS_ROOT = "HD Space (root)"
ITEM_FS_SWAP = "Swap Usage"  # not discovered at the moment
ITEM_CPU = "CPU Usage"
ITEM_MEM = "Memory Usage"
ITEM_LOAD = "Load Average"
def parse_jobs(item: str, item_key: str, section: dict, section_key: str, section_item_key: str):
    """Yield Results for the job whose item matches ``item_key`` + job id.

    Args:
        item: the service item being checked.
        item_key: item prefix for this job type (e.g. ITEM_SYNC_JOBS).
        section: parsed agent section.
        section_key: key in the section holding this job type's list.
        section_item_key: field inside each job entry that carries its id.

    A job without "last-run-state" is currently executing and reported OK;
    otherwise the last run state decides between OK and CRIT.
    """
    if not item.startswith(item_key):
        return
    job_id = item.replace(item_key, '')
    for job in section[section_key]:
        if job_id != job[section_item_key]:
            continue
        if "last-run-state" not in job:
            yield Result(
                state=State.OK,
                summary=(
                    "Job running"
                ),
            )
            continue
        next_run = ""
        if 'next-run' in job:
            next_run = (
                f", next run: "
                f"{datetime.fromtimestamp(job['next-run'])}"
            )
        # NOTE: f-strings below avoid reusing the outer quote character so
        # the plugin stays parseable on Python < 3.12 (the original nested
        # double quotes were a SyntaxError there).
        last_state = job["last-run-state"]
        if last_state == "OK":
            yield Result(
                state=State.OK,
                summary=f"last run state: OK{next_run}",
            )
        else:
            yield Result(
                state=State.CRIT,
                summary=f"last run state: {last_state}{next_run}",
            )
def parse_pbs_special_agent(string_table):
    """Parse the agent section: all cells joined by spaces form one JSON doc."""
    cells = [cell for row in string_table for cell in row]
    return json.loads(" ".join(cells))
def discover_pbs_special_agent(section):
    """Discover the API-status service, node resources and one service per job."""
    yield Service(item=ITEM_STATUS)
    if "status" in section:
        for node_item in (ITEM_CPU, ITEM_MEM, ITEM_LOAD):
            yield Service(item=node_item)
    # One service per configured job, named by the job's identifying field.
    for section_key, item_prefix, id_field in (
        ("sync", ITEM_SYNC_JOBS, "id"),
        ("gc", ITEM_GC, "store"),
        ("prune", ITEM_PRUNE, "id"),
        ("verify", ITEM_VERIFY, "id"),
    ):
        for entry in section.get(section_key, []):
            yield Service(item=item_prefix + entry[id_field])
def discover_pbs_special_agent_storage(section):
    """Discover one usage service per datastore plus the root filesystem."""
    for datastore in section.get("ds_usage", []):
        yield Service(item=ITEM_DS_USAGE + datastore["store"])
    if "status" in section:
        yield Service(item=ITEM_FS_ROOT)
def check_pbs_special_agent(item, section):
    """Check function for PBS API status, job and node-resource services.

    On an agent-level error only the API-status service goes CRIT; all
    other services yield nothing. F-strings avoid reusing the outer quote
    character so the plugin stays parseable on Python < 3.12 (the original
    nested double quotes were a SyntaxError there).
    """
    if "error" in section:
        if item == ITEM_STATUS:
            yield Result(state=State.CRIT, summary=section["error"])
        return
    if item == ITEM_STATUS:
        status = section["status"]
        version = section["version"]
        cpuinfo = status['cpuinfo']
        kernel = status['current-kernel']
        boot = status['boot-info']
        yield Result(
            state=State.OK,
            summary=(
                f"Version {version['version']}."
                f"{version['release']}"
            ),
            details=(
                f"CPU: {cpuinfo['model']}, "
                f"{cpuinfo['sockets']} sockets, "
                f"{cpuinfo['cpus']} cores\n"
                f"Kernel: {kernel['sysname']} "
                f"{kernel['release']} "
                f"{kernel['machine']} "
                f"{kernel['version']}\n"
                f"Bios: {boot['mode']}, "
                f"secureboot: {'on' if boot['secureboot'] else 'off'}\n"
            )
        )
    # Job-type services: at most one of these matches the item prefix.
    yield from parse_jobs(item, ITEM_SYNC_JOBS, section, "sync", "id")
    yield from parse_jobs(item, ITEM_GC, section, "gc", "store")
    yield from parse_jobs(item, ITEM_PRUNE, section, "prune", "id")
    yield from parse_jobs(item, ITEM_VERIFY, section, "verify", "id")
    if item == ITEM_CPU:
        status = section["status"]
        cpu_busy = float(status.get("cpu", 0.0)) * 100  # fraction -> percent
        cpu_wait = float(status.get("wait", 0.0)) * 100
        yield Result(
            state=State.OK,
            summary=f"CPU Busy: {cpu_busy:.1f}%, IOWait: {cpu_wait:.1f}%"
        )
        yield Metric("cpu_busy", cpu_busy)
        yield Metric("cpu_wait", cpu_wait)
    if item == ITEM_MEM:
        status = section["status"]
        mem = status.get("memory", {})
        total = mem.get("total", 0)
        used = mem.get("used", 0)
        free = mem.get("free", 0)
        yield Result(
            state=State.OK,
            summary=f"Memory used: {used}, free: {free}, total: {total}",
        )
        yield Metric("mem_total", total)
        yield Metric("mem_used", used)
        yield Metric("mem_free", free)
    if item == ITEM_LOAD:
        status = section["status"]
        loadavg = status.get("loadavg", [0.0, 0.0, 0.0])
        load1, load5, load15 = [float(x) for x in loadavg]
        yield Result(
            state=State.OK,
            summary=f"Load avg: {load1:.2f}, {load5:.2f}, {load15:.2f}",
        )
        yield Metric("load1", load1)
        yield Metric("load5", load5)
        yield Metric("load15", load15)
def check_pbs_special_agent_storage(item: str, params: list, section: dict):
    """Check function for datastore and root-filesystem usage services.

    Delegates threshold handling to df_check_filesystem_single; sizes from
    the PBS API are bytes and are converted to MB as that helper expects.
    On an agent-level error nothing is yielded (services go stale).
    """
    if "error" in section:
        return
    if item.startswith(ITEM_DS_USAGE):
        store_name = item.replace(ITEM_DS_USAGE, '')
        for ds in section["ds_usage"]:
            if store_name != ds["store"]:
                continue
            try:
                size_mb = float(ds['total']) / (1024 * 1024)
                avail_mb = float(ds['avail']) / (1024 * 1024)
                yield from df_check_filesystem_single(
                    value_store=get_value_store(),
                    mountpoint=ds["store"],
                    filesystem_size=size_mb,
                    free_space=avail_mb,
                    reserved_space=0,
                    inodes_total=None,
                    inodes_avail=None,
                    params=params,
                    this_time=None,
                )
            except Exception:
                yield Result(
                    state=State.UNKNOWN,
                    summary="error checking datastore status"
                )
    if item == ITEM_FS_ROOT:
        try:
            fs_root = section["status"]["root"]
            size_mb = float(fs_root['total']) / (1024 * 1024)
            avail_mb = float(fs_root['avail']) / (1024 * 1024)
            yield from df_check_filesystem_single(
                value_store=get_value_store(),
                mountpoint="/root",
                filesystem_size=size_mb,
                free_space=avail_mb,
                # Must be a number: df_check_filesystem_single reports only
                # "no filesystem size information" when any of size/free/
                # reserved is None.
                reserved_space=0,
                inodes_total=None,
                inodes_avail=None,
                params=params,
                this_time=None,
            )
        except Exception:
            yield Result(
                state=State.UNKNOWN,
                summary="error checking root fs status"
            )
# Section parser: turns the raw agent output into the JSON dict used above.
agent_section_pbs_special_agent = AgentSection(
    name = "pbs_special_agent",
    parse_function = parse_pbs_special_agent,
)
# Status/job/node-resource services (no parameters).
check_plugin_pbs_special_agent_status = CheckPlugin(
    name = "pbs_special_agent_status",
    sections = [ "pbs_special_agent" ],
    service_name = "PBS %s",
    discovery_function = discover_pbs_special_agent,
    check_function = check_pbs_special_agent,
)
# Datastore/root-filesystem services; reuses the generic "filesystem"
# ruleset so the usual df levels can be configured in Setup.
check_plugin_pbs_special_agent_datastore = CheckPlugin(
    name = "pbs_special_agent_ds",
    sections = [ "pbs_special_agent" ],
    service_name = "PBS %s",
    discovery_function = discover_pbs_special_agent_storage,
    check_function = check_pbs_special_agent_storage,
    check_default_parameters=FILESYSTEM_DEFAULT_LEVELS,
    check_ruleset_name="filesystem",
)

View File

@ -0,0 +1,174 @@
#!/usr/bin/env python3
import requests
import argparse
import json
import ssl
import hashlib
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from urllib3.poolmanager import PoolManager
def fingerprint_checking_SSLSocket(_fingerprint: str):
    """Return an ssl.SSLSocket subclass pinned to one certificate fingerprint.

    Args:
        _fingerprint: expected SHA-256 digest of the server certificate in
            DER form, given as hex, optionally colon-separated,
            case-insensitive.

    The returned class raises ssl.SSLError during the handshake when the
    peer certificate's digest does not match.
    """
    class SSLSocket(ssl.SSLSocket):
        # Normalized (lowercase, colon-free) expected digest.
        fingerprint = _fingerprint.replace(":", "").lower()

        def do_handshake(self, *args, **kw):
            res = super().do_handshake(*args, **kw)
            # Get full certificate in DER format and compare its SHA-256
            # digest to the pinned fingerprint (hexdigest is lowercase).
            der_bytes = self.getpeercert(binary_form=True)
            crt_sha256 = hashlib.sha256(der_bytes).hexdigest()
            if crt_sha256 != self.fingerprint:
                # Original message said "sha1" although SHA-256 is computed.
                raise ssl.SSLError(
                    "Server %r certificate fingerprint (sha256) %s does not match %r"
                    % (
                        self.server_hostname,
                        crt_sha256,
                        self.fingerprint,
                    )
                )
            return res

    return SSLSocket
def api_get(session, url):
    """GET a PBS API endpoint and return its "data" payload.

    Raises RuntimeError with a descriptive message on any transport or HTTP
    failure so the caller can report a single agent-level error.
    """
    try:
        r = session.get(url, timeout=50)
        if r.status_code == 404:
            raise RuntimeError(f"API endpoint not found (404): {url}")
        if not r.ok:
            raise RuntimeError(f"API error {r.status_code}: {url} - {r.text}")
        return r.json().get("data", {})
    except requests.exceptions.Timeout as e:
        raise RuntimeError(
            f"Timeout while connecting to {url}"
        ) from e
    except requests.exceptions.ConnectionError as e:
        # The builtin ConnectionError caught previously never matches:
        # requests wraps transport failures in its own exception hierarchy,
        # so those errors fell through to the generic handler below.
        raise RuntimeError(
            f"Connection refused to {url} (host or port unreachable)"
        ) from e
    except requests.exceptions.HTTPError as e:
        raise RuntimeError(
            f"HTTP error from {url}: {e}"
        ) from e
    except requests.exceptions.RequestException as e:
        raise RuntimeError(
            f"Request failed for {url}: {e}"
        ) from e
def create_session():
    """Create a requests Session that retries transient GET failures."""
    retry_policy = Retry(
        total=3,                              # overall retry budget
        connect=3,                            # connection retries
        read=3,                               # read retries
        backoff_factor=0.5,                   # waits of 0.5s, 1s, 2s
        status_forcelist=[500, 502, 503, 504],
        allowed_methods=["GET"],
        raise_on_status=False,
    )
    http_adapter = HTTPAdapter(
        max_retries=retry_policy,
        pool_connections=5,
        pool_maxsize=5,
    )
    new_session = requests.Session()
    for scheme in ("https://", "http://"):
        new_session.mount(scheme, http_adapter)
    return new_session
class FingerprintAdapter(HTTPAdapter):
    # Transport adapter that disables stock certificate/hostname validation
    # and instead pins connections to the certificate fingerprint held in
    # the module-level PBS_FINGERPRINT (set from --fingerprint after
    # argument parsing, before this adapter is mounted).
    def init_poolmanager(self, connections, maxsize, block=False, **kwargs):
        # Verification happens inside the fingerprint-checking socket class,
        # so CA/hostname checks are switched off here.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        ctx.sslsocket_class = fingerprint_checking_SSLSocket(PBS_FINGERPRINT)
        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            ssl_context=ctx,
            assert_hostname=False,
        )
# ---------------------------------------------------------------------------
# Command line interface
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser("agent_pbs_special_agent")
parser.add_argument(
    "--host",
    help="PBS Host address or fqdn without https:// and port.",
    type=str,
    default="pbs")
parser.add_argument(
    "--port",
    help="PBS https port.",
    type=int,
    # String default is converted through type=int by argparse.
    default="8007")
parser.add_argument(
    "--fingerprint",
    help="Fingerprint of the PBS if not using valid cert.",
    type=str,
    default="")
parser.add_argument(
    "--tokenid",
    help="API Token with audit permissions.",
    type=str,
    default="admin@pbs!checkmk")
parser.add_argument(
    "--secret",
    help="Secret for API Token.",
    type=str)
args = parser.parse_args()
PBS_HOST = args.host
API_TOKEN_ID = args.tokenid
PBS_PORT = args.port
PBS_FINGERPRINT = args.fingerprint
API_TOKEN_SECRET = args.secret
# PBS API token authentication header (no ticket/cookie login required).
headers = {
    "Authorization": f"PBSAPIToken={API_TOKEN_ID}:{API_TOKEN_SECRET}"
}
# Disable SSL verification only if you use self-signed certs
session = create_session()
if PBS_FINGERPRINT != "":
    # Pin TLS to the given certificate fingerprint instead of CA validation
    # (for self-signed PBS certificates).
    session.mount("https://", FingerprintAdapter())
    # session.verify = False
session.headers.update(headers)
API_URL = f"https://{PBS_HOST}:{PBS_PORT}"
try:
    # Collect all endpoints into one JSON document, emitted as a single
    # agent section that the check plugin parses.
    return_json = {}
    return_json["version"] = api_get(session, f"{API_URL}/api2/json/version")
    return_json["status"] = api_get(session, f"{API_URL}/api2/json/nodes/localhost/status")
    return_json["tasks"] = api_get(session, f"{API_URL}/api2/json/nodes/localhost/tasks")
    return_json["ds_usage"] = api_get(session, f"{API_URL}/api2/json/status/datastore-usage")
    return_json["sync"] = api_get(session, f"{API_URL}/api2/json/admin/sync")
    return_json["gc"] = api_get(session, f"{API_URL}/api2/json/admin/gc")
    return_json["prune"] = api_get(session, f"{API_URL}/api2/json/admin/prune")
    return_json["verify"] = api_get(session, f"{API_URL}/api2/json/admin/verify")
    for d in return_json["ds_usage"]:
        # we do not need history data
        d.pop("history", None)
    print('<<<pbs_special_agent:sep(0)>>>')
    print(json.dumps(return_json))
    exit(0)
except Exception as e:
    # On failure still emit the section so the check side can raise CRIT on
    # the API-status service instead of letting the host's data go stale;
    # exit 0 keeps Checkmk from discarding the output.
    print('<<<pbs_special_agent:sep(0)>>>')
    print(json.dumps({"error": str(e)}))
    exit(0)

View File

@ -0,0 +1,47 @@
#!/usr/bin/env python3
# Shebang needed only for editors
from cmk.rulesets.v1.form_specs import Dictionary, DictElement, String, Password, migrate_to_password, DefaultValue
from cmk.rulesets.v1.rule_specs import SpecialAgent, Topic, Help, Title
def _formspec():
    """Build the configuration form for the PBS special agent rule."""
    form_elements = {
        "port": DictElement(
            required=True,
            parameter_form=String(
                title=Title("Port of the PBS API."),
                prefill=DefaultValue("8007"),
            ),
        ),
        "fingerprint": DictElement(
            required=True,
            parameter_form=String(
                title=Title("Fingerprint of the PBS"),
                prefill=DefaultValue(""),
            ),
        ),
        "tokenid": DictElement(
            required=True,
            parameter_form=String(
                title=Title("API Token with audit permissions."),
            ),
        ),
        # The secret uses the Password form spec so it is stored in the
        # password store rather than in plain rule config.
        "secret": DictElement(
            required=True,
            parameter_form=Password(
                title=Title("Secret for API Token."),
                migrate=migrate_to_password,
            ),
        ),
    }
    return Dictionary(
        title=Title("PBS API-Token Login"),
        help_text=Help("This rule is used to showcase a special agent with configuration."),
        elements=form_elements,
    )
# Register the Setup rule; the name must match the SpecialAgentConfig in
# server_side_calls so the parameters reach the agent call.
rule_spec_pbs_special_agent = SpecialAgent(
    topic=Topic.CLOUD,
    name="pbs_special_agent",
    title=Title("Proxmox PBS"),
    parameter_form=_formspec
)

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Shebang needed only for editors
from cmk.server_side_calls.v1 import noop_parser, SpecialAgentConfig, SpecialAgentCommand, HostConfig
def _agent_arguments(params, host_config: HostConfig):
    """Translate the rule parameters into the special agent's command line."""
    command_line = [
        "--host",
        host_config.name,
        "--port",
        params['port'],
        "--fingerprint",
        str(params['fingerprint']),
        "--tokenid",
        str(params['tokenid']),
        # unsafe() hands the plain-text secret to the agent process.
        "--secret",
        params['secret'].unsafe(),
    ]
    yield SpecialAgentCommand(command_arguments=command_line)
# Register the special agent call; the name must match the ruleset name so
# configured parameters are passed to _agent_arguments.
special_agent_pbs_special_agent = SpecialAgentConfig(
    name="pbs_special_agent",
    parameter_parser=noop_parser,
    commands_function=_agent_arguments
)

2
setup.cfg Normal file
View File

@ -0,0 +1,2 @@
[flake8]
max-line-length = 100