From 2150565ab86c8a49e34030f9d32b57cab86866ca Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 17:19:42 +0300 Subject: [PATCH 01/12] host & item-names filtering added --- .coveragerc | 4 - .editorconfig | 21 -- .gitignore | 258 ++++++++++++++++++++- .travis.yml | 37 --- AUTHORS.rst | 2 +- Dockerfile | 6 +- HISTORY.rst | 5 + README.md | 54 +++++ README.rst | 62 ----- config-example.yaml | 23 ++ config-example.yml | 9 - requirements.txt | 5 + setup.py | 74 ++---- tests/configs/asterisk.conf.yml | 7 - tests/configs/disable_timestamps.yaml | 23 -- tests/configs/explicit_config.yaml | 23 -- tests/configs/reject_labels.conf.yml | 11 - tests/conftest.py | 71 ------ tests/fixtures/host.get_success.json | 14 -- tests/fixtures/items.asterisk_mapping.json | 33 --- tests/fixtures/items.get_success.json | 78 ------- tests/fixtures/items.reject_labels.json | 34 --- tests/func_test.py | 90 ------- tests/unit_test.py | 59 ----- tox.ini | 15 -- zabbix_exporter/__init__.py | 9 +- zabbix_exporter/commands.py | 104 +++------ zabbix_exporter/compat.py | 10 - zabbix_exporter/core.py | 159 ------------- zabbix_exporter/logger.py | 6 + zabbix_exporter/prometheus.py | 31 +-- zabbix_exporter/server.py | 29 +++ zabbix_exporter/utils.py | 34 ++- zabbix_exporter/zabbix_collector.py | 228 ++++++++++++++++++ 34 files changed, 714 insertions(+), 914 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .editorconfig delete mode 100644 .travis.yml create mode 100644 README.md delete mode 100644 README.rst create mode 100644 config-example.yaml delete mode 100644 config-example.yml create mode 100644 requirements.txt delete mode 100644 tests/configs/asterisk.conf.yml delete mode 100644 tests/configs/disable_timestamps.yaml delete mode 100644 tests/configs/explicit_config.yaml delete mode 100644 tests/configs/reject_labels.conf.yml delete mode 100644 tests/conftest.py delete mode 100644 tests/fixtures/host.get_success.json delete mode 100644 tests/fixtures/items.asterisk_mapping.json delete mode 100644 tests/fixtures/items.get_success.json delete mode 100644 tests/fixtures/items.reject_labels.json delete mode 100644 tests/func_test.py delete mode 100644 tests/unit_test.py delete mode 100644 tox.ini mode change 100755 => 100644 zabbix_exporter/__init__.py delete mode 100644 zabbix_exporter/compat.py delete mode 100644 zabbix_exporter/core.py create mode 100644 zabbix_exporter/logger.py create mode 100644 zabbix_exporter/server.py create mode 100644 zabbix_exporter/zabbix_collector.py diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 050a417..0000000 --- a/.coveragerc +++ /dev/null @@ -1,4 +0,0 @@ -[run] -branch = True -include = - zabbix_exporter/* diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index d4a2c44..0000000 --- a/.editorconfig +++ /dev/null @@ -1,21 +0,0 @@ -# http://editorconfig.org - -root = true - -[*] -indent_style = space -indent_size = 4 -trim_trailing_whitespace = true -insert_final_newline = true -charset = utf-8 -end_of_line = lf - -[*.bat] -indent_style = tab -end_of_line = crlf - -[LICENSE] -insert_final_newline = false - -[Makefile] -indent_style = tab diff --git a/.gitignore b/.gitignore index 1dbc687..8c0e88d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,158 @@ +# Created by https://www.toptal.com/developers/gitignore/api/c,python,pycharm+all +# Edit at https://www.toptal.com/developers/gitignore?templates=c,python,pycharm+all + +### C ### +# Prerequisites +*.d + +# Object files +*.o +*.ko +*.obj +*.elf + +# Linker output +*.ilk 
+*.map +*.exp + +# Precompiled Headers +*.gch +*.pch + +# Libraries +*.lib +*.a +*.la +*.lo + +# Shared objects (inc. Windows DLLs) +*.dll +*.so +*.so.* +*.dylib + +# Executables +*.exe +*.out +*.app +*.i*86 +*.x86_64 +*.hex + +# Debug files +*.dSYM/ +*.su +*.idb +*.pdb + +# Kernel Module Compile Results +*.mod* +*.cmd +.tmp_versions/ +modules.order +Module.symvers +Mkfile.old +dkms.conf + +### PyCharm+all ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm+all Patch ### +# Ignore everything but code style settings and run configurations +# that are supposed to be shared within teams. 
+ +.idea/* + +!.idea/codeStyles +!.idea/runConfigurations + +### Python ### # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions -*.so # Distribution / packaging .Python -env/ build/ develop-eggs/ dist/ @@ -20,9 +164,12 @@ lib64/ parts/ sdist/ var/ +wheels/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg +MANIFEST # PyInstaller # Usually these files are written by a python script from a template @@ -37,13 +184,17 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml -*,cover +*.cover +*.py,cover .hypothesis/ +.pytest_cache/ +cover/ # Translations *.mo @@ -51,12 +202,111 @@ coverage.xml # Django stuff: *.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy # Sphinx documentation docs/_build/ # PyBuilder +.pybuilder/ target/ -#Ipython Notebook +# Jupyter Notebook .ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +### Python Patch ### +# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration +poetry.toml + + +# End of https://www.toptal.com/developers/gitignore/api/c,python,pycharm+all diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f6b589f..0000000 --- a/.travis.yml +++ /dev/null @@ -1,37 +0,0 @@ -sudo: required -language: python -services: - - docker -env: - global: - - TRAVIS_COMMIT_SHORT=${TRAVIS_COMMIT::8} - -python: - - '2.7' - - '3.5' - - '3.6' - - pypy -install: - - pip install tox-travis -script: - - tox -before_install: - - pip install codecov -after_success: - - codecov - -notifications: - email: false - -deploy: - - provider: script - script: make travis-build-docker - on: - branch: master - - provider: pypi - user: mybook - distributions: sdist bdist_wheel - password: - secure: 3JtgqlD3CptovnS2LW2ZnewtA7f9UXALtW5UvvA28aADbxDmaIdXgs5QuKrYeY6UXbdXX2MUS1VyLZLpt0/yYthwmK1WCxCkcCoYRhjwZkgfkH3UUZTp8K6POu+zaCOnvIqyXTtebzEKlQ2bFrlE+ZM6R0f8ju1mSggK8wxYO//h1eztbkgONJoc38tZYSRJ22AAmskmMpRMzgZp3TxGaE4jFdGcj1CpvnzCijdYYgRmLDwrRa/olT97taKil7ii2IZMzH2yA8/JC9BwJ/YGiHPP+taF/v0f2wLyK1d4WlmtqtQAR6AFKR8gJoPwGhFV2fcv3L0W6FVHdj3V7zFEWmUwdm/VeI0xeD2EGpzXvacJduDLN0RcSbUGIe5CGanFaQJ04NBdmxUNlKqGmUgGS1zYEj+EbpkgLvNFiMjz4GzQx7FSgB/H0Odz3ytmbL2WCiDJgizW6Ovup5WvS3BiBBR+izq0iUC9AGBPqLsjWVQ9O/+REqWlqLA8+uXEeGk1A3jSWzYgfHbrh3Tc4cIiNRPlYFrB0aljbGSNqFEBuQ5v60MwdTuKRDF5bNJNivjPtwOIgtZyLIB5VUWhruSviOvvRxe4Og4uj2WYFZoHCkc5jrpwOYn7Y40L5rn4F9gIVBzg1BgKE9g43dy5p0fXhFTfb2Y0/d3Al851gsnkCI8= - on: - tags: true diff --git a/AUTHORS.rst b/AUTHORS.rst index 8f45ef7..e9c1fd9 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,4 +1,4 @@ Contributors ------------ - * Ilya Baryshev +* Kazakov Kirill \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index ee5c816..58bd028 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.5-slim +FROM python:3.6-slim MAINTAINER The MyBook Developers RUN groupadd zabbix_exporter && useradd --no-create-home --gid zabbix_exporter zabbix_exporter @@ -6,7 +6,7 @@ COPY . /tmp/zabbix_exporter WORKDIR /tmp/zabbix_exporter RUN pip install -e . -COPY config-example.yml /zabbix_exporter/zabbix_exporter.yml +COPY config-example.yaml /zabbix_exporter/zabbix_exporter.yaml EXPOSE 9224 VOLUME [ "/zabbix_exporter" ] @@ -14,5 +14,5 @@ VOLUME [ "/zabbix_exporter" ] USER zabbix_exporter WORKDIR /zabbix_exporter ENTRYPOINT [ "/usr/local/bin/zabbix_exporter" ] -CMD [ "--config=/zabbix_exporter/zabbix_exporter.yml", \ +CMD [ "--config=/zabbix_exporter/zabbix_exporter.yaml", \ "--timeout=10" ] diff --git a/HISTORY.rst b/HISTORY.rst index ccb2dc3..36c1705 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,6 +2,11 @@ History ======= +1.0.3 (2022-11-28) +------------------ + +* Add zabbix filtering by hosts and item names + 1.0.2 (2017-02-25) ------------------ diff --git a/README.md b/README.md new file mode 100644 index 0000000..39db9e7 --- /dev/null +++ b/README.md @@ -0,0 +1,54 @@ +# Zabbix Exporter + +This project is a fork of [original zabbix exporter](https://github.com/MyBook/zabbix-exporter) +with advances functionality: +filtering was added for each zabbix metric with configuration-defined hosts and item names, which significantly +improves execution time. + +### Usage example +```shell +Usage: zabbix_exporter [OPTIONS] + Zabbix metrics exporter for Prometheus + + Use config file to map zabbix metrics names/labels into prometheus. 
Config + below transforms this: + local.metric[uwsgi,workers,myapp,busy] = 8 + local.metric[uwsgi,workers,myapp,idle] = 6 + + into familiar Prometheus gauges: + uwsgi_workers{instance="host1",app="myapp",status="busy"} 8 + uwsgi_workers{instance="host1",app="myapp",status="idle"} 6 + + YAML config example: + metrics: + - key: 'local.metric[uwsgi,workers,*,*]' + name: 'uwsgi_workers' + labels: + app: $1 + status: $2 + reject: + - 'total' + hosts: + - name.of.host.1 + - name.of.host.2 + item_names: + - '*item.name.substr.1*' + - '*item.name.substr.2*' + +Options: + --config PATH Path to exporter config + --port INTEGER Port to serve prometheus stats [default: 9224] + --url TEXT HTTP URL for zabbix instance + --login TEXT Zabbix username + --password TEXT Zabbix password + --verify-tls / --no-verify Enable TLS cert verification [default: true] + --timeout INTEGER API read/connect timeout + --verbose + --help Show this message and exit. + +``` + +### Deploying with Docker +```shell +docker run -d --name zabbix_exporter -v /path/to/your/config.yml:/zabbix_exporter/zabbix_exporter.yml --env=ZABBIX_URL="https://zabbix.example.com/" --env="ZABBIX_LOGIN=username" --env="ZABBIX_PASSWORD=secret" mybook/zabbix-exporter +``` \ No newline at end of file diff --git a/README.rst b/README.rst deleted file mode 100644 index bbcd516..0000000 --- a/README.rst +++ /dev/null @@ -1,62 +0,0 @@ -zabbix_exporter -=============== - -.. image:: https://img.shields.io/badge/python-2.7,%203.6-blue.svg - :target: https://pypi.python.org/pypi/zabbix-exporter/ - -.. image:: https://travis-ci.org/MyBook/zabbix-exporter.svg?branch=master - :target: https://travis-ci.org/MyBook/zabbix-exporter - -.. image:: https://codecov.io/gh/MyBook/zabbix-exporter/branch/master/graph/badge.svg - :target: https://codecov.io/gh/MyBook/zabbix-exporter - -Usage -===== -:: - - Usage: zabbix_exporter [OPTIONS] - - Zabbix metrics exporter for Prometheus - - Use config file to map zabbix metrics names/labels into prometheus. Config - below transfroms this: - - local.metric[uwsgi,workers,myapp,busy] = 8 - local.metric[uwsgi,workers,myapp,idle] = 6 - - into familiar Prometheus gauges: - - uwsgi_workers{instance="host1",app="myapp",status="busy"} 8 - uwsgi_workers{instance="host1",app="myapp",status="idle"} 6 - - YAML: - - metrics: - - key: 'local.metric[uwsgi,workers,*,*]' - name: 'uwsgi_workers' - labels: - app: $1 - status: $2 - reject: - - 'total' - - Options: - --config PATH Path to exporter config - --port INTEGER Port to serve prometheus stats [default: 9224] - --url TEXT HTTP URL for zabbix instance - --login TEXT Zabbix username - --password TEXT Zabbix password - --verify-tls / --no-verify Enable TLS cert verification [default: true] - --timeout INTEGER API read/connect timeout - --verbose - --dump-metrics Output all metrics for human to write yaml - config - --version - --help Show this message and exit. 
- - -Deploying with Docker -===================== -:: - - docker run -d --name zabbix_exporter -v /path/to/your/config.yml:/zabbix_exporter/zabbix_exporter.yml --env=ZABBIX_URL="https://zabbix.example.com/" --env="ZABBIX_LOGIN=username" --env="ZABBIX_PASSWORD=secret" mybook/zabbix-exporter diff --git a/config-example.yaml b/config-example.yaml new file mode 100644 index 0000000..3ebf7ae --- /dev/null +++ b/config-example.yaml @@ -0,0 +1,23 @@ +parsing: + explicit_metrics: true + enable_timestamps: false + enable_empty_hosts: true # if true, will load metrics with empty or missing 'hosts' field without host restriction + +metrics: + - key: 'local.metric[uwsgi,workers,*,*]' + name: 'uwsgi_workers' + type: summary + labels: + app: $1 + status: $2 + reject: + - 'total' + hosts: # list of hosts to load this metric from + - name.of.host.1 + - name.of.host.2 + item_names: # only items with names fitting one of the given patterns will be exported + - '*item.name.substr.1*' + - '*item.name.substr.2*' + - key: 'metric.with.minimal.settings' + name: 'minimal_example' + type: summary diff --git a/config-example.yml b/config-example.yml deleted file mode 100644 index 49119cb..0000000 --- a/config-example.yml +++ /dev/null @@ -1,9 +0,0 @@ -explicit_metrics: true -metrics: -- key: 'local.metric[uwsgi,workers,*,*]' - name: 'uwsgi_workers' - labels: - app: $1 - status: $2 - reject: - - 'total' diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..623bef9 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +click==8.0.4 +prometheus-client==0.15.0 +pyzabbix==1.2.1 +qrconfig==1.0.4 +coloredlogs==15.0.1 \ No newline at end of file diff --git a/setup.py b/setup.py index 37ca1ee..709e928 100755 --- a/setup.py +++ b/setup.py @@ -1,48 +1,15 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- import os import re -import sys -from setuptools import setup -from setuptools.command.test import test as TestCommand +import setuptools - -with open('README.rst') as readme_file: +with open('README.md') as readme_file: readme = readme_file.read() with open('HISTORY.rst') as history_file: history = history_file.read() -requirements = [ - 'prometheus-client>=0.0.13', - 'pyzabbix>=0.7.4', - 'PyYAML>=3.11', - 'click>=6.4', -] - -test_requirements = [ - 'pytest>=3.0.0', - 'pytest-localserver>=0.3.5', - 'pytest-cov>=2.4.0', -] - - -class PyTest(TestCommand): - user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")] - - def initialize_options(self): - TestCommand.initialize_options(self) - self.pytest_args = [] - - def finalize_options(self): - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - import pytest - errno = pytest.main(self.pytest_args) - sys.exit(errno) +with open('requirements.txt') as reqs_file: + requirements = reqs_file.readlines() def get_version(package): @@ -53,39 +20,32 @@ def get_version(package): return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1) -setup( - name='zabbix_exporter', +setuptools.setup( + name='zabbix_selective_exporter', version=get_version('zabbix_exporter'), + url='https://github.com/qoollo/zabbix-exporter', + packages=setuptools.find_packages(), + install_requires=requirements, + python_requires='>=3.6', + description="zabbix metrics for Prometheus", + long_description_content_type="text/markdown", long_description=readme + '\n\n' + history, - author="MyBook", - author_email='coagulant@mybook.ru', - url='https://github.com/MyBook/zabbix-exporter', - 
packages=['zabbix_exporter'], - package_dir={'zabbix_exporter': 'zabbix_exporter'}, - include_package_data=True, - install_requires=requirements, + + author="MyBook, Qoollo", + author_email='kirill.kazakov@qoollo.com', license="BSD", - zip_safe=False, + keywords='zabbix_exporter', entry_points=""" [console_scripts] zabbix_exporter=zabbix_exporter:main """, classifiers=[ - 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', - 'Natural Language :: English', - "Programming Language :: Python :: 2", - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", ], - test_suite='tests', - tests_require=test_requirements, - cmdclass={'test': PyTest}, -) +) \ No newline at end of file diff --git a/tests/configs/asterisk.conf.yml b/tests/configs/asterisk.conf.yml deleted file mode 100644 index e9d0780..0000000 --- a/tests/configs/asterisk.conf.yml +++ /dev/null @@ -1,7 +0,0 @@ ---- -explicit_metrics: true -metrics: - - key: 'local.metric[uwsgi,sum,*,*,foo]' - name: 'uwsgi_$2' - labels: - app: $1 diff --git a/tests/configs/disable_timestamps.yaml b/tests/configs/disable_timestamps.yaml deleted file mode 100644 index b844665..0000000 --- a/tests/configs/disable_timestamps.yaml +++ /dev/null @@ -1,23 +0,0 @@ ---- -explicit_metrics: true -enable_timestamps: false -metrics: - - key: 'local.metric[uwsgi,workers,*,*]' - name: 'uwsgi_workers' - help: 'UWSGI workers' - type: 'gauge' - labels: - app: $1 - status: $2 - reject: - - 'total' - - key: 'local.metric[uwsgi,sum,*,rss]' - name: 'uwsgi_rss' - labels: - app: $1 - - key: 'local.metric[redis,*,*]' - name: 'redis_$1' - labels: - port: $2 - - key: 'system.metric' - - key: 'zfs.total_bytes' diff --git a/tests/configs/explicit_config.yaml b/tests/configs/explicit_config.yaml deleted file mode 100644 index 4516741..0000000 --- a/tests/configs/explicit_config.yaml +++ /dev/null @@ -1,23 +0,0 @@ ---- -explicit_metrics: true -enable_timestamps: true -metrics: - - key: 'local.metric[uwsgi,workers,*,*]' - name: 'uwsgi_workers' - help: 'UWSGI workers' - type: 'gauge' - labels: - app: $1 - status: $2 - reject: - - 'total' - - key: 'local.metric[uwsgi,sum,*,rss]' - name: 'uwsgi_rss' - labels: - app: $1 - - key: 'local.metric[redis,*,*]' - name: 'redis_$1' - labels: - port: $2 - - key: 'system.metric' - - key: 'zfs.total_bytes' diff --git a/tests/configs/reject_labels.conf.yml b/tests/configs/reject_labels.conf.yml deleted file mode 100644 index d8a181a..0000000 --- a/tests/configs/reject_labels.conf.yml +++ /dev/null @@ -1,11 +0,0 @@ ---- -explicit_metrics: true -metrics: - - key: 'local.metric[zpool,*,*]' - name: 'zpool_size' - labels: - mode: $1 - pool: $2 - reject: - - 'pfree' - diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index baa902b..0000000 --- a/tests/conftest.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding: utf-8 -import threading -from functools import partial -from time import sleep - -import pytest - -from prometheus_client import REGISTRY -from pytest_localserver.http import WSGIServer -from werkzeug.wrappers import Response, Request -from zabbix_exporter.commands import cli - - -def zabbix_fake_app(environ, start_response): - request = Request(environ) - request_body = request.get_data(as_text=True) - - if getattr(zabbix_fake_app, 
'status', False): - response = Response(status=zabbix_fake_app.status) - response.data = zabbix_fake_app.content - return response(environ, start_response) - - response = Response(status=200, headers=[('Content-type', 'application/json')]) - if '"method": "user.login"' in request_body: - json_string = '{"jsonrpc":"2.0","result":"9287f336ffb611e586aa5e5517507c66","id":0}' - elif '"method": "host.get"' in request_body: - json_string = open('tests/fixtures/host.get_success.json').read() - elif '"method": "item.get"' in request_body: - json_string = open('tests/fixtures/items.get_success.json').read() - else: - json_string = 'Unrecognized test request' - response.data = json_string - return response(environ, start_response) - - -@pytest.fixture -def zabbixserver(request): - def func(): # noqa - if getattr(server.app, 'status', None): - del server.app.status - del server.app.content - server = WSGIServer(application=zabbix_fake_app) - server.start() - request.addfinalizer(server.stop) - request._addfinalizer(func, scope='function') - def serve_content(self, content, status=200): # noqa - self.app.content = content - self.app.status = status - server.serve_content = partial(serve_content, server) - return server - - -@pytest.fixture -def zabbix_exporter_cli(request): - def cli_launcher(args): - httpd = cli(prog_name='zabbix_exporter', args=args + ['--return-server'], standalone_mode=False) - thread = threading.Thread(target=httpd.serve_forever) - def stop_server(): # noqa - httpd.shutdown() - httpd.server_close() - thread.join() - request.addfinalizer(stop_server) - thread.start() - sleep(1) - return cli_launcher - - -@pytest.fixture(autouse=True) -def _clear_registry_collectors(): - REGISTRY._collector_to_names.clear() - REGISTRY._names_to_collectors.clear() diff --git a/tests/fixtures/host.get_success.json b/tests/fixtures/host.get_success.json deleted file mode 100644 index e40a8ec..0000000 --- a/tests/fixtures/host.get_success.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "jsonrpc": "2.0", - "result": [ - { - "name": "rough-snowflake-db", - "hostid": "3" - }, - { - "name": "rough-snowflake-web", - "hostid": "4" - } - ], - "id":1 -} diff --git a/tests/fixtures/items.asterisk_mapping.json b/tests/fixtures/items.asterisk_mapping.json deleted file mode 100644 index ff9a70d..0000000 --- a/tests/fixtures/items.asterisk_mapping.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "jsonrpc": "2.0", - "result": [ - { - "itemid": "120", - "name": "UWSGI exceptions projectA", - "key_": "local.metric[uwsgi,sum,projectA,exceptions,foo]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359130", - "lastvalue": "10" - }, - { - "itemid": "121", - "name": "UWSGI requests projectA", - "key_": "local.metric[uwsgi,sum,projectA,requests,foo]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359130", - "lastvalue": "100" - }, - { - "itemid": "122", - "name": "UWSGI exceptions projectB", - "key_": "local.metric[uwsgi,sum,projectB,exceptions,foo]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359130", - "lastvalue": "1000" - } - ], - "id":1 -} diff --git a/tests/fixtures/items.get_success.json b/tests/fixtures/items.get_success.json deleted file mode 100644 index c26d2e5..0000000 --- a/tests/fixtures/items.get_success.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "jsonrpc": "2.0", - "result": [ - { - "itemid": "120", - "name": "Redis connected clients", - "key_": "local.metric[redis,connected_clients,6380]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359130", - "lastvalue": "10" - }, - { - 
"itemid": "126", - "name": "UWSGI RSS sum", - "key_": "local.metric[uwsgi,sum,rough-snowflake,rss]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359130", - "lastvalue": "351182848" - }, - { - "itemid": "123", - "name": "UWSGI rough-snowflake busy workers", - "key_": "local.metric[uwsgi,workers,rough-snowflake,busy]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359143", - "lastvalue": "6" - }, - { - "itemid": "124", - "name": "UWSGI rough-snowflake idle workers", - "key_": "local.metric[uwsgi,workers,rough-snowflake,idle]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359143", - "lastvalue": "10" - }, - { - "itemid": "125", - "name": "UWSGI rough-snowflake total workers", - "key_": "local.metric[uwsgi,workers,rough-snowflake,total]", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359143", - "lastvalue": "16" - }, - { - "itemid": "122", - "name": "Relevant, but unsupported metric type", - "key_": "system.metric[uname]", - "hostid": "4", - "value_type": "1", - "lastclock": "1460359140", - "lastvalue": "Darwin" - }, - { - "itemid": "121", - "name": "Some non-relevant float metric", - "key_": "wtf", - "hostid": "4", - "value_type": "3", - "lastclock": "1460359140", - "lastvalue": "16.2" - }, - { - "itemid": "130", - "name": "ZFS bytes", - "key_": "zfs.total_bytes", - "hostid": "3", - "value_type": "3", - "lastclock": "1460359140", - "lastvalue": "23243473482" - } - ], - "id":1 -} diff --git a/tests/fixtures/items.reject_labels.json b/tests/fixtures/items.reject_labels.json deleted file mode 100644 index ea82cae..0000000 --- a/tests/fixtures/items.reject_labels.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "jsonrpc": "2.0", - "result": [ - { - "itemid": "130", - "name": "ZFS bytes used", - "key_": "local.metric[zpool,used,tank]", - "hostid": "3", - "value_type": "3", - "lastclock": "1460359140", - "lastvalue": "749471793152" - }, - { - "itemid": "131", - "name": "ZFS bytes total", - "key_": "local.metric[zpool,total,tank]", - "hostid": "3", - "value_type": "3", - "lastclock": "1460359140", - "lastvalue": "970662608896" - }, - { - "itemid": "132", - "name": "ZFS bytes free percent", - "key_": "local.metric[zpool,pfree,tank]", - "hostid": "3", - "value_type": "3", - "lastclock": "1460359140", - "lastvalue": "23.0000" - } - ], - "id":1 -} - diff --git a/tests/func_test.py b/tests/func_test.py deleted file mode 100644 index e89b4ce..0000000 --- a/tests/func_test.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding: utf-8 - -import pytest -import requests - -from zabbix_exporter.prometheus import text_string_to_metric_families - - -@pytest.mark.parametrize("config_name,timestamps", [ - ("disable_timestamps", (None, None, None, None, None)), - ("explicit_config", (1460359130, 1460359130, 1460359143, 1460359143, 1460359140)), -]) -def test_configs(zabbixserver, zabbix_exporter_cli, config_name, timestamps): - args = ['--url', zabbixserver.url, - '--no-verify', '--config', 'tests/configs/%s.yaml' % config_name, - '--login', 'demo', '--password', 'demo', '--port', '9224', '--verbose'] - zabbix_exporter_cli(args) - - response = requests.get('http://localhost:9224/metrics/') - metrics = [m for m in text_string_to_metric_families(response.text) - if not m.name.startswith('zabbix_exporter_') and not m.name.startswith('process_')] - - assert len(metrics) == 4 - assert metrics[0].name == 'redis_connected_clients' - assert metrics[0].type == 'untyped' - assert metrics[0].samples == [ - (u'redis_connected_clients', - {u'port': u'6380', - u'instance': u'rough-snowflake-web'}, 
- 10.0, timestamps[0])] - - assert metrics[1].name == 'uwsgi_rss' - assert metrics[1].documentation == 'UWSGI RSS sum' - assert metrics[1].type == 'untyped' - assert metrics[1].samples == [ - (u'uwsgi_rss', - {u'app': u'rough-snowflake', - u'instance': u'rough-snowflake-web'}, - 351182848.0, timestamps[1])] - - assert metrics[2].name == 'uwsgi_workers' - assert metrics[2].documentation == 'UWSGI workers' - assert metrics[2].type == 'gauge' - assert metrics[2].samples == [ - (u'uwsgi_workers', - {u'app': u'rough-snowflake', - u'instance': u'rough-snowflake-web', - u'status': u'busy'}, - 6.0, timestamps[2]), - (u'uwsgi_workers', - {u'app': u'rough-snowflake', - u'instance': u'rough-snowflake-web', - u'status': u'idle'}, - 10.0, timestamps[3])] - - assert metrics[3].name == 'zfs_total_bytes' - assert metrics[3].samples == [ - (u'zfs_total_bytes', - {u'instance': u'rough-snowflake-db'}, - 23243473482, timestamps[4]), - ] - - -def test_implicit_config(zabbixserver, zabbix_exporter_cli): - args = ['--url', zabbixserver.url, - '--login', 'demo', '--password', 'demo', '--port', '9224', '--verbose'] - zabbix_exporter_cli(args) - response = requests.get('http://localhost:9224/metrics/') - metrics = [m for m in text_string_to_metric_families(response.text) - if not m.name.startswith('zabbix_exporter') and not m.name.startswith('process_')] - - assert [m.name for m in metrics] == [ - u'local_metric_redis_connected_clients_6380_', - u'local_metric_uwsgi_sum_rough_snowflake_rss_', - u'local_metric_uwsgi_workers_rough_snowflake_busy_', - u'local_metric_uwsgi_workers_rough_snowflake_idle_', - u'local_metric_uwsgi_workers_rough_snowflake_total_', - u'wtf', - u'zfs_total_bytes' - ] - - -def test_exporter_returns_500_on_scrape_errors(zabbixserver, zabbix_exporter_cli): - args = ['--url', zabbixserver.url, - '--no-verify', '--config', 'tests/configs/explicit_config.yaml', - '--login', 'demo', '--password', 'demo', '--port', '9224', '--verbose'] - zabbix_exporter_cli(args) - zabbixserver.serve_content('', 500) - response = requests.get('http://localhost:9224/metrics/') - assert response.status_code == 500 diff --git a/tests/unit_test.py b/tests/unit_test.py deleted file mode 100644 index 176f341..0000000 --- a/tests/unit_test.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding: utf-8 -import yaml - -from zabbix_exporter.core import SortedDict, ZabbixCollector - - -def test_sorted_keys_dict(): - d = SortedDict() - for i, letter in enumerate('clkefgnhidjmbaop'): - d[letter] = i - assert ''.join(d.keys()) == 'abcdefghijklmnop' - assert '-'.join(map(str, d.values())) == '13-12-0-9-3-4-5-7-8-10-2-1-11-6-14-15' - - -def test_metric_families_dont_override_each_other(zabbixserver): - config = yaml.safe_load(open('tests/configs/asterisk.conf.yml')) - collector = ZabbixCollector(base_url=zabbixserver.url, login='demo', password='demo', **config) - - result_json = open('tests/fixtures/items.asterisk_mapping.json').read() - zabbixserver.serve_content(result_json) - metrics = [m.samples for m in collector.collect()] - - assert metrics == [ - [(u'uwsgi_exceptions', - {'app': u'projectA', 'instance': u'rough-snowflake-web'}, - 10.0, - None), - (u'uwsgi_exceptions', - {'app': u'projectB', 'instance': u'rough-snowflake-web'}, - 1000.0, - None) - ], - [(u'uwsgi_requests', - {'app': u'projectA', 'instance': u'rough-snowflake-web'}, - 100.0, - None), - ] - ] - - -def test_reject_labels(zabbixserver): - config = yaml.safe_load(open('tests/configs/reject_labels.conf.yml')) - collector = ZabbixCollector(base_url=zabbixserver.url, 
login='demo', password='demo', **config) - - result_json = open('tests/fixtures/items.reject_labels.json').read() - zabbixserver.serve_content(result_json) - metrics = [m.samples for m in collector.collect()] - - assert metrics == [ - [(u'zpool_size', - {'mode': u'used', 'pool': u'tank', 'instance': u'rough-snowflake-db'}, - 749471793152, - None), - (u'zpool_size', - {'mode': u'total', 'pool': u'tank', 'instance': u'rough-snowflake-db'}, - 970662608896, - None) - ], - ] diff --git a/tox.ini b/tox.ini deleted file mode 100644 index dc01080..0000000 --- a/tox.ini +++ /dev/null @@ -1,15 +0,0 @@ -[tox] -envlist = py27, py35, py36, pypy -skipsdist = true - -[testenv] -commands = - python setup.py develop - python setup.py test -a "--cov=zabbix_exporter --cov-append" - -[pytest] -addopts = --tb=native --cov-config .coveragerc --cov zabbix_exporter/ - -[flake8] -max-line-length = 120 -exclude = .tox, .git diff --git a/zabbix_exporter/__init__.py b/zabbix_exporter/__init__.py old mode 100755 new mode 100644 index 3f37173..788f3ec --- a/zabbix_exporter/__init__.py +++ b/zabbix_exporter/__init__.py @@ -1,11 +1,6 @@ -# coding: utf-8 -__author__ = 'MyBook' -__email__ = 'dev@mybook.ru' -__version__ = '1.0.2' - envvar_prefix = 'ZABBIX' - +__version__ = '1.0.3' def main(): from .commands import cli - return cli(auto_envvar_prefix=envvar_prefix) + return cli(auto_envvar_prefix=envvar_prefix) \ No newline at end of file diff --git a/zabbix_exporter/commands.py b/zabbix_exporter/commands.py index c120813..28a40ac 100644 --- a/zabbix_exporter/commands.py +++ b/zabbix_exporter/commands.py @@ -1,29 +1,19 @@ -# coding: utf-8 -import logging - import click -import sys -import yaml from prometheus_client import REGISTRY +from qrconfig import QRYamlConfig -import zabbix_exporter -from zabbix_exporter.core import ZabbixCollector, MetricsHandler -from .compat import HTTPServer - -logger = logging.getLogger(__name__) +from .server import create_exporter_server +from .zabbix_collector import ZabbixSelectiveCollector +from .logger import cmd_logger def validate_settings(settings): if not settings['url']: - click.echo('Please provide Zabbix API URL', err=True) - sys.exit(1) + raise Exception('Please provide Zabbix API URL') if not settings['login']: - click.echo('Please provide Zabbix username', err=True) - sys.exit(1) + raise Exception('Please provide Zabbix username') if not settings['password']: - click.echo('Please provide Zabbix account password', err=True) - sys.exit(1) - return True + raise Exception('Please provide Zabbix account password') @click.command() @@ -36,14 +26,11 @@ def validate_settings(settings): @click.option('--verify-tls/--no-verify', help='Enable TLS cert verification [default: true]', default=True) @click.option('--timeout', help='API read/connect timeout', default=5) @click.option('--verbose', is_flag=True) -@click.option('--dump-metrics', help='Output all metrics for human to write yaml config', is_flag=True) -@click.option('--version', is_flag=True) -@click.option('--return-server', is_flag=True, help='Developer flag. Please ignore.') def cli(**settings): """Zabbix metrics exporter for Prometheus Use config file to map zabbix metrics names/labels into prometheus. 
- Config below transfroms this: + Config below transforms this: local.metric[uwsgi,workers,myapp,busy] = 8 local.metric[uwsgi,workers,myapp,idle] = 6 @@ -53,7 +40,7 @@ def cli(**settings): uwsgi_workers{instance="host1",app="myapp",status="busy"} 8 uwsgi_workers{instance="host1",app="myapp",status="idle"} 6 - YAML: + YAML config example: \b metrics: @@ -64,59 +51,44 @@ def cli(**settings): status: $2 reject: - 'total' + hosts: + - name.of.host.1 + - name.of.host.2 + item_names: + - '*item.name.substr.1*' + - '*item.name.substr.2*' """ - if settings['version']: - click.echo('Version %s' % zabbix_exporter.__version__) - return - - if not validate_settings(settings): - return - if settings['config']: - exporter_config = yaml.safe_load(open(settings['config'])) - else: - exporter_config = {} + validate_settings(settings) - base_logger = logging.getLogger('zabbix_exporter') - handler = logging.StreamHandler() - base_logger.addHandler(handler) - base_logger.setLevel(logging.ERROR) - handler.setFormatter(logging.Formatter('[%(asctime)s] %(message)s', "%Y-%m-%d %H:%M:%S")) if settings['verbose']: - base_logger.setLevel(logging.DEBUG) - - collector = ZabbixCollector( - base_url=settings['url'].rstrip('/'), - login=settings['login'], - password=settings['password'], - verify_tls=settings['verify_tls'], - timeout=settings['timeout'], - **exporter_config + cmd_logger.setLevel('DEBUG') + else: + cmd_logger.setLevel('ERROR') + + exporter_config = QRYamlConfig(settings['config']) if settings['config'] else dict() + + # create zabbix collector + collector = ZabbixSelectiveCollector( + explicit_metrics=exporter_config.parsing.explicit_metrics, + enable_timestamps=exporter_config.parsing.enable_timestamps, + enable_empty_hosts=exporter_config.parsing.enable_empty_hosts, + metrics=exporter_config.metrics, + zabbix_config=dict(base_url=settings['url'].rstrip('/'), + login=settings['login'], + password=settings['password'], + verify_tls=settings['verify_tls'], + timeout=settings['timeout'], ), ) - - if settings['dump_metrics']: - return dump_metrics(collector) - REGISTRY.register(collector) - httpd = HTTPServer(('', int(settings['port'])), MetricsHandler) + + # setup server + server = create_exporter_server(int(settings['port'])) click.echo('Exporter for {base_url}, user: {login}, password: ***'.format( base_url=settings['url'].rstrip('/'), login=settings['login'], password=settings['password'] )) - if settings['return_server']: - return httpd + click.echo('Exporting Zabbix metrics on http://0.0.0.0:{}'.format(settings['port'])) - httpd.serve_forever() - - -def dump_metrics(collector): - for item in collector.zapi.item.get(output=['name', 'key_', 'hostid', 'lastvalue', 'lastclock', 'value_type'], - sortfield='key_'): - click.echo('{host:20}{key} = {value}\n{name:>20}'.format( - host=collector.host_mapping.get(item['hostid'], item['hostid']), - key=item['key_'], - value=item['lastvalue'], - name=item['name'] - )) - return + server.serve_forever() diff --git a/zabbix_exporter/compat.py b/zabbix_exporter/compat.py deleted file mode 100644 index f4a14a4..0000000 --- a/zabbix_exporter/compat.py +++ /dev/null @@ -1,10 +0,0 @@ -# flake8: noqa -try: - from http.server import HTTPServer, BaseHTTPRequestHandler -except ImportError: - from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler - -try: - import io as StringIO -except ImportError: - import StringIO diff --git a/zabbix_exporter/core.py b/zabbix_exporter/core.py deleted file mode 100644 index 851fdd6..0000000 --- a/zabbix_exporter/core.py +++ /dev/null @@ 
-1,159 +0,0 @@ -# coding: utf-8 -import logging -import re -from collections import OrderedDict - -import pyzabbix -from prometheus_client import CONTENT_TYPE_LATEST, REGISTRY, Counter, Gauge, CollectorRegistry - -from .compat import BaseHTTPRequestHandler -from .prometheus import MetricFamily, generate_latest -from .utils import SortedDict - -logger = logging.getLogger(__name__) - - -exporter_registry = CollectorRegistry() # makes sure to collect metrics after ZabbixCollector - -scrapes_total = Counter('zabbix_exporter_scrapes_total', 'Number of scrapes', registry=exporter_registry) -api_requests_total = Counter('zabbix_exporter_api_requests_total', 'Requests to Zabbix API', registry=exporter_registry) -api_bytes_total = Counter('zabbix_exporter_api_bytes_total', 'Bytes in response from Zabbix API (after decompression)', registry=exporter_registry) -api_seconds_total = Counter('zabbix_exporter_api_seconds_total', 'Seconds spent fetching from Zabbix API', registry=exporter_registry) -metrics_count_total = Gauge('zabbix_exporter_metrics_total', 'Number of exported zabbix metrics', registry=exporter_registry) -series_count_total = Gauge('zabbix_exporter_series_total', 'Number of exported zabbix values', registry=exporter_registry) - - -def sanitize_key(string): - return re.sub('[^a-zA-Z0-9:_]+', '_', string) - - -def prepare_regex(key_pattern): - return re.escape(key_pattern).replace('\*', '([^,]*?)') - - -class ZabbixCollector(object): - - def __init__(self, base_url, login, password, verify_tls=True, timeout=None, **options): - self.options = options - self.key_patterns = {prepare_regex(metric['key']): metric - for metric in options.get('metrics', [])} - - self.zapi = pyzabbix.ZabbixAPI(base_url, timeout=timeout) - if not verify_tls: - import requests.packages.urllib3 as urllib3 - urllib3.disable_warnings() - self.zapi.session.verify = verify_tls - - def measure_api_request(r, *args, **kwargs): - api_requests_total.inc() - api_bytes_total.inc(len(r.content)) - api_seconds_total.inc(r.elapsed.total_seconds()) - self.zapi.session.hooks = {'response': measure_api_request} - - self.zapi.login(login, password) - - self.host_mapping = {row['hostid']: row['name'] - for row in self.zapi.host.get(output=['hostid', 'name'])} - - def process_metric(self, item): - if not self.is_exportable(item): - logger.debug('Dropping unsupported metric %s', item['key_']) - return - - metric = item['key_'] - metric_options = {} - labels_mapping = SortedDict() - for pattern, attrs in self.key_patterns.items(): - match = re.match(pattern, item['key_']) - if match: - # process metric name - metric = attrs.get('name', metric) - - def repl(m): - asterisk_index = int(m.group(1)) - return match.group(asterisk_index) - metric = re.sub('\$(\d+)', repl, metric) - - # ignore metrics with rejected placeholders - rejected_matches = [r for r in attrs.get('reject', []) if re.search(r, item['key_'])] - if rejected_matches: - logger.debug('Rejecting metric %s (matched %s)', rejected_matches[0], metric) - continue # allow to process metric by another rule - - # create labels - for label_name, match_group in attrs.get('labels', {}).items(): - if match_group[0] == '$': - label_value = match.group(int(match_group[1])) - else: - label_value = match_group - labels_mapping[label_name] = label_value - metric_options = attrs - break - else: - if self.options.get('explicit_metrics', False): - logger.debug('Dropping implicit metric name %s', item['key_']) - return - - # automatic host -> instance labeling - labels_mapping['instance'] = 
self.host_mapping[item['hostid']] - - logger.debug('Converted: %s -> %s [%s]', item['key_'], metric, labels_mapping) - return { - 'name': sanitize_key(metric), - 'type': metric_options.get('type', 'untyped'), # untyped by default - 'documentation': metric_options.get('help', item['name']), - 'labels_mapping': labels_mapping, - } - - def collect(self): - series_count = 0 - enable_timestamps = self.options.get('enable_timestamps', False) - # We need to iterate metrics twice, because zabbix metric names order - # does not come in same order as prometheus metric names - metric_families = OrderedDict() - items = self.zapi.item.get(output=['name', 'key_', 'hostid', 'lastvalue', 'lastclock', 'value_type'], - sortfield='key_') - - for item in items: - metric = self.process_metric(item) - if not metric: - continue - - if metric['name'] not in metric_families: - family = MetricFamily(typ=metric['type'], - name=metric['name'], - documentation=metric['documentation'], - labels=metric['labels_mapping'].keys()) - metric_families[metric['name']] = family - metric_families[metric['name']].add_metric( - metric['labels_mapping'].values(), float(item['lastvalue']), - int(item['lastclock']) if enable_timestamps else None) - series_count += 1 - - for f in metric_families.values(): - yield f - - metrics_count_total.set(len(metric_families)) - series_count_total.set(series_count) - - def is_exportable(self, item): - return item['value_type'] in {'0', '3'} # only numeric/float values - - -class MetricsHandler(BaseHTTPRequestHandler): - def do_GET(self): - try: - scrapes_total.inc() - response = generate_latest(REGISTRY) + generate_latest(exporter_registry) - status = 200 - except Exception: - logger.exception('Fetch failed') - response = '' - status = 500 - self.send_response(status) - self.send_header('Content-Type', CONTENT_TYPE_LATEST) - self.end_headers() - self.wfile.write(response) - - def log_message(self, format, *args): - return diff --git a/zabbix_exporter/logger.py b/zabbix_exporter/logger.py new file mode 100644 index 0000000..a2466b9 --- /dev/null +++ b/zabbix_exporter/logger.py @@ -0,0 +1,6 @@ +import coloredlogs +import logging + +cmd_logger = logging.getLogger(__name__) +coloredlogs.install(fmt='%(asctime)s %(levelname)s %(message)s', + level='DEBUG', logger=cmd_logger) diff --git a/zabbix_exporter/prometheus.py b/zabbix_exporter/prometheus.py index a86832b..36ccd44 100644 --- a/zabbix_exporter/prometheus.py +++ b/zabbix_exporter/prometheus.py @@ -1,14 +1,15 @@ -# coding: utf-8 -"""Code in this module is based on prometheus client https://github.com/prometheus/client_python - Code is vendored and forked to enable timestamps support in python client - Copyright 2015 The Prometheus Authors """ -from .compat import StringIO -from prometheus_client import core +Code in this module is based on prometheus client https://github.com/prometheus/client_python +Code is vendored and forked to enable timestamps support in python client +Copyright 2015 The Prometheus Authors +""" +import io +import prometheus_client.core as core +from prometheus_client.utils import floatToGoString -class MetricFamily(core.Metric): +class MetricFamily(core.Metric): def __init__(self, typ, name, documentation, value=None, labels=None): core.Metric.__init__(self, name, documentation, typ) if labels is not None and value is not None: @@ -24,7 +25,7 @@ def add_metric(self, labels, value, timestamp=None): def generate_latest(registry=core.REGISTRY): - '''Returns the metrics from the registry in latest text format as a string.''' + 
"""Returns the metrics from the registry in latest text format as a string.""" output = [] for metric in registry.collect(): output.append('# HELP {0} {1}'.format( @@ -34,16 +35,18 @@ def generate_latest(registry=core.REGISTRY): if len(sample) == 3: name, labels, value = sample timestamp = None - else: + elif len(sample) == 4: name, labels, value, timestamp = sample + else: + name, labels, value, timestamp, *etc = sample if labels: labelstr = '{{{0}}}'.format(','.join( ['{0}="{1}"'.format( - k, v.replace('\\', r'\\').replace('\n', r'\n').replace('"', r'\"')) - for k, v in sorted(labels.items())])) + k, v.replace('\\', r'\\').replace('\n', r'\n').replace('"', r'\"')) + for k, v in sorted(labels.items())])) else: labelstr = '' - output.append('{0}{1} {2}{3}\n'.format(name, labelstr, core._floatToGoString(value), + output.append('{0}{1} {2}{3}\n'.format(name, labelstr, floatToGoString(value), ' %s' % timestamp if timestamp else '')) return ''.join(output).encode('utf-8') @@ -53,7 +56,7 @@ def text_string_to_metric_families(text): See text_fd_to_metric_families. """ - for metric_family in text_fd_to_metric_families(StringIO.StringIO(text)): + for metric_family in text_fd_to_metric_families(io.StringIO(text)): yield metric_family @@ -240,7 +243,7 @@ def build_metric(name, documentation, typ, samples): 'summary': ['_count', '_sum', ''], 'histogram': ['_count', '_sum', '_bucket'], 'untyped': [''], - }.get(typ, [parts[2]]) + }.get(typ, [parts[2]]) allowed_names = [name + n for n in allowed_names] else: # Ignore other comment tokens diff --git a/zabbix_exporter/server.py b/zabbix_exporter/server.py new file mode 100644 index 0000000..0eb387e --- /dev/null +++ b/zabbix_exporter/server.py @@ -0,0 +1,29 @@ +from prometheus_client import CONTENT_TYPE_LATEST, REGISTRY +from http.server import HTTPServer, BaseHTTPRequestHandler + +from .logger import cmd_logger +from .prometheus import generate_latest +from .utils import exporter_registry + + +def create_exporter_server(port: int): + return HTTPServer(('', port), MetricsHandler) + + +class MetricsHandler(BaseHTTPRequestHandler): + def do_GET(self): + try: + exporter_registry.scrapes_total.inc() + response = generate_latest(REGISTRY) + generate_latest(exporter_registry) + status = 200 + except Exception: + cmd_logger.exception('Fetch failed') + response = '' + status = 500 + self.send_response(status) + self.send_header('Content-Type', CONTENT_TYPE_LATEST) + self.end_headers() + self.wfile.write(response) + + def log_message(self, format, *args): + return diff --git a/zabbix_exporter/utils.py b/zabbix_exporter/utils.py index a8bfb95..c6b6ec8 100644 --- a/zabbix_exporter/utils.py +++ b/zabbix_exporter/utils.py @@ -1,10 +1,40 @@ -# coding: utf-8 +import re +from prometheus_client import Counter, Gauge, CollectorRegistry + + +def sanitize_key(string): + return re.sub('[^a-zA-Z0-9:_]+', '_', string) + + +def prepare_regex(key_pattern): + return re.escape(key_pattern).replace('\*', '([^,]*?)') class SortedDict(dict): - """Hackish container to guarantee consistent label sequence for prometheus""" + """Dictionary wrapper to guarantee consistent label sequence for prometheus""" + def keys(self): return sorted(super(SortedDict, self).keys()) def values(self): return [self[key] for key in self.keys()] + + +class LocalCollectorRegistry(CollectorRegistry): + def __init__(self): + super().__init__() + + self.scrapes_total = Counter('zabbix_exporter_scrapes_total', 'Number of scrapes', registry=self) + self.api_requests_total = 
Counter('zabbix_exporter_api_requests_total', 'Requests to Zabbix API', registry=self)
+        self.api_bytes_total = Counter('zabbix_exporter_api_bytes_total',
+                                       'Bytes in response from Zabbix API (after decompression)',
+                                       registry=self)
+        self.api_seconds_total = Counter('zabbix_exporter_api_seconds_total', 'Seconds spent fetching from Zabbix API',
+                                         registry=self)
+        self.metrics_count_total = Gauge('zabbix_exporter_metrics_total', 'Number of exported zabbix metrics',
+                                         registry=self)
+        self.series_count_total = Gauge('zabbix_exporter_series_total', 'Number of exported zabbix values',
+                                        registry=self)
+
+
+exporter_registry = LocalCollectorRegistry()
diff --git a/zabbix_exporter/zabbix_collector.py b/zabbix_exporter/zabbix_collector.py
new file mode 100644
index 0000000..82f4dec
--- /dev/null
+++ b/zabbix_exporter/zabbix_collector.py
@@ -0,0 +1,228 @@
+import copy
+from collections import OrderedDict
+from functools import reduce
+from typing import List
+
+import pyzabbix
+from prometheus_client.registry import Collector
+
+from .logger import cmd_logger
+from .prometheus import MetricFamily
+from .utils import *
+
+
+class ZabbixSelectiveCollector(Collector):
+    """
+    Zabbix metric collector.
+    Sends filtered API requests to Zabbix to fetch only the needed objects:
+    - hosts (via host.get) filtered by name
+    - items (via item.get) filtered by key mask and the used hosts (both provided by the configuration)
+    """
+
+    def __init__(self,
+                 zabbix_config: dict,
+                 metrics: List = None,
+                 explicit_metrics: bool = True,
+                 enable_timestamps: bool = False,
+                 enable_empty_hosts: bool = True):
+        """
+        :param zabbix_config: a dictionary to set up the Zabbix client:
+            - required keys: base_url: str, login: str, password: str
+            - optional keys: verify_tls: bool, timeout: int
+
+        :param metrics: list of metric dictionaries, each defining: key, name, labels, reject, hosts, item_names
+        """
+        self.explicit_metrics = explicit_metrics
+        self.enable_timestamps = enable_timestamps
+        self.enable_empty_hosts = enable_empty_hosts
+        self.metrics = self.__validate_metrics(metrics)
+
+        self.key_patterns = {prepare_regex(metric['key']): metric
+                             for metric in self.metrics}
+
+        self.zapi = self.__create_zabbix_client(**zabbix_config)
+
+        self.host_mapping = self.__get_used_hosts()  # hostname: hostid
+        self.reverse_host_mapping = {v: k for k, v in self.host_mapping.items()}  # hostid: hostname
+
+    def collect(self):
+        series_count = 0
+
+        items = self.__load_zabbix_metrics()
+
+        metric_families = OrderedDict()
+        for item in items:
+            metric = self.__process_metric(item)
+            if not metric:
+                continue
+
+            if metric['name'] not in metric_families:
+                family = MetricFamily(typ=metric['type'],
+                                      name=metric['name'],
+                                      documentation=metric['documentation'],
+                                      labels=metric['labels_mapping'].keys())
+                metric_families[metric['name']] = family
+            metric_families[metric['name']].add_metric(
+                metric['labels_mapping'].values(), float(item['lastvalue']),
+                int(item['lastclock']) if self.enable_timestamps else None)
+            series_count += 1
+
+        for f in metric_families.values():
+            yield f
+
+        exporter_registry.metrics_count_total.set(len(metric_families))
+        exporter_registry.series_count_total.set(series_count)
+
+    def __validate_metrics(self, metrics):
+        """used on collector initialization: validate, copy & normalize the metrics configuration"""
+        metrics = copy.deepcopy(metrics) if metrics is not None else []
+
+        check_keys = ['key', 'name']
+
+        has_empty_hosts = False
+        for m in metrics:
+            m_ok = reduce(lambda ok, key: ok & (m.get(key) is not None), check_keys, True)
+            if not m_ok:
+                raise Exception(
+                    f'invalid metric found! required fields: {check_keys}, found: {m}')
+            if m.get('hosts') in [None, []]:
+                has_empty_hosts = True
+                m['hosts'] = []
+            if m.get('reject') in [None, []]:
+                m['reject'] = []
+            if m.get('labels') in [None, []]:
+                m['labels'] = dict()
+
+            if m.get('item_names') in [None, []]:
+                m['item_names'] = None
+            else:
+                m['item_names'] = list(map(prepare_regex, m['item_names']))
+
+        if has_empty_hosts:
+            if not self.enable_empty_hosts:
+                cmd_logger.error(
+                    'found empty "hosts" field for one of the metrics, but this is forbidden with "enable_empty_hosts"=False. You may change this behaviour in the config')
+                raise Exception('empty "hosts" field')
+            else:
+                cmd_logger.warning('found empty "hosts" field for one of the metrics')
+
+        return metrics
+
+    def __load_zabbix_metrics(self):
+        total_items = []
+        for metric in self.metrics:
+            valid_hosts = [self.host_mapping.get(hname, None) for hname in metric['hosts']]
+            if None in valid_hosts:
+                cmd_logger.error(f'some of metric hosts not found in host_mapping: metric hosts = {metric["hosts"]}')
+                valid_hosts = [h for h in valid_hosts if h is not None]
+
+            key = metric['key']
+
+            params = dict(
+                output=['name', 'key_', 'hostid', 'lastvalue', 'lastclock', 'value_type'],
+                searchWildcardsEnabled='true',
+                search={'key_': key},
+                sortfield='key_',
+            )
+            if len(valid_hosts) != 0:
+                params['hostids'] = valid_hosts
+
+            items = self.zapi.item.get(**params)
+            total_items.extend(items)
+
+        return total_items
+
+    def __get_used_hosts(self):
+        """load info to map host names into host ids"""
+        host_names = reduce(lambda acc, metric: acc + metric['hosts'], self.metrics, [])
+        host_names = list(set(host_names))
+
+        host_dict = self.zapi.host.get(output=['hostid', 'name'],
+                                       filter={'host': host_names})
+        if len(host_dict) != len(host_names):
+            cmd_logger.error(f'FAILED TO GET ALL HOSTS. required {len(host_names)} hosts, loaded {len(host_dict)}')
+
+        host_dict = {x['name']: x['hostid'] for x in host_dict}
+        return host_dict
+
+    def __create_zabbix_client(self, base_url, login, password, verify_tls=True, timeout=None):
+        zapi = pyzabbix.ZabbixAPI(base_url, timeout=timeout)
+        if not verify_tls:
+            import requests.packages.urllib3 as urllib3
+            urllib3.disable_warnings()
+        zapi.session.verify = verify_tls
+
+        def measure_api_request(r, *args, **kwargs):
+            exporter_registry.api_requests_total.inc()
+            exporter_registry.api_bytes_total.inc(len(r.content))
+            exporter_registry.api_seconds_total.inc(r.elapsed.total_seconds())
+
+        zapi.session.hooks = {'response': measure_api_request}
+
+        zapi.login(login, password)
+        return zapi
+
+    def __process_metric(self, item):
+        if not self.__is_exportable(item):
+            cmd_logger.debug('Dropping unsupported metric %s', item['key_'])
+            return
+
+        metric = item['key_']
+        metric_options = {}
+        labels_mapping = SortedDict()
+        for pattern, attrs in self.key_patterns.items():
+            match = re.match(pattern, item['key_'])
+            if match:
+                if attrs['item_names'] is not None:
+                    # check that the item name matches at least one of the given patterns
+                    name_matches = [re.match(p, item['name']) for p in attrs['item_names']]
+                    ok = any(nm is not None for nm in name_matches)
+                    if not ok:
+                        continue
+                else:
+                    cmd_logger.debug('No item_names filter for key %s', attrs['key'])
+
+                # process metric name
+                metric = attrs.get('name', metric)
+
+                def repl(m):
+                    asterisk_index = int(m.group(1))
+                    return match.group(asterisk_index)
+
+                metric = re.sub('\$(\d+)', repl, metric)
+
+                # ignore metrics with rejected placeholders
+                rejected_matches = [r for r in attrs['reject'] if re.search(r, item['key_'])]
+                if rejected_matches:
+                    cmd_logger.debug('Rejecting metric %s (matched %s)', rejected_matches[0], metric)
+                    continue  # allow to process metric by another rule
+
+                # create labels
+                for label_name, match_group in attrs['labels'].items():
+                    if match_group[0] == '$':
+                        label_value = match.group(int(match_group[1]))
+                    else:
+                        label_value = match_group
+                    labels_mapping[label_name] = label_value
+                metric_options = attrs
+                break
+        else:
+            # no key match found
+            if self.explicit_metrics:
+                cmd_logger.debug('Dropping implicit metric name %s', item['key_'])
+                return
+
+        # automatic host -> instance labeling
+        # todo: may load all hosts from zabbix host.get
+        labels_mapping['instance'] = self.reverse_host_mapping.get(item['hostid'], 'unknown_host')
+
+        cmd_logger.debug('Converted: %s -> %s [%s]', item['key_'], metric, labels_mapping)
+        return {
+            'name': sanitize_key(metric),
+            'type': metric_options.get('type', 'untyped'),  # untyped by default
+            'documentation': metric_options.get('help', item['name']),
+            'labels_mapping': labels_mapping,
+        }
+
+    def __is_exportable(self, item):
+        return item['value_type'] in {'0', '3'}  # only numeric/float values
From f043d6f63b0e9384cc793e26ad0e23691abfbef1 Mon Sep 17 00:00:00 2001
From: Kurush7
Date: Mon, 28 Nov 2022 17:28:18 +0300
Subject: [PATCH 02/12] dockerfile updated

---
 Dockerfile | 1 -
 1 file changed, 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 58bd028..7287b3d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,4 @@
 FROM python:3.6-slim
-MAINTAINER The MyBook Developers
 
 RUN groupadd zabbix_exporter && useradd --no-create-home --gid zabbix_exporter zabbix_exporter
 COPY . 
/tmp/zabbix_exporter From 0eeca67e49eb7e611636b7a6265d660d696c1302 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 17:39:55 +0300 Subject: [PATCH 03/12] config example updated --- README.md | 38 +++++++++++++++++++++++-------------- zabbix_exporter/commands.py | 37 ++++++++++++++++++++++-------------- 2 files changed, 47 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 39db9e7..4f5ac33 100644 --- a/README.md +++ b/README.md @@ -20,20 +20,30 @@ Usage: zabbix_exporter [OPTIONS] uwsgi_workers{instance="host1",app="myapp",status="idle"} 6 YAML config example: - metrics: - - key: 'local.metric[uwsgi,workers,*,*]' - name: 'uwsgi_workers' - labels: - app: $1 - status: $2 - reject: - - 'total' - hosts: - - name.of.host.1 - - name.of.host.2 - item_names: - - '*item.name.substr.1*' - - '*item.name.substr.2*' + parsing: + explicit_metrics: true + enable_timestamps: false + enable_empty_hosts: true # if true, will load metrics with empty or missing 'hosts' field without host restriction + + metrics: + - key: 'local.metric[uwsgi,workers,*,*]' + name: 'uwsgi_workers' + type: summary + labels: + app: $1 + status: $2 + reject: + - 'total' + hosts: # list of hosts to load this metric from + - name.of.host.1 + - name.of.host.2 + item_names: # only items with names fitting one of the given patterns will be exported + - '*item.name.substr.1*' + - '*item.name.substr.2*' + - key: 'metric.with.minimal.settings' + name: 'minimal_example' + type: summary + Options: --config PATH Path to exporter config diff --git a/zabbix_exporter/commands.py b/zabbix_exporter/commands.py index 28a40ac..a2a6bb6 100644 --- a/zabbix_exporter/commands.py +++ b/zabbix_exporter/commands.py @@ -43,20 +43,29 @@ def cli(**settings): YAML config example: \b - metrics: - - key: 'local.metric[uwsgi,workers,*,*]' - name: 'uwsgi_workers' - labels: - app: $1 - status: $2 - reject: - - 'total' - hosts: - - name.of.host.1 - - name.of.host.2 - item_names: - - '*item.name.substr.1*' - - '*item.name.substr.2*' + parsing: + explicit_metrics: true + enable_timestamps: false + enable_empty_hosts: true # if true, will load metrics with empty or missing 'hosts' field without host restriction + metrics: + - key: 'local.metric[uwsgi,workers,*,*]' + name: 'uwsgi_workers' + type: summary + labels: + app: $1 + status: $2 + reject: + - 'total' + hosts: # list of hosts to load this metric from + - name.of.host.1 + - name.of.host.2 + item_names: # only items with names fitting one of the given patterns will be exported + - '*item.name.substr.1*' + - '*item.name.substr.2*' + - key: 'metric.with.minimal.settings' + name: 'minimal_example' + type: summary + """ validate_settings(settings) From 92db15a50dce77ff6013e5cacc5402e5c48078c6 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 18:02:26 +0300 Subject: [PATCH 04/12] .rst -> .md convertions --- AUTHORS.rst => AUTHORS.md | 3 +- CONTRIBUTING.rst => CONTRIBUTING.md | 95 ++++++++++++----------------- HISTORY.md | 15 +++++ HISTORY.rst | 25 -------- 4 files changed, 55 insertions(+), 83 deletions(-) rename AUTHORS.rst => AUTHORS.md (75%) rename CONTRIBUTING.rst => CONTRIBUTING.md (68%) create mode 100644 HISTORY.md delete mode 100644 HISTORY.rst diff --git a/AUTHORS.rst b/AUTHORS.md similarity index 75% rename from AUTHORS.rst rename to AUTHORS.md index e9c1fd9..df203d7 100644 --- a/AUTHORS.rst +++ b/AUTHORS.md @@ -1,4 +1,3 @@ -Contributors ------------- +# Contributors * Ilya Baryshev * Kazakov Kirill \ No newline at end of file diff --git a/CONTRIBUTING.rst 
b/CONTRIBUTING.md similarity index 68% rename from CONTRIBUTING.rst rename to CONTRIBUTING.md index 268ec9f..d828bdd 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.md @@ -1,20 +1,13 @@ -.. highlight:: shell - -============ -Contributing -============ +# Contributing Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. You can contribute in many ways: -Types of Contributions ----------------------- - -Report Bugs -~~~~~~~~~~~ +## Types of Contributions +### Report Bugs Report bugs at https://github.com/coagulant/zabbix_exporter/issues. If you are reporting a bug, please include: @@ -23,28 +16,20 @@ If you are reporting a bug, please include: * Any details about your local setup that might be helpful in troubleshooting. * Detailed steps to reproduce the bug. -Fix Bugs -~~~~~~~~ - +### Fix Bugs Look through the GitHub issues for bugs. Anything tagged with "bug" is open to whoever wants to implement it. -Implement Features -~~~~~~~~~~~~~~~~~~ - +### Implement Features Look through the GitHub issues for features. Anything tagged with "feature" is open to whoever wants to implement it. -Write Documentation -~~~~~~~~~~~~~~~~~~~ - +### Write Documentation zabbix exporter could always use more documentation, whether as part of the official zabbix exporter docs, in docstrings, or even on the web in blog posts, articles, and such. -Submit Feedback -~~~~~~~~~~~~~~~ - +### Submit Feedback The best way to send feedback is to file an issue at https://github.com/coagulant/zabbix_exporter/issues. If you are proposing a feature: @@ -54,47 +39,46 @@ If you are proposing a feature: * Remember that this is a volunteer-driven project, and that contributions are welcome :) -Get Started! ------------- - +## Get Started! Ready to contribute? Here's how to set up `zabbix_exporter` for local development. 1. Fork the `zabbix_exporter` repo on GitHub. 2. Clone your fork locally:: - - $ git clone git@github.com:your_name_here/zabbix_exporter.git +```shell +git clone git@github.com:your_name_here/zabbix_exporter.git +``` 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: - - $ mkvirtualenv zabbix_exporter - $ cd zabbix_exporter/ - $ python setup.py develop +```shell +mkvirtualenv zabbix_exporter +cd zabbix_exporter/ +python setup.py develop +```` 4. Create a branch for local development:: +```shell +git checkout -b name-of-your-bugfix-or-feature +``` - $ git checkout -b name-of-your-bugfix-or-feature - - Now you can make your changes locally. +Now you can make your changes locally. 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: - - $ flake8 zabbix_exporter tests - $ python setup.py test - $ tox - - To get flake8 and tox, just pip install them into your virtualenv. - -6. Commit your changes and push your branch to GitHub:: - - $ git add . - $ git commit -m "Your detailed description of your changes." - $ git push origin name-of-your-bugfix-or-feature - +```shell +flake8 zabbix_exporter tests +python setup.py test +tox +```` +To get flake8 and tox, just pip install them into your virtualenv. + +6. Commit your changes and push your branch to GitHub: +```shell +git add . +git commit -m "Your detailed description of your changes." +git push origin name-of-your-bugfix-or-feature +``` 7. Submit a pull request through the GitHub website. 
-Pull Request Guidelines ------------------------ - +## Pull Request Guidelines Before you submit a pull request, check that it meets these guidelines: 1. The pull request should include tests. @@ -105,9 +89,8 @@ Before you submit a pull request, check that it meets these guidelines: https://travis-ci.org/coagulant/zabbix_exporter/pull_requests and make sure that the tests pass for all supported Python versions. -Tips ----- - -To run a subset of tests:: - - $ python -m unittest tests.test_zabbix_exporter +## Tips +To run a subset of tests: +```shell +python -m unittest tests.test_zabbix_exporter +``` diff --git a/HISTORY.md b/HISTORY.md new file mode 100644 index 0000000..7e4b51e --- /dev/null +++ b/HISTORY.md @@ -0,0 +1,15 @@ +#History + +### v1.0.3 (2022-11-28) +* Add zabbix filtering by hosts and item names + +### v1.0.2 (2017-02-25) +* Fix build script + + +### v1.0.1 (2017-02-25) +* Add docker image ``mybook/zabbix-exporter`` + + +### v1.0.0 (2017-01-20) +* Initial PyPI release diff --git a/HISTORY.rst b/HISTORY.rst deleted file mode 100644 index 36c1705..0000000 --- a/HISTORY.rst +++ /dev/null @@ -1,25 +0,0 @@ -======= -History -======= - -1.0.3 (2022-11-28) ------------------- - -* Add zabbix filtering by hosts and item names - -1.0.2 (2017-02-25) ------------------- - -* Fix build script - - -1.0.1 (2017-02-25) ------------------- - -* Add docker image ``mybook/zabbix-exporter`` - - -1.0.0 (2017-01-20) ------------------- - -* Initial PyPI release From 52db623507f034037663e03369425ae1df406d18 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 18:05:01 +0300 Subject: [PATCH 05/12] history.md filename fix --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 709e928..6f5c78a 100755 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ with open('README.md') as readme_file: readme = readme_file.read() -with open('HISTORY.rst') as history_file: +with open('HISTORY.md') as history_file: history = history_file.read() with open('requirements.txt') as reqs_file: From 2e500bb1c45925c33cbdaf8498a9a8fa0f41d168 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 22:01:26 +0300 Subject: [PATCH 06/12] debug codes removed --- zabbix_exporter/zabbix_collector.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/zabbix_exporter/zabbix_collector.py b/zabbix_exporter/zabbix_collector.py index 82f4dec..a75dd8c 100644 --- a/zabbix_exporter/zabbix_collector.py +++ b/zabbix_exporter/zabbix_collector.py @@ -179,8 +179,6 @@ def __process_metric(self, item): ok = sum([nm is not None for nm in name_matches]) > 0 if not ok: continue - else: - print('***') # process metric name metric = attrs.get('name', metric) From d06c51fde0dbeb8400bac7f3ee73e92e92814f7d Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 22:53:37 +0300 Subject: [PATCH 07/12] complex hosts added with additional item_name masks --- .gitignore | 4 ++ README.md | 10 +++-- config-example.yaml | 12 +++--- zabbix_exporter/commands.py | 10 +++-- zabbix_exporter/zabbix_collector.py | 57 +++++++++++++++++++++++++---- 5 files changed, 73 insertions(+), 20 deletions(-) diff --git a/.gitignore b/.gitignore index 8c0e88d..1538ce0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,10 @@ # Created by https://www.toptal.com/developers/gitignore/api/c,python,pycharm+all # Edit at https://www.toptal.com/developers/gitignore?templates=c,python,pycharm+all +# developer-specific +config_examples +main.py + ### C ### # Prerequisites *.d diff --git a/README.md b/README.md index 
4f5ac33..231cd9c 100644 --- a/README.md +++ b/README.md @@ -34,10 +34,12 @@ Usage: zabbix_exporter [OPTIONS] status: $2 reject: - 'total' - hosts: # list of hosts to load this metric from - - name.of.host.1 - - name.of.host.2 - item_names: # only items with names fitting one of the given patterns will be exported + hosts: # list of hosts to load this metric from + - name.of.host.1 # simple host + - name: name.of.host.2 # complex host with additional item_name masks + item_names: + - '*host.specific.item.name*' + item_names: # only items with names fitting one of the given patterns will be exported - '*item.name.substr.1*' - '*item.name.substr.2*' - key: 'metric.with.minimal.settings' diff --git a/config-example.yaml b/config-example.yaml index 3ebf7ae..afb4f8e 100644 --- a/config-example.yaml +++ b/config-example.yaml @@ -1,7 +1,7 @@ parsing: explicit_metrics: true enable_timestamps: false - enable_empty_hosts: true # if true, will load metrics with empty or missing 'hosts' field without host restriction + enable_empty_hosts: true # if true, will load metrics with empty or missing 'hosts' field without host restriction metrics: - key: 'local.metric[uwsgi,workers,*,*]' @@ -12,10 +12,12 @@ metrics: status: $2 reject: - 'total' - hosts: # list of hosts to load this metric from - - name.of.host.1 - - name.of.host.2 - item_names: # only items with names fitting one of the given patterns will be exported + hosts: # list of hosts to load this metric from + - name.of.host.1 # simple host + - name: name.of.host.2 # complex host with additional item_name masks + item_names: + - '*host.specific.item.name*' + item_names: # only items with names fitting one of the given patterns will be exported - '*item.name.substr.1*' - '*item.name.substr.2*' - key: 'metric.with.minimal.settings' diff --git a/zabbix_exporter/commands.py b/zabbix_exporter/commands.py index a2a6bb6..450d6fd 100644 --- a/zabbix_exporter/commands.py +++ b/zabbix_exporter/commands.py @@ -56,10 +56,12 @@ def cli(**settings): status: $2 reject: - 'total' - hosts: # list of hosts to load this metric from - - name.of.host.1 - - name.of.host.2 - item_names: # only items with names fitting one of the given patterns will be exported + hosts: # list of hosts to load this metric from + - name.of.host.1 # simple host + - name: name.of.host.2 # complex host with additional item_name masks + item_names: + - '*host.specific.item.name*' + item_names: # only items with names fitting one of the given patterns will be exported - '*item.name.substr.1*' - '*item.name.substr.2*' - key: 'metric.with.minimal.settings' diff --git a/zabbix_exporter/zabbix_collector.py b/zabbix_exporter/zabbix_collector.py index a75dd8c..4ea1d20 100644 --- a/zabbix_exporter/zabbix_collector.py +++ b/zabbix_exporter/zabbix_collector.py @@ -31,11 +31,18 @@ def __init__(self, - optional keys: verify_tls:bool, timeout: int :param metrics - list of dictionaries with keys, each defines: key, name, lables, reject, hosts, item_names + + Hosts may be eiter simple or complex: + - simple host: consists only of host name. If global item_names are set, they are applied to it, + if not - all data will be loaded + - complex host: description includes host name and list of item_names specific for this specific host. 
+ Global item_names, if present, are concatenated with these local ones """ self.explicit_metrics = explicit_metrics self.enable_timestamps = enable_timestamps self.enable_empty_hosts = enable_empty_hosts self.metrics = self.__validate_metrics(metrics) + self.__simplify_metric_hosts() self.key_patterns = {prepare_regex(metric['key']): metric for metric in self.metrics} @@ -94,7 +101,7 @@ def __validate_metrics(self, metrics): m['lables'] = dict() if m.get('item_names') in [None, []]: - m['item_names'] = None + m['item_names'] = list() else: m['item_names'] = list(map(prepare_regex, m['item_names'])) @@ -108,6 +115,32 @@ def __validate_metrics(self, metrics): return metrics + def __simplify_metric_hosts(self): + """extracts complex hosts' additional data into separate map and replaces them with string names in metrics""" + + for m in self.metrics: + m['host_item_names'] = dict() + for i, h in enumerate(m['hosts']): + if isinstance(h, str): # simple host + continue + elif isinstance(h, dict): # complex host + hname = h.get('name') + if hname is None: + msg = f'failed to read complex host: "name" field expected, got: {h}' + cmd_logger.error(msg) + raise Exception(msg) + + item_names = h.get('item_names') + if item_names not in [None, []]: # strange: why to use complex host if not to provide additional data? + m['host_item_names'][hname] = list(map(prepare_regex, item_names)) + + m['hosts'][i] = hname + + else: + msg = f'failed to read host: expected either string or dictionary, got {type(h)}' + cmd_logger.error(msg) + raise Exception(msg) + def __load_zabbix_metrics(self): total_items = [] for metric in self.metrics: @@ -167,18 +200,28 @@ def __process_metric(self, item): cmd_logger.debug('Dropping unsupported metric %s', item['key_']) return + def item_name_match(attrs, item): + host = self.reverse_host_mapping.get(item['hostid'], 'unknown_host') + item_name = item['name'] + + item_names_global = attrs.get('item_names', []) + item_names_local = attrs['host_item_names'].get(host, []) + item_names = item_names_global + item_names_local + if len(item_names) == 0: # have no item-name restrictions + return True + + name_matches = [re.match(p, item_name) for p in item_names] + ok = sum([nm is not None for nm in name_matches]) > 0 + return ok + metric = item['key_'] metric_options = {} labels_mapping = SortedDict() for pattern, attrs in self.key_patterns.items(): match = re.match(pattern, item['key_']) if match: - if attrs['item_names'] is not None: - # check item name to fit any of the given patterns - name_matches = [re.match(p, item['name']) for p in attrs['item_names']] - ok = sum([nm is not None for nm in name_matches]) > 0 - if not ok: - continue + if not item_name_match(attrs, item): + continue # process metric name metric = attrs.get('name', metric) From d10780166fa7a44a0bb0133fecb9d7445bd37462 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Mon, 28 Nov 2022 23:19:03 +0300 Subject: [PATCH 08/12] readme: filtering logic described --- README.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/README.md b/README.md index 231cd9c..7bc99e6 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,29 @@ with advances functionality: filtering was added for each zabbix metric with configuration-defined hosts and item names, which significantly improves execution time. +### Filtering logic +Filtering is supported via two fields of a zabbix 'item' instance: +- host +- name (item_name) + +For each metric a list of available hosts (host names) may be specified. 
In this case Zabbix API search
+parameters will be used to get metric items which only belong to the given host list. This
+provides a significant performance boost. Note: if no host is given ('enable_empty_hosts' must be set to True in the configuration file),
+then no host restriction is set, but the hostname will remain unknown in the Prometheus output for all hosts not mentioned in the config.
+
+After the items are received, they are manually filtered by item_names using the following strategy.
+We define two types of item_name filters:
+- metric-specific item_names ('global' ones), which are visible within the concrete metric
+- host-specific item_names ('local' ones), which are visible within the concrete host of a concrete metric
+(so that the same host used in different metrics does not share local filters).
+
+Both item_name filters are presented as a set of item_name regular expressions (note: only asterisk is supported!).
+
+So, getting to the item_name filtering logic itself, we have four possible situations:
+- both global and local (for the specific host, of course) filters are missing. In this case all items will pass (no filters applied)
+- either global or local filters are given. Then an item will pass if its item_name matches at least one of the given masks
+- both local and global filters are present. Same as in the previous case, but the mask lists are merged
+
 ### Usage example
 ```shell
 Usage: zabbix_exporter [OPTIONS]

From efbaee9529d6b96e8c634f5468a139f0b7de7d83 Mon Sep 17 00:00:00 2001
From: Kurush7
Date: Mon, 28 Nov 2022 23:20:46 +0300
Subject: [PATCH 09/12] readme: fixes

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 7bc99e6..9be2406 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ We define two types of item_name filters:
 
 Both item_name filters are presented as a set of item_name regular expressions (note: only asterisk is supported!).
 
-So, getting to the item_name filtering logic itself, we have four possible situations:
+So, getting to the item_name filtering logic itself, we have three possible situations:
 - both global and local (for the specific host, of course) filters are missing. In this case all items will pass (no filters applied)
 - either global or local filters are given. Then an item will pass if its item_name matches at least one of the given masks
 - both local and global filters are present. Same as in the previous case, but the mask lists are merged

From 6921f3395fa229d1b595a7a1e9b05919ca46fbd7 Mon Sep 17 00:00:00 2001
From: Kurush7
Date: Mon, 28 Nov 2022 23:21:32 +0300
Subject: [PATCH 10/12] readme: fixes

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 9be2406..fad59fc 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 # Zabbix Exporter
 
 This project is a fork of [original zabbix exporter](https://github.com/MyBook/zabbix-exporter)
-with advances functionality:
+with advanced functionality:
 filtering was added for each zabbix metric with configuration-defined hosts
 and item names, which significantly improves execution time.
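The filtering rules described in the README changes above (patches 07-09) reduce to a small piece of matching logic. The sketch below is illustrative only: `wildcard_to_regex` is a stand-in for the exporter's `prepare_regex` helper, and the masks and item names are invented, not taken from the project.

```python
import re


def wildcard_to_regex(mask: str) -> str:
    """Stand-in for prepare_regex(): treat '*' as a wildcard, everything else literally."""
    return re.escape(mask).replace(r'\*', '.*')


def item_passes(item_name: str, global_masks, local_masks) -> bool:
    # Merge metric-wide ('global') and host-specific ('local') masks.
    masks = list(global_masks) + list(local_masks)
    if not masks:
        return True  # no filters configured at all -> every item passes
    # An item passes if its name matches at least one of the merged masks.
    return any(re.match(wildcard_to_regex(m), item_name) for m in masks)


print(item_passes('uwsgi workers busy', [], []))                           # True  (no filters)
print(item_passes('uwsgi workers busy', ['*workers*'], []))                # True  (global mask)
print(item_passes('uwsgi harakiri count', ['*workers*'], ['*harakiri*']))  # True  (local mask)
print(item_passes('uwsgi harakiri count', ['*workers*'], []))              # False (no mask matches)
```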
From e52e8c8c9c0b2531946307d647c6d0ae3ee369b3 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Tue, 29 Nov 2022 16:04:47 +0300 Subject: [PATCH 11/12] metric aggregation field changed: from metric name to item name --- zabbix_exporter/zabbix_collector.py | 54 ++++++++++++++--------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/zabbix_exporter/zabbix_collector.py b/zabbix_exporter/zabbix_collector.py index 4ea1d20..f7512a8 100644 --- a/zabbix_exporter/zabbix_collector.py +++ b/zabbix_exporter/zabbix_collector.py @@ -63,13 +63,13 @@ def collect(self): if not metric: continue - if metric['name'] not in metric_families: + if metric['documentation'] not in metric_families.keys(): family = MetricFamily(typ=metric['type'], name=metric['name'], documentation=metric['documentation'], labels=metric['labels_mapping'].keys()) - metric_families[metric['name']] = family - metric_families[metric['name']].add_metric( + metric_families[metric['documentation']] = family + metric_families[metric['documentation']].add_metric( metric['labels_mapping'].values(), float(item['lastvalue']), int(item['lastclock']) if self.enable_timestamps else None) series_count += 1 @@ -141,30 +141,6 @@ def __simplify_metric_hosts(self): cmd_logger.error(msg) raise Exception(msg) - def __load_zabbix_metrics(self): - total_items = [] - for metric in self.metrics: - valid_hosts = [self.host_mapping.get(hname, None) for hname in metric['hosts']] - if None in valid_hosts: - cmd_logger.error(f'some of metric hosts not found in host_mapping: metric hosts = {metric["hosts"]}') - valid_hosts = [h for h in valid_hosts if h is not None] - - key = metric['key'] - - params = dict( - output=['name', 'key_', 'hostid', 'lastvalue', 'lastclock', 'value_type'], - searchWildcardsEnabled='true', - search={'key_': key}, - sortfield='key_', - ) - if len(valid_hosts) != 0: - params['hostids'] = valid_hosts - - items = self.zapi.item.get(**params) - total_items.extend(items) - - return total_items - def __get_used_hosts(self): """load info to map host names into host ids""" host_names = reduce(lambda acc, metric: acc + metric['hosts'], self.metrics, []) @@ -195,6 +171,30 @@ def measure_api_request(r, *args, **kwargs): zapi.login(login, password) return zapi + def __load_zabbix_metrics(self): + total_items = [] + for metric in self.metrics: + valid_hosts = [self.host_mapping.get(hname, None) for hname in metric['hosts']] + if None in valid_hosts: + cmd_logger.error(f'some of metric hosts not found in host_mapping: metric hosts = {metric["hosts"]}') + valid_hosts = [h for h in valid_hosts if h is not None] + + key = metric['key'] + + params = dict( + output=['name', 'key_', 'hostid', 'lastvalue', 'lastclock', 'value_type'], + searchWildcardsEnabled='true', + search={'key_': key}, + sortfield='key_', + ) + if len(valid_hosts) != 0: + params['hostids'] = valid_hosts + + items = self.zapi.item.get(**params) + total_items.extend(items) + + return total_items + def __process_metric(self, item): if not self.__is_exportable(item): cmd_logger.debug('Dropping unsupported metric %s', item['key_']) From e59cfdfa1d7aaa3b3d8d2978f305efb8bb4eecd7 Mon Sep 17 00:00:00 2001 From: Kurush7 Date: Tue, 29 Nov 2022 16:06:28 +0300 Subject: [PATCH 12/12] metric aggregation field changes fix --- zabbix_exporter/zabbix_collector.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/zabbix_exporter/zabbix_collector.py b/zabbix_exporter/zabbix_collector.py index f7512a8..15dbd51 100644 --- a/zabbix_exporter/zabbix_collector.py +++ 
b/zabbix_exporter/zabbix_collector.py @@ -261,7 +261,8 @@ def repl(m): return { 'name': sanitize_key(metric), 'type': metric_options.get('type', 'untyped'), # untyped by default - 'documentation': metric_options.get('help', item['name']), + 'documentation': item['name'], + # 'documentation': metric_options.get('help', item['name']), 'labels_mapping': labels_mapping, }
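Patches 11 and 12 change how `collect()` groups samples: the family map is now keyed by the Zabbix item name (exposed as the family's `documentation`) rather than by the sanitized metric name. Below is a minimal sketch of the resulting grouping, using plain dicts and invented items instead of the exporter's `MetricFamily`.

```python
from collections import OrderedDict

# Invented sample items; in the exporter these come from zapi.item.get().
items = [
    {'name': 'Workers busy', 'metric': 'uwsgi_workers', 'labels': {'instance': 'host1'}, 'value': 4.0},
    {'name': 'Workers busy', 'metric': 'uwsgi_workers', 'labels': {'instance': 'host2'}, 'value': 7.0},
    {'name': 'Workers idle', 'metric': 'uwsgi_workers', 'labels': {'instance': 'host1'}, 'value': 2.0},
]

families = OrderedDict()
for item in items:
    key = item['name']  # after patches 11-12: group by item name, not by metric name
    family = families.setdefault(key, {'metric_name': item['metric'], 'samples': []})
    family['samples'].append((item['labels'], item['value']))

for doc, family in families.items():
    print(doc, family['metric_name'], family['samples'])
# Two families ('Workers busy' and 'Workers idle') are produced even though both share
# the metric name 'uwsgi_workers'; keying by metric name alone would have merged them.
```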