view next/scrape_job.py @ 62:8134cd480817

make next/ a complete standalone setup dir- no deps on ./
author drewp@bigasterisk.com
date Thu, 02 May 2024 20:33:29 -0700
parents config/scrape_job.py@fb0519859645
children def1aa2bfa3f
line wrap: on
line source

import json
from pathlib import Path
import subprocess


def jobConfig(name, targets, scrape_interval=None, ping_job=False, metrics_path=None, params=None):
    """Build one Prometheus scrape-job config dict.

    name: job_name for the scrape config.
    targets: list of host[:port] strings for a single static_config.
    scrape_interval: optional per-job interval override (e.g. "30s").
    ping_job: when True, rewrite the job as a blackbox-exporter ICMP probe
        (this overrides metrics_path, params, and relabel_configs).
    metrics_path: optional non-default metrics path.
    params: optional query params dict.
    """
    job = {
        "job_name": name,
        "static_configs": [{"targets": targets}],
        "relabel_configs": [
            # tag everything as the default k8s namespace
            {"target_label": "namespace", "replacement": "default"},
            # surface the pod's node as a plain "node" label
            {"source_labels": ["__meta_kubernetes_pod_node_name"], "target_label": "node"},
        ],
    }

    # optional per-job overrides, added only when truthy
    for key, value in (("metrics_path", metrics_path),
                       ("scrape_interval", scrape_interval),
                       ("params", params)):
        if value:
            job[key] = value

    if ping_job:
        # Blackbox-exporter style: the target address becomes a probe param,
        # the instance label keeps the original target, and the scrape itself
        # goes to the "prober" service. Replaces any earlier path/params.
        job['metrics_path'] = '/probe'
        job['params'] = {'module': ['icmp']}
        job["relabel_configs"] = [
            {"source_labels": ["__address__"], "target_label": "__param_target"},
            {"source_labels": ["__param_target"], "target_label": "instance"},
            {"target_label": "__address__", "replacement": "prober"},
        ]

    return job


def current_deployments():
    """Yield the name of each k8s Deployment reported by `kubectl get deploy`."""
    out = subprocess.check_output(['kubectl', 'get', 'deploy', '-o=json'])
    for item in json.loads(out)['items']:
        yield item['metadata']['name']


def scrape_deployments(deploy_doesnt_serve_metrics, forever_jobs):
    """Build a scrape-job config for every current deployment that needs one.

    deploy_doesnt_serve_metrics: deployment names to skip (no /metrics endpoint).
    forever_jobs: existing job-config dicts; deployments already covered by
        one of these (matched by 'job_name') are skipped.

    Returns a list of jobConfig dicts, one per remaining deployment, each
    targeting the deployment's own service name.
    """
    # Hoisted out of the loop: the original rebuilt this list comprehension
    # for every deployment, and a set gives O(1) membership tests.
    forever_names = {j['job_name'] for j in forever_jobs}

    ret = []
    for name in current_deployments():
        if name in deploy_doesnt_serve_metrics or name in forever_names:
            continue
        ret.append(jobConfig(name=name, targets=[name]))
    return ret


def writeJobConfigs(outDir: Path, jobConfs: list, retention: str):
    """Write one yaml file per scrape job, plus a top-level scrape config.

    Each job goes to outDir/job_<job_name>.yaml as a one-element list; the
    top-level file outDir/scrape_<retention>.yaml sets global timing and
    references the per-job files via scrape_config_files. Files are emitted
    as JSON, which is valid YAML.
    """
    outDir.mkdir(exist_ok=True, parents=True)

    written = []
    for conf in jobConfs:
        fname = f'job_{conf["job_name"]}.yaml'
        written.append(fname)
        (outDir / fname).write_text(json.dumps([conf], indent=2, sort_keys=True))

    top = {
        "global": {
            "scrape_interval": "1m",
            "scrape_timeout": "10s"
        },
        "scrape_config_files": written,
    }
    (outDir / f'scrape_{retention}.yaml').write_text(json.dumps(top, indent=2))