view next/scrape_job.py @ 64:def1aa2bfa3f

more targets polish. reorg code into next/
author drewp@bigasterisk.com
date Thu, 02 May 2024 23:06:31 -0700
parents 8134cd480817
children
line wrap: on
line source

import json
from pathlib import Path
import subprocess

class FromName:
    """Sentinel type: pass as ``targets`` to mean "derive the target list from the job name"."""

def jobConfig(name, targets, scrape_interval=None, ping_job=False, metrics_path=None, params=None, https=False):
    """Build a single Prometheus scrape-job config dict.

    name: the job_name; also the sole target when targets is FromName.
    targets: list of host[:port] strings, FromName, or falsy for no static_configs.
    scrape_interval: optional per-job override (e.g. "30s").
    ping_job: configure this job as a blackbox-exporter ICMP probe; this
        replaces the default relabel_configs (and any params) with the
        prober redirection rules.
    metrics_path: rewrite __metrics_path__ via a relabel rule.
    params: extra HTTP query params (overridden when ping_job is set).
    https: scrape over TLS using the in-cluster serviceaccount credentials.
    """
    job = {
        "job_name": name,
        "relabel_configs": [
            # default every target into the "default" namespace label
            {"target_label": "namespace", "replacement": "default"},
            # surface the k8s node a pod runs on as a "node" label
            {"source_labels": ["__meta_kubernetes_pod_node_name"], "target_label": "node"},
        ],
    }

    if targets is FromName:
        targets = [name]
    if targets:
        job["static_configs"] = [{"targets": targets}]

    if metrics_path:
        job["relabel_configs"].append({
            "action": "replace",
            "target_label": "__metrics_path__",
            "replacement": metrics_path,
        })

    if scrape_interval:
        job["scrape_interval"] = scrape_interval

    if params:
        job["params"] = params

    if ping_job:
        # blackbox-exporter pattern: point every target at the prober,
        # passing the original address as the probe target.
        job["metrics_path"] = "/probe"
        job["params"] = {"module": ["icmp"]}
        job["relabel_configs"] = [
            {"source_labels": ["__address__"], "target_label": "__param_target"},
            {"source_labels": ["__param_target"], "target_label": "instance"},
            {"target_label": "__address__", "replacement": "prober"},
        ]

    if https:
        job["scheme"] = "https"
        job["tls_config"] = {"ca_file": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"}
        job["bearer_token_file"] = "/var/run/secrets/kubernetes.io/serviceaccount/token"

    return job


def current_deployments():
    """Yield the name of each Deployment visible to the current kubectl context."""
    out = subprocess.check_output(['kubectl', 'get', 'deploy', '-o=json'])
    for item in json.loads(out)['items']:
        yield item['metadata']['name']


def scrape_deployments(skip_names):
    """Return one scrape-job config per current Deployment, omitting skip_names."""
    return [
        jobConfig(name=deploy, targets=[deploy])
        for deploy in current_deployments()
        if deploy not in skip_names
    ]


def writeJobConfigs(outDir: Path, jobConfs: list, retention: str):
    """Write one file per scrape job, plus a top-level scrape config listing them.

    Each job goes to job_<job_name>.yaml and the index file is
    scrape_<retention>.yaml. Content is serialized as JSON, which is
    itself valid YAML, hence the .yaml suffixes.
    """
    outDir.mkdir(exist_ok=True, parents=True)

    written = []
    for conf in jobConfs:
        fn = f'job_{conf["job_name"]}.yaml'
        (outDir / fn).write_text(json.dumps([conf], indent=2, sort_keys=True))
        written.append(fn)

    topLevel = {
        "global": {
            "scrape_interval": "1m",
            "scrape_timeout": "10s",
        },
        # sorted so the index file is stable regardless of input order
        "scrape_config_files": sorted(written),
    }
    (outDir / f'scrape_{retention}.yaml').write_text(json.dumps(topLevel, indent=2))