diff config/scrape_job.py @ 61:fb0519859645
better py configs
author | drewp@bigasterisk.com
---|---
date | Thu, 02 May 2024 18:35:46 -0700
parents |
children |
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/config/scrape_job.py	Thu May 02 18:35:46 2024 -0700
@@ -0,0 +1,88 @@
+import json
+from pathlib import Path
+import subprocess
+
+
+def jobConfig(name, targets, scrape_interval=None, ping_job=False, metrics_path=None, params=None):
+    """one scrape job config"""
+    ret = {
+        "job_name": name,
+        "static_configs": [{
+            "targets": targets,
+        }],
+        "relabel_configs": [
+            {
+                "target_label": "namespace",
+                "replacement": "default"
+            },
+            {
+                "source_labels": ["__meta_kubernetes_pod_node_name"],
+                "target_label": "node"
+            },
+        ]
+    }
+
+    if metrics_path:
+        ret['metrics_path'] = metrics_path
+
+    if scrape_interval:
+        ret['scrape_interval'] = scrape_interval
+
+    if params:
+        ret['params'] = params
+
+    if ping_job:
+        ret['metrics_path'] = '/probe'
+        ret['params'] = {'module': ['icmp']}
+        ret["relabel_configs"] = [
+            {
+                "source_labels": ["__address__"],
+                "target_label": "__param_target"
+            },
+            {
+                "source_labels": ["__param_target"],
+                "target_label": "instance"
+            },
+            {
+                "target_label": "__address__",
+                "replacement": "prober"
+            },
+        ]
+
+    return ret
+
+
+def current_deployments():
+    deploys = json.loads(subprocess.check_output(['kubectl', 'get', 'deploy', '-o=json']))
+    for deploy in deploys['items']:
+        name = deploy['metadata']['name']
+        yield name
+
+
+def scrape_deployments(deploy_doesnt_serve_metrics, forever_jobs):
+    ret = []
+    for name in current_deployments():
+        if name in deploy_doesnt_serve_metrics:
+            continue
+        if name in [j['job_name'] for j in forever_jobs]:
+            continue
+        targets = [name]
+        ret.append(jobConfig(name=name, targets=targets))
+    return ret
+
+
+def writeJobConfigs(outDir: Path, jobConfs: list, retention: str):
+    (outDir / retention).mkdir(exist_ok=True, parents=True)
+    filenames_written = []
+    for job in jobConfs:
+        filename = f'job_{job["job_name"]}.yaml'
+        (outDir / filename).write_text(json.dumps([job], indent=2, sort_keys=True))
+        filenames_written.append(filename)
+
+    (outDir / f'scrape_{retention}.yaml').write_text(json.dumps({
+        "global": {
+            "scrape_interval": "1m",
+            "scrape_timeout": "10s"
+        },
+        "scrape_config_files": filenames_written,
+    }, indent=2))
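
For context, a minimal sketch of how these helpers might be wired together from a sibling config script. The module path, job names, targets, exclusion list, and retention label below are illustrative assumptions, not part of this commit.

```python
# Hypothetical caller of scrape_job.py; names and paths are examples only.
from pathlib import Path

from scrape_job import jobConfig, scrape_deployments, writeJobConfigs

# Jobs that should always exist, regardless of what is currently deployed.
forever_jobs = [
    jobConfig(name='victoriametrics', targets=['victoriametrics']),
    # ping_job=True rewrites the job to hit the 'prober' blackbox endpoint
    # with module=icmp, using each address as the probe target.
    jobConfig(name='net-routers', targets=['10.2.0.1', '10.2.0.2'], ping_job=True),
]

# Deployments that expose no metrics endpoint and should be skipped.
deploy_doesnt_serve_metrics = ['registry', 'static-site']

# One default-port scrape job per remaining k8s deployment, plus the fixed jobs.
jobs = forever_jobs + scrape_deployments(deploy_doesnt_serve_metrics, forever_jobs)

# Writes job_<job_name>.yaml per job and a scrape_100d.yaml index that lists
# them under scrape_config_files.
writeJobConfigs(Path('build/scrape_config'), jobs, retention='100d')
```

Because JSON is a subset of YAML, the json.dumps output can be written directly to the .yaml files referenced by scrape_config_files.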