promtail: Move scrape_configs into prometheus_agent.agents.promtail.scrape_jobs dict
parent 7d0415ecf6
commit ba32c8ee83
2 changed files with 31 additions and 13 deletions
@@ -51,15 +51,20 @@ prometheus_agent:
         positions:
           filename: /var/lib/promtail/positions.yaml
         # clients is generated based on prometheus_agent.scrapers
-        scrape_configs:
-          - job_name: system
+        # scrape_configs is generated based on prometheus_agent.agents.promtail.scrape_jobs
+        # "scrape_jobs" items have the same format as the "scrape_configs" promtail
+        # config key. However, using a dictionary simplifies extending or changing
+        # the default scrape configs. Items with an empty value are ignored.
+        # The "job_name" field defaults to the item key.
+        scrape_jobs:
+          system:
             static_configs:
               - targets:
                   - localhost
                 labels:
                   job: varlogs
                   __path__: /var/log/*log
-          - job_name: journal
+          journal:
             journal:
               max_age: 12h
               labels:
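For reference, with the template change further down, a scrape_jobs item such as "system" above still renders into the same promtail scrape_configs entry as before: the item key is used as job_name unless the item sets one itself. A sketch of the generated fragment (illustrative, not part of this diff):

    scrape_configs:
      - job_name: system
        static_configs:
          - targets:
              - localhost
            labels:
              job: varlogs
              __path__: /var/log/*log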
@@ -72,14 +77,14 @@ prometheus_agent:
             pipeline_stages:
               - structured_metadata:
                   level:
-          - job_name: nginx
+          nginx:
             static_configs:
               - targets:
                   - localhost
                 labels:
                   job: nginx
                   __path__: /var/log/nginx/access-promtail.log
-          - job_name: postfix-bounces
+          postfix-bounces:
             static_configs:
               - targets:
                   - localhost
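Because scrape_jobs is a dictionary, a single default job can be changed, disabled or extended from the inventory without restating the whole list. A hypothetical group_vars override, assuming prometheus_agent is deep-merged from the inventory (the "apache" job and the merge behaviour are assumptions for illustration, not part of this commit):

    prometheus_agent:
      agents:
        promtail:
          scrape_jobs:
            nginx:            # empty value: the default nginx job is ignored
            apache:           # extra job; job_name defaults to the key "apache"
              static_configs:
                - targets:
                    - localhost
                  labels:
                    job: apache
                    __path__: /var/log/apache2/*.log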
@@ -1,8 +1,21 @@
-{% if 'clients' not in prometheus_agent.agents.promtail.config %}
-{% set tmp = prometheus_agent.agents.promtail.config.setdefault('clients', []) %}
-{% for i in prometheus_agent.scrapers if prometheus_agent.scrapers[i].get("loki") %}
-{% set lokiconfig = {}|combine({ "external_labels": merged_prometheus_labels|combine( {"instance": inventory_hostname} ) }, prometheus_agent.scrapers[i]['loki'] ) %}
-{% set tmp = prometheus_agent.agents.promtail.config.clients.append(lokiconfig) %}
-{% endfor %}
-{% endif %}
-{{ prometheus_agent.agents.promtail.config|to_nice_yaml(indent=2) }}
+{%- set config = prometheus_agent.agents.promtail.config.copy() -%}
+
+{%- if 'clients' not in config -%}
+{%- set tmp = config.setdefault('clients', []) -%}
+{%- for i in prometheus_agent.scrapers if prometheus_agent.scrapers[i].get("loki") -%}
+{%- set lokiconfig = {}|combine({ "external_labels": merged_prometheus_labels|combine( {"instance": inventory_hostname} ) }, prometheus_agent.scrapers[i]['loki'] ) -%}
+{%- set tmp = config.clients.append(lokiconfig) -%}
+{%- endfor -%}
+{%- endif -%}
+
+{%- if 'scrape_configs' not in config -%}
+{%- set tmp = config.setdefault('scrape_configs', []) -%}
+{%- for key, value in prometheus_agent.agents.promtail.scrape_jobs.items() -%}
+{%- set tmp = value.setdefault('job_name', key) -%}
+{%- set tmp = config.scrape_configs.append(value) -%}
+{%- endfor -%}
+{%- endif -%}
+
+# {{ ansible_managed }}
+
+{{ config|to_nice_yaml(indent=2) }}
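Rendering from a copy of prometheus_agent.agents.promtail.config keeps the generated "clients" and "scrape_configs" keys out of the original variable, while the existing guards still let either key be supplied verbatim in the config dict. A sketch of the rendered file for a host with one loki-enabled scraper and the default scrape_jobs (the client URL, hostname and key order are illustrative assumptions, not output taken from this commit):

    # Ansible managed
    clients:
      - external_labels:
          instance: host01.example.org
        url: http://loki.example.org:3100/loki/api/v1/push
    positions:
      filename: /var/lib/promtail/positions.yaml
    scrape_configs:
      - job_name: system
        static_configs:
          - targets:
              - localhost
            labels:
              job: varlogs
              __path__: /var/log/*log
      # ... journal, nginx and postfix-bounces entries follow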