Add ability to read sql from files, align dbname

parent 449c202098
commit 237ee9d2a2
@@ -3,7 +3,7 @@ metrics:
   discover_dbs:
     type: set
     query:
-      0: SELECT datname FROM pg_database
+      0: SELECT datname AS dbname FROM pg_database
   discover_rep:
     type: set
     query:
@@ -23,8 +23,14 @@ metrics:
   db_stats:
     type: row
     query:
-      0: SELECT numbackends, xact_commit, xact_rollback, blks_read, blks_hit, tup_returned, tup_fetched, tup_inserted, tup_updated, tup_deleted, conflicts, temp_files, temp_bytes, deadlocks, blk_read_time, blk_write_time, extract('epoch' from stats_reset)::float FROM pg_stat_database WHERE datname = %(datname)s
-      140000: SELECT numbackends, xact_commit, xact_rollback, blks_read, blks_hit, tup_returned, tup_fetched, tup_inserted, tup_updated, tup_deleted, conflicts, temp_files, temp_bytes, deadlocks, COALESCE(checksum_failures, 0) AS checksum_failures, blk_read_time, blk_write_time, session_time, active_time, idle_in_transaction_time, sessions, sessions_abandoned, sessions_fatal, sessions_killed, extract('epoch' from stats_reset)::float FROM pg_stat_database WHERE datname = %(datname)s
+      0: SELECT numbackends, xact_commit, xact_rollback, blks_read, blks_hit, tup_returned, tup_fetched, tup_inserted, tup_updated, tup_deleted, conflicts, temp_files, temp_bytes, deadlocks, blk_read_time, blk_write_time, extract('epoch' from stats_reset)::float FROM pg_stat_database WHERE datname = %(dbname)s
+      140000: SELECT numbackends, xact_commit, xact_rollback, blks_read, blks_hit, tup_returned, tup_fetched, tup_inserted, tup_updated, tup_deleted, conflicts, temp_files, temp_bytes, deadlocks, COALESCE(checksum_failures, 0) AS checksum_failures, blk_read_time, blk_write_time, session_time, active_time, idle_in_transaction_time, sessions, sessions_abandoned, sessions_fatal, sessions_killed, extract('epoch' from stats_reset)::float FROM pg_stat_database WHERE datname = %(dbname)s
 
+  # Debugging
+  ntables:
+    type: value
+    query:
+      0: file:sql/ntables.sql
+
   # Per-replication metrics
   rep_stats:
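Note: the datname → dbname rename has to stay consistent across the discovery query alias above, the %(dbname)s placeholders in the metric SQL, and the dbname request parameter used in the Zabbix item keys further down. A rough sketch of how those pieces line up, assuming DB-API "pyformat" parameter binding; the psycopg2 driver and connection details here are illustrative, not taken from pgmon:

    # Illustration only: the dict key built from the request must match the
    # %(dbname)s placeholder in the configured query.
    import psycopg2

    params = {'dbname': 'postgres'}  # e.g. parsed from /db_stats?dbname=postgres

    conn = psycopg2.connect(host='localhost', dbname='postgres')
    with conn, conn.cursor() as cur:
        cur.execute(
            "SELECT numbackends FROM pg_stat_database WHERE datname = %(dbname)s",
            params,
        )
        print(cur.fetchone())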
pgmon.py (33 lines changed)
@@ -69,6 +69,27 @@ default_config = {
     'metrics': {}
 }
 
+def update_deep(d1, d2):
+    """
+    Recursively update a dict, adding keys to dictionaries and appending to
+    lists. Note that this both modifies and returns the first dict.
+
+    Params:
+        d1: the dictionary to update
+        d2: the dictionary to get new values from
+
+    Returns:
+        The new d1
+    """
+    for k, v in d2.items():
+        if isinstance(v, dict):
+            d1[k] = update_deep(d1.get(k, {}), v)
+        elif isinstance(v, list):
+            d1[k] = d1.get(k, []) + v
+        else:
+            d1[k] = v
+    return d1
+
 def read_config(path, included = False):
     """
     Read a config file.
@@ -84,7 +105,7 @@ def read_config(path, included = False):
 
     # Read any included config files
     for inc in cfg.get('include', []):
-        cfg.update(read_config(inc, included=True))
+        update_deep(cfg, read_config(inc, included=True))
 
     # Return the config we read if this is an include, otherwise set the final
     # config
@@ -93,7 +114,15 @@
     else:
         new_config = {}
         new_config.update(default_config)
-        new_config.update(cfg)
+        update_deep(new_config, cfg)
+
+        # Read any external queries
+        for metric in new_config.get('metrics', {}).values():
+            for vers, query in metric['query'].items():
+                if query.startswith('file:'):
+                    path = query[5:]
+                    with open(path, 'r') as f:
+                        metric['query'][vers] = f.read()
 
         # Minor sanity checks
        if len(new_config['metrics']) == 0:
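For reference, a quick sketch of the merge semantics update_deep adds over plain dict.update() for included config files: nested dicts are merged into the existing metrics instead of replacing them, and lists are concatenated. This assumes pgmon.py is importable as a module; the config values are made up:

    from pgmon import update_deep  # assumes pgmon.py is on the import path

    base = {'metrics': {'db_stats': {'type': 'row'}}, 'include': []}
    extra = {'metrics': {'ntables': {'type': 'value'}}, 'include': ['local.yml']}

    merged = update_deep(base, extra)
    # dict.update() would have replaced 'metrics' wholesale, dropping db_stats;
    # update_deep keeps both metrics and appends to the list.
    assert merged['metrics'].keys() == {'db_stats', 'ntables'}
    assert merged['include'] == ['local.yml']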
@@ -84,7 +84,7 @@ zabbix_export:
 parameters:
 - '0.001'
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -101,7 +101,7 @@ zabbix_export:
 parameters:
 - $.numbackends
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -118,7 +118,7 @@ zabbix_export:
 parameters:
 - $.blks_hit
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -135,7 +135,7 @@ zabbix_export:
 parameters:
 - $.blks_read
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -157,7 +157,7 @@ zabbix_export:
 parameters:
 - '0.001'
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -179,7 +179,7 @@ zabbix_export:
 parameters:
 - '0.001'
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -201,7 +201,7 @@ zabbix_export:
 error_handler: CUSTOM_VALUE
 error_handler_params: '0'
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -218,7 +218,7 @@ zabbix_export:
 parameters:
 - $.conflicts
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -235,7 +235,7 @@ zabbix_export:
 parameters:
 - $.deadlocks
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -257,7 +257,7 @@ zabbix_export:
 parameters:
 - '0.001'
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -274,7 +274,7 @@ zabbix_export:
 parameters:
 - $.sessions
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -291,7 +291,7 @@ zabbix_export:
 parameters:
 - $.sessions_abandoned
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -308,7 +308,7 @@ zabbix_export:
 parameters:
 - $.sessions_fatal
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -325,7 +325,7 @@ zabbix_export:
 parameters:
 - $.sessions_killed
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -343,7 +343,7 @@ zabbix_export:
 parameters:
 - $.temp_bytes
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -360,7 +360,7 @@ zabbix_export:
 parameters:
 - $.temp_files
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -377,7 +377,7 @@ zabbix_export:
 parameters:
 - $.tup_deleted
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -394,7 +394,7 @@ zabbix_export:
 parameters:
 - $.tup_fetched
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -411,7 +411,7 @@ zabbix_export:
 parameters:
 - $.tup_inserted
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -428,7 +428,7 @@ zabbix_export:
 parameters:
 - $.tup_returned
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -445,7 +445,7 @@ zabbix_export:
 parameters:
 - $.tup_updated
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -462,7 +462,7 @@ zabbix_export:
 parameters:
 - $.xact_commit
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -479,7 +479,7 @@ zabbix_export:
 parameters:
 - $.xact_rollback
 master_item:
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 tags:
 - tag: Application
 value: PostgreSQL
@@ -557,7 +557,7 @@ zabbix_export:
 value: '{#DBNAME}'
 - uuid: 492b3cac15f348c2b85f97b69c114d1b
 name: 'Database Stats for {#DBNAME}'
-key: 'web.page.get[localhost,/db_stats?datname={#DBNAME},{$AGENT_PORT}]'
+key: 'web.page.get[localhost,/db_stats?dbname={#DBNAME},{$AGENT_PORT}]'
 history: '0'
 value_type: TEXT
 preprocessing:
@@ -599,7 +599,7 @@ zabbix_export:
 key: 'pgmon_db[blk_write_time,{#DBNAME}]'
 lld_macro_paths:
 - lld_macro: '{#DBNAME}'
-path: $.datname
+path: $.dbname
 preprocessing:
 - type: REGEX
 parameters:
@@ -796,12 +796,8 @@ zabbix_export:
 - tag: Database
 value: '{#DBNAME}'
 lld_macro_paths:
-- lld_macro: '{#AGENT}'
-path: $.agent
 - lld_macro: '{#CLIENT_ADDR}'
 path: $.client_addr
-- lld_macro: '{#CLUSTER}'
-path: $.cluster
 - lld_macro: '{#REPID}'
 path: $.repid
 - lld_macro: '{#STATE}'
sql/ntables.sql (new file, 1 line)

@@ -0,0 +1 @@
+SELECT count(*) AS ntables FROM pg_stat_user_tables;
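The new debugging metric should be reachable the same way as the existing ones. A minimal check, where the agent port and the exact endpoint are assumptions based on the item keys in the template, not something this commit defines:

    # Hypothetical fetch of the new metric over the agent's HTTP interface;
    # 5400 is a made-up port standing in for {$AGENT_PORT}.
    from urllib.request import urlopen

    with urlopen('http://localhost:5400/ntables') as resp:
        print(resp.read().decode())  # expected to contain the ntables count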