pihole.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
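"""Export Pi-hole statistics as InfluxDB points.

This module talks to the Pi-hole HTTP API (session-based authentication via
/api/auth) and converts summary statistics, per-query data, and the raw query
log into influxdb_client Point objects.
"""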
import requests
from datetime import datetime
from influxdb_client import Point

class PiHole:
    """Minimal client for the Pi-hole REST API."""

    def __init__(self, host, password):
        self.host = host
        if host.startswith("http"):
            self.url = host
        else:
            self.url = f"http://{host}"
        # Without a password there is no session, so the sid stays unset.
        self.sid = None
        self.csrf = None
        if password:
            json = self.post("auth", {'password': password}).json()
            if 'session' not in json or not json['session']['valid']:
                print(f"auth response: {json}")
                raise Exception("Authentication against the Pi-hole API failed")
            self.sid = json['session']['sid']
            self.csrf = json['session'].get('csrf', None)

    def post(self, endpoint, params=None):
        """POST a JSON body to the given API endpoint."""
        return requests.post(f"{self.url}/api/{endpoint}", json=params)

    def query(self, endpoint, params=None):
        """GET the given API endpoint with optional query parameters."""
        return requests.get(f"{self.url}/api/{endpoint}", params=params)

    def request_all_queries(self, start: float, end: float):
        """Return the raw query log between two UNIX timestamps."""
        if not self.sid:
            raise Exception("Password required")
        params = {
            "from": int(start),
            "until": int(end),
            "length": 100000,
            "sid": self.sid
        }
        json = self.query("queries", params=params).json()
        if 'queries' not in json:
            print(f"API response: {json}")
        return json['queries']

    def request_summary(self):
        """Return the summary statistics reported by the Pi-hole."""
        if not self.sid:
            raise Exception("Password required")
        params = {
            "sid": self.sid
        }
        json = self.query("stats/summary", params=params).json()
        return json

    def request_forward_destinations(self):
        """Return the upstream DNS servers and their query counts."""
        if not self.sid:
            raise Exception("Password required")
        params = {
            "sid": self.sid
        }
        json = self.query("stats/upstreams", params=params).json()
        if 'upstreams' not in json:
            print(f"API response: {json}")
        return json['upstreams']

    def get_totals_for_influxdb(self):
        """Yield InfluxDB points built from the Pi-hole summary statistics."""
        summary = self.request_summary()
        timestamp = datetime.now().astimezone()
        yield Point("domains") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("domain_count", summary['gravity']['domains_being_blocked']) \
            .field("unique_domains", summary['queries']['unique_domains']) \
            .field("forwarded", summary['queries']['forwarded']) \
            .field("cached", summary['queries']['cached'])
        yield Point("queries") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("queries", summary['queries']['total']) \
            .field("blocked", summary['queries']['blocked']) \
            .field("ads_percentage", summary['queries']['percent_blocked'])
        yield Point("clients") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("total_clients", summary['clients']['total']) \
            .field("unique_clients", summary['clients']['active']) \
            .field("total_queries", sum(summary['queries']['types'].values()))
        yield Point("other") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("gravity_last_update", summary['gravity']['last_update'])
        for key, value in summary['queries']['types'].items():
            yield Point("query_types") \
                .time(timestamp) \
                .tag("hostname", self.host) \
                .tag("query_type", key) \
                .field("value", float(value))
        forward_destinations = self.request_forward_destinations()
        for upstream in forward_destinations:
            yield Point("forward_destinations") \
                .time(timestamp) \
                .tag("hostname", self.host) \
                .tag("ip", upstream['ip']) \
                .tag("destination", upstream['name'] or upstream['ip']) \
                .field("value", float(upstream['count']))

    def get_queries_for_influxdb(self, query_date: datetime, sample_period: int):
        """Yield InfluxDB points computed from the query log of the last sample period."""
        # Get all queries since the last sample
        end_time = query_date.timestamp()
        start_time = end_time - sample_period + 1
        queries = self.request_all_queries(start_time, end_time)
        timestamp = datetime.now().astimezone()
        # We still need some stats from the summary
        summary = self.request_summary()
        yield Point("domains") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("domain_count", summary['gravity']['domains_being_blocked']) \
            .field("unique_domains", len(set(x['domain'] for x in queries))) \
            .field("forwarded", sum(1 for x in queries if x['status'].startswith("FORWARDED"))) \
            .field("cached", sum(1 for x in queries if x['status'].startswith("CACHED")))
        blocked_count = sum(1 for x in queries
                            if x['status'].startswith("BLOCKED") or x['status'].startswith("BLACKLIST"))
        queries_point = Point("queries") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("queries", len(queries)) \
            .field("blocked", blocked_count) \
            .field("ads_percentage", blocked_count * 100.0 / max(1, len(queries)))
        yield queries_point
        # Group queries by client so each client gets its own point.
        clients = {}
        for query in queries:
            name = query['client']['name'] or query['client']['ip']
            group = clients.get(name, [])
            group.append(query)
            clients[name] = group
        for name, group in clients.items():
            blocked_count = sum(1 for x in group
                                if x['status'].startswith("BLOCKED") or x['status'].startswith("BLACKLIST"))
            clients_point = Point("clients") \
                .time(timestamp) \
                .tag("hostname", self.host) \
                .tag("client", name) \
                .field("queries", len(group)) \
                .field("blocked", blocked_count) \
                .field("ads_percentage", blocked_count * 100.0 / max(1, len(group)))
            yield clients_point
        yield Point("other") \
            .time(timestamp) \
            .tag("hostname", self.host) \
            .field("gravity_last_update", summary['gravity']['last_update'])
        for key in summary['queries']['types']:
            yield Point("query_types") \
                .time(timestamp) \
                .tag("hostname", self.host) \
                .tag("query_type", key) \
                .field("queries", sum(1 for x in queries if x['type'] == key))
        # Group queries by upstream destination (drop the "#port" suffix).
        destinations = {}
        for query in queries:
            if query['upstream']:
                name = query['upstream'].split('#')[0]
                group = destinations.get(name, [])
                group.append(query)
                destinations[name] = group
        for name, group in destinations.items():
            yield Point("forward_destinations") \
                .time(timestamp) \
                .tag("hostname", self.host) \
                .tag("destination", name) \
                .field("queries", len(group))

    def get_query_logs_for_influxdb(self, query_date: datetime, sample_period: int):
        """Yield one InfluxDB point per DNS query in the last sample period."""
        end_time = query_date.timestamp()
        start_time = end_time - sample_period + 1
        for query in self.request_all_queries(start_time, end_time):
            p = Point("logs") \
                .time(datetime.fromtimestamp(query['time']).astimezone()) \
                .tag("hostname", self.host) \
                .tag("query_type", query['type']) \
                .field("domain", query['domain']) \
                .tag("client", query['client']['name'] or query['client']['ip']) \
                .tag("status", query['status'].capitalize()) \
                .tag("reply_type", query['reply']['type']) \
                .field("reply_time", query['reply']['time']) \
                .tag("dnssec", query['dnssec'].capitalize())
            if query['upstream']:
                p.tag("destination", query['upstream'].split('#')[0])
            yield p

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description='Export Pi-Hole statistics')
    parser.add_argument('--host', required=True, type=str, help='Pi-Hole host')
    parser.add_argument('--password', '-t', required=True, type=str, help='Pi-Hole API password')
    args = parser.parse_args()

    pihole = PiHole(host=args.host, password=args.password)
    # Collect the points for the last 10 minutes and print them instead of writing them anywhere.
    points = list(pihole.get_queries_for_influxdb(datetime.now(), 600))
    for p in points:
        print(p._time, p._name, p._tags, p._fields)
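
    # A minimal sketch of how these points could be pushed to an InfluxDB 2.x
    # instance with the influxdb_client write API; the url, token, org, and
    # bucket values below are placeholders, not configuration used by this
    # script:
    #
    #   from influxdb_client import InfluxDBClient
    #   from influxdb_client.client.write_api import SYNCHRONOUS
    #
    #   with InfluxDBClient(url="http://localhost:8086", token="<token>", org="<org>") as client:
    #       client.write_api(write_options=SYNCHRONOUS).write(bucket="pihole", record=points)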