inode and line tracking, workflow
sholdee committed Jun 22, 2024
1 parent 7bdd3b6 commit 49a054f
Showing 3 changed files with 123 additions and 37 deletions.
48 changes: 48 additions & 0 deletions .github/workflows/build-and-push.yml
@@ -0,0 +1,48 @@
name: Build and Push Docker Image

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build-and-push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Cache Docker layers
        uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: sholdee/adguardexporter:latest
          platforms: linux/amd64,linux/arm64
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Verify Docker manifest
        run: |
          docker buildx imagetools inspect sholdee/adguardexporter:latest
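
The "Verify Docker manifest" step only prints the manifest. If stricter verification were wanted, a small script could parse the raw manifest list and fail the job when a requested platform is missing. A minimal sketch, not part of this commit, assuming `docker buildx` is on PATH and the tag has already been pushed (the helper name `manifest_platforms` is hypothetical):

# Hypothetical check (not part of this commit): confirm the pushed tag
# contains every platform the workflow requested.
import json
import subprocess

def manifest_platforms(tag: str) -> set[str]:
    # `--raw` prints the manifest list / OCI index as JSON.
    raw = subprocess.run(
        ["docker", "buildx", "imagetools", "inspect", "--raw", tag],
        check=True, capture_output=True, text=True,
    ).stdout
    index = json.loads(raw)
    return {
        f"{m['platform']['os']}/{m['platform']['architecture']}"
        for m in index.get("manifests", [])
        if "platform" in m
    }

if __name__ == "__main__":
    expected = {"linux/amd64", "linux/arm64"}
    found = manifest_platforms("sholdee/adguardexporter:latest")
    missing = expected - found
    if missing:
        raise SystemExit(f"manifest is missing platforms: {sorted(missing)}")
    print("all expected platforms present:", sorted(found))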
3 changes: 3 additions & 0 deletions .gitignore
@@ -0,0 +1,3 @@
querylog.json
__pycache__
.position
109 changes: 72 additions & 37 deletions adguard_exporter.py
@@ -1,43 +1,78 @@
-from prometheus_client import start_http_server, Gauge
-import json
 import time
+import json
+import os
+from prometheus_client import start_http_server, Gauge, Counter
 
+# Define Prometheus metrics
+dns_queries_total = Counter('dns_queries_total', 'Total number of DNS queries')
+dns_query_duration_seconds = Gauge('dns_query_duration_seconds', 'Duration of DNS queries', ['qh', 'ip', 'qt', 'response_size', 'result_reason', 'status', 'upstream'])
+
+log_file_path = 'querylog.json'
+position_file_path = '.position'
+
+def get_last_position():
+    try:
+        with open(position_file_path, 'r') as f:
+            pos = int(f.read().strip())
+        inode = os.stat(log_file_path).st_ino
+        return pos, inode
+    except (FileNotFoundError, ValueError):
+        return 0, None
+
+def save_last_position(pos, inode):
+    with open(position_file_path, 'w') as f:
+        f.write(f"{pos}\n{inode}")
+
+def read_new_lines(file, start_pos):
+    file.seek(start_pos)
+    lines = file.readlines()
+    new_pos = file.tell()
+    return lines, new_pos
+
+def reset_metrics():
+    # Reset the Counter and Gauge metrics
+    dns_queries_total._value.set(0)
+    dns_query_duration_seconds.clear()
+
-# Define Prometheus metric
-dns_queries = Gauge('dns_queries', 'DNS query metrics', ['qh', 'qt', 'upstream', 'result_reason', 'ip', 'status', 'response_size'])
-
-def process_log_line(line):
-    data = json.loads(line)
-    qh = data.get('QH', 'unknown')
-    qt = data.get('QT', 'unknown')
-    upstream = data.get('Upstream', 'unknown')
-    result = data.get('Result', {})
-    result_reason = result.get('Reason', 'unknown')
-    elapsed = data.get('Elapsed', 0)
-    ip = data.get('IP', 'unknown')
-    answer = data.get('Answer', '')
-    response_size = len(answer)
-    status = 'success' if not result else 'failure'
-
-    # Check if the query was blocked
-    if result.get('IsFiltered', False):
-        status = 'blocked'
-
-    # Update Prometheus metric
-    dns_queries.labels(qh, qt, upstream, result_reason, ip, status, response_size).set(elapsed)
-
-def read_querylog():
-    file_path = '/opt/adguardhome/work/data/querylog.json'
-    with open(file_path, 'r') as file:
-        for line in file:
-            process_log_line(line)
-
-def main():
-    # Start up the server to expose the metrics.
+def parse_and_export(lines):
+    for line in lines:
+        if line.strip():
+            data = json.loads(line)
+            dns_queries_total.inc()
+            dns_query_duration_seconds.labels(
+                qh=data.get('QH', 'unknown'),
+                ip=data.get('IP', 'unknown'),
+                qt=data.get('QT', 'unknown'),
+                response_size=str(len(data.get('Answer', ''))),
+                result_reason=str(data.get('Result', {}).get('Reason', 'unknown')),
+                status='blocked' if data.get('Result', {}).get('IsFiltered', False) else 'success',
+                upstream=data.get('Upstream', 'unknown')
+            ).set(data.get('Elapsed', 0))
+
+if __name__ == '__main__':
+    # Start the Prometheus metrics server
     start_http_server(8000)
 
+    # Get the last read position and inode
+    last_position, last_inode = get_last_position()
+
     while True:
-        read_querylog()
-        time.sleep(60)
+        current_inode = os.stat(log_file_path).st_ino
 
-if __name__ == '__main__':
-    main()
+        # Check for log rotation
+        if last_inode and last_inode != current_inode:
+            last_position = 0
+            reset_metrics()
+
+        with open(log_file_path, 'r') as log_file:
+            new_lines, new_position = read_new_lines(log_file, last_position)
+            if new_lines:
+                parse_and_export(new_lines)
+                save_last_position(new_position, current_inode)
+
+            # Update last position and inode
+            last_position = new_position
+            last_inode = current_inode
+
+        # Sleep for a while before reading the log again
+        time.sleep(10)

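The core of the change is the seek/tell offset tracking plus the st_ino comparison, which lets the exporter resume where it left off and start over when querylog.json is rotated. A self-contained sketch (standard library only; file names and the read_from helper are illustrative, not taken from the commit) of how that detection behaves across a simulated rotation:

# Standalone illustration (not part of the commit) of inode + offset tracking:
# tail a file incrementally and detect rotation via a changed st_ino.
import os
import tempfile

def read_from(path: str, offset: int) -> tuple[list[str], int]:
    # Mirror of the read_new_lines() approach: seek, read, remember tell().
    with open(path, 'r') as f:
        f.seek(offset)
        lines = f.readlines()
        return lines, f.tell()

workdir = tempfile.mkdtemp()
log = os.path.join(workdir, 'querylog.json')

with open(log, 'w') as f:
    f.write('{"QH": "example.com"}\n')

offset, inode = 0, os.stat(log).st_ino
lines, offset = read_from(log, offset)
print('first pass:', len(lines), 'line(s), offset', offset)

# Simulate log rotation: the old file is renamed and a new one is created,
# so the path now points at a different inode.
os.rename(log, log + '.1')
with open(log, 'w') as f:
    f.write('{"QH": "rotated.example"}\n')

current_inode = os.stat(log).st_ino
if current_inode != inode:
    # Same reset the exporter performs: read the new file from offset 0.
    offset, inode = 0, current_inode
    print('rotation detected, offset reset')

lines, offset = read_from(log, offset)
print('second pass:', len(lines), 'line(s), offset', offset)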