Commit 20db503

cleanup logs
sholdee committed Jun 22, 2024
1 parent 502c9a7 commit 20db503
Showing 1 changed file with 13 additions and 14 deletions.
adguard_exporter.py: 27 changes (13 additions & 14 deletions)
@@ -11,30 +11,31 @@
position_file_path = '/opt/adguardhome/work/data/.position'

def get_last_position():
-    try:
-        with open(position_file_path, 'r') as f:
-            pos = int(f.read().strip())
-        inode = os.stat(log_file_path).st_ino
-        print(f"Read last position: {pos}, inode: {inode}")
-        sys.stdout.flush()
-        return pos, inode
-    except (FileNotFoundError, ValueError) as e:
-        print(f"Error reading last position: {e}")
-        return 0, None
+    if os.path.exists(position_file_path):
+        try:
+            with open(position_file_path, 'r') as f:
+                pos = int(f.read().strip())
+            inode = os.stat(log_file_path).st_ino
+            print(f"Read last position: {pos}, inode: {inode}")
+            sys.stdout.flush()
+            return pos, inode
+        except (ValueError, OSError) as e:
+            print(f"Error reading last position: {e}")
+    else:
+        print("Position file not found, starting from the beginning.")
+    sys.stdout.flush()
+    return 0, None

def save_last_position(pos, inode):
    with open(position_file_path, 'w') as f:
        f.write(f"{pos}\n{inode}")
    print(f"Saved position: {pos}, inode: {inode}")
    sys.stdout.flush()

def read_new_lines(file, start_pos):
    file.seek(start_pos)
    lines = file.readlines()
    new_pos = file.tell()
    print(f"Read {len(lines)} new lines, new position: {new_pos}")
    sys.stdout.flush()
    return lines, new_pos

def reset_metrics():
@@ -57,8 +58,6 @@ def parse_and_export(lines):
                status='blocked' if data.get('Result', {}).get('IsFiltered', False) else 'success',
                upstream=data.get('Upstream', 'unknown')
            ).inc()
-            print(f"Exported metric for query: {data.get('QH', 'unknown')}")
-            sys.stdout.flush()
        except json.JSONDecodeError as e:
            print(f"Error decoding JSON: {e}, line: {line}")
            sys.stdout.flush()
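
Note: the counter incremented above is declared outside the lines shown in this commit. A minimal sketch of how such a labelled metric might be set up with prometheus_client, assuming a hypothetical metric name; only the status and upstream labels are taken from the diff:

# Hypothetical declaration: the real Counter name, help text, and any extra
# labels live elsewhere in adguard_exporter.py and may differ.
from prometheus_client import Counter

dns_queries = Counter(
    'dns_queries_total',
    'DNS queries processed, labelled by result status and upstream',
    ['status', 'upstream'],
)

# The labels(...).inc() call in parse_and_export then maps each query-log
# line onto one labelled time series, for example:
dns_queries.labels(status='success', upstream='unknown').inc()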
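
Note: the exporter's main loop is also outside this diff. A rough sketch of how the helpers above could fit together, assuming a simple polling loop; the function name follow_log, the 10-second interval, and the rotation check are illustrative, not taken from the commit:

# Illustrative sketch only. get_last_position, save_last_position,
# read_new_lines, parse_and_export, and log_file_path come from
# adguard_exporter.py; everything else here is an assumption.
import os
import sys
import time

def follow_log():
    pos, saved_inode = get_last_position()
    while True:
        try:
            current_inode = os.stat(log_file_path).st_ino
            if saved_inode is not None and current_inode != saved_inode:
                # The query log was rotated; start reading from the top.
                pos = 0
            with open(log_file_path, 'r') as f:
                lines, pos = read_new_lines(f, pos)
            parse_and_export(lines)
            save_last_position(pos, current_inode)
            saved_inode = current_inode
        except FileNotFoundError:
            print("Query log not found, waiting for it to appear.")
            sys.stdout.flush()
        time.sleep(10)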
