-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathhandles.py
120 lines (92 loc) · 3.55 KB
/
handles.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
import json
from http.server import BaseHTTPRequestHandler
import os
from config import Config
from utils import flatten
class ServerHandler(BaseHTTPRequestHandler):
    """HTTP request handler exposing the push endpoints cobra agents POST to."""

    def do_POST(self):
        """
        Expose nodes for cobra to post requests to.

        Parse the input according to the request path/content type and hand
        it to the proper parser. Responds 200 on success, 404 for an unknown
        path or content type.
        """
        # The header may be absent entirely; fall back to '' so the 'in'
        # checks below degrade to a 404 response instead of raising
        # TypeError ('application/json' in None) and crashing the request.
        content_type = self.headers.get('Content-Type') or ''
        print(content_type)
        if self.path == '/push/json' and 'application/json' in content_type:
            data = self.rfile.read(int(self.headers.get('Content-Length', 0))).decode('utf-8')
            parse_json(data)
        elif self.path == '/push/xml' and 'application/xml' in content_type:
            data = self.rfile.read(int(self.headers.get('Content-Length', 0))).decode('utf-8')
            parse_xml(data)
        else:
            self.respond(404)
            return
        self.respond(200)

    def respond(self, status):
        """Send *status* with a minimal text/html body ('OK' or '404 Not Found')."""
        self.send_response(status)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        if status == 200:
            self.wfile.write(bytes('OK', 'UTF-8'))
        if status == 404:
            self.wfile.write(bytes('404 Not Found', 'UTF-8'))
def save(dict_data: dict):
    """
    Accept data in common format, convert them and save it.

    The folder to save to and the format to save it is specified in Config.
    The file name is composed of specified file name, MAC address specified
    in the request and file format:
        [fname] + [mac] + [.json | .csv ...]

    :param dict_data: parsed request payload; must contain
        ``['Status']['Agent']['MAC']``
    :raises Exception: if ``Config.format`` is not a supported format
    """
    serial = dict_data['Status']['Agent']['MAC']
    fname = Config.fname + serial.lower().replace(':', '') + '.' + Config.format
    fpath = os.path.join(Config.folder, fname)
    # exist_ok=True avoids the check-then-create race of the previous
    # os.path.exists()/os.makedirs() pair when two requests arrive
    # concurrently before the folder exists.
    os.makedirs(Config.folder, exist_ok=True)
    # A file that does not exist yet gets the CSV header row written first.
    is_first = not os.path.exists(fpath)
    with open(fpath, 'a') as fp:
        if Config.format == 'CSV':
            save_to_csv(dict_data, fp, is_first)
        elif Config.format == 'JSON':
            save_to_json(dict_data, fp)
        else:
            raise Exception('invalid save format or not supported')
def parse_json(data: str) -> None:
    """
    Parse the incoming json string to dictionary and save it.

    :param data: raw JSON request body
    :return: None
    """
    save(json.loads(data))
def parse_xml(data: str) -> None:
    """
    Parse the incoming xml request to dictionary and save it.

    Currently a stub: the server accepts XML pushes but no parser exists yet.
    """
    message = ('XML parsing was not yet implemented, but you could be the one '
               'who will do it')
    raise NotImplementedError(message)
def save_to_json(data: dict, fp):
    """
    Convert to json and write the json to file with trailing new-line, each
    request to new line. The whole file will not be JSON compatible as it
    will have more top-level entries per file, but filebeat can work with
    that.

    :param data: dictionary to save
    :param fp: stream to save the json to
    """
    serialized = json.dumps(data)
    fp.write(serialized + '\n')
def save_to_csv(json_data: dict, fp, is_first=False):
    """
    Let's abuse the fact, that cobra scheme doesn't change between each
    entry, so we log the scheme on the first request and then assume it
    will not change.

    :param json_data: dictionary with data to log
    :param fp: file stream to write to
    :param is_first: if True, save the scheme (header row) first
    """
    # Flatten once and reuse: the previous code flattened the payload twice
    # on the first request, and the same key ordering must feed both the
    # header row and the value row.
    flat = flatten(json_data)
    if is_first:
        # Header row: the flattened key names define the column scheme.
        fp.write(','.join(str(key) for key in flat) + '\n')
    # ','.join avoids the trailing comma the old per-cell writes produced,
    # which created a spurious empty last column in every row.
    fp.write(','.join(str(value) for value in flat.values()) + '\n')