author    Toni Uhlig <matzeton@googlemail.com>    2022-02-04 00:32:04 +0100
committer Toni Uhlig <matzeton@googlemail.com>    2022-02-04 01:12:18 +0100
commit    6fd6dff14d964aa8e5cf7ff3ec5a70c220ea61b4 (patch)
tree      48a59cdf9cd204577fa7706d455de9c239e13dc6 /examples
parent    f9e4c5885423c6f5b3d8b46c1c872b9e9330b054 (diff)
Added additional (minimalistic) detection information to flow updates.
This will only affect flows with the state `FT_FINISHED' (detection done).

 * nDPIsrvd.py: force use of JSON schema Draft 7 validator
 * flow-dash.py: gather/use total processed layer4 payload size
 * flow-info.py: added additional event filters
 * flow-info.py: prettified printing of flow events whose detection is still in progress
 * py-semantic-validation.py: added validation checks for FT_FINISHED
 * updated flow event JSON schema

Signed-off-by: Toni Uhlig <matzeton@googlemail.com>
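The commit message also mentions forcing the JSON schema Draft 7 validator in nDPIsrvd.py; that file lives outside examples/ and is therefore not part of this diff. As a rough, non-authoritative sketch of what such pinning looks like with the jsonschema package (schema path and function name are assumptions, not taken from the repository):

    import json
    import jsonschema

    def validate_flow_event(json_dict, schema_path='flow_event_schema.json'):
        # Load the flow event JSON schema (path assumed for illustration).
        with open(schema_path) as f:
            schema = json.load(f)
        # Pin validation to Draft 7 semantics instead of letting jsonschema
        # auto-detect the draft from the schema's "$schema" keyword.
        jsonschema.Draft7Validator(schema).validate(json_dict)

Draft7Validator raises jsonschema.ValidationError on a non-conforming event, which matches the fail-fast behaviour a validation script wants.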
Diffstat (limited to 'examples')
-rwxr-xr-x  examples/py-flow-dashboard/flow-dash.py                     40
-rw-r--r--  examples/py-flow-dashboard/plotly_dash.py                   22
-rwxr-xr-x  examples/py-flow-info/flow-info.py                          44
-rwxr-xr-x  examples/py-semantic-validation/py-semantic-validation.py   39
4 files changed, 110 insertions, 35 deletions
diff --git a/examples/py-flow-dashboard/flow-dash.py b/examples/py-flow-dashboard/flow-dash.py
index e3ac0776d..2e3ea3dcf 100755
--- a/examples/py-flow-dashboard/flow-dash.py
+++ b/examples/py-flow-dashboard/flow-dash.py
@@ -24,6 +24,11 @@ def nDPIsrvd_worker_onFlowCleanup(instance, current_flow, global_user_data):
shared_flow_dict['current-flows'] -= 1
+ if flow_id not in shared_flow_dict:
+ return True
+
+ shared_flow_dict['total-l4-bytes'] += shared_flow_dict[flow_id]['total-l4-bytes']
+
if shared_flow_dict[flow_id]['is_detected'] is True:
shared_flow_dict['current-detected-flows'] -= 1
@@ -47,7 +52,7 @@ def nDPIsrvd_worker_onJsonLineRecvd(json_dict, instance, current_flow, global_us
nsock, shared_flow_dict = global_user_data
shared_flow_dict['total-events'] += 1
- shared_flow_dict['total-bytes'] = nsock.received_bytes
+ shared_flow_dict['total-json-bytes'] = nsock.received_bytes
if 'basic_event_name' in json_dict:
shared_flow_dict['total-base-events'] += 1
@@ -74,10 +79,14 @@ def nDPIsrvd_worker_onJsonLineRecvd(json_dict, instance, current_flow, global_us
shared_flow_dict[flow_id]['is_not_detected'] = False
shared_flow_dict[flow_id]['is_midstream'] = False
shared_flow_dict[flow_id]['is_risky'] = False
+ shared_flow_dict[flow_id]['total-l4-bytes'] = 0
shared_flow_dict['total-flows'] += 1
shared_flow_dict['current-flows'] += 1
+ if 'flow_tot_l4_payload_len' in json_dict:
+ shared_flow_dict[flow_id]['total-l4-bytes'] = json_dict['flow_tot_l4_payload_len']
+
if 'midstream' in json_dict and json_dict['midstream'] != 0:
if shared_flow_dict[flow_id]['is_midstream'] is False:
shared_flow_dict['total-midstream-flows'] += 1
@@ -93,6 +102,13 @@ def nDPIsrvd_worker_onJsonLineRecvd(json_dict, instance, current_flow, global_us
if 'flow_event_name' not in json_dict:
return True
+ if json_dict['flow_state'] == 'finished' and \
+ json_dict['ndpi']['proto'] != 'Unknown' and \
+ shared_flow_dict[flow_id]['is_detected'] is False:
+ shared_flow_dict['total-detected-flows'] += 1
+ shared_flow_dict['current-detected-flows'] += 1
+ shared_flow_dict[flow_id]['is_detected'] = True
+
if json_dict['flow_event_name'] == 'new':
shared_flow_dict['total-flow-new-events'] += 1
@@ -155,11 +171,20 @@ def nDPIsrvd_worker(address, shared_flow_dict):
.format(address[0]+':'+str(address[1])
if type(address) is tuple else address))
- nsock = nDPIsrvdSocket()
- nsock.connect(address)
- nsock.loop(nDPIsrvd_worker_onJsonLineRecvd,
- nDPIsrvd_worker_onFlowCleanup,
- (nsock, shared_flow_dict))
+ try:
+ while True:
+ try:
+ nsock = nDPIsrvdSocket()
+ nsock.connect(address)
+ nsock.loop(nDPIsrvd_worker_onJsonLineRecvd,
+ nDPIsrvd_worker_onFlowCleanup,
+ (nsock, shared_flow_dict))
+ except nDPIsrvd.SocketConnectionBroken:
+ sys.stderr.write('Lost connection to {} .. reconnecting\n'
+ .format(address[0]+':'+str(address[1])
+ if type(address) is tuple else address))
+ except KeyboardInterrupt:
+ pass
if __name__ == '__main__':
@@ -185,7 +210,8 @@ if __name__ == '__main__':
shared_flow_dict['total-base-events'] = 0
shared_flow_dict['total-daemon-events'] = 0
- shared_flow_dict['total-bytes'] = 0
+ shared_flow_dict['total-json-bytes'] = 0
+ shared_flow_dict['total-l4-bytes'] = 0
shared_flow_dict['total-flows'] = 0
shared_flow_dict['total-detected-flows'] = 0
shared_flow_dict['total-risky-flows'] = 0
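The flow-dash.py changes above keep a per-flow layer-4 payload counter, refreshed from flow_tot_l4_payload_len on incoming events and folded into a global total when the flow is cleaned up. A stripped-down sketch of that accounting, assuming the same shared_flow_dict key layout as in the diff:

    def on_json_line(shared_flow_dict, flow_id, json_dict):
        # The diff overwrites instead of adding, which indicates the field is
        # already cumulative for the flow's lifetime.
        if 'flow_tot_l4_payload_len' in json_dict:
            shared_flow_dict[flow_id]['total-l4-bytes'] = json_dict['flow_tot_l4_payload_len']

    def on_flow_cleanup(shared_flow_dict, flow_id):
        # Fold the finished flow's payload bytes into the global counter exactly once.
        if flow_id in shared_flow_dict:
            shared_flow_dict['total-l4-bytes'] += shared_flow_dict[flow_id]['total-l4-bytes']

The reconnect loop added to nDPIsrvd_worker() follows the same robustness idea: catch nDPIsrvd.SocketConnectionBroken, log to stderr and re-create the socket instead of letting the worker process die.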
diff --git a/examples/py-flow-dashboard/plotly_dash.py b/examples/py-flow-dashboard/plotly_dash.py
index 4822307c0..c3ce95e12 100644
--- a/examples/py-flow-dashboard/plotly_dash.py
+++ b/examples/py-flow-dashboard/plotly_dash.py
@@ -95,7 +95,7 @@ def generate_tab_flow():
dt.DataTable(
id='table-info',
columns=[{'id': c.lower(), 'name': c, 'editable': False}
- for c in ['Key', 'Value']],
+ for c in ['Name', 'Total']],
)
], style={'display': 'flex', 'flex-direction': 'row'}),
@@ -212,13 +212,14 @@ def prettifyBytes(bytes_received):
inputs=[Input('tab-flow-default-interval', 'n_intervals')])
def tab_flow_update_components(n):
- return [[{'key': 'Total JSON Events', 'value': shared_flow_dict['total-events']},
- {'key': 'Total JSON Bytes', 'value': prettifyBytes(shared_flow_dict['total-bytes'])},
- {'key': 'Total Flows', 'value': shared_flow_dict['total-flows']},
- {'key': 'Total Risky Flows', 'value': shared_flow_dict['total-risky-flows']},
- {'key': 'Total Midstream Flows', 'value': shared_flow_dict['total-midstream-flows']},
- {'key': 'Total Guessed Flows', 'value': shared_flow_dict['total-guessed-flows']},
- {'key': 'Total Not Detected Flows', 'value': shared_flow_dict['total-not-detected-flows']}],
+ return [[{'name': 'JSON Events', 'total': shared_flow_dict['total-events']},
+ {'name': 'JSON Bytes', 'total': prettifyBytes(shared_flow_dict['total-json-bytes'])},
+ {'name': 'Layer4 Bytes', 'total': prettifyBytes(shared_flow_dict['total-l4-bytes'])},
+ {'name': 'Flows', 'total': shared_flow_dict['total-flows']},
+ {'name': 'Risky Flows', 'total': shared_flow_dict['total-risky-flows']},
+ {'name': 'Midstream Flows', 'total': shared_flow_dict['total-midstream-flows']},
+ {'name': 'Guessed Flows', 'total': shared_flow_dict['total-guessed-flows']},
+ {'name': 'Not Detected Flows', 'total': shared_flow_dict['total-not-detected-flows']}],
build_piechart(['Detected', 'Guessed', 'Not-Detected', 'Unclassified'],
[shared_flow_dict['current-detected-flows'],
shared_flow_dict['current-guessed-flows'],
@@ -355,4 +356,7 @@ def web_worker(mp_shared_flow_dict, listen_host, listen_port):
shared_flow_dict = mp_shared_flow_dict
- app.run_server(debug=False, host=listen_host, port=listen_port)
+ try:
+ app.run_server(debug=False, host=listen_host, port=listen_port)
+ except KeyboardInterrupt:
+ pass
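The updated table callback formats both byte counters through prettifyBytes(); only the hunk header shows that helper, so the following is an assumed, self-contained stand-in rather than the repository's exact implementation:

    def prettify_bytes(bytes_received):
        # Scale a raw byte counter into a short human-readable string,
        # e.g. 1536 -> '1.50 KiB'.
        units = ['B', 'KiB', 'MiB', 'GiB', 'TiB']
        size = float(bytes_received)
        for unit in units:
            if size < 1024.0 or unit == units[-1]:
                return '{:.2f} {}'.format(size, unit)
            size /= 1024.0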
diff --git a/examples/py-flow-info/flow-info.py b/examples/py-flow-info/flow-info.py
index 42f70d813..47ae0fcc3 100755
--- a/examples/py-flow-info/flow-info.py
+++ b/examples/py-flow-info/flow-info.py
@@ -177,29 +177,29 @@ def prettifyEvent(color_list, whitespaces, text):
return fmt.format(term_attrs, text, TermColor.END)
def checkEventFilter(json_dict):
- if json_dict['flow_event_name'] == 'new':
- if args.new is True:
- return True
- if json_dict['flow_event_name'] == 'detected' or \
- json_dict['flow_event_name'] == 'detection-update':
- if args.detection is True:
- return True
- if json_dict['flow_event_name'] == 'guessed':
- if args.guessed is True:
- return True
- if json_dict['flow_event_name'] == 'not-detected':
- if args.undetected is True:
- return True
+ flow_events = {'new': args.new, 'end': args.end, 'idle': args.idle,
+ 'guessed': args.guessed, 'detected': args.detected,
+ 'detection-update': args.detection_update,
+ 'not-detected': args.not_detected,
+ 'update': args.update}
+
+ if flow_events[json_dict['flow_event_name']] is True:
+ return True
+
if 'ndpi' in json_dict and 'flow_risk' in json_dict['ndpi']:
if args.risky is True:
return True
+
if json_dict['midstream'] != 0:
if args.midstream is True:
return True
- if args.new is False and args.detection is False and \
- args.guessed is False and args.undetected is False and \
- args.risky is False and args.midstream is False:
+ flow_event_filter_disabled = True
+ for flow_event in list(flow_events.values()) + [args.risky, args.midstream]:
+ if flow_event is True:
+ flow_event_filter_disabled = False
+ break
+ if flow_event_filter_disabled is True:
return True
return False
@@ -269,8 +269,9 @@ def onJsonLineRecvd(json_dict, instance, current_flow, global_user_data):
line_suffix = ''
flow_event_name = ''
+ flow_active_color = '' if json_dict['flow_state'] == 'finished' else TermColor.BOLD
if json_dict['flow_event_name'] == 'guessed':
- flow_event_name += '{}{:>16}{}'.format(TermColor.HINT, json_dict['flow_event_name'], TermColor.END)
+ flow_event_name += '{}{:>16}{}'.format(TermColor.HINT + flow_active_color, json_dict['flow_event_name'], TermColor.END)
elif json_dict['flow_event_name'] == 'not-detected':
flow_event_name += '{}{:>16}{}'.format(TermColor.WARNING + TermColor.BOLD + TermColor.BLINK,
json_dict['flow_event_name'], TermColor.END)
@@ -292,7 +293,7 @@ def onJsonLineRecvd(json_dict, instance, current_flow, global_user_data):
if src_whois is None and dst_whois is None:
line_suffix += TermColor.WARNING + 'WHOIS empty' + TermColor.END
line_suffix += ']'
- flow_event_name += '{:>16}'.format(json_dict['flow_event_name'])
+ flow_event_name += '{}{:>16}{}'.format(flow_active_color, json_dict['flow_event_name'], TermColor.END)
if json_dict['l3_proto'] == 'ip4':
print('{} {}: [{:.>6}] [{}][{:.>5}] [{:.>15}]{} -> [{:.>15}]{} {}{}' \
@@ -325,10 +326,15 @@ def onJsonLineRecvd(json_dict, instance, current_flow, global_user_data):
if __name__ == '__main__':
argparser = nDPIsrvd.defaultArgumentParser()
argparser.add_argument('--guessed', action='store_true', default=False, help='Print only guessed flow events.')
- argparser.add_argument('--undetected', action='store_true', default=False, help='Print only undetected flow events.')
+ argparser.add_argument('--not-detected', action='store_true', default=False, help='Print only undetected flow events.')
+ argparser.add_argument('--detected', action='store_true', default=False, help='Print only detected flow events.')
+ argparser.add_argument('--detection-update', action='store_true', default=False, help='Print only detection-update flow events.')
argparser.add_argument('--risky', action='store_true', default=False, help='Print only risky flow events.')
argparser.add_argument('--midstream', action='store_true', default=False, help='Print only midstream flow events.')
argparser.add_argument('--new', action='store_true', default=False, help='Print only new flow events.')
+ argparser.add_argument('--end', action='store_true', default=False, help='Print only end flow events.')
+ argparser.add_argument('--idle', action='store_true', default=False, help='Print only idle flow events.')
+ argparser.add_argument('--update', action='store_true', default=False, help='Print only update flow events.')
argparser.add_argument('--detection', action='store_true', default=False, help='Print only detected/detection-update flow events.')
argparser.add_argument('--ipwhois', action='store_true', default=False, help='Use Python-IPWhois to print additional location information.')
args = argparser.parse_args()
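The rewritten checkEventFilter() in flow-info.py replaces a chain of per-flag if blocks with a dict keyed by flow_event_name. A standalone sketch of the same idea (the argparse Namespace stands in for the script's real args object, and the final "no filter enabled" test is compressed with any() instead of the diff's explicit loop):

    import argparse

    def check_event_filter(json_dict, args):
        flow_events = {'new': args.new, 'end': args.end, 'idle': args.idle,
                       'guessed': args.guessed, 'detected': args.detected,
                       'detection-update': args.detection_update,
                       'not-detected': args.not_detected, 'update': args.update}

        if flow_events.get(json_dict.get('flow_event_name')) is True:
            return True
        if args.risky and 'flow_risk' in json_dict.get('ndpi', {}):
            return True
        if args.midstream and json_dict.get('midstream', 0) != 0:
            return True
        # With no filter flag enabled at all, every event passes.
        return not any(list(flow_events.values()) + [args.risky, args.midstream])

    args = argparse.Namespace(new=True, end=False, idle=False, guessed=False,
                              detected=False, detection_update=False,
                              not_detected=False, update=False,
                              risky=False, midstream=False)
    print(check_event_filter({'flow_event_name': 'new', 'midstream': 0}, args))  # True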
diff --git a/examples/py-semantic-validation/py-semantic-validation.py b/examples/py-semantic-validation/py-semantic-validation.py
index 109a968b3..019194dac 100755
--- a/examples/py-semantic-validation/py-semantic-validation.py
+++ b/examples/py-semantic-validation/py-semantic-validation.py
@@ -164,6 +164,12 @@ def onJsonLineRecvd(json_dict, instance, current_flow, global_user_data):
else:
td = thread_data_dict[json_dict['thread_id']] = ThreadData()
+ for event_name in ['basic_event_name', 'daemon_event_name',
+ 'packet_event_name', 'flow_event_name']:
+ if event_name in json_dict and json_dict[event_name].lower() == 'invalid':
+ raise SemanticValidationException(current_flow,
+ 'Received an invalid event for {}'.format(event_name))
+
lowest_possible_flow_id = td.lowest_possible_flow_id
lowest_possible_packet_id = td.lowest_possible_packet_id
@@ -242,6 +248,38 @@ def onJsonLineRecvd(json_dict, instance, current_flow, global_user_data):
if 'flow_event_name' in json_dict:
try:
+ if current_flow.flow_detection_finished == True and \
+ (json_dict['flow_event_name'] == 'detected' or \
+ json_dict['flow_event_name'] == 'guessed'):
+ raise SemanticValidationException(current_flow,
+ 'Received another detected/guessed event after '
+ 'a flow was already detected')
+
+ if current_flow.flow_detected == True and \
+ json_dict['flow_state'] == 'finished' and \
+ json_dict['ndpi']['proto'] == 'Unknown' and \
+ json_dict['ndpi']['category'] == 'Unknown':
+ raise SemanticValidationException(current_flow,
+ 'Flow detection successfully finished, but '
+ 'flow update indicates an unknown flow.')
+ except AttributeError:
+ pass
+
+ try:
+ if json_dict['flow_state'] == 'finished':
+ current_flow.flow_finished = True
+
+ if current_flow.flow_finished == True and \
+ json_dict['flow_event_name'] != 'update' and \
+ json_dict['flow_event_name'] != 'idle' and \
+ json_dict['flow_event_name'] != 'end':
+ raise SemanticValidationException(current_flow,
+ 'Flow detection finished, but received another '
+ '{} event'.format(json_dict['flow_event_name']))
+ except AttributeError:
+ pass
+
+ try:
if json_dict['flow_first_seen'] > current_flow.ts_msec or \
json_dict['flow_last_seen'] > current_flow.ts_msec or \
json_dict['flow_first_seen'] > json_dict['flow_last_seen']:
@@ -281,6 +319,7 @@ def onJsonLineRecvd(json_dict, instance, current_flow, global_user_data):
except AttributeError:
pass
current_flow.flow_detection_finished = True
+ current_flow.flow_detected = True if json_dict['flow_event_name'] == 'detected' else False
try:
if current_flow.flow_new_seen is True and lowest_possible_flow_id > current_flow.flow_id:
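The new semantic checks rely on three per-flow flags that accumulate across events of the same flow. A compact sketch of that state tracking, using the attribute names from the diff; the surrounding class and the exception type are placeholders for illustration:

    class FlowState:
        # The real script stores these attributes directly on current_flow.
        def __init__(self):
            self.flow_detection_finished = False
            self.flow_detected = False
            self.flow_finished = False

    def track_flow_event(state, json_dict):
        event = json_dict.get('flow_event_name')

        # A flow may only be classified once.
        if state.flow_detection_finished and event in ('detected', 'guessed'):
            raise RuntimeError('received another {} event after detection finished'.format(event))

        if json_dict.get('flow_state') == 'finished':
            state.flow_finished = True
        # Once finished, only update/idle/end events are acceptable.
        if state.flow_finished and event not in ('update', 'idle', 'end'):
            raise RuntimeError('unexpected {} event for a finished flow'.format(event))

        if event in ('detected', 'guessed'):
            state.flow_detection_finished = True
            state.flow_detected = (event == 'detected')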