#!/usr/bin/env python3
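#
# examples/py-flow-dashboard/flow-dash.py
#
# A minimal Plotly Dash dashboard for nDPId: one process consumes flow
# events from nDPIsrvd, another serves a live graph of the number of
# currently active flows.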

from collections import deque
import multiprocessing
import os
import sys

import dash
from dash.dependencies import Output, Input
# Note: on Dash >= 2.0 these two modules are available as `from dash import dcc, html`.
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go

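# Extend the module search path so nDPIsrvd can be imported from common
# install locations, falling back to the repository's dependencies directory.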
sys.path.append(os.path.dirname(sys.argv[0]) + '/../share/nDPId')
sys.path.append(os.path.dirname(sys.argv[0]) + '/../usr/share/nDPId')
try:
    import nDPIsrvd
    from nDPIsrvd import nDPIsrvdSocket, TermColor
except ImportError:
    sys.path.append(os.path.dirname(sys.argv[0]) + '/../../dependencies')
    import nDPIsrvd
    from nDPIsrvd import nDPIsrvdSocket, TermColor

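# A Manager-backed dict shared between the nDPIsrvd worker process and the
# Dash web process. Note: the children must inherit this module-level state,
# which assumes the 'fork' multiprocessing start method (the Linux default).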
mgr = multiprocessing.Manager()
shared_flow_dict = mgr.dict()

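# Rolling window of the last 20 samples: X is a monotonically increasing
# tick counter, Y the number of flows tracked at that tick.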
X = deque(maxlen=20)
X.append(1)

Y = deque(maxlen=20)
Y.append(1)

app = dash.Dash(__name__)

app.layout = html.Div(
    [
        dcc.Graph(id='live-graph', animate=True),
        dcc.Interval(
            id='graph-update',
            interval=1000,
            n_intervals=0
        ),
    ]
)


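# Redraw the scatter plot each time the Interval component fires (once per
# second) by sampling the current size of the shared flow table.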
@app.callback(
    Output('live-graph', 'figure'),
    [Input('graph-update', 'n_intervals')]
)
def update_graph_scatter(n):
    X.append(X[-1] + 1)
    Y.append(len(shared_flow_dict))

    data = go.Scatter(
        x=list(X),
        y=list(Y),
        name='Scatter',
        mode='lines+markers'
    )

    return {'data': [data],
            'layout': go.Layout(xaxis=dict(range=[min(X), max(X)]),
                                yaxis=dict(range=[min(Y), max(Y)]))}

def web_worker():
    # run_server() blocks until the process is terminated; anything placed
    # after this call would be dead code.
    app.run_server()

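# Invoked by nDPIsrvdSocket.loop() for every JSON line received: track a flow
# on its 'new' event, forget it again on 'idle' or 'end'.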
def nDPIsrvd_worker_onJsonLineRecvd(json_dict, current_flow, global_user_data):
    if 'flow_event_name' not in json_dict:
        return True

    if json_dict['flow_event_name'] == 'new':
        shared_flow_dict[json_dict['flow_id']] = current_flow
    elif json_dict['flow_event_name'] == 'idle' or \
         json_dict['flow_event_name'] == 'end':
        if json_dict['flow_id'] in shared_flow_dict:
            del shared_flow_dict[json_dict['flow_id']]

    return True

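# Worker process: connect to nDPIsrvd and hand every received JSON line to
# the callback above.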
def nDPIsrvd_worker(address, nDPIsrvd_global_user_data):
    sys.stderr.write('Recv buffer size: {}\n'.format(nDPIsrvd.NETWORK_BUFFER_MAX_SIZE))
    sys.stderr.write('Connecting to {} ..\n'.format(
        address[0] + ':' + str(address[1]) if isinstance(address, tuple) else address))

    nsock = nDPIsrvdSocket()
    nsock.connect(address)
    nsock.loop(nDPIsrvd_worker_onJsonLineRecvd, nDPIsrvd_global_user_data)

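# Run the distributor client and the web frontend as separate processes; once
# the nDPIsrvd connection terminates, tear down the web server as well.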
if __name__ == '__main__':
    argparser = nDPIsrvd.defaultArgumentParser()
    args = argparser.parse_args()
    address = nDPIsrvd.validateAddress(args)

    nDPIsrvd_job = multiprocessing.Process(target=nDPIsrvd_worker, args=(address, None))
    nDPIsrvd_job.start()

    web_job = multiprocessing.Process(target=web_worker, args=())
    web_job.start()

    nDPIsrvd_job.join()
    web_job.terminate()
    web_job.join()
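
# Example invocation (the socket path below is hypothetical; the accepted
# command-line flags are whatever nDPIsrvd.defaultArgumentParser() defines):
#
#   ./flow-dash.py --unix /tmp/ndpid-distributor.sock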