Initial commit
Signed-off-by: Tuan-Dat Tran <tuan-dat.tran@tudattr.dev>
pqos/pqos.py (new file, 131 lines)
@@ -0,0 +1,131 @@
from flask import Flask, request
import threading
import numpy as np
from sklearn.preprocessing import MinMaxScaler
import tensorflow as tf
from time import time_ns
import pickle
import pandas as pd
import requests
import sys

# Hardcoded for now; expected to be "ip:port" of the prediction consumer
destination = sys.argv[1]

app = Flask(__name__)

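
# Receives one batch of raw CSV lines ("lat,long,rtt <unit>") from the
# network monitoring tool, keeps only the first token of the rtt field,
# and hands the parsed DataFrame to the prediction loop via df_final.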
@app.route('/accept_data', methods=['POST'])
def accept_data():
    data = request.data
    data = data.decode("utf-8")
    formatted_lines = []
    for line in data.strip().split("\n"):
        elements = line.split(",")
        formatted_line = f"{elements[0]}, {elements[1]}, {elements[2].split()[0]}"
        formatted_lines.append(formatted_line)
    new_data = "\n".join(formatted_lines)
    new_data = pd.DataFrame(
        [line.split(",") for line in new_data.strip().split("\n")],
        columns=["lat", "long", "rtt"],
    )
    new_data["lat"] = new_data["lat"].astype(float)
    new_data["long"] = new_data["long"].astype(float)
    new_data["rtt"] = new_data["rtt"].astype(int)
    global df_final
    df_final = new_data
    dataset_received.set()
    return "Received new datapoints from the network monitoring tool", 200
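
# A hypothetical example of the request body accept_data() expects
# (values invented for illustration):
#   50.775,6.083,123456 ns
#   50.776,6.084,98765 ns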


def run_flask():
    app.run(host='0.0.0.0', port=5000)
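

# Min-max scale `data` from [min_val, max_val] into the [-1, 1] range the
# model was trained on.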
def scale(data, min_val, max_val):
    # Expected raw ranges: lat 0..50, lon 0..10, rtt 0..1000
    range_max = 1
    range_min = -1

    return ((data - min_val) / (max_val - min_val)) * (range_max - range_min) + range_min
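

# Inverse of scale(): map model output from [-1, 1] back to [min_val, max_val].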
def reverse_scale(data, min_val, max_val):
    range_min = -1
    range_max = 1
    return ((data - range_min) / (range_max - range_min)) * (max_val - min_val) + min_val


def main():
    global q_alpha, n_future, n_past, dataset_received
    # Create the event before the Flask thread starts, so accept_data()
    # can never reference it before it exists.
    dataset_received = threading.Event()

    flask_thread = threading.Thread(target=run_flask)
    flask_thread.daemon = True
    flask_thread.start()

    pd.set_option('mode.chained_assignment', None)
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
    best_model = tf.keras.models.load_model("trained_rtt.h5", compile=False)

    q_alpha = 0.95
    n_features = 3
    n_future = 5
    n_past = 100
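
    # Each POST to /accept_data wakes this loop: scale the first n_past
    # samples, run the model on them, and forward the predictions along
    # with the measured processing time.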
    while True:
        dataset_received.wait()
        uc6_06_start = time_ns()

        dataset_100 = df_final[0:100]
        # dataset_compare_5 = df_final[100:105, 2]
        scalers = {}

        dataset_100.loc[:, "lat"] = scale(dataset_100["lat"], 0, 50)
        dataset_100.loc[:, "long"] = scale(dataset_100["long"], 0, 10)
        dataset_100.loc[:, "rtt"] = scale(dataset_100["rtt"], 0, 1000)

        # Fit a per-column MinMaxScaler (kept in `scalers` for later use)
        for i in dataset_100.columns:
            scaler = MinMaxScaler(feature_range=(-1, 1))
            s_s = scaler.fit_transform(dataset_100[i].values.reshape(-1, 1))
            s_s = np.reshape(s_s, len(s_s))
            scalers['scaler_' + i] = scaler
            dataset_100[i] = s_s.copy()
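
        # Shape the window as (samples=1, timesteps=100, features=3), the
        # usual 3-D input layout for Keras sequence models.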
        X_test = np.array(dataset_100)
        X_test = X_test.reshape((1, X_test.shape[0], n_features))

        pred = best_model.predict(X_test)
        pred = reverse_scale(pred, 0, 1000)
        pred = np.ceil(pred)
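
        # Actual rtt values for the predicted horizon (rows 100..104,
        # column 2 = "rtt"), for comparison against `pred`.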
        dataset_compare_5 = df_final.iloc[100:105, 2]
        # df_final['column'] = df_final['column'].astype(str)
        # print(df_final)
        # dataset_compare_5 = df_final["column"].iloc[100:106].str().split(',')[2].astype(float)
        numpy_actual_values = (np.array(dataset_compare_5) / 100000).astype(int)
        # errors = np.sum(pred < numpy_actual_values)

        uc6_06_end = time_ns()
        # Time required by the PQoS to provide a response, in seconds (target < 0.2)
        kpi_uc6_06 = (uc6_06_end - uc6_06_start) / 1_000_000_000

        try:
            response = requests.post(
                f"http://{destination}/upload_predictions",
                json={"Predictions": np.array2string(pred), "Response time": kpi_uc6_06},
            )
        except requests.exceptions.RequestException as e:
            print(f"Error while sending the prediction results: {e}")

        # print(f"Predictions: \n{pred}")
        # print(f"Time required to process the request: {kpi_uc6_06}s (Target <0.2s)\n\n")
        dataset_received.clear()


if __name__ == "__main__":
    main()