Initial commit
Signed-off-by: Tuan-Dat Tran <tuan-dat.tran@tudattr.dev>
16
pqos/Dockerfile
Normal file
@@ -0,0 +1,16 @@
# Build stage: install the Python dependencies into a virtual environment.
FROM python:3.11 AS compile-image

WORKDIR /federated-example

COPY requirements.txt .

RUN python3 -m pip install --upgrade pip
RUN python3 -m venv /venv
RUN . /venv/bin/activate && \
    python3 -m ensurepip --upgrade && \
    python3 -m pip install -r /federated-example/requirements.txt

# Run stage: copy only the populated virtual environment into a fresh image.
FROM python:3.11 AS run-image
COPY --from=compile-image /venv /venv

WORKDIR /federated-example

COPY . /federated-example/

# ENDPOINT is supplied at run time via `docker run -e ENDPOINT=IP:PORT`.
CMD . /venv/bin/activate && python pqos.py $ENDPOINT
0
pqos/Example files/.gitkeep
Normal file
29
pqos/Example files/pqos_curl.py
Normal file
@@ -0,0 +1,29 @@
import pandas as pd
import requests
from time import sleep

# This part of the code is to be manually edited:
#
# url='http://IP_OF_THE_PQoS:PORT_5000_INTERNALLY/accept_data'
url = 'http://192.168.2.213:5000/accept_data'  # URL to send the request to
total_sets = 10  # The total number of sets of 100 datapoints to send to the PQoS
#
# End of the part to manually edit


def send_dataset(start, end, sets):
    try:
        dataset = pd.read_csv("test.csv")
        elements = dataset[start - 1:end]
        # The PQoS parses the request body as plain CSV text (see accept_data
        # in pqos.py), so the slice is serialised with to_csv; pickled bytes
        # would fail the server's UTF-8 decode.
        to_send = elements.to_csv(header=False, index=False)
        requests.post(url, data=to_send)
        sets += 1
        print("Dataset sent to PQoS")
        if (end < len(dataset)) and (sets != total_sets):
            sleep(5)
            send_dataset(start + 100, end + 100, sets)
    except requests.exceptions.RequestException as e:
        print(f"Error while sending data to PQoS: {e}")


sets = 0
send_dataset(1, 100, sets)
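A quick usage note (not part of the committed files; assumes a PQoS container is already listening at the hardcoded URL and `test.csv` is in the working directory):

```sh
cd "pqos/Example files"
python3 pqos_curl.py
# Prints "Dataset sent to PQoS" once per batch, sleeping 5 s between batches,
# until total_sets batches are sent or test.csv is exhausted.
```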
2404
pqos/Example files/test.csv
Normal file
File diff suppressed because it is too large
101
pqos/Example files/test100.csv
Normal file
@@ -0,0 +1,101 @@
lat,long,rtt
48.4339443,9.967161733333334,78900000
48.43394475,9.9671683,78900000
48.43393966666667,9.9671245,51800000
48.4339408,9.96712915,51800000
48.43393145,9.96710145,77200000
48.43391836666667,9.967084033333334,82100000
48.4339061,9.9670742,17200000
48.43389823333333,9.9670709,17200000
48.4339024,9.96707235,17200000
48.4338805,9.9670667,21400000
48.4338755,9.96706675,21400000
48.43385146666666,9.9670702,41500000
48.433824900000005,9.96708105,92800000
48.43379503333333,9.967094366666666,75400000
48.43376073333334,9.967111333333332,172000000
48.4337206,9.9671297,119000000
48.43372695,9.9671269,119000000
48.43368280000001,9.967145766666668,28000000
48.43364845,9.9671582,18000000
48.43364193333334,9.967160833333333,18000000
48.4336012,9.967175066666666,50600000
48.433549825,9.96718925,94500000
48.43355746666666,9.967187366666668,94500000
48.43349716666666,9.9672049,38000000
48.43345243333332,9.967218666666668,21900000
48.4334024,9.96723545,26100000
48.433353966666665,9.967254733333334,34100000
48.43331236666666,9.967272533333334,18400000
48.43327283333334,9.967288466666666,22700000
48.433266875,9.967290825,22700000
48.433233400000006,9.96730325,28600000
48.433228533333335,9.967305133333332,28600000
48.4332024,9.96731445,26800000
48.43319576666666,9.967316466666666,26800000
48.4331661,9.96732555,20200000
48.433161000000005,9.967327233333334,20200000
48.43313016666667,9.967335133333334,22100000
48.4331365,9.96733405,22100000
48.43310035,9.967341,34500000
48.4330944,9.9673422,34500000
48.433064,9.9673496,28400000
48.433057600000005,9.967351266666666,28400000
48.4330266,9.9673621,26500000
48.43301893333333,9.967365466666664,26500000
48.43298155,9.9673846,30600000
48.43297520000001,9.967387166666668,30600000
48.4329455,9.9674008,30500000
48.43293645,9.96740435,30500000
48.43290655,9.96741765,16700000
48.432897966666665,9.967420833333334,16700000
48.4328529,9.967438200000002,27200000
48.432813566666674,9.967453433333334,37000000
48.4327773,9.967465,31800000
48.4327722,9.967466675,31800000
48.43274195,9.9674775,37400000
48.43273713333334,9.967479633333332,37400000
48.43270859999999,9.9674912,33800000
48.43267659999999,9.967503575,52400000
48.4326811,9.967501733333334,52400000
48.4326455,9.967515566666666,42300000
48.43264985,9.96751385,42300000
48.432623750000005,9.96752325,82800000
48.43261966666668,9.967524666666666,82800000
48.43259766666666,9.967532233333332,75400000
48.432594775,9.967533275,75400000
48.43257795,9.9675397,37800000
48.43257533333334,9.967540666666666,37800000
48.43255946666667,9.967545433333337,36000000
48.432543900000006,9.967550766666667,32400000
48.4325268,9.96755595,20900000
48.4325292,9.967555333333332,20900000
48.43251245,9.96755925,29300000
48.43250983333333,9.967559766666668,29300000
48.4324989,9.96756175,25500000
48.432496833333325,9.9675621,25500000
48.4324861,9.9675637,39500000
48.43248413333333,9.967563833333331,39500000
48.4324748,9.9675651,27200000
48.43247515,9.9675647,27200000
48.4324734,9.967566266666667,33600000
48.4324737,9.9675665,19700000
48.4324737,9.9675665,22500000
48.4324737,9.9675665,22500000
48.43247363333334,9.9675666,37100000
48.43247362500001,9.96756665,37100000
48.4324736,9.9675668,35500000
48.4324736,9.9675668,26700000
48.4324736,9.9675668,26700000
48.4324736,9.9675668,28400000
48.4324736,9.9675668,43600000
48.432473,9.967566,26200000
48.432471825,9.9675648,26200000
48.43247246666667,9.967565366666667,26200000
48.43246805,9.96756225,32000000
48.43246783333333,9.967562166666667,32000000
48.4324674,9.967562,28000000
48.4324674,9.967562,28000000
48.4324674,9.967562,19000000
48.4324674,9.967562,19000000
48.4324674,9.967562,26000000
22
pqos/README.md
Normal file
@@ -0,0 +1,22 @@
# PQOS

`This branch uses the updated model from Oct 2024 (created from the 'deploy' branch)`

The PQOS expects 100 datapoints as input and returns 5 predicted values as JSON:

```
{
    "Predictions": str_of_5_predictions,
    "Response time": int (in seconds)
}
```

## Running the code using Docker
1. To create the Docker image, build the `Dockerfile` with this command: `docker build -f Dockerfile -t pqos-deploy-image .`
2. Create a container from the above image with this command: `docker run -p 5000:5000 --name pqos -e ENDPOINT=IP:PORT --rm pqos-deploy-image`, where `ENDPOINT` is the IP address and port (e.g. 192.168.0.1:5000) to which the results will be sent as JSON; the receiver is expected to expose an `/upload_predictions` route.
3. The PQOS script starts automatically and waits to receive a dataset from a sender. (See the additional notes below.)
4. (For testing purposes without a real endpoint) In another terminal, run `python3 -m http.server {port_number}` to simulate an endpoint receiving the predictions. This will show a 501 server error, since `http.server` does not implement POST handling; a minimal working receiver is sketched below.

* **Notes**:
  - The `-p` flag maps the Docker container's ports to the device's ports.
  - The `-e` flag passes environment variables on the command line.
  - The execution can be stopped by opening another terminal and running `docker kill pqos`.
  - The "Example files" directory contains a dummy dataset to send to the PQOS for testing purposes. In that case, run the `pqos_curl.py` script in the same directory. (The IP address to which the dataset is sent is hardcoded there, as it is only for testing; the part to edit manually is marked in the script.)
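As a convenience for step 4, here is a minimal sketch of a working receiver (not part of this commit; the `/upload_predictions` route matches what `pqos.py` posts to, while the file name `receiver.py` and port 8000 are illustrative):

```python
# receiver.py - minimal prediction sink for local testing.
from flask import Flask, request

app = Flask(__name__)

@app.route('/upload_predictions', methods=['POST'])
def upload_predictions():
    # pqos.py sends a JSON body: {"Predictions": ..., "Response time": ...}
    print(request.get_json())
    return "OK", 200

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8000)
```

Start it with `python3 receiver.py` and point the container at it with `-e ENDPOINT=<host_ip>:8000`.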
12
pqos/docker-push.sh
Executable file
@@ -0,0 +1,12 @@
#!/bin/sh

# docker tag SOURCE_IMAGE[:TAG] 192.168.100.2:5000/uulm/<COMPONENT_NAME>:<VERSION>
# docker push 192.168.100.2:5000/uulm/<COMPONENT_NAME>:<VERSION>

TA_VERSION=v1.3.0
LOCAL_IMAGE="pqos"
REMOTE_IMAGE="uc6pqos"

docker build -t $LOCAL_IMAGE .
docker tag $LOCAL_IMAGE:latest 192.168.100.2:5000/uulm/$REMOTE_IMAGE:$TA_VERSION
docker push 192.168.100.2:5000/uulm/$REMOTE_IMAGE:$TA_VERSION
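One assumption worth spelling out (the script itself does not mention it): 192.168.100.2:5000 looks like a plain-HTTP registry, and Docker refuses to push to such registries unless they are whitelisted in the daemon configuration, roughly:

```sh
# Assumption: the registry speaks plain HTTP. Whitelist it in
# /etc/docker/daemon.json and restart the daemon before pushing:
#   { "insecure-registries": ["192.168.100.2:5000"] }
sudo systemctl restart docker
./docker-push.sh
```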
131
pqos/pqos.py
Normal file
@@ -0,0 +1,131 @@
from flask import Flask, request
import threading
import numpy as np
from sklearn.preprocessing import MinMaxScaler
import tensorflow as tf
from time import time_ns
import pandas as pd
import requests
import sys

# Part to be hardcoded for now, expected to be "ip:port"
destination = sys.argv[1]

app = Flask(__name__)


@app.route('/accept_data', methods=['POST'])
def accept_data():
    # The body is plain CSV text: one "lat,long,rtt" line per datapoint.
    data = request.data
    data = data.decode("utf-8")
    formatted_lines = []
    for line in data.strip().split("\n"):
        elements = line.split(",")
        # Keep only the first whitespace-separated token of the rtt field.
        formatted_line = f"{elements[0]}, {elements[1]}, {elements[2].split()[0]}"
        formatted_lines.append(formatted_line)
    new_data = "\n".join(formatted_lines)
    new_data = pd.DataFrame(
        [line.split(",") for line in new_data.strip().split("\n")],
        columns=["lat", "long", "rtt"],
    )
    new_data["lat"] = new_data["lat"].astype(float)
    new_data["long"] = new_data["long"].astype(float)
    new_data["rtt"] = new_data["rtt"].astype(int)
    global df_final
    df_final = new_data
    dataset_received.set()
    return "Received new datapoints from the network monitoring tool", 200


def run_flask():
    app.run(host='0.0.0.0', port=5000)


def scale(data, min_val, max_val):
    # Fixed scaling ranges used by the callers:
    #   lat: 0..50, long: 0..10, rtt: 0..1000
    range_max = 1
    range_min = -1
    return ((data - min_val) / (max_val - min_val)) * (range_max - range_min) + range_min


def reverse_scale(data, min_val, max_val):
    range_min = -1
    range_max = 1
    return ((data - range_min) / (range_max - range_min)) * (max_val - min_val) + min_val


def main():
    global q_alpha, n_future, n_past, dataset_received
    # Create the event before the Flask thread starts, so a request can
    # never arrive while the name is still undefined.
    dataset_received = threading.Event()

    flask_thread = threading.Thread(target=run_flask)
    flask_thread.daemon = True
    flask_thread.start()

    pd.set_option('mode.chained_assignment', None)
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
    best_model = tf.keras.models.load_model("trained_rtt.h5", compile=False)

    q_alpha = 0.95
    n_features = 3
    n_future = 5
    n_past = 100

    while True:
        dataset_received.wait()
        uc6_06_start = time_ns()

        dataset_100 = df_final[0:100].copy()
        scalers = {}

        dataset_100.loc[:, "lat"] = scale(dataset_100["lat"], 0, 50)
        dataset_100.loc[:, "long"] = scale(dataset_100["long"], 0, 10)
        dataset_100.loc[:, "rtt"] = scale(dataset_100["rtt"], 0, 1000)

        # Scaling train data
        for i in dataset_100.columns:
            scaler = MinMaxScaler(feature_range=(-1, 1))
            s_s = scaler.fit_transform(dataset_100[i].values.reshape(-1, 1))
            s_s = np.reshape(s_s, len(s_s))
            scalers['scaler_' + i] = scaler
            dataset_100[i] = s_s.copy()

        X_test = np.array(dataset_100)
        X_test = X_test.reshape((1, X_test.shape[0], n_features))

        pred = best_model.predict(X_test)
        pred = reverse_scale(pred, 0, 1000)
        pred = np.ceil(pred)

        dataset_compare_5 = df_final.iloc[100:105, 2]
        numpy_actual_values = (np.array(dataset_compare_5) / 100000).astype(int)
        # errors = np.sum(pred < numpy_actual_values)

        uc6_06_end = time_ns()
        # Time required by the PQoS to provide a response, in seconds (target < 0.2).
        kpi_uc6_06 = (uc6_06_end - uc6_06_start) / 1000000000

        try:
            response = requests.post(
                f"http://{destination}/upload_predictions",
                json={"Predictions": np.array2string(pred), "Response time": kpi_uc6_06},
            )
        except requests.exceptions.RequestException as e:
            print(f"Error while sending the prediction results: {e}")

        # print(f"Predictions: \n{pred}")
        # print(f"Time required to process the request: {kpi_uc6_06}s (Target <0.2s)\n\n")
        dataset_received.clear()


if __name__ == "__main__":
    main()
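For reference, a sketch of the payload the loop above produces (values illustrative; the prediction string is whatever `np.array2string` yields for the model's output, assumed here to have shape (1, 5) since `n_future = 5`):

```python
# Illustrative example of the JSON body POSTed to /upload_predictions:
payload = {
    "Predictions": "[[264. 271. 259. 266. 262.]]",  # np.array2string of the prediction array
    "Response time": 0.18,                          # kpi_uc6_06 in seconds, target < 0.2
}
```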
62
pqos/requirements.txt
Normal file
@@ -0,0 +1,62 @@
absl-py==2.0.0
astunparse==1.6.3
blinker==1.7.0
cachetools==5.3.2
certifi==2023.7.22
cffi==1.16.0
charset-normalizer==3.3.2
click==8.1.7
cryptography==41.0.5
Flask==3.0.0
flatbuffers==23.5.26
flwr==1.5.0
gast==0.5.4
google-auth==2.23.4
google-auth-oauthlib==1.0.0
google-pasta==0.2.0
grpcio==1.59.2
h5py==3.10.0
idna==3.4
iterators==0.0.2
itsdangerous==2.1.2
Jinja2==3.1.2
joblib==1.3.2
keras==2.14.0
libclang==16.0.6
Markdown==3.5.1
MarkupSafe==2.1.3
ml-dtypes==0.2.0
netifaces==0.11.0
numpy==1.26.1
oauthlib==3.2.2
opt-einsum==3.3.0
packaging==23.2
pandas==2.1.2
protobuf==3.20.3
psutil==5.9.6
pyasn1==0.5.0
pyasn1-modules==0.3.0
pycparser==2.21
pycryptodome==3.19.0
Pympler==1.0.1
python-dateutil==2.8.2
pytz==2023.3.post1
requests==2.31.0
requests-oauthlib==1.3.1
rsa==4.9
scikit-learn==1.3.2
scipy==1.11.3
six==1.16.0
tensorboard==2.14.1
tensorboard-data-server==0.7.2
tensorflow==2.14.0
tensorflow-estimator==2.14.0
tensorflow-io-gcs-filesystem==0.34.0
termcolor==2.3.0
threadpoolctl==3.2.0
typing_extensions==4.8.0
tzdata==2023.3
urllib3==2.0.7
watchdog==3.0.0
Werkzeug==3.0.1
wrapt==1.14.1
BIN
pqos/trained_rtt.h5
Normal file
Binary file not shown.