Skip to content

Commit

Permalink
Add more fine-grained traffic control
Browse files Browse the repository at this point in the history
  • Loading branch information
steckhelena committed Jul 6, 2022
1 parent 345b9d8 commit 33e9a0e
Show file tree
Hide file tree
Showing 5 changed files with 212 additions and 78 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ files
Band_data
id_*
logs
__pycache__/
85 changes: 85 additions & 0 deletions datasets5G.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# Relative paths of the 5G production-dataset trace CSVs used by the
# experiments, grouped by application (Amazon_Prime / Download / Netflix),
# mobility pattern (Driving / Static) and, where applicable, the content
# being streamed.  Consumed by normalize_datasets.get_normalized_datasets().
datasets5G = [
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.01_12.11.21.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.01_13.05.00.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.02_07.26.15.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.02_09.39.35.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.02_12.41.17.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.03_07.29.22.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.03_10.42.14.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.03_13.58.03.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.04_07.26.30.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.04_11.58.01.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.04_14.24.21.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.06_07.31.16.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.06_09.32.38.csv",
    "5G-production-dataset/Amazon_Prime/Driving/Season3-TheExpanse/B_2019.12.06_11.49.24.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.28_07.27.57.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.28_10.14.25.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.28_12.05.29.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.28_14.53.29.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.28_17.18.56.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.29_09.37.23.csv",
    "5G-production-dataset/Amazon_Prime/Driving/animated-AdventureTime/B_2019.11.29_13.05.25.csv",
    "5G-production-dataset/Amazon_Prime/Static/Season3-TheExpanse/B_2019.12.19_15.00.47.csv",
    "5G-production-dataset/Amazon_Prime/Static/Season3-TheExpanse/B_2019.12.23_10.30.35.csv",
    "5G-production-dataset/Amazon_Prime/Static/Season3-TheExpanse/B_2020.01.03_08.59.40.csv",
    "5G-production-dataset/Amazon_Prime/Static/Season3-TheExpanse/B_2020.01.03_10.54.39.csv",
    "5G-production-dataset/Amazon_Prime/Static/Season3-TheExpanse/B_2020.01.05_14.55.57.csv",
    "5G-production-dataset/Amazon_Prime/Static/animated-Ninjago/B_2020.01.06_09.55.13.csv",
    "5G-production-dataset/Amazon_Prime/Static/animated-Ninjago/B_2020.01.06_11.08.12.csv",
    "5G-production-dataset/Amazon_Prime/Static/animated-Ninjago/B_2020.01.06_11.40.29.csv",
    "5G-production-dataset/Download/Driving/B_2019.12.14_10.16.30.csv",
    "5G-production-dataset/Download/Driving/B_2019.12.16_07.22.43.csv",
    "5G-production-dataset/Download/Driving/B_2019.12.16_11.49.59.csv",
    "5G-production-dataset/Download/Driving/B_2019.12.16_12.27.05.csv",
    "5G-production-dataset/Download/Driving/B_2019.12.16_14.23.32.csv",
    "5G-production-dataset/Download/Driving/B_2019.12.17_07.32.39.csv",
    "5G-production-dataset/Download/Driving/B_2020.01.16_07.26.43.csv",
    "5G-production-dataset/Download/Driving/B_2020.01.16_09.56.56.csv",
    "5G-production-dataset/Download/Driving/B_2020.01.16_12.10.03.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.13_13.03.24.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.13_15.02.01.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.14_07.29.00.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.14_09.38.22.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.14_12.58.17.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.27_17.30.15.csv",
    "5G-production-dataset/Download/Driving/B_2020.02.27_20.35.57.csv",
    "5G-production-dataset/Download/Static/B_2019.12.16_13.40.04.csv",
    "5G-production-dataset/Download/Static/B_2020.01.16_10.43.34.csv",
    "5G-production-dataset/Download/Static/B_2020.02.13_13.57.29.csv",
    "5G-production-dataset/Download/Static/B_2020.02.14_13.21.26.csv",
    "5G-production-dataset/Download/Static/B_2020.02.27_18.39.27.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.23_12.27.34.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.23_13.28.26.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.23_16.08.19.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.24_12.19.13.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.24_13.06.36.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.25_07.25.43.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.25_15.34.34.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.26_07.29.24.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.26_09.53.40.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.26_13.00.56.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.26_13.20.42.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.26_15.02.50.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.27_07.29.47.csv",
    "5G-production-dataset/Netflix/Driving/Season3-StrangerThings/B_2019.11.27_16.33.16.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.20_16.55.16.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_07.59.06.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_09.03.55.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_09.44.52.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_10.20.17.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_14.33.25.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_15.38.35.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.21_16.15.17.csv",
    "5G-production-dataset/Netflix/Driving/animated-RickandMorty/B_2019.11.22_07.30.04.csv",
    "5G-production-dataset/Netflix/Static/Season3-StrangerThings/B_2019.11.26_13.50.48.csv",
    "5G-production-dataset/Netflix/Static/Season3-StrangerThings/B_2019.11.28_09.21.45.csv",
    "5G-production-dataset/Netflix/Static/Season3-StrangerThings/B_2019.12.03_08.02.05.csv",
    "5G-production-dataset/Netflix/Static/Season3-StrangerThings/B_2019.12.04_12.46.14.csv",
    "5G-production-dataset/Netflix/Static/Season3-StrangerThings/B_2019.12.06_09.55.13.csv",
    "5G-production-dataset/Netflix/Static/Season3-StrangerThings/B_2019.12.16_13.03.47.csv",
    "5G-production-dataset/Netflix/Static/animated-RickandMorty/B_2019.11.23_14.30.10.csv",
    "5G-production-dataset/Netflix/Static/animated-RickandMorty/B_2019.11.26_08.02.38.csv",
    "5G-production-dataset/Netflix/Static/animated-RickandMorty/B_2019.11.26_09.27.10.csv",
    "5G-production-dataset/Netflix/Static/animated-RickandMorty/B_2019.11.28_08.02.19.csv",
]
72 changes: 47 additions & 25 deletions lab.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,31 +3,21 @@
import pathlib
from collections import OrderedDict
from multiprocessing import Process
from time import sleep
from typing import List, Literal, TypedDict, Union

from mininet.clean import cleanup
from mininet.log import info, setLogLevel
from mininet.net import Mininet
from mininet.node import Host, Switch

MobilityType = Union[
Literal["Driving-1"],
Literal["Driving-10"],
Literal["Driving-2"],
Literal["Driving-6"],
Literal["Driving-7"],
Literal["Driving-8"],
Literal["Driving-9"],
Literal["Static-1"],
Literal["Static-2"],
Literal["Static-3"],
]
from normalize_datasets import NormalizedDataset, get_normalized_datasets


class Experiment(TypedDict):
id: int
mode: Literal["5g"]
mobility: MobilityType
mobility: NormalizedDataset
clients: int
server_type: Union[Literal["asgi"], Literal["wsgi"]]
adaptation_algorithm: Union[
Expand Down Expand Up @@ -136,7 +126,7 @@ def print_experiment(experiment: Experiment):
print(f"Network Trace Type : {experiment['server_type']}\n")

print("-------------------------------")
print(f"Network Trace Mobility : {experiment['mobility']}\n")
print(f"Network Trace Mobility : {experiment['mobility']['name']}\n")

print("-------------------------------")
print(f"Number of clients: {experiment['clients']}\n")
Expand All @@ -151,7 +141,7 @@ def print_experiment(experiment: Experiment):
def get_experiment_folder_name(experiment: Experiment) -> str:
experiment_folder = (
f"id_{experiment['id']}_mode_{experiment['mode']}_trace_"
+ f"{experiment['mobility']}_host_{experiment['clients']}_algo_"
+ f"{experiment['mobility']['name']}_host_{experiment['clients']}_algo_"
+ f"{experiment['adaptation_algorithm']}_protocol_"
+ f"{experiment['server_protocol']}_server_{experiment['server_type']}"
)
Expand Down Expand Up @@ -212,17 +202,47 @@ def pcap(experiment: Experiment):


def tc(experiment: Experiment, client: Host):
    """Replay the mobility trace as time-varying bandwidth limits on *client*.

    Sets up an HTB qdisc on the client's interface with a download class
    (1:10) and an upload class (1:20), then walks the normalized trace,
    updating the class rates and sleeping for each entry's interval.
    Blocks until the whole trace has been replayed.
    """
    intf = client.intf()
    trace = experiment["mobility"]["data"]
    if not trace:
        return  # nothing to replay

    # BUG FIX: this must be an f-string — as a plain string the literal
    # text "{intf}" (not the interface name) was passed to tc.
    filter_command_base = f"tc filter add dev {intf} protocol ip parent 1: prio 1 u32"

    # Create the root qdisc.  Traffic matching no filter falls through to
    # the (intentionally absent) default class 30, i.e. stays unshaped.
    client.cmd(f"tc qdisc add dev {intf} root handle 1: htb default 30")

    def class_cmd(action: str, classid: str, rate: float) -> str:
        # BUG FIX: classes hang off the root "1:" — the previous parent
        # "1:1" referred to a class that was never created.
        # BUG FIX: tc's "kbps" unit means kilo*bytes*/s; the trace rates are
        # kilobits/s (dataset DL_bitrate/UL_bitrate), so use "kbit".
        return (
            f"tc class {action} dev {intf} parent 1: classid {classid} "
            f"htb rate {rate}kbit burst 15k"
        )

    # Create classes with the first trace entry's rates.
    first = trace[0]
    client.cmd(class_cmd("add", "1:10", first.download_kbps))
    client.cmd(class_cmd("add", "1:20", first.upload_kbps))

    # Redirect traffic into the shaping classes.
    # NOTE(review): "dst 0.0.0.0/0" matches every packet at prio 1, so the
    # src filter below never fires on this egress qdisc — confirm the
    # intended per-direction split (true ingress shaping needs an ifb device).
    client.cmd(f"{filter_command_base} match ip dst 0.0.0.0/0 flowid 1:10")
    client.cmd(f"{filter_command_base} match ip src 0.0.0.0/0 flowid 1:20")

    # Hold the initial rates before applying the first change.
    sleep(first.change_interval_seconds)

    for current_data in trace[1:]:
        client.cmd(class_cmd("change", "1:10", current_data.download_kbps))
        client.cmd(class_cmd("change", "1:20", current_data.upload_kbps))
        # Hold these rates for the entry's interval before the next change.
        sleep(current_data.change_interval_seconds)


def player(experiment: Experiment, client: Host):
Expand All @@ -238,8 +258,10 @@ def player(experiment: Experiment, client: Host):
if __name__ == "__main__":
setLogLevel("info")

normalized_datasets = get_normalized_datasets()

experiment: Experiment = {
"mobility": "Driving-8",
"mobility": normalized_datasets[0],
"server_type": "wsgi",
"server_protocol": "tcp",
"clients": 1,
Expand Down
79 changes: 79 additions & 0 deletions normalize_datasets.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
import pathlib
from dataclasses import dataclass
from typing import List, TypedDict

import pandas as pd
from pandas.core.frame import DataFrame

from datasets5G import datasets5G


@dataclass
class Limits:
    """A single bandwidth-shaping step of a normalized 5G trace."""

    upload_kbps: float  # uplink rate; comes from the dataset's UL_bitrate column
    download_kbps: float  # downlink rate; comes from the dataset's DL_bitrate column
    change_interval_seconds: int  # how long these rates hold before the next step


class NormalizedDataset(TypedDict):
    """A 5G trace CSV normalized into a replayable sequence of rate limits."""

    name: str  # dataset-relative path joined with "-", without the .csv suffix
    data: List[Limits]  # chronological shaping steps for the whole trace
    total_duration: int  # sum of all change intervals, in seconds
    platform: str  # second path component, e.g. "Amazon_Prime" / "Netflix" / "Download"
    mobility: str  # third path component: "Driving" or "Static"
    case: str  # per-show subfolder when present, otherwise the platform
    dataset: str  # trace file name (or subfolder fallback for short paths)


def get_normalized_datasets() -> List[NormalizedDataset]:
    """Load every trace CSV listed in ``datasets5G`` and normalize it.

    Each CSV is reduced to mean download/upload rates per timestamp plus the
    number of seconds each rate holds, so the trace can be replayed with tc.

    Returns:
        One NormalizedDataset per input CSV, in ``datasets5G`` order.
    """
    normalized_datasets: List[NormalizedDataset] = []

    for filename in datasets5G:
        # Normalize dataset using pandas.
        csv_data: DataFrame = pd.read_csv(filename)  # type: ignore

        filtered_data: DataFrame = csv_data[["Timestamp", "DL_bitrate", "UL_bitrate"]]
        # Collapse rows that share a timestamp to their mean rates.
        filtered_data = filtered_data.groupby("Timestamp").mean().reset_index()

        filtered_data["Timestamp"] = pd.to_datetime(
            filtered_data["Timestamp"], format="%Y.%m.%d_%H.%M.%S"
        )
        # Replace each timestamp with the gap (in seconds) until the next
        # sample; the final sample defaults to a 1 s hold time.
        # NOTE(review): ``.dt.seconds`` drops the days component — fine only
        # while consecutive samples are less than a day apart; confirm.
        filtered_data["Timestamp"] = (
            (filtered_data["Timestamp"] - filtered_data["Timestamp"].shift())
            .shift(-1)
            .fillna(pd.Timedelta(seconds=1))
            .dt.seconds
        )

        total_duration = filtered_data["Timestamp"].sum()

        filtered_data.rename(
            columns={
                "Timestamp": "change_interval_seconds",
                "DL_bitrate": "download_kbps",
                "UL_bitrate": "upload_kbps",
            },
            inplace=True,
        )

        path = pathlib.Path(filename)
        parts = path.parts
        # BUG FIX: ``name.strip(".csv")`` strips any of the characters
        # '.', 'c', 's', 'v' from both ends rather than removing the suffix;
        # build the name from the extension-less stem instead.
        normalized_name = "-".join((*parts[1:-1], path.stem))
        platform = parts[1]
        mobility = parts[2]
        # Traces without a per-show subfolder (e.g. Download/*) have only
        # four path components; fall back to coarser labels for those.
        case = parts[3] if len(parts) > 4 else parts[1]
        dataset = parts[4] if len(parts) > 4 else parts[3]

        normalized_datasets.append(
            {
                "name": normalized_name,
                # BUG FIX: build real Limits objects — ``to_dict("records")``
                # yields plain dicts, which contradicts the declared
                # List[Limits] and breaks attribute access (.download_kbps)
                # when the trace is replayed.
                "data": [
                    Limits(**record)
                    for record in filtered_data.to_dict("records")
                ],
                "total_duration": total_duration,
                "platform": platform,
                "mobility": mobility,
                "case": case,
                "dataset": dataset,
            }
        )

    return normalized_datasets
53 changes: 0 additions & 53 deletions topo.sh

This file was deleted.

0 comments on commit 33e9a0e

Please sign in to comment.