Skip to content

Commit 572ce3a

Browse files
committed
refactoring and cleanup
1 parent 561df4f commit 572ce3a

39 files changed

+2716
-23238
lines changed

Dockerfile

Lines changed: 0 additions & 22 deletions
This file was deleted.

README.md

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,29 @@
1-
# NetSIM
2-
3-
🚧 Work in progress! 🚧
1+
# NetSim
42

53
![Python-test](https://github.com/networmix/NetSim/workflows/Python-test/badge.svg?branch=main)
64

75
## Introduction
86

9-
NetSim is a discrete event simulation toolkit adapted for network simulation use-cases.
7+
NetSim is a discrete event simulation toolkit adapted for a variety of network simulation use-cases.
8+
It enables modeling, simulating, and analyzing network topologies, packet flows, and system behaviors under different policies and conditions.
9+
10+
### What Problems Can NetSim Solve?
11+
- **Packet Queueing in Network Devices**
12+
Model FIFO, RED, tail-drop, and other queue disciplines. Investigate performance metrics such as packet loss, queue occupancy, and latency under different traffic loads.
13+
- **Flow-Based Analysis**
14+
Explore how different flow rates, flow volumes, and congestion-control strategies impact network performance in switches, routers, or other custom nodes.
15+
- **Advanced Topology Simulations**
16+
Simulate networks with multiple switches, hosts, and complex packet-processing pipelines. Attach custom modules (e.g., PacketProcessors) for specialized logic.
1017

11-
Types of problems this toolkit can help solving:
18+
## Key Features
1219

13-
* Simulation of failures in a network graph
14-
* Simulation of routing protocol behavior in stochastic environment
20+
- **Discrete Event Engine**
21+
Built around an event-based simulation core.
22+
- **Flexible Network Objects**
23+
Includes packet sources, switches, and sinks that can be combined to form multi-layered topologies.
24+
- **Queueing Models**
25+
Several queueing approaches such as FIFO, tail-drop, and RED (Random Early Detection) are supported out of the box.
26+
- **Statistical Tracking**
27+
Provides detailed statistics for throughput (bytes/packets per second), latency, packet drops, queue length, and more.
28+
- **Modular and Extensible**
29+
Add new admission-control policies, scheduling algorithms, or custom processing nodes.

dev/Dockerfile

Lines changed: 0 additions & 25 deletions
This file was deleted.

dev/buildenv.sh

Lines changed: 0 additions & 86 deletions
This file was deleted.

dev/requirements.txt

Lines changed: 0 additions & 15 deletions
This file was deleted.

netsim/applications/packet_network/analysis.py

Lines changed: 92 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -12,70 +12,122 @@
1212

1313

1414
class NetAnalyser:
15+
"""
16+
Base class for network data analysis. Subclasses should implement specialized analysis logic.
17+
"""
18+
1519
...
1620

1721

1822
class NetSimIntAnalyser(NetAnalyser):
1923
"""
20-
Analyser of NetSim interval statistics
24+
Analyser of NetSim interval statistics. It reads lines of JSON data, converts them to pandas DataFrame objects,
25+
and stores them keyed by the NetSim object name.
2126
"""
2227

2328
def __init__(self):
2429
self.data_frames: Dict[NetSimObjectName, pd.DataFrame] = {}
2530

2631
@classmethod
2732
def init_with_nsim_stat(cls, fd: TextIOWrapper) -> NetSimIntAnalyser:
33+
"""
34+
Initialize the analyser from a text file descriptor where each line is JSON data from the simulator.
35+
36+
Args:
37+
fd: A file descriptor with JSON lines.
38+
39+
Returns:
40+
A NetSimIntAnalyser instance with data_frames populated.
41+
"""
2842
analyser = cls()
2943

3044
for interval_idx, line in enumerate(fd):
3145
data: Dict[str, Dict[str, Any]] = loads(line)
32-
entry = next(iter(data.values()))
46+
# Each line is expected to have a top-level key (e.g. an interval).
47+
# We take the first value of that top-level dict to get the object stats.
48+
if not data:
49+
continue
50+
entry = next(iter(data.values()), {})
3351
for obj_name, obj_stat in entry.items():
34-
df = pd.DataFrame(obj_stat, index=[interval_idx])
35-
analyser.data_frames.setdefault(
36-
obj_name, pd.DataFrame(columns=list(obj_stat.keys()))
37-
)
38-
analyser.data_frames[obj_name] = analyser.data_frames[obj_name].append(
39-
df
52+
# Convert the JSON dictionary into a single-row DataFrame
53+
row_df = pd.DataFrame(obj_stat, index=[interval_idx])
54+
55+
if obj_name not in analyser.data_frames:
56+
analyser.data_frames[obj_name] = pd.DataFrame(
57+
columns=list(obj_stat.keys())
58+
)
59+
60+
# Concatenate instead of DataFrame.append (which is deprecated)
61+
analyser.data_frames[obj_name] = pd.concat(
62+
[analyser.data_frames[obj_name], row_df], ignore_index=False
4063
)
4164
return analyser
4265

4366

4467
class NetSimIntQueueAnalyser(NetSimIntAnalyser):
68+
"""
69+
Performs queue-specific analyses of the collected NetSim interval statistics for a given queue-like object.
70+
"""
71+
4572
def analyse_queue(self, obj_name: str) -> None:
73+
"""
74+
Analyse queue statistics (latency, queue length, throughput, etc.) for the specified object name.
75+
76+
Args:
77+
obj_name: The name of the queue-like object in self.data_frames to be analysed.
78+
"""
79+
if obj_name not in self.data_frames:
80+
raise KeyError(f"No data found for object {obj_name}")
81+
4682
df = self.data_frames[obj_name]
47-
int_duration = df.duration[0]
48-
int_count = len(df)
49-
if int_count < 2:
83+
if "duration" not in df.columns:
84+
raise RuntimeError("Duration column is missing in the data.")
85+
86+
# Basic checks
87+
if len(df) < 2:
5088
raise RuntimeError(
5189
"NetSimIntQueueAnalyser requires data from at least two intervals."
5290
)
91+
92+
# We assume each interval has the same duration, though the code could be extended to handle varying interval durations
93+
int_duration = df["duration"].iloc[0]
94+
int_count = len(df)
5395
total_duration = int_duration * int_count
96+
5497
sns.set_theme()
5598
sns.set_context("paper")
5699
fig = plt.figure()
100+
101+
# 1) Mean system delay over time
57102
ax1 = fig.add_subplot(2, 2, 1)
58103
ax1.set_xlim([0, total_duration])
59104
ax1.set_title("Mean system delay over time")
60105
ax1.set_xlabel("Simulated Time (seconds)")
61106
ax1.set_ylabel("Mean system delay (seconds)")
107+
if "avg_latency_at_departure" not in df.columns:
108+
raise RuntimeError("Column avg_latency_at_departure is missing.")
62109
sns.scatterplot(x="timestamp", y="avg_latency_at_departure", data=df, ax=ax1)
63-
ax1.axhline(y=df.avg_latency_at_departure.mean(), color="red")
110+
ax1.axhline(y=df["avg_latency_at_departure"].mean(), color="red")
64111

112+
# 2) Mean system delay (histogram)
65113
ax2 = fig.add_subplot(2, 2, 2)
66114
ax2.set_title("Mean system delay (histogram)")
67115
ax2.set_xlabel("Mean system delay (seconds)")
68116
ax2.set_ylabel("")
69117
sns.histplot(x="avg_latency_at_departure", data=df, ax=ax2, stat="probability")
70118

119+
# 3) Mean queue length over time
71120
ax3 = fig.add_subplot(2, 2, 3)
72121
ax3.set_xlim([0, total_duration])
73122
ax3.set_title("Mean queue length over time")
74123
ax3.set_xlabel("Simulated Time (seconds)")
75124
ax3.set_ylabel("Mean queue len")
125+
if "avg_queue_len" not in df.columns:
126+
raise RuntimeError("Column avg_queue_len is missing.")
76127
sns.scatterplot(x="timestamp", y="avg_queue_len", data=df, ax=ax3)
77-
ax3.axhline(y=df.avg_queue_len.mean(), color="red")
128+
ax3.axhline(y=df["avg_queue_len"].mean(), color="red")
78129

130+
# 4) Mean queue length (histogram)
79131
ax4 = fig.add_subplot(2, 2, 4)
80132
ax4.set_title("Mean queue length (histogram)")
81133
ax4.set_xlabel("Mean queue len")
@@ -85,36 +137,44 @@ def analyse_queue(self, obj_name: str) -> None:
85137
fig.tight_layout()
86138
fig.savefig("NetSimIntQueueAnalyser.png", dpi=600)
87139

88-
mean = df.avg_latency_at_departure.mean()
89-
sigma = sample_stdev(df.avg_latency_at_departure)
90-
ci = 1.96 * sigma / (len(df.avg_latency_at_departure)) ** 0.5
140+
# Compute confidence intervals for average latency
141+
mean = df["avg_latency_at_departure"].mean()
142+
sigma = sample_stdev(df["avg_latency_at_departure"])
143+
ci = 1.96 * sigma / (len(df["avg_latency_at_departure"])) ** 0.5
91144

92145
print("Avg latency")
93146
print(f"\tMean: {mean:.12f}")
94147
print(f"\tStdev: {sigma:.12f}")
95148
print(f"\tCI: {ci:.12f}")
96-
print(f"\tError %: {ci/mean*100:.12f}")
149+
print(f"\tError %: {ci / mean * 100:.12f}" if mean != 0 else "\tMean is 0")
97150

98-
mean = df.avg_queue_len.mean()
99-
sigma = sample_stdev(df.avg_queue_len)
100-
ci = 1.96 * sigma / (len(df.avg_queue_len)) ** 0.5
151+
# Compute confidence intervals for average queue length
152+
mean = df["avg_queue_len"].mean()
153+
sigma = sample_stdev(df["avg_queue_len"])
154+
ci = 1.96 * sigma / (len(df["avg_queue_len"])) ** 0.5
101155

102156
print("Avg queue len")
103157
print(f"\tMean: {mean:.12f}")
104158
print(f"\tStdev: {sigma:.12f}")
105159
print(f"\tCI: {ci:.12f}")
106-
print(f"\tError %: {ci/mean*100:.12f}")
107-
108-
df["throughput"] = df.avg_send_rate_bps / 10000000
109-
mean = df.throughput.mean()
110-
sigma = sample_stdev(df.throughput)
111-
ci = 1.96 * sigma / (len(df.throughput)) ** 0.5
112-
113-
print("Avg throughput")
114-
print(f"\tMean: {mean:.12f}")
115-
print(f"\tStdev: {sigma:.12f}")
116-
print(f"\tCI: {ci:.12f}")
117-
print(f"\tError %: {ci/mean*100:.12f}")
160+
print(f"\tError %: {ci / mean * 100:.12f}" if mean != 0 else "\tMean is 0")
161+
162+
# Compute confidence intervals for throughput (convert Bps to a fraction of 10Mbps for example)
163+
if "avg_send_rate_bps" in df.columns:
164+
df["throughput"] = df["avg_send_rate_bps"] / 1e7
165+
mean = df["throughput"].mean()
166+
sigma = sample_stdev(df["throughput"])
167+
ci = 1.96 * sigma / (len(df["throughput"])) ** 0.5
168+
169+
print("Avg throughput")
170+
print(f"\tMean: {mean:.12f}")
171+
print(f"\tStdev: {sigma:.12f}")
172+
print(f"\tCI: {ci:.12f}")
173+
print(f"\tError %: {ci / mean * 100:.12f}" if mean != 0 else "\tMean is 0")
174+
else:
175+
print(
176+
"avg_send_rate_bps column is missing. Throughput analysis is skipped."
177+
)
118178

119179

120180
ANALYSER_TYPE_MAP: Dict[str, NetAnalyser] = {

0 commit comments

Comments
 (0)