Skip to content

Commit e91e9f9

Browse files
Add files via upload
1 parent 64c562c commit e91e9f9

File tree

56 files changed

+4700
-0
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

56 files changed

+4700
-0
lines changed
Lines changed: 206 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,206 @@
1+
"""
2+
6G Upload Simulator — 6G-upload-simulator.py
3+
4+
Purpose
5+
-------
6+
This single-file project is a self-contained simulator for high-throughput "6G-like" uploads
7+
that you can upload to GitHub as an example/demo repository.
8+
9+
It does NOT implement a real 6G radio stack (impractical here). Instead it provides:
10+
- an asyncio-based HTTP server (aiohttp) that accepts uploads and measures arrival rate
11+
- a client that opens concurrent connections and streams generated payload to the server
12+
- throughput, latency and loss measurements saved to CSV for easy plotting
13+
- configurable parameters (concurrency, payload size, duration) to emulate large-bandwidth tests
14+
15+
How to use
16+
----------
17+
1) Install requirements:
18+
python -m pip install aiohttp aiofiles
19+
20+
2) Run server (example):
21+
python 6G-upload-simulator.py --mode server --host 0.0.0.0 --port 8080 --out results_server.csv
22+
23+
3) Run client (example):
24+
python 6G-upload-simulator.py --mode client --url http://localhost:8080/upload --clients 8 --duration 20 --chunk-size 1048576 --out results_client.csv
25+
26+
Files
27+
-----
28+
This single file acts as both server and client depending on the --mode argument.
29+
30+
Notes
31+
-----
32+
- This is a testing/demo tool to measure application-level throughput (how fast data can be POSTed
33+
to a web endpoint using multiple concurrent streams). Real 6G involves radio, PHY/MAC layers,
34+
spectrum (THz), and specialized hardware — outside the scope of a simple Python demo.
35+
36+
"""
37+
38+
import argparse
import asyncio
import csv
import os
import sys
import time
from datetime import datetime, timezone

import aiofiles
from aiohttp import web, ClientSession
47+
48+
# -----------------
49+
# Server code
50+
# -----------------
51+
52+
class UploadServer:
    """aiohttp server that accepts streamed uploads on POST /upload.

    Each completed upload is timed and appended to ``self.stats``; a
    background task flushes the accumulated stats to ``self.out`` as CSV
    every 2 seconds.
    """

    def __init__(self, host='0.0.0.0', port=8080, out='server_results.csv'):
        self.host = host
        self.port = port
        self.app = web.Application()
        self.app.router.add_post('/upload', self.handle_upload)
        # Start the periodic CSV saver from an on_startup hook: create_task
        # requires a *running* loop, and web.run_app manages its own loop,
        # so scheduling the task here (or in run()) would never work.
        self.app.on_startup.append(self._start_saver)
        self.start_time = None
        self.out = out
        self.stats = []  # list of dicts: timestamp, bytes_received, duration, throughput
        self._saver_task = None

    async def _start_saver(self, app):
        """on_startup hook: launch the periodic stats writer inside the running loop."""
        self._saver_task = asyncio.create_task(self._save_periodic())

    async def handle_upload(self, request):
        """Consume a streaming upload, count its bytes, and record throughput."""
        reader = request.content
        bytes_received = 0
        t0 = time.perf_counter()
        # Drain the body in 64 KiB chunks; we only measure, never store payload.
        async for chunk in reader.iter_chunked(64 * 1024):
            bytes_received += len(chunk)
        t1 = time.perf_counter()
        duration = max(t1 - t0, 1e-9)  # guard against division by zero
        throughput_bps = bytes_received / duration
        record = {
            # datetime.utcnow() is deprecated; this yields the identical
            # naive-UTC ISO string with the trailing 'Z' the CSV consumers see.
            'timestamp': datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + 'Z',
            'bytes_received': bytes_received,
            'duration_s': duration,
            'throughput_Bps': throughput_bps,
            'throughput_Mbps': throughput_bps * 8 / 1e6
        }
        self.stats.append(record)
        return web.Response(text=str(record))

    async def _save_periodic(self):
        """Persist stats to CSV every 2 seconds while the server runs."""
        while True:
            if self.stats:
                await self._save()
            await asyncio.sleep(2)

    async def _save(self):
        """Atomically rewrite the CSV output from the accumulated stats."""
        header = ['timestamp', 'bytes_received', 'duration_s', 'throughput_Bps', 'throughput_Mbps']
        tmp = self.out + '.tmp'
        async with aiofiles.open(tmp, 'w') as f:
            await f.write(','.join(header) + '\n')
            for r in self.stats:
                row = [r.get(h, '') for h in header]
                await f.write(','.join(map(str, row)) + '\n')
        # Atomic rename so a reader never observes a half-written file.
        os.replace(tmp, self.out)

    def run(self):
        """Serve forever; web.run_app creates and manages the event loop.

        The periodic saver is started by the on_startup hook registered
        in __init__, so no manual loop/task plumbing is needed here.
        """
        web.run_app(self.app, host=self.host, port=self.port)
104+
105+
# -----------------
106+
# Client code
107+
# -----------------
108+
109+
class UploadClient:
    """Drives concurrent streaming uploads and records per-client throughput.

    Spawns ``clients`` uploader tasks, each POSTing a stream of identical
    chunks to ``url`` for ``duration`` seconds, then writes one CSV row per
    client to ``out``.
    """

    def __init__(self, url, clients=4, duration=10, chunk_size=256 * 1024, out='client_results.csv'):
        self.url = url
        self.clients = clients        # number of concurrent uploader tasks
        self.duration = duration      # seconds each uploader keeps streaming
        self.chunk_size = chunk_size  # bytes per generated chunk
        self.out = out                # CSV destination path
        self.results = []             # one stats dict per finished uploader

    async def _generate_payload(self):
        """Build the chunk once per uploader.

        Each uploader re-yields this single bytes object for every chunk,
        so memory use stays at one chunk per client.
        """
        return b'a' * self.chunk_size

    async def _uploader(self, client_id):
        """Stream chunks to self.url until the deadline, then record stats."""
        t_end = time.perf_counter() + self.duration
        bytes_sent = 0
        chunks_sent = 0
        payload = await self._generate_payload()
        async with ClientSession() as ses:
            while time.perf_counter() < t_end:
                try:
                    # Send many chunks in one POST via an async generator;
                    # it keeps yielding until the shared deadline passes.
                    async def gen():
                        nonlocal bytes_sent, chunks_sent
                        while time.perf_counter() < t_end:
                            yield payload
                            bytes_sent += len(payload)
                            chunks_sent += 1

                    # NOTE(review): numeric timeouts are deprecated in recent
                    # aiohttp in favor of aiohttp.ClientTimeout(total=...);
                    # kept numeric here to avoid importing the aiohttp module.
                    async with ses.post(self.url, data=gen(), timeout=self.duration + 5) as resp:
                        await resp.text()
                except Exception as e:
                    # Any transport error aborts this client; partial stats
                    # accumulated so far are still recorded below.
                    print(f"client {client_id} error: {e}")
                    break
        # Elapsed since this uploader started (t_end - duration == start time).
        duration_actual = max(time.perf_counter() - (t_end - self.duration), 1e-9)
        throughput_bps = bytes_sent / duration_actual
        self.results.append({
            'client_id': client_id,
            'bytes_sent': bytes_sent,
            'duration_s': duration_actual,
            'throughput_Bps': throughput_bps,
            'throughput_Mbps': throughput_bps * 8 / 1e6,
            'chunks_sent': chunks_sent,
        })

    async def run(self):
        """Launch all uploaders concurrently, wait for them, then save CSV."""
        tasks = [asyncio.create_task(self._uploader(i)) for i in range(self.clients)]
        await asyncio.gather(*tasks)
        await self._save()

    async def _save(self):
        """Atomically write per-client stats to self.out as CSV."""
        header = ['client_id', 'bytes_sent', 'duration_s', 'throughput_Bps', 'throughput_Mbps', 'chunks_sent']
        tmp = self.out + '.tmp'
        async with aiofiles.open(tmp, 'w') as f:
            await f.write(','.join(header) + '\n')
            for r in self.results:
                row = [r.get(h, '') for h in header]
                await f.write(','.join(map(str, row)) + '\n')
        # Atomic rename so a reader never observes a half-written file.
        os.replace(tmp, self.out)
173+
174+
# -----------------
175+
# CLI
176+
# -----------------
177+
178+
def parse_args():
    """Define and parse the command-line options shared by both modes."""
    parser = argparse.ArgumentParser(description='6G Upload Simulator (server + client)')
    # (flag, options) pairs kept in one table so the CLI surface is easy to scan.
    spec = [
        ('--mode', dict(choices=['server', 'client'], required=True)),
        ('--host', dict(default='0.0.0.0')),
        ('--port', dict(type=int, default=8080)),
        ('--url', dict(default='http://localhost:8080/upload')),
        ('--clients', dict(type=int, default=4, help='Number of concurrent upload clients')),
        ('--duration', dict(type=int, default=10, help='Test duration in seconds')),
        ('--chunk-size', dict(type=int, default=256 * 1024, help='Bytes per chunk')),
        ('--out', dict(default='results.csv')),
    ]
    for flag, options in spec:
        parser.add_argument(flag, **options)
    return parser.parse_args()
189+
190+
async def main():
    """Entry point: dispatch to server or client mode based on CLI args."""
    args = parse_args()
    if args.mode == 'server':
        # Server mode blocks inside run() until interrupted.
        server = UploadServer(host=args.host, port=args.port, out=args.out)
        print(f"Starting upload server on {args.host}:{args.port}. Results -> {args.out}")
        server.run()
        return
    # Client mode: run the uploaders, then report where the CSV landed.
    client = UploadClient(
        url=args.url,
        clients=args.clients,
        duration=args.duration,
        chunk_size=args.chunk_size,
        out=args.out,
    )
    print(f"Running client: url={args.url} clients={args.clients} duration={args.duration}s chunk_size={args.chunk_size}")
    await client.run()
    print(f"Client results saved to {args.out}")
201+
202+
if __name__ == '__main__':
    # Run the async entry point; Ctrl-C is the expected way to stop the
    # server mode, so swallow it with a short notice instead of a traceback.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print('Interrupted')

0 commit comments

Comments
 (0)