Commit 2d6ae20f authored by Jarrod Pas's avatar Jarrod Pas
Browse files

Generalize shed example

parent 16b0c379
import csv
import sys
from argparse import ArgumentParser
from collections import defaultdict, namedtuple
from itertools import groupby, count
from multiprocessing import Pool
from pydtn import Network, random_traffic, Node, EpidemicNode, Contact
from pydtn.community import BubbleNode, HCBFNode, LouvainCommunity
class ShedTrace:
    """Contact trace built from a SHED CSV of (source, target, slot) rows.

    Each time slot is ``slot_size`` seconds long; runs of consecutive
    slots for a node pair are merged into one contact interval, emitted
    as a contact-up event at the start and a contact-down event at the
    end.
    """

    def __init__(self, path, slot_size=300):
        self.path = path
        self.slot_size = slot_size

        # Collect the set of active time slots for every unordered pair.
        pair_slots = defaultdict(set)
        with open(path) as csv_file:
            rows = csv.reader(csv_file)
            next(rows)  # discard the header row
            for row in rows:
                _, source, _, target, _, slot = row
                key = (min(source, target), max(source, target))
                pair_slots[key].add(int(slot))

        fresh_id = count()
        id_of = {}
        self.contacts = []
        for (source, target), slots in pair_slots.items():
            # Map raw node labels onto dense integer ids, first-seen order.
            if source not in id_of:
                id_of[source] = next(fresh_id)
            source = id_of[source]
            if target not in id_of:
                id_of[target] = next(fresh_id)
            target = id_of[target]

            ordered = sorted(slots)
            # Group consecutive slots: index - slot is constant within a run,
            # e.g. [1, 2, 3, 6, 7, 9] -> keys [-1, -1, -1, -3, -3, -4].
            for _, run in groupby(enumerate(ordered), lambda pair: pair[0] - pair[1]):
                run_slots = [slot for _, slot in run]
                start = run_slots[0] * self.slot_size
                end = (run_slots[-1] + 1) * self.slot_size
                self.contacts.append(Contact(start, source, target, True))
                self.contacts.append(Contact(end, source, target, False))

        self.contacts.sort()
        # Number of distinct nodes seen in the trace.
        self.nodes = len(id_of)

    def __iter__(self):
        """Yield contacts in chronological order."""
        return iter(self.contacts)
# Work item for one simulation run: the trace, the node class, and the seed.
Task = namedtuple('Task', ('trace', 'node_type', 'seed'))
def run_task(task):
    """Simulate one network configuration and return its statistics.

    Args:
        task: ``Task`` naming the trace, the node class to instantiate,
            and the traffic RNG seed.

    Returns:
        dict of run metadata (trace path, node type name, seed) merged
        with the network's summary statistics.
    """
    seed = task.seed
    trace = task.trace
    node_type = task.node_type

    epoch = 7 * 24 * 60 * 60  # 7 days
    node_options = {
        'tick_rate': 5 * 60,  # 5 mins
        'community': LouvainCommunity(epoch),
    }
    traffic_speed = 30 * 60  # 1 packet every 30 mins

    # One node instance per node id in the trace.
    nodes = {
        node_id: node_type(**node_options)
        for node_id in range(trace.nodes)
    }

    traffic = random_traffic(
        nodes,
        start=epoch,
        speed=traffic_speed,
        seed=seed,
    )

    network = Network(nodes, traffic=traffic, trace=trace)
    network.run()

    stats = {
        'trace': trace.path,
        'node_type': node_type.__name__,
        'seed': seed,
    }
    stats.update(network.stats_summary)
    return stats
def main(args):
    """Run every (seed, node type) combination over the given SHED trace.

    Args:
        args: parsed CLI options; ``args['shed']`` is the trace path and
            ``args['seeds']`` the list of traffic seeds.
    """
    trace = ShedTrace(args['shed'])

    tasks = [
        Task(trace=trace, node_type=node_type, seed=seed)
        for seed in args['seeds']
        for node_type in [Node, EpidemicNode, BubbleNode, HCBFNode]
    ]

    # Context manager terminates and joins the worker processes; the
    # original never closed the Pool, leaking its workers.
    with Pool() as pool:
        for stats in pool.imap_unordered(run_task, tasks):
            print(stats)
def parse_args(args):
    """Parse command-line arguments into a plain dict.

    Args:
        args: argument list, e.g. ``sys.argv[1:]``.

    Returns:
        dict with keys ``'shed'`` (trace path) and ``'seeds'`` (list of
        ints; defaults to ``[None]`` when no seeds are given).
    """
    parser = ArgumentParser()
    parser.add_argument('shed')
    parser.add_argument(
        '--seeds', '-s',
        metavar='SEED',
        type=int,
        nargs='+',
        default=[None],
    )
    return vars(parser.parse_args(args))
if __name__ == '__main__':
    # sys.exit is the explicit form for scripts; the exit() builtin is a
    # site.py convenience intended for the interactive interpreter.
    sys.exit(main(parse_args(sys.argv[1:])))
from multiprocessing import Pool
from os import path
from pydtn import Network, csv_trace, random_traffic, Node, EpidemicNode
from pydtn.community import BubbleNode, HCBFNode, LouvainCommunity
def task(spec):
    """Run one simulation over the reduced SHED 1 trace.

    Args:
        spec: ``(seed, node_type)`` pair — the traffic RNG seed and the
            node class to instantiate for every node.

    Returns:
        dict of run metadata (node type name, seed) merged with the
        network's summary statistics.
    """
    seed, node_type = spec

    shed1 = path.join('shed', 'data', 'pp_shed1_reduced.csv')
    epoch = 7 * 24 * 60 * 60  # 7 days
    # Renamed from `nodes`: the original count was immediately shadowed
    # by the node dict built below.
    node_count = 39  # nodes in the SHED 1 data set — TODO confirm against trace
    node_options = {
        'tick_rate': 5 * 60,  # 5 mins
        'community': LouvainCommunity(epoch),
    }
    traffic_speed = 30 * 60  # 1 packet every 30 mins

    nodes = {
        node_id: node_type(**node_options)
        for node_id in range(node_count)
    }

    trace = csv_trace(shed1)
    traffic = random_traffic(nodes, start=epoch,
                             speed=traffic_speed,
                             seed=seed)

    network = Network(nodes, traffic=traffic, trace=trace)
    network.run()

    stats = {
        'node_type': node_type.__name__,
        'seed': seed,
    }
    stats.update(network.stats_summary)
    return stats
def main():
    """Run every (seed, node type) combination and print the statistics."""
    tasks = [
        (seed, node_type)
        for seed in range(1)
        for node_type in [Node, EpidemicNode, BubbleNode, HCBFNode]
    ]

    # Context manager terminates and joins the worker processes; the
    # original never closed the Pool, leaking its workers.
    with Pool() as pool:
        for stats in pool.imap_unordered(task, tasks):
            print(stats)
if __name__ == '__main__':
    # main() returns None, so the process exits with status 0.
    exit(main())
#!/usr/bin/env python3
from collections import defaultdict
import csv
import sys
SLOT_SIZE = 300
def fint(a):
    """Parse *a* as an int, returning -1 for anything unparseable.

    Forgiving field converter for CSV cells that may hold non-numeric
    text (e.g. header remnants or empty values).
    """
    try:
        return int(a)
    except (TypeError, ValueError):
        # A bare except here would also swallow KeyboardInterrupt/SystemExit.
        return -1
def counter():
    """Yield 0, 1, 2, ... forever (a hand-rolled itertools.count)."""
    n = 0
    while True:
        yield n
        n += 1
# Fresh integer ids for nodes, handed out in first-seen order.
node_id = counter()

# pairs[(lo, hi)] -> set of time slots during which the pair was in contact.
pairs = defaultdict(set)
with open(sys.argv[1]) as csv_file:
    rows = csv.reader(csv_file)
    next(rows)  # skip the header line
    for row in rows:
        _, a, _, b, _, slot = map(fint, row)
        pairs[min(a, b), max(a, b)].add(slot)
id_map = {}
traces = []
for (a, b), slots in pairs.items():
    # Merge runs of consecutive slots into (first, end) half-open ranges.
    # BUG FIX: `slots` is a set, whose iteration order is not guaranteed
    # to be ascending — the run detection below requires sorted input.
    contacts = []
    first = None
    prev = None
    nslots = 0
    for slot in sorted(slots):
        if first is None:
            first = prev = slot
            nslots = 1
        elif slot - prev == 1:
            # Still inside the current consecutive run.
            prev = slot
            nslots += 1
        else:
            # Gap: close the current run and start a new one.
            contacts.append((first, first + nslots))
            first = prev = slot
            nslots = 1
    contacts.append((first, first + nslots))

    # Assign dense integer node ids in first-seen order.
    if a not in id_map:
        id_map[a] = next(node_id)
    a = id_map[a]
    if b not in id_map:
        id_map[b] = next(node_id)
    b = id_map[b]

    # Emit contact-up (1) and contact-down (0) events in seconds.
    for c in contacts:
        traces.append((c[0] * SLOT_SIZE, a, b, 1))
        traces.append((c[1] * SLOT_SIZE, a, b, 0))
traces.sort()

# Trace duration: one slot past the last event time, in seconds.
duration = traces[-1][0] + SLOT_SIZE
# The next counter value equals the number of ids handed out so far.
nodes = next(node_id)
# Fraction of (pair, slot) combinations that saw a contact: half the
# events are "up" events; possible pairs = n*(n-1)/2; slot count is
# duration/SLOT_SIZE.
activity = (0.5 * len(traces)) / ((nodes * (nodes - 1)) * 0.5 * (duration / SLOT_SIZE))

path = sys.argv[1].split('.csv')[0] + '_reduced.csv'
with open(path, 'w') as out:
    writer = csv.writer(out)
    writer.writerow(['time', 'a', 'b', 'state'])
    # Metadata row: duration, activity level, -1 sentinel, node count.
    writer.writerow([duration, f'{activity:.3E}', -1, nodes])
    writer.writerows(traces)
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment