Commit 5b3b63b1 authored by Jarrod Pas

Merge branch 'document-examples' into 'develop'

Document examples

See merge request !2
parents 76feb6fc 15fb9f33
Pipeline #1645 passed in 1 minute and 34 seconds
image: python:alpine

before_script:
  - pip install --editable .

pylint:
  stage: test
  script:
......
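The pylint job's script is truncated above. A plausible completion is sketched below; the lint command itself is an assumption, not taken from this diff, and only the keys already visible (stage, script) are reused.

# Sketch only: the script line is assumed, not shown in the diff.
pylint:
  stage: test
  script:
    - pylint pydtn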
"""Example simulation with custom random nodes."""
__author__ = "Jarrod Pas <j.pas@usask.ca>"
from random import Random
from pydtn import Network, Node, random_trace, random_traffic
class RandomNode(Node):
"""Node that sends packets to a random neightbour."""
def __init__(self, seed=None, **options):
"""
Create a random node.
Keyword Arguments:
seed -- seed for random number generator
"""
super().__init__(**options)
self.random = Random(seed)
def forward(self, packet):
"""Pick a random neighbour to send packet to."""
neighbours = list(self.neighbours)
if neighbours:
target = self.random.choice(neighbours)
......@@ -17,26 +30,33 @@ class RandomNode(Node):
def main():
    """Run a simulation with random nodes."""
    duration = 500
    nodes = 50
    seed = 42

    node_options = {
        'seed': seed,
        'tick_rate': 1,
    }
    nodes = {
        node_id: RandomNode(**node_options)
        for node_id in range(nodes)
    }

    traffic_options = {
        'seed': seed,
        'speed': 1,
    }
    traffic = random_traffic(nodes, **traffic_options)

    trace_options = {
        'seed': seed,
    }
    trace = random_trace(nodes, **trace_options)

    network = Network(nodes, traffic=traffic, trace=trace)
    # run the simulation for 500 ticks
    network.run(until=duration)

    print(network.stats_summary)
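The diff is truncated before the script's entry point. A standard guard would simply call main(); the lines below are an assumption for illustration, not part of the diff shown here.

# Hypothetical entry point; assumed, not taken from the diff above.
if __name__ == '__main__':
    main()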
......
"""Example to run a batch of simulations on SHED data."""

import sys
from argparse import ArgumentParser
from collections import namedtuple
from multiprocessing import Pool

from pydtn import Network, random_traffic, Node, EpidemicNode
from pydtn.community import BubbleNode, HCBFNode, LouvainCommunity
import pydtn.shed as shed


Simulation = namedtuple('Simulation', ['trace', 'node_type', 'seed'])


def run_simulation(simulation):
    """Run a simulation."""
    seed = simulation.seed

    trace_metadata = shed.read_meta_file(simulation.trace)
    trace = shed.shed_trace(simulation.trace)

    epoch = 7*24*60*60  # 7 days

    node_type = simulation.node_type
    node_options = {
        'tick_rate': 5 * 60,  # 5 mins
        'community': LouvainCommunity(epoch),
    }
    nodes = {
        node_id: simulation.node_type(**node_options)
        for node_id in range(trace_metadata['nodes'])
    }

    traffic_options = {
        'seed': seed,
        'start': epoch,
        'speed': 30 * 60,  # 1 packet every 30 mins
    }
    traffic = random_traffic(nodes, **traffic_options)

    network = Network(nodes, traffic=traffic, trace=trace)
    network.run()

    stats = {
        'trace': simulation.trace,
        'node_type': node_type.__name__,
        'seed': seed,
    }
    stats.update(network.stats_summary)
    # Return stats because we can't pickle the network, as it is a generator.
    return stats


def main(args):
    """Run a simulation for each seed in args."""
    trace = args['shed']

    pool = Pool()

    simulations = []
    for seed in args['seeds']:
        for node_type in [Node, EpidemicNode, BubbleNode, HCBFNode]:
            sim = Simulation(trace=trace, node_type=node_type, seed=seed)
            simulations.append(sim)

    for stats in pool.imap_unordered(run_simulation, simulations):
        print(stats)


def parse_args(args):
    """Parse arguments."""
    parser = ArgumentParser()
    parser.add_argument('shed')
......
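main() consumes both args['shed'] and args['seeds'], but the argument parsing is cut off above. The sketch below is a hypothetical completion; only the 'shed' positional argument is confirmed by the diff, and the seed option name and defaults are assumptions.

# Hypothetical completion of the truncated parse_args and entry point;
# reuses ArgumentParser and sys imported at the top of the example.
def parse_args(args):
    """Parse arguments (hypothetical completion)."""
    parser = ArgumentParser()
    parser.add_argument('shed')
    parser.add_argument('--seeds', type=int, nargs='+', default=[42])
    return vars(parser.parse_args(args))


if __name__ == '__main__':
    main(parse_args(sys.argv[1:]))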
......@@ -251,7 +251,7 @@ class Buffer:
    Can be removed from while being iterated over.
    """

    def __init__(self, capacity=None):
        """
        Create a buffer.
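The docstring notes that a Buffer can be removed from while it is being iterated over. The sketch below assumes only that a Buffer iterates over its packets and has the remove() method used by Node.send_success further down; drop_packets and should_drop are hypothetical names.

# Sketch: assumes only iteration and remove(), as used elsewhere in this diff.
def drop_packets(buffer, should_drop):
    """Remove every packet for which should_drop(packet) is true."""
    for packet in buffer:
        if should_drop(packet):
            buffer.remove(packet)  # safe during iteration, per the docstring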
......@@ -411,10 +411,12 @@ class Node:
        Removes the sent packet from the buffer.
        """
        # pylint: disable=unused-argument
        self.buffer.remove(packet)

    def send_failure(self, packet, target):
        """Call when a send fails."""
        # pylint: disable=unused-argument
        pass

    def packet_expiry(self, packet):
......@@ -427,6 +429,7 @@ class Node:
        If the packet has reached its destination, notes the time.
        """
        # pylint: disable=unused-argument
        if packet.destination is self:
            return
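send_success, send_failure, and packet_expiry are the callbacks a routing Node subclass can override. A minimal sketch, assuming only the hook names and signatures visible in this diff; CountingNode is a hypothetical example, not part of pydtn.

# Hypothetical subclass; relies only on the hooks shown above and the
# **options constructor pattern used by the RandomNode example earlier.
class CountingNode(Node):
    """Node that counts failed sends, purely for illustration."""

    def __init__(self, **options):
        super().__init__(**options)
        self.failed_sends = 0

    def send_failure(self, packet, target):
        """Tally the failure instead of ignoring it."""
        self.failed_sends += 1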
......
"""pydtn module for SHED dataset specific tools."""
__all__ = [
'write_meta_file',
'read_meta_file',
'shed_trace',
]
__author__ = 'Jarrod Pas <j.pas@usask.ca>'
import csv
import json
from collections import defaultdict
from itertools import groupby, count
from os import path
from pydtn import Contact
def write_meta_file(meta_path, csv_path, duty_cycle_length=300):
    """Write a metadata file for a data set, derived from the data set itself."""
    nodes = set()
    last = -1
    with open(csv_path) as csv_file:
        csv_file = csv.reader(csv_file)
        next(csv_file)
        for row in csv_file:
            _, source, _, target, _, slot = row
            nodes.add(source)
            nodes.add(target)
            last = max(last, int(slot))

    common = path.commonprefix([meta_path, csv_path])
    csv_path = path.relpath(csv_path, common)

    meta_data = {
        'data': csv_path,
        'nodes': len(nodes),
        'duration': last * duty_cycle_length,
        'duty_cycle_length': duty_cycle_length,
    }

    with open(meta_path, 'w') as meta_file:
        json.dump(meta_data, meta_file, sort_keys=True, indent=2)
        meta_file.write('\n')
def read_meta_file(meta_path):
    """Return metadata for a data set, from a metadata file."""
    with open(meta_path) as meta_file:
        meta = json.load(meta_file)
        meta['data'] = path.join(path.dirname(meta_path), meta['data'])
        return meta
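Taken together, write_meta_file and read_meta_file define the on-disk metadata format: a small JSON file whose 'data' entry points at the contact CSV relative to the JSON file. A usage sketch with hypothetical file names:

# Hypothetical paths; the CSV must already exist for write_meta_file to scan it.
write_meta_file('dataset/meta.json', 'dataset/contacts.csv', duty_cycle_length=300)

meta = read_meta_file('dataset/meta.json')
# 'data' comes back resolved relative to meta.json, e.g. 'dataset/contacts.csv'.
print(meta['nodes'], meta['duration'], meta['data'])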
def _get_contact_pairs(csv_path):
    pairs = defaultdict(set)
    with open(csv_path) as csv_file:
        csv_file = csv.reader(csv_file)
        next(csv_file)
        for row in csv_file:
            _, source, _, target, _, slot = row
            pair = min(source, target), max(source, target)
            slot = int(slot)
            pairs[pair].add(slot)
    return dict(pairs)
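_get_contact_pairs canonicalises each (source, target) pair so the direction of a sighting does not matter, and collects the duty-cycle slots in which that pair met. A tiny illustration with hypothetical node ids and rows:

# Hypothetical rows in the CSV's (_, source, _, target, _, slot) layout.
rows = [
    ('0', 'a', '0', 'b', '0', '3'),
    ('0', 'b', '0', 'a', '0', '4'),  # reversed direction, same pair
]
pairs = {}
for _, source, _, target, _, slot in rows:
    pair = min(source, target), max(source, target)
    pairs.setdefault(pair, set()).add(int(slot))
assert pairs == {('a', 'b'): {3, 4}}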
def shed_trace(meta_path):
    """Generate the contact trace for a duty-cycle based SHED dataset, given its metadata file."""
    meta = read_meta_file(meta_path)
    pairs = _get_contact_pairs(meta['data'])

    node = count()
    nodes = {}
    contacts = []
    for (source, target), slots in pairs.items():
        # get canonical node id for source
        if source not in nodes:
            nodes[source] = next(node)
        source = nodes[source]

        # get canonical node id for target
        if target not in nodes:
            nodes[target] = next(node)
        target = nodes[target]

        slots = sorted(slots)
        # Group consecutive slots: mapping the lambda over enumerate(slots)
        # turns [1, 2, 3, 6, 7, 9] into [-1, -1, -1, -3, -3, -4], so each run
        # of consecutive slots shares a group key.
        for _, group in groupby(enumerate(slots), lambda p: p[0]-p[1]):
            times = list(map(lambda g: g[1], group))
            start = times[0] * meta['duty_cycle_length']
            end = (times[-1] + 1) * meta['duty_cycle_length']
            contacts.append(Contact(start, source, target, True))
            contacts.append(Contact(end, source, target, False))

    yield from sorted(contacts)
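The groupby idiom above is compact but easy to misread. Here is the same idea in isolation, under a hypothetical helper name that is not part of pydtn; it collapses each run of consecutive slot numbers into a (first, last) pair:

# Standalone sketch of the consecutive-slot grouping used by shed_trace.
from itertools import groupby


def consecutive_runs(slots):
    """Yield (first, last) for each run of consecutive integers in slots."""
    # index - value is constant within a run of consecutive values,
    # so it serves as the group key.
    for _, group in groupby(enumerate(sorted(slots)), lambda pair: pair[0] - pair[1]):
        run = [slot for _, slot in group]
        yield run[0], run[-1]


# [1, 2, 3, 6, 7, 9] -> [(1, 3), (6, 7), (9, 9)]
print(list(consecutive_runs([1, 2, 3, 6, 7, 9])))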