discus / pydtnsim

Commit d220a138, authored Aug 16, 2017 by Jarrod Pas

Use shed module from pydtn

Parent: 2e9badb5
Pipeline #1627 failed in 55 seconds
Changes: 1, Pipelines: 1

examples/shed.py
-import csv
+"""Example to run a batch of simulations on SHED data."""
 import sys

 from argparse import ArgumentParser
-from collections import defaultdict, namedtuple
-from itertools import groupby, count
+from collections import namedtuple
 from multiprocessing import Pool

-from pydtn import Network, random_traffic, Node, EpidemicNode, Contact
+from pydtn import Network, random_traffic, Node, EpidemicNode
 from pydtn.community import BubbleNode, HCBFNode, LouvainCommunity
+import pydtn.shed as shed
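
The commit's intent is already visible in the imports: parsing the SHED CSV no longer happens in this example and is instead delegated to pydtn's shed module. A minimal sketch of how the two helpers are used further down in the diff (the file path is an assumption, and read_meta_file is only known here to expose a 'nodes' entry):

    # Sketch only; 'data/shed/meta.csv' is an assumed path, not part of the commit.
    import pydtn.shed as shed

    meta = shed.read_meta_file('data/shed/meta.csv')   # metadata mapping, e.g. meta['nodes']
    trace = shed.shed_trace('data/shed/meta.csv')      # contact trace consumed by pydtn.Network
    print(meta['nodes'])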
-class ShedTrace:
-    def __init__(self, path, slot_size=300):
-        self.path = path
-        self.slot_size = slot_size
-
-        pairs = defaultdict(set)
-        with open(path) as slots:
-            reader = csv.reader(slots)
-            next(reader)
-            for row in reader:
-                _, source, _, target, _, slot = row
-                pair = min(source, target), max(source, target)
-                slot = int(slot)
-                pairs[pair].add(slot)
-
-        node = count()
-        nodes = {}
-        self.contacts = []
-        for (source, target), slots in pairs.items():
-            if source not in nodes:
-                nodes[source] = next(node)
-            source = nodes[source]
-            if target not in nodes:
-                nodes[target] = next(node)
-            target = nodes[target]
-
-            slots = sorted(slots)
-            # groups consecutive slots
-            # if the lambda is mapped it will return:
-            # [1, 2, 3, 6, 7, 9] -> [-1, -1, -1, -3, -3, -4]
-            for _, group in groupby(enumerate(slots), lambda p: p[0] - p[1]):
-                times = list(map(lambda g: g[1], group))
-                start = times[0] * self.slot_size
-                end = (times[-1] + 1) * self.slot_size
-                self.contacts.append(Contact(start, source, target, True))
-                self.contacts.append(Contact(end, source, target, False))
-        self.contacts.sort()
-
-        self.nodes = len(nodes)
-
-    def __iter__(self):
-        return iter(self.contacts)
+
+Simulation = namedtuple('Simulation', ['trace', 'node_type', 'seed'])
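
The removed ShedTrace class turned sorted contact slots into start/end intervals by grouping runs of consecutive slot numbers, which is what its enumerate/groupby comment alludes to. A standalone illustration of that grouping trick (not part of the commit):

    # Within a run of consecutive values, index - value is constant, so groupby
    # splits the sorted slots into runs of consecutive integers.
    from itertools import groupby

    slots = [1, 2, 3, 6, 7, 9]
    runs = [[value for _, value in group]
            for _, group in groupby(enumerate(slots), lambda p: p[0] - p[1])]
    print(runs)  # [[1, 2, 3], [6, 7], [9]]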
-Task = namedtuple('Task', ['trace', 'node_type', 'seed'])


-def run_task(task):
-    seed = task.seed
-    trace = task.trace
+def run_simulation(simulation):
+    """Run a simulation."""
+    seed = simulation.seed
+
+    trace_metadata = shed.read_meta_file(simulation.trace)
+    trace = shed.shed_trace(simulation.trace)

     epoch = 7 * 24 * 60 * 60  # 7 days

-    node_type = task.node_type
+    node_type = simulation.node_type
     node_options = {
         'tick_rate': 5 * 60,  # 5 mins
         'community': LouvainCommunity(epoch),
     }

-    traffic_speed = 30 * 60  # 1 packet every 30 mins
-
     nodes = {
-        node_id: task.node_type(**node_options)
-        for node_id in range(trace.nodes)
+        node_id: simulation.node_type(**node_options)
+        for node_id in range(trace_metadata['nodes'])
     }

-    traffic = random_traffic(nodes, start=epoch, speed=traffic_speed, seed=seed)
+    traffic_options = {
+        'seed': seed,
+        'start': epoch,
+        'speed': 30 * 60,  # 1 packet every 30 mins
+    }
+    traffic = random_traffic(nodes, **traffic_options)

     network = Network(nodes, traffic=traffic, trace=trace)
     network.run()

     stats = {
-        'trace': trace.path,
+        'trace': simulation.trace,
         'node_type': node_type.__name__,
         'seed': seed,
     }
     stats.update(network.stats_summary)
     # return stats because we can't pickle the network as it is a generator.
     return stats
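
With the namedtuple and the shed helpers in place, a single simulation can be driven directly, without the multiprocessing pool used in main below. A hypothetical single run (the trace path is an assumption; Simulation, run_simulation and EpidemicNode are defined in the script above):

    # Hypothetical usage, not part of the commit.
    sim = Simulation(trace='data/shed/meta.csv', node_type=EpidemicNode, seed=42)
    stats = run_simulation(sim)
    print(stats)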
 def main(args):
-    trace = ShedTrace(args['shed'])
+    """Run simulation for each seed in args."""
+    trace = args['shed']

     pool = Pool()

-    tasks = []
+    simulations = []
     for seed in args['seeds']:
         for node_type in [Node, EpidemicNode, BubbleNode, HCBFNode]:
-            tasks.append(Task(trace=trace, node_type=node_type, seed=seed))
+            sim = Simulation(trace=trace, node_type=node_type, seed=seed)
+            simulations.append(sim)

-    for stats in pool.imap_unordered(run_task, tasks):
+    for stats in pool.imap_unordered(run_simulation, simulations):
         print(stats)
 def parse_args(args):
     """Parse arguments."""
     parser = ArgumentParser()
     parser.add_argument('shed')
...
...
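
The diff is collapsed past this point, so the rest of parse_args and the script's entry point are not shown. Purely as an illustration of how such a script is typically finished, and not the committed code (the --seeds option name is only inferred from main's use of args['seeds']):

    # Hypothetical completion, not the committed code.
    def parse_args(args):
        """Parse arguments."""
        parser = ArgumentParser()
        parser.add_argument('shed')
        parser.add_argument('--seeds', type=int, nargs='+', default=[42])
        return vars(parser.parse_args(args))


    if __name__ == '__main__':
        main(parse_args(sys.argv[1:]))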