Commit 55eba8d6 authored by Peter Jentsch

"fake" Poisson sampling with alias sampling

parent 915acb07
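Note on the approach: instead of drawing edge weights from Poisson count samplers at runtime, this commit tabulates each Poisson's pmf on the finite duration support and draws from an alias table, which costs O(support) once and then O(1) per draw regardless of λ. A minimal sketch of the idea, with an illustrative λ (the support bound 0:143 comes from the source comment further down):

    using Distributions
    # Tabulate the Poisson pmf on the finite support and build an
    # alias-table sampler over it; draws are then O(1) regardless of λ.
    λ, durmax = 3.2, 143
    probs = [pdf(Poisson(λ), x) for x in 0:durmax]
    s = Distributions.DiscreteNonParametricSampler(0:durmax, probs)
    rand(s)  # one "fake" Poisson draw, truncated to 0:durmax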
@@ -206,6 +206,12 @@ version = "0.1.1"
 deps = ["Mmap"]
 uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
 
+[[Dictionaries]]
+deps = ["Indexing", "Random"]
+git-tree-sha1 = "12e8b690de74848e9f41853817a94900a9e13bff"
+uuid = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
+version = "0.3.8"
+
 [[Distributed]]
 deps = ["Random", "Serialization", "Sockets"]
 uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
@@ -383,6 +389,11 @@ git-tree-sha1 = "5aa65f6204e1b68dfef024421e61fbcacecfbfa6"
 uuid = "c65182e5-40f4-518f-8165-175b85689199"
 version = "1.1.0"
 
+[[Indexing]]
+git-tree-sha1 = "ce1566720fd6b19ff3411404d4b977acd4814f9f"
+uuid = "313cdc1a-70c2-5d6a-ae34-0150d3930a38"
+version = "1.1.1"
+
 [[Inflate]]
 git-tree-sha1 = "f5fc07d4e706b84f72d54eedcc1c13d92fb0871c"
 uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9"
...
@@ -10,6 +10,7 @@ DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
 DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
 Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
+Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 ImportAll = "c65182e5-40f4-518f-8165-175b85689199"
 Intervals = "d8418881-c3e1-53bb-8760-2df7ec849ed5"
...
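Dictionaries.jl (and its dependency Indexing) is added to back the weights_dict::Dictionary{GraphEdge,UInt8} field below. A minimal sketch of the Dictionaries.jl calls involved, with illustrative keys and values:

    using Dictionaries
    d = Dictionary{Int,UInt8}()
    insert!(d, 1, 0x02)  # insert a new key; throws if the key already exists
    set!(d, 1, 0x03)     # insert-or-update
    get(d, 1, 0x00)      # lookup with a default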
@@ -33,4 +33,4 @@ function generate_contact_vectors!(ij_dist,ji_dist,i_to_j_contacts::Vector{T}, j
         end
     end
     return nothing
-end
\ No newline at end of file
+end
@@ -3,9 +3,7 @@ struct GraphEdge
     a::Int
     b::Int
 end
 function Base.hash(e::GraphEdge)
-    # display(e)
-
     return hash(minmax(e.a,e.b))
 end
 function Base.isequal(e1::GraphEdge,e2::GraphEdge)
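Hashing minmax(e.a, e.b) makes the key orientation-independent, so both directions of an undirected edge hit the same weights_dict entry. A small illustration (assuming Base.isequal, cut off above, compares the same minmax tuple):

    e1, e2 = GraphEdge(2, 5), GraphEdge(5, 2)
    hash(e1) == hash(e2)  # true: both hash the tuple (2, 5)
    isequal(e1, e2)       # true, so both orientations index one entry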
@@ -16,7 +14,7 @@ function sample_mixing_graph!(mixing_graph,population_demographics)
     for i in 1:size(mixing_edges.contact_array)[1], j in 1:i #diagonal
         demo_i = Int(population_demographics[i])
         demo_j = Int(population_demographics[j])
-        rand!(RNG, mixing_edges.weights_distribution_matrix[demo_i,demo_j],mixing_edges.sample_cache[i,j])
+        rand!(RNG, mixing_edges.sampler_matrix[demo_i,demo_j],mixing_edges.sample_cache[i,j])
         for k in 1:length(mixing_edges.contact_array[i,j][1])
             kth_node_i = mixing_edges.contact_array[i,j][1][k]
             kth_node_j = mixing_edges.contact_array[i,j][2][k]
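The renamed field makes the hot loop's pattern explicit: rand!(rng, sampler, cache) fills the preallocated sample_cache in place from a prebuilt sampler, with no per-call allocation. A self-contained sketch of that pattern with an illustrative sampler and size:

    using Random, Distributions
    rng = MersenneTwister(42)
    s = sampler(Poisson(3.0))       # any Sampleable{Univariate,Discrete}
    cache = Vector{Int}(undef, 10)  # reused across calls, like sample_cache[i,j]
    rand!(rng, s, cache)            # in-place draws into the cache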
@@ -32,19 +30,12 @@ struct MixingEdges{M}
     contact_array::Matrix{Tuple{Vector{Int},Vector{Int}}}
     sample_cache::Matrix{Vector{Int}}
     weights_dict::Dictionary{GraphEdge,UInt8}
-    weights_distribution_matrix::M
+    sampler_matrix::M
     function MixingEdges(total_edges,contact_array,sampler_matrix::Matrix{M}) where M<:Sampleable{Univariate, Discrete}
         sample_cache = map(v-> Vector{Int}(undef,length(v[1])),contact_array)
         weights_dict = Dictionary{GraphEdge,UInt8}()
         new{typeof(sampler_matrix)}(total_edges,contact_array,sample_cache,weights_dict,sampler_matrix)
     end
-    function MixingEdges(total_edges,contact_array,weights_distribution_matrix::Matrix{M}) where M<:UnivariateDistribution
-        sample_cache = map(v-> Vector{Int}(undef,length(v[1])),contact_array)
-        weights_dict = Dictionary{GraphEdge,UInt8}()
-        sampler_matrix = map(m -> Distributions.PoissonCountSampler.(m),weights_distribution_matrix)
-        new{typeof(sampler_matrix)}(total_edges,contact_array,sample_cache,weights_dict,sampler_matrix)
-    end
 end
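With the UnivariateDistribution convenience constructor removed, callers now build the sampler matrix up front and pass it in. A hedged sketch of that construction (the λ matrix is illustrative; make_sampler mirrors the helper added at the bottom of this commit):

    using Distributions
    durmax = 143
    make_sampler(λ) = Distributions.DiscreteNonParametricSampler(0:durmax,
        [pdf(Poisson(λ), x) for x in 0:durmax])
    λ_matrix = [2.0 1.0; 1.0 3.0]                 # per-demographic-pair means
    sampler_matrix = map(make_sampler, λ_matrix)  # Matrix{<:Sampleable}, as MixingEdges expects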
 function create_mixing_edges(demographic_index_vectors,mixing_matrix,weights_distribution_matrix)
@@ -59,7 +50,6 @@ function create_mixing_edges(demographic_index_vectors,mixing_matrix,weights_dis
             stubs_i = Vector{Int}(undef,m)
             stubs_j = Vector{Int}(undef,m)
             if m>0
-                # @assert m == sum(num_degrees_ji)
                 sample!(RNG,demographic_index_vectors[i],Weights(num_degrees_ij./m),stubs_i)
                 sample!(RNG,demographic_index_vectors[j],Weights(num_degrees_ji./m),stubs_j)
                 tot += m
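The stub lists are filled by weighted sampling: each node in a demographic is drawn in proportion to its contact degree. A standalone sketch with illustrative values (sample! and Weights are from StatsBase, as in the source):

    using StatsBase, Random
    rng = MersenneTwister(1)
    nodes = [11, 12, 13, 14]    # e.g. demographic_index_vectors[i]
    num_degrees = [2, 0, 1, 1]  # contacts per node
    m = sum(num_degrees)
    stubs = Vector{Int}(undef, m)
    sample!(rng, nodes, Weights(num_degrees ./ m), stubs)  # node 12 is never drawn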
@@ -125,7 +115,7 @@ function remake!(time_dep_mixing_graph,demographic_index_vectors,mixing_matrix)
     for weighted_graph in time_dep_mixing_graph.resampled_graphs
         empty!.(weighted_graph.g.fadjlist) #empty all the vector edgelists
         weighted_graph.g.ne = 0
-        weighted_graph.mixing_edges = create_mixing_edges(demographic_index_vectors,mixing_matrix,weighted_graph.mixing_edges.weights_distribution_matrix)
+        weighted_graph.mixing_edges = create_mixing_edges(demographic_index_vectors,mixing_matrix,weighted_graph.mixing_edges.sampler_matrix)
         graph_from_mixing_edges(weighted_graph.g,weighted_graph.mixing_edges)
     end
 end
...
@@ -108,6 +108,9 @@ function load_contact_time_distributions()
         ws = "ws",
         rest = "rest"
     )
+    function make_sampler(λ) #our samples can only be in 0:143 so we can fake a poisson distribution by explicitly sampling
+        return Distributions.DiscreteNonParametricSampler(0:durmax,[pdf(Poisson(λ),x) for x in 0:durmax])
+    end
     contact_distributions_tuple = map(fnames) do fname
         dat = deserialize(joinpath(PACKAGE_FOLDER,"intervals_model_output","simulation_output","$fname.dat"))
         return map(p -> Poisson(mode(p.particles)), as_symmetric_matrix(dat[distkey].P))
...
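A quick standalone check (not part of the commit) that truncating to 0:durmax barely distorts the Poisson for moderate λ; the alias table normalizes the tabulated weights, so the tail mass above durmax is simply dropped:

    using Distributions, Statistics
    durmax, λ = 143, 5.0
    probs = [pdf(Poisson(λ), x) for x in 0:durmax]
    s = Distributions.DiscreteNonParametricSampler(0:durmax, probs)
    draws = [rand(s) for _ in 1:100_000]
    (mean(draws), λ)  # nearly equal: Poisson mass above 143 is vanishingly small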