Commit 915acb07 authored by Peter Jentsch

pursuing the platonic ideal of ABM through relentless refactoring and overoptimization

parent 9a071e76
@inline function reindex!(k,csum,index_list_i,index_list_j,j_to_i_contacts,i_to_j_contacts,sample_list_i,sample_list_j)
i_index = index_list_i[k]
j_index = index_list_j[k]
csum += j_to_i_contacts[j_index] - i_to_j_contacts[i_index] + sample_list_i[k] - sample_list_j[k] #incrementally update the difference of stub totals for the two swapped entries
i_to_j_contacts[i_index] = sample_list_i[k]
j_to_i_contacts[j_index] = sample_list_j[k]
return csum
end
function generate_contact_vectors!(ij_dist,ji_dist,i_to_j_contacts::Vector{T}, j_to_i_contacts::Vector{T}) where T
rand!(RNG,ij_dist,i_to_j_contacts)
rand!(RNG,ji_dist,j_to_i_contacts)
l_i = length(i_to_j_contacts)
l_j = length(j_to_i_contacts)
csum = sum(i_to_j_contacts) - sum(j_to_i_contacts) #difference between the two stub totals; the loop below drives this to zero
inner_iter = 10 #number of entries redrawn per batch
index_list_i = @MVector zeros(Int,inner_iter)
index_list_j = similar(index_list_i)
sample_list_i = @MVector zeros(T,inner_iter)
sample_list_j = similar(sample_list_i)
while csum != 0
sample!(RNG,1:l_i,index_list_i)
sample!(RNG,1:l_j,index_list_j)
rand!(RNG,ij_dist,sample_list_i)
rand!(RNG,ji_dist,sample_list_j)
@inbounds for i = 1:inner_iter
if csum != 0
csum = reindex!(i,csum,index_list_i,index_list_j,j_to_i_contacts,i_to_j_contacts,sample_list_i,sample_list_j)
end
end
end
return nothing
end
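A rough, self-contained sketch of the balancing idea in generate_contact_vectors! (the Poisson distributions, group sizes, and RNG seed below are illustrative assumptions, not values from this commit): each side of a bipartite demographic block draws one contact degree per node, and random entries are redrawn until the two totals agree, so that every contact stub can later be paired with one on the other side.

using Random, Distributions
rng = Xoshiro(1)
ij_dist, ji_dist = Poisson(3.0), Poisson(2.0) #hypothetical contact-degree distributions
i_to_j = rand(rng, ij_dist, 50)               #degrees of group-i nodes toward group j
j_to_i = rand(rng, ji_dist, 75)               #degrees of group-j nodes toward group i
while sum(i_to_j) != sum(j_to_i)              #totals must match before stubs are paired
    i_to_j[rand(rng, 1:length(i_to_j))] = rand(rng, ij_dist)
    j_to_i[rand(rng, 1:length(j_to_i))] = rand(rng, ji_dist)
end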
using Dictionaries
struct GraphEdge
a::Int
b::Int
end
function Base.hash(e::GraphEdge)
# display(e)
return hash(minmax(e.a,e.b))
end
function Base.isequal(e1::GraphEdge,e2::GraphEdge)
return isequal(minmax(e1.a,e1.b),minmax(e2.a,e2.b))
end
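Since both hash and isequal canonicalize the endpoints with minmax, a GraphEdge behaves as an undirected key; a quick sanity check with arbitrary node ids:

e1, e2 = GraphEdge(3, 7), GraphEdge(7, 3)
@assert isequal(e1, e2)      #same undirected edge
@assert hash(e1) == hash(e2) #matching hashes, so either orientation can address the same weights_dict entry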
function sample_mixing_graph!(mixing_graph,population_demographics)
mixing_edges = mixing_graph.mixing_edges
for i in 1:size(mixing_edges.contact_array)[1], j in 1:i #diagonal
demo_i = Int(population_demographics[i])
demo_j = Int(population_demographics[j])
rand!(RNG, mixing_edges.weights_distribution_matrix[demo_i,demo_j],mixing_edges.sample_cache[i,j])
for k in 1:length(mixing_edges.contact_array[i,j][1])
kth_node_i = mixing_edges.contact_array[i,j][1][k]
kth_node_j = mixing_edges.contact_array[i,j][2][k]
edge_weight_k = mixing_edges.sample_cache[i,j][k]
set!(mixing_edges.weights_dict,GraphEdge(kth_node_i,kth_node_j), edge_weight_k)
end
end
end
#A type that defines a matrix of vectors, such that the ijth vector contains the nodes to connect to the jith vector, a la Chung-Lu bipartite (see the stub-matching sketch after the constructors below)
struct MixingEdges
struct MixingEdges{M}
total_edges::Int
contact_array::Matrix{Tuple{Vector{Int},Vector{Int}}}
function MixingEdges(demographic_index_vectors,mixing_matrix)
contact_array =[(Vector{Int}(),Vector{Int}()) for i in 1:length(demographic_index_vectors),j in 1:length(demographic_index_vectors)]
tot = 0
for i in 1:size(mixing_matrix)[1], j in 1:i #diagonal
num_degrees_ij = zeros(Int,length(demographic_index_vectors[i]))
num_degrees_ji = zeros(Int,length(demographic_index_vectors[j]))
generate_contact_vectors!(mixing_matrix[i,j],mixing_matrix[j,i],num_degrees_ij,num_degrees_ji)
sample_cache::Matrix{Vector{Int}}
weights_dict::Dictionary{GraphEdge,UInt8}
weights_distribution_matrix::M
function MixingEdges(total_edges,contact_array,sampler_matrix::Matrix{M}) where M<:Sampleable{Univariate, Discrete}
sample_cache = map(v-> Vector{Int}(undef,length(v[1])),contact_array)
weights_dict = Dictionary{GraphEdge,UInt8}()
new{typeof(sampler_matrix)}(total_edges,contact_array,sample_cache,weights_dict,sampler_matrix)
end
function MixingEdges(total_edges,contact_array,weights_distribution_matrix::Matrix{M}) where M<:UnivariateDistribution
sample_cache = map(v-> Vector{Int}(undef,length(v[1])),contact_array)
weights_dict = Dictionary{GraphEdge,UInt8}()
sampler_matrix = map(m -> Distributions.PoissonCountSampler.(m),weights_distribution_matrix)
new{typeof(sampler_matrix)}(total_edges,contact_array,sample_cache,weights_dict,sampler_matrix)
end
end
m = sum(num_degrees_ij)
stubs_i = Vector{Int}(undef,m)
stubs_j = Vector{Int}(undef,m)
if m>0
# @assert m == sum(num_degrees_ji)
rand!(RNG,DiscreteNonParametric(demographic_index_vectors[i],num_degrees_ij./m),stubs_i)
rand!(RNG,DiscreteNonParametric(demographic_index_vectors[j],num_degrees_ji./m),stubs_j)
tot += m
end
contact_array[j,i] = (stubs_i,stubs_j)
end
function create_mixing_edges(demographic_index_vectors,mixing_matrix,weights_distribution_matrix)
contact_array =[(Vector{Int}(),Vector{Int}()) for i in 1:length(demographic_index_vectors),j in 1:length(demographic_index_vectors)]
tot = 0
for i in 1:size(mixing_matrix)[1], j in 1:i #diagonal
num_degrees_ij = zeros(Int,length(demographic_index_vectors[i]))
num_degrees_ji = zeros(Int,length(demographic_index_vectors[j]))
generate_contact_vectors!(mixing_matrix[i,j],mixing_matrix[j,i],num_degrees_ij,num_degrees_ji)
new(tot,contact_array)
m = sum(num_degrees_ij)
stubs_i = Vector{Int}(undef,m)
stubs_j = Vector{Int}(undef,m)
if m>0
# @assert m == sum(num_degrees_ji)
sample!(RNG,demographic_index_vectors[i],Weights(num_degrees_ij./m),stubs_i)
sample!(RNG,demographic_index_vectors[j],Weights(num_degrees_ji./m),stubs_j)
tot += m
end
contact_array[j,i] = (stubs_i,stubs_j)
end
function MixingEdges(g::SimpleGraph,demographics,demographic_index_vectors)
contact_array = [(Vector{Int}(),Vector{Int}()) for i in 1:length(demographic_index_vectors),j in 1:length(demographic_index_vectors)]
for e in edges(g)
i = src(e)
j = dst(e)
push!(contact_array[Int(demographics[j]),Int(demographics[i])][1],i)
push!(contact_array[Int(demographics[j]),Int(demographics[i])][2],j)
end
# @show ne(g)
new(ne(g),contact_array)
return MixingEdges(tot,contact_array,weights_distribution_matrix)
end
function create_mixing_edges(g::SimpleGraph,demographics,demographic_index_vectors,weights_distribution_matrix)
contact_array = [(Vector{Int}(),Vector{Int}()) for i in 1:length(demographic_index_vectors),j in 1:length(demographic_index_vectors)]
for e in edges(g)
i = src(e)
j = dst(e)
push!(contact_array[Int(demographics[j]),Int(demographics[i])][1],i)
push!(contact_array[Int(demographics[j]),Int(demographics[i])][2],j)
end
sample_cache = map(v-> Vector{Int}(undef,length(v[1])),contact_array)
return MixingEdges(ne(g),contact_array,weights_distribution_matrix)
end
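A self-contained sketch of the stub-matching step used when MixingEdges are built from a mixing matrix (node ids, degree vectors, and the RNG below are made up for illustration): stub owners on each side are drawn with replacement in proportion to their generated degrees, and the k-th i-side draw is wired to the k-th j-side draw, which is the pairing stored in contact_array[j,i].

using Random, StatsBase
rng = Xoshiro(1)
nodes_i, nodes_j = collect(1:5), collect(101:108)      #hypothetical node ids for two demographic groups
deg_i = [2, 0, 1, 3, 1]                                #contact degrees of group-i nodes toward group j (total 7)
deg_j = [1, 1, 0, 2, 1, 0, 1, 1]                       #contact degrees of group-j nodes toward group i (total 7)
m = sum(deg_i)
stubs_i = sample(rng, nodes_i, Weights(deg_i ./ m), m) #owner of each i-side stub
stubs_j = sample(rng, nodes_j, Weights(deg_j ./ m), m) #owner of each j-side stub
contact_pairs = collect(zip(stubs_i, stubs_j))         #the k-th pair becomes one contact edge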
struct TimeDepMixingGraph{N,G}
@@ -47,7 +91,6 @@ struct TimeDepMixingGraph{N,G}
)
end
end
function time_dep_mixing_graphs(len,base_network,demographics,index_vectors,ws_matrix_tuple,rest_matrix_tuple)
home_static_edges = WeightedGraph(base_network,demographics,index_vectors,contact_time_distributions.hh) #network with households and LTC homes
ws_static_edges = WeightedGraph(demographics,index_vectors,ws_matrix_tuple.daily,contact_time_distributions.ws)
@@ -81,11 +124,9 @@ end
function remake!(time_dep_mixing_graph,demographic_index_vectors,mixing_matrix)
for weighted_graph in time_dep_mixing_graph.resampled_graphs
empty!.(weighted_graph.g.fadjlist) #empty all the vector edgelists
empty!(weighted_graph.weights_dict)
weighted_graph.g.ne = 0
weighted_graph.mixing_edges = MixingEdges(demographic_index_vectors,mixing_matrix)
weighted_graph.mixing_edges = create_mixing_edges(demographic_index_vectors,mixing_matrix,weighted_graph.mixing_edges.weights_distribution_matrix)
graph_from_mixing_edges(weighted_graph.g,weighted_graph.mixing_edges)
weighted_graph.sample_cache = map(v-> Vector{Int}(undef,length(v[1])),weighted_graph.mixing_edges.contact_array)
end
end
@@ -96,98 +137,31 @@ function graph_from_mixing_edges(g,mixing_edges)
end
end
end
#defining my own weighted graph type because we need to be able to resample the edge weights in a very particular way (see the resampling sketch after get_weight below)
mutable struct WeightedGraph{G, V,M}
mutable struct WeightedGraph{G,M}
g::G
weights_dict::V
weights_distribution_matrix::M
mixing_edges::MixingEdges
sample_cache::Matrix{Vector{Int}}
mixing_edges::M
function WeightedGraph(demographics,demographic_index_vectors,mixing_matrix,weights_distribution_matrix)
mixing_edges = MixingEdges(demographic_index_vectors,mixing_matrix)
mixing_edges = create_mixing_edges(demographic_index_vectors,mixing_matrix,weights_distribution_matrix)
g = Graph(length(demographics))
graph_from_mixing_edges(g,mixing_edges)
weights_dict = RobinDict{Tuple{Int,Int},UInt8}()
sampler_matrix = map(m -> Distributions.PoissonCountSampler.(m),weights_distribution_matrix)
sample_cache = map(v-> Vector{Int}(undef,length(v[1])),mixing_edges.contact_array)
return new{typeof(g),typeof(weights_dict),typeof(sampler_matrix)}(
return new{typeof(g),typeof(mixing_edges)}(
g,
weights_dict,
sampler_matrix,
mixing_edges,
sample_cache
mixing_edges
)
end
function WeightedGraph(g::SimpleGraph,demographics,demographic_index_vectors,weights_distribution_matrix)
mixing_edges = MixingEdges(g, demographics, demographic_index_vectors)
weights_dict = RobinDict{Tuple{Int,Int},UInt8}()
sampler_matrix = map(m -> Distributions.PoissonCountSampler.(m),weights_distribution_matrix)
# @show map(t->length.(t), mixing_edges.contact_array)
mixing_edges = create_mixing_edges(g, demographics, demographic_index_vectors,weights_distribution_matrix)
weights_dict = Dict{GraphEdge,UInt8}()
sample_cache = map(v-> Vector{Int}(undef,length(v[1])),mixing_edges.contact_array)
return new{typeof(g),typeof(weights_dict),typeof(sampler_matrix)}(
return new{typeof(g),typeof(mixing_edges)}(
g,
weights_dict,
sampler_matrix,
mixing_edges,
sample_cache
)
end
end
get_weight(g::WeightedGraph,e) = g.mixing_edges.weights_dict[e]
function Base.show(io::IO, g::WeightedGraph)
print(io, "WG $(ne(g.g))")
end
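The per-edge weights live in the MixingEdges' Dictionaries.jl Dictionary keyed by GraphEdge, and sample_mixing_graph!/remake! redraw them each time the graph is resampled. A minimal, hypothetical sketch of that resampling pattern, reusing the GraphEdge type from above (the edge list, Poisson mean, and seed are invented, and a plain Poisson draw stands in for the PoissonCountSampler matrix the real constructor builds):

using Random, Distributions, Dictionaries
rng = Xoshiro(1)
edge_list = [GraphEdge(1, 2), GraphEdge(2, 3), GraphEdge(1, 3)]    #hypothetical contacts
weights = Dictionary{GraphEdge,UInt8}()
for e in edge_list
    set!(weights, e, UInt8(min(rand(rng, Poisson(5.0)), 255)))     #redraw a contact time per edge, clamped to the UInt8 range
end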
function sample_mixing_graph!(mixing_graph,population_demographics)
@unpack mixing_edges = mixing_graph
for i in 1:size(mixing_edges.contact_array)[1], j in 1:i #diagonal
demo_i = Int(population_demographics[i])
demo_j = Int(population_demographics[j])
rand!(RNG, mixing_graph.weights_distribution_matrix[demo_i,demo_j],mixing_graph.sample_cache[i,j])
for k in 1:length(mixing_edges.contact_array[i,j][1])
kth_node_i = mixing_edges.contact_array[i,j][1][k]
kth_node_j = mixing_edges.contact_array[i,j][2][k]
edge_weight_k = mixing_graph.sample_cache[i,j][k]
mixing_graph.weights_dict[(kth_node_i,kth_node_j)] = edge_weight_k
mixing_graph.weights_dict[(kth_node_j,kth_node_i)] = edge_weight_k
end
end
end
@inline function reindex!(k,csum,index_list_i,index_list_j,j_to_i_contacts,i_to_j_contacts,sample_list_i,sample_list_j)
i_index = index_list_i[k]
j_index = index_list_j[k]
csum += j_to_i_contacts[j_index] - i_to_j_contacts[i_index] + sample_list_i[k] - sample_list_j[k]
i_to_j_contacts[i_index] = sample_list_i[k]
j_to_i_contacts[j_index] = sample_list_j[k]
return csum
end
using StaticArrays
function generate_contact_vectors!(ij_dist,ji_dist,i_to_j_contacts::Vector{T}, j_to_i_contacts::Vector{T}) where T
rand!(RNG,ij_dist,i_to_j_contacts)
rand!(RNG,ji_dist,j_to_i_contacts)
l_i = length(i_to_j_contacts)
l_j = length(j_to_i_contacts)
csum = sum(i_to_j_contacts) - sum(j_to_i_contacts)
inner_iter = 10
index_list_i = @MVector zeros(Int,inner_iter)
index_list_j = similar(index_list_i)
sample_list_i = @MVector zeros(T,inner_iter)
sample_list_j = similar(sample_list_i)
while csum != 0
sample!(RNG,1:l_i,index_list_i)
sample!(RNG,1:l_j,index_list_j)
rand!(RNG,ij_dist,sample_list_i)
rand!(RNG,ji_dist,sample_list_j)
@inbounds for i = 1:inner_iter
if csum != 0
csum = reindex!(i,csum,index_list_i,index_list_j,j_to_i_contacts,i_to_j_contacts,sample_list_i,sample_list_j)
end
end
end
return nothing
end
\ No newline at end of file
@@ -69,7 +69,7 @@ struct ModelSolution{T,InfNet,SocNet,WSMixingDist,RestMixingDist}
infected_mixing_graph,soc_mixing_graph = time_dep_mixing_graphs(sim_length,base_network,demographics,index_vectors,ws_matrix_tuple,rest_matrix_tuple)
covid_alert_times = zeros(Int,length(app_user_index),14) #two weeks worth of values
covid_alert_times = zeros(Int,14,length(app_user_index)) #two weeks worth of values
time_of_last_alert = fill(-1,length(app_user_index)) #time of last alert is negative if no alert has been received
status_totals = [count(==(AgentStatus(i)), u_0_inf) for i in 1:AgentStatus.size]
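The flipped covid_alert_times dimensions (14 × app users instead of app users × 14) presumably line up with Julia's column-major array layout: each app user's two-week window is now one contiguous column, so the daily shift and the window sum in update_alert_durations! touch adjacent memory. A small illustrative sketch with an arbitrary user count:

A = zeros(Int, 14, 1_000)    #one 14-entry column per app user
user_window = @view A[:, 17] #a single user's window is a contiguous slice
total = sum(user_window)     #column reductions walk adjacent memory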
......
@@ -5,29 +5,27 @@ end
function update_alert_durations!(t,modelsol)
@unpack notification_parameter = modelsol.params
@unpack time_of_last_alert, app_user_index,inf_network,covid_alert_times,app_user = modelsol
for (i,node) in enumerate(modelsol.app_user_index), mixing_graph in modelsol.inf_network.graph_list[t]
for j in 2:14
covid_alert_times[i,j-1] = covid_alert_times[i,j] #shift them all back
covid_alert_times[j-1,i] = covid_alert_times[j,i] #shift them all back
end
for j in neighbors(mixing_graph.g,node)
if app_user[j]
covid_alert_times[i,end] += mixing_graph.weights_dict[(node,j)] #add the contact times for today to the back
covid_alert_times[end,i] += get_weight(mixing_graph,GraphEdge(node,j)) #add the contact times for today to the back
end
end
if rand(RNG) < 1 - (1- notification_parameter)^sum(covid_alert_times[i,:])
if rand(RNG) < 1 - (1- notification_parameter)^sum(covid_alert_times[:,i])
time_of_last_alert[i] = t
end
end
end
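One way to read the alert rule above (an interpretation, not something stated in the commit): if each unit of accumulated contact time with app users independently triggers a notification with probability notification_parameter, then 1 - (1 - p)^total is the chance of at least one trigger over the two-week window, which is what the rand(RNG) comparison draws against. A worked example with made-up numbers:

p = 0.01                                    #hypothetical notification_parameter
total_contact_time = 120                    #sum of one user's 14-day covid_alert_times column
prob_alert = 1 - (1 - p)^total_contact_time #≈ 0.70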
function update_infection_state!(t,modelsol)
@unpack base_transmission_probability,immunization_loss_prob,recovery_rate = modelsol.params
@unpack u_inf,u_vac,u_next_inf,u_next_vac,demographics,inf_network,status_totals = modelsol
function agent_transition!(node, from::AgentStatus,to::AgentStatus)
# @show ((from,to))
status_totals[Int(from)] -= 1
status_totals[Int(to)] += 1
u_next_inf[node] = to
@@ -45,7 +43,7 @@ function update_infection_state!(t,modelsol)
for mixing_graph in inf_network.graph_list[t]
for j in neighbors(mixing_graph.g,i)
if u_inf[j] == Infected && u_next_inf[i] != Infected
if rand(RNG) < contact_weight(base_transmission_probability,mixing_graph.weights_dict[(i,j)])
if rand(RNG) < contact_weight(base_transmission_probability,get_weight(mixing_graph,GraphEdge(i,j)))
agent_transition!(i, Susceptible,Infected)
......
@@ -24,6 +24,7 @@ using KissABC
using CSV
import Pandas: read_csv
using DataFrames
using StaticArrays
export intervalsmodel, hh, ws, rest, abm
const DNDEBUG = false
@@ -41,6 +42,7 @@ const RNG = Xoroshiro128Star(1)
const color_palette = palette(:seaborn_pastel) #color theme for the plots
include("utils.jl")
include("data.jl")
include("ABM/contact_vectors.jl")
include("ABM/mixing_distributions.jl")
include("ABM/mixing_graphs.jl")
include("ABM/agents.jl")
......