Commit a58e410f authored by Peter Jentsch

more performance improvements to network generation, contact distributions now depend on google mobility data
parent e27ac728
using Distributions
using Random
using RandomNumbers.Xorshifts
using BenchmarkTools

#zero-weighted distribution: returns 0 with probability α, otherwise draws from base_dist
struct ZWDist{BaseDistType,T} <: Sampleable{Univariate,Discrete}
    α::T
    base_dist::BaseDistType
end

#variant 1: zero out via logical indexing, then fill the surviving slots through a view
function r1!(rng::AbstractRNG, s::ZWDist, l::T) where T<:AbstractVector
    Random.rand!(rng, l)
    l[l .< s.α] .= 0
    Random.rand!(rng, s.base_dist, @view l[l .>= s.α])
    return nothing
end

#variant 2: broadcast a boolean mask against a freshly sampled vector
function r2!(rng::AbstractRNG, s::ZWDist, l::T) where T<:AbstractVector
    rand!(rng, l)
    l .= (l .>= s.α) .* rand(rng, s.base_dist, length(l))
end

#variant 3: explicit loop over a temporary vector of base_dist samples
function r3!(rng::AbstractRNG, s::ZWDist, l::T) where T<:AbstractVector
    rand!(rng, l)
    tmp = rand(rng, s.base_dist, length(l))
    @inbounds for i in eachindex(l)
        l[i] = (l[i] >= s.α) * tmp[i]
    end
    nothing
end

#variant 4: build the mask once and reuse it for both the zero fill and the base_dist fill
@views function r4!(rng::AbstractRNG, s::ZWDist, l::T) where T<:AbstractVector
    rand!(rng, l)
    mask = l .< s.α
    fill!(l[mask], zero(eltype(l)))
    mask .= .!mask
    rand!(rng, s.base_dist, l[mask])
    return nothing
end

l = zeros(1000)
r = Xoroshiro128Plus(1)
s = ZWDist(0.5, Poisson())
# @btime r1!(r,s,l);
# @btime r2!(r,s,l);
# @btime r3!(r,s,l);
# @btime r4!(r,s,l);

function test()
    s = ZWDist(0.5, Poisson())
    res = Matrix{Float64}(undef, 4, 5)
    for (i, size) in enumerate([10, 100, 1000, 10_000, 100_000])
        l = zeros(size)
        #interpolate the locals so @benchmark does not silently pick up the globals defined above
        r = Xoroshiro128Plus(1)
        res[1, i] = mean(@benchmark r1!($r, $s, $l)) |> time
        r = Xoroshiro128Plus(1)
        res[2, i] = mean(@benchmark r2!($r, $s, $l)) |> time
        r = Xoroshiro128Plus(1)
        res[3, i] = mean(@benchmark r3!($r, $s, $l)) |> time
        r = Xoroshiro128Plus(1)
        res[4, i] = mean(@benchmark r4!($r, $s, $l)) |> time
    end
    res
end
\ No newline at end of file
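All four variants implement the same zero-weighted sampling rule, so they should agree in distribution; in particular the sample mean approaches (1 - α) * mean(base_dist). A minimal check, not part of the commit, assuming the file above has been included:

rng = Xoroshiro128Plus(42)
d = ZWDist(0.5, Poisson(2.0))
buf = zeros(100_000)
r1!(rng, d, buf)                                        #or r2!/r3!/r4!, which should agree in distribution
abs(mean(buf) - (1 - d.α) * mean(d.base_dist)) < 0.05   #sample mean ≈ 0.5 * 2.0 = 1.0 up to sampling noise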
File moved
@@ -17,6 +17,7 @@ using NamedTupleTools
using NetworkLayout:Stress
using NetworkLayout:SFDP
import Base.rand
import Random.rand!
import LightGraphs.add_edge!
include("agents.jl")
@@ -26,6 +27,7 @@ include("model.jl")
include("plotting.jl")
include("graphs.jl")
const RNG = Xoroshiro128Star(1)
const population = 14.57e6 #population of ontario
const color_palette = palette(:seaborn_bright) #color theme for the plots
@@ -39,12 +41,12 @@ default(dpi = 300)
default(framestyle = :box)
using BenchmarkTools
function main()
rng = Xoroshiro128Star(1)
agent_model = AgentModel(rng,5000,2)
agent_model = AgentModel(5000,2)
# display(size.(agent_model.demographic_index_vectors))
u_0 = get_u_0(rng,length(agent_model.demographics))
steps = 10
@btime solve!($rng,$u_0,$get_parameters(),$steps,$agent_model,$vaccinate_uniformly!);
u_0 = get_u_0(length(agent_model.demographics))
steps = 300
@btime solve!($u_0,$get_parameters(),$steps,$agent_model,$vaccinate_uniformly!);
# solve!(u_0,get_parameters(),steps,agent_model,vaccinate_uniformly!);
# plot_model(agent_model.base_network,graphs,sol1)
end
@@ -15,17 +15,45 @@ end
end
struct AgentModel{GraphType,DistMatrix1,DistMatrix2}
struct AgentModel{GraphType,DistMatrixList1,DistMatrixList2,DistMatrixList3}
demographics::Vector{AgentDemographic}
demographic_index_vectors::Vector{Vector{Int}}
base_network::GraphType
workschool_contacts_mean_adjusted::DistMatrix1
rest_contacts_mean_adjusted::DistMatrix2
function AgentModel(rng,num_households,num_ltc)
pop_list,base_network,index_vectors = generate_population(rng,num_households,num_ltc)
workschool_contacts_mean_adjusted = symmetrize_means(length.(index_vectors),initial_workschool_mixing_matrix)
rest_contacts_mean_adjusted = symmetrize_means(length.(index_vectors),initial_rest_mixing_matrix)
return new{typeof(base_network),typeof(workschool_contacts_mean_adjusted),typeof(rest_contacts_mean_adjusted)}(pop_list,index_vectors,base_network,workschool_contacts_mean_adjusted,rest_contacts_mean_adjusted)
workschool_contacts_mean_adjusted_list::DistMatrixList1
rest_contacts_mean_adjusted_list::DistMatrixList2
home_contacts_duration_list::DistMatrixList3
function AgentModel(num_households,num_ltc)
pop_list,base_network,index_vectors = generate_population(num_households,num_ltc)
pop_sizes = length.(index_vectors)
workschool_contacts_mean_adjusted_list = [symmetrize_means(pop_sizes,initial_workschool_mixing_matrix)]
rest_contacts_mean_adjusted_list = [symmetrize_means(pop_sizes,initial_rest_mixing_matrix)]
home_contacts_duration_list = [symmetrize_means(pop_sizes,contact_time_distribution_matrix)]
workplace_data, distancing_data,residential_data = get_distancing_data_sets()
@assert length(workplace_data) == length(distancing_data) == length(residential_data) "uh oh your data is different lengths!"
for t in 1:length(workplace_data)
push!(workschool_contacts_mean_adjusted_list, adjust_distributions_mean(workschool_contacts_mean_adjusted_list[1], workplace_data[t]))
push!(rest_contacts_mean_adjusted_list, adjust_distributions_mean(rest_contacts_mean_adjusted_list[1], distancing_data[t]))
push!(home_contacts_duration_list, adjust_distributions_mean(home_contacts_duration_list[1], residential_data[t]))
end
return new{
typeof(base_network),
typeof(workschool_contacts_mean_adjusted_list),
typeof(rest_contacts_mean_adjusted_list),
typeof(home_contacts_duration_list)
}(
pop_list,
index_vectors,
base_network,
workschool_contacts_mean_adjusted_list,
rest_contacts_mean_adjusted_list,
home_contacts_duration_list
)
end
end
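Reading the constructor above: index 1 of each *_list holds the baseline symmetrized distributions, and each subsequent entry holds the baseline with every mean rescaled by one day of the corresponding Google mobility series (workplaces for workschool contacts, retail/recreation for rest contacts, residential for home contact durations). A hedged illustration of that indexing, assuming the package and its mobility CSV are loaded; this is not code from the commit:

model = AgentModel(1000, 2)
workplace_data, distancing_data, residential_data = get_distancing_data_sets()
t = 10                                                        #day 10 of the mobility series
baseline = model.workschool_contacts_mean_adjusted_list[1]    #pre-adjustment distributions
day_t    = model.workschool_contacts_mean_adjusted_list[t + 1]
#mean(day_t[i, j]) ≈ mean(baseline[i, j]) * (1 + workplace_data[t]),
#up to how from_mean rounds the target mean into distribution parameters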
@@ -10,7 +10,15 @@ function get_canada_demographic_distribution()::Vector{Float64}
source_bins = map(parse_string_as_float_pair, df[:,:demographic_data_bins])
return [sum(binned_data[1:5]),sum(binned_data[6:13]),sum(binned_data[14:end])]
end
function get_distancing_data_sets()::NTuple{3,Vector{Float64}}
f = readdlm(joinpath(PACKAGE_FOLDER,"data/csv/distancing_data.csv"), ',')
df = filter(row -> row[:country_region_code] == "CA" && row[:sub_region_1] == "Ontario", DataFrame([f[1,i] => f[2:end, i] for i = 1:length(f[1,:])]))
distancing_data = df[:,:retail_and_recreation_percent_change_from_baseline] ./ 100
workplace_data = df[:,:workplaces_percent_change_from_baseline] ./ 100
residential_data = df[:,:residential_percent_change_from_baseline] ./ 100
return (workplace_data, distancing_data, residential_data)
end
function get_canada_case_fatality()::Tuple{Vector{Tuple{Float64,Float64}},Vector{Float64}}
f = readdlm(joinpath(PACKAGE_FOLDER,"data/csv/case_fatality_data.csv"), ',')
@@ -35,12 +43,12 @@ function find_household_composition(df_row)
age_distribution[age_resp_to_bin[df_row[:AGERESP]]] += 1
return age_distribution
end
function sample_household_data(rng,n)
function sample_household_data(n)
f = readdlm(joinpath(PACKAGE_FOLDER,"data/csv/home_compositions.csv"), ',')
df = DataFrame([f[1,i] => f[2:end, i] for i = 1:length(f[1,:])])
weight_vector::Vector{Float64} = df[!,:WGHT_PER]/sum(df[!,:WGHT_PER])
households = map(find_household_composition,eachrow(df))
return sample(rng,households, Weights(weight_vector),n)
return sample(RNG,households, Weights(weight_vector),n)
# https://www.publichealthontario.ca/-/media/documents/ncov/epi/covid-19-severe-outcomes-ontario-epi-summary.pdf?la=en
end
#add a bipartite graph derived from mixing matrices onto g
function generate_mixing_graph!(rng,g,index_vectors,mixing_matrix)
function generate_mixing_graph!(g,index_vectors,mixing_matrix)
for i in 1:5, j in 1:i #diagonal
agent_contact_dist_i = mixing_matrix[i,j]
agent_contact_dist_j = mixing_matrix[j,i]
i_to_j_contacts = rand(rng,agent_contact_dist_i,length(index_vectors[i]))
j_to_i_contacts = rand(rng,agent_contact_dist_j,length(index_vectors[j]))
# equalize_degree_lists!(rng,i_to_j_contacts,j_to_i_contacts)
l_i = length(index_vectors[i])
l_j = length(index_vectors[j])
i_to_j_contacts = rand(RNG,agent_contact_dist_i,l_i)
j_to_i_contacts = rand(RNG,agent_contact_dist_j,l_j)
contacts_sums = sum(i_to_j_contacts) - sum(j_to_i_contacts)
while contacts_sums != 0
i_index = sample(rng,1:length(index_vectors[i]))
j_index = sample(rng,1:length(index_vectors[j]))
contacts_sums -= i_to_j_contacts[i_index]
contacts_sums += j_to_i_contacts[j_index]
i_to_j_contacts[i_index] = rand(rng,agent_contact_dist_i)
j_to_i_contacts[j_index] = rand(rng,agent_contact_dist_j)
contacts_sums += i_to_j_contacts[i_index]
contacts_sums -= j_to_i_contacts[j_index]
# display((mean(agent_contact_dist_i)*l_i,mean(agent_contact_dist_j)*l_j))
sample_list_length = max(l_j,l_i) #better heuristic for this based on stddev of dist?
index_list_i = sample(RNG,1:l_i,sample_list_length)
index_list_j = sample(RNG,1:l_j,sample_list_length)
sample_list_i = rand(RNG,agent_contact_dist_i,sample_list_length)
sample_list_j = rand(RNG,agent_contact_dist_j,sample_list_length)
for k = 1:sample_list_length
if (contacts_sums != 0)
i_index = index_list_i[k]
j_index = index_list_j[k]
contacts_sums += j_to_i_contacts[j_index] - i_to_j_contacts[i_index]
i_to_j_contacts[i_index] = sample_list_i[k]
j_to_i_contacts[j_index] = sample_list_j[k]
contacts_sums += i_to_j_contacts[i_index] - j_to_i_contacts[j_index]
else
break
end
end
while contacts_sums != 0
i_index = sample(RNG,1:l_i)
j_index = sample(RNG,1:l_j)
contacts_sums += j_to_i_contacts[j_index] - i_to_j_contacts[i_index]
i_to_j_contacts[i_index] = rand(RNG,agent_contact_dist_i)
j_to_i_contacts[j_index] = rand(RNG,agent_contact_dist_j)
contacts_sums += i_to_j_contacts[i_index] - j_to_i_contacts[j_index]
end
# random_bipartite_graph_fast_CL!(rng,g,index_vectors[i],index_vectors[j],i_to_j_contacts,j_to_i_contacts)
random_bipartite_graph_fast_CL!(g,index_vectors[i],index_vectors[j],i_to_j_contacts,j_to_i_contacts)
end
return g
end
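The two resampling loops above exist to equalize the total number of contact stubs on each side, which the Chung-Lu construction below requires (its degree sequences must have equal sums). A stripped-down sketch of that balancing step, using a hypothetical balance_sums! helper rather than the function above; illustrative only:

using Distributions
using RandomNumbers.Xorshifts

#resample random entries of a and b until their sums agree
function balance_sums!(rng, a, b, dist_a, dist_b)
    diff = sum(a) - sum(b)
    while diff != 0
        i, j = rand(rng, 1:length(a)), rand(rng, 1:length(b))
        diff -= a[i] - b[j]          #remove the old contributions
        a[i] = rand(rng, dist_a)
        b[j] = rand(rng, dist_b)
        diff += a[i] - b[j]          #add the resampled contributions
    end
    return a, b
end

rng = Xoroshiro128Plus(1)
a = rand(rng, Poisson(4.0), 50)      #expected stub total 200
b = rand(rng, Poisson(2.0), 100)     #expected stub total 200
balance_sums!(rng, a, b, Poisson(4.0), Poisson(2.0))
sum(a) == sum(b)                     #true once the loop exits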
# function searchsortedfirst(l,things_to_find::T) where T<:AbstractVector
# end
#modify g so that nodes specified in anodes and bnodes are connected by a bipartite graph with expected degrees given by aseq and bseq
#implemented from Aksoy, S. G., Kolda, T. G., & Pinar, A. (2017). Measuring and modeling bipartite graphs with community structure
#simple algorithm, might produce parallel edges for small graphs
function random_bipartite_graph_fast_CL!(rng,g,anodes,bnodes,aseq,bseq)
using StatsBase
function random_bipartite_graph_fast_CL!(g,anodes,bnodes,aseq,bseq)
lena = length(aseq)
lenb = length(bseq)
m = sum(aseq)
m = Int(sum(aseq))
@assert sum(aseq) == sum(bseq) "degree sequences must have equal sum"
astubs = sample(rng,anodes,StatsBase.weights(aseq./m), m; replace = true)
bstubs = sample(rng,bnodes,StatsBase.weights(bseq./m), m; replace = true)
astubs = sample(RNG,anodes,StatsBase.weights(aseq./m), m)
bstubs = sample(RNG,bnodes,StatsBase.weights(bseq./m), m)
for k in 1:m
add_edge!(g,astubs[k],bstubs[k])
end
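For orientation (the function's closing end sits in the collapsed region below), a hedged standalone usage sketch of random_bipartite_graph_fast_CL!, assuming the package's global RNG and its LightGraphs/StatsBase imports are in scope; the expected degree sequences must sum to the same total:

using LightGraphs

g = SimpleGraph(80)
anodes, bnodes = collect(1:50), collect(51:80)
aseq = fill(3, 50)                    #150 stubs on the A side
bseq = fill(5, 30)                    #150 stubs on the B side
random_bipartite_graph_fast_CL!(g, anodes, bnodes, aseq, bseq)
ne(g) <= sum(aseq)                    #duplicate stub pairings collapse, so at most 150 simple edges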
@@ -51,35 +64,33 @@ end
function symmetrize_means(population_sizes,mixing_matrix::Matrix{<:ZWDist})
mixing_means = mean.(mixing_matrix)
symmetrized_mixing = similar(mixing_matrix)
symmetrized_means = zeros((length(population_sizes),length(population_sizes)))
for i in 1:length(population_sizes), j in 1:length(population_sizes)
symmetrized_mean = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
symmetrized_mixing[i,j] = from_mean(typeof(mixing_matrix[i,j]),mixing_matrix[i,j].α,symmetrized_mean)
symmetrized_means[i,j] = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
end
return symmetrized_mixing
return map(((dst,sym_mean),)->from_mean(typeof(dst),dst.α,sym_mean), zip(mixing_matrix,symmetrized_means))
end
function symmetrize_means(population_sizes,mixing_matrix::Matrix{<:Distribution})
mixing_means = mean.(mixing_matrix)
symmetrized_mixing = similar(mixing_matrix)
symmetrized_means = zeros((length(population_sizes),length(population_sizes)))
for i in 1:length(population_sizes), j in 1:length(population_sizes)
symmetrized_mean = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
symmetrized_mixing[i,j] = from_mean(typeof(mixing_matrix[i,j]),symmetrized_mean)
symmetrized_means[i,j] = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
end
return symmetrized_mixing
return map(((dst,sym_mean),)->from_mean(typeof(dst),sym_mean), zip(mixing_matrix,symmetrized_means))
end
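Both methods enforce the same reciprocity condition: after symmetrization, group i's expected total contacts toward group j equal group j's expected total toward i, i.e. N_i * mean(M[i,j]) == N_j * mean(M[j,i]). A small illustrative check on a 2x2 Poisson matrix, assuming from_mean(Poisson{Float64}, μ) simply returns Poisson(μ); not code from the commit:

using Distributions

pop_sizes = [100, 400]
raw = [Poisson(2.0) Poisson(1.0); Poisson(6.0) Poisson(0.5)]   #asymmetric means
sym = symmetrize_means(pop_sizes, raw)
all(pop_sizes[i] * mean(sym[i, j]) ≈ pop_sizes[j] * mean(sym[j, i]) for i in 1:2, j in 1:2)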
#generate population with households distributed according to household_size_distribution
#Assume roughly 5% of elderly are in LTC; these are divided evenly among a specified number of LTC homes
#LTC homes and households are assumed to be complete static graphs.
function generate_population(rng,num_households,num_LTC)
function generate_population(num_households,num_LTC)
# LTC_sizes = rand(LTC_distribution,num_LTC)
prob_adult_is_HCW = 0.01#assume 1% of adults are HCW
prob_elderly_in_LTC = 0.05 #5% of elderly are in LTC
households_composition = sample_household_data(rng,num_households)
households_composition = sample_household_data(num_households)
households = map( l -> [fill(AgentDemographic(i),n) for (i,n) in enumerate(l)],households_composition) |>
l -> reduce(vcat,l) |>
l -> reduce(vcat,l)
@@ -90,7 +101,7 @@ function generate_population(rng,num_households,num_LTC)
ltc_pop_list = fill(ElderlyLTC,ltc_pop)
adults_indices = findall(x -> x == Adult, households)
households[sample(rng,adults_indices,hcw_pop)] .= AdultHCW
households[sample(RNG,adults_indices,hcw_pop)] .= AdultHCW
ltc_home_size = Int(ceil(ltc_pop/num_LTC))
@assert ltc_home_size != 0 "must have at least one LTC resident, make problem larger"
@@ -3,18 +3,27 @@ struct ZWDist{BaseDistType,T} <: Sampleable{Univariate,Discrete}
α::T
base_dist::BaseDistType
end
function Base.rand(rng::AbstractRNG, s::ZWDist)
if Base.rand(rng) < s.α
return 0
else
return Base.rand(rng,s.base_dist)
end
function Base.rand(rng::AbstractRNG, s::ZWDist{DType,EType}, n::T) where {T<:Int, EType, DType}
l = Vector{EType}(undef,n)
Random.rand!(rng,l)
l[l .< s.α] .= 0
Random.rand!(rng,s.base_dist,@view l[l .>= s.α])
return l
end
function Random.rand!(rng::AbstractRNG, s::ZWDist, l::T) where T<:AbstractVector
Random.rand!(rng,l)
l[l .< s.α] .= 0
Random.rand!(rng,s.base_dist,@view l[l .>= s.α])
return l
end
function Base.rand(rng::AbstractRNG, s::ZWDist, n::T) where T<:Int
return ifelse.(Base.rand(rng,n) .< s.α, 0, Base.rand(rng,s.base_dist,n))
function Base.rand(rng::AbstractRNG, s::ZWDist{DType,T}) where {DType,T}
return ifelse(Base.rand(rng) < s.α, zero(T), Base.rand(rng,s.base_dist))
end
function from_mean(::Type{Geometric{T}},μ) where T
if μ > 1.0
return Geometric(1/(μ+1))
@@ -36,27 +45,43 @@ function ZeroGeometric(α,p)
return ZWDist(α,Geometric(p))
end
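A quick sanity check on the Geometric branch of from_mean above: Distributions.jl's Geometric(p) counts failures before the first success, so its mean is (1 - p)/p, and choosing p = 1/(μ + 1) recovers mean μ for the μ > 1 branch shown; illustrative only, not code from the commit:

using Distributions

μ = 4.104848                                #one of the workschool means used below
mean(from_mean(Geometric{Float64}, μ)) ≈ μ  #Geometric(1/(μ+1)) has mean (1 - p)/p == μ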
function adjust_distributions_mean(distribution_matrix::AbstractArray{T},mean_shift_percentage) where T<:ZWDist
return map(distribution_matrix) do dist
new_mean = mean(dist)*(1 + mean_shift_percentage)
return from_mean(typeof(dist),dist.α, new_mean)
end
end
function adjust_distributions_mean(distribution_matrix::AbstractArray{T},mean_shift_percentage) where T<:Distribution
return map(distribution_matrix) do dist
new_mean = mean(dist)*(1 + mean_shift_percentage)
return from_mean(typeof(dist), new_mean)
end
end
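adjust_distributions_mean is what couples the Google mobility series to the contact structure: a mobility value of -0.3 (a 30% drop from baseline) rescales every mean in the matrix by 0.7. A hedged illustration on a plain Poisson matrix, again assuming from_mean(Poisson{Float64}, μ) returns Poisson(μ); not code from the commit:

using Distributions

baseline = [Poisson(3.0) Poisson(1.0); Poisson(1.0) Poisson(0.5)]
reduced = adjust_distributions_mean(baseline, -0.3)   #e.g. workplaces_percent_change_from_baseline of -30%
mean(reduced[1, 1])                                   #≈ 3.0 * 0.7 = 2.1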
StatsBase.mean(d::ZWDist{Dist,T}) where {Dist,T} = (1 - d.α)*StatsBase.mean(d.base_dist)
const initial_workschool_type = Union{ZWDist{Geometric{Float64},Float64},ZWDist{Poisson{Float64},Float64}}
const initial_workschool_mixing_matrix = map(t->from_mean(t...),[
const initial_workschool_mixing_matrix = convert(Array{initial_workschool_type},map(t->from_mean(t...),[
(ZWDist{Geometric{Float64},Float64}, 0.433835,4.104848) (ZWDist{Geometric{Float64},Float64},0.406326,2.568782) (ZWDist{Poisson{Float64},Float64},0.888015,0.017729) (ZWDist{Geometric{Float64},Float64},0.406326,2.568782) (ZWDist{Poisson{Float64},Float64},0.888015,0.017729)
(ZWDist{Geometric{Float64},Float64}, 0.600966,0.975688) (ZWDist{Geometric{Float64},Float64},0.4306,5.057572) (ZWDist{Poisson{Float64},Float64}, 0.84513,0.021307) (ZWDist{Geometric{Float64},Float64},0.4306,5.057572) (ZWDist{Poisson{Float64},Float64}, 0.84513,0.021307)
(ZWDist{Poisson{Float64},Float64},0.887392,0.001937) (ZWDist{Poisson{Float64},Float64},0.793004,0.00722) (ZWDist{Poisson{Float64},Float64},0.940473, 0.022134) (ZWDist{Poisson{Float64},Float64},0.793004,0.00722) (ZWDist{Poisson{Float64},Float64},0.940473, 0.022134)
(ZWDist{Geometric{Float64},Float64}, 0.600966,0.975688) (ZWDist{Geometric{Float64},Float64},0.4306,5.057572) (ZWDist{Poisson{Float64},Float64}, 0.84513,0.021307) (ZWDist{Geometric{Float64},Float64},0.4306,5.057572) (ZWDist{Poisson{Float64},Float64}, 0.84513,0.021307)
(ZWDist{Poisson{Float64},Float64},0.887392,0.001937) (ZWDist{Poisson{Float64},Float64},0.793004,0.00722) (ZWDist{Poisson{Float64},Float64},0.940473, 0.022134) (ZWDist{Poisson{Float64},Float64},0.793004,0.00722) (ZWDist{Poisson{Float64},Float64},0.940473, 0.022134)
])
]))
const initial_rest_type = Union{Geometric{Float64},Poisson{Float64}}
const initial_rest_mixing_matrix = map(t->from_mean(t...),[
const initial_rest_mixing_matrix = convert(Array{initial_rest_type},map(t->from_mean(t...),[
(Geometric{Float64},2.728177) (Geometric{Float64},1.382557) (Poisson{Float64},0.206362) (Geometric{Float64},1.382557) (Poisson{Float64},0.206362)
(Poisson{Float64},1.139072) (Geometric{Float64},3.245594) (Poisson{Float64},0.785297) (Geometric{Float64},3.245594) (Poisson{Float64},0.785297)
(Poisson{Float64},0.264822) (Poisson{Float64},0.734856) (Poisson{Float64},0.667099) (Poisson{Float64},0.734856) (Poisson{Float64},0.667099)
(Poisson{Float64},1.139072) (Geometric{Float64},3.245594) (Poisson{Float64},0.785297) (Geometric{Float64},3.245594) (Poisson{Float64},0.785297)
(Poisson{Float64},0.264822) (Poisson{Float64},0.734856) (Poisson{Float64},0.667099) (Poisson{Float64},0.734856) (Poisson{Float64},0.667099)
])
]))
const contact_time_distribution_matrix = [Geometric() for i in 1:AgentDemographic.size + 1, j in 1:AgentDemographic.size + 1]
const contact_time_distribution_matrix = convert(Array{initial_rest_type},[Geometric() for i in 1:AgentDemographic.size + 1, j in 1:AgentDemographic.size + 1])
function get_u_0(rng,nodes)
function get_u_0(nodes)
u_0 = fill(Susceptible,nodes)
init_indices = rand(rng, 1:nodes, 5)
init_indices = rand(RNG, 1:nodes, 5)
u_0[init_indices] .= Infected #seed 5 initial infections (indices sampled with replacement)
return u_0
end
function contact_weight(rand_gen,p, agent_v,agent_w)
contact_time = rand(rand_gen,contact_time_distribution_matrix[Int(agent_v),Int(agent_w)])
function contact_weight(p, agent_v,agent_w)
contact_time = rand(RNG,contact_time_distribution_matrix[Int(agent_v),Int(agent_w)])
return 1 - (1-p)^contact_time
end
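contact_weight turns a sampled contact duration into a per-edge transmission probability: if each unit of contact time transmits independently with probability p, then 1 - (1 - p)^τ is the chance of at least one transmission over a contact of length τ. A worked one-liner, illustrative only:

p = 0.05            #per-unit-time transmission probability
τ = 10              #sampled contact time
1 - (1 - p)^τ       #≈ 0.401, the probability compared against rand(RNG) in agents_step!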
function vaccinate_uniformly!(rng,num_vaccines,u_next,u,graph,population_list,index_vectors) #vaccination_algorithm
vaccination_indices = rand(rng,1:length(u),num_vaccines)
function vaccinate_uniformly!(num_vaccines,u_next,u,graph,population_list,index_vectors) #vaccination_algorithm
vaccination_indices = rand(RNG,1:length(u),num_vaccines)
u_next[vaccination_indices] .= Immunized
end
function hotspotting!(rng,num_vaccines,u_next,u,graph,population_list,index_vectors) #vaccination_algorithm
# vaccination_indices = rand(rng,1:length(u),num_vaccines)
function hotspotting!(num_vaccines,u_next,u,graph,population_list,index_vectors) #vaccination_algorithm
# vaccination_indices = rand(RNG,1:length(u),num_vaccines)
# u_next[vaccination_indices] .= Immunized
end
function agents_step!(rand_gen,t,u_next,u,population_list,graph,params,index_vectors,vaccination_algorithm!)
function agents_step!(t,u_next,u,population_list,graph,params,index_vectors,vaccination_algorithm!)
@unpack p, recovery_rate, vaccines_per_day,vaccination_start_day = params
u_next .= u#keep state the same if nothing else happens
if t >= vaccination_start_day
vaccination_algorithm!(rand_gen,Int(floor(vaccines_per_day*length(u))),u_next,u,graph,population_list,index_vectors)
vaccination_algorithm!(Int(floor(vaccines_per_day*length(u))),u_next,u,graph,population_list,index_vectors)
end
for i in 1:length(u)
agent_status = u[i]
agent_demo = population_list[i]
if agent_status == Susceptible
for j in LightGraphs.neighbors(graph,i)
if u[j] == Infected && rand(rand_gen) < contact_weight(rand_gen,p, agent_demo,population_list[j])
if u[j] == Infected && rand(RNG) < contact_weight(p, agent_demo,population_list[j])
u_next[i] = Infected
end
end
elseif agent_status == Infected
if rand(rand_gen) < recovery_rate
if rand(RNG) < recovery_rate
u_next[i] = Recovered
end
end
@@ -43,7 +43,7 @@ function agents_step!(rand_gen,t,u_next,u,population_list,graph,params,index_vec
end
function solve!(rng,u_0,params,steps,agent_model,vaccination_algorithm!)
function solve!(u_0,params,steps,agent_model,vaccination_algorithm!)
solution = vcat([u_0], [fill(Susceptible,length(u_0)) for i in 1:steps])
population_list = agent_model.demographics #list of demographic status for each agent
@@ -52,10 +52,10 @@ function solve!(rng,u_0,params,steps,agent_model,vaccination_algorithm!)
graphs = [base_network]
for t in 1:steps
graph_t = deepcopy(base_network) #copy static network to modify with dynamic workschool/rest contacts
generate_mixing_graph!(rng,graph_t,index_vectors,agent_model.workschool_contacts_mean_adjusted) #add workschool contacts
# generate_mixing_graph!(rng,graph_t,index_vectors,agent_model.rest_contacts_mean_adjusted) #add rest contacts
# push!(graphs,graph_t) #add the generated graph for this timestep onto the list of graphs
# agents_step!(rng,t,solution[t+1],solution[t],population_list,graph_t,params,index_vectors,vaccination_algorithm!)
generate_mixing_graph!(graph_t,index_vectors,agent_model.workschool_contacts_mean_adjusted_list[t]) #add workschool contacts
generate_mixing_graph!(graph_t,index_vectors,agent_model.rest_contacts_mean_adjusted_list[t]) #add rest contacts
push!(graphs,graph_t) #add the generated graph for this timestep onto the list of graphs
agents_step!(t,solution[t+1],solution[t],population_list,graph_t,params,index_vectors,vaccination_algorithm!)
#advance agent states based on the new network, vaccination process given by vaccination_algorithm!, which is just a function defined as above
end
abstract type VaccinationStrategy end
@@ -3,44 +3,47 @@ using RandomNumbers.Xorshifts
using Test
using ThreadsX
import StatsBase.mean
rng = Xoroshiro128Plus(1)
model_sizes = [(100,1),(1000,3),(5000,3),(5000,50)]
model_sizes = [(100,1),(1000,3),(5000,3)]
vaccination_stratgies = [vaccinate_uniformly!]
vaccination_rates = [0.0001,0.001,0.005,0.01,0.05]
infection_rates = [0.01,0.05,0.1]
agent_models = ThreadsX.map(model_size -> AgentModel(rng,model_size...), model_sizes)
agent_models = ThreadsX.map(model_size -> AgentModel(model_size...), model_sizes)
dem_cat = AgentDemographic.size + 1
@testset "mixing matrices, size: $sz" for (m,sz) in zip(agent_models,model_sizes)
ws_dist = m.workschool_contacts_mean_adjusted
r_dist = m.rest_contacts_mean_adjusted
ws_dist = m.workschool_contacts_mean_adjusted_list
r_dist = m.rest_contacts_mean_adjusted_list
index_vec =m.demographic_index_vectors
@testset "workschool" for i in dem_cat, j in dem_cat
@test mean(ws_dist[i,j])*length(index_vec[i]) == mean(ws_dist[j,i])*length(index_vec[j])
for t in 1:length(ws_dist)
@test mean(ws_dist[t][i,j])*length(index_vec[i]) == mean(ws_dist[t][j,i])*length(index_vec[j])
end
end
@testset "rest" for i in dem_cat, j in dem_cat
@test mean(r_dist[i,j])*length(index_vec[i]) == mean(r_dist[j,i])*length(index_vec[j])
for t in 1:length(ws_dist)
@test mean(r_dist[t][i,j])*length(index_vec[i]) == mean(r_dist[t][j,i])*length(index_vec[j])
end
end
end
function vac_rate_test(model,vac_strategy, vac_rate; rng = Xoroshiro128Plus())
u_0 = get_u_0(rng,length(model.demographics))
u_0 = get_u_0(length(model.demographics))
params = merge(get_parameters(),(vaccines_per_day = vac_rate,))
steps = 300
sol1,graphs = solve!(rng,u_0,params,steps,model,vac_strategy);
sol1,graphs = solve!(u_0,params,steps,model,vac_strategy);
total_infections = count(x->x == AgentStatus(3),sol1[end])
return total_infections
end
function infection_rate_test(model, inf_parameter; rng = Xoroshiro128Plus())
u_0 = get_u_0(rng,length(model.demographics))
u_0 = get_u_0(length(model.demographics))
params = merge(get_parameters(),(p = inf_parameter,))
steps = 300
sol1,graphs = solve!(rng,u_0,params,steps,model,vaccinate_uniformly!);
sol1,graphs = solve!(u_0,params,steps,model,vaccinate_uniformly!);
total_infections = count(x->x == AgentStatus(3),sol1[end])
return total_infections
@@ -54,11 +57,11 @@ end
@testset "vaccination efficacy $sz" for (m,sz) in zip(deepcopy(agent_models),model_sizes)
@testset "strategy" for strat in vaccination_stratgies
@test test_comparison(x->vac_rate_test(m,strat,x),vaccination_rates,>=)
@test test_comparison(x->vac_rate_test(m,strat,x),vaccination_rates,>)
end
end
@testset "infection efficacy $sz" for (m,sz) in zip(deepcopy(agent_models),model_sizes)
@test test_comparison(x->infection_rate_test(m,x),infection_rates,<=)
@test test_comparison(x->infection_rate_test(m,x),infection_rates,<)
end