diff --git a/CovidAlertVaccinationModel/src/CovidAlertVaccinationModel.jl b/CovidAlertVaccinationModel/src/CovidAlertVaccinationModel.jl
index 9e8fbd5f38283f4ab00bed691e749a5606cd6982..c9b198f5941b49b655851f7053200a0b5b2664cf 100644
--- a/CovidAlertVaccinationModel/src/CovidAlertVaccinationModel.jl
+++ b/CovidAlertVaccinationModel/src/CovidAlertVaccinationModel.jl
@@ -27,7 +27,7 @@ include("model.jl")
 include("plotting.jl")
 include("graphs.jl")
 
-const RNG = Xoroshiro128Star(1)
+const RNG = Xoroshiro128Star()
 const population = 14.57e6 #population of ontario
 
 const color_palette = palette(:seaborn_bright) #color theme for the plots
@@ -41,14 +41,15 @@ default(dpi = 300)
 default(framestyle = :box)
 using BenchmarkTools
 function main()
-    agent_model = AgentModel(5000,2)
+    agent_model = AgentModel(5000,5)
     # display(size.(agent_model.demographic_index_vectors))
     u_0 = get_u_0(length(agent_model.demographics))
     steps = 300
-    # @btime solve!($u_0,$get_parameters(),$steps,$agent_model,$vaccinate_uniformly!);
-    sol,graphs = solve!(u_0,get_parameters(),steps,agent_model,vaccinate_uniformly!);
-    return aggregate_timeseries(sol)
-    plot_model_spatial_gif(agent_model.base_network,graphs,sol1)
+    # Random.seed!(RNG,1)
+    @btime solve!($u_0,$get_parameters(),$steps,$agent_model,$vaccinate_uniformly!);
+    # sol,graphs = solve!(u_0,get_parameters(),steps,agent_model,vaccinate_uniformly!);
+    # return aggregate_timeseries(sol)
+    # plot_model_spatial_gif(agent_model.base_network,graphs,sol1)
 end
 
 
diff --git a/CovidAlertVaccinationModel/src/graphs.jl b/CovidAlertVaccinationModel/src/graphs.jl
index dc6874975f29eb1c710b0454dab3f25b55dba393..df195d619a5f5d7dd75f4bbb9504200d7de9bee7 100644
--- a/CovidAlertVaccinationModel/src/graphs.jl
+++ b/CovidAlertVaccinationModel/src/graphs.jl
@@ -1,55 +1,102 @@
+#This defines a type for the time-dependent graph
+#The tradeoff is (partially) memory-usage vs speed, so it tries to preallocate as much as possible
+#As a result, it stores a lot of redundant information
+#Julia's LightGraphs.jl package uses edgelists to store its graphs, but here I am experimenting with bitarrays, which seem to be 4-5x faster, even for huge problems.
+#This also completely decouples the generation of the contact vectors (the number of contacts each node has in each WorkSchool/Rest layer) from the
+#generation of the corresponding graphs 
+#Bitmatrices are much faster for graph generation, but we lose O(1) access to a node's neighbors
+struct MixingGraphs{GraphVector,A}
+    graph::GraphVector
+    base_graph::GraphVector
+    contact_vector_ws::A
+    contact_vector_rest::A
+    function MixingGraphs(static_contacts, ws_mixing_matrices, rest_mixing_matrices, index_vectors)
+        (length(ws_mixing_matrices) ==  length(rest_mixing_matrices)) || throw(ArgumentError("mixing matrix lists must be of equal length")) 
+        ts_length = length(ws_mixing_matrices)
+        contact_vector_ws = map(mm -> MixingContacts(index_vectors,mm), ws_mixing_matrices)
+        contact_vector_rest = map(mm -> MixingContacts(index_vectors,mm), rest_mixing_matrices)
+        base_graph = convert(BitArray,adjacency_matrix(static_contacts))
+        return new{typeof(base_graph),typeof(contact_vector_ws)}(deepcopy(base_graph),base_graph,contact_vector_ws,contact_vector_rest)
+    end
+end
+
+
+#A type that defines a matrix of vectors, such that the sum of the ijth vector is equal to the sum of the jith vector
+struct MixingContacts{V}
+    contact_array::V
+    function MixingContacts(index_vectors,mixing_matrix)
+        contacts = map(CartesianIndices(mixing_matrix)) do ind
+            zeros(length(index_vectors[ind[1]]))
+        end
+        for i in 1:length(mixing_matrix[:,1]), j in 1:i  #diagonal
+            generate_contact_vectors!(mixing_matrix[i,j],mixing_matrix[j,i],contacts[i,j],contacts[j,i] )
+        end
+        new{typeof(contacts)}(contacts)
+    end
+end
+
+#generate a new mixing graph for the current timestep t
+function advance_mixing_graph!(t,mixing_graph,index_vectors)
+    mixing_graph.graph .= 0
+    mixing_graph.graph .= mixing_graph.base_graph
+    generate_mixing_graph!(mixing_graph.graph, index_vectors,mixing_graph.contact_vector_rest[t])
+    generate_mixing_graph!(mixing_graph.graph, index_vectors,mixing_graph.contact_vector_ws[t])
+    return nothing
+end
 
+
+function update_contacts!(mixing_contacts,mixing_matrix)
+    for i in 1:length(mixing_matrix[:,1]), j in 1:i  #diagonal  
+        generate_contact_vectors!(mixing_matrix[i,j],mixing_matrix[j,i],mixing_contacts.contact_array[i,j],mixing_contacts.contact_array[j,i] )
+    end
+end
+function generate_contact_vectors!(ij_dist,ji_dist,i_to_j_contacts, j_to_i_contacts)
+    rand!(RNG,ij_dist,i_to_j_contacts)
+    rand!(RNG,ji_dist,j_to_i_contacts)
+    l_i = length(i_to_j_contacts)
+    l_j = length(j_to_i_contacts)
+    contacts_sums = sum(i_to_j_contacts) - sum(j_to_i_contacts)
+    sample_list_length = max(l_i,l_j) #better heuristic for this based on stddev of dist?
+    index_list_i = sample(RNG,1:l_i,sample_list_length)
+    index_list_j = sample(RNG,1:l_j,sample_list_length)
+    sample_list_i = rand(RNG,ij_dist,sample_list_length)
+    sample_list_j = rand(RNG,ji_dist,sample_list_length)
+    for k = 1:sample_list_length
+        if (contacts_sums != 0)
+            i_index = index_list_i[k]
+            j_index = index_list_j[k]    
+            contacts_sums +=  j_to_i_contacts[j_index] - i_to_j_contacts[i_index]
+
+            i_to_j_contacts[i_index] = sample_list_i[k]
+            j_to_i_contacts[j_index] = sample_list_j[k]    
+            contacts_sums += i_to_j_contacts[i_index] -  j_to_i_contacts[j_index]
+        else
+            break
+        end
+    end
+    while contacts_sums != 0
+        i_index = sample(RNG,1:l_i)
+        j_index = sample(RNG,1:l_j)
+        contacts_sums +=  j_to_i_contacts[j_index] - i_to_j_contacts[i_index]
+        i_to_j_contacts[i_index] = rand(RNG,ij_dist)
+        j_to_i_contacts[j_index] = rand(RNG,ji_dist)
+        contacts_sums += i_to_j_contacts[i_index] -  j_to_i_contacts[j_index]
+    end
+    return nothing
+end
 #add a bipartite graph derived from mixing matrices onto g
-function generate_mixing_graph!(g,index_vectors,mixing_matrix)
-    for i in 1:5, j in 1:i #diagonal
-            agent_contact_dist_i = mixing_matrix[i,j]
-            agent_contact_dist_j = mixing_matrix[j,i]
-            l_i = length(index_vectors[i])
-            l_j = length(index_vectors[j])
-            i_to_j_contacts = rand(RNG,agent_contact_dist_i,l_i)
-            j_to_i_contacts = rand(RNG,agent_contact_dist_j,l_j)
-            contacts_sums = sum(i_to_j_contacts) - sum(j_to_i_contacts)
-
-
-            # display((mean(agent_contact_dist_i)*l_i,mean(agent_contact_dist_j)*l_j))
-            sample_list_length = max(l_j,l_i) #better heuristic for this based on stddev of dist?
-            index_list_i = sample(RNG,1:l_i,sample_list_length)
-            index_list_j = sample(RNG,1:l_j,sample_list_length)
-            sample_list_i = rand(RNG,agent_contact_dist_i,sample_list_length)
-            sample_list_j = rand(RNG,agent_contact_dist_j,sample_list_length)
-            for k = 1:sample_list_length
-                if (contacts_sums != 0)
-                    i_index = index_list_i[k]
-                    j_index = index_list_j[k]    
-                    contacts_sums +=  j_to_i_contacts[j_index] - i_to_j_contacts[i_index]
-
-                    i_to_j_contacts[i_index] = sample_list_i[k]
-                    j_to_i_contacts[j_index] = sample_list_j[k]    
-                    contacts_sums += i_to_j_contacts[i_index] -  j_to_i_contacts[j_index]
-                else
-                    break
-                end
-            end
-            while contacts_sums != 0
-                i_index = sample(RNG,1:l_i)
-                j_index = sample(RNG,1:l_j)
-                contacts_sums +=  j_to_i_contacts[j_index] - i_to_j_contacts[i_index]
-                i_to_j_contacts[i_index] = rand(RNG,agent_contact_dist_i)
-                j_to_i_contacts[j_index] = rand(RNG,agent_contact_dist_j)
-                contacts_sums += i_to_j_contacts[i_index] -  j_to_i_contacts[j_index]
-            end
-            random_bipartite_graph_fast_CL!(g,index_vectors[i],index_vectors[j],i_to_j_contacts,j_to_i_contacts)
+function generate_mixing_graph!(g,index_vectors,contacts)
+    for i in 1:5, j in 1:5  
+        random_bipartite_graph_fast_CL!(g,index_vectors[i],index_vectors[j],contacts.contact_array[i,j],contacts.contact_array[j,i])
     end
     return g
 end
 
+using StatsBase
 #modify g so that nodes specified in anodes and bnodes are connected by a bipartite graph with expected degrees given by aseq and bseq
 #implemented from Aksoy, S. G., Kolda, T. G., & Pinar, A. (2017). Measuring and modeling bipartite graphs with community structure
 #simple algorithm, might produce parallel edges for small graphs
-
-using StatsBase
-
-function random_bipartite_graph_fast_CL!(g,anodes,bnodes,aseq,bseq) 
+function random_bipartite_graph_fast_CL!(g::SimpleGraph,anodes,bnodes,aseq,bseq) 
     lena = length(aseq)
     lenb = length(bseq)
     m = Int(sum(aseq))
@@ -62,24 +109,18 @@ function random_bipartite_graph_fast_CL!(g,anodes,bnodes,aseq,bseq)
     return g
 end
 
-function symmetrize_means(population_sizes,mixing_matrix::Matrix{<:ZWDist})
-    mixing_means = mean.(mixing_matrix)
-    symmetrized_means = zeros((length(population_sizes),length(population_sizes)))
-    for i in 1:length(population_sizes), j in 1:length(population_sizes)
-        symmetrized_means[i,j] = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
-    end
-    return map(((dst,sym_mean),)->from_mean(typeof(dst),dst.α,sym_mean), zip(mixing_matrix,symmetrized_means))
-end
-
-
-function symmetrize_means(population_sizes,mixing_matrix::Matrix{<:Distribution})
-    mixing_means = mean.(mixing_matrix)
-    symmetrized_means = zeros((length(population_sizes),length(population_sizes)))
-
-    for i in 1:length(population_sizes), j in 1:length(population_sizes)
-        symmetrized_means[i,j] = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
+#same algorithm but with a bitarray
+function random_bipartite_graph_fast_CL!(g::T,anodes,bnodes,aseq,bseq) where T<:AbstractArray 
+    lena = length(aseq)
+    lenb = length(bseq)
+    m = Int(sum(aseq))
+    @assert sum(aseq) == sum(bseq) "degree sequences must have equal sum"
+    astubs = sample(RNG,anodes,StatsBase.weights(aseq./m), m)
+    bstubs = sample(RNG,bnodes,StatsBase.weights(bseq./m), m)
+    for k in 1:m
+        g[astubs[k],bstubs[k]] = 1
     end
-    return map(((dst,sym_mean),)->from_mean(typeof(dst),sym_mean), zip(mixing_matrix,symmetrized_means))
+    return g
 end
 
 #generate population with households distributed according to household_size_distribution
diff --git a/CovidAlertVaccinationModel/src/mixing_distributions.jl b/CovidAlertVaccinationModel/src/mixing_distributions.jl
index d97120cfe9aaff6525a6fa9d9cf77df5b5be689a..f3f8df6984b9253b2eac923e6fea572944f0a919 100644
--- a/CovidAlertVaccinationModel/src/mixing_distributions.jl
+++ b/CovidAlertVaccinationModel/src/mixing_distributions.jl
@@ -20,6 +20,26 @@ function ZeroGeometric(α,p)
     return ZWDist(α,Geometric(p))
 end
 
+function symmetrize_means(population_sizes,mixing_matrix::Matrix{<:ZWDist})
+    mixing_means = mean.(mixing_matrix)
+    symmetrized_means = zeros((length(population_sizes),length(population_sizes)))
+    for i in 1:length(population_sizes), j in 1:length(population_sizes)
+        symmetrized_means[i,j] = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
+    end
+    return map(((dst,sym_mean),)->from_mean(typeof(dst),dst.α,sym_mean), zip(mixing_matrix,symmetrized_means))
+end
+
+
+function symmetrize_means(population_sizes,mixing_matrix::Matrix{<:Distribution})
+    mixing_means = mean.(mixing_matrix)
+    symmetrized_means = zeros((length(population_sizes),length(population_sizes)))
+
+    for i in 1:length(population_sizes), j in 1:length(population_sizes)
+        symmetrized_means[i,j] = 0.5*(mixing_means[i,j] + population_sizes[j]/population_sizes[i] * mixing_means[j,i])
+    end
+    return map(((dst,sym_mean),)->from_mean(typeof(dst),sym_mean), zip(mixing_matrix,symmetrized_means))
+end
+
 
 function from_mean(::Type{Geometric{T}},μ) where T
     if μ > 1.0
diff --git a/CovidAlertVaccinationModel/src/model.jl b/CovidAlertVaccinationModel/src/model.jl
index 43bedcc996204d5ae43445aabbfbf3a3d30a7925..e81b7c860536f2cf512be1851ec6954e6c802a22 100644
--- a/CovidAlertVaccinationModel/src/model.jl
+++ b/CovidAlertVaccinationModel/src/model.jl
@@ -29,8 +29,8 @@ function agents_step!(t,u_next,u,population_list,graph,params,index_vectors,vacc
         agent_status = u[i]
         agent_demo = population_list[i]
         if agent_status == Susceptible
-            for j in LightGraphs.neighbors(graph,i)
-                if u[j] == Infected && rand(RNG) < contact_weight(p, agent_demo,population_list[j])
+            for j in 1:length(u) #LightGraphs.neighbors(graph,i)
+                if graph[i,j] == 1 && u[j] == Infected && rand(RNG) < contact_weight(p, agent_demo,population_list[j])
                     u_next[i] = Infected
                 end
             end
@@ -43,23 +43,24 @@ function agents_step!(t,u_next,u,population_list,graph,params,index_vectors,vacc
 end
 
 
-function solve!(u_0,params,steps,agent_model,vaccination_algorithm!)
+function solve!(u_0,params,steps,agent_model,vaccination_algorithm!; record_graphs = false)
     solution = vcat([u_0], [fill(Susceptible,length(u_0)) for i in 1:steps])
 
     population_list = agent_model.demographics #list of demographic status for each agent
     index_vectors = agent_model.demographic_index_vectors #lists of indices of each agent with a given demographic 
     base_network = agent_model.base_network #static network, households and LTC homes
-    graphs = [base_network]
+    ws_contacts = agent_model.workschool_contacts_mean_adjusted_list
+    rest_contacts = agent_model.rest_contacts_mean_adjusted_list
+    mixing_graphs = MixingGraphs(base_network, ws_contacts, rest_contacts, index_vectors)
+
+
     for t in 1:steps
-        graph_t = deepcopy(base_network) #copy static network to modify with dynamic workschool/rest contacts
-        generate_mixing_graph!(graph_t,index_vectors,agent_model.workschool_contacts_mean_adjusted_list[t]) #add workschool contacts
-        generate_mixing_graph!(graph_t,index_vectors,agent_model.rest_contacts_mean_adjusted_list[t]) #add rest contacts
-        push!(graphs,graph_t) #add the generated graph for this timestep onto the list of graphs 
-        agents_step!(t,solution[t+1],solution[t],population_list,graph_t,params,index_vectors,vaccination_algorithm!) 
+        advance_mixing_graph!(t,mixing_graphs,index_vectors)
+        agents_step!(t,solution[t+1],solution[t],population_list,mixing_graphs.graph,params,index_vectors,vaccination_algorithm!) 
         #advance agent states based on the new network, vaccination process given by vaccination_algorithm!, which is just a function defined as above
     end
    
-    return solution,graphs
+    return solution #mixing_graphs.graph_list
 end
 
 function get_parameters()
diff --git a/CovidAlertVaccinationModel/test/runtests.jl b/CovidAlertVaccinationModel/test/runtests.jl
index 07a338e53674a5bf74bdd85c2604f7142f3b612a..cb4d9595530c296ad3fc4cb0f25d7cc38d937fb5 100644
--- a/CovidAlertVaccinationModel/test/runtests.jl
+++ b/CovidAlertVaccinationModel/test/runtests.jl
@@ -3,9 +3,9 @@ using RandomNumbers.Xorshifts
 using Test
 using ThreadsX
 import StatsBase.mean
-model_sizes = [(100,1),(1000,3),(5000,3)]
-vaccination_stratgies = [vaccinate_uniformly!]
-vaccination_rates = [0.001,0.005,0.01,0.05]
+model_sizes = [(100,1),(1000,1),(1000,3),(5000,3)]
+vaccination_strategies = [vaccinate_uniformly!]
+vaccination_rates = [0.000,0.005,0.01,0.05]
 infection_rates = [0.01,0.05,0.1]
 agent_models = ThreadsX.map(model_size -> AgentModel(model_size...), model_sizes)
 dem_cat = AgentDemographic.size + 1
@@ -33,7 +33,7 @@ function vac_rate_test(model,vac_strategy, vac_rate; rng = Xoroshiro128Plus())
     u_0 = get_u_0(length(model.demographics))
     params = merge(get_parameters(),(vaccines_per_day = vac_rate,))
     steps = 300
-    sol1,graphs = solve!(u_0,params,steps,model,vac_strategy);
+    sol1 = solve!(u_0,params,steps,model,vac_strategy);
     total_infections = count(x->x == AgentStatus(3),sol1[end])
     display(total_infections)
     return total_infections
@@ -44,9 +44,11 @@ function infection_rate_test(model, inf_parameter; rng = Xoroshiro128Plus())
     u_0 = get_u_0(length(model.demographics))
     params = merge(get_parameters(),(p = inf_parameter,))
     steps = 300
-    sol1,graphs = solve!(u_0,params,steps,model,vaccinate_uniformly!);
+    display(params)
+    sol1 = solve!(u_0,params,steps,model,vaccinate_uniformly!);
     total_infections = count(x->x == AgentStatus(3),sol1[end])
 
+    display(total_infections)
     return total_infections
 end
 
@@ -57,7 +59,8 @@ function test_comparison(f,xpts,comparison)
 end
 
 @testset "vaccination efficacy $sz" for (m,sz) in zip(deepcopy(agent_models),model_sizes)
-    @testset "strategy" for strat in vaccination_stratgies
+    @show vac_rate_test(m,vaccination_strategies[1],vaccination_rates[1])
+    @testset "strategy" for strat in vaccination_strategies
         @test test_comparison(x->vac_rate_test(m,strat,x),vaccination_rates,>)
     end
 end
diff --git a/IntervalsModel/plots/hh.pdf b/IntervalsModel/plots/hh.pdf
index e4475483c8bb20c1286e6008921c7c46baff0758..38123c8048b694cdb472e8481da98192dadfe564 100644
Binary files a/IntervalsModel/plots/hh.pdf and b/IntervalsModel/plots/hh.pdf differ
diff --git a/IntervalsModel/plots/rest.pdf b/IntervalsModel/plots/rest.pdf
index 1b5c6eb890568ffb59e5963a4c9d1ab6f4c8bd42..d206a27805ca5948a0e22fea0391defbf1985b61 100644
Binary files a/IntervalsModel/plots/rest.pdf and b/IntervalsModel/plots/rest.pdf differ
diff --git a/IntervalsModel/plots/ws.pdf b/IntervalsModel/plots/ws.pdf
index d46cb378594dd87fb76401b8ec447b228576f970..472c2d44417e4e27cef9621fa7687b11d7f61a1f 100644
Binary files a/IntervalsModel/plots/ws.pdf and b/IntervalsModel/plots/ws.pdf differ
diff --git a/IntervalsModel/simulation_data/hh.dat b/IntervalsModel/simulation_data/hh.dat
index e95347e52f9c39ee67d6ba672872164b446df535..c08687ddd0d038d7dcfa0584408a2eb9cbb85051 100644
Binary files a/IntervalsModel/simulation_data/hh.dat and b/IntervalsModel/simulation_data/hh.dat differ
diff --git a/IntervalsModel/simulation_data/ws.dat b/IntervalsModel/simulation_data/ws.dat
index 6bb21aa2aa083ff4534e8c7ec019f774982de332..99445fa56b8a39ae77c31625ad7770a8395c722f 100644
Binary files a/IntervalsModel/simulation_data/ws.dat and b/IntervalsModel/simulation_data/ws.dat differ
diff --git a/IntervalsModel/src/IntervalsModel.jl b/IntervalsModel/src/IntervalsModel.jl
index c9e6b9a555e7dbbd06b4683e5d2f3c86f7dbc0fa..7e557e70ccdbb713564e46ebbc7e064bd98a3249 100644
--- a/IntervalsModel/src/IntervalsModel.jl
+++ b/IntervalsModel/src/IntervalsModel.jl
@@ -31,7 +31,7 @@ using Plots
 
 const μ_bounds = (6,12*6) 
 const σ_bounds = (1,48)
-const α_bounds = (0.0,0.1) 
+const α_bounds = (0.0,0.8) 
 
 function do_hh(particles)
     dists = [
@@ -58,7 +58,7 @@ function do_ws(particles)
     bounds_list = map(l -> vcat(l...),[
         [[α_bounds for i = 1:6],[μ_bounds for i = 1:6], [σ_bounds for i = 1:6]],
         [[α_bounds for i = 1:6],[μ_bounds for i = 1:6]],
-        [[α_bounds for i = 1:6],[μ_bounds for i = 1:6], [σ_bounds for i = 1:6]],
+        [[α_bounds for i = 1:6],[(0.1,12*6)  for i = 1:6], [(0.1,48.0) for i = 1:6]],
     ])
     @show bounds_list
     bayesian_estimate("ws",err_ws,dists,bounds_list,particles)
@@ -89,7 +89,7 @@ function bayesian_estimate(fname,err_func,dists,bounds_list,particles)
 
         @btime err_ws($init,$dist) #compute benchmark of the error function, not rly necessary
         
-        out = smc(priors,p -> err_func(p, dist), verbose=true, nparticles=particles,  alpha=0.99, M = 1,epstol = 0.1,  parallel = true) #apply sequential monte carlo with 200 particles
+        out = smc(priors,p -> err_func(p, dist), verbose=true, nparticles=particles,  alpha=0.90,  parallel = true) #apply sequential Monte Carlo with `particles` particles
         
         return dist => out
     end |> OrderedDict
diff --git a/IntervalsModel/src/hh_durations_model.jl b/IntervalsModel/src/hh_durations_model.jl
index d6ba6004308193163e32bd9267de9bc09459f791..5c385350cab099352eaed151c7c75fff5e57d71f 100644
--- a/IntervalsModel/src/hh_durations_model.jl
+++ b/IntervalsModel/src/hh_durations_model.jl
@@ -8,7 +8,7 @@ const cnst_hh = (
     # Set the underlying parameters for the intervals model
     Sparam = [60,12],
     # Set parameters for intervals sample and subpopulation size
-    numsamples = 100,
+    numsamples = 10,
     subsize = size(HHYMO)[1],
     # Swap age brackets for numbers
     swap = Dict("Y" => YOUNG, "M" => MIDDLE, "O" => OLD),
diff --git a/IntervalsModel/src/ws_durations_model.jl b/IntervalsModel/src/ws_durations_model.jl
index e27869e9c7b83827526c1619930f740da32458a9..3b16688679bd4e17c0f5ceec10682d4596bb9d3f 100644
--- a/IntervalsModel/src/ws_durations_model.jl
+++ b/IntervalsModel/src/ws_durations_model.jl
@@ -16,7 +16,7 @@ const rest_data = (
     M = CSV.File("$PACKAGE_FOLDER/network-data/Timeuse/Rest/RDataM.csv") |> Tables.matrix |> x -> dropdims(x;dims = 2),
     O = CSV.File("$PACKAGE_FOLDER/network-data/Timeuse/Rest/RDataO.csv") |> Tables.matrix |> x -> dropdims(x;dims = 2),
 )
-const comparison_samples = 10_000
+const comparison_samples = 2000
 
 ws_distributions = CovidAlertVaccinationModel.initial_workschool_mixing_matrix
 
@@ -40,11 +40,12 @@ function err_ws(p,dist)
                 durs = trunc.(Int,rand(rng,age_dists[age_sample,age_j],neighourhoods[age_sample,age_j])) .% durmax
                 # display(durs)
                 tot_dur_sample!(sample_list,cnst_hh.Sparam,durs)
-                kde_est = kde(sample_list)
+                # kde_est = kde(sample_list)
                 # err =  (1 - pvalue(KSampleADTest(ws_samples[age_sample],sample_list)))^2 #need to maximize probability of null hypothesis, not rly valid but everyone does it so idk
-                errsum += mapreduce(+,0:0.05:durmax) do i
-                    return (pdf(kde_est,i) - pdf(kerneldensity_data[age_sample],i))^2
-                end
+                # errsum += mapreduce(+,0:0.05:durmax) do i
+                #     return (pdf(kde_est,i) - pdf(kerneldensity_data[age_sample],i))^2
+                # end
+                errsum += (mean(sample_list) - mean(ws_samples[age_sample]))^2
             end
         end
     end