CrimsonSkyline.GenerativeFieldType
struct GenerativeField{T}
    field::RandomField
    proposal::T
    val::Dict
    ...
end

A generative field bundles a random field (field), a proposal kernel (proposal), and an initialization value (val) used to start MH sampling.

CrimsonSkyline.NonparametricSamplingResultsType
struct NonparametricSamplingResults{I} <: SamplingResults{I}
    interpretation :: I
    log_weights :: Array{Float64, 1}
    return_values :: Array{Any, 1}
    traces :: Array{Trace, 1}
end

Wrapper for results of sampling. Implements the following methods from Base: getindex, length, keys. Interpretation of the log weights depends on I.

CrimsonSkyline.ParametricNodeType
mutable struct ParametricNode{A, D, T} <: Node
    address :: A
    dist :: D
    value :: Maybe{T}
    logprob :: Float64
    logprob_sum :: Float64
    observed :: Bool
    pa :: Array{Node, 1}
    ch :: Array{Node, 1}
    interpretation :: Union{Interpretation, Vector{Interpretation}}
    last_interpretation :: Union{Interpretation, Vector{Interpretation}}
end

A Node that can be used with arbitrary code for which rand and Distributions.logpdf are defined.

CrimsonSkyline.ParametricSamplingResultsType
struct ParametricSamplingResults{I} <: SamplingResults{I}
    interpretation :: I
    log_weights :: Array{Float64, 1}
    return_values :: Array{Any, 1}
    traces :: Array{Trace, 1}
    distributions :: Dict
end

distributions maps from addresses to distributions, $a \mapsto \pi^{(a)}_\psi(z)$, where $\pi^{(a)}_\psi(z)$ solves

\[\max_\psi E_{z \sim p(z|x)}[\log \pi^{(a)}_\psi(z)].\]

The distributions are not used to generate values but only to score sampled values; values are still sampled from the posterior traces. Right now, the parametric approximation is very simple: values whose support includes the negative orthant of $\mathbb{R}^D$ are approximated by (multivariate) normal distributions, while values supported only on the positive orthant of $\mathbb{R}^D$ are approximated by (multivariate) lognormal distributions. This behavior is expected to change in the future.
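
The sketch below shows one way the fitted distributions might be used for scoring. It is a minimal sketch only: results is assumed to be a NonparametricSamplingResults from an earlier inference run, and "loc" an address appearing in its traces (both names are illustrative).

using Distributions  # for logpdf

p_results = to_parametric(results)           # fit per-address parametric approximations
logpdf(p_results.distributions["loc"], 0.5)  # score a candidate value at address "loc"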

CrimsonSkyline.RandomFieldType
struct RandomField 
    names::Set{String}
    factors::Dict{Vector{String},Function}
    evidence::Dict{String,Any}
end

A representation of a random field by a collection of factors: $\log p(x) = \sum_{f \in \mathcal F} \log \psi_f(x_f)$, where $\mathcal F$ is the set of (log) factors and $x_f$ is the set of variables incident on that (log) factor. factors should be properly normalized log mass or density functions. There is no restriction on the state space of the variables involved as long as the factor functions can evaluate the log probability of the variables. For example,

factor_ab(x) = logpdf(MvNormal(PDMat([1.0 0.5; 0.5 1.0])), x)
factor_bc(x) = logpdf(MvNormal([2.0, 2.0], PDMat([2.0 -1.0; -1.0 2.0])), x)

are two valid (log) factor functions – the first corresponding to the factor $\log \psi_{a,b}(x_a,x_b)$ and the second corresponding to the factor $\log \psi_{b,c}(x_b, x_c)$. Posting evidence is done using a dictionary mapping an address to value, e.g., evidence = Dict("b" => 3.0).

Calling a random field corresponds to evaluating its log probability with the passed argument, e.g.,

my_rf = RandomField(...)
x = Dict("a" => 1.0, "b" => -2.1)
my_lp = my_rf(x)  # corresponds to logprob(my_rf, x)
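
A fuller sketch using the factor functions defined above (the addresses "a", "b", and "c" are illustrative; the factors require Distributions and PDMats):

using Distributions, PDMats

factors = Dict{Vector{String},Function}(
    ["a", "b"] => factor_ab,
    ["b", "c"] => factor_bc,
)
rf = RandomField(factors)
x = Dict("a" => 1.0, "b" => 3.0, "c" => -2.1)
rf(x)  # log probability of the assignment, i.e., logprob(rf, x)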
CrimsonSkyline.RandomFieldMethod
function RandomField(factors::Dict{Vector{String},Function}, evidence::Dict)

Outer constructor for RandomField that requires a dict of factors and allows posting evidence when the field is created (instead of manually doing so later).

CrimsonSkyline.RandomFieldMethod
function RandomField(factors::Dict{Vector{String},Function})

Outer constructor for RandomField that requires only a dict of factors.

CrimsonSkyline.SampleableNodeType
mutable struct SampleableNode{A, T} <: Node
    address :: A
    dist :: Sampleable
    value :: Maybe{T}
    logprob :: Float64
    logprob_sum :: Float64
    observed :: Bool
    pa :: Array{Node, 1}
    ch :: Array{Node, 1}
    interpretation :: Union{Interpretation, Vector{Interpretation}}
    last_interpretation :: Union{Interpretation, Vector{Interpretation}}
end

A Node that is restricted to be used with any Sampleable from Distributions.jl.

CrimsonSkyline.TraceType
abstract type Trace end

Base type for all traces. Traces support the following Base methods: setindex!, getindex, keys, values, and length.

CrimsonSkyline.TypedTraceType
mutable struct TypedTrace{A, T} <: Trace
    trace :: OrderedDict{A, SampleableNode{A, T}}
    logprob_sum :: Float64
end

Trace that can hold nodes of the specific address (A) and value (T) types.

CrimsonSkyline.UntypedTraceType
mutable struct UntypedTrace
    trace :: OrderedDict{Any, Node}
    logprob_sum :: Float64
end

Trace that can hold nodes with all address and value types.

Base.randMethod
function Distributions.rand(gf::GenerativeField; num_iterations=2500)

Sample from a generative field using num_iterations of MH sampling. The value generated by MH after num_iterations of sampling is returned. To customize burn-in, you can overload this function, e.g. rand(gf::GenerativeField) = rand(gf; num_iterations=10000).

CrimsonSkyline.acceptMethod
function accept(t :: Trace, new_t :: Trace, log_a :: Float64)

Stochastic function that either returns new_t if accepted or returns t if not accepted.

CrimsonSkyline.addressesMethod
function addresses(r::NonparametricSamplingResults{I}) where I <: InferenceType

Get all addresses associated with the SamplingResults object, $A = \bigcup_{t\in \text{traces}}\mathcal A_t$, where $\mathcal A_t$ is the set of addresses in trace $t$.

CrimsonSkyline.addressesMethod
function addresses(r::ParametricSamplingResults{I}) where I <: InferenceType

Get all addresses associated with the SamplingResults object, $A = \bigcup_{t\in \text{traces}}\mathcal A_t$, where $\mathcal A_t$ is the set of addresses in trace $t$.

CrimsonSkyline.aicMethod
function aic(t :: Trace)

Computes the Akaike Information Criterion for a single trace (thus replacing "maximum likelihood" in the usual definition with "likelihood"). The formula is

\[\text{AIC}(t)/2 = |\text{params}(t)| - \ell(t),\]

where $|\text{params}(t)|$ is the sum of the dimensionalities of non-observed and non-deterministic sample nodes.

CrimsonSkyline.aicMethod
function aic(r :: SamplingResults{I}) where I <: InferenceType

Computes an empirical estimate of the Akaike Information Criterion from a SamplingResults. The formula is

\[\text{AIC}(r)/2 = \min_{t \in \text{traces}(r)}\left[|\text{params}(t)| - \hat\ell(t)\right],\]

where $|\text{params}(t)|$ is the sum of the dimensionalities of non-observed and non-deterministic sample nodes and $\hat\ell(t)$ is the empirical log likelihood of trace $t$.

CrimsonSkyline.blockMethod
block(f :: F, t :: Trace) where F <: Function

Converts all traced randomness into untraced randomness.

CrimsonSkyline.blockMethod
function block(f, t :: T, addresses) where T <: Trace

Given a stochastic function f, a trace t, and an iterable of addresses, converts traced randomness into untraced randomness.

Returns a tuple (t_new, g), where t_new is a trace and g is a function. The function signature of g is the same as that of f with the first argument removed; that is, if f(t :: Trace, params...), then g(params...). Computation is delayed, so each of the latent nodes in t_new has interpretation = BLOCKED. Calling g(params...) executes the computation and each latent node in t_new with an address in addresses is removed.
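
A minimal sketch of blocking a single address, assuming the illustrative model below (written against the documented sample / observe API) and a trace t produced by a previous execution of that model:

using Distributions

function model(t :: Trace, data)
    loc = sample(t, "loc", Normal(0.0, 1.0))
    scale = sample(t, "scale", LogNormal(0.0, 1.0))
    for (i, x) in enumerate(data)
        observe(t, "data $i", Normal(loc, scale), x)
    end
end

t_new, g = block(model, t, ["scale"])  # randomness at "scale" becomes untraced
g(data)                                # executes the delayed computation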

CrimsonSkyline.conditionMethod
function condition(f, evidence :: Dict)

Condition a trace modified by f on evidence, which maps addresses to the observed evidence associated with each address. Returns a function with call signature identical to that of f and return signature (t :: Trace, rval), where rval is the return value of f.
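
A minimal sketch, reusing the illustrative model from the block example above and a fresh typed trace from the documented trace(A, T) constructor:

conditioned = condition(model, Dict("loc" => 0.5))
t, rval = conditioned(trace(String, Float64), data)  # "loc" is now fixed at 0.5 and scored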

CrimsonSkyline.connect_pa_ch!Method
function connect_pa_ch!(t :: Trace, pa, a)

Connects parent and child nodes. Adds child nodes to parent's ch and parent nodes to child's pa.

CrimsonSkyline.copy_common!Method
function copy_common!(old_t :: Trace, new_t :: Trace)

Copies nodes from old_t into new_t for all addresses in the intersection of their address sets.

CrimsonSkyline.forward_samplingMethod
function forward_sampling(f; params = (), num_iterations = 1)

Draws samples from the model's joint density. Equivalent to calling f in a loop num_iterations times, but results are collected in a NonparametricSamplingResults for easier postprocessing.
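
A minimal usage sketch, reusing the illustrative model and data from the block example above:

results = forward_sampling(model; params = (data,), num_iterations = 100)
length(results.traces)  # 100 traces drawn from the model's joint density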

CrimsonSkyline.hpdiMethod
function hpdi(r::SamplingResults{I}, pct::Float64, addresses::AbstractArray{T}) where {I <: InferenceType, T}

Computes the highest posterior density interval(s) for a univariate variable. Does not check that the data corresponding to each address in addresses is actually univariate; if in doubt, use hpds instead.

CrimsonSkyline.hpdsMethod
function hpds(r::SamplingResults{I}, pct::Float64) where I <: InferenceType

Computes the highest posterior density set (HPDS) of the SamplingResults object. Let $\mathcal T$ be the set of traces. The $100\times Q \%$-percentile HPDS is defined as the set that satisfies $\sum_{t \in \mathrm{HPDS}} p(t) = Q$ and, for all $t \in \mathrm{HPDS}$, $p(t) > p(s)$ for every $s \in \mathcal T - \mathrm{HPDS}$. It is possible to compute the HPDS using the full joint density $p(t) \equiv p(x, z)$, where $x$ is the set of observed rvs and $z$ is the set of latent rvs, since $p(z|x) \propto p(x, z)$.

pct should be a float in (0.0, 1.0). E.g., pct = 0.95 returns the 95% HPDS.
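
For example, assuming results is a SamplingResults produced by one of the samplers documented below:

hpds(results, 0.95)  # the 95% highest posterior density set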

CrimsonSkyline.importance_samplingMethod
function importance_sampling(f, q, types::Tuple{DataType,DataType}; params = (), nsamples :: Int = 1)

Given a stochastic function f, a proposal function q, and a tuple of params to pass to f and q, compute nsamples iterations of importance sampling. q must have the same input signature as f. Returns a SamplingResults instance.

CrimsonSkyline.importance_samplingMethod
function importance_sampling(f, q; params = (), nsamples :: Int = 1)

Given a stochastic function f, a proposal function q, and a tuple of params to pass to f and q, compute nsamples iterations of importance sampling. q must have the same input signature as f. Returns a SamplingResults instance.
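
A minimal sketch, reusing the illustrative model from the block example above together with a hand-written proposal over the latent sites (the proposal distributions are illustrative):

using Distributions

function q(t :: Trace, data)
    sample(t, "loc", Normal(1.0, 2.0))
    sample(t, "scale", LogNormal(0.0, 0.5))
end

results = importance_sampling(model, q; params = (data,), nsamples = 1000)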

CrimsonSkyline.inputMethod
input(t :: Trace, a, d)

Track a model input. Used only in the graph intermediate representation and the factor graph.

CrimsonSkyline.interpret_latent!Method
function interpret_latent!(t :: Trace, i :: Interpretation)

Changes the interpretation of all latent nodes in t to have interpretation == i.

CrimsonSkyline.is_stepMethod
function is_step(f, q, types::Tuple{DataType,DataType}; params = ())

Perform one step of importance sampling – draw a single sample from the proposal q, replay it through f, and record the log weight as $\log W_n = \log p(x, z_n) - \log q(z_n)$. Returns a tuple (log weight, rval, trace).

CrimsonSkyline.is_stepMethod
function is_step(f, q; params = ())

Perform one step of importance sampling – draw a single sample from the proposal q, replay it through f, and record the log weight as $\log W_n = \log p(x, z_n) - \log q(z_n)$. Returns a tuple (log weight, rval, trace).

CrimsonSkyline.likelihood_weightingMethod
function likelihood_weighting(f, types::Tuple{DataType, DataType}, params...; nsamples :: Int = 1)

Given a stochastic function f and arguments to the function params..., executes nsamples iterations of importance sampling using the prior as the proposal distribution. The importance weights are given by $\log W_n = \ell(t_n)$. Returns a SamplingResults instance.

CrimsonSkyline.likelihood_weightingMethod
function likelihood_weighting(f, params...; nsamples :: Int = 1)

Given a stochastic function f and arguments to the function params..., executes nsamples iterations of importance sampling using the prior as the proposal distribution. The importance weights are given by $\log W_n = \ell(t_n)$. Returns a SamplingResults instance.
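
A minimal sketch, reusing the illustrative model from the block example above; the returned results can be passed to log_evidence and normalized_weights, documented below:

results = likelihood_weighting(model, data; nsamples = 1000)
log_Z = log_evidence(results)    # estimate of log p(x)
w = normalized_weights(results)  # self-normalized importance weights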

CrimsonSkyline.loadMethod
function load(f)

Load a Trace or SamplingResults object from file. The file must be saved in JuliaDB format with ending .jdb, which will be interpreted as a single saved trace, or must be a directory with ending .csm, which will be interpreted as a SamplingResults object.

CrimsonSkyline.load_csmMethod
function load_csm(f) :: SamplingResults

Loads a SamplingResults from a directory. The directory contains metadata.txt, which currently stores the interpretation of the SamplingResults (i.e., what kind of algorithm generated those results), and a file results.jdb, which is a JuliaDB table of the results.

CrimsonSkyline.load_jdbMethod
function load_jdb(f) :: Trace

Loads a serialized JuliaDB table from file f and converts it into a trace.

CrimsonSkyline.log_acceptance_ratioMethod
function log_acceptance_ratio(t :: Trace, t_proposed :: Trace, p :: Prior)

Computes the log acceptance ratio of a Metropolis step when using the independent prior proposal algorithm:

$\log \alpha = \ell(t_{\text{proposed}}) - \ell(t_{\text{original}})$

CrimsonSkyline.log_evidenceMethod
function log_evidence(r :: SamplingResults{LikelihoodWeighting})

Computes the log evidence (log partition function),

$\log Z \equiv \log p(x) \approx -\log N_{\text{samples}} + \log \sum_{n=1}^{N_{\text{samples}}} W_n.$

CrimsonSkyline.loglatentMethod
function loglatent(t :: Trace)

Computes the joint log probability of all latent variables in a trace, $\log p(t) - \ell(t)$.

CrimsonSkyline.loglikelihoodMethod
function loglikelihood(t :: Trace)

Computes and returns the log likelihood of the observed data under the model:

\[\ell(t) = \sum_{a:\ [a \in \text{keys}(t)] \wedge [\text{interpretation}(a) = \text{Standard}]} \log p(t[a])\]

CrimsonSkyline.logprob!Method
function logprob!(t :: Trace)

Computes the joint log probability of the trace and assigns it to t.logprob_sum.

CrimsonSkyline.logprobMethod
function logprob(rf::RandomField, x::Dict)

Evaluates the log probability of a set of values against the density described by the random field rf. The values x should have the format address => value.

CrimsonSkyline.logprobMethod
function logprob(t0 :: Trace, t1 :: Trace)

Computes the proposal log probability $q(t_1 | t_0)$.

This expression has two parts: log probability that is generated at the proposed site(s), and log probability that is generated at the sites that are present in t1 but not in t0.

CrimsonSkyline.logprobMethod
function logprob(t :: Trace)

Computes and returns the joint log probability of the trace:

\[\log p(t) = \sum_{a \in \text{keys}(t)}\log p(t[a])\]

CrimsonSkyline.lw_stepMethod
function lw_step(f, types::Tuple{DataType, DataType}, params...)

Perform one step of likelihood weighting – draw a single proposal from the prior and compute the log weight as equal to the likelihood. Returns a tuple (log weight, rval, trace).

CrimsonSkyline.lw_stepMethod
function lw_step(f, params...)

Perform one step of likelihood weighting – draw a single proposal from the prior and compute the log weight as equal to the likelihood. Returns a tuple (log weight, rval, trace).

CrimsonSkyline.mhMethod
function mh(f, types::Tuple{DataType, DataType}; params = (), burn = 1000, thin = 50, num_iterations = 10000)

Generic Metropolis algorithm using draws from the prior.

Args:

  • f: stochastic function. Must have call signature f(t :: Trace, params...)
  • params: additional arguments to pass to f.
  • burn: number of samples to discard at the beginning of the Markov chain
  • thin: keep only every thin-th draw. E.g., if thin = 100, only every 100-th trace will be kept.
  • num_iterations: total number of steps to take in the Markov chain
CrimsonSkyline.mhMethod
function mh(f; params = (), burn = 1000, thin = 50, num_iterations = 10000)

Generic Metropolis algorithm using draws from the prior.

Args:

  • f: stochastic function. Must have call signature f(t :: Trace, params...)
  • params: additional arguments to pass to f.
  • burn: number of samples to discard at the beginning of the Markov chain
  • thin: keep only every thin-th draw. E.g., if thin = 100, only every 100-th trace will be kept.
  • num_iterations: total number of steps to take in the Markov chain
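
A minimal usage sketch, reusing the illustrative model from the block example above:

results = mh(model; params = (data,), burn = 1000, thin = 50, num_iterations = 10000)
addresses(results)  # addresses appearing in the retained posterior traces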
CrimsonSkyline.mhMethod
function mh(f, qs :: A, addresses; params = (), burn = 100, thin = 10, num_iterations = 10000, inverse_verbosity = 100) where A <: AbstractArray

Generic Metropolis algorithm using user-defined proposal kernels, returning only a requested subset of addresses.

Args:

  • f: stochastic function. Must have call signature f(t :: Trace, params...)
  • qs: array-like of proposal kernels. Proposal kernels are applied sequentially in the order that they appear in this array. Proposal kernels must have the signature q(old_t :: Trace, new_t :: Trace, params...) where it must take in at least the same number of arguments in params as f.
  • addresses: only values sampled at these addresses will be saved in the values field of the BareResults struct returned.
  • params: additional arguments to pass to f and each of the proposal kernels.
  • burn: number of samples to discard at the beginning of the Markov chain
  • thin: keep only every thin-th draw. E.g., if thin = 100, only every 100-th trace will be kept.
  • num_iterations: total number of steps to take in the Markov chain
  • inverse_verbosity: every inverse_verbosity iterations, a status report will be logged.
CrimsonSkyline.mhMethod
function mh(f, qs :: A, types::Tuple{DataType,DataType}; params = (), burn = 100, thin = 10, num_iterations = 10000, inverse_verbosity = 100) where A <: AbstractArray

Generic Metropolis algorithm using user-defined proposal kernels.

Args:

  • f: stochastic function. Must have call signature f(t :: Trace, params...)
  • qs: array-like of proposal kernels. Proposal kernels are applied sequentially in the order that they appear in this array. Proposal kernels must have the signature q(old_t :: Trace, new_t :: Trace, params...) where it must take in at least the same number of arguments in params as f.
  • params: additional arguments to pass to f and each of the proposal kernels.
  • burn: number of samples to discard at the beginning of the Markov chain
  • thin: keep only every thin-th draw. E.g., if thin = 100, only every 100-th trace will be kept.
  • num_iterations: total number of steps to take in the Markov chain
  • inverse_verbosity: every inverse_verbosity iterations, a status report will be logged.
CrimsonSkyline.mhMethod
function mh(f, qs :: A; params = (), burn = 100, thin = 10, num_iterations = 10000, inverse_verbosity = 100) where A <: AbstractArray

Generic Metropolis algorithm using user-defined proposal kernels.

Args:

  • f: stochastic function. Must have call signature f(t :: Trace, params...)
  • qs: array-like of proposal kernels. Proposal kernels are applied sequentially in the order that they appear in this array. Proposal kernels must have the signature q(old_t :: Trace, new_t :: Trace, params...) where it must take in at least the same number of arguments in params as f.
  • params: additional arguments to pass to f and each of the proposal kernels.
  • burn: number of samples to discard at the beginning of the Markov chain
  • thin: keep only every thin-th draw. E.g., if thin = 100, only every 100-th trace will be kept.
  • num_iterations: total number of steps to take in the Markov chain
  • inverse_verbosity: every inverse_verbosity iterations, a status report will be logged.
CrimsonSkyline.mhMethod
function mh(rf::RandomField, qs::Vector{T}, val; burn=1000, thin=100, num_iterations=11000) where T

Metropolis Hastings algorithm for sampling from a random field using arbitrary proposal kernels.

Args:

  • rf: the random field from which to sample
  • qs: vector of proposal kernel callables; see documentation of mh_step for specification of proposal kernels
  • val: initial guess with which to initialize MH, must be a dict with format address => value.
  • burn: number of samples to discard at the beginning of the Markov chain
  • thin: keep only every thin-th draw. E.g., if thin = 100, only every 100-th trace will be kept.
  • num_iterations: total number of steps to take in the Markov chain
CrimsonSkyline.mh_stepMethod
function mh_step(rf::RandomField, q, x, log_prob_rf_x)

A Metropolis step to sample from a random field using an arbitrary proposal kernel.

Args:

  • rf: a RandomField from which to sample

  • q: a proposal kernel (see the sketch after this argument list). This must be a callable that satisfies the following requirements:

    • q(x) returns a new value x_prime that is generated using the input parameters x, i.e., $x' \sim q(x'|x)$. For example, q(x) = rand(Normal(x, 0.25))
    • q(x_prime, x) scores (computes the log probability of) x_prime against x, i.e., computes $\log q(x' | x)$
  • x: a dict with format address => value, the current sampled value.

  • log_prob_rf_x: the log probability of x under the random field.
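
A sketch of a proposal kernel satisfying both requirements (the Normal random-walk step with scale 0.25 is illustrative):

using Distributions

q(x) = rand(Normal(x, 0.25))                      # draw x' ~ q(x'|x)
q(x_prime, x) = logpdf(Normal(x, 0.25), x_prime)  # compute log q(x'|x)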

CrimsonSkyline.mh_stepMethod
function mh_step(t :: Trace, f, q, types::Tuple{DataType,DataType}; params = (), return_val :: Bool = false)

A generic Metropolis step using an arbitrary proposal kernel.

Given a trace t, a stochastic function f with signature f(t :: Trace, params...), and a stochastic function q with signature q(old_trace :: Trace, new_trace :: Trace, params...), generates a proposal from q and accepts it based on the log acceptance probability:

$\log \alpha = \log p(t_{\text{new}}) - \log q(t_{\text{new}}|t_{\text{old}}) - [\log p(t_{\text{old}}) - \log q(t_{\text{old}} | t_{\text{new}})].$

CrimsonSkyline.mh_stepMethod
function mh_step(t :: Trace, f, q; params = (), return_val :: Bool = false)

A generic Metropolis step using an arbitrary proposal kernel.

Given a trace t, a stochastic function f with signature f(t :: Trace, params...), and a stochastic function q with signature q(old_trace :: Trace, new_trace :: Trace, params...), generates a proposal from q and accepts it based on the log acceptance probability:

$\log \alpha = \log p(t_{\text{new}}) - \log q(t_{\text{new}}|t_{\text{old}}) - [\log p(t_{\text{old}}) - \log q(t_{\text{old}} | t_{\text{new}})].$

CrimsonSkyline.mh_stepMethod
function mh_step(t :: Trace, f, types::Tuple{DataType, DataType}; params = (), return_val :: Bool = false)

An independent prior sample Metropolis step.

Given a trace t and stochastic function f depending on params..., generates proposals from prior draws and accepts based on the likelihood ratio.

CrimsonSkyline.mh_stepMethod
function mh_step(t :: Trace, f; params = (), return_val :: Bool = false)

An independent prior sample Metropolis step.

Given a trace t and stochastic function f depending on params..., generates proposals from prior draws and accepts based on the likelihood ratio.

CrimsonSkyline.nestedMethod
function nested(f, replace_fn; params = (), num_points :: Int64 = 1)

Generic implementation of nested sampling (Skilling, "Nested sampling for general Bayesian computation", Bayesian Analysis, 2006). The number of sampling iterations depends on num_points, denoted $N$, and on the empirical entropy of the sampling distribution, given at the $n$-th iteration by $H_n \approx \sum_k \hat p_k \log \hat p_k^{-1}$, where $\hat p_k = L_k w_k / Z_k$, $L_k$ is the likelihood value, $w_k$ is the difference in prior mass, and $Z_k$ is the current estimate of the partition function. Sampling stops at iteration $\min_n \{n > 0: n > NH_n\}$.

Args:

  • f: stochastic function. Must have signature f(t :: Trace, params...)
  • replace_fn: function that returns a tuple (new_trace :: Trace, new_log_likelihood :: Float64). The input signature of this function must be replace_fn(f :: F, log_likelihood :: Float64, params...) where F <: Function. It must guarantee that new_log_likelihood > log_likelihood.
  • params: any parameters to pass to f
  • num_points: the number of likelihood points to keep track of
CrimsonSkyline.nestedMethod
nested(f; params = (), num_points :: Int64 = 1)

Run nested sampling using internal rejection method.
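
A minimal usage sketch, reusing the illustrative model from the block example above:

results = nested(model; params = (data,), num_points = 100)  # iteration count is set adaptively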

CrimsonSkyline.nodeMethod
function node(value, address :: A, dist :: D, is_obs :: Bool, i :: Interpretation) where {A, D}

Outer constructor for Node where data is passed during construction. Data type is inferred from the passed data.

CrimsonSkyline.nodeMethod
function node(value, address :: A, dist :: D, is_obs :: Bool, i :: Interpretation) where {A, D}

Outer constructor for Node where data is passed during construction. Data type is inferred from the passed data.

CrimsonSkyline.nodeMethod
function node(T :: DataType, address :: A, dist :: D, is_obs :: Bool, i :: Interpretation) where {A, D}

Outer constructor for Node where no data is passed during construction.

CrimsonSkyline.nodeMethod
function node(T :: DataType, address :: A, dist :: D, is_obs :: Bool, i :: Interpretation) where {A, D}

Outer constructor for Node where no data is passed during construction.

CrimsonSkyline.normalized_weightsMethod
function normalized_weights(r :: SamplingResults{LikelihoodWeighting})

Computes the normalized weights $w_n$ from unnormalized weights $W_n$:

$w_n = W_n / p(x) = \exp\{\ell(t_n) - \log Z\}.$

CrimsonSkyline.observeMethod
function observe(t :: Trace, a, d, s; pa = ())

If s is not nothing, this is an alias for calling sample with standard interpretation. Otherwise, it is an alias for calling sample with nonstandard interpretation.

CrimsonSkyline.pa_from_traceMethod
function pa_from_trace(t :: Trace, pa)

Collects nodes in trace corresponding to an iterable of parent addresses pa.

CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, s::Int64, i::Conditioned; pa = ()) where F<:Function

Plate over conditioned variables.

CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, s::Int64, i::Blocked; pa = ()) where F<:Function

Plate over blocked variables.

CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, s::Int64, i::Nonstandard; pa = ()) where F<:Function

Plate over latent variables.

CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, s::Int64, i::Replayed; pa = ()) where F<:Function

Plate over replayed variables. Note that this method assumes and does not check that the value to be replayed v satisfies length(v) == s.

CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, s::Int64; pa = ()) where F<:Function

Sample or observe a vector of random variables at a single site instead of multiple. This can speed up inference since the number of sites in the model will no longer scale with dataset size (though numerical value computation is still linear in dataset size).

Example usage: instead of

vals = [sample(t, "val $i", Geometric()) for i in 1:N]

we can write

vals = plate(t, sample, "val", Geometric(), N)

Mathematically, this is equivalent to the product $p(z) = \prod_n p(z_n)$ and treating it as the single object $p(z)$ instead of the $N$ objects $p(z_n)$.

CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, v::Vector{T}, params; pa = ()) where {T, F<:Function}

Plate over observed variables with different values but identical distribution, i.e., $p(x|z) = \prod_n p(x_n | z_n)$. This is as opposed to plate(t::Trace, op::F, a, d, v::Vector{T}; pa = ()), which is equivalent to $p(x|z) = \prod_n p(x_n | z)$.

Each element of params must have the same length as v: element $d$ of params is a vector holding the $d$-th distribution-parameter component for every datapoint, i.e., $z = (z_1, ..., z_D)$ where each $z_d$ has length $N$ (the number of observed datapoints) and $D$ is the number of components in the distribution's parameterization.

E.g., replace

locs = sample(t, "locs", MvNormal(D, 1.0))
for (i, (loc, d)) in enumerate(zip(locs, data))
    observe(t, "data $i", Normal(loc, 1.0), d)
end

with

locs = sample(t, "locs", MvNormal(D, 1.0))
plate(t, observe, "data", Normal, data, (locs, ones(D)))
CrimsonSkyline.plateMethod
function plate(t::Trace, op::F, a, d, v::Vector{T}; pa = ()) where {T, F<:Function}

Plate over observed variables, i.e., a plated component of model likelihood. v is the vector of observations, while op is likely observe.

Example usage: instead of

for (i, d) in enumerate(data)
    observe(t, "data $i", Normal(loc, scale), d)
end

we can write

plate(t, observe, "data", Normal(loc, scale), data)
CrimsonSkyline.proposeMethod
propose(t :: Trace, a, d)

Propose a value for the address a in trace t from the distribution d.

CrimsonSkyline.rejectionMethod
function rejection(f, log_l :: Float64, params...)

Samples from the prior with the hard likelihood constraint $\log L_k >$ log_l.

Args:

  • f: stochastic function. Must have signature f(t :: Trace, params...)
  • log_l: current log likelihood threshold
  • params: any additional arguments to pass to f
CrimsonSkyline.replaceMethod
function replace(f, r :: Dict)

Given a mapping r from addresses to distribution-like objects (currently Distribution objects or Array{Any, 1}s), replaces the current distributions at that set of addresses with this set of distributions. Returns a function g with return signature (t :: Trace, rval), where rval is the return value of f.

CrimsonSkyline.replaceMethod
function replace(t :: Trace, r :: Dict)

Given a mapping r from addresses to distribution-like objects (currently Distribution objects or Array{Any, 1}s), replaces the current distributions at that set of addresses with this set of distributions. Returns the modified trace.

CrimsonSkyline.replayMethod
function replay(f, t :: T) where T <: Trace

Given a stochastic function f and a trace t, makes sample calls behave as though they had sampled the values in t at the corresponding addresses.

Returns a tuple (t_new, g), where t_new is a trace and g is a function. The function signature of g is the same as that of f with the first argument removed; that is, if f(t :: Trace, params...), then g(params...). Computation is delayed, so each of the latent nodes in t_new has interpretation = REPLAYED. Calling g(params...) executes the computation and each latent node in t_new reverts to its original interpretation.
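
A minimal sketch, reusing the illustrative model from the block example above and a trace t from an earlier execution of that model:

t_new, g = replay(model, t)  # latent nodes in t_new carry the Replayed interpretation
rval = g(data)               # runs the model, reusing the values stored in t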

CrimsonSkyline.rewriteMethod
function rewrite(f, t :: T, r :: Dict) where T <: Trace

Rewrites the history of the trace to make it appear as if the values in the trace were sampled at the addresses in the keys of r from the corresponding distributions in the values of r. Returns a function with call signature g(params...) that returns (t :: Trace, rval), where rval is the return value of f.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, params, i :: Blocked; pa = ())

Samples from d passing the optional arguments params, deletes the node stored at address a from trace t, and returns the sampled value.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, f, v, i :: Deterministic; pa = ())

Creates a deterministic node mapping the tuple of data v through function f, storing the value in trace t at address a.

  1. Infers input type from v
  2. Maps tuple of data v through function f, yielding r = f(v...)
  3. Creates a deterministic node and stores it in t at address a
  4. Optionally adds nodes corresponding to the addresses in pa as parent nodes
  5. Returns r
CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, params, i :: Nonstandard; pa = ())

Samples from distribution d into trace t at address a.

  1. Samples a value from d passing the optional arguments params
  2. Creates a sample node
  3. Adds the sample node to trace t at address a
  4. Optionally adds nodes corresponding to the addresses in pa as parent nodes
  5. Returns the sampled value
CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, params, i :: Replayed; pa = ())

Replays the sampled node through the trace.

  1. If a is not in t's address set, calls sample(t, a, d, NONSTANDARD; pa = pa).
  2. Creates a sample node that copies the value from the last node stored in the trace at address a.
  3. Adds the sample node to trace t at address a
  4. Optionally adds nodes corresponding to the addresses in pa as parent nodes
  5. Resets the node's interpretation to the original interpretation
CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, s, i :: Standard; pa = ())

Scores an observed value s against the distribution d, stores the value in trace t at address a, and optionally adds nodes corresponding to the addresses in pa as parent nodes.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, params, ii :: Array{Interpretation, 1}; pa = ())

Sequentially apply sample statements with interpretations as given in ii. This is used to depth-first traverse the interpretation graph.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, params; pa = ())

If a is in the set of trace addresses, calls sample using t[a]'s interpretation. Otherwise, calls sample using nonstandard interpretation.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, i :: Blocked; pa = ())

Samples from d, deletes the node stored at address a from trace t, and returns the sampled value.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, i :: Nonstandard; pa = ())

Samples from distribution d into trace t at address a.

  1. Samples a value from d
  2. Creates a sample node
  3. Adds the sample node to trace t at address a
  4. Optionally adds nodes corresponding to the addresses in pa as parent nodes
  5. Returns the sampled value
CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, i :: Replayed; pa = ())

Replays the sampled node through the trace.

  1. If a is not in t's address set, calls sample(t, a, d, NONSTANDARD; pa = pa).
  2. Creates a sample node that copies the value from the last node stored in the trace at address a.
  3. Adds the sample node to trace t at address a
  4. Optionally adds nodes corresponding to the addresses in pa as parent nodes
  5. Resets the node's interpretation to the original interpretation
CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, i :: Union{Standard,Conditioned}; pa = ())

Scores an observed value against the distribution d, stores the value in trace t at address a, and optionally adds nodes corresponding to the addresses in pa as parent nodes.

This method is used by the condition effect. It will probably not be used by most users.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d, ii :: Array{Interpretation, 1}; pa = ())

Sequentially apply sample statements with interpretations as given in ii. This is used to depth-first traverse the interpretation graph.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, d; pa = ())

If a is in the set of trace addresses, calls sample using t[a]'s interpretation. Otherwise, calls sample using nonstandard interpretation.

CrimsonSkyline.sampleMethod
function sample(r :: SamplingResults{LikelihoodWeighting}, k, n :: Int)

Draws n samples from the empirical marginal posterior at address k.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, r :: NonparametricSamplingResults{I}; pa = ()) where I <: InferenceType

Treat a marginal site of a SamplingResults as a distribution, sampling from it into a trace.

CrimsonSkyline.sampleMethod
function sample(t :: Trace, a, r::ParametricSamplingResults{I}; pa = ()) where I <: InferenceType

Treat a marginal site of a SamplingResults as a distribution, sampling from it into a trace.

CrimsonSkyline.saveMethod
function save(r :: SamplingResults, f)

Saves a SamplingResults to disk in the directory f.

CrimsonSkyline.saveMethod
function save(t :: Trace, f)

Saves a trace to disk at the filepath f.

CrimsonSkyline.to_parametricMethod
function to_parametric(r::NonparametricSamplingResults{I}) where I<:InferenceType

Converts a nonparametric sampling results object into one that additionally contains a mapping from addresses to distributions.

CrimsonSkyline.to_tableMethod
function to_table(t :: Trace)

Turns a trace into a JuliaDB table. Does not store parent / child relationships.

CrimsonSkyline.traceMethod
trace(A, T)

This is the recommended way to construct a new typed trace. A is the address type, T is the value type.

CrimsonSkyline.transformMethod
function transform(t :: Trace, a, f :: F, v; pa = ()) where F <: Function

Alias for sample(t, a, f, v, DETERMINISTIC; pa = pa).

CrimsonSkyline.updateMethod
function update(f, r :: SamplingResults{I}) where I <: InferenceType

Given a stochastic function f and a SamplingResults r, update the prior predictive to the posterior predictive by jointly replacing all latent sample sites with the joint empirical posterior. Returns a stochastic function g with the same call signature as f. This function modifies in place the trace passed to it as its first argument.
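
A minimal sketch of moving from the prior predictive to the posterior predictive, reusing the illustrative model from the block example above and a results object from an earlier mh run:

posterior_predictive = update(model, results)
t = trace(String, Float64)     # fresh trace; modified in place by the call below
posterior_predictive(t, data)  # latent sites now draw from the joint empirical posterior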

Distributions.logpdfMethod
function Distributions.logpdf(r :: A, v) where A <: AbstractArray

Interprets an array of objects as a delta distribution over those objects. If v is in the support set, returns $-\log |r|$. Otherwise, returns $-\infty$.