Bayesian Online Changepoint Detection (BOCD): Principles & Code
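
Steps 3–7 of `bocd` below implement the run-length recursion from Adams & MacKay (2007). Writing $r_t$ for the run length at time $t$, $H$ for the constant hazard, and $x_t^{(r)}$ for the data in the current run, the joint over run length and data is updated as

$$
P(r_t, x_{1:t}) = \sum_{r_{t-1}} \underbrace{P\big(x_t \mid r_{t-1}, x_t^{(r)}\big)}_{\text{predictive } \pi}\; \underbrace{P(r_t \mid r_{t-1})}_{\text{hazard}}\; P(r_{t-1}, x_{1:t-1}),
$$

where $P(r_t = r_{t-1} + 1 \mid r_{t-1}) = 1 - H$ (the run grows) and $P(r_t = 0 \mid r_{t-1}) = H$ (a changepoint). Dividing by the evidence $P(x_{1:t}) = \sum_{r_t} P(r_t, x_{1:t})$ gives the run-length posterior stored in row $t$ of `R`.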

""" 参考:https://github.com/gwgundersen/bocd/blob/master/bocd.py """
"""============================================================================ Python implementation of Bayesian online changepoint detection for a normal model with unknown mean parameter. For details, see Adams & MacKay 2007: "Bayesian Online Changepoint Detection" https://arxiv.org/abs/0710.3742 This code implements the figure in the following blog post: http://gregorygundersen.com/blog/2019/08/13/bocd/ Author: Gregory Gundersen ============================================================================"""

import matplotlib.pyplot as plt
from   matplotlib.colors import LogNorm
import numpy as np
from   scipy.stats import norm


# -----------------------------------------------------------------------------

def bocd(data, model, hazard):
    """Return run length posterior using Algorithm 1 in Adams & MacKay 2007."""
    # 1. Initialize lower triangular matrix representing the posterior as a
    # function of time. Model parameters are initialized in the model class.
    T = len(data)
    R = np.zeros((T + 1, T + 1))
    R[0, 0] = 1
    message = np.array([1])

    for t in range(1, T + 1):

        # 2. Observe new datum.
        x = data[t - 1]

        # 3. Evaluate predictive probabilities.
        pis = model.pred_prob(t, x)

        # 4. Calculate growth probabilities.
        growth_probs = pis * message * (1 - hazard)

        # 5. Calculate changepoint probabilities.
        cp_prob = sum(pis * message * hazard)

        # 6. Calculate evidence
        new_joint = np.append(cp_prob, growth_probs)

        # 7. Determine run length distribution.
        R[t, :t + 1] = new_joint
        evidence = sum(new_joint)
        R[t, :] /= evidence

        # 8. Update sufficient statistics.
        model.update_statistics(t, x)

        # Setup message passing.
        message = new_joint

    return R


# -----------------------------------------------------------------------------

# Implementation of a Gaussian model with known precision. See Kevin Murphy's
# "Conjugate Bayesian analysis of the Gaussian distribution" for a complete
# derivation of the model:
#
# https://www.cs.ubc.ca/~murphyk/Papers/bayesGauss.pdf
#
class NormalKnownPrecision:

    def __init__(self, mean0, prec0):
        """Initialize model parameters. """
        self.mean0 = mean0
        self.prec0 = prec0
        self.mean_params = np.array([mean0])
        self.prec_params = np.array([prec0])

    def pred_prob(self, t, x):
        """Compute predictive probabilities for each run length hypothesis. """
        # The posterior predictive is Gaussian with variance 1/tau + 1, so its
        # square root is passed to `norm.pdf` as the standard deviation.
        d = lambda x, mu, tau: norm.pdf(x, mu, np.sqrt(1 / tau + 1))
        return np.array([d(x, self.mean_params[i], self.prec_params[i])
                         for i in range(t)])

    def update_statistics(self, t, x):
        """Update sufficient statistics. """
        # `offsets` is just a clever way to +1 all the sufficient statistics.
        offsets = np.arange(1, t + 1)
        new_mean_params = (self.mean_params * offsets + x) / (offsets + 1)
        new_prec_params = self.prec_params + 1
        self.mean_params = np.append([self.mean0], new_mean_params)
        self.prec_params = np.append([self.prec0], new_prec_params)
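
# For reference: with unit observation precision and a N(mean0, 1/prec0) prior
# on the mean, each observation adds 1 to the posterior precision, and the
# one-step-ahead predictive for run-length hypothesis i is Gaussian with mean
# mean_params[i] and variance 1/prec_params[i] + 1, which is exactly what
# `pred_prob` evaluates above.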


# -----------------------------------------------------------------------------

def generate_data(mean0, prec0, T, cp_prob):
    """Generate T observations with a piecewise-constant mean: a new segment
    starts at each step with constant probability `cp_prob`, and segment means
    are drawn from the N(mean0, 1/prec0) hyperprior. """
    means = [mean0]
    data = []
    cpts = []
    for t in range(T):
        if np.random.random() < cp_prob:
            # `np.random.normal` expects a standard deviation, so the new
            # segment mean is drawn with scale sqrt(1 / prec0).
            mean = np.random.normal(mean0, np.sqrt(1 / prec0))
            means.append(mean)
            cpts.append(t)
        data.append(np.random.normal(means[-1], 1))
    return data, cpts


# -----------------------------------------------------------------------------

def plot_posterior(T, data, R, cpts):
    """Plot data, run length posterior, and groundtruth changepoints. """
    fig, axes = plt.subplots(2, 1, figsize=(20, 10))
    ax1, ax2 = axes

    ax1.scatter(range(0, T), data)
    ax1.plot(range(0, T), data)
    ax1.set_xlim([0, T])
    ax1.margins(0)

    # `color_norm` avoids shadowing the `norm` imported from scipy.stats.
    color_norm = LogNorm(vmin=0.0001, vmax=1)
    ax2.imshow(np.rot90(R), aspect='auto', cmap='gray_r', norm=color_norm)
    ax2.set_xlim([0, T])
    # This just reverses the y-tick marks.
    ticks = list(range(0, T+1, 50))
    ax2.set_yticks(ticks)
    ax2.set_yticklabels(ticks[::-1])
    ax2.margins(0)

    for cpt in cpts:
        ax1.axvline(cpt, c='r', ls='dotted')
        ax2.axvline(cpt, c='r', ls='dotted')

    plt.tight_layout()
    plt.show()


# -----------------------------------------------------------------------------

if __name__ == '__main__':
    T = 300         # Number of observations.
    cp_prob = 1/50  # Constant prior on changepoint probability.
    mean0 = 0       # Prior on Gaussian mean.
    prec0 = 0.2     # Prior on Gaussian precision.

    data, cpts = generate_data(mean0, prec0, T, cp_prob)
    model = NormalKnownPrecision(mean0, prec0)
    R = bocd(data=data, model=model, hazard=1/50)
    # The model becomes numerically unstable for large `T` because the mass is
    # distributed across a support whose size is increasing.
    for row in R:
        assert np.isclose(np.sum(row), 1)
    plot_posterior(T, data, R, cpts)
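
As the comment above notes, the recursion becomes numerically unstable for large `T`; a common remedy is to run it in log space. Below is a minimal sketch of such a variant, assuming the script above is in scope. The names `bocd_log`, `NormalKnownPrecisionLog`, and the `log_pred_prob` method are not part of the original code; they are illustrative additions.

from scipy.special import logsumexp


class NormalKnownPrecisionLog(NormalKnownPrecision):
    """Same model as above, but exposing a log-density predictive."""

    def log_pred_prob(self, t, x):
        # log N(x; mu_i, 1/tau_i + 1) for each run length hypothesis i.
        return np.array([norm.logpdf(x, self.mean_params[i],
                                     np.sqrt(1 / self.prec_params[i] + 1))
                         for i in range(t)])


def bocd_log(data, model, hazard):
    """Log-space variant of `bocd`; returns the same run length posterior."""
    T = len(data)
    log_R = np.full((T + 1, T + 1), -np.inf)
    log_R[0, 0] = 0.0                      # log P(r_0 = 0) = log 1
    log_message = np.array([0.0])          # log of the run length posterior
    log_h, log_1mh = np.log(hazard), np.log(1 - hazard)

    for t in range(1, T + 1):
        x = data[t - 1]
        log_pis = model.log_pred_prob(t, x)

        log_growth = log_pis + log_message + log_1mh       # run length grows
        log_cp = logsumexp(log_pis + log_message + log_h)  # run length resets
        new_log_joint = np.append(log_cp, log_growth)

        # Normalize in log space so every quantity stays on a bounded scale.
        log_message = new_log_joint - logsumexp(new_log_joint)
        log_R[t, :t + 1] = log_message

        model.update_statistics(t, x)

    return np.exp(log_R)

Calling, say, bocd_log(data, NormalKnownPrecisionLog(mean0, prec0), 1/50) would then mirror the bocd call in __main__, but each row of the result keeps summing to one even for long series.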

References:

  1. Adams & MacKay (2007), "Bayesian Online Changepoint Detection", https://arxiv.org/abs/0710.3742
  2. Gregory Gundersen, BOCD implementation (GitHub), https://github.com/gwgundersen/bocd