
1. Bayesian Regression: A NumPy Implementation

Posted: 2022-12-21 18:55:55

import numpy as np
from scipy.stats import chi2, multivariate_normal

# Project-specific helper utilities (only polynomial_features is used in this snippet)
from utils.data_metrics import mean_squared_error
from utils.data_manipulation import train_test_split, polynomial_features



class BayesianRegression(object):
    """贝叶斯回归模型。如果指定了poly_degree,那么特征将被转换为多项式基函数。
    将被转换为多项式基函数,从而实现多项式的 回归。
    假设权重为正态先验和似然,缩放后的逆卡方先验和似然为正态。
    秩平方先验和权重方差的似然。
    Parameters:
    -----------
    n_draws: float
        从参数的后验中提取的模拟次数。
    mu0: array
        参数的先验正态分布的均值。
    omega0: array
        参数的先验正态分布的精度矩阵。
    nu0: float
        先验标度反卡方分布的自由度。
    sigma_sq0: float
        先验标度反卡方分布的尺度参数。
    poly_degree: int
        特征应被转换为的多项式程度。允许 进行多项式回归。
    cred_int: float
        可信区间(ETI在本例中)。95 => 参数后验的95%可信区间。
    """
    def __init__(self, n_draws, mu0, omega0, nu0, sigma_sq0, poly_degree=0, cred_int=95):
        self.w = None
        self.n_draws = n_draws
        self.poly_degree = poly_degree
        self.cred_int = cred_int

        # Prior parameters
        self.mu0 = mu0
        self.omega0 = omega0
        self.nu0 = nu0
        self.sigma_sq0 = sigma_sq0

    # Allows simulation from the scaled inverse chi-squared distribution. Assumes
    # that the variance is distributed according to this distribution:
    #   https://en.wikipedia.org/wiki/Scaled_inverse_chi-squared_distribution
    def _draw_scaled_inv_chi_sq(self, n, df, scale):
        X = chi2.rvs(size=n, df=df)
        sigma_sq = df * scale / X
        return sigma_sq
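    # Note on the draw above: if V ~ chi2(df), then df * scale / V follows a
    # scaled inverse chi-squared distribution with df degrees of freedom and
    # scale parameter `scale` (its mean is df * scale / (df - 2) for df > 2).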

    def fit(self, X, y):
        # If polynomial transformation
        if self.poly_degree:
            X = polynomial_features(X, degree=self.poly_degree)
        n_samples, n_features = np.shape(X)
        X_X = X.T.dot(X)
        # Least squares approximation of beta
        beta_hat = np.linalg.pinv(X_X).dot(X.T).dot(y)
        # The posterior parameters can be determined analytically since we
        # assume conjugate priors for the likelihoods.
        # Normal prior / likelihood => Normal posterior
        mu_n = np.linalg.pinv(X_X + self.omega0).dot(X_X.dot(beta_hat)+self.omega0.dot(self.mu0))
        omega_n = X_X + self.omega0
        # Scaled inverse chi-squared prior / likelihood => scaled inverse chi-squared posterior
        nu_n = self.nu0 + n_samples
        sigma_sq_n = (1.0/nu_n)*(self.nu0*self.sigma_sq0 + \
            (y.T.dot(y) + self.mu0.T.dot(self.omega0).dot(self.mu0) - mu_n.T.dot(omega_n.dot(mu_n))))
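
        # For reference, the conjugate updates computed above (standard
        # Normal / scaled inverse chi-squared posterior formulas):
        #   omega_n    = X'X + omega0
        #   mu_n       = inv(X'X + omega0) (X'X beta_hat + omega0 mu0)
        #   nu_n       = nu0 + n_samples
        #   sigma_sq_n = (nu0 * sigma_sq0 + y'y + mu0' omega0 mu0
        #                 - mu_n' omega_n mu_n) / nu_n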

        # Simulate n_draws parameter values from the posterior
        beta_draws = np.empty((self.n_draws, n_features))
        for i in range(self.n_draws):
            sigma_sq = self._draw_scaled_inv_chi_sq(n=1, df=nu_n, scale=sigma_sq_n)
            # np.ravel makes the draw work whether mu_n is 1-D or a column vector
            beta = multivariate_normal.rvs(size=1, mean=np.ravel(mu_n), cov=sigma_sq*np.linalg.pinv(omega_n))
            # Save the parameter draw
            beta_draws[i, :] = beta

        # Use the mean of the simulated parameters as the estimate used for prediction
        self.w = np.mean(beta_draws, axis=0)

        # Lower and upper boundary of the credible interval
        l_eti = 50 - self.cred_int/2
        u_eti = 50 + self.cred_int/2
        self.eti = np.array([[np.percentile(beta_draws[:,i], q=l_eti), np.percentile(beta_draws[:,i], q=u_eti)] \
                                for i in range(n_features)])

    def predict(self, X, eti=False):
        # If polynomial transformation
        if self.poly_degree:
            X = polynomial_features(X, degree=self.poly_degree)
        y_pred = X.dot(self.w)
        # If the lower and upper boundaries of the equal tail interval should be returned
        if eti:
            lower_w = self.eti[:, 0]
            upper_w = self.eti[:, 1]
            y_lower_pred = X.dot(lower_w)
            y_upper_pred = X.dot(upper_w)
            return y_pred, y_lower_pred, y_upper_pred
        return y_pred
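
The following is a minimal usage sketch, assuming the class above has been defined (or imported) as-is. The data is synthetic and the prior values are purely illustrative; poly_degree is left at its default of 0 and a bias column is added by hand, so the example does not depend on the polynomial_features helper.

import numpy as np

# Synthetic linear data: y = 1.5 + 3.0 * x + Gaussian noise (illustrative only)
np.random.seed(0)
x = np.linspace(-1, 1, 100)
X = np.column_stack([np.ones(100), x])        # bias column + single feature
y = (1.5 + 3.0 * x + np.random.normal(scale=0.3, size=100)).reshape(-1, 1)

# Weak priors: zero-mean weights with low precision, vague variance prior
n_features = X.shape[1]
mu0 = np.zeros((n_features, 1))
omega0 = np.diag([1e-4] * n_features)

model = BayesianRegression(n_draws=2000, mu0=mu0, omega0=omega0,
                           nu0=4, sigma_sq0=1.0, cred_int=95)
model.fit(X, y)
y_pred, y_lower, y_upper = model.predict(X, eti=True)

print(model.w)      # posterior mean of the weights (should be close to [1.5, 3.0])
print(model.eti)    # per-weight 95% equal tail interval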

 

From: https://www.cnblogs.com/zhangxianrong/p/16996935.html
