Skip to content

Develop metatree #69

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 28 commits into from
Feb 26, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
72c06ef
Forgive feature assignment any number of times
yuta-nakahara Jan 17, 2023
96212b5
Add calc_log_marginal_likelihood
yuta-nakahara Feb 11, 2023
cf46a2e
Merge pull request #62 from yuta-nakahara/develop-metatree-bernoulliML
yuta-nakahara Feb 11, 2023
239e373
Add batch updating
yuta-nakahara Feb 12, 2023
10851e2
bug fix in set_h0(hn)_params
yuta-nakahara Feb 14, 2023
472a4ef
Modify visualization
yuta-nakahara Feb 14, 2023
c9f29c1
Modify calc_log_marginal_likelihood
yuta-nakahara Feb 15, 2023
66e1162
calc_log_marginal_likelihood for normal
1jonao Feb 21, 2023
09f38b9
Delete test.py
yuta-nakahara Feb 22, 2023
5e28844
Delete test_normal.py
yuta-nakahara Feb 22, 2023
e8e3f66
Merge pull request #63 from yuta-nakahara/develop-metatree-normal
yuta-nakahara Feb 22, 2023
2d745eb
Update _linearregression.py
yuta-nakahara Feb 25, 2023
d40ecd9
Update _categorical.py
yuta-nakahara Feb 25, 2023
980d40f
Update _poisson.py
yuta-nakahara Feb 25, 2023
0fc5ee0
Update _exponential.py
yuta-nakahara Feb 25, 2023
eda3a3f
Add calc_log_marginal_likelihood in categorical
yuta-nakahara Feb 26, 2023
8e3015b
Merge pull request #65 from yuta-nakahara/develop-metatree-categorical
yuta-nakahara Feb 26, 2023
ec3c006
Add calc_log_marginal_likelihood in exponential
yuta-nakahara Feb 26, 2023
13fbe98
Merge pull request #66 from yuta-nakahara/develop-metatree-exponential
yuta-nakahara Feb 26, 2023
bed1301
Add calc_log_marginal_likelihood in poisson
yuta-nakahara Feb 26, 2023
210952b
Update _poisson.py
yuta-nakahara Feb 26, 2023
89ede3c
Merge pull request #67 from yuta-nakahara/develop-metatree-poisson
yuta-nakahara Feb 26, 2023
790048d
Update _poisson.py
yuta-nakahara Feb 26, 2023
911fc1e
Add calc_log_marginal_likelihood in linearregression
yuta-nakahara Feb 26, 2023
7795a55
Merge pull request #68 from yuta-nakahara/develop-metatree-linearregression
yuta-nakahara Feb 26, 2023
0fe39ad
Remove sequential algorithm
yuta-nakahara Feb 26, 2023
8dbaf21
Update _linearregression.py
yuta-nakahara Feb 26, 2023
896265f
Update _normal.py
yuta-nakahara Feb 26, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion bayesml/bernoulli/_bernoulli.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import warnings
import numpy as np
from scipy.stats import beta as ss_beta
# from scipy.stats import betabinom as ss_betabinom
from scipy.special import gammaln
import matplotlib.pyplot as plt

from .. import base
Expand Down Expand Up @@ -486,3 +486,18 @@ def pred_and_update(self,x,loss="squared"):
prediction = self.make_prediction(loss=loss)
self.update_posterior(x)
return prediction

def calc_log_marginal_likelihood(self):
    """Calculate log marginal likelihood

    The marginal likelihood of the Beta-Bernoulli model is the ratio of
    the posterior and prior Beta normalizing constants, i.e.
    B(hn_alpha, hn_beta) / B(h0_alpha, h0_beta), returned in log space.

    Returns
    -------
    log_marginal_likelihood : float
        The log marginal likelihood.
    """
    # log B(a, b) = lgamma(a) + lgamma(b) - lgamma(a + b)
    log_beta_prior = (
        gammaln(self.h0_alpha)
        + gammaln(self.h0_beta)
        - gammaln(self.h0_alpha + self.h0_beta)
    )
    log_beta_posterior = (
        gammaln(self.hn_alpha)
        + gammaln(self.hn_beta)
        - gammaln(self.hn_alpha + self.hn_beta)
    )
    return log_beta_posterior - log_beta_prior
14 changes: 14 additions & 0 deletions bayesml/categorical/_categorical.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import warnings
import numpy as np
from scipy.stats import dirichlet as ss_dirichlet
from scipy.special import gammaln
import matplotlib.pyplot as plt

from .. import base
Expand Down Expand Up @@ -553,3 +554,16 @@ def pred_and_update(self, x, loss="squared",onehot=True):
prediction = self.make_prediction(loss,onehot)
self.update_posterior(x,onehot)
return prediction

def calc_log_marginal_likelihood(self):
    """Calculate log marginal likelihood

    For the Dirichlet-categorical model this is the log ratio of the
    posterior and prior Dirichlet normalizing constants.

    Returns
    -------
    log_marginal_likelihood : float
        The log marginal likelihood.
    """
    # log of the multivariate Beta function:
    # log B(alpha_vec) = sum_i lgamma(alpha_i) - lgamma(sum_i alpha_i)
    log_norm_prior = gammaln(self.h0_alpha_vec).sum() - gammaln(self.h0_alpha_vec.sum())
    log_norm_posterior = gammaln(self.hn_alpha_vec).sum() - gammaln(self.hn_alpha_vec.sum())
    return log_norm_posterior - log_norm_prior
14 changes: 14 additions & 0 deletions bayesml/exponential/_exponential.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import warnings
import numpy as np
from scipy.stats import expon as ss_expon, gamma as ss_gamma, lomax as ss_lomax
from scipy.special import gammaln
import matplotlib.pyplot as plt

from .. import base
Expand Down Expand Up @@ -481,3 +482,16 @@ def pred_and_update(self,x,loss="squared"):
prediction = self.make_prediction(loss=loss)
self.update_posterior(x)
return prediction

def calc_log_marginal_likelihood(self):
    """Calculate log marginal likelihood

    For the Gamma-exponential model the marginal likelihood is
    [Gamma(hn_alpha) / Gamma(h0_alpha)] * h0_beta**h0_alpha / hn_beta**hn_alpha,
    returned in log space.

    Returns
    -------
    log_marginal_likelihood : float
        The log marginal likelihood.
    """
    # log of the Gamma-distribution normalizer: a*log(b) - lgamma(a)
    log_norm_prior = self.h0_alpha * np.log(self.h0_beta) - gammaln(self.h0_alpha)
    log_norm_posterior = self.hn_alpha * np.log(self.hn_beta) - gammaln(self.hn_alpha)
    return log_norm_prior - log_norm_posterior
27 changes: 27 additions & 0 deletions bayesml/linearregression/_linearregression.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from scipy.stats import gamma as ss_gamma
from scipy.stats import multivariate_t as ss_multivariate_t
from scipy.stats import t as ss_t
from scipy.special import gammaln
import matplotlib.pyplot as plt

from .. import base
Expand Down Expand Up @@ -360,6 +361,9 @@ def __init__(
self.p_lambda = 1.0
self.p_nu = 2.0

# sample size
self._n = 0

self.set_h0_params(
h0_mu_vec,
h0_lambda_mat,
Expand Down Expand Up @@ -462,6 +466,7 @@ def set_hn_params(
hn_beta : float, optional
a positive real number, by default None.
"""
self._n = 0
if hn_mu_vec is not None:
_check.float_vec(hn_mu_vec,'hn_mu_vec',ParameterFormatError)
_check.shape_consistency(
Expand Down Expand Up @@ -539,6 +544,7 @@ def update_posterior(self, x, y):
self.hn_alpha += x.shape[0]/2.0
self.hn_beta += (-self.hn_mu_vec[np.newaxis,:] @ self.hn_lambda_mat @ self.hn_mu_vec[:,np.newaxis]
+ y @ y + hn1_mu[np.newaxis,:] @ hn1_Lambda @ hn1_mu[:,np.newaxis])[0,0] /2.0
self._n += x.shape[0]
return self

def _update_posterior(self, x, y):
Expand All @@ -552,6 +558,7 @@ def _update_posterior(self, x, y):
self.hn_alpha += x.shape[0]/2.0
self.hn_beta += (-self.hn_mu_vec[np.newaxis,:] @ self.hn_lambda_mat @ self.hn_mu_vec[:,np.newaxis]
+ y @ y + hn1_mu[np.newaxis,:] @ hn1_Lambda @ hn1_mu[:,np.newaxis])[0,0] /2.0
self._n += x.shape[0]
return self

def estimate_params(self,loss="squared",dict_out=False):
Expand Down Expand Up @@ -775,3 +782,23 @@ def pred_and_update(self,x,y,loss="squared"):
prediction = self.make_prediction(loss=loss)
self.update_posterior(x,y)
return prediction

def calc_log_marginal_likelihood(self):
    """Calculate log marginal likelihood

    For the Normal-Gamma Bayesian linear regression model the log
    evidence combines the ratio of prior/posterior Gamma normalizers,
    the ratio of the precision-matrix determinants, and the Gaussian
    constant -n/2 * log(2*pi), where n is the number of observations
    accumulated by ``update_posterior`` (tracked in ``self._n``).

    Returns
    -------
    log_marginal_likelihood : float
        The log marginal likelihood.
    """
    # a*log(b) - lgamma(a): log normalizer of the Gamma factor.
    gamma_term = (
        self.h0_alpha * np.log(self.h0_beta)
        - gammaln(self.h0_alpha)
        - self.hn_alpha * np.log(self.hn_beta)
        + gammaln(self.hn_alpha)
    )
    # slogdet is used for numerical stability instead of log(det(...)).
    sign0, logdet_prior = np.linalg.slogdet(self.h0_lambda_mat)
    sign_n, logdet_posterior = np.linalg.slogdet(self.hn_lambda_mat)
    det_term = 0.5 * (logdet_prior - logdet_posterior)
    const_term = -0.5 * self._n * np.log(2.0 * np.pi)
    return gamma_term + det_term + const_term
Loading