Skip to content

Develop hiddenmarkovnormal initgetset #30

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
52d88bf
Create hiddenmarkovautoregressive.md
yuta-nakahara Jul 21, 2022
9d4615e
Update hiddenmarkovautoregressive.md
yuta-nakahara Aug 3, 2022
f247c6d
Add files via upload
kkkzmwsd Aug 25, 2022
28054a1
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
5cd4b92
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
cd0ec46
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
a7b415a
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
b21e930
Create hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
2360a2f
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
f0ccfaf
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 2, 2022
4fb4c20
Update hiddenmarkovautoregressive.md
kkkzmwsd Sep 3, 2022
7d2ac37
Delete hiddenmarkovautoregressive.md
kkkzmwsd Sep 3, 2022
2008345
Merge pull request #23 from yuta-nakahara/develop-hiddenmarkovautoreg…
yuta-nakahara Sep 3, 2022
3843027
Add float_vecs_sum_1
yuta-nakahara Sep 7, 2022
239f0a2
start my work
NJ-private Sep 11, 2022
849c69d
add params for data generation without hyper params
NJ-private Sep 11, 2022
ad521e8
add params for data generation without hyper params
NJ-private Sep 11, 2022
6df9a50
make self.__init__() for hiddenmarkovautoregressive and add check dim…
NJ-private Sep 18, 2022
0708936
add set_h_params
NJ-private Sep 18, 2022
9723a6d
add new check consistency
NJ-private Sep 23, 2022
539e963
bug fix
NJ-private Sep 23, 2022
8a37e7a
some modify
NJ-private Oct 2, 2022
ac6208c
some modify
NJ-private Oct 9, 2022
bb82eb4
add set_params and get_params and some modify of __init__
NJ-private Oct 9, 2022
37fa84d
init function
yuta-nakahara Oct 9, 2022
8522517
Merge branch 'develop-check' into develop-hiddenmarkovnormal-initgetset
yuta-nakahara Oct 9, 2022
8a03f2c
Merge branch 'develop-hiddenmarkovautoregressive-GenModel' into devel…
yuta-nakahara Oct 9, 2022
c09204f
add set_params
NJ-private Oct 9, 2022
571a7c0
little modify
NJ-private Oct 9, 2022
9cd44c2
modify set_params and add shape_consistency
NJ-private Oct 10, 2022
bd174ef
little modify and add set_h_params before eta and zeta
NJ-private Oct 10, 2022
2a1aeaa
Add shape_consistency
yuta-nakahara Oct 11, 2022
4bbe731
Revise shape_consistency
yuta-nakahara Oct 11, 2022
69d052a
little modification of shape_consistency
yuta-nakahara Oct 11, 2022
de87b59
Merge branch 'develop-check' into develop-hiddenmarkovnormal-initgetset
yuta-nakahara Oct 11, 2022
d066598
Revise set_params and set_h_params
yuta-nakahara Oct 11, 2022
e458e3c
Added get_params and get_h_params
yuta-nakahara Oct 12, 2022
dc67df4
Add set, get, etc to LearnModel
yuta-nakahara Oct 12, 2022
f921cae
Revise reset_hn_params and overwrite_h0_params
yuta-nakahara Oct 13, 2022
b0ed3b0
Merge branch 'develop-base' into develop-hiddenmarkovnormal-initgetset
yuta-nakahara Oct 13, 2022
ff1d77e
Delete rest_hn_params and overwrite_h0_params
yuta-nakahara Oct 13, 2022
fd91b6b
little modify
yuta-nakahara Oct 13, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 35 additions & 4 deletions bayesml/_check.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# Code Author
# Yuta Nakahara <yuta.nakahara@aoni.waseda.jp>
# Yuji Iikubo <yuji-iikubo.8@fuji.waseda.jp>
# Yasushi Esaki <esakiful@gmail.com>
# Jun Nishikawa <jun.b.nishikawa@gmail.com>
import numpy as np

_EPSILON = np.sqrt(np.finfo(np.float64).eps)
Expand Down Expand Up @@ -51,6 +53,12 @@ def nonneg_int_vec(val,val_name,exception_class):
return val
raise(exception_class(val_name + " must be a 1-dimensional numpy.ndarray whose dtype is int. Its values must be non-negative (including 0)."))

def nonneg_float_vec(val,val_name,exception_class):
    """Validate that *val* is a 1-dimensional float ndarray with non-negative entries.

    Returns *val* unchanged on success; otherwise raises *exception_class*.
    """
    if type(val) is np.ndarray:
        is_float = np.issubdtype(val.dtype, np.floating)
        if is_float and val.ndim == 1 and bool(np.all(val >= 0)):
            return val
    raise(exception_class(val_name + " must be a 1-dimensional numpy.ndarray whose dtype is float. Its values must be non-negative (including 0)."))

def int_of_01(val,val_name,exception_class):
if np.issubdtype(type(val),np.integer):
if val == 0 or val ==1:
Expand Down Expand Up @@ -171,13 +179,30 @@ def float_vecs(val,val_name,exception_class):
return val
raise(exception_class(val_name + " must be a numpy.ndarray whose ndim >= 1."))

def float_vec_sum_1(val,val_name,exception_class):
def pos_float_vecs(val,val_name,exception_class):
    """Validate that *val* is a numpy.ndarray (ndim >= 1) of strictly positive values.

    Integer arrays passing the check are converted to float before being returned;
    float arrays are returned unchanged. Otherwise *exception_class* is raised.
    """
    if type(val) is np.ndarray:
        if np.issubdtype(val.dtype,np.integer) and val.ndim >= 1 and np.all(val>0):
            return val.astype(float)
        if np.issubdtype(val.dtype,np.floating) and val.ndim >= 1 and np.all(val>0.0):
            return val
    # Error message fixed: the check accepts any ndim >= 1, not only 1-dimensional arrays.
    raise(exception_class(val_name + " must be a numpy.ndarray whose ndim >= 1. Its values must be positive (not including 0)."))

def float_vec_sum_1(val,val_name,exception_class,ndim=1,sum_axis=0):
    """Validate that *val* is an *ndim*-dimensional ndarray whose sums along *sum_axis* all equal 1.

    Integer arrays passing the check are converted to float; float arrays are
    returned unchanged. Otherwise *exception_class* is raised.

    Bug fixes relative to the previous revision:
    - The old check compared the TOTAL of the per-slice sums against the number
      of slices, so per-slice deviations could cancel out (e.g. sums [2.0, 0.0]
      passed). Each slice sum is now checked individually.
    - ndim is verified before np.sum is called, so a mismatched array raises
      *exception_class* instead of numpy's AxisError.
    """
    # Same value as the module-level _EPSILON constant.
    tolerance = np.sqrt(np.finfo(np.float64).eps)
    if type(val) is np.ndarray and val.ndim == ndim:
        sum_val = np.sum(val, axis=sum_axis)
        if np.issubdtype(val.dtype,np.integer) and np.all(np.abs(sum_val - 1.) <= tolerance):
            return val.astype(float)
        if np.issubdtype(val.dtype,np.floating) and np.all(np.abs(sum_val - 1.) <= tolerance):
            return val
    raise(exception_class(val_name + f" must be a {ndim}-dimensional numpy.ndarray, and the sums of its elements along axis {sum_axis} must equal 1."))

def float_vecs_sum_1(val,val_name,exception_class):
    """Validate that *val* is a numpy.ndarray (ndim >= 1) whose slices along the last axis each sum to 1.

    Integer arrays passing the check are converted to float; float arrays are
    returned unchanged. Otherwise *exception_class* is raised.
    """
    # Same value as the module-level _EPSILON constant.
    tol = np.sqrt(np.finfo(np.float64).eps)
    if type(val) is np.ndarray and val.ndim >= 1:
        if np.issubdtype(val.dtype, np.integer):
            if np.all(np.abs(np.sum(val, axis=-1) - 1.) <= tol):
                return val.astype(float)
        elif np.issubdtype(val.dtype, np.floating):
            if np.all(np.abs(np.sum(val, axis=-1) - 1.) <= tol):
                return val
    raise(exception_class(val_name + " must be a numpy.ndarray whose ndim >= 1, and the sum along the last dimension must equal to 1."))

def int_(val,val_name,exception_class):
if np.issubdtype(type(val),np.integer):
Expand Down Expand Up @@ -205,3 +230,9 @@ def onehot_vecs(val,val_name,exception_class):
if np.issubdtype(val.dtype,np.integer) and val.ndim >= 1 and np.all(val >= 0) and np.all(val.sum(axis=-1)==1):
return val
raise(exception_class(val_name + " must be a numpy.ndarray whose dtype is int and whose last axis constitutes one-hot vectors."))

def shape_consistency(val: int, val_name: str, correct: int, correct_name: str, exception_class):
    """Raise *exception_class* unless *val* equals *correct*.

    The error message names both values so the caller can see which
    dimension/shape attribute disagrees with which.
    """
    if val == correct:
        return
    raise(exception_class(
        f"{val_name} must coincide with {correct_name}: "
        f"{val_name} = {val}, {correct_name} = {correct}"))
4 changes: 2 additions & 2 deletions bayesml/autoregressive/_autoregressive.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,11 @@ class GenModel(base.Generative):
h_mu_vec : numpy ndarray, optional
a vector of real numbers, by default [0.0, 0.0, ... , 0.0]
h_lambda_mat : numpy ndarray, optional
a positibe definate matrix, by default the identity matrix
a positive definite matrix, by default the identity matrix
h_alpha : float, optional
a positive real number, by default 1.0
h_beta : float, optional
a positibe real number, by default 1.0
a positive real number, by default 1.0
seed : {None, int}, optional
A seed to initialize numpy.random.default_rng(),
by default None
Expand Down
18 changes: 13 additions & 5 deletions bayesml/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,13 +257,21 @@ def load_hn_params(self,filename):
+'or ``LearnModel.save_hn_params()``.')
)

def reset_hn_params(self):
    """Reset the hyperparameters of the posterior distribution to their initial values.

    They are reset to the output of `self.get_h0_params()`.
    Note that the parameters of the predictive distribution are also calculated from them.
    """
    # Diff residue removed: the stray @abstractmethod decorators and `pass`
    # from the old abstract declaration no longer belong here now that the
    # base class supplies a concrete implementation.
    self.set_hn_params(*self.get_h0_params().values())

def overwrite_h0_params(self):
    """Overwrite the initial values of the hyperparameters of the posterior distribution by the learned values.

    They are overwritten by the output of `self.get_hn_params()`.
    Note that the parameters of the predictive distribution are also calculated from them.
    """
    # Diff residue (`pass` from the old abstract stub) removed; docstring
    # typo "overwitten" fixed.
    self.set_h0_params(*self.get_hn_params().values())

@abstractmethod
def update_posterior(self):
Expand Down
9 changes: 9 additions & 0 deletions bayesml/hiddenmarkovautoregressive/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Document Author
# Koki Kazama <kokikazama@aoni.waseda.jp>
# Jun Nishikawa <Jun.B.Nishikawa@gmail.com>

from ._hiddenmarkovautoregressive import GenModel
# from ._hiddenmarkovautoregressive import LearnModel

# __all__ = ["GenModel","LearnModel"]
__all__ = ["GenModel"]
Loading