Commit bc7067f
code cleaning
Krishn_bera committed May 20, 2022
1 parent c79df0d commit bc7067f
Showing 30 changed files with 60,874 additions and 90,855 deletions.
302 changes: 22 additions & 280 deletions hddm/examples/demo_HDDMnnRL/demo_HDDMnnRL.ipynb

Large diffs are not rendered by default.

150,000 changes: 60,000 additions & 90,000 deletions hddm/examples/demo_HDDMnnRL/ppc_data.csv

Large diffs are not rendered by default.

Binary file modified hddm/examples/demo_HDDMnnRL/rlssm_model
Binary file not shown.
Binary file modified hddm/examples/demo_HDDMnnRL/traces.db
Binary file not shown.
24 changes: 11 additions & 13 deletions hddm/generate.py
@@ -494,17 +494,17 @@ def gen_rand_rlssm_data_MAB_RWupdate(
     binary_outcome=True,
     uncertainty=False,
 ):
-    """Generate RLSSM datasets on 2-armed bandit task.
+    """Generate RLSSM datasets on 2-armed bandit task.

     :Arguments:
         model: str
             String that determines which sequential sampling model to use.
-        ssm_param: list
+        ssm_param: list
             List of sequential sampling model parameters (in the order of what you define in model_config).
         rl_param: list
             List of reinforcement learning parameters (in the order of what you define in model_config_rl).
         dual: bool <default=False>
-            Flag to denote if use of separate learning rates for positive and negative RPEs.
+            Flag to denote if use of separate learning rates for positive and negative RPEs.
         size: int <default=1>
             Number of trials to simulate.
         p_upper: float <default=1>
@@ -516,7 +516,7 @@ def gen_rand_rlssm_data_MAB_RWupdate(
         subjs: int <default=1>
             Number of subjects to simulate.
         split_by: int <default=0>
-            Denotes the condition number/index in the experiment.
+            Denotes the condition number/index in the experiment.
         mu_upper: float <default=1>
             Mean of the (normal) reward distribution for the upper action/choice.
         mu_lower: float <default=0>
@@ -527,7 +527,7 @@ def gen_rand_rlssm_data_MAB_RWupdate(
             Std. dev. of the (normal) reward distribution for the lower action/choice.
         binary_outcome: bool <default=True>
             Denotes if the reward scheme is binary (as opposed to non-binary). Non-binary rewards are sampled from normal distributions.
-
+
     :Returns:
         all_data: Pandas.Dataframe
@@ -601,12 +601,12 @@ def gen_rand_rlssm_data_MAB_RWupdate(
             no_noise=False,
             bin_dim=None,
             bin_pointwise=False,
-            )
+        )
         # get the results in desired df format [rt, response] -- from np.array (1, 2)
         tres = np.transpose(np.squeeze(np.array(list(res[0:2])), axis=1))
-        data = pd.DataFrame(tres, columns=['rt', 'response'])
+        data = pd.DataFrame(tres, columns=["rt", "response"])
         # flip the responses to [1,0]
-        data.loc[data['response'] < 1, 'response'] = 0
+        data.loc[data["response"] < 1, "response"] = 0

         df.loc[0, "response"] = data.response[0]
         df.loc[0, "rt"] = data.rt[0]
@@ -658,12 +658,12 @@ def gen_rand_rlssm_data_MAB_RWupdate(
             no_noise=False,
             bin_dim=None,
             bin_pointwise=False,
-            )
+        )
         # get the results in desired df format [rt, response] -- from np.array (1, 2)
         tres = np.transpose(np.squeeze(np.array(list(res[0:2])), axis=1))
-        data = pd.DataFrame(tres, columns=['rt', 'response'])
+        data = pd.DataFrame(tres, columns=["rt", "response"])
         # flip the responses to [1,0]
-        data.loc[data['response'] < 1, 'response'] = 0
+        data.loc[data["response"] < 1, "response"] = 0

         df.loc[i, "response"] = data.response[0]
         df.loc[i, "rt"] = data.rt[0]
@@ -699,8 +699,6 @@ def gen_rand_rlssm_data_MAB_RWupdate(
     return all_data


-
-
 def gen_rand_rlddm_data(
     a,
     t,
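For orientation, the generator touched above can be exercised as in the minimal sketch below. It is based only on the docstring shown in the diff; the model name and all parameter values are placeholders (ssm_param must follow the ordering defined in model_config, rl_param the ordering in model_config_rl), not settings taken from this commit.

# Hypothetical usage sketch -- values are illustrative, not from the commit.
from hddm.generate import gen_rand_rlssm_data_MAB_RWupdate

sim_data = gen_rand_rlssm_data_MAB_RWupdate(
    model="ddm",                     # assumed SSM identifier
    ssm_param=[1.5, 0.3, 0.5, 0.0],  # placeholders, ordered as in model_config
    rl_param=[0.05],                 # placeholder learning rate, ordered as in model_config_rl
    dual=False,                      # one learning rate for positive and negative RPEs
    size=200,                        # trials per subject
    subjs=2,                         # number of subjects
    split_by=0,                      # condition index
    binary_outcome=True,             # Bernoulli rewards using p_upper / p_lower
)
print(sim_data.head())               # all_data comes back as a pandas DataFrame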
96 changes: 67 additions & 29 deletions hddm/likelihoods_mlp.py
@@ -8,16 +8,19 @@
 from hddm.simulators import *
 from hddm.utils import *

-def __prepare_indirect_regressors(model_config = {}):
+
+def __prepare_indirect_regressors(model_config={}):
     # Prepare indirect regressors
     # From dictionary that has indirect regressors as keys and links to parameters
     # To dictionary that has parameters as keys and links them to any potential indirect regressor
     param_links = {}
     indirect_regressors_present = False
-    if 'indirect_regressors' in model_config:
+    if "indirect_regressors" in model_config:
         indirect_regressors_present = True
-        for indirect_regressor_tmp in model_config['indirect_regressors'].keys():
-            for links_to_tmp in model_config['indirect_regressors'][indirect_regressor_tmp]['links_to']:
+        for indirect_regressor_tmp in model_config["indirect_regressors"].keys():
+            for links_to_tmp in model_config["indirect_regressors"][
+                indirect_regressor_tmp
+            ]["links_to"]:
                 if links_to_tmp in param_links.keys():
                     param_links[links_to_tmp].add(indirect_regressor_tmp)
                 else:
@@ -35,28 +38,45 @@ def __prepare_indirect_regressors(model_config = {}):

     return param_links, indirect_regressors_present

-def __prepare_indirect_betas(model_config = {}):
+
+def __prepare_indirect_betas(model_config={}):
     # Prepare indirect betas
     param_links_betas = {}
     indirect_betas_present = False
     # Loop over indirect betas
-    if 'indirect_betas' in model_config:
+    if "indirect_betas" in model_config:
         indirect_betas_present = True
-        for indirect_beta_tmp in model_config['indirect_betas'].keys():
+        for indirect_beta_tmp in model_config["indirect_betas"].keys():
             # For particular indirect beta loop over the
             # parameters it links to
-            for links_to_tmp in model_config['indirect_betas'][indirect_beta_tmp]['links_to'].keys():
+            for links_to_tmp in model_config["indirect_betas"][indirect_beta_tmp][
+                "links_to"
+            ].keys():

                 # If param links has respective key already
                 # just add the indirect beta to it
                 if links_to_tmp in param_links_betas.keys():
-                    param_links_betas[links_to_tmp].add((indirect_beta_tmp, model_config['indirect_betas'][indirect_beta_tmp]['links_to'][links_to_tmp]))
+                    param_links_betas[links_to_tmp].add(
+                        (
+                            indirect_beta_tmp,
+                            model_config["indirect_betas"][indirect_beta_tmp][
+                                "links_to"
+                            ][links_to_tmp],
+                        )
+                    )

-                # Otherwise first create empty set then add the indirect
+                # Otherwise first create empty set then add the indirect
                 # regressor
                 else:
                     param_links_betas[links_to_tmp] = set()
-                    param_links_betas[links_to_tmp].add((indirect_beta_tmp, model_config['indirect_betas'][indirect_beta_tmp]['links_to'][links_to_tmp]))
+                    param_links_betas[links_to_tmp].add(
+                        (
+                            indirect_beta_tmp,
+                            model_config["indirect_betas"][indirect_beta_tmp][
+                                "links_to"
+                            ][links_to_tmp],
+                        )
+                    )

     # For remaining parameters that haven't been linked to anything
     # we let them link to an empty set
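To make the mapping these two helpers build concrete, here is a toy illustration; the config keys and parameter names below are hypothetical, chosen only to show the shape of the output, and are not part of this commit.

# Toy illustration of the __prepare_* contract (hypothetical model_config).
model_config = {
    "params": ["v", "a", "z", "t"],          # model parameters (assumed key name)
    "indirect_regressors": {
        "v_bias": {"links_to": ["v", "z"]},  # indirect regressor -> linked params
    },
}

# Per the comments above, __prepare_indirect_regressors would then return:
#   param_links = {"v": {"v_bias"}, "z": {"v_bias"}, "a": set(), "t": set()}
#   indirect_regressors_present = True
# __prepare_indirect_betas does the same for "indirect_betas", storing
# (beta_name, link_value) tuples instead of bare names.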
@@ -66,9 +86,10 @@ def __prepare_indirect_betas(model_config = {}):
             pass
         else:
             param_links_betas[param] = set()

     return param_links_betas, indirect_betas_present

+
 # LIKELIHOODS
 def make_mlp_likelihood(model=None, model_config=None, wiener_params=None, **kwargs):
     """Defines the likelihoods for the MLP networks.
Expand Down Expand Up @@ -183,7 +204,9 @@ def make_likelihood():
return wfpt_nn


def make_mlp_likelihood_rlssm(model=None, model_config=None, model_config_rl=None, wiener_params=None, **kwargs):
def make_mlp_likelihood_rlssm(
model=None, model_config=None, model_config_rl=None, wiener_params=None, **kwargs
):
"""Defines the likelihoods for the MLP networks for RLSSMs.
:Arguments:
@@ -204,18 +227,24 @@ def make_mlp_likelihood_rlssm(

     def make_likelihood():
         likelihood_str = make_likelihood_str_mlp_rlssm(
-            model=model, config=model_config, config_rl=model_config_rl, wiener_params=wiener_params
+            model=model,
+            config=model_config,
+            config_rl=model_config_rl,
+            wiener_params=wiener_params,
         )
         exec(likelihood_str)
         my_fun = locals()["custom_likelihood"]
         return my_fun

     likelihood_ = make_likelihood()

-    wfpt_nn_rl = stochastic_from_dist("WienernnRL_" + model, partial(likelihood_, **kwargs))
+    wfpt_nn_rl = stochastic_from_dist(
+        "WienernnRL_" + model, partial(likelihood_, **kwargs)
+    )

     return wfpt_nn_rl

+
 # REGRESSOR LIKELIHOODS
 def make_mlp_likelihood_reg(
     model=None, model_config=None, wiener_params=None, **kwargs
Expand Down Expand Up @@ -245,7 +274,7 @@ def random(
add_model_parameters=False,
add_outliers=False,
keep_subj_idx=False,
):
):
"""
Function to sample from a regressor based likelihood. Conditions on the covariates.
"""
@@ -261,11 +290,16 @@ def random(
                 if tmp_str in self.parents["reg_outcomes"]:
                     param_data[:, cnt] = param_dict[tmp_str].values
                     for linked_indirect_regressor in param_links[tmp_str]:
-                        param_data[:, cnt] = param_data[:, cnt] + \
-                            param_dict[linked_indirect_regressor].values
+                        param_data[:, cnt] = (
+                            param_data[:, cnt]
+                            + param_dict[linked_indirect_regressor].values
+                        )
                     for linked_indirect_beta in param_links_betas[tmp_str]:
-                        param_data[:, cnt] = param_data[:, cnt] + \
-                            param_dict[linked_indirect_beta[0]] * self.value[linked_indirect_beta[1]]
+                        param_data[:, cnt] = (
+                            param_data[:, cnt]
+                            + param_dict[linked_indirect_beta[0]]
+                            * self.value[linked_indirect_beta[1]]
+                        )
                 else:
                     param_data[:, cnt] = param_dict[tmp_str]
                 cnt += 1
@@ -305,27 +339,31 @@ def cdf(self, x):
     def make_likelihood():
         if indirect_betas_present or indirect_regressors_present:
             likelihood_str = make_reg_likelihood_str_mlp(
-                    config=model_config,
-                    wiener_params=wiener_params,
-                    param_links=param_links,
-                    param_links_betas=param_links_betas,
-                    )
+                config=model_config,
+                wiener_params=wiener_params,
+                param_links=param_links,
+                param_links_betas=param_links_betas,
+            )
         else:
             likelihood_str = make_reg_likelihood_str_mlp_basic(
-                config = model_config,
+                config=model_config,
                 wiener_params=wiener_params,
             )

         exec(likelihood_str)
         my_fun = locals()["custom_likelihood_reg"]
         return my_fun

-    param_links, indirect_regressors_present = __prepare_indirect_regressors(model_config = model_config)
-    param_links_betas, indirect_betas_present = __prepare_indirect_betas(model_config = model_config)
+    param_links, indirect_regressors_present = __prepare_indirect_regressors(
+        model_config=model_config
+    )
+    param_links_betas, indirect_betas_present = __prepare_indirect_betas(
+        model_config=model_config
+    )

     likelihood_ = make_likelihood()
     stoch = stochastic_from_dist("wfpt_reg", partial(likelihood_, **kwargs))
     stoch.pdf = pdf
     stoch.cdf = cdf
     stoch.random = random
-    return stoch
+    return stoch
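Stepping back, every factory reformatted in this file follows the same three-step pattern. The sketch below restates it under stated assumptions: the generated likelihood body is a stand-in, not the real network evaluation, and the wrapped stochastic is shown commented out to avoid implying exact kabuki behavior.

# Simplified sketch of the build-exec-wrap pattern used throughout this file.
likelihood_str = '''
def custom_likelihood(x, v, a, z, t):
    # placeholder body; the generated code actually evaluates the MLP network
    return 0.0
'''
exec(likelihood_str)                    # 1) materialize the generated source
my_fun = locals()["custom_likelihood"]  # 2) pull the function out of locals()
# 3) wrap it into a stochastic, as the code above does:
# wfpt = stochastic_from_dist("wfpt_custom", my_fun)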