Remove t suffix from Model methods #5863

Merged: 30 commits, merged on Jun 14, 2022

Commits
8db2ffc
Future warning for logpt
cuchoi Jun 7, 2022
47cf58e
Future warning for dlogpt and d2logpt
cuchoi Jun 7, 2022
8935247
Updated references to logpt, and updated varlogpt, datalogpt, joint_l…
cuchoi Jun 8, 2022
bf37bba
Fix issue with d2logpt
cuchoi Jun 8, 2022
8a1348d
Added tests
cuchoi Jun 8, 2022
876eb11
Fix typo
cuchoi Jun 8, 2022
5ebdfe7
Updated release notes for 4.0
cuchoi Jun 8, 2022
067d4f9
Added potentiallogpt test
cuchoi Jun 8, 2022
454fc18
Updated developer guide
cuchoi Jun 8, 2022
28f5642
Update pymc/distributions/logprob.py
cuchoi Jun 13, 2022
04718d9
Removed t from varlogp_nojact
cuchoi Jun 13, 2022
99c0afb
Revert Release Notes
cuchoi Jun 13, 2022
299a009
Revert changes to developer guide
cuchoi Jun 13, 2022
df0c188
Future warning for logpt
cuchoi Jun 7, 2022
7633380
Future warning for dlogpt and d2logpt
cuchoi Jun 7, 2022
b12e823
Updated references to logpt, and updated varlogpt, datalogpt, joint_l…
cuchoi Jun 8, 2022
ce0e1d6
Fix issue with d2logpt
cuchoi Jun 8, 2022
242d72f
Added tests
cuchoi Jun 8, 2022
a8ef69a
Fix typo
cuchoi Jun 8, 2022
d3dcc41
Updated release notes for 4.0
cuchoi Jun 8, 2022
0788d79
Added potentiallogpt test
cuchoi Jun 8, 2022
bb0dbe3
Update pymc/distributions/logprob.py
cuchoi Jun 13, 2022
07ec242
Removed t from varlogp_nojact
cuchoi Jun 13, 2022
7322a32
Revert Release Notes
cuchoi Jun 13, 2022
eb1dab4
Merge branch 'remove-t-suffix' of https://github.com/cuchoi/pymc into…
cuchoi Jun 13, 2022
6027ed8
Updated release notes for 4.0
cuchoi Jun 8, 2022
1158022
Revert Release Notes
cuchoi Jun 13, 2022
d19471f
Merge branch 'remove-t-suffix' of https://github.com/cuchoi/pymc into…
cuchoi Jun 13, 2022
b4b4515
Added deprecation of functions/properties ending with t to release notes
cuchoi Jun 14, 2022
a33cd0b
Update RELEASE-NOTES.md
twiecki Jun 14, 2022
10 changes: 10 additions & 0 deletions RELEASE-NOTES.md
@@ -4,6 +4,16 @@
+ Fixed an incorrect entry in `pm.Metropolis.stats_dtypes` (see #5582).
+ Added a check in `Empirical` approximation which does not yet support `InferenceData` inputs (see #5874, #5884).
+ Fixed bug when sampling discrete variables with SMC (see #5887).
+ Removed the trailing `t` (for tensor) from functions and properties of the model class and from `joint_logpt` (see #5859).
+ `Model.logpt` → `Model.logp`
+ `Model.dlogpt` → `Model.dlogp`
+ `Model.d2logpt` → `Model.d2logp`
+ `Model.datalogpt` → `Model.datalogp`
+ `Model.varlogpt` → `Model.varlogp`
+ `Model.observedlogpt` → `Model.observedlogp`
+ `Model.potentiallogpt` → `Model.potentiallogp`
+ `Model.varlogp_nojact` → `Model.varlogp_nojac`
+ `logprob.joint_logpt` → `logprob.joint_logp`

## PyMC 4.0.0 (2022-06-03)

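The new spellings are drop-in replacements; the old `*t` names are kept as thin wrappers that emit a `FutureWarning` before delegating. A minimal migration sketch, assuming a PyMC build that includes this change (the toy model below is purely illustrative):

    import warnings

    import numpy as np
    import pymc as pm

    # Toy model for illustration only; not part of the PR.
    with pm.Model() as model:
        mu = pm.Normal("mu", 0.0, 1.0)
        pm.Normal("obs", mu, 1.0, observed=np.array([0.1, -0.3]))

    # New spelling: builds the joint log-probability graph.
    logp_graph = model.logp()

    # Old spelling still works for now, but warns before delegating.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        model.logpt()
    assert any(issubclass(w.category, FutureWarning) for w in caught)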
8 changes: 4 additions & 4 deletions docs/source/learn/core_notebooks/pymc_aesara.ipynb
@@ -1844,7 +1844,7 @@
}
},
"source": [
"`pymc` models provide some helpful routines to facilitate the conversion of `RandomVariable`s to probability functions. {meth}`~pymc.Model.logpt`, for instance, can be used to extract the joint probability of all variables in the model:"
"`pymc` models provide some helpful routines to facilitate the conversion of `RandomVariable`s to probability functions. {meth}`~pymc.Model.logp`, for instance, can be used to extract the joint probability of all variables in the model:"
]
},
{
@@ -1902,7 +1902,7 @@
}
],
"source": [
"aesara.dprint(model.logpt(sum=False))"
"aesara.dprint(model.logp(sum=False))"
]
},
{
@@ -2213,7 +2213,7 @@
"sigma_log_value = model_2.rvs_to_values[sigma]\n",
"x_value = model_2.rvs_to_values[x]\n",
"# element-wise log-probability of the model (we do not take the sum)\n",
"logp_graph = at.stack(model_2.logpt(sum=False))\n",
"logp_graph = at.stack(model_2.logp(sum=False))\n",
"# evaluate by passing concrete values\n",
"logp_graph.eval({mu_value: 0, sigma_log_value: -10, x_value:0})"
]
@@ -2314,7 +2314,7 @@
}
},
"source": [
"The {class}`~pymc.Model` class also has methods to extract the gradient ({meth}`~pymc.Model.dlogpt`) and the hessian ({meth}`~pymc.Model.d2logpt`) of the logp."
"The {class}`~pymc.Model` class also has methods to extract the gradient ({meth}`~pymc.Model.dlogp`) and the hessian ({meth}`~pymc.Model.d2logp`) of the logp."
]
},
{
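A rough sketch of the renamed gradient and Hessian helpers through their compiled counterparts (`compile_logp`, `compile_dlogp`, `compile_d2logp`); the one-parameter model is an assumption made for illustration, not taken from the notebook:

    import numpy as np
    import pymc as pm

    with pm.Model() as m:
        pm.Normal("x", 0.0, 1.0)

    logp_fn = m.compile_logp()      # compiled log-density
    dlogp_fn = m.compile_dlogp()    # compiled gradient of the log-density
    d2logp_fn = m.compile_d2logp()  # compiled Hessian of the log-density

    point = {"x": np.array(0.5)}
    print(logp_fn(point), dlogp_fn(point), d2logp_fn(point))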
4 changes: 2 additions & 2 deletions pymc/backends/arviz.py
@@ -251,7 +251,7 @@ def _extract_log_likelihood(self, trace):
(
var,
self.model.compile_fn(
self.model.logpt(var, sum=False)[0],
self.model.logp(var, sum=False)[0],
inputs=self.model.value_vars,
on_unused_input="ignore",
),
@@ -263,7 +263,7 @@ def _extract_log_likelihood(self, trace):
(
var,
self.model.compile_fn(
self.model.logpt(var, sum=False)[0],
self.model.logp(var, sum=False)[0],
inputs=self.model.value_vars,
on_unused_input="ignore",
),
2 changes: 2 additions & 0 deletions pymc/distributions/__init__.py
@@ -15,6 +15,7 @@
from pymc.distributions.logprob import ( # isort:skip
logcdf,
logp,
joint_logp,
joint_logpt,
)

@@ -191,6 +192,7 @@
"CAR",
"PolyaGamma",
"joint_logpt",
"joint_logp",
"logp",
"logcdf",
]
2 changes: 1 addition & 1 deletion pymc/distributions/continuous.py
@@ -2558,7 +2558,7 @@ def logcdf(value, nu):
return logcdf(Gamma.dist(alpha=nu / 2, beta=0.5), value)


# TODO: Remove this once logpt for multiplication is working!
# TODO: Remove this once logp for multiplication is working!
class WeibullBetaRV(WeibullRV):
ndims_params = [0, 0]

17 changes: 13 additions & 4 deletions pymc/distributions/logprob.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings

from collections.abc import Mapping
from typing import Dict, List, Optional, Sequence, Union
@@ -119,7 +120,15 @@ def _get_scaling(
)


def joint_logpt(
def joint_logpt(*args, **kwargs):
warnings.warn(
"joint_logpt has been deprecated. Use joint_logp instead.",
FutureWarning,
)
return joint_logp(*args, **kwargs)


def joint_logp(
var: Union[TensorVariable, List[TensorVariable]],
rv_values: Optional[Union[TensorVariable, Dict[TensorVariable, TensorVariable]]] = None,
*,
@@ -159,14 +168,14 @@ def joint_logpt(

"""
# TODO: In future when we drop support for tag.value_var most of the following
# logic can be removed and logpt can just be a wrapper function that calls aeppl's
# logic can be removed and logp can just be a wrapper function that calls aeppl's
# joint_logprob directly.

# If var is not a list make it one.
if not isinstance(var, (list, tuple)):
var = [var]

# If logpt isn't provided values it is assumed that the tagged value var or
# If logp isn't provided values it is assumed that the tagged value var or
# observation is the value variable for that particular RV.
if rv_values is None:
rv_values = {}
@@ -251,7 +260,7 @@ def joint_logpt(
"reference nonlocal variables."
)

# aeppl returns the logpt for every single value term we provided to it. This includes
# aeppl returns the logp for every single value term we provided to it. This includes
# the extra values we plugged in above, so we filter those we actually wanted in the
# same order they were given in.
logp_var_dict = {}
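The shim above forwards `joint_logpt` calls to `joint_logp` after warning. A hedged sketch of how downstream code is expected to call the new function (the model and variable names are illustrative, not taken from the PR):

    import pymc as pm
    from pymc.distributions.logprob import joint_logp   # new name
    # from pymc.distributions.logprob import joint_logpt  # old name, now a warning shim

    with pm.Model() as model:
        sigma = pm.HalfNormal("sigma", 1.0)

    # Value variable backing `sigma` (its transformed value, if a transform applies).
    value_var = model.rvs_to_values[sigma]
    # Elementwise log-probability terms for `sigma` at its value variable.
    logp_terms = joint_logp(sigma, {sigma: value_var}, sum=False)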
105 changes: 83 additions & 22 deletions pymc/model.py
@@ -57,7 +57,7 @@
)
from pymc.blocking import DictToArrayBijection, RaveledVars
from pymc.data import GenTensorVariable, Minibatch
from pymc.distributions import joint_logpt
from pymc.distributions import joint_logp
from pymc.distributions.logprob import _get_scaling
from pymc.distributions.transforms import _default_transform
from pymc.exceptions import ImputationWarning, SamplingError, ShapeError, ShapeWarning
@@ -623,9 +623,9 @@ def logp_dlogp_function(self, grad_vars=None, tempered=False, **kwargs):
raise ValueError(f"Can only compute the gradient of continuous types: {var}")

if tempered:
costs = [self.varlogpt, self.datalogpt]
costs = [self.varlogp, self.datalogp]
else:
costs = [self.logpt()]
costs = [self.logp()]

input_vars = {i for i in graph_inputs(costs) if not isinstance(i, Constant)}
extra_vars = [self.rvs_to_values.get(var, var) for var in self.free_RVs]
@@ -654,7 +654,7 @@ def compile_logp(
Whether to sum all logp terms or return elemwise logp for each variable.
Defaults to True.
"""
return self.model.compile_fn(self.logpt(vars=vars, jacobian=jacobian, sum=sum))
return self.model.compile_fn(self.logp(vars=vars, jacobian=jacobian, sum=sum))

def compile_dlogp(
self,
@@ -671,7 +671,7 @@ def compile_dlogp(
jacobian:
Whether to include jacobian terms in logprob graph. Defaults to True.
"""
return self.model.compile_fn(self.dlogpt(vars=vars, jacobian=jacobian))
return self.model.compile_fn(self.dlogp(vars=vars, jacobian=jacobian))

def compile_d2logp(
self,
@@ -688,9 +688,16 @@ def compile_d2logp(
jacobian:
Whether to include jacobian terms in logprob graph. Defaults to True.
"""
return self.model.compile_fn(self.d2logpt(vars=vars, jacobian=jacobian))
return self.model.compile_fn(self.d2logp(vars=vars, jacobian=jacobian))

def logpt(
def logpt(self, *args, **kwargs):
warnings.warn(
"Model.logpt has been deprecated. Use Model.logp instead.",
FutureWarning,
)
return self.logp(*args, **kwargs)

def logp(
self,
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
jacobian: bool = True,
@@ -742,7 +749,7 @@ def logpt(

rv_logps: List[TensorVariable] = []
if rv_values:
rv_logps = joint_logpt(list(rv_values.keys()), rv_values, sum=False, jacobian=jacobian)
rv_logps = joint_logp(list(rv_values.keys()), rv_values, sum=False, jacobian=jacobian)
assert isinstance(rv_logps, list)

# Replace random variables by their value variables in potential terms
@@ -764,7 +771,14 @@ def logpt(
logp_scalar.name = logp_scalar_name
return logp_scalar

def dlogpt(
def dlogpt(self, *args, **kwargs):
warnings.warn(
"Model.dlogpt has been deprecated. Use Model.dlogp instead.",
FutureWarning,
)
return self.dlogp(*args, **kwargs)

def dlogp(
self,
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
jacobian: bool = True,
@@ -799,10 +813,17 @@ def dlogpt(
f"Requested variable {var} not found among the model variables"
)

cost = self.logpt(jacobian=jacobian)
cost = self.logp(jacobian=jacobian)
return gradient(cost, value_vars)

def d2logpt(
def d2logpt(self, *args, **kwargs):
warnings.warn(
"Model.d2logpt has been deprecated. Use Model.d2logp instead.",
FutureWarning,
)
return self.d2logp(*args, **kwargs)

def d2logp(
self,
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
jacobian: bool = True,
@@ -837,34 +858,74 @@ def d2logpt(
f"Requested variable {var} not found among the model variables"
)

cost = self.logpt(jacobian=jacobian)
cost = self.logp(jacobian=jacobian)
return hessian(cost, value_vars)

@property
def datalogpt(self) -> Variable:
def datalogpt(self):
warnings.warn(
"Model.datalogpt has been deprecated. Use Model.datalogp instead.",
FutureWarning,
)
return self.datalogp

@property
def datalogp(self) -> Variable:
"""Aesara scalar of log-probability of the observed variables and
potential terms"""
return self.observedlogpt + self.potentiallogpt
return self.observedlogp + self.potentiallogp

@property
def varlogpt(self) -> Variable:
def varlogpt(self):
warnings.warn(
"Model.varlogpt has been deprecated. Use Model.varlogp instead.",
FutureWarning,
)
return self.varlogp

@property
def varlogp(self) -> Variable:
"""Aesara scalar of log-probability of the unobserved random variables
(excluding deterministic)."""
return self.logpt(vars=self.free_RVs)
return self.logp(vars=self.free_RVs)

@property
def varlogp_nojact(self) -> Variable:
def varlogp_nojact(self):
warnings.warn(
"Model.varlogp_nojact has been deprecated. Use Model.varlogp_nojac instead.",
FutureWarning,
)
return self.varlogp_nojac

@property
def varlogp_nojac(self) -> Variable:
"""Aesara scalar of log-probability of the unobserved random variables
(excluding deterministic) without jacobian term."""
return self.logpt(vars=self.free_RVs, jacobian=False)
return self.logp(vars=self.free_RVs, jacobian=False)

@property
def observedlogpt(self):
warnings.warn(
"Model.observedlogpt has been deprecated. Use Model.observedlogp instead.",
FutureWarning,
)
return self.observedlogp

@property
def observedlogpt(self) -> Variable:
def observedlogp(self) -> Variable:
"""Aesara scalar of log-probability of the observed variables"""
return self.logpt(vars=self.observed_RVs)
return self.logp(vars=self.observed_RVs)

@property
def potentiallogpt(self):
warnings.warn(
"Model.potentiallogpt has been deprecated. Use Model.potentiallogp instead.",
FutureWarning,
)
return self.potentiallogp

@property
def potentiallogpt(self) -> Variable:
def potentiallogp(self) -> Variable:
"""Aesara scalar of log-probability of the Potential terms"""
# Convert random variables in Potential expression into their log-likelihood
# inputs and apply their transforms, if any
@@ -1755,7 +1816,7 @@ def point_logps(self, point=None, round_vals=2):
point = self.initial_point()

factors = self.basic_RVs + self.potentials
factor_logps_fn = [at.sum(factor) for factor in self.logpt(factors, sum=False)]
factor_logps_fn = [at.sum(factor) for factor in self.logp(factors, sum=False)]
return {
factor.name: np.round(np.asarray(factor_logp), round_vals)
for factor, factor_logp in zip(
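The "Added tests" commits presumably exercise these warnings; a minimal pytest sketch of such a check (the test name and model are hypothetical, not the PR's actual test code):

    import numpy as np
    import pymc as pm
    import pytest

    def test_t_suffix_aliases_warn():
        with pm.Model() as model:
            mu = pm.Normal("mu", 0.0, 1.0)
            pm.Normal("y", mu, 1.0, observed=np.array([0.0, 1.0]))

        with pytest.warns(FutureWarning, match="Model.logpt has been deprecated"):
            model.logpt()

        with pytest.warns(FutureWarning, match="Model.datalogpt has been deprecated"):
            model.datalogpt  # accessing the old property name triggers the warning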
4 changes: 2 additions & 2 deletions pymc/sampling.py
@@ -204,15 +204,15 @@ def assign_step_methods(model, step=None, methods=None, step_kwargs=None):
# Use competence classmethods to select step methods for remaining
# variables
selected_steps = defaultdict(list)
model_logpt = model.logpt()
model_logp = model.logp()

for var in model.value_vars:
if var not in assigned_vars:
# determine if a gradient can be computed
has_gradient = var.dtype not in discrete_types
if has_gradient:
try:
tg.grad(model_logpt, var)
tg.grad(model_logp, var)
except (NotImplementedError, tg.NullTypeGradError):
has_gradient = False

10 changes: 5 additions & 5 deletions pymc/sampling_jax.py
@@ -100,10 +100,10 @@ def get_jaxified_graph(


def get_jaxified_logp(model: Model, negative_logp=True) -> Callable:
model_logpt = model.logpt()
model_logp = model.logp()
if not negative_logp:
model_logpt = -model_logpt
logp_fn = get_jaxified_graph(inputs=model.value_vars, outputs=[model_logpt])
model_logp = -model_logp
logp_fn = get_jaxified_graph(inputs=model.value_vars, outputs=[model_logp])

def logp_fn_wrap(x):
return logp_fn(*x)[0]
@@ -136,8 +136,8 @@ def _get_log_likelihood(model: Model, samples, backend=None) -> Dict:
"""Compute log-likelihood for all observations"""
data = {}
for v in model.observed_RVs:
v_elemwise_logpt = model.logpt(v, sum=False)
jax_fn = get_jaxified_graph(inputs=model.value_vars, outputs=v_elemwise_logpt)
v_elemwise_logp = model.logp(v, sum=False)
jax_fn = get_jaxified_graph(inputs=model.value_vars, outputs=v_elemwise_logp)
result = jax.jit(jax.vmap(jax.vmap(jax_fn)), backend=backend)(*samples)[0]
data[v.name] = result
return data
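For context, a rough sketch of how `get_jaxified_logp` is typically called after this rename (the model is illustrative and JAX must be installed):

    import numpy as np
    import pymc as pm
    from pymc.sampling_jax import get_jaxified_logp

    with pm.Model() as model:
        mu = pm.Normal("mu", 0.0, 1.0)
        pm.Normal("y", mu, 1.0, observed=np.array([0.2, -0.1]))

    logp_fn = get_jaxified_logp(model)
    # The returned callable takes a flat sequence of values,
    # ordered like model.value_vars (here, just the value of `mu`).
    print(logp_fn([np.array(0.0)]))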
4 changes: 2 additions & 2 deletions pymc/smc/smc.py
@@ -219,10 +219,10 @@ def _initialize_kernel(self):
shared = make_shared_replacements(initial_point, self.variables, self.model)

self.prior_logp_func = _logp_forw(
initial_point, [self.model.varlogpt], self.variables, shared
initial_point, [self.model.varlogp], self.variables, shared
)
self.likelihood_logp_func = _logp_forw(
initial_point, [self.model.datalogpt], self.variables, shared
initial_point, [self.model.datalogp], self.variables, shared
)

priors = [self.prior_logp_func(sample) for sample in self.tempered_posterior]