Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Log-space coefficients of kernels #148

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 25 additions & 8 deletions geometric_kernels/feature_maps/deterministic.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,13 @@ class DeterministicFeatureMapCompact(FeatureMap):
A :class:`~.spaces.DiscreteSpectrumSpace` space.
:param num_levels:
Number of levels in the kernel approximation.

.. note::
This feature map does not support a "`log_spectrum` mode", unlike the
:class:`~.feature_maps.RandomPhaseFeatureMapCompact` feature map or
the :class:`~.kernels.MaternKarhunenLoeveKernel` kernel.
The reason is simple: if the number of eigenfunctions per level
becomes large, this feature map is infeasible anyway.
"""

def __init__(self, space: DiscreteSpectrumSpace, num_levels: int):
Expand All @@ -36,8 +43,9 @@ def __call__(
self,
X: B.Numeric,
params: Dict[str, B.Numeric],
*,
normalize: Optional[bool] = None,
**kwargs,
key: Optional[B.RandomState] = None,
) -> Tuple[None, B.Numeric]:
"""
:param X:
Expand All @@ -48,15 +56,24 @@ def __call__(
Normalize to have unit average variance (if omitted
or None, follows the standard behavior of
:class:`~.kernels.MaternKarhunenLoeveKernel`).
:param ``**kwargs``:
Unused.
:param key:
Random state, either `np.random.RandomState`,
`tf.random.Generator`, `torch.Generator` or `jax.tensor` (which
represents a random state).

Keyword-only. Only accepted for compatibility with randomized
feature maps. If provided, the key is returned as the first
element of the tuple.

:return:
`Tuple(None, features)` where `features` is an [N, O] array, N
is the number of inputs and O is the dimension of the feature map.
`Tuple(key, features)` where `features` is an [N, O] array, N
is the number of inputs and O is the dimension of the feature map,
while `key` is the `key` keyword argument, unchanged.

.. note::
The first element of the returned tuple is a simple None and
The first element of the returned tuple is the value of the `key`
keyword argument, unchanged. When this argument is omitted (which is
expected to be the typical use case), it is a simple None and
should be ignored. It is only there to support the abstract
interface: for some other subclasses of :class:`FeatureMap`, this
first element may be an updated random key.
Expand All @@ -72,9 +89,9 @@ def __call__(
spectrum = spectrum / normalizer

weights = B.transpose(B.power(spectrum, 0.5)) # [1, M]
eigenfunctions = self.kernel.eigenfunctions(X, **params) # [N, M]
eigenfunctions = self.kernel.eigenfunctions(X) # [N, M]

features = B.cast(B.dtype(params["lengthscale"]), eigenfunctions) * B.cast(
B.dtype(params["lengthscale"]), weights
) # [N, M]
return None, features
return key, features
56 changes: 36 additions & 20 deletions geometric_kernels/feature_maps/random_phase.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
:mod:`geometric_kernels.feature_maps.rejection_sampling`.
"""

from math import log

import lab as B
from beartype.typing import Dict, Optional, Tuple

Expand Down Expand Up @@ -46,16 +48,19 @@ def __init__(
self.space = space
self.num_levels = num_levels
self.num_random_phases = num_random_phases
self.kernel = MaternKarhunenLoeveKernel(space, num_levels)
self.kernel = MaternKarhunenLoeveKernel(
space,
num_levels,
normalize=False, # RandomPhaseFeatureMapCompact uses its own normalization.
)

def __call__(
self,
X: B.Numeric,
params: Dict[str, B.Numeric],
*,
key: B.RandomState,
normalize: Optional[bool] = None,
**kwargs,
normalize: bool = True,
) -> Tuple[B.RandomState, B.Numeric]:
"""
:param X:
Expand All @@ -80,11 +85,7 @@ def __call__(
does this for you.

:param normalize:
Normalize to have unit average variance (if omitted
or None, follows the standard behavior of
:class:`kernels.MaternKarhunenLoeveKernel`).
:param ``**kwargs``:
Unused.
Normalize to have constant unit variance. Defaults to `True`.

:return:
`Tuple(key, features)` where `features` is an [N, O] array, N
Expand All @@ -93,25 +94,44 @@ def __call__(
state (generator) for any other backends.
"""
key, random_phases = self.space.random(key, self.num_random_phases) # [O, D]
eigenvalues = self.kernel.eigenvalues_laplacian

spectrum = self.kernel._spectrum(
eigenvalues,
nu=params["nu"],
lengthscale=params["lengthscale"],
log_spectrum = self.kernel.eigenvalues(
params,
normalize=True, # Will be overridden by the normalization below but helps with numerical stability.
log_spectrum_on=True,
)

if is_complex(X):
dtype = complex_like(params["lengthscale"])
else:
dtype = B.dtype(params["lengthscale"])

weights = B.power(spectrum, 0.5) # [L, 1]
log_weights = 0.5 * log_spectrum # [L, 1]

eigenfunctions = self.kernel.eigenfunctions
if hasattr(eigenfunctions, "log_num_eigenfunctions_per_level"):
log_num_eigenfunctions_per_level = (
eigenfunctions.log_num_eigenfunctions_per_level
) # [L]
else:
log_num_eigenfunctions_per_level = list(
map(log, eigenfunctions.num_eigenfunctions_per_level)
) # [L]

log_num_eigenfunctions_per_level = B.cast(
B.dtype(params["lengthscale"]),
from_numpy(params["lengthscale"], log_num_eigenfunctions_per_level)[
:, None
],
) # [L, 1]

log_weights = log_weights + log_num_eigenfunctions_per_level
weights = B.exp(log_weights) # [L, 1]

random_phases_b = B.cast(dtype, from_numpy(X, random_phases))

phi_product = self.kernel.eigenfunctions.phi_product(
X, random_phases_b, **params
phi_product = self.kernel.eigenfunctions.normalized_phi_product( # type: ignore[attr-defined]
X, random_phases_b
) # [N, O, L]

embedding = B.cast(dtype, phi_product) # [N, O, L]
Expand All @@ -121,7 +141,6 @@ def __call__(
if is_complex(features):
features = B.concat(B.real(features), B.imag(features), axis=1)

normalize = normalize or (normalize is None and self.kernel.normalize)
if normalize:
normalizer = B.sqrt(B.sum(features**2, axis=-1, squeeze=False))
features = features / normalizer
Expand Down Expand Up @@ -165,7 +184,6 @@ def __call__(
*,
key: B.RandomState,
normalize: Optional[bool] = True,
**kwargs,
) -> Tuple[B.RandomState, B.Numeric]:
"""
:param X:
Expand All @@ -189,8 +207,6 @@ def __call__(

:param normalize:
Normalize to have unit average variance (`True` by default).
:param ``**kwargs``:
Unused.

:return: `Tuple(key, features)` where `features` is an [N, O] array, N
is the number of inputs and O is the dimension of the feature map;
Expand Down
Loading
Loading