Skip to content

Scaling

Feature scalers and normalized model wrappers for time series models.

Scaler

Bases: Module

Base class for feature scaling on [batch, seq, features] tensors.

from_stats classmethod

from_stats(stats) -> Scaler

Create a Scaler from a NormPair. Override in subclasses.

Source code in tsfast/models/scaling.py
@classmethod
def from_stats(cls, stats) -> "Scaler":
    """Construct a concrete Scaler from a NormPair.

    The base class defines no scaling semantics, so every subclass
    must provide its own implementation of this constructor.
    """
    raise NotImplementedError(cls.__name__ + " must implement from_stats")

StandardScaler

StandardScaler(mean, std)

Bases: Scaler

Normalize by (x - mean) / std.

Parameters:

Name Type Description Default
mean

per-feature mean, as ndarray or tensor

required
std

per-feature standard deviation, as ndarray or tensor

required
Source code in tsfast/models/scaling.py
def __init__(self, mean, std):
    """Register per-feature mean/std as buffers.

    Args:
        mean: per-feature mean, as ndarray or tensor
        std: per-feature standard deviation, as ndarray or tensor

    The epsilon added to ``std`` keeps the later division well-defined
    for features with zero variance.
    """
    super().__init__()
    mean_t = _ensure_tensor(mean)
    std_t = _ensure_tensor(std)
    self.register_buffer("mean", mean_t)
    self.register_buffer("std", std_t + self._epsilon)

MinMaxScaler

MinMaxScaler(min_val, max_val)

Bases: Scaler

Normalize by (x - min) / (max - min) to [0, 1].

Parameters:

Name Type Description Default
min_val

per-feature minimum, as ndarray or tensor

required
max_val

per-feature maximum, as ndarray or tensor

required
Source code in tsfast/models/scaling.py
def __init__(self, min_val, max_val):
    """Register min and (max - min) range as buffers for [0, 1] scaling.

    Args:
        min_val: per-feature minimum, as ndarray or tensor
        max_val: per-feature maximum, as ndarray or tensor

    The epsilon added to the range keeps the division well-defined for
    constant features (min == max).
    """
    super().__init__()
    # Convert each input exactly once (the original converted min_val twice).
    lo = _ensure_tensor(min_val)
    hi = _ensure_tensor(max_val)
    self.register_buffer("min_val", lo)
    self.register_buffer("range_val", hi - lo + self._epsilon)

MaxAbsScaler

MaxAbsScaler(min_val, max_val)

Bases: Scaler

Normalize by x / max(|min|, |max|).

Parameters:

Name Type Description Default
min_val

per-feature minimum, as ndarray or tensor

required
max_val

per-feature maximum, as ndarray or tensor

required
Source code in tsfast/models/scaling.py
def __init__(self, min_val, max_val):
    """Register the per-feature max absolute value as a buffer.

    Args:
        min_val: per-feature minimum, as ndarray or tensor
        max_val: per-feature maximum, as ndarray or tensor

    The scale is max(|min|, |max|) per feature; epsilon keeps the
    later division well-defined when both bounds are zero.
    """
    super().__init__()
    lo_abs = _ensure_tensor(min_val).abs()
    hi_abs = _ensure_tensor(max_val).abs()
    self.register_buffer("max_abs", torch.max(lo_abs, hi_abs) + self._epsilon)

ScaledModel

ScaledModel(model: Module, input_norm: Scaler, output_norm: Scaler | None = None)

Bases: Module

Wraps a model with input normalization and optional output denormalization.

Parameters:

Name Type Description Default
model Module

inner model to wrap

required
input_norm Scaler

scaler applied to inputs before the model

required
output_norm Scaler | None

scaler applied to outputs after the model

None
Source code in tsfast/models/scaling.py
def __init__(self, model: nn.Module, input_norm: Scaler, output_norm: Scaler | None = None):
    """Wrap *model* with input normalization and optional output denormalization.

    Args:
        model: inner model to wrap
        input_norm: scaler applied to inputs before the model
        output_norm: scaler applied to outputs after the model, or None
    """
    super().__init__()
    # Assigning nn.Module attributes registers them as submodules.
    self.model = model
    self.input_norm = input_norm
    self.output_norm = output_norm

from_stats classmethod

from_stats(model: Module, input_stats, output_stats=None, scaler_cls: type | None = None) -> ScaledModel

Create from NormPair stats with the given Scaler class.

Source code in tsfast/models/scaling.py
@classmethod
def from_stats(
    cls, model: nn.Module, input_stats, output_stats=None, scaler_cls: type | None = None
) -> "ScaledModel":
    """Build a ScaledModel from NormPair stats using *scaler_cls*.

    Falls back to StandardScaler when no scaler class is given; the
    output scaler is only built when output stats are provided.
    """
    chosen = StandardScaler if scaler_cls is None else scaler_cls
    in_norm = chosen.from_stats(input_stats)
    out_norm = None if output_stats is None else chosen.from_stats(output_stats)
    return cls(model, in_norm, out_norm)

from_dls classmethod

from_dls(model: Module, dls, input_norm: type[Scaler] | None = StandardScaler, output_norm: type[Scaler] | None = None, *, autoregressive: bool = False) -> nn.Module

Create from DataLoaders norm_stats, or return model unchanged if input_norm is None.

Parameters:

Name Type Description Default
model Module

inner model to wrap

required
dls

DataLoaders with norm_stats attribute (populated automatically if missing)

required
input_norm type[Scaler] | None

scaler class for input normalization, or None to skip wrapping

StandardScaler
output_norm type[Scaler] | None

scaler class for output denormalization, or None to skip

None
autoregressive bool

if True, input stats are norm_u + norm_y and output stats use input_norm (AR models use the same scaler for both)

False
Source code in tsfast/models/scaling.py
@classmethod
def from_dls(
    cls,
    model: nn.Module,
    dls,
    input_norm: type[Scaler] | None = StandardScaler,
    output_norm: type[Scaler] | None = None,
    *,
    autoregressive: bool = False,
) -> nn.Module:
    """Create from DataLoaders norm_stats, or return *model* unchanged if *input_norm* is None.

    Args:
        model: inner model to wrap
        dls: DataLoaders with ``norm_stats`` attribute (populated automatically if missing)
        input_norm: scaler class for input normalization, or None to skip wrapping
        output_norm: scaler class for output denormalization, or None to skip
        autoregressive: if True, input stats are ``norm_u + norm_y`` and output
            stats use ``input_norm`` (AR models use the same scaler for both)
    """
    if input_norm is None:
        return model
    stats_u, stats_y = dls.norm_stats
    if autoregressive:
        # AR models feed y back as input, so a single scaler class covers
        # the concatenated u+y input stats as well as the output stats.
        return cls(
            model,
            input_norm.from_stats(stats_u + stats_y),
            input_norm.from_stats(stats_y),
        )
    out_scaler = None if output_norm is None else output_norm.from_stats(stats_y)
    return cls(model, input_norm.from_stats(stats_u), out_scaler)

unwrap_model

unwrap_model(model: Module) -> Module

Get the inner model, unwrapping DDP/DP and ScaledModel if present.

Source code in tsfast/models/scaling.py
def unwrap_model(model: nn.Module) -> nn.Module:
    """Return the innermost model, stripping DDP/DP wrappers and any ScaledModel shell."""
    inner = _unwrap_ddp(model)
    if isinstance(inner, ScaledModel):
        return inner.model
    return inner