statsforecast supports providing scikit-learn models through the statsforecast.models.SklearnModel wrapper. This can help you leverage feature engineering and train one model per series, which can sometimes be better than training a single global model (as in mlforecast).

Data setup

import os
from functools import partial

from datasetsforecast.m4 import M4, M4Info
from sklearn.linear_model import Lasso, Ridge
from utilsforecast.feature_engineering import pipeline, trend, fourier
from utilsforecast.plotting import plot_series

from statsforecast import StatsForecast
from statsforecast.models import SklearnModel
from statsforecast.utils import ConformalIntervals
# Make the predict methods return the series id as a column
# instead of as the index.
os.environ['NIXTLA_ID_AS_COL'] = '1'
# Work with the M4 hourly subset; seasonality and forecast horizon
# are taken from the dataset's own metadata.
group = 'Hourly'
season_length = M4Info[group].seasonality
horizon = M4Info[group].horizon
# Downloads the dataset into ./data on first use.
data, *_ = M4.load('data', group)
# M4 timestamps are integer time steps; cast so that freq=1 works downstream.
data['ds'] = data['ds'].astype('int64')
# Hold out the last `horizon` observations of every series for validation
# and keep the rest for training.
valid = data.groupby('unique_id').tail(horizon).copy()
train = data.drop(valid.index)
train.head()
   unique_id  ds      y
0         H1   1  605.0
1         H1   2  586.0
2         H1   3  586.0
3         H1   4  559.0
4         H1   5  511.0

Generating features

The utilsforecast library provides some utilities for feature engineering.

# Build the feature matrices: a linear trend plus 10 Fourier pairs for the
# seasonal period. `pipeline` returns the in-sample features for training and
# the matching future features for the next `h` steps of each series
# (the latter are passed as X_df at forecast time below).
train_features, valid_features = pipeline(
    train,
    features=[
        trend,        
        partial(fourier, season_length=season_length, k=10),  # 10 fourier terms
    ],
    freq=1,  # integer timestamps, so one step per period
    h=horizon,
)
train_features.head()
   unique_id  ds  y  trend  sin1_24  sin2_24  sin3_24  sin4_24  sin5_24  sin6_24  sin7_24  sin8_24  sin9_24  sin10_24  cos1_24  cos2_24  cos3_24  cos4_24  cos5_24  cos6_24  cos7_24  cos8_24  cos9_24  cos10_24
0H11605.0261.0-0.707105-1.000000-0.707108-0.0000120.7071121.0000000.7070950.000024-0.707125-1.0000000.7071090.000006-0.707106-1.000000-0.707101-0.0000030.7071191.0000000.707088-0.000015
1H12586.0262.0-0.500001-0.866027-1.000000-0.866023-0.499988-0.0000070.5000010.8660311.0000000.8660110.8660250.4999980.000004-0.500005-0.866032-1.000000-0.866025-0.4999910.0000190.500025
2H13586.0263.0-0.258817-0.499997-0.707103-0.866021-0.965931-1.000000-0.965922-0.866033-0.707098-0.4999640.9659260.8660270.7071110.5000070.2587990.000012-0.258835-0.499986-0.707116-0.866046
3H14559.0264.00.0000050.0000110.0000080.0000210.0000030.000016-0.0000010.000042-0.0000060.0000071.0000001.0000001.0000001.0000001.0000001.0000001.0000001.0000001.0000001.000000
4H15511.0265.00.2588200.5000020.7071140.8660270.9659251.0000000.9659300.8660220.7071060.5000050.9659260.8660240.7070990.4999970.258822-0.000021-0.258803-0.500006-0.707107-0.866022

Forecasting

# Wrap scikit-learn estimators so statsforecast trains one model per series.
sf = StatsForecast(
    models=[
        SklearnModel(Lasso()),
        SklearnModel(Ridge()),
    ],
    freq=1,  # integer time steps, matching the data setup above
)
# Forecast `horizon` steps ahead, supplying the future exogenous features
# via X_df. Conformal prediction intervals are calibrated on 4 windows of
# size `horizon`, and level=[95] requests 95% intervals.
preds = sf.forecast(
    df=train_features,
    h=horizon,
    X_df=valid_features,
    prediction_intervals=ConformalIntervals(n_windows=4, h=horizon),
    level=[95],
)
# Plot 4 series with their point forecasts and 95% intervals.
plot_series(train, preds, level=[95], palette='tab20b', max_ids=4)