Live loss plotting during training
[1]:
if 'google.colab' in str(get_ipython()):
    # install from GitHub with the [live] extra (needed for live loss plotting)
    !pip install "neuralprophet[live] @ git+https://github.com/ourownstory/neural_prophet.git" # may take a while
    #!pip install neuralprophet[live] # much faster, but may not have the latest upgrades/bugfixes
import pandas as pd
from neuralprophet import NeuralProphet
[2]:
data_location = "https://raw.githubusercontent.com/ourownstory/neuralprophet-data/main/datasets/"
df = pd.read_csv(data_location + "retail_sales.csv")  # monthly retail sales example dataset
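The dataframe needs the usual NeuralProphet layout with a ds timestamp column and a y value column; a quick look at the first rows (an optional check, not part of the original run) confirms this:
[ ]:
df.head()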
[3]:
df_train, df_val = NeuralProphet().split_df(df, valid_p=0.2)  # hold out the last 20% of the data for validation
INFO - (NP.df_utils._infer_frequency) - Major frequency MS corresponds to 91.126% of the data.
INFO - (NP.df_utils._infer_frequency) - Dataframe freq automatically defined as MS
[4]:
m = NeuralProphet()
# progress="plot" shows a live-updating plot of the training and validation loss while fitting
metrics = m.fit(df_train, validation_df=df_val, progress="plot")

log-SmoothL1Loss
training (min: -7.234, max: -1.345, cur: -7.234)
validation (min: -4.842, max: -1.321, cur: -3.239)
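Besides driving the live plot, fit() returns the recorded metrics as a pandas DataFrame, so the final values shown above can also be inspected after training. Note that the exact column names (e.g. SmoothL1Loss vs. Loss) depend on the installed NeuralProphet version:
[ ]:
metrics.tail(1)  # metrics of the last epoch; column names vary by version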
[5]:
m = NeuralProphet()
# progress="plot-all" additionally live-plots MAE, RMSE and the regularization loss
metrics = m.fit(df_train, validation_df=df_val, progress="plot-all")

MAE
training (min: 6564.215, max: 253641.383, cur: 6577.867)
validation (min: 31846.620, max: 267939.478, cur: 63734.493)
RMSE
training (min: 8668.035, max: 283905.474, cur: 8856.043)
validation (min: 34741.931, max: 300150.946, cur: 67346.074)
RegLoss
RegLoss (min: 0.000, max: 0.000, cur: 0.000)
log-SmoothL1Loss
training (min: -7.233, max: -0.494, cur: -7.233)
validation (min: -4.564, max: -0.408, cur: -3.241)
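If the live plot cannot be used (for instance when the [live] extra providing livelossplot is not installed), a similar picture can be drawn after training from the returned metrics DataFrame. The sketch below is an assumption-labelled alternative, not part of the original notebook: it uses the plain progress="bar" mode and selects the loss columns by name pattern because their exact labels differ across versions.
[ ]:
import matplotlib.pyplot as plt

m = NeuralProphet()
metrics = m.fit(df_train, validation_df=df_val, progress="bar")  # plain progress bar, no live plot

# select the train/validation loss columns without hard-coding version-specific names
loss_cols = [c for c in metrics.columns if "Loss" in c and "Reg" not in c]
ax = metrics[loss_cols].plot(logy=True, figsize=(8, 4))
ax.set_xlabel("epoch")
ax.set_ylabel("loss (log scale)")
plt.show()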