some new features
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
57
.venv/lib/python3.12/site-packages/prophet/tests/conftest.py
Normal file
@@ -0,0 +1,57 @@
from pathlib import Path

import pandas as pd
import pytest


@pytest.fixture(scope="package")
def daily_univariate_ts() -> pd.DataFrame:
    """Daily univariate time series with 2 years of data"""
    return pd.read_csv(Path(__file__).parent / "data.csv", parse_dates=["ds"])


@pytest.fixture(scope="package")
def subdaily_univariate_ts() -> pd.DataFrame:
    """Sub-daily univariate time series"""
    return pd.read_csv(Path(__file__).parent / "data2.csv", parse_dates=["ds"])


@pytest.fixture(scope="package")
def large_numbers_ts() -> pd.DataFrame:
    """Univariate time series with large values to test scaling"""
    return pd.read_csv(Path(__file__).parent / "data3.csv", parse_dates=["ds"])


def pytest_configure(config):
    config.addinivalue_line("markers", "slow: mark tests as slow (include in run with --test-slow)")


def pytest_addoption(parser):
    parser.addoption("--test-slow", action="store_true", default=False, help="Run slow tests")
    parser.addoption(
        "--backend",
        nargs="+",
        default=["CMDSTANPY"],
        help="Probabilistic Programming Language backend to perform tests with.",
    )


def pytest_collection_modifyitems(config, items):
    if config.getoption("--test-slow"):
        return
    skip_slow = pytest.mark.skip(reason="Skipped due to the lack of '--test-slow' argument")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)


def pytest_generate_tests(metafunc):
    """
    For each test, if `backend` is used as a fixture, add a parametrization equal to the value of the
    --backend option.

    This is used to re-run the test suite for different probabilistic programming language backends
    (e.g. cmdstanpy, numpyro).
    """
    if "backend" in metafunc.fixturenames:
        metafunc.parametrize("backend", metafunc.config.getoption("backend"))
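Editorial note: the conftest above wires three fixtures to the CSV files added below and registers two pytest hooks. `pytest_addoption` adds a repeatable `--backend` option, and `pytest_generate_tests` turns any test that requests a `backend` fixture into one parametrized run per requested backend. A minimal sketch of a consuming test follows; the test body is illustrative, not part of this commit:

    def test_fit_smoke(backend, daily_univariate_ts):
        # `backend` is injected by pytest_generate_tests; one instance of this
        # test runs per value passed via --backend (default: CMDSTANPY).
        from prophet import Prophet
        m = Prophet(stan_backend=backend)
        m.fit(daily_univariate_ts.head(50))
        assert m.params  # fit() populates the fitted parameters

    # Run against two backends and include slow tests:
    #   pytest --backend CMDSTANPY NUMPYRO --test-slow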
511
.venv/lib/python3.12/site-packages/prophet/tests/data.csv
Normal file
@@ -0,0 +1,511 @@
ds,y
2012-05-18,38.23
2012-05-21,34.03
2012-05-22,31.0
2012-05-23,32.0
2012-05-24,33.03
2012-05-25,31.91
2012-05-29,28.84
2012-05-30,28.19
2012-05-31,29.6
2012-06-01,27.72
2012-06-04,26.9
2012-06-05,25.87
2012-06-06,26.81
2012-06-07,26.31
2012-06-08,27.1
2012-06-11,27.01
2012-06-12,27.4
2012-06-13,27.27
2012-06-14,28.29
2012-06-15,30.01
2012-06-18,31.41
2012-06-19,31.91
2012-06-20,31.6
2012-06-21,31.84
2012-06-22,33.05
2012-06-25,32.06
2012-06-26,33.1
2012-06-27,32.23
2012-06-28,31.36
2012-06-29,31.1
2012-07-02,30.77
2012-07-03,31.2
2012-07-05,31.47
2012-07-06,31.73
2012-07-09,32.17
2012-07-10,31.47
2012-07-11,30.97
2012-07-12,30.81
2012-07-13,30.72
2012-07-16,28.25
2012-07-17,28.09
2012-07-18,29.11
2012-07-19,29.0
2012-07-20,28.76
2012-07-23,28.75
2012-07-24,28.45
2012-07-25,29.34
2012-07-26,26.85
2012-07-27,23.71
2012-07-30,23.15
2012-07-31,21.71
2012-08-01,20.88
2012-08-02,20.04
2012-08-03,21.09
2012-08-06,21.92
2012-08-07,20.72
2012-08-08,20.72
2012-08-09,21.01
2012-08-10,21.81
2012-08-13,21.6
2012-08-14,20.38
2012-08-15,21.2
2012-08-16,19.87
2012-08-17,19.05
2012-08-20,20.01
2012-08-21,19.16
2012-08-22,19.44
2012-08-23,19.44
2012-08-24,19.41
2012-08-27,19.15
2012-08-28,19.34
2012-08-29,19.1
2012-08-30,19.09
2012-08-31,18.06
2012-09-04,17.73
2012-09-05,18.58
2012-09-06,18.96
2012-09-07,18.98
2012-09-10,18.81
2012-09-11,19.43
2012-09-12,20.93
2012-09-13,20.71
2012-09-14,22.0
2012-09-17,21.52
2012-09-18,21.87
2012-09-19,23.29
2012-09-20,22.59
2012-09-21,22.86
2012-09-24,20.79
2012-09-25,20.28
2012-09-26,20.62
2012-09-27,20.32
2012-09-28,21.66
2012-10-01,21.99
2012-10-02,22.27
2012-10-03,21.83
2012-10-04,21.95
2012-10-05,20.91
2012-10-08,20.4
2012-10-09,20.23
2012-10-10,19.64
2012-10-11,19.75
2012-10-12,19.52
2012-10-15,19.52
2012-10-16,19.48
2012-10-17,19.88
2012-10-18,18.98
2012-10-19,19.0
2012-10-22,19.32
2012-10-23,19.5
2012-10-24,23.23
2012-10-25,22.56
2012-10-26,21.94
2012-10-31,21.11
2012-11-01,21.21
2012-11-02,21.18
2012-11-05,21.25
2012-11-06,21.17
2012-11-07,20.47
2012-11-08,19.99
2012-11-09,19.21
2012-11-12,20.07
2012-11-13,19.86
2012-11-14,22.36
2012-11-15,22.17
2012-11-16,23.56
2012-11-19,22.92
2012-11-20,23.1
2012-11-21,24.32
2012-11-23,24.0
2012-11-26,25.94
2012-11-27,26.15
2012-11-28,26.36
2012-11-29,27.32
2012-11-30,28.0
2012-12-03,27.04
2012-12-04,27.46
2012-12-05,27.71
2012-12-06,26.97
2012-12-07,27.49
2012-12-10,27.84
2012-12-11,27.98
2012-12-12,27.58
2012-12-13,28.24
2012-12-14,26.81
2012-12-17,26.75
2012-12-18,27.71
2012-12-19,27.41
2012-12-20,27.36
2012-12-21,26.26
2012-12-24,26.93
2012-12-26,26.51
2012-12-27,26.05
2012-12-28,25.91
2012-12-31,26.62
2013-01-02,28.0
2013-01-03,27.77
2013-01-04,28.76
2013-01-07,29.42
2013-01-08,29.06
2013-01-09,30.59
2013-01-10,31.3
2013-01-11,31.72
2013-01-14,30.95
2013-01-15,30.1
2013-01-16,29.85
2013-01-17,30.14
2013-01-18,29.66
2013-01-22,30.73
2013-01-23,30.82
2013-01-24,31.08
2013-01-25,31.54
2013-01-28,32.47
2013-01-29,30.79
2013-01-30,31.24
2013-01-31,30.98
2013-02-01,29.73
2013-02-04,28.11
2013-02-05,28.64
2013-02-06,29.05
2013-02-07,28.65
2013-02-08,28.55
2013-02-11,28.26
2013-02-12,27.37
2013-02-13,27.91
2013-02-14,28.5
2013-02-15,28.32
2013-02-19,28.93
2013-02-20,28.46
2013-02-21,27.28
2013-02-22,27.13
2013-02-25,27.27
2013-02-26,27.39
2013-02-27,26.87
2013-02-28,27.25
2013-03-01,27.78
2013-03-04,27.72
2013-03-05,27.52
2013-03-06,27.45
2013-03-07,28.58
2013-03-08,27.96
2013-03-11,28.14
2013-03-12,27.83
2013-03-13,27.08
2013-03-14,27.04
2013-03-15,26.65
2013-03-18,26.49
2013-03-19,26.55
2013-03-20,25.86
2013-03-21,25.74
2013-03-22,25.73
2013-03-25,25.13
2013-03-26,25.21
2013-03-27,26.09
2013-03-28,25.58
2013-04-01,25.53
2013-04-02,25.42
2013-04-03,26.25
2013-04-04,27.07
2013-04-05,27.39
2013-04-08,26.85
2013-04-09,26.59
2013-04-10,27.57
2013-04-11,28.02
2013-04-12,27.4
2013-04-15,26.52
2013-04-16,26.92
2013-04-17,26.63
2013-04-18,25.69
2013-04-19,25.73
2013-04-22,25.97
2013-04-23,25.98
2013-04-24,26.11
2013-04-25,26.14
2013-04-26,26.85
2013-04-29,26.98
2013-04-30,27.77
2013-05-01,27.43
2013-05-02,28.97
2013-05-03,28.31
2013-05-06,27.57
2013-05-07,26.89
2013-05-08,27.12
2013-05-09,27.04
2013-05-10,26.68
2013-05-13,26.82
2013-05-14,27.07
2013-05-15,26.6
2013-05-16,26.13
2013-05-17,26.25
2013-05-20,25.76
2013-05-21,25.66
2013-05-22,25.16
2013-05-23,25.06
2013-05-24,24.31
2013-05-28,24.1
2013-05-29,23.32
2013-05-30,24.55
2013-05-31,24.35
2013-06-03,23.85
2013-06-04,23.52
2013-06-05,22.9
2013-06-06,22.97
2013-06-07,23.29
2013-06-10,24.33
2013-06-11,24.03
2013-06-12,23.77
2013-06-13,23.73
2013-06-14,23.63
2013-06-17,24.02
2013-06-18,24.21
2013-06-19,24.31
2013-06-20,23.9
2013-06-21,24.53
2013-06-24,23.94
2013-06-25,24.25
2013-06-26,24.16
2013-06-27,24.66
2013-06-28,24.88
2013-07-01,24.81
2013-07-02,24.41
2013-07-03,24.52
2013-07-05,24.37
2013-07-08,24.71
2013-07-09,25.48
2013-07-10,25.8
2013-07-11,25.81
2013-07-12,25.91
2013-07-15,26.28
2013-07-16,26.32
2013-07-17,26.65
2013-07-18,26.18
2013-07-19,25.88
2013-07-22,26.05
2013-07-23,26.13
2013-07-24,26.51
2013-07-25,34.36
2013-07-26,34.01
2013-07-29,35.43
2013-07-30,37.63
2013-07-31,36.8
2013-08-01,37.49
2013-08-02,38.05
2013-08-05,39.19
2013-08-06,38.55
2013-08-07,38.87
2013-08-08,38.54
2013-08-09,38.5
2013-08-12,38.22
2013-08-13,37.02
2013-08-14,36.65
2013-08-15,36.56
2013-08-16,37.08
2013-08-19,37.81
2013-08-20,38.41
2013-08-21,38.32
2013-08-22,38.55
2013-08-23,40.55
2013-08-26,41.34
2013-08-27,39.64
2013-08-28,40.55
2013-08-29,41.28
2013-08-30,41.29
2013-09-03,41.87
2013-09-04,41.78
2013-09-05,42.66
2013-09-06,43.95
2013-09-09,44.04
2013-09-10,43.6
2013-09-11,45.04
2013-09-12,44.75
2013-09-13,44.31
2013-09-16,42.51
2013-09-17,45.07
2013-09-18,45.23
2013-09-19,45.98
2013-09-20,47.49
2013-09-23,47.19
2013-09-24,48.45
2013-09-25,49.46
2013-09-26,50.39
2013-09-27,51.24
2013-09-30,50.23
2013-10-01,50.42
2013-10-02,50.28
2013-10-03,49.18
2013-10-04,51.04
2013-10-07,50.52
2013-10-08,47.14
2013-10-09,46.77
2013-10-10,49.05
2013-10-11,49.11
2013-10-14,49.51
2013-10-15,49.5
2013-10-16,51.14
2013-10-17,52.21
2013-10-18,54.22
2013-10-21,53.85
2013-10-22,52.68
2013-10-23,51.9
2013-10-24,52.45
2013-10-25,51.95
2013-10-28,50.23
2013-10-29,49.4
2013-10-30,49.01
2013-10-31,50.21
2013-11-01,49.75
2013-11-04,48.22
2013-11-05,50.11
2013-11-06,49.12
2013-11-07,47.56
2013-11-08,47.53
2013-11-11,46.2
2013-11-12,46.61
2013-11-13,48.71
2013-11-14,48.99
2013-11-15,49.01
2013-11-18,45.83
2013-11-19,46.36
2013-11-20,46.43
2013-11-21,46.7
2013-11-22,46.23
2013-11-25,44.82
2013-11-26,45.89
2013-11-27,46.49
2013-11-29,47.01
2013-12-02,47.06
2013-12-03,46.73
2013-12-04,48.62
2013-12-05,48.34
2013-12-06,47.94
2013-12-09,48.84
2013-12-10,50.25
2013-12-11,49.38
2013-12-12,51.83
2013-12-13,53.32
2013-12-16,53.81
2013-12-17,54.86
2013-12-18,55.57
2013-12-19,55.05
2013-12-20,55.12
2013-12-23,57.77
2013-12-24,57.96
2013-12-26,57.73
2013-12-27,55.44
2013-12-30,53.71
2013-12-31,54.65
2014-01-02,54.71
2014-01-03,54.56
2014-01-06,57.2
2014-01-07,57.92
2014-01-08,58.23
2014-01-09,57.22
2014-01-10,57.94
2014-01-13,55.91
2014-01-14,57.74
2014-01-15,57.6
2014-01-16,57.19
2014-01-17,56.3
2014-01-21,58.51
2014-01-22,57.51
2014-01-23,56.63
2014-01-24,54.45
2014-01-27,53.55
2014-01-28,55.14
2014-01-29,53.53
2014-01-30,61.08
2014-01-31,62.57
2014-02-03,61.48
2014-02-04,62.75
2014-02-05,62.19
2014-02-06,62.16
2014-02-07,64.32
2014-02-10,63.55
2014-02-11,64.85
2014-02-12,64.45
2014-02-13,67.33
2014-02-14,67.09
2014-02-18,67.3
2014-02-19,68.06
2014-02-20,69.63
2014-02-21,68.59
2014-02-24,70.78
2014-02-25,69.85
2014-02-26,69.26
2014-02-27,68.94
2014-02-28,68.46
2014-03-03,67.41
2014-03-04,68.8
2014-03-05,71.57
2014-03-06,70.84
2014-03-07,69.8
2014-03-10,72.03
2014-03-11,70.1
2014-03-12,70.88
2014-03-13,68.83
2014-03-14,67.72
2014-03-17,68.74
2014-03-18,69.19
2014-03-19,68.24
2014-03-20,66.97
2014-03-21,67.24
2014-03-24,64.1
2014-03-25,64.89
2014-03-26,60.39
2014-03-27,60.97
2014-03-28,60.01
2014-03-31,60.24
2014-04-01,62.62
2014-04-02,62.72
2014-04-03,59.49
2014-04-04,56.75
2014-04-07,56.95
2014-04-08,58.19
2014-04-09,62.41
2014-04-10,59.16
2014-04-11,58.53
2014-04-14,58.89
2014-04-15,59.09
2014-04-16,59.72
2014-04-17,58.94
2014-04-21,61.24
2014-04-22,63.03
2014-04-23,61.36
2014-04-24,60.87
2014-04-25,57.71
2014-04-28,56.14
2014-04-29,58.15
2014-04-30,59.78
2014-05-01,61.15
2014-05-02,60.46
2014-05-05,61.22
2014-05-06,58.53
2014-05-07,57.39
2014-05-08,56.76
2014-05-09,57.24
2014-05-12,59.83
2014-05-13,59.83
2014-05-14,59.23
2014-05-15,57.92
2014-05-16,58.02
2014-05-19,59.21
2014-05-20,58.56
2014-05-21,60.49
2014-05-22,60.52
2014-05-23,61.35
2014-05-27,63.48
2014-05-28,63.51
2014-05-29,63.83
2014-05-30,63.30
864
.venv/lib/python3.12/site-packages/prophet/tests/data2.csv
Normal file
@@ -0,0 +1,864 @@
ds,y
2017-01-01 00:05:00,0.0
2017-01-01 00:10:00,0.0
2017-01-01 00:15:00,0.0
2017-01-01 00:20:00,0.0
2017-01-01 00:25:00,-0.1
2017-01-01 00:30:00,-0.1
2017-01-01 00:35:00,-0.1
2017-01-01 00:40:00,-0.1
2017-01-01 00:45:00,-0.1
2017-01-01 00:50:00,-0.1
2017-01-01 00:55:00,-0.3
2017-01-01 01:00:00,-0.2
2017-01-01 01:05:00,-0.3
2017-01-01 01:10:00,-0.4
2017-01-01 01:15:00,-0.4
2017-01-01 01:20:00,-0.3
2017-01-01 01:25:00,-0.3
2017-01-01 01:30:00,-0.2
2017-01-01 01:35:00,-0.3
2017-01-01 01:40:00,-0.3
2017-01-01 01:45:00,-0.3
2017-01-01 01:50:00,-0.3
2017-01-01 01:55:00,-0.3
2017-01-01 02:00:00,-0.3
2017-01-01 02:05:00,-0.3
2017-01-01 02:10:00,-0.3
2017-01-01 02:15:00,-0.3
2017-01-01 02:20:00,-0.3
2017-01-01 02:25:00,-0.3
2017-01-01 02:30:00,-0.3
2017-01-01 02:35:00,-0.3
2017-01-01 02:40:00,-0.3
2017-01-01 02:45:00,-0.3
2017-01-01 02:50:00,-0.3
2017-01-01 02:55:00,-0.3
2017-01-01 03:00:00,-0.3
2017-01-01 03:05:00,-0.3
2017-01-01 03:10:00,-0.3
2017-01-01 03:15:00,-0.3
2017-01-01 03:20:00,-0.3
2017-01-01 03:25:00,-0.4
2017-01-01 03:30:00,-0.6
2017-01-01 03:35:00,-0.4
2017-01-01 03:40:00,-0.3
2017-01-01 03:45:00,-0.4
2017-01-01 03:50:00,-0.7
2017-01-01 03:55:00,-0.8
2017-01-01 04:00:00,-0.4
2017-01-01 04:05:00,-0.3
2017-01-01 04:10:00,-0.4
2017-01-01 04:15:00,-0.4
2017-01-01 04:20:00,-0.4
2017-01-01 04:25:00,-0.5
2017-01-01 04:30:00,-0.5
2017-01-01 04:35:00,-0.5
2017-01-01 04:40:00,-0.4
2017-01-01 04:45:00,-0.5
2017-01-01 04:50:00,-0.5
2017-01-01 04:55:00,-0.5
2017-01-01 05:00:00,-0.6
2017-01-01 05:05:00,-0.9
2017-01-01 05:10:00,-0.9
2017-01-01 05:15:00,-1.2
2017-01-01 05:20:00,-1.4
2017-01-01 05:25:00,-1.8
2017-01-01 05:30:00,-2.0
2017-01-01 05:35:00,-2.2
2017-01-01 05:40:00,-1.6
2017-01-01 05:45:00,-1.2
2017-01-01 05:50:00,-1.2
2017-01-01 05:55:00,-1.4
2017-01-01 06:00:00,-1.2
2017-01-01 06:05:00,-0.9
2017-01-01 06:10:00,-0.9
2017-01-01 06:15:00,-0.9
2017-01-01 06:20:00,-0.9
2017-01-01 06:25:00,-0.9
2017-01-01 06:30:00,-1.2
2017-01-01 06:35:00,-1.1
2017-01-01 06:40:00,-1.2
2017-01-01 06:45:00,-1.3
2017-01-01 06:50:00,-1.4
2017-01-01 06:55:00,-1.7
2017-01-01 07:00:00,-1.7
2017-01-01 07:05:00,-1.7
2017-01-01 07:10:00,-1.8
2017-01-01 07:15:00,-2.4
2017-01-01 07:20:00,-2.9
2017-01-01 07:25:00,-3.2
2017-01-01 07:30:00,-3.4
2017-01-01 07:35:00,-3.6
2017-01-01 07:40:00,-3.6
2017-01-01 07:45:00,-3.5
2017-01-01 07:50:00,-3.5
2017-01-01 07:55:00,-3.5
2017-01-01 08:00:00,-3.6
2017-01-01 08:05:00,-3.7
2017-01-01 08:10:00,-3.6
2017-01-01 08:15:00,-3.6
2017-01-01 08:20:00,-3.8
2017-01-01 08:25:00,-4.0
2017-01-01 08:30:00,-3.9
2017-01-01 08:35:00,-3.9
2017-01-01 08:40:00,-4.1
2017-01-01 08:45:00,-4.0
2017-01-01 08:50:00,-4.1
2017-01-01 08:55:00,-4.1
2017-01-01 09:00:00,-4.2
2017-01-01 09:05:00,-4.1
2017-01-01 09:10:00,-4.2
2017-01-01 09:15:00,-4.1
2017-01-01 09:20:00,-4.0
2017-01-01 09:25:00,-4.0
2017-01-01 09:30:00,-4.0
2017-01-01 09:35:00,-4.1
2017-01-01 09:40:00,-4.1
2017-01-01 09:45:00,-4.2
2017-01-01 09:50:00,-4.3
2017-01-01 09:55:00,-4.4
2017-01-01 10:00:00,-4.5
2017-01-01 10:05:00,-4.6
2017-01-01 10:10:00,-4.7
2017-01-01 10:15:00,-4.6
2017-01-01 10:20:00,-4.6
2017-01-01 10:25:00,-4.6
2017-01-01 10:30:00,-4.5
2017-01-01 10:35:00,-4.6
2017-01-01 10:40:00,-4.6
2017-01-01 10:45:00,-4.6
2017-01-01 10:50:00,-4.6
2017-01-01 10:55:00,-4.7
2017-01-01 11:00:00,-4.7
2017-01-01 11:05:00,-4.6
2017-01-01 11:10:00,-4.5
2017-01-01 11:15:00,-4.7
2017-01-01 11:20:00,-4.7
2017-01-01 11:25:00,-4.8
2017-01-01 11:30:00,-4.8
2017-01-01 11:35:00,-4.8
2017-01-01 11:40:00,-4.8
2017-01-01 11:45:00,-4.7
2017-01-01 11:50:00,-4.6
2017-01-01 11:55:00,-4.6
2017-01-01 12:00:00,-4.8
2017-01-01 12:05:00,-4.9
2017-01-01 12:10:00,-4.9
2017-01-01 12:15:00,-4.9
2017-01-01 12:20:00,-5.0
2017-01-01 12:25:00,-4.9
2017-01-01 12:30:00,-4.9
2017-01-01 12:35:00,-5.0
2017-01-01 12:40:00,-5.1
2017-01-01 12:45:00,-5.3
2017-01-01 12:50:00,-5.5
2017-01-01 12:55:00,-5.7
2017-01-01 13:00:00,-5.8
2017-01-01 13:05:00,-5.9
2017-01-01 13:10:00,-5.9
2017-01-01 13:15:00,-6.1
2017-01-01 13:20:00,-6.1
2017-01-01 13:25:00,-6.1
2017-01-01 13:30:00,-6.2
2017-01-01 13:35:00,-6.3
2017-01-01 13:40:00,-6.4
2017-01-01 13:45:00,-6.5
2017-01-01 13:50:00,-6.6
2017-01-01 13:55:00,-6.7
2017-01-01 14:00:00,-6.7
2017-01-01 14:05:00,-6.7
2017-01-01 14:10:00,-6.6
2017-01-01 14:15:00,-6.7
2017-01-01 14:20:00,-6.7
2017-01-01 14:25:00,-6.6
2017-01-01 14:30:00,-6.7
2017-01-01 14:35:00,-6.6
2017-01-01 14:40:00,-6.6
2017-01-01 14:45:00,-6.4
2017-01-01 14:50:00,-6.5
2017-01-01 14:55:00,-6.5
2017-01-01 15:00:00,-6.4
2017-01-01 15:05:00,-6.4
2017-01-01 15:10:00,-6.3
2017-01-01 15:15:00,-6.3
2017-01-01 15:20:00,-6.4
2017-01-01 15:25:00,-6.5
2017-01-01 15:30:00,-6.6
2017-01-01 15:35:00,-6.6
2017-01-01 15:40:00,-6.6
2017-01-01 15:45:00,-6.6
2017-01-01 15:50:00,-6.5
2017-01-01 15:55:00,-6.4
2017-01-01 16:00:00,-6.3
2017-01-01 16:05:00,-6.3
2017-01-01 16:10:00,-6.2
2017-01-01 16:15:00,-6.1
2017-01-01 16:20:00,-6.0
2017-01-01 16:25:00,-5.9
2017-01-01 16:30:00,-5.8
2017-01-01 16:35:00,-5.7
2017-01-01 16:40:00,-5.4
2017-01-01 16:45:00,-5.3
2017-01-01 16:50:00,-5.1
2017-01-01 16:55:00,-5.0
2017-01-01 17:00:00,-4.8
2017-01-01 17:05:00,-4.6
2017-01-01 17:10:00,-4.3
2017-01-01 17:15:00,-4.1
2017-01-01 17:20:00,-3.9
2017-01-01 17:25:00,-3.6
2017-01-01 17:30:00,-3.3
2017-01-01 17:35:00,-3.1
2017-01-01 17:40:00,-2.8
2017-01-01 17:45:00,-2.7
2017-01-01 17:50:00,-2.4
2017-01-01 17:55:00,-2.0
2017-01-01 18:00:00,-1.6
2017-01-01 18:05:00,-1.3
2017-01-01 18:10:00,-1.1
2017-01-01 18:15:00,-0.9
2017-01-01 18:20:00,-0.7
2017-01-01 18:25:00,-0.4
2017-01-01 18:30:00,-0.4
2017-01-01 18:35:00,-0.2
2017-01-01 18:40:00,0.0
2017-01-01 18:45:00,0.3
2017-01-01 18:50:00,0.6
2017-01-01 18:55:00,0.6
2017-01-01 19:00:00,1.0
2017-01-01 19:05:00,1.0
2017-01-01 19:10:00,1.1
2017-01-01 19:15:00,1.3
2017-01-01 19:20:00,1.0
2017-01-01 19:25:00,1.2
2017-01-01 19:30:00,1.3
2017-01-01 19:35:00,0.9
2017-01-01 19:40:00,1.1
2017-01-01 19:45:00,1.3
2017-01-01 19:50:00,1.5
2017-01-01 19:55:00,1.3
2017-01-01 20:00:00,1.6
2017-01-01 20:05:00,1.6
2017-01-01 20:10:00,1.8
2017-01-01 20:15:00,1.4
2017-01-01 20:20:00,1.4
2017-01-01 20:25:00,1.6
2017-01-01 20:30:00,1.6
2017-01-01 20:35:00,1.5
2017-01-01 20:40:00,1.5
2017-01-01 20:45:00,1.8
2017-01-01 20:50:00,1.6
2017-01-01 20:55:00,1.7
2017-01-01 21:00:00,1.5
2017-01-01 21:05:00,1.8
2017-01-01 21:10:00,1.6
2017-01-01 21:15:00,1.7
2017-01-01 21:20:00,1.9
2017-01-01 21:25:00,1.6
2017-01-01 21:30:00,1.8
2017-01-01 21:35:00,1.8
2017-01-01 21:40:00,1.5
2017-01-01 21:45:00,1.6
2017-01-01 21:50:00,1.6
2017-01-01 21:55:00,1.4
2017-01-01 22:00:00,1.1
2017-01-01 22:05:00,1.5
2017-01-01 22:10:00,1.5
2017-01-01 22:15:00,1.6
2017-01-01 22:20:00,1.5
2017-01-01 22:25:00,1.1
2017-01-01 22:30:00,1.0
2017-01-01 22:35:00,1.0
2017-01-01 22:40:00,1.1
2017-01-01 22:45:00,1.1
2017-01-01 22:50:00,0.7
2017-01-01 22:55:00,0.6
2017-01-01 23:00:00,0.5
2017-01-01 23:05:00,0.3
2017-01-01 23:10:00,0.5
2017-01-01 23:15:00,0.2
2017-01-01 23:20:00,0.2
2017-01-01 23:25:00,0.0
2017-01-01 23:30:00,-0.2
2017-01-01 23:35:00,-0.3
2017-01-01 23:40:00,-0.5
2017-01-01 23:45:00,-0.7
2017-01-01 23:50:00,-1.1
2017-01-01 23:55:00,-1.3
2017-01-02 00:00:00,-1.4
2017-01-02 00:05:00,-1.7
2017-01-02 00:10:00,-2.1
2017-01-02 00:15:00,-2.4
2017-01-02 00:20:00,-2.6
2017-01-02 00:25:00,-2.9
2017-01-02 00:30:00,-3.2
2017-01-02 00:35:00,-3.5
2017-01-02 00:40:00,-3.9
2017-01-02 00:45:00,-4.1
2017-01-02 00:50:00,-4.2
2017-01-02 00:55:00,-4.4
2017-01-02 01:00:00,-4.6
2017-01-02 01:05:00,-4.7
2017-01-02 01:10:00,-5.0
2017-01-02 01:15:00,-5.1
2017-01-02 01:20:00,-4.8
2017-01-02 01:25:00,-4.7
2017-01-02 01:30:00,-4.5
2017-01-02 01:35:00,-4.0
2017-01-02 01:40:00,-3.6
2017-01-02 01:45:00,-3.1
2017-01-02 01:50:00,-3.0
2017-01-02 01:55:00,-3.0
2017-01-02 02:00:00,-3.0
2017-01-02 02:05:00,-2.9
2017-01-02 02:10:00,-3.0
2017-01-02 02:15:00,-2.9
2017-01-02 02:20:00,-3.0
2017-01-02 02:25:00,-3.0
2017-01-02 02:30:00,-3.0
2017-01-02 02:35:00,-3.0
2017-01-02 02:40:00,-3.2
2017-01-02 02:45:00,-3.5
2017-01-02 02:50:00,-3.7
2017-01-02 02:55:00,-3.5
2017-01-02 03:00:00,-3.5
2017-01-02 03:05:00,-3.4
2017-01-02 03:10:00,-3.3
2017-01-02 03:15:00,-3.2
2017-01-02 03:20:00,-3.2
2017-01-02 03:25:00,-3.3
2017-01-02 03:30:00,-3.3
2017-01-02 03:35:00,-3.3
2017-01-02 03:40:00,-3.4
2017-01-02 03:45:00,-3.4
2017-01-02 03:50:00,-3.4
2017-01-02 03:55:00,-3.5
2017-01-02 04:00:00,-3.5
2017-01-02 04:05:00,-3.5
2017-01-02 04:10:00,-3.5
2017-01-02 04:15:00,-3.6
2017-01-02 04:20:00,-3.6
2017-01-02 04:25:00,-3.8
2017-01-02 04:30:00,-3.8
2017-01-02 04:35:00,-3.8
2017-01-02 04:40:00,-3.9
2017-01-02 04:45:00,-3.9
2017-01-02 04:50:00,-3.9
2017-01-02 04:55:00,-3.9
2017-01-02 05:00:00,-3.9
2017-01-02 05:05:00,-3.9
2017-01-02 05:10:00,-3.9
2017-01-02 05:15:00,-4.0
2017-01-02 05:20:00,-3.9
2017-01-02 05:25:00,-4.0
2017-01-02 05:30:00,-4.2
2017-01-02 05:35:00,-4.2
2017-01-02 05:40:00,-4.4
2017-01-02 05:45:00,-4.4
2017-01-02 05:50:00,-4.4
2017-01-02 05:55:00,-4.4
2017-01-02 06:00:00,-4.4
2017-01-02 06:05:00,-5.3
2017-01-02 06:10:00,-5.2
2017-01-02 06:15:00,-5.3
2017-01-02 06:20:00,-5.2
2017-01-02 06:25:00,-5.0
2017-01-02 06:30:00,-4.9
2017-01-02 06:35:00,-4.8
2017-01-02 06:40:00,-4.8
2017-01-02 06:45:00,-4.7
2017-01-02 06:50:00,-4.7
2017-01-02 06:55:00,-4.8
2017-01-02 07:00:00,-4.7
2017-01-02 07:05:00,-4.7
2017-01-02 07:10:00,-4.7
2017-01-02 07:15:00,-5.0
2017-01-02 07:20:00,-5.0
2017-01-02 07:25:00,-4.9
2017-01-02 07:30:00,-4.8
2017-01-02 07:35:00,-4.8
2017-01-02 07:40:00,-4.7
2017-01-02 07:45:00,-4.6
2017-01-02 07:50:00,-4.6
2017-01-02 07:55:00,-4.7
2017-01-02 08:00:00,-4.6
2017-01-02 08:05:00,-4.6
2017-01-02 08:10:00,-4.5
2017-01-02 08:15:00,-4.5
2017-01-02 08:20:00,-4.5
2017-01-02 08:25:00,-4.5
2017-01-02 08:30:00,-4.5
2017-01-02 08:35:00,-4.5
2017-01-02 08:40:00,-4.6
2017-01-02 08:45:00,-4.6
2017-01-02 08:50:00,-4.6
2017-01-02 08:55:00,-4.6
2017-01-02 09:00:00,-4.6
2017-01-02 09:05:00,-4.6
2017-01-02 09:10:00,-4.5
2017-01-02 09:15:00,-4.5
2017-01-02 09:20:00,-4.5
2017-01-02 09:25:00,-4.5
2017-01-02 09:30:00,-4.5
2017-01-02 09:35:00,-4.5
2017-01-02 09:40:00,-4.5
2017-01-02 09:45:00,-4.5
2017-01-02 09:50:00,-4.4
2017-01-02 09:55:00,-4.4
2017-01-02 10:00:00,-4.4
2017-01-02 10:05:00,-4.5
2017-01-02 10:10:00,-4.5
2017-01-02 10:15:00,-4.4
2017-01-02 10:20:00,-4.5
2017-01-02 10:25:00,-4.5
2017-01-02 10:30:00,-4.5
2017-01-02 10:35:00,-4.5
2017-01-02 10:40:00,-4.5
2017-01-02 10:45:00,-4.5
2017-01-02 10:50:00,-4.5
2017-01-02 10:55:00,-4.4
2017-01-02 11:00:00,-4.4
2017-01-02 11:05:00,-4.5
2017-01-02 11:10:00,-4.5
2017-01-02 11:15:00,-4.5
2017-01-02 11:20:00,-4.5
2017-01-02 11:25:00,-4.5
2017-01-02 11:30:00,-4.5
2017-01-02 11:35:00,-4.5
2017-01-02 11:40:00,-4.5
2017-01-02 11:45:00,-4.6
2017-01-02 11:50:00,-4.6
2017-01-02 11:55:00,-4.6
2017-01-02 12:00:00,-4.6
2017-01-02 12:05:00,-4.7
2017-01-02 12:10:00,-4.8
2017-01-02 12:15:00,-4.8
2017-01-02 12:20:00,-4.9
2017-01-02 12:25:00,-5.0
2017-01-02 12:30:00,-5.3
2017-01-02 12:35:00,-5.5
2017-01-02 12:40:00,-5.5
2017-01-02 12:45:00,-5.6
2017-01-02 12:50:00,-5.9
2017-01-02 12:55:00,-6.1
2017-01-02 13:00:00,-6.0
2017-01-02 13:05:00,-6.1
2017-01-02 13:10:00,-6.1
2017-01-02 13:15:00,-6.0
2017-01-02 13:20:00,-5.7
2017-01-02 13:25:00,-5.5
2017-01-02 13:30:00,-5.3
2017-01-02 13:35:00,-5.2
2017-01-02 13:40:00,-5.1
2017-01-02 13:45:00,-5.0
2017-01-02 13:50:00,-5.0
2017-01-02 13:55:00,-5.0
2017-01-02 14:00:00,-4.9
2017-01-02 14:05:00,-4.9
2017-01-02 14:10:00,-5.0
2017-01-02 14:15:00,-4.9
2017-01-02 14:20:00,-4.9
2017-01-02 14:25:00,-4.9
2017-01-02 14:30:00,-4.9
2017-01-02 14:35:00,-4.9
2017-01-02 14:40:00,-5.0
2017-01-02 14:45:00,-4.9
2017-01-02 14:50:00,-4.9
2017-01-02 14:55:00,-5.0
2017-01-02 15:00:00,-4.9
2017-01-02 15:05:00,-4.9
2017-01-02 15:10:00,-4.9
2017-01-02 15:15:00,-4.9
2017-01-02 15:20:00,-4.9
2017-01-02 15:25:00,-4.9
2017-01-02 15:30:00,-4.9
2017-01-02 15:35:00,-4.9
2017-01-02 15:40:00,-4.9
2017-01-02 15:45:00,-4.9
2017-01-02 15:50:00,-4.9
2017-01-02 15:55:00,-4.9
2017-01-02 16:00:00,-4.9
2017-01-02 16:05:00,-4.9
2017-01-02 16:10:00,-4.9
2017-01-02 16:15:00,-4.9
2017-01-02 16:20:00,-4.9
2017-01-02 16:25:00,-4.8
2017-01-02 16:30:00,-4.8
2017-01-02 16:35:00,-4.7
2017-01-02 16:40:00,-4.8
2017-01-02 16:45:00,-4.8
2017-01-02 16:50:00,-4.8
2017-01-02 16:55:00,-4.9
2017-01-02 17:00:00,-4.8
2017-01-02 17:05:00,-4.8
2017-01-02 17:10:00,-4.8
2017-01-02 17:15:00,-4.8
2017-01-02 17:20:00,-4.7
2017-01-02 17:25:00,-4.7
2017-01-02 17:30:00,-4.7
2017-01-02 17:35:00,-4.7
2017-01-02 17:40:00,-4.7
2017-01-02 17:45:00,-4.6
2017-01-02 17:50:00,-4.7
2017-01-02 17:55:00,-4.7
2017-01-02 18:00:00,-4.5
2017-01-02 18:05:00,-4.6
2017-01-02 18:10:00,-4.5
2017-01-02 18:15:00,-4.4
2017-01-02 18:20:00,-4.6
2017-01-02 18:25:00,-4.6
2017-01-02 18:30:00,-4.5
2017-01-02 18:35:00,-4.4
2017-01-02 18:40:00,-4.4
2017-01-02 18:45:00,-4.4
2017-01-02 18:50:00,-4.3
2017-01-02 18:55:00,-4.2
2017-01-02 19:00:00,-4.2
2017-01-02 19:05:00,-4.2
2017-01-02 19:10:00,-4.2
2017-01-02 19:15:00,-4.1
2017-01-02 19:20:00,-4.2
2017-01-02 19:25:00,-4.2
2017-01-02 19:30:00,-4.1
2017-01-02 19:35:00,-3.9
2017-01-02 19:40:00,-3.9
2017-01-02 19:45:00,-4.1
2017-01-02 19:50:00,-4.2
2017-01-02 19:55:00,-4.0
2017-01-02 20:00:00,-4.0
2017-01-02 20:05:00,-4.1
2017-01-02 20:10:00,-4.0
2017-01-02 20:15:00,-4.1
2017-01-02 20:20:00,-4.1
2017-01-02 20:25:00,-4.0
2017-01-02 20:30:00,-4.2
2017-01-02 20:35:00,-4.1
2017-01-02 20:40:00,-4.1
2017-01-02 20:45:00,-4.2
2017-01-02 20:50:00,-4.1
2017-01-02 20:55:00,-4.3
2017-01-02 21:00:00,-4.3
2017-01-02 21:05:00,-4.4
2017-01-02 21:10:00,-4.5
2017-01-02 21:15:00,-4.4
2017-01-02 21:20:00,-4.2
2017-01-02 21:25:00,-4.5
2017-01-02 21:30:00,-4.4
2017-01-02 21:35:00,-4.2
2017-01-02 21:40:00,-4.3
2017-01-02 21:45:00,-4.3
2017-01-02 21:50:00,-4.2
2017-01-02 21:55:00,-4.2
2017-01-02 22:00:00,-4.3
2017-01-02 22:05:00,-4.2
2017-01-02 22:10:00,-4.3
2017-01-02 22:15:00,-4.4
2017-01-02 22:20:00,-4.3
2017-01-02 22:25:00,-4.3
2017-01-02 22:30:00,-4.0
2017-01-02 22:35:00,-4.3
2017-01-02 22:40:00,-4.1
2017-01-02 22:45:00,-4.2
2017-01-02 22:50:00,-4.0
2017-01-02 22:55:00,-3.9
2017-01-02 23:00:00,-4.0
2017-01-02 23:05:00,-4.1
2017-01-02 23:10:00,-4.1
2017-01-02 23:15:00,-4.0
2017-01-02 23:20:00,-4.1
2017-01-02 23:25:00,-4.2
2017-01-02 23:30:00,-4.3
2017-01-02 23:35:00,-4.2
2017-01-02 23:40:00,-4.3
2017-01-02 23:45:00,-4.3
2017-01-02 23:50:00,-4.3
2017-01-02 23:55:00,-4.4
2017-01-03 00:00:00,-4.5
2017-01-03 00:05:00,-4.5
2017-01-03 00:10:00,-4.5
2017-01-03 00:15:00,-4.5
2017-01-03 00:20:00,-4.6
2017-01-03 00:25:00,-4.6
2017-01-03 00:30:00,-4.5
2017-01-03 00:35:00,-4.6
2017-01-03 00:40:00,-4.6
2017-01-03 00:45:00,-4.5
2017-01-03 00:50:00,-4.5
2017-01-03 00:55:00,-4.6
2017-01-03 01:00:00,-4.5
2017-01-03 01:05:00,-4.6
2017-01-03 01:10:00,-4.7
2017-01-03 01:15:00,-4.7
2017-01-03 01:20:00,-4.7
2017-01-03 01:25:00,-4.9
2017-01-03 01:30:00,-4.9
2017-01-03 01:35:00,-4.9
2017-01-03 01:40:00,-5.0
2017-01-03 01:45:00,-5.0
2017-01-03 01:50:00,-5.2
2017-01-03 01:55:00,-5.2
2017-01-03 02:00:00,-5.5
2017-01-03 02:05:00,-5.3
2017-01-03 02:10:00,-5.2
2017-01-03 02:15:00,-5.2
2017-01-03 02:20:00,-5.9
2017-01-03 02:25:00,-6.4
2017-01-03 02:30:00,-6.5
2017-01-03 02:35:00,-6.0
2017-01-03 02:40:00,-5.8
2017-01-03 02:45:00,-5.5
2017-01-03 02:50:00,-5.4
2017-01-03 02:55:00,-5.5
2017-01-03 03:00:00,-6.3
2017-01-03 03:05:00,-6.3
2017-01-03 03:10:00,-6.8
2017-01-03 03:15:00,-6.3
2017-01-03 03:20:00,-5.8
2017-01-03 03:25:00,-6.8
2017-01-03 03:30:00,-6.2
2017-01-03 03:35:00,-5.7
2017-01-03 03:40:00,-5.4
2017-01-03 03:45:00,-5.3
2017-01-03 03:50:00,-5.3
2017-01-03 03:55:00,-5.2
2017-01-03 04:00:00,-5.3
2017-01-03 04:05:00,-5.3
2017-01-03 04:10:00,-5.2
2017-01-03 04:15:00,-5.2
2017-01-03 04:20:00,-5.6
2017-01-03 04:25:00,-6.1
2017-01-03 04:30:00,-6.1
2017-01-03 04:35:00,-6.1
2017-01-03 04:40:00,-6.0
2017-01-03 04:45:00,-5.8
2017-01-03 04:50:00,-5.6
2017-01-03 04:55:00,-5.7
2017-01-03 05:00:00,-5.6
2017-01-03 05:05:00,-6.1
2017-01-03 05:10:00,-5.8
2017-01-03 05:15:00,-5.9
2017-01-03 05:20:00,-5.8
2017-01-03 05:25:00,-6.3
2017-01-03 05:30:00,-6.4
2017-01-03 05:35:00,-6.5
2017-01-03 05:40:00,-6.5
2017-01-03 05:45:00,-5.9
2017-01-03 05:50:00,-5.7
2017-01-03 05:55:00,-5.8
2017-01-03 06:00:00,-6.0
2017-01-03 06:05:00,-6.3
2017-01-03 06:10:00,-6.7
2017-01-03 06:15:00,-6.6
2017-01-03 06:20:00,-6.5
2017-01-03 06:25:00,-6.4
2017-01-03 06:30:00,-6.1
2017-01-03 06:35:00,-6.3
2017-01-03 06:40:00,-6.2
2017-01-03 06:45:00,-6.1
2017-01-03 06:50:00,-6.1
2017-01-03 06:55:00,-6.0
2017-01-03 07:00:00,-6.0
2017-01-03 07:05:00,-6.2
2017-01-03 07:10:00,-6.4
2017-01-03 07:15:00,-6.2
2017-01-03 07:20:00,-6.1
2017-01-03 07:25:00,-5.9
2017-01-03 07:30:00,-5.9
2017-01-03 07:35:00,-5.9
2017-01-03 07:40:00,-6.2
2017-01-03 07:45:00,-6.4
2017-01-03 07:50:00,-6.2
2017-01-03 07:55:00,-6.0
2017-01-03 08:00:00,-5.9
2017-01-03 08:05:00,-5.9
2017-01-03 08:10:00,-5.8
2017-01-03 08:15:00,-5.8
2017-01-03 08:20:00,-5.8
2017-01-03 08:25:00,-5.8
2017-01-03 08:30:00,-6.0
2017-01-03 08:35:00,-5.9
2017-01-03 08:40:00,-5.9
2017-01-03 08:45:00,-5.8
2017-01-03 08:50:00,-5.8
2017-01-03 08:55:00,-5.7
2017-01-03 09:00:00,-5.8
2017-01-03 09:05:00,-5.8
2017-01-03 09:10:00,-6.0
2017-01-03 09:15:00,-6.1
2017-01-03 09:20:00,-6.0
2017-01-03 09:25:00,-5.9
2017-01-03 09:30:00,-6.0
2017-01-03 09:35:00,-6.0
2017-01-03 09:40:00,-6.1
2017-01-03 09:45:00,-6.2
2017-01-03 09:50:00,-6.1
2017-01-03 09:55:00,-6.3
2017-01-03 10:00:00,-6.3
2017-01-03 10:05:00,-6.1
2017-01-03 10:10:00,-6.0
2017-01-03 10:15:00,-5.9
2017-01-03 10:20:00,-5.8
2017-01-03 10:25:00,-5.7
2017-01-03 10:30:00,-5.7
2017-01-03 10:35:00,-5.8
2017-01-03 10:40:00,-5.6
2017-01-03 10:45:00,-5.6
2017-01-03 10:50:00,-5.6
2017-01-03 10:55:00,-5.6
2017-01-03 11:00:00,-5.5
2017-01-03 11:05:00,-5.6
2017-01-03 11:10:00,-5.7
2017-01-03 11:15:00,-5.7
2017-01-03 11:20:00,-5.8
2017-01-03 11:25:00,-5.7
2017-01-03 11:30:00,-5.6
2017-01-03 11:35:00,-5.5
2017-01-03 11:40:00,-5.3
2017-01-03 11:45:00,-5.2
2017-01-03 11:50:00,-5.1
2017-01-03 11:55:00,-5.0
2017-01-03 12:00:00,-5.1
2017-01-03 12:05:00,-5.0
2017-01-03 12:10:00,-5.0
2017-01-03 12:15:00,-5.0
2017-01-03 12:20:00,-4.8
2017-01-03 12:25:00,-4.8
2017-01-03 12:30:00,-4.7
2017-01-03 12:35:00,-4.6
2017-01-03 12:40:00,-4.5
2017-01-03 12:45:00,-4.4
2017-01-03 12:50:00,-4.5
2017-01-03 12:55:00,-4.6
2017-01-03 13:00:00,-4.6
2017-01-03 13:05:00,-4.6
2017-01-03 13:10:00,-4.5
2017-01-03 13:15:00,-4.5
2017-01-03 13:20:00,-4.5
2017-01-03 13:25:00,-4.3
2017-01-03 13:30:00,-4.3
2017-01-03 13:35:00,-4.3
2017-01-03 13:40:00,-4.2
2017-01-03 13:45:00,-4.2
2017-01-03 13:50:00,-4.2
2017-01-03 13:55:00,-4.2
2017-01-03 14:00:00,-4.3
2017-01-03 14:05:00,-4.3
2017-01-03 14:10:00,-4.3
2017-01-03 14:15:00,-4.3
2017-01-03 14:20:00,-4.3
2017-01-03 14:25:00,-4.3
2017-01-03 14:30:00,-4.4
2017-01-03 14:35:00,-4.4
2017-01-03 14:40:00,-4.4
2017-01-03 14:45:00,-4.5
2017-01-03 14:50:00,-4.6
2017-01-03 14:55:00,-4.5
2017-01-03 15:00:00,-4.5
2017-01-03 15:05:00,-4.5
2017-01-03 15:10:00,-4.5
2017-01-03 15:15:00,-4.5
2017-01-03 15:20:00,-4.5
2017-01-03 15:25:00,-4.5
2017-01-03 15:30:00,-4.5
2017-01-03 15:35:00,-4.5
2017-01-03 15:40:00,-4.5
2017-01-03 15:45:00,-4.6
2017-01-03 15:50:00,-4.6
2017-01-03 15:55:00,-4.5
2017-01-03 16:00:00,-4.6
2017-01-03 16:05:00,-4.5
2017-01-03 16:10:00,-4.3
2017-01-03 16:15:00,-4.2
2017-01-03 16:20:00,-4.3
2017-01-03 16:25:00,-4.2
2017-01-03 16:30:00,-4.1
2017-01-03 16:35:00,-4.0
2017-01-03 16:40:00,-3.9
2017-01-03 16:45:00,-3.8
2017-01-03 16:50:00,-3.7
2017-01-03 16:55:00,-3.7
2017-01-03 17:00:00,-3.4
2017-01-03 17:05:00,-3.3
2017-01-03 17:10:00,-3.5
2017-01-03 17:15:00,-3.4
2017-01-03 17:20:00,-3.3
2017-01-03 17:25:00,-3.2
2017-01-03 17:30:00,-3.1
2017-01-03 17:35:00,-3.0
2017-01-03 17:40:00,-2.7
2017-01-03 17:45:00,-2.6
2017-01-03 17:50:00,-2.2
2017-01-03 17:55:00,-2.4
2017-01-03 18:00:00,-2.4
2017-01-03 18:05:00,-2.7
2017-01-03 18:10:00,-2.7
2017-01-03 18:15:00,-2.6
2017-01-03 18:20:00,-2.7
2017-01-03 18:25:00,-2.5
2017-01-03 18:30:00,-2.5
2017-01-03 18:35:00,-2.6
2017-01-03 18:40:00,-2.6
2017-01-03 18:45:00,-2.6
2017-01-03 18:50:00,-2.9
2017-01-03 18:55:00,-2.7
2017-01-03 19:00:00,-2.5
2017-01-03 19:05:00,-2.3
2017-01-03 19:10:00,-2.3
2017-01-03 19:15:00,-2.3
2017-01-03 19:20:00,-2.3
2017-01-03 19:25:00,-2.2
2017-01-03 19:30:00,-2.1
2017-01-03 19:35:00,-2.3
2017-01-03 19:40:00,-2.2
2017-01-03 19:45:00,-2.0
2017-01-03 19:50:00,-1.9
2017-01-03 19:55:00,-1.8
2017-01-03 20:00:00,-1.8
2017-01-03 20:05:00,-1.9
2017-01-03 20:10:00,-1.8
2017-01-03 20:15:00,-1.6
2017-01-03 20:20:00,-1.5
2017-01-03 20:25:00,-1.1
2017-01-03 20:30:00,-1.6
2017-01-03 20:35:00,-2.2
2017-01-03 20:40:00,-2.2
2017-01-03 20:45:00,-2.3
2017-01-03 20:50:00,-2.4
2017-01-03 20:55:00,-2.4
2017-01-03 21:00:00,-2.4
2017-01-03 21:05:00,-2.3
2017-01-03 21:10:00,-2.4
2017-01-03 21:15:00,-2.5
2017-01-03 21:20:00,-2.3
2017-01-03 21:25:00,-2.1
2017-01-03 21:30:00,-2.2
2017-01-03 21:35:00,-2.2
2017-01-03 21:40:00,-2.3
2017-01-03 21:45:00,-2.3
2017-01-03 21:50:00,-2.3
2017-01-03 21:55:00,-2.3
2017-01-03 22:00:00,-2.4
2017-01-03 22:05:00,-2.3
2017-01-03 22:10:00,-2.3
2017-01-03 22:15:00,-2.4
2017-01-03 22:20:00,-2.4
2017-01-03 22:25:00,-2.5
2017-01-03 22:30:00,-2.5
2017-01-03 22:35:00,-2.7
2017-01-03 22:40:00,-2.7
2017-01-03 22:45:00,-2.8
2017-01-03 22:50:00,-2.8
2017-01-03 22:55:00,-2.8
2017-01-03 23:00:00,-2.8
2017-01-03 23:05:00,-2.8
2017-01-03 23:10:00,-2.8
2017-01-03 23:15:00,-2.7
2017-01-03 23:20:00,-2.7
2017-01-03 23:25:00,-2.6
2017-01-03 23:30:00,-2.6
2017-01-03 23:35:00,-2.5
2017-01-03 23:40:00,-2.5
2017-01-03 23:45:00,-2.4
2017-01-03 23:50:00,-2.4
2017-01-03 23:55:00,-2.4
71
.venv/lib/python3.12/site-packages/prophet/tests/data3.csv
Normal file
@@ -0,0 +1,71 @@
ds,y
2023-03-02,623031970.0
2023-03-06,623032040.0
2023-03-07,623032054.0
2023-03-08,623032091.0
2023-03-09,623032123.0
2023-03-10,623032152.0
2023-03-11,623032177.0
2023-03-12,623032184.0
2023-03-13,623032193.0
2023-03-16,623032296.0
2023-03-17,623032316.0
2023-03-18,623032328.0
2023-03-19,623032339.0
2023-03-20,623032352.0
2023-03-21,623032385.0
2023-03-22,623032410.0
2023-03-23,623032427.0
2023-03-25,623032479.0
2023-03-26,623032496.0
2023-03-27,623032506.0
2023-03-28,623032533.0
2023-03-29,623032598.0
2023-03-30,623032643.0
2023-03-31,623032681.0
2023-04-01,623032727.0
2023-04-02,623032756.0
2023-04-03,623032767.0
2023-04-04,623032799.0
2023-04-05,623032843.0
2023-04-06,623032890.0
2023-04-07,623032934.0
2023-04-08,623032954.0
2023-04-09,623032959.0
2023-04-10,623032964.0
2023-04-11,623032997.0
2023-04-12,623033041.0
2023-04-13,623033062.0
2023-04-14,623033095.0
2023-04-15,623033122.0
2023-04-16,623033163.0
2023-04-17,623033190.0
2023-04-18,623033227.0
2023-04-19,623033258.0
2023-04-20,623033294.0
2023-04-21,623033329.0
2023-04-22,623033361.0
2023-04-23,623033385.0
2023-04-24,623033397.0
2023-04-25,623033419.0
2023-04-26,623033440.0
2023-04-27,623033482.0
2023-04-28,623033535.0
2023-04-29,623033575.0
2023-04-30,623033600.0
2023-05-01,623033610.0
2023-05-02,623033632.0
2023-05-03,623033666.0
2023-05-04,623033704.0
2023-05-05,623033714.0
2023-05-06,623033752.0
2023-05-07,623033760.0
2023-05-08,623033769.0
2023-05-09,623033784.0
2023-05-10,623033823.0
2023-05-11,623033853.0
2023-05-12,623034010.0
2023-05-13,623034041.0
2023-05-14,623034060.0
2023-05-15,623034068.0
2023-05-16,623034084.0
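Editorial note: data3.csv holds values near 6.23e8 whose total variation is only about 2,100, i.e. a few parts per million of the level; the `large_numbers_ts` fixture exists to exercise Prophet's internal rescaling of `y`. A rough illustration of why this is a stress test, assuming the common max-abs scaling (an assumption about the internals, not code from this commit):

    import pandas as pd

    df = pd.read_csv("data3.csv", parse_dates=["ds"])  # path relative to the tests directory
    y_scaled = df["y"] / df["y"].abs().max()           # max-abs scaling squeezes the series
    print(y_scaled.max() - y_scaled.min())             # ~3.4e-06: the scaled series is nearly constant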
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -0,0 +1,403 @@
# Copyright (c) Facebook, Inc. and its affiliates.

# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import datetime
import itertools

import numpy as np
import pandas as pd
import pytest

from prophet import Prophet, diagnostics


@pytest.fixture(scope="module")
def ts_short(daily_univariate_ts):
    return daily_univariate_ts.head(100)


class CustomParallelBackend:
    def map(self, func, *iterables):
        results = [func(*args) for args in zip(*iterables)]
        return results


PARALLEL_METHODS = [None, "processes", "threads", CustomParallelBackend()]
try:
    from dask.distributed import Client

    client = Client(processes=False)  # noqa
    PARALLEL_METHODS.append("dask")
except ImportError:
    pass

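# Editorial note (not part of the committed file): `cross_validation` accepts
# any object exposing map(func, *iterables) as its `parallel` argument, so
# CustomParallelBackend above is the minimal conforming backend -- it simply
# evaluates serially. A quick sanity check of that contract:
#
#     backend = CustomParallelBackend()
#     assert backend.map(lambda a, b: a + b, [1, 2], [10, 20]) == [11, 22]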

@diagnostics.register_performance_metric
def mase(df, w):
    """Mean absolute scaled error

    Parameters
    ----------
    df: Cross-validation results dataframe.
    w: Aggregation window size.

    Returns
    -------
    Dataframe with columns horizon and mase.
    """
    e = df["y"] - df["yhat"]
    d = np.abs(np.diff(df["y"])).sum() / (df["y"].shape[0] - 1)
    se = np.abs(e / d)
    if w < 0:
        return pd.DataFrame({"horizon": df["horizon"], "mase": se})
    return diagnostics.rolling_mean_by_h(
        x=se.values, h=df["horizon"].values, w=w, name="mase"
    )

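# Editorial note (not part of the committed file): a worked example of the
# metric above. With y = [1, 2, 4] and yhat = [1, 3, 3]:
#   d  = sum(|diff(y)|) / (n - 1) = (1 + 2) / 2 = 1.5
#   e  = y - yhat                 = [0, -1, 1]
#   se = |e| / d                  = [0.0, 0.667, 0.667]
# i.e. each error is measured relative to the average one-step move in y,
# which is what makes the metric scale-free.
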
class TestCrossValidation:
    @pytest.mark.parametrize("parallel_method", PARALLEL_METHODS)
    def test_cross_validation(self, ts_short, parallel_method, backend):
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)
        # Calculate the number of cutoff points (k)
        horizon = pd.Timedelta("4 days")
        period = pd.Timedelta("10 days")
        initial = pd.Timedelta("115 days")
        df_cv = diagnostics.cross_validation(
            m, horizon="4 days", period="10 days", initial="115 days", parallel=parallel_method
        )
        assert len(np.unique(df_cv["cutoff"])) == 3
        assert max(df_cv["ds"] - df_cv["cutoff"]) == horizon
        assert min(df_cv["cutoff"]) >= min(ts_short["ds"]) + initial
        dc = df_cv["cutoff"].diff()
        dc = dc[dc > pd.Timedelta(0)].min()
        assert dc >= period
        assert (df_cv["cutoff"] < df_cv["ds"]).all()
        # Each y in df_cv and ts_short with the same ds should be equal
        df_merged = pd.merge(df_cv, ts_short, "left", on="ds")
        assert np.sum((df_merged["y_x"] - df_merged["y_y"]) ** 2) == pytest.approx(0.0)
        df_cv = diagnostics.cross_validation(
            m, horizon="4 days", period="10 days", initial="135 days"
        )
        assert len(np.unique(df_cv["cutoff"])) == 1
        with pytest.raises(ValueError):
            diagnostics.cross_validation(m, horizon="10 days", period="10 days", initial="140 days")

    def test_bad_parallel_methods(self, ts_short, backend):
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)
        # Invalid alias
        with pytest.raises(ValueError, match="'parallel' should be one"):
            diagnostics.cross_validation(m, horizon="4 days", parallel="bad")
        # No map method
        with pytest.raises(ValueError, match="'parallel' should be one"):
            diagnostics.cross_validation(m, horizon="4 days", parallel=object())

    def test_check_single_cutoff_forecast_func_calls(self, ts_short, monkeypatch, backend):
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)

        def mock_predict(df, model, cutoff, horizon, predict_columns):
            nonlocal n_calls
            n_calls = n_calls + 1
            return pd.DataFrame(
                {
                    "ds": pd.date_range(start="2012-09-17", periods=3),
                    "yhat": np.arange(16, 19),
                    "yhat_lower": np.arange(15, 18),
                    "yhat_upper": np.arange(17, 20),
                    "y": np.arange(16.5, 19.5),
                    "cutoff": [datetime.date(2012, 9, 15)] * 3,
                }
            )

        monkeypatch.setattr(diagnostics, "single_cutoff_forecast", mock_predict)
        # Cross-validation with 3 and 7 forecasts
        for args, forecasts in (
            (["4 days", "10 days", "115 days"], 3),
            (["4 days", "4 days", "115 days"], 7),
        ):
            n_calls = 0
            _ = diagnostics.cross_validation(m, *args)
            # Check that the single-cutoff forecast function was called the expected number of times
            assert n_calls == forecasts

    @pytest.mark.parametrize("extra_output_columns", ["trend", ["trend"]])
    def test_check_extra_output_columns_cross_validation(self, ts_short, backend, extra_output_columns):
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)
        df_cv = diagnostics.cross_validation(
            m,
            horizon="1 days",
            period="1 days",
            initial="140 days",
            extra_output_columns=extra_output_columns,
        )
        assert "trend" in df_cv.columns

    @pytest.mark.parametrize("growth", ["logistic", "flat"])
    def test_cross_validation_logistic_or_flat_growth(self, growth, ts_short, backend):
        df = ts_short.copy()
        if growth == "logistic":
            df["cap"] = 40
        m = Prophet(growth=growth, stan_backend=backend).fit(df)
        df_cv = diagnostics.cross_validation(
            m, horizon="1 days", period="1 days", initial="140 days"
        )
        assert len(np.unique(df_cv["cutoff"])) == 2
        assert (df_cv["cutoff"] < df_cv["ds"]).all()
        df_merged = pd.merge(df_cv, ts_short, "left", on="ds")
        assert np.sum((df_merged["y_x"] - df_merged["y_y"]) ** 2) == pytest.approx(0.0)

    def test_cross_validation_extra_regressors(self, ts_short, backend):
        df = ts_short.copy()
        df["extra"] = range(df.shape[0])
        df["is_conditional_week"] = np.arange(df.shape[0]) // 7 % 2
        m = Prophet(stan_backend=backend)
        m.add_seasonality(name="monthly", period=30.5, fourier_order=5)
        m.add_seasonality(
            name="conditional_weekly",
            period=7,
            fourier_order=3,
            prior_scale=2.0,
            condition_name="is_conditional_week",
        )
        m.add_regressor("extra")
        m.fit(df)
        df_cv = diagnostics.cross_validation(
            m, horizon="4 days", period="4 days", initial="135 days"
        )
        assert len(np.unique(df_cv["cutoff"])) == 2
        period = pd.Timedelta("4 days")
        dc = df_cv["cutoff"].diff()
        dc = dc[dc > pd.Timedelta(0)].min()
        assert dc >= period
        assert (df_cv["cutoff"] < df_cv["ds"]).all()
        df_merged = pd.merge(df_cv, ts_short, "left", on="ds")
        assert np.sum((df_merged["y_x"] - df_merged["y_y"]) ** 2) == pytest.approx(0.0)

    def test_cross_validation_default_value_check(self, ts_short, backend):
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)
        # The default value of initial should be equal to 3 * horizon
        df_cv1 = diagnostics.cross_validation(m, horizon="32 days", period="10 days")
        df_cv2 = diagnostics.cross_validation(
            m, horizon="32 days", period="10 days", initial="96 days"
        )
        assert ((df_cv1["y"] - df_cv2["y"]) ** 2).sum() == pytest.approx(0.0)
        assert ((df_cv1["yhat"] - df_cv2["yhat"]) ** 2).sum() == pytest.approx(0.0)

    def test_cross_validation_custom_cutoffs(self, ts_short, backend):
        m = Prophet(stan_backend=backend)
        m.fit(ts_short)
        # When a list of cutoffs is specified, the cutoff dates in df_cv are those specified
        df_cv1 = diagnostics.cross_validation(
            m,
            horizon="32 days",
            period="10 days",
            cutoffs=[pd.Timestamp("2012-07-31"), pd.Timestamp("2012-08-31")],
        )
        assert len(df_cv1["cutoff"].unique()) == 2

    def test_cross_validation_uncertainty_disabled(self, ts_short, backend):
        df = ts_short.copy()
        for uncertainty in [0, False]:
            m = Prophet(uncertainty_samples=uncertainty, stan_backend=backend)
            m.fit(df, algorithm="Newton")
            df_cv = diagnostics.cross_validation(
                m, horizon="4 days", period="4 days", initial="115 days"
            )
            expected_cols = ["ds", "yhat", "y", "cutoff"]
            assert all(col in expected_cols for col in df_cv.columns.tolist())
            df_p = diagnostics.performance_metrics(df_cv)
            assert "coverage" not in df_p.columns

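# Editorial note (not part of the committed file): a sketch of how the three
# cutoffs asserted in test_cross_validation arise. ts_short spans
# 2012-05-18..2012-10-09; stepping back from the end by `period` while keeping
# `horizon` after each cutoff and at least `initial` before the first yields
# 2012-10-05, 2012-09-25 and 2012-09-15. This mirrors the behaviour the tests
# assert; it is an illustration, not the library's implementation.
def _sketch_cutoffs(ds_min, ds_max, horizon, period, initial):
    cutoffs = []
    cutoff = ds_max - horizon              # the last cutoff leaves a full horizon
    while cutoff >= ds_min + initial:      # keep at least `initial` of history
        cutoffs.append(cutoff)
        cutoff -= period                   # step back by `period`
    return sorted(cutoffs)
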
class TestPerformanceMetrics:
|
||||
def test_performance_metrics(self, ts_short, backend):
|
||||
m = Prophet(stan_backend=backend)
|
||||
m.fit(ts_short)
|
||||
df_cv = diagnostics.cross_validation(
|
||||
m, horizon="4 days", period="10 days", initial="90 days"
|
||||
)
|
||||
# Aggregation level none
|
||||
df_none = diagnostics.performance_metrics(df_cv, rolling_window=-1)
|
||||
assert set(df_none.columns) == {
|
||||
"horizon",
|
||||
"coverage",
|
||||
"mae",
|
||||
"mape",
|
||||
"mdape",
|
||||
"mse",
|
||||
"rmse",
|
||||
"smape",
|
||||
}
|
||||
assert df_none.shape[0] == 16
|
||||
# Aggregation level 0
|
||||
df_0 = diagnostics.performance_metrics(df_cv, rolling_window=0)
|
||||
assert len(df_0) == 4
|
||||
assert len(df_0["horizon"].unique()) == 4
|
||||
# Aggregation level 0.2
|
||||
df_horizon = diagnostics.performance_metrics(df_cv, rolling_window=0.2)
|
||||
assert len(df_horizon) == 4
|
||||
assert len(df_horizon["horizon"].unique()) == 4
|
||||
# Aggregation level all
|
||||
df_all = diagnostics.performance_metrics(df_cv, rolling_window=1)
|
||||
assert df_all.shape[0] == 1
|
||||
for metric in ["mse", "mape", "mae", "coverage"]:
|
||||
assert df_all[metric].values[0] == pytest.approx(df_none[metric].mean())
|
||||
assert df_all["mdape"].values[0] == pytest.approx(df_none["mdape"].median())
|
||||
# Custom list of metrics
|
||||
df_horizon = diagnostics.performance_metrics(
|
||||
df_cv,
|
||||
metrics=["coverage", "mse", "mase"],
|
||||
)
|
||||
assert set(df_horizon.columns) == {"coverage", "mse", "mase","horizon"}
|
||||
# Skip MAPE
|
||||
df_cv.loc[0, "y"] = 0.0
|
||||
df_horizon = diagnostics.performance_metrics(
|
||||
df_cv,
|
||||
metrics=["coverage", "mape"],
|
||||
)
|
||||
assert set(df_horizon.columns) == {"coverage", "horizon"}
|
||||
# Handle zero y and yhat
|
||||
df_cv["y"] = 0.0
|
||||
df_cv["yhat"] = 0.0
|
||||
df_horizon = diagnostics.performance_metrics(
|
||||
df_cv,
|
||||
)
|
||||
assert set(df_horizon.columns) == {"coverage", "horizon", "mae", "mdape", "mse", "rmse", "smape"}
|
||||
df_horizon = diagnostics.performance_metrics(
|
||||
df_cv,
|
||||
metrics=["mape"],
|
||||
)
|
||||
assert df_horizon is None
|
||||
# List of metrics containing non-valid metrics
|
||||
with pytest.raises(ValueError):
|
||||
diagnostics.performance_metrics(
|
||||
df_cv,
|
||||
metrics=["mse", "error_metric"],
|
||||
)
|
||||
|
||||
def test_rolling_mean(self):
|
||||
x = np.arange(10)
|
||||
h = np.arange(10)
|
||||
df = diagnostics.rolling_mean_by_h(x=x, h=h, w=1, name="x")
|
||||
assert np.array_equal(x, df["x"].values)
|
||||
assert np.array_equal(h, df["horizon"].values)
|
||||
|
||||
df = diagnostics.rolling_mean_by_h(x, h, w=4, name="x")
|
||||
assert np.allclose(x[3:] - 1.5, df["x"].values)
|
||||
assert np.array_equal(np.arange(3, 10), df["horizon"].values)
|
||||
|
||||
h = np.array([1.0, 2.0, 3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 7.0, 7.0])
|
||||
x_true = np.array([1.0, 5.0, 22.0 / 3])
|
||||
h_true = np.array([3.0, 4.0, 7.0])
|
||||
df = diagnostics.rolling_mean_by_h(x, h, w=3, name="x")
|
||||
assert np.allclose(x_true, df["x"].values)
|
||||
assert np.array_equal(h_true, df["horizon"].values)

        df = diagnostics.rolling_mean_by_h(x, h, w=10, name="x")
        assert np.allclose(np.array([7.0]), df["horizon"].values)
        assert np.allclose(np.array([4.5]), df["x"].values)
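        # When w spans the whole series, everything collapses into the largest
        # horizon: mean(0..9) = 4.5, reported at horizon 7.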

    def test_rolling_median(self):
        x = np.arange(10)
        h = np.arange(10)
        df = diagnostics.rolling_median_by_h(x=x, h=h, w=1, name="x")
        assert np.array_equal(x, df["x"].values)
        assert np.array_equal(h, df["horizon"].values)

        df = diagnostics.rolling_median_by_h(x, h, w=4, name="x")
        x_true = x[3:] - 1.5
        assert np.allclose(x_true, df["x"].values)
        assert np.array_equal(np.arange(3, 10), df["horizon"].values)

        h = np.array([1.0, 2.0, 3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 7.0, 7.0])
        x_true = np.array([1.0, 5.0, 8.0])
        h_true = np.array([3.0, 4.0, 7.0])
        df = diagnostics.rolling_median_by_h(x, h, w=3, name="x")
        assert np.allclose(x_true, df["x"].values)
        assert np.array_equal(h_true, df["horizon"].values)

        df = diagnostics.rolling_median_by_h(x, h, w=10, name="x")
        assert np.allclose(np.array([7.0]), df["horizon"].values)
        assert np.allclose(np.array([4.5]), df["x"].values)


class TestProphetCopy:
    @pytest.fixture(scope="class")
    def data(self, daily_univariate_ts):
        df = daily_univariate_ts.copy()
        df["cap"] = 200.0
        df["binary_feature"] = [0] * 255 + [1] * 255
        return df

    def test_prophet_copy(self, data, backend):
        # All non-default constructor values; each should be copied to the new model.
        holiday = pd.DataFrame({"ds": pd.to_datetime(["2016-12-25"]), "holiday": ["x"]})
        products = itertools.product(
            ["linear", "logistic"],  # growth
            [None, pd.to_datetime(["2016-12-25"])],  # changepoints
            [3],  # n_changepoints
            [0.9],  # changepoint_range
            [True, False],  # yearly_seasonality
            [True, False],  # weekly_seasonality
            [True, False],  # daily_seasonality
            [None, holiday],  # holidays
            ["additive", "multiplicative"],  # seasonality_mode
            [1.1],  # seasonality_prior_scale
            [1.1],  # holidays_prior_scale
            [0.1],  # changepoint_prior_scale
            [100],  # mcmc_samples
            [0.9],  # interval_width
            [200],  # uncertainty_samples
        )
        # Values should be copied correctly
        for product in products:
            m1 = Prophet(*product, stan_backend=backend)
            m1.country_holidays = "US"
            m1.history = m1.setup_dataframe(data.copy(), initialize_scales=True)
            m1.set_auto_seasonalities()
            m2 = diagnostics.prophet_copy(m1)
            assert m1.growth == m2.growth
            assert m1.n_changepoints == m2.n_changepoints
            assert m1.changepoint_range == m2.changepoint_range
            if m1.changepoints is None:
                assert m1.changepoints == m2.changepoints
            else:
                assert m1.changepoints.equals(m2.changepoints)
            assert m2.yearly_seasonality is False
            assert m2.weekly_seasonality is False
            assert m2.daily_seasonality is False
            assert m1.yearly_seasonality == ("yearly" in m2.seasonalities)
            assert m1.weekly_seasonality == ("weekly" in m2.seasonalities)
            assert m1.daily_seasonality == ("daily" in m2.seasonalities)
            if m1.holidays is None:
                assert m1.holidays == m2.holidays
            else:
                assert (m1.holidays == m2.holidays).values.all()
            assert m1.country_holidays == m2.country_holidays
            assert m1.holidays_mode == m2.holidays_mode
            assert m1.seasonality_mode == m2.seasonality_mode
            assert m1.seasonality_prior_scale == m2.seasonality_prior_scale
            assert m1.changepoint_prior_scale == m2.changepoint_prior_scale
            assert m1.holidays_prior_scale == m2.holidays_prior_scale
            assert m1.mcmc_samples == m2.mcmc_samples
            assert m1.interval_width == m2.interval_width
            assert m1.uncertainty_samples == m2.uncertainty_samples

    def test_prophet_copy_custom(self, data, backend):
        changepoints = pd.date_range("2012-06-15", "2012-09-15")
        cutoff = pd.Timestamp("2012-07-25")
        m1 = Prophet(changepoints=changepoints, stan_backend=backend)
        m1.add_seasonality("custom", 10, 5)
        m1.add_regressor("binary_feature")
        m1.fit(data)
        m2 = diagnostics.prophet_copy(m1, cutoff=cutoff)
        changepoints = changepoints[changepoints < cutoff]
        assert (changepoints == m2.changepoints).all()
        assert "custom" in m2.seasonalities
        assert "binary_feature" in m2.extra_regressors
984
.venv/lib/python3.12/site-packages/prophet/tests/test_prophet.py
Normal file
@ -0,0 +1,984 @@
# Copyright (c) Facebook, Inc. and its affiliates.

# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import numpy as np
import pandas as pd
import pytest

from prophet import Prophet
from prophet.utilities import warm_start_params


def train_test_split(ts_data: pd.DataFrame, n_test_rows: int) -> tuple[pd.DataFrame, pd.DataFrame]:
    train = ts_data.head(ts_data.shape[0] - n_test_rows)
    test = ts_data.tail(n_test_rows)
    return train.reset_index(), test.reset_index()


def rmse(predictions, targets) -> float:
    return np.sqrt(np.mean((predictions - targets) ** 2))


class TestProphetFitPredictDefault:
    @pytest.mark.parametrize(
        "scaling,expected",
        [("absmax", 10.64), ("minmax", 11.13)],
        ids=["absmax", "minmax"],
    )
    def test_fit_predict(self, daily_univariate_ts, backend, scaling, expected):
        test_days = 30
        train, test = train_test_split(daily_univariate_ts, test_days)
        forecaster = Prophet(stan_backend=backend, scaling=scaling)
        forecaster.fit(train, seed=1237861298)
        np.random.seed(876543987)
        future = forecaster.make_future_dataframe(test_days, include_history=False)
        future = forecaster.predict(future)
        res = rmse(future["yhat"], test["y"])
        # Higher threshold due to cmdstan 2.33.1 producing numerical differences
        # on macOS Intel (ARM is fine).
        assert res == pytest.approx(expected, 0.1), "backend: {}".format(forecaster.stan_backend)

    @pytest.mark.parametrize(
        "scaling,expected",
        [("absmax", 23.44), ("minmax", 11.29)],
        ids=["absmax", "minmax"],
    )
    def test_fit_predict_newton(self, daily_univariate_ts, backend, scaling, expected):
        test_days = 30
        train, test = train_test_split(daily_univariate_ts, test_days)
        forecaster = Prophet(stan_backend=backend, scaling=scaling)
        forecaster.fit(train, algorithm="Newton", seed=1237861298)
        np.random.seed(876543987)
        future = forecaster.make_future_dataframe(test_days, include_history=False)
        future = forecaster.predict(future)
        res = rmse(future["yhat"], test["y"])
        assert res == pytest.approx(expected, 0.01), "backend: {}".format(forecaster.stan_backend)

    @pytest.mark.parametrize(
        "scaling,expected",
        [("absmax", 127.01), ("minmax", 93.45)],
        ids=["absmax", "minmax"],
    )
    def test_fit_predict_large_numbers(self, large_numbers_ts, backend, scaling, expected):
        test_days = 30
        train, test = train_test_split(large_numbers_ts, test_days)
        forecaster = Prophet(stan_backend=backend, scaling=scaling)
        forecaster.fit(train, seed=1237861298)
        np.random.seed(876543987)
        future = forecaster.make_future_dataframe(test_days, include_history=False)
        future = forecaster.predict(future)
        res = rmse(future["yhat"], test["y"])
        assert res == pytest.approx(expected, 0.01), "backend: {}".format(forecaster.stan_backend)

    @pytest.mark.slow
    def test_fit_predict_sampling(self, daily_univariate_ts, backend):
        test_days = 30
        train, test = train_test_split(daily_univariate_ts, test_days)
        forecaster = Prophet(mcmc_samples=500, stan_backend=backend)
        # chains adjusted from 4 to 7 to satisfy test for cmdstanpy
        forecaster.fit(train, seed=1237861298, chains=7, show_progress=False)
        np.random.seed(876543987)
        future = forecaster.make_future_dataframe(test_days, include_history=False)
        future = forecaster.predict(future)
        # this gives ~ 215.77
        res = rmse(future["yhat"], test["y"])
        assert 193 < res < 236, "backend: {}".format(forecaster.stan_backend)

    def test_fit_predict_no_seasons(self, daily_univariate_ts, backend):
        test_days = 30
        train, _ = train_test_split(daily_univariate_ts, test_days)
        forecaster = Prophet(
            weekly_seasonality=False, yearly_seasonality=False, stan_backend=backend
        )
        forecaster.fit(train)
        future = forecaster.make_future_dataframe(test_days, include_history=False)
        result = forecaster.predict(future)
        assert (future.ds == result.ds).all()

    def test_fit_predict_no_changepoints(self, daily_univariate_ts, backend):
        test_days = daily_univariate_ts.shape[0] // 2
        train, future = train_test_split(daily_univariate_ts, test_days)
        forecaster = Prophet(n_changepoints=0, stan_backend=backend)
        forecaster.fit(train)
        forecaster.predict(future)
        assert forecaster.params is not None
        assert forecaster.n_changepoints == 0

    @pytest.mark.slow
    def test_fit_predict_no_changepoints_mcmc(self, daily_univariate_ts, backend):
        test_days = daily_univariate_ts.shape[0] // 2
        train, future = train_test_split(daily_univariate_ts, test_days)
        forecaster = Prophet(n_changepoints=0, mcmc_samples=100, stan_backend=backend)
        forecaster.fit(train, show_progress=False)
        forecaster.predict(future)
        assert forecaster.params is not None
        assert forecaster.n_changepoints == 0

    def test_fit_changepoint_not_in_history(self, daily_univariate_ts, backend):
        train = daily_univariate_ts[
            (daily_univariate_ts["ds"] < "2013-01-01") | (daily_univariate_ts["ds"] > "2014-01-01")
        ]
        future = pd.DataFrame({"ds": daily_univariate_ts["ds"]})
        forecaster = Prophet(changepoints=["2013-06-06"], stan_backend=backend)
        forecaster.fit(train)
        forecaster.predict(future)
        assert forecaster.params is not None
        assert forecaster.n_changepoints == 1

    def test_fit_predict_duplicates(self, daily_univariate_ts, backend):
        """
        The underlying model should still fit successfully when there are duplicate dates in the history.
        The model essentially sees this as multiple observations for the same time value, and fits the
        parameters accordingly.
        """
        train, test = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        repeated_obs = train.copy()
        repeated_obs["y"] += 10
        train = pd.concat([train, repeated_obs])
        forecaster = Prophet(stan_backend=backend)
        forecaster.fit(train)
        forecaster.predict(test)

    def test_fit_predict_constant_history(self, daily_univariate_ts, backend):
        """
        When the training data history is constant, Prophet should predict the same value for all future dates.
        """
        for constant in [0, 20]:
            train, test = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
            train["y"] = constant
            forecaster = Prophet(stan_backend=backend)
            forecaster.fit(train)
            result = forecaster.predict(test)
            assert result["yhat"].values[-1] == constant

    def test_fit_predict_uncertainty_disabled(self, daily_univariate_ts, backend):
        test_days = daily_univariate_ts.shape[0] // 2
        train, future = train_test_split(daily_univariate_ts, test_days)
        for uncertainty in [0, False]:
            forecaster = Prophet(uncertainty_samples=uncertainty, stan_backend=backend)
            forecaster.fit(train)
            result = forecaster.predict(future)
            expected_cols = [
                "ds",
                "trend",
                "additive_terms",
                "multiplicative_terms",
                "weekly",
                "yhat",
            ]
            assert all(col in expected_cols for col in result.columns.tolist())


class TestProphetDataPrep:
    def test_setup_dataframe(self, daily_univariate_ts, backend):
        """Test that the columns 't' and 'y_scaled' are added to the dataframe."""
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        m = Prophet(stan_backend=backend)
        history = m.setup_dataframe(train, initialize_scales=True)

        assert "t" in history
        assert history["t"].min() == 0.0
        assert history["t"].max() == 1.0

        assert "y_scaled" in history
        assert history["y_scaled"].max() == 1.0

    def test_setup_dataframe_ds_column(self, daily_univariate_ts, backend):
        """Test case where 'ds' exists as an index name and column. Prophet should use the column."""
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        train.index = pd.to_datetime(["1970-01-01" for _ in range(train.shape[0])])
        train.index.rename("ds", inplace=True)
        m = Prophet(stan_backend=backend)
        m.fit(train)
        assert np.all(m.history["ds"].values == train["ds"].values)

    def test_logistic_floor(self, daily_univariate_ts, backend):
        """Test the scaling of y with logistic growth and a floor/cap."""
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        train["floor"] = 10.0
        train["cap"] = 80.0
        m = Prophet(growth="logistic", stan_backend=backend)
        m.fit(train)
        assert m.logistic_floor
        assert "floor" in m.history
        assert m.history["y_scaled"][0] == 1.0
        for col in ["y", "floor", "cap"]:
            train[col] += 10.0
        m2 = Prophet(growth="logistic", stan_backend=backend)
        m2.fit(train)
        assert m2.history["y_scaled"][0] == pytest.approx(1.0, 0.01)

    def test_logistic_floor_minmax(self, daily_univariate_ts, backend):
        """Test the scaling of y with logistic growth and a floor/cap."""
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        train["floor"] = 10.0
        train["cap"] = 80.0
        m = Prophet(growth="logistic", stan_backend=backend, scaling="minmax")
        m.fit(train)
        assert m.logistic_floor
        assert "floor" in m.history
        assert m.history["y_scaled"].min() > 0.0
        assert m.history["y_scaled"].max() < 1.0
        for col in ["y", "floor", "cap"]:
            train[col] += 10.0
        m2 = Prophet(growth="logistic", stan_backend=backend, scaling="minmax")
        m2.fit(train)
        assert m2.history["y_scaled"].min() > 0.0
        assert m2.history["y_scaled"].max() < 1.0
        # Check that the scaling is the same
        assert m2.history["y_scaled"].mean() == m.history["y_scaled"].mean()

    def test_make_future_dataframe(self, daily_univariate_ts, backend):
        train = daily_univariate_ts.head(468 // 2)
        forecaster = Prophet(stan_backend=backend)
        forecaster.fit(train)
        future = forecaster.make_future_dataframe(periods=3, freq="D", include_history=False)
        correct = pd.DatetimeIndex(["2013-04-26", "2013-04-27", "2013-04-28"])
        assert len(future) == 3
        assert np.all(future["ds"].values == correct.values)

        future = forecaster.make_future_dataframe(periods=3, freq=pd.tseries.offsets.MonthEnd(1), include_history=False)
        correct = pd.DatetimeIndex(["2013-04-30", "2013-05-31", "2013-06-30"])
        assert len(future) == 3
        assert np.all(future["ds"].values == correct.values)

    def test_make_future_dataframe_include_history(self, daily_univariate_ts, backend):
        train = daily_univariate_ts.head(468 // 2).copy()
        # cover history with NAs
        train.loc[train.sample(10).index, "y"] = np.nan

        forecaster = Prophet(stan_backend=backend)
        forecaster.fit(train)
        future = forecaster.make_future_dataframe(periods=3, freq="D", include_history=True)

        assert len(future) == train.shape[0] + 3

class TestProphetTrendComponent:
    def test_invalid_growth_input(self, backend):
        msg = 'Parameter "growth" should be "linear", "logistic" or "flat".'
        with pytest.raises(ValueError, match=msg):
            Prophet(growth="constant", stan_backend=backend)

    def test_growth_init(self, daily_univariate_ts, backend):
        model = Prophet(growth="logistic", stan_backend=backend)
        train = daily_univariate_ts.iloc[:468].copy()
        train["cap"] = train["y"].max()

        history = model.setup_dataframe(train, initialize_scales=True)

        k, m = model.linear_growth_init(history)
        assert k == pytest.approx(0.3055671)
        assert m == pytest.approx(0.5307511)

        k, m = model.logistic_growth_init(history)
        assert k == pytest.approx(1.507925, abs=1e-4)
        assert m == pytest.approx(-0.08167497, abs=1e-4)

        k, m = model.flat_growth_init(history)
        assert k == 0
        assert m == pytest.approx(0.49335657, abs=1e-4)

    def test_growth_init_minmax(self, daily_univariate_ts, backend):
        model = Prophet(growth="logistic", stan_backend=backend, scaling="minmax")
        train = daily_univariate_ts.iloc[:468].copy()
        train["cap"] = train["y"].max()

        history = model.setup_dataframe(train, initialize_scales=True)

        k, m = model.linear_growth_init(history)
        assert k == pytest.approx(0.4053406)
        assert m == pytest.approx(0.3775322)

        k, m = model.logistic_growth_init(history)
        assert k == pytest.approx(1.782523, abs=1e-4)
        assert m == pytest.approx(0.280521, abs=1e-4)

        k, m = model.flat_growth_init(history)
        assert k == 0
        assert m == pytest.approx(0.32792770, abs=1e-4)

    @pytest.mark.parametrize("scaling", ["absmax", "minmax"])
    def test_flat_growth(self, backend, scaling):
        m = Prophet(growth="flat", stan_backend=backend, scaling=scaling)
        x = np.linspace(0, 2 * np.pi, 8 * 7)
        history = pd.DataFrame(
            {
                "ds": pd.date_range(start="2020-01-01", periods=8 * 7, freq="d"),
                "y": 30 + np.sin(x * 8.0),
            }
        )
        m.fit(history)
        future = m.make_future_dataframe(10, include_history=True)
        fcst = m.predict(future)
        m_ = m.params["m"][0, 0]
        k = m.params["k"][0, 0]
        assert k == pytest.approx(0.0)
        assert fcst["trend"].unique()[0] == pytest.approx((m_ * m.y_scale) + m.y_min)
        assert np.round((m_ * m.y_scale) + m.y_min) == 30.0
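        # The fitted offset m_ lives in scaled space; multiplying by y_scale and
        # adding y_min undoes Prophet's scaling (y_min should be 0 under absmax
        # scaling), recovering the constant level of ~30 in the data.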

    def test_piecewise_linear(self, backend):
        model = Prophet(stan_backend=backend)

        t = np.arange(11.0)
        m = 0
        k = 1.0
        deltas = np.array([0.5])
        changepoint_ts = np.array([5])

        y = model.piecewise_linear(t, deltas, k, m, changepoint_ts)
        y_true = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.5, 8.0, 9.5, 11.0, 12.5])
        assert (y - y_true).sum() == 0.0
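        # Worked check: the slope is k = 1 up to the changepoint at t = 5, then
        # k + delta = 1.5 afterwards, with the offset adjusted so the trend stays
        # continuous: y(6) = 5 + 1.5 = 6.5, y(7) = 8.0, and so on.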

        t = t[8:]
        y_true = y_true[8:]
        y = model.piecewise_linear(t, deltas, k, m, changepoint_ts)
        assert (y - y_true).sum() == 0.0

    def test_piecewise_logistic(self, backend):
        model = Prophet(stan_backend=backend)

        t = np.arange(11.0)
        cap = np.ones(11) * 10
        m = 0
        k = 1.0
        deltas = np.array([0.5])
        changepoint_ts = np.array([5])

        y = model.piecewise_logistic(t, cap, deltas, k, m, changepoint_ts)
        y_true = np.array(
            [
                5.000000,
                7.310586,
                8.807971,
                9.525741,
                9.820138,
                9.933071,
                9.984988,
                9.996646,
                9.999252,
                9.999833,
                9.999963,
            ]
        )
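        # Worked check at t = 0: logistic growth is cap / (1 + exp(-k * (t - m)))
        # before any changepoint, so 10 / (1 + exp(0)) = 5.0, matching y_true[0].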

        t = t[8:]
        y_true = y_true[8:]
        cap = cap[8:]
        y = model.piecewise_logistic(t, cap, deltas, k, m, changepoint_ts)
        assert (y - y_true).sum() == pytest.approx(0.0, abs=1e-5)

    def test_flat_trend(self, backend):
        model = Prophet(stan_backend=backend)
        t = np.arange(11)
        m = 0.5
        y = model.flat_trend(t, m)
        y_true = np.array([0.5] * 11)
        assert (y - y_true).sum() == 0.0
        t = t[8:]
        y_true = y_true[8:]
        y = model.flat_trend(t, m)
        assert (y - y_true).sum() == 0.0

    def test_get_changepoints(self, daily_univariate_ts, backend):
        """
        By default, Prophet uses the first 80% of the history to detect changepoints.
        """
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        m = Prophet(stan_backend=backend)
        history = m.setup_dataframe(train, initialize_scales=True)
        m.history = history
        m.set_changepoints()
        cp = m.changepoints_t
        assert cp.shape[0] == m.n_changepoints
        assert len(cp.shape) == 1
        assert cp.min() > 0
        cp_indx = int(np.ceil(0.8 * history.shape[0]))
        assert cp.max() <= history["t"].values[cp_indx]

    def test_set_changepoint_range(self, daily_univariate_ts, backend):
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        m = Prophet(changepoint_range=0.4, stan_backend=backend)
        history = m.setup_dataframe(train, initialize_scales=True)
        m.history = history
        m.set_changepoints()
        cp = m.changepoints_t
        assert cp.shape[0] == m.n_changepoints
        assert len(cp.shape) == 1
        assert cp.min() > 0
        cp_indx = int(np.ceil(0.4 * history.shape[0]))
        assert cp.max() <= history["t"].values[cp_indx]
        for out_of_range in [-0.1, 2]:
            with pytest.raises(ValueError):
                m = Prophet(changepoint_range=out_of_range, stan_backend=backend)

    def test_get_zero_changepoints(self, daily_univariate_ts, backend):
        train, _ = train_test_split(daily_univariate_ts, daily_univariate_ts.shape[0] // 2)
        m = Prophet(n_changepoints=0, stan_backend=backend)
        history = m.setup_dataframe(train, initialize_scales=True)
        m.history = history
        m.set_changepoints()
        cp = m.changepoints_t
        assert cp.shape[0] == 1
        assert cp[0] == 0

    def test_override_n_changepoints(self, daily_univariate_ts, backend):
        train = daily_univariate_ts.head(20).copy()
        m = Prophet(n_changepoints=15, stan_backend=backend)
        history = m.setup_dataframe(train, initialize_scales=True)
        m.history = history
        m.set_changepoints()
        assert m.n_changepoints == 15
        cp = m.changepoints_t
        assert cp.shape[0] == 15


class TestProphetSeasonalComponent:
    def test_fourier_series_weekly(self, daily_univariate_ts):
        mat = Prophet.fourier_series(daily_univariate_ts["ds"], 7, 3)
        # These are from the R forecast package directly.
        true_values = np.array([0.7818315, 0.6234898, 0.9749279, -0.2225209, 0.4338837, -0.9009689])
        assert np.sum((mat[0] - true_values) ** 2) == pytest.approx(0.0)
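        # Each Fourier order n contributes a sin/cos pair, sin(2*pi*n*t/period)
        # and cos(2*pi*n*t/period), where t is the date in days (measured from
        # the Unix epoch, as I understand Prophet's convention), so order 3 with
        # period 7 yields the six values checked above.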

    def test_fourier_series_yearly(self, daily_univariate_ts):
        mat = Prophet.fourier_series(daily_univariate_ts["ds"], 365.25, 3)
        # These are from the R forecast package directly.
        true_values = np.array(
            [0.7006152, -0.7135393, -0.9998330, 0.01827656, 0.7262249, 0.6874572]
        )
        assert np.sum((mat[0] - true_values) ** 2) == pytest.approx(0.0)

    def test_auto_weekly_seasonality(self, daily_univariate_ts, backend):
        # Should be enabled
        train = daily_univariate_ts.head(15)
        m = Prophet(stan_backend=backend)
        assert m.weekly_seasonality == "auto"
        m.fit(train)
        assert "weekly" in m.seasonalities
        assert m.seasonalities["weekly"] == {
            "period": 7,
            "fourier_order": 3,
            "prior_scale": 10.0,
            "mode": "additive",
            "condition_name": None,
        }
        # Should be disabled due to too short history
        train = daily_univariate_ts.head(9)
        m = Prophet(stan_backend=backend)
        m.fit(train)
        assert "weekly" not in m.seasonalities
        m = Prophet(weekly_seasonality=True, stan_backend=backend)
        m.fit(train)
        assert "weekly" in m.seasonalities
        # Should be False due to weekly spacing
        train = daily_univariate_ts.iloc[::7, :]
        m = Prophet(stan_backend=backend)
        m.fit(train)
        assert "weekly" not in m.seasonalities
        m = Prophet(weekly_seasonality=2, seasonality_prior_scale=3.0, stan_backend=backend)
        m.fit(daily_univariate_ts)
        assert m.seasonalities["weekly"] == {
            "period": 7,
            "fourier_order": 2,
            "prior_scale": 3.0,
            "mode": "additive",
            "condition_name": None,
        }

    def test_auto_yearly_seasonality(self, daily_univariate_ts, backend):
        # Should be enabled
        m = Prophet(stan_backend=backend)
        assert m.yearly_seasonality == "auto"
        m.fit(daily_univariate_ts)
        assert "yearly" in m.seasonalities
        assert m.seasonalities["yearly"] == {
            "period": 365.25,
            "fourier_order": 10,
            "prior_scale": 10.0,
            "mode": "additive",
            "condition_name": None,
        }
        # Should be disabled due to too short history
        train = daily_univariate_ts.head(240)
        m = Prophet(stan_backend=backend)
        m.fit(train)
        assert "yearly" not in m.seasonalities
        m = Prophet(yearly_seasonality=True, stan_backend=backend)
        m.fit(train)
        assert "yearly" in m.seasonalities
        m = Prophet(yearly_seasonality=7, seasonality_prior_scale=3.0, stan_backend=backend)
        m.fit(daily_univariate_ts)
        assert m.seasonalities["yearly"] == {
            "period": 365.25,
            "fourier_order": 7,
            "prior_scale": 3.0,
            "mode": "additive",
            "condition_name": None,
        }

    def test_auto_daily_seasonality(self, daily_univariate_ts, subdaily_univariate_ts, backend):
        # Should be enabled
        m = Prophet(stan_backend=backend)
        assert m.daily_seasonality == "auto"
        m.fit(subdaily_univariate_ts)
        assert "daily" in m.seasonalities
        assert m.seasonalities["daily"] == {
            "period": 1,
            "fourier_order": 4,
            "prior_scale": 10.0,
            "mode": "additive",
            "condition_name": None,
        }
        # Should be disabled due to too short history
        train = subdaily_univariate_ts.head(430)
        m = Prophet(stan_backend=backend)
        m.fit(train)
        assert "daily" not in m.seasonalities
        m = Prophet(daily_seasonality=True, stan_backend=backend)
        m.fit(train)
        assert "daily" in m.seasonalities
        m = Prophet(daily_seasonality=7, seasonality_prior_scale=3.0, stan_backend=backend)
        m.fit(subdaily_univariate_ts)
        assert m.seasonalities["daily"] == {
            "period": 1,
            "fourier_order": 7,
            "prior_scale": 3.0,
            "mode": "additive",
            "condition_name": None,
        }
        m = Prophet(stan_backend=backend)
        m.fit(daily_univariate_ts)
        assert "daily" not in m.seasonalities

    def test_set_seasonality_mode(self, backend):
        # Setting attribute
        m = Prophet(stan_backend=backend)
        assert m.seasonality_mode == "additive"
        m = Prophet(seasonality_mode="multiplicative", stan_backend=backend)
        assert m.seasonality_mode == "multiplicative"
        with pytest.raises(ValueError):
            Prophet(seasonality_mode="batman", stan_backend=backend)

    def test_set_holidays_mode(self, backend):
        # Setting attribute
        m = Prophet(stan_backend=backend)
        assert m.holidays_mode == "additive"
        m = Prophet(seasonality_mode="multiplicative", stan_backend=backend)
        assert m.holidays_mode == "multiplicative"
        m = Prophet(holidays_mode="multiplicative", stan_backend=backend)
        assert m.holidays_mode == "multiplicative"
        with pytest.raises(ValueError):
            Prophet(holidays_mode="batman", stan_backend=backend)

    def test_seasonality_modes(self, daily_univariate_ts, backend):
        # Model with holidays, seasonalities, and extra regressors
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2016-12-25"]),
                "holiday": ["xmas"],
                "lower_window": [-1],
                "upper_window": [0],
            }
        )
        m = Prophet(seasonality_mode="multiplicative", holidays=holidays, stan_backend=backend)
        m.add_seasonality("monthly", period=30, mode="additive", fourier_order=3)
        m.add_regressor("binary_feature", mode="additive")
        m.add_regressor("numeric_feature")
        # Construct seasonal features
        df = daily_univariate_ts.copy()
        df["binary_feature"] = [0] * 255 + [1] * 255
        df["numeric_feature"] = range(510)
        df = m.setup_dataframe(df, initialize_scales=True)
        m.history = df.copy()
        m.set_auto_seasonalities()
        seasonal_features, prior_scales, component_cols, modes = m.make_all_seasonality_features(df)
        assert sum(component_cols["additive_terms"]) == 7
        assert sum(component_cols["multiplicative_terms"]) == 29
        assert set(modes["additive"]) == {
            "monthly",
            "binary_feature",
            "additive_terms",
            "extra_regressors_additive",
        }
        assert set(modes["multiplicative"]) == {
            "weekly",
            "yearly",
            "xmas",
            "numeric_feature",
            "multiplicative_terms",
            "extra_regressors_multiplicative",
            "holidays",
        }


class TestProphetCustomSeasonalComponent:
    def test_custom_monthly_seasonality(self, backend):
        m = Prophet(stan_backend=backend)
        m.add_seasonality(name="monthly", period=30, fourier_order=5, prior_scale=2.0)
        assert m.seasonalities["monthly"] == {
            "period": 30,
            "fourier_order": 5,
            "prior_scale": 2.0,
            "mode": "additive",
            "condition_name": None,
        }

    def test_duplicate_component_names(self, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2017-01-02"]),
                "holiday": ["special_day"],
                "prior_scale": [4.0],
            }
        )
        m = Prophet(holidays=holidays, stan_backend=backend)

        with pytest.raises(ValueError):
            m.add_seasonality(name="special_day", period=30, fourier_order=5)
        with pytest.raises(ValueError):
            m.add_seasonality(name="trend", period=30, fourier_order=5)
        m.add_seasonality(name="weekly", period=30, fourier_order=5)

    def test_custom_fourier_order(self, backend):
        """Fourier order cannot be <= 0"""
        m = Prophet(stan_backend=backend)
        with pytest.raises(ValueError):
            m.add_seasonality(name="weekly", period=7, fourier_order=0)
        with pytest.raises(ValueError):
            m.add_seasonality(name="weekly", period=7, fourier_order=-1)

    def test_custom_priors(self, daily_univariate_ts, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2017-01-02"]),
                "holiday": ["special_day"],
                "prior_scale": [4.0],
            }
        )
        m = Prophet(
            holidays=holidays,
            yearly_seasonality=False,
            seasonality_mode="multiplicative",
            stan_backend=backend,
        )
        m.add_seasonality(
            name="monthly", period=30, fourier_order=5, prior_scale=2.0, mode="additive"
        )
        m.fit(daily_univariate_ts)
        assert m.seasonalities["monthly"]["mode"] == "additive"
        assert m.seasonalities["weekly"]["mode"] == "multiplicative"
        seasonal_features, prior_scales, component_cols, modes = m.make_all_seasonality_features(
            m.history
        )
        assert sum(component_cols["monthly"]) == 10
        assert sum(component_cols["special_day"]) == 1
        assert sum(component_cols["weekly"]) == 6
        assert sum(component_cols["additive_terms"]) == 10
        assert sum(component_cols["multiplicative_terms"]) == 7

        if seasonal_features.columns[0] == "monthly_delim_1":
            true = [2.0] * 10 + [10.0] * 6 + [4.0]
            assert sum(component_cols["monthly"][:10]) == 10
            assert sum(component_cols["weekly"][10:16]) == 6
        else:
            true = [10.0] * 6 + [2.0] * 10 + [4.0]
            assert sum(component_cols["weekly"][:6]) == 6
            assert sum(component_cols["monthly"][6:16]) == 10
        assert prior_scales == true

    def test_conditional_custom_seasonality(self, daily_univariate_ts, backend):
        m = Prophet(weekly_seasonality=False, yearly_seasonality=False, stan_backend=backend)
        m.add_seasonality(
            name="conditional_weekly",
            period=7,
            fourier_order=3,
            prior_scale=2.0,
            condition_name="is_conditional_week",
        )
        m.add_seasonality(name="normal_monthly", period=30.5, fourier_order=5, prior_scale=2.0)
        df = daily_univariate_ts.copy()
        with pytest.raises(ValueError):
            # Require all condition names in df
            m.fit(df)
        df["is_conditional_week"] = [0] * 255 + [2] * 255
        with pytest.raises(ValueError):
            # Require boolean compatible values
            m.fit(df)
        df["is_conditional_week"] = [0] * 255 + [1] * 255
        m.fit(df)
        assert m.seasonalities["conditional_weekly"] == {
            "period": 7,
            "fourier_order": 3,
            "prior_scale": 2.0,
            "mode": "additive",
            "condition_name": "is_conditional_week",
        }
        assert m.seasonalities["normal_monthly"]["condition_name"] is None
        seasonal_features, prior_scales, component_cols, modes = m.make_all_seasonality_features(
            m.history
        )
        # Confirm that only rows where is_conditional_week is set have non-zero entries
        condition_cols = [
            c for c in seasonal_features.columns if c.startswith("conditional_weekly")
        ]
        assert np.array_equal(
            (seasonal_features[condition_cols] != 0).any(axis=1).values,
            df["is_conditional_week"].values,
        )


class TestProphetHolidays:
    def test_holidays_lower_window(self, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2016-12-25"]),
                "holiday": ["xmas"],
                "lower_window": [-1],
                "upper_window": [0],
            }
        )
        model = Prophet(holidays=holidays, stan_backend=backend)
        df = pd.DataFrame({"ds": pd.date_range("2016-12-20", "2016-12-31")})
        feats, priors, names = model.make_holiday_features(df["ds"], model.holidays)
        assert feats.shape == (df.shape[0], 2)
        assert (feats.sum(axis=0) - np.array([1.0, 1.0])).sum() == 0.0
        assert priors == [10.0, 10.0]  # Default prior
        assert names == ["xmas"]

    def test_holidays_upper_window(self, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2016-12-25"]),
                "holiday": ["xmas"],
                "lower_window": [-1],
                "upper_window": [10],
            }
        )
        m = Prophet(holidays=holidays, stan_backend=backend)
        df = pd.DataFrame({"ds": pd.date_range("2016-12-20", "2016-12-31")})
        feats, priors, names = m.make_holiday_features(df["ds"], m.holidays)
        # 12 columns generated even though only 8 overlap
        assert feats.shape == (df.shape[0], 12)
        assert priors == [10.0 for _ in range(12)]
        assert names == ["xmas"]

    def test_holidays_priors(self, backend):
        # Check prior specifications
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2016-12-25", "2017-12-25"]),
                "holiday": ["xmas", "xmas"],
                "lower_window": [-1, -1],
                "upper_window": [0, 0],
                "prior_scale": [5.0, 5.0],
            }
        )
        m = Prophet(holidays=holidays, stan_backend=backend)
        df = pd.DataFrame({"ds": pd.date_range("2016-12-20", "2016-12-31")})
        feats, priors, names = m.make_holiday_features(df["ds"], m.holidays)
        assert priors == [5.0, 5.0]
        assert names == ["xmas"]
        # 2 different priors
        holidays2 = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2012-06-06", "2013-06-06"]),
                "holiday": ["seans-bday"] * 2,
                "lower_window": [0] * 2,
                "upper_window": [1] * 2,
                "prior_scale": [8] * 2,
            }
        )
        holidays2 = pd.concat((holidays, holidays2), sort=True)
        m = Prophet(holidays=holidays2, stan_backend=backend)
        feats, priors, names = m.make_holiday_features(df["ds"], m.holidays)
        pn = zip(priors, [s.split("_delim_")[0] for s in feats.columns])
        for t in pn:
            assert t in [(8.0, "seans-bday"), (5.0, "xmas")]
        holidays2 = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2012-06-06", "2013-06-06"]),
                "holiday": ["seans-bday"] * 2,
                "lower_window": [0] * 2,
                "upper_window": [1] * 2,
            }
        )
        holidays2 = pd.concat((holidays, holidays2), sort=True)
        feats, priors, names = Prophet(
            holidays=holidays2, holidays_prior_scale=4, stan_backend=backend
        ).make_holiday_features(df["ds"], holidays2)
        assert set(priors) == {4.0, 5.0}

    def test_holidays_bad_priors(self, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2016-12-25", "2016-12-27"]),
                "holiday": ["xmasish", "xmasish"],
                "lower_window": [-1, -1],
                "upper_window": [0, 0],
                "prior_scale": [5.0, 6.0],
            }
        )
        df = pd.DataFrame({"ds": pd.date_range("2016-12-20", "2016-12-31")})
        with pytest.raises(ValueError):
            Prophet(holidays=holidays, stan_backend=backend).make_holiday_features(
                df["ds"], holidays
            )

    def test_fit_with_holidays(self, daily_univariate_ts, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2012-06-06", "2013-06-06"]),
                "holiday": ["seans-bday"] * 2,
                "lower_window": [0] * 2,
                "upper_window": [1] * 2,
            }
        )
        model = Prophet(holidays=holidays, uncertainty_samples=0, stan_backend=backend)
        model.fit(daily_univariate_ts).predict()

    def test_fit_predict_with_country_holidays(self, daily_univariate_ts, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2012-06-06", "2013-06-06"]),
                "holiday": ["seans-bday"] * 2,
                "lower_window": [0] * 2,
                "upper_window": [1] * 2,
            }
        )
        # Test with holidays and country_holidays
        model = Prophet(holidays=holidays, uncertainty_samples=0, stan_backend=backend)
        model.add_country_holidays(country_name="US")
        model.fit(daily_univariate_ts).predict()
        # There are training holidays missing in the test set
        train = daily_univariate_ts.head(154)
        future = daily_univariate_ts.tail(355)
        model = Prophet(uncertainty_samples=0, stan_backend=backend)
        model.add_country_holidays(country_name="US")
        model.fit(train).predict(future)
        # There are test holidays missing in the training set
        train = daily_univariate_ts.tail(355)
        model = Prophet(uncertainty_samples=0, stan_backend=backend)
        model.add_country_holidays(country_name="US")
        model.fit(train)
        future = model.make_future_dataframe(periods=60, include_history=False)
        model.predict(future)

    def test_subdaily_holidays(self, subdaily_univariate_ts, backend):
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2017-01-02"]),
                "holiday": ["special_day"],
            }
        )
        m = Prophet(holidays=holidays, stan_backend=backend)
        m.fit(subdaily_univariate_ts)
        fcst = m.predict()
        assert sum(fcst["special_day"] == 0) == 575


class TestProphetRegressors:
    def test_added_regressors(self, daily_univariate_ts, backend):
        m = Prophet(stan_backend=backend)
        m.add_regressor("binary_feature", prior_scale=0.2)
        m.add_regressor("numeric_feature", prior_scale=0.5)
        m.add_regressor("numeric_feature2", prior_scale=0.5, mode="multiplicative")
        m.add_regressor("binary_feature2", standardize=True)
        df = daily_univariate_ts.copy()
        df["binary_feature"] = ["0"] * 255 + ["1"] * 255
        df["numeric_feature"] = range(510)
        df["numeric_feature2"] = range(510)
        with pytest.raises(ValueError):
            # Require all regressors in df
            m.fit(df)
        df["binary_feature2"] = [1] * 100 + [0] * 410
        m.fit(df)
        # Check that standardizations are correctly set
        assert m.extra_regressors["binary_feature"] == {
            "prior_scale": 0.2,
            "mu": 0,
            "std": 1,
            "standardize": "auto",
            "mode": "additive",
        }
        assert m.extra_regressors["numeric_feature"]["prior_scale"] == 0.5
        assert m.extra_regressors["numeric_feature"]["mu"] == 254.5
        assert m.extra_regressors["numeric_feature"]["std"] == pytest.approx(147.368585, abs=1e-5)
        assert m.extra_regressors["numeric_feature2"]["mode"] == "multiplicative"
        assert m.extra_regressors["binary_feature2"]["prior_scale"] == 10.0
        assert m.extra_regressors["binary_feature2"]["mu"] == pytest.approx(0.1960784, abs=1e-5)
        assert m.extra_regressors["binary_feature2"]["std"] == pytest.approx(0.3974183, abs=1e-5)
        # Check that standardization is done correctly
        df2 = m.setup_dataframe(df.copy())
        assert df2["binary_feature"][0] == 0
        assert df2["numeric_feature"][0] == pytest.approx(-1.726962, abs=1e-4)
        assert df2["binary_feature2"][0] == pytest.approx(2.022859, abs=1e-4)
        # Check that feature matrix and prior scales are correctly constructed
        seasonal_features, prior_scales, component_cols, modes = m.make_all_seasonality_features(
            df2
        )
        assert seasonal_features.shape[1] == 30
        names = ["binary_feature", "numeric_feature", "binary_feature2"]
        true_priors = [0.2, 0.5, 10.0]
        for i, name in enumerate(names):
            assert name in seasonal_features
            assert sum(component_cols[name]) == 1
            assert sum(np.array(prior_scales) * component_cols[name]) == true_priors[i]
        # Check that forecast components are reasonable
        future = pd.DataFrame(
            {
                "ds": ["2014-06-01"],
                "binary_feature": [0],
                "numeric_feature": [10],
                "numeric_feature2": [10],
            }
        )
        # future dataframe also requires regressor values
        with pytest.raises(ValueError):
            m.predict(future)
        future["binary_feature2"] = 0
        fcst = m.predict(future)
        assert fcst.shape[1] == 37
        assert fcst["binary_feature"][0] == 0
        assert fcst["extra_regressors_additive"][0] == pytest.approx(
            fcst["numeric_feature"][0] + fcst["binary_feature2"][0]
        )
        assert fcst["extra_regressors_multiplicative"][0] == pytest.approx(
            fcst["numeric_feature2"][0]
        )
        assert fcst["additive_terms"][0] == pytest.approx(
            fcst["yearly"][0] + fcst["weekly"][0] + fcst["extra_regressors_additive"][0]
        )
        assert fcst["multiplicative_terms"][0] == pytest.approx(
            fcst["extra_regressors_multiplicative"][0]
        )
        assert fcst["yhat"][0] == pytest.approx(
            fcst["trend"][0] * (1 + fcst["multiplicative_terms"][0]) + fcst["additive_terms"][0]
        )
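        # The check above mirrors Prophet's forecast composition: multiplicative
        # components scale the trend while additive components shift it,
        # yhat = trend * (1 + multiplicative_terms) + additive_terms.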

    def test_constant_regressor(self, daily_univariate_ts, backend):
        df = daily_univariate_ts.copy()
        df["constant_feature"] = 0
        m = Prophet(stan_backend=backend)
        m.add_regressor("constant_feature")
        m.fit(df)
        assert m.extra_regressors["constant_feature"]["std"] == 1


class TestProphetWarmStart:
    def test_fit_warm_start(self, daily_univariate_ts, backend):
        m = Prophet(stan_backend=backend).fit(daily_univariate_ts.iloc[:500])
        m2 = Prophet(stan_backend=backend).fit(
            daily_univariate_ts.iloc[:510], init=warm_start_params(m)
        )
        assert len(m2.params["delta"][0]) == 25

    def test_sampling_warm_start(self, daily_univariate_ts, backend):
        m = Prophet(mcmc_samples=100, stan_backend=backend).fit(
            daily_univariate_ts.iloc[:500], show_progress=False
        )
        m2 = Prophet(mcmc_samples=100, stan_backend=backend).fit(
            daily_univariate_ts.iloc[:510], init=warm_start_params(m), show_progress=False
        )
        assert m2.params["delta"].shape == (200, 25)
@ -0,0 +1,142 @@
# Copyright (c) Facebook, Inc. and its affiliates.

# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import json
from pathlib import Path

import numpy as np
import pandas as pd
import pytest

from prophet import Prophet
from prophet.serialize import PD_DATAFRAME, PD_SERIES, model_from_json, model_to_json


class TestSerialize:
    def test_simple_serialize(self, daily_univariate_ts, backend):
        m = Prophet(stan_backend=backend)
        df = daily_univariate_ts.head(daily_univariate_ts.shape[0] - 30)
        m.fit(df)

        future = m.make_future_dataframe(2, include_history=False)
        fcst = m.predict(future)

        model_str = model_to_json(m)
        # Make sure json doesn't get too large in the future
        assert len(model_str) < 200000
        m2 = model_from_json(model_str)

        # Check that m and m2 are equal
        assert m.__dict__.keys() == m2.__dict__.keys()
        for k, v in m.__dict__.items():
            if k in ["stan_fit", "stan_backend"]:
                continue
            if k == "params":
                assert v.keys() == m2.params.keys()
                for kk, vv in v.items():
                    assert np.array_equal(vv, m2.params[kk])
            elif k in PD_SERIES and v is not None:
                assert v.equals(m2.__dict__[k])
            elif k in PD_DATAFRAME and v is not None:
                pd.testing.assert_frame_equal(v, m2.__dict__[k], check_index_type=False)
            elif k == "changepoints_t":
                assert np.array_equal(v, m2.__dict__[k])
            else:
                assert v == m2.__dict__[k]
        assert m2.stan_fit is None
        assert m2.stan_backend is None

        # Check that m2 makes the same forecast
        future2 = m2.make_future_dataframe(2, include_history=False)
        fcst2 = m2.predict(future2)

        assert np.array_equal(fcst["yhat"].values, fcst2["yhat"].values)

    def test_full_serialize(self, daily_univariate_ts, backend):
        # Construct a model with all attributes
        holidays = pd.DataFrame(
            {
                "ds": pd.to_datetime(["2012-06-06", "2013-06-06"]),
                "holiday": ["seans-bday"] * 2,
                "lower_window": [0] * 2,
                "upper_window": [1] * 2,
            }
        )
        # Test with holidays and country_holidays
        m = Prophet(
            holidays=holidays,
            seasonality_mode="multiplicative",
            changepoints=["2012-07-01", "2012-10-01", "2013-01-01"],
            stan_backend=backend,
        )
        m.add_country_holidays(country_name="US")
        m.add_seasonality(
            name="conditional_weekly",
            period=7,
            fourier_order=3,
            prior_scale=2.0,
            condition_name="is_conditional_week",
        )
        m.add_seasonality(name="normal_monthly", period=30.5, fourier_order=5, prior_scale=2.0)
        df = daily_univariate_ts.copy()
        df["is_conditional_week"] = [0] * 255 + [1] * 255
        m.add_regressor("binary_feature", prior_scale=0.2)
        m.add_regressor("numeric_feature", prior_scale=0.5)
        m.add_regressor("numeric_feature2", prior_scale=0.5, mode="multiplicative")
        m.add_regressor("binary_feature2", standardize=True)
        df["binary_feature"] = ["0"] * 255 + ["1"] * 255
        df["numeric_feature"] = range(510)
        df["numeric_feature2"] = range(510)
        df["binary_feature2"] = [1] * 100 + [0] * 410

        train = df.head(400)
        test = df.tail(100)

        m.fit(train)
        fcst = m.predict(test)
        # Serialize!
        m2 = model_from_json(model_to_json(m))

        # Check that m and m2 are equal
        assert m.__dict__.keys() == m2.__dict__.keys()
        for k, v in m.__dict__.items():
            if k in ["stan_fit", "stan_backend"]:
                continue
            if k == "params":
                assert v.keys() == m2.params.keys()
                for kk, vv in v.items():
                    assert np.array_equal(vv, m2.params[kk])
            elif k in PD_SERIES and v is not None:
                assert v.equals(m2.__dict__[k])
            elif k in PD_DATAFRAME and v is not None:
                pd.testing.assert_frame_equal(v, m2.__dict__[k], check_index_type=False)
            elif k == "changepoints_t":
                assert np.array_equal(v, m2.__dict__[k])
            else:
                assert v == m2.__dict__[k]
        assert m2.stan_fit is None
        assert m2.stan_backend is None

        # Check that m2 makes the same forecast
        fcst2 = m2.predict(test)
        assert np.array_equal(fcst["yhat"].values, fcst2["yhat"].values)

    def test_backwards_compatibility(self):
        old_versions = {
            "0.6.1.dev0": (29.3669923968994, "fb"),
            "0.7.1": (29.282810844704414, "fb"),
            "1.0.1": (29.282810844704414, ""),
        }
        for v, (pred_val, v_str) in old_versions.items():
            fname = Path(__file__).parent / f"serialized_model_v{v}.json"
            with open(fname, "r") as fin:
                model_str = json.load(fin)
            # Check that it deserializes
            m = model_from_json(model_str)
            assert json.loads(model_str)[f"__{v_str}prophet_version"] == v
            # Predict
            future = m.make_future_dataframe(10)
            fcst = m.predict(future)
            assert fcst["yhat"].values[-1] == pytest.approx(pred_val)
@ -0,0 +1,27 @@
# Copyright (c) Facebook, Inc. and its affiliates.

# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import numpy as np

from prophet import Prophet
from prophet.utilities import regressor_coefficients


class TestUtilities:
    def test_regressor_coefficients(self, daily_univariate_ts, backend):
        m = Prophet(stan_backend=backend)
        df = daily_univariate_ts.copy()
        np.random.seed(123)
        df["regr1"] = np.random.normal(size=df.shape[0])
        df["regr2"] = np.random.normal(size=df.shape[0])
        m.add_regressor("regr1", mode="additive")
        m.add_regressor("regr2", mode="multiplicative")
        m.fit(df)

        coefs = regressor_coefficients(m)
        assert coefs.shape == (2, 6)
        # No MCMC sampling, so lower and upper should be the same as mean
        assert np.array_equal(coefs["coef_lower"].values, coefs["coef"].values)
        assert np.array_equal(coefs["coef_upper"].values, coefs["coef"].values)
Block a user