optional behaviour control by user via config #22

Merged
foefl merged 5 commits from add_cfg into main 2025-04-16 11:43:25 +00:00
8 changed files with 101 additions and 54 deletions
Showing only changes of commit c2757cca26

View File

@@ -33,7 +33,6 @@ from delta_barth.constants import (
     DEFAULT_DB_ERR_CODE,
     DUMMY_DATA_PATH,
     FEATURES_SALES_PROGNOSIS,
-    SALES_BASE_NUM_DATAPOINTS_MONTHS,
     SALES_MIN_NUM_DATAPOINTS,
 )
 from delta_barth.errors import STATUS_HANDLER, wrap_result
@@ -433,7 +432,7 @@ def pipeline_sales_forecast(
     pipe = _process_sales(
         pipe,
         min_num_data_points=SALES_MIN_NUM_DATAPOINTS,
-        base_num_data_points_months=SALES_BASE_NUM_DATAPOINTS_MONTHS,
+        base_num_data_points_months=SESSION.cfg.forecast.threshold_month_data_points,
     )
     if pipe.statistics is not None:
         res = _write_sales_forecast_stats_wrapped(pipe.statistics)

View File

@@ -5,6 +5,7 @@ from typing import Final
 from delta_barth.types import DualDict, HttpContentHeaders

 # ** config
+CFG_FILENAME: Final[str] = "dopt-cfg.toml"

 # ** lib path
 lib_path = Path(__file__).parent
@@ -63,4 +64,6 @@ FEATURES_SALES_PROGNOSIS: Final[frozenset[str]] = frozenset(
 # ** Pipelines
 # ** Forecast
 SALES_MIN_NUM_DATAPOINTS: Final[int] = 36
-SALES_BASE_NUM_DATAPOINTS_MONTHS: Final[int] = 36
+# !! now in config
+# TODO remove later till proven stable
+# SALES_BASE_NUM_DATAPOINTS_MONTHS: Final[int] = 36
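Note: the threshold now lives in the user-editable TOML file named by CFG_FILENAME. The shipped default dopt-cfg.toml is not part of this commit's diff, so the snippet below is only an illustrative sketch of the expected shape: a [forecast] table parsed with the standard tomllib module, with a value mirroring the retired SALES_BASE_NUM_DATAPOINTS_MONTHS constant.

# Illustrative only: the contents of the shipped dopt-cfg.toml are not shown in this
# commit. The key names follow the accesses in the diff
# (cfg.forecast.threshold_month_data_points); the value 36 mirrors the retired constant.
import tomllib

EXAMPLE_CFG = """
[forecast]
threshold_month_data_points = 36
"""

parsed = tomllib.loads(EXAMPLE_CFG)
assert parsed["forecast"]["threshold_month_data_points"] == 36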

View File

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import shutil
 from pathlib import Path
 from typing import TYPE_CHECKING, Final
@@ -14,12 +15,19 @@ from delta_barth.api.common import (
     LoginResponse,
     validate_credentials,
 )
-from delta_barth.constants import API_CON_TIMEOUT, DB_ECHO
+from delta_barth.config import LazyCfgLoader
+from delta_barth.constants import (
+    API_CON_TIMEOUT,
+    CFG_FILENAME,
+    DB_ECHO,
+    LIB_PATH,
+)
 from delta_barth.errors import STATUS_HANDLER
 from delta_barth.logging import logger_session as logger
 from delta_barth.types import DelBarApiError, Status

 if TYPE_CHECKING:
+    from delta_barth.config import Config
     from delta_barth.types import ApiCredentials, HttpContentHeaders
@@ -41,6 +49,7 @@ class Session:
         base_headers: HttpContentHeaders,
         db_folder: str = "data",
         logging_folder: str = "logs",
+        cfg_folder: str = "config",
     ) -> None:
         self._setup: bool = False
         self._data_path: Path | None = None
@@ -49,6 +58,10 @@
         self._db_engine: sql.Engine | None = None
         self._logging_dir: Path | None = None
         self._logging_folder = logging_folder
+        self._cfg_path: Path | None = None
+        self._cfg_folder = cfg_folder
+        self._cfg_loader: LazyCfgLoader | None = None
+        self._cfg: Config | None = None
         self._creds: ApiCredentials | None = None
         self._base_url: str | None = None
         self._headers = base_headers
@@ -59,6 +72,7 @@
         # at this point: no logging configured
         assert not self._setup, "tried to setup session twice"
         self._setup_logging()
+        self._setup_config()
         self._setup_db_management()
         self._setup = True
         logger.info("[SESSION] Setup procedure successful")
@@ -68,6 +82,32 @@
         assert self._data_path is not None, "accessed data path not set"
         return self._data_path

+    @property
+    def cfg_path(self) -> Path:
+        if self._cfg_path is not None and self._setup:
+            return self._cfg_path
+        root = (self.data_path / self._cfg_folder).resolve()
+        cfg_path = root / CFG_FILENAME
+        if not root.exists():
+            root.mkdir(parents=False)
+        self._cfg_path = cfg_path
+        return self._cfg_path
+
+    @property
+    def cfg(self) -> Config:
+        assert self._cfg is not None, "tried to access not set config from session"
+        return self._cfg
+
+    def _setup_config(self) -> None:
+        if not self.cfg_path.exists():
+            src_cfg = LIB_PATH / CFG_FILENAME
+            shutil.copyfile(src_cfg, self.cfg_path)
+        self._cfg_loader = LazyCfgLoader(self.cfg_path)
+        self._cfg = self._cfg_loader.get()
+        logger.info("[SESSION] Successfully read and setup config")
+
     @property
     def db_engine(self) -> sql.Engine:
         assert self._db_engine is not None, "accessed database engine not set"
@@ -78,10 +118,10 @@ class Session:
         if self._db_path is not None and self._setup:
             return self._db_path
-        db_root = (self.data_path / self._db_folder).resolve()
-        db_path = db_root / "dopt-data.db"
-        if not db_root.exists():
-            db_root.mkdir(parents=False)
+        root = (self.data_path / self._db_folder).resolve()
+        db_path = root / "dopt-data.db"
+        if not root.exists():
+            root.mkdir(parents=False)
         self._db_path = db_path
         return self._db_path
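Taken together with test_session_setup_config later in this diff, the bootstrap works as follows: on first setup the packaged default config is copied from LIB_PATH into <data_path>/<cfg_folder>/dopt-cfg.toml and then parsed lazily. A rough usage sketch, assuming a writable data directory; the header dict and the path are placeholders, everything else comes from the diff above:

# Rough usage sketch of the new config bootstrap; base_headers and the data path are
# placeholders, not values from the repository.
from delta_barth.session import Session

base_headers = {"Content-Type": "application/json"}  # placeholder headers
session = Session(base_headers)                      # cfg_folder defaults to "config"
session.set_data_path("/tmp/dopt-data")              # placeholder data directory
session.setup()                                      # copies dopt-cfg.toml on first run, then loads it

# Pipelines now read user-tunable values from the session instead of module constants:
threshold = session.cfg.forecast.threshold_month_data_points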

View File

@@ -1,6 +1,5 @@
 import datetime
 from datetime import datetime as Datetime
-from pathlib import Path
 from unittest.mock import patch

 import numpy as np
@@ -431,27 +430,16 @@ def test_export_on_fail():
     assert res.status.description == status.description


-@patch("delta_barth.analysis.forecast.SALES_BASE_NUM_DATAPOINTS_MONTHS", 1)
 def test_pipeline_sales_forecast_SuccessDbWrite(exmpl_api_sales_prognosis_resp, session):
-    with patch(
-        "delta_barth.analysis.forecast.get_sales_prognosis_data",
-    ) as mock:
-        mock.return_value = exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS
-        with patch("delta_barth.analysis.forecast.SESSION", session):
-            result = fc.pipeline_sales_forecast(None)  # type: ignore
-            print(result)
-            assert result.status == STATUS_HANDLER.SUCCESS
-            assert len(result.response.daten) > 0
-
-
-@patch("delta_barth.analysis.forecast.SALES_BASE_NUM_DATAPOINTS_MONTHS", 1)
-def test_pipeline_sales_forecast_FailDbWrite(exmpl_api_sales_prognosis_resp):
-    with patch(
-        "delta_barth.analysis.forecast.get_sales_prognosis_data",
-    ) as mock:
-        mock.return_value = exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS
+    with (
+        patch(
+            "delta_barth.analysis.forecast.get_sales_prognosis_data",
+        ) as get_mock,
+        patch("delta_barth.analysis.forecast.SESSION", session) as sess_mock,
+    ):
+        get_mock.return_value = exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS
+        sess_mock.cfg.forecast.threshold_month_data_points = 1
         result = fc.pipeline_sales_forecast(None)  # type: ignore
-        print(result)
         assert result.status == STATUS_HANDLER.SUCCESS
         assert len(result.response.daten) > 0

View File

@@ -8,6 +8,7 @@ from unittest.mock import patch
 import pandas as pd
 import pytest
+import tomli_w

 import delta_barth.session
 from delta_barth.api.requests import SalesPrognosisResponse
@@ -42,6 +43,19 @@ def pth_dummy_cfg() -> Path:
     return data_pth


+@pytest.fixture(scope="function")
+def pth_cfg(pth_dummy_cfg, tmp_path) -> Path:
+    with open(pth_dummy_cfg, "rb") as file:
+        cfg_data = tomllib.load(file)
+    target = tmp_path / "dummy_cfg.toml"
+    target.touch()
+    with open(target, "wb") as file:
+        tomli_w.dump(cfg_data, file)
+    return target
+
+
 @pytest.fixture(scope="session")
 def sales_data_real() -> pd.DataFrame:
     pwd = Path.cwd()

View File

@@ -1,26 +1,10 @@
 import tomllib
-from pathlib import Path

-import pytest
 import tomli_w

 from delta_barth import config


-@pytest.fixture(scope="function")
-def pth_cfg(pth_dummy_cfg, tmp_path) -> Path:
-    with open(pth_dummy_cfg, "rb") as file:
-        cfg_data = tomllib.load(file)
-    target = tmp_path / "dummy_cfg.toml"
-    target.touch()
-    with open(target, "wb") as file:
-        tomli_w.dump(cfg_data, file)
-    return target
-
-
-@pytest.mark.new
 def test_CfgLoader_Init(pth_cfg):
     loader = config.LazyCfgLoader(pth_cfg)
@@ -28,7 +12,6 @@ def test_CfgLoader_Init(pth_cfg):
     assert loader._cfg is None


-@pytest.mark.new
 def test_CfgLoader_Get(pth_cfg):
     loader = config.LazyCfgLoader(pth_cfg)
@@ -37,7 +20,6 @@ def test_CfgLoader_Get(pth_cfg):
     assert parsed_cfg.forecast.threshold_month_data_points == 28


-@pytest.mark.new
 def test_CfgLoader_Reload(pth_cfg):
     loader = config.LazyCfgLoader(pth_cfg)
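These tests pin down three behaviours of LazyCfgLoader: it is constructed with the config path, it parses nothing until get() is called, and it can reload the file. A minimal self-contained sketch of that contract, writing a throwaway TOML file in place of the pth_cfg fixture:

# Self-contained sketch of the LazyCfgLoader contract asserted by these tests;
# the temporary file stands in for the pth_cfg fixture.
import tempfile
from pathlib import Path

from delta_barth import config

cfg_file = Path(tempfile.mkdtemp()) / "dummy_cfg.toml"
cfg_file.write_text("[forecast]\nthreshold_month_data_points = 28\n")

loader = config.LazyCfgLoader(cfg_file)
assert loader._cfg is None                                    # lazy: nothing parsed yet
parsed_cfg = loader.get()                                     # first access reads the TOML
assert parsed_cfg.forecast.threshold_month_data_points == 28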

View File

@@ -45,14 +45,17 @@ def test_write_performance_metrics_FailStartingTime(session):
     )


-@patch("delta_barth.analysis.forecast.SALES_BASE_NUM_DATAPOINTS_MONTHS", 1)
-def test_sales_prognosis_pipeline(exmpl_api_sales_prognosis_resp, session):
-    with patch(
-        "delta_barth.analysis.forecast.get_sales_prognosis_data",
-    ) as mock:
-        mock.return_value = (exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS)
-        with patch("delta_barth.pipelines.SESSION", session):
-            json_export = pl.pipeline_sales_forecast(None, None)
+def test_sales_prognosis_pipeline(exmpl_api_sales_prognosis_resp, session, monkeypatch):
+    with (
+        patch(
+            "delta_barth.analysis.forecast.get_sales_prognosis_data",
+        ) as get_mock,
+        patch("delta_barth.pipelines.SESSION", session),
+        patch("delta_barth.analysis.forecast.SESSION", session) as sess_mock,
+    ):
+        get_mock.return_value = (exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS)
+        sess_mock.cfg.forecast.threshold_month_data_points = 1
+        json_export = pl.pipeline_sales_forecast(None, None)

     assert isinstance(json_export, str)
     parsed_resp = json.loads(json_export)

View File

@@ -62,6 +62,24 @@ def test_session_setup_db_management(tmp_path):
     assert db_path.exists()


+def test_session_setup_config(tmp_path, pth_cfg):
+    str_path = str(tmp_path)
+    foldername: str = "cfg_test"
+    target_cfg_dir = tmp_path / foldername
+    session = delta_barth.session.Session(HTTP_BASE_CONTENT_HEADERS, cfg_folder=foldername)
+    session.set_data_path(str_path)
+    cfg_path = session.cfg_path
+    assert cfg_path.parent.exists()
+    assert cfg_path.parent == target_cfg_dir
+    assert not cfg_path.exists()
+    session.setup()
+    cfg_path2 = session.cfg_path
+    assert cfg_path2 == cfg_path
+    assert session._cfg is not None
+    assert cfg_path.exists()
+    assert session.cfg.forecast.threshold_month_data_points == 28
+
+
 @patch("delta_barth.logging.ENABLE_LOGGING", True)
 @patch("delta_barth.logging.LOGGING_TO_FILE", True)
 @patch("delta_barth.logging.LOGGING_TO_STDERR", True)