add option for multiple company IDs, related to #26

This commit is contained in:
Florian Förster 2025-04-30 14:46:23 +02:00
parent 248b811786
commit 690431472c
7 changed files with 24 additions and 26 deletions

View File

@@ -403,13 +403,13 @@ def _export_on_fail(
def pipeline_sales_forecast(
session: Session,
company_id: int | None = None,
company_ids: list[int] | None = None,
start_date: Datetime | None = None,
) -> SalesPrognosisResultsExport:
logger_pipelines.info("[PIPELINES] Starting main sales forecast pipeline...")
response, status = get_sales_prognosis_data(
session,
company_id=company_id,
company_ids=company_ids,
start_date=start_date,
)
if status != STATUS_HANDLER.SUCCESS:

View File

@@ -20,7 +20,7 @@ if TYPE_CHECKING:
# ** sales data
# ** import
class SalesPrognosisRequestP(BaseModel):
FirmaId: SkipValidation[int | None]
FirmaIds: SkipValidation[list[int] | None]
BuchungsDatum: SkipValidation[Datetime | None]
@@ -55,20 +55,14 @@ class SalesPrognosisResultsExport(ExportResponse):
def get_sales_prognosis_data(
session: Session,
company_id: int | None = None,
company_ids: list[int] | None = None,
start_date: Datetime | None = None,
) -> tuple[SalesPrognosisResponse, Status]:
# TODO check elimination of assertion for login, #25
# _, status = session.assert_login()
# if status != STATUS_HANDLER.SUCCESS:
# response = SalesPrognosisResponse(daten=tuple())
# return response, status
ROUTE: Final[str] = "verkauf/umsatzprognosedaten"
URL: Final = combine_route(session.base_url, ROUTE)
sales_prog_req = SalesPrognosisRequestP(
FirmaId=company_id,
FirmaIds=company_ids,
BuchungsDatum=start_date,
)
empty_response = SalesPrognosisResponse(daten=tuple())

View File

@@ -50,7 +50,7 @@ class KnownDelBarApiErrorCodes(enum.Enum):
# ** API
API_CON_TIMEOUT: Final[float] = 10.0 # secs to response
API_CON_TIMEOUT: Final[float] = 20.0 # secs to response
MAX_LOGIN_RETRIES: Final[int] = 1
# ** API response parsing

View File

@@ -44,14 +44,14 @@ def _write_performance_metrics_wrapped(
def pipeline_sales_forecast(
company_id: int | None,
company_ids: list[int] | None,
start_date: Datetime | None,
) -> JsonExportResponse:
PIPELINE_NAME: Final[str] = "sales_forecast"
logger.info("[EXT-CALL PIPELINES] Starting main sales forecast pipeline...")
t_start = time.perf_counter_ns()
result = forecast.pipeline_sales_forecast(
SESSION, company_id=company_id, start_date=start_date
SESSION, company_ids=company_ids, start_date=start_date
)
export = JsonExportResponse(result.model_dump_json())
t_end = time.perf_counter_ns()

View File

@@ -432,6 +432,8 @@ def test_export_on_fail():
@patch("delta_barth.session.CFG_HOT_RELOAD", False)
def test_pipeline_sales_forecast_SuccessDbWrite(exmpl_api_sales_prognosis_resp, session):
date = Datetime(2023, 8, 15)
company_ids = [5661, 1027, 1024]
with (
patch(
"delta_barth.analysis.forecast.get_sales_prognosis_data",
@@ -440,7 +442,7 @@ def test_pipeline_sales_forecast_SuccessDbWrite(exmpl_api_sales_prognosis_resp,
):
get_mock.return_value = exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS
sess_mock.cfg.forecast.threshold_month_data_points = 1
result = fc.pipeline_sales_forecast(None) # type: ignore
result = fc.pipeline_sales_forecast(None, company_ids, date) # type: ignore
assert result.status == STATUS_HANDLER.SUCCESS
assert len(result.response.daten) > 0

View File

@@ -11,7 +11,7 @@ def test_get_sales_prognosis_data_Success(session):
resp, status = session.login()
# test without company ID
assert status.code == 0
date = Datetime(2022, 6, 1)
date = Datetime(2023, 12, 15)
resp, status = requests_.get_sales_prognosis_data(session, None, date)
assert status.code == 0
assert len(resp.daten) > 0
@@ -21,26 +21,25 @@ def test_get_sales_prognosis_data_Success(session):
assert len(resp.daten) == 0
# test with company ID
assert status.code == 0
date = Datetime(2022, 6, 1)
company_id = 1024
resp, status = requests_.get_sales_prognosis_data(session, company_id, date)
date = Datetime(2023, 8, 15)
company_ids = [5661, 1027]
resp, status = requests_.get_sales_prognosis_data(session, company_ids, date)
assert status.code == 0
assert len(resp.daten) > 0
date = Datetime(2030, 1, 1)
resp, status = requests_.get_sales_prognosis_data(session, company_id, date)
resp, status = requests_.get_sales_prognosis_data(session, company_ids, date)
assert status.code == 0
assert len(resp.daten) == 0
# test with non-existent company ID
assert status.code == 0
date = Datetime(2022, 6, 1)
company_id = 1000024
resp, status = requests_.get_sales_prognosis_data(session, company_id, date)
# TODO check if this behaviour is still considered "successful"
company_ids = [1000024]
resp, status = requests_.get_sales_prognosis_data(session, company_ids, date)
assert status.code == 0
assert len(resp.daten) == 0
# test without date
company_id = 1024
resp, status = requests_.get_sales_prognosis_data(session, company_id, None)
company_ids = [1024]
resp, status = requests_.get_sales_prognosis_data(session, company_ids, None)
assert status.code == 0
assert len(resp.daten) > 0
# test without filters

View File

@@ -1,4 +1,5 @@
import json
from datetime import datetime as Datetime
from unittest.mock import patch
import pytest
@@ -47,6 +48,8 @@ def test_write_performance_metrics_FailStartingTime(session):
@patch("delta_barth.session.CFG_HOT_RELOAD", False)
def test_sales_prognosis_pipeline(exmpl_api_sales_prognosis_resp, session, monkeypatch):
date = Datetime(2023, 8, 15)
company_ids = [5661, 1027, 1024]
with (
patch(
"delta_barth.analysis.forecast.get_sales_prognosis_data",
@@ -56,7 +59,7 @@ def test_sales_prognosis_pipeline(exmpl_api_sales_prognosis_resp, session, monke
):
get_mock.return_value = (exmpl_api_sales_prognosis_resp, STATUS_HANDLER.SUCCESS)
sess_mock.cfg.forecast.threshold_month_data_points = 1
json_export = pl.pipeline_sales_forecast(None, None)
json_export = pl.pipeline_sales_forecast(company_ids, date)
assert isinstance(json_export, str)
parsed_resp = json.loads(json_export)