diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py
index 0d38b63f..00558ffb 100644
--- a/simvue/api/objects/alert/events.py
+++ b/simvue/api/objects/alert/events.py
@@ -87,6 +87,7 @@ def new(
             _offline=offline,
         )
         _alert._staging |= _alert_definition
+        _alert._params = {"deduplicate": True}
         return _alert
diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py
index 2fb74f06..e9340873 100644
--- a/simvue/api/objects/alert/metrics.py
+++ b/simvue/api/objects/alert/metrics.py
@@ -105,6 +105,8 @@ def new(
             _offline=offline,
         )
         _alert._staging |= _alert_definition
+        _alert._params = {"deduplicate": True}
+
         return _alert
@@ -194,6 +196,7 @@ def new(
             _offline=offline,
         )
         _alert._staging |= _alert_definition
+        _alert._params = {"deduplicate": True}
         return _alert
diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py
index 9ddcd6e1..7fdcee43 100644
--- a/simvue/api/objects/alert/user.py
+++ b/simvue/api/objects/alert/user.py
@@ -57,7 +57,7 @@ def new(
             whether this alert should be created locally, default is False
         """
-        return UserAlert(
+        _alert = UserAlert(
             name=name,
             description=description,
             notification=notification,
@@ -66,6 +66,8 @@ def new(
             _read_only=False,
             _offline=offline,
         )
+        _alert._params = {"deduplicate": True}
+
+        return _alert
 
     @classmethod
     def get(
diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py
index dac009f5..0b5af04d 100644
--- a/simvue/api/objects/artifact/base.py
+++ b/simvue/api/objects/artifact/base.py
@@ -97,6 +97,7 @@ def _upload(self, file: io.BytesIO) -> None:
         _response = sv_post(
             url=_url,
             headers={},
+            params={},
             is_json=False,
             files={"file": file},
             data=self._init_data.get("fields"),
diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py
index 07d23e9c..f6f61762 100644
--- a/simvue/api/objects/base.py
+++ b/simvue/api/objects/base.py
@@ -174,6 +174,8 @@ def __init__(
             else {}
         )
 
+        self._params: dict[str, str] = {}
+
         self._staging: dict[str, typing.Any] = {}
 
         # If this object is read-only, but not a local construction, make an API call
@@ -417,6 +419,7 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]:
         _response = sv_post(
             url=f"{self._base_url}",
             headers=self._headers | {"Content-Type": "application/msgpack"},
+            params=self._params,
             data=kwargs,
             is_json=is_json,
         )
@@ -457,7 +460,7 @@ def _put(self, **kwargs) -> dict[str, typing.Any]:
 
         return get_json_from_response(
             response=_response,
-            expected_status=[http.HTTPStatus.OK],
+            expected_status=[http.HTTPStatus.OK, http.HTTPStatus.CONFLICT],
             scenario=f"Creation of {self._label} '{self._identifier}",
         )
diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py
index dfb21c1f..f2b53f23 100644
--- a/simvue/api/objects/run.py
+++ b/simvue/api/objects/run.py
@@ -227,12 +227,18 @@ def notifications(
 
     @property
     @staging_check
-    def alerts(self) -> typing.Generator[str, None, None]:
-        for alert in self.get_alert_details():
-            yield alert["id"]
+    def alerts(self) -> list[str]:
+        if self._offline:
+            return self._get_attribute("alerts")
+
+        return [alert["id"] for alert in self.get_alert_details()]
 
     def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, None]:
         """Retrieve the full details of alerts for this run"""
+        if self._offline:
+            raise RuntimeError(
+                "Cannot get alert details from an offline run - use .alerts to access a list of IDs instead"
+            )
         for alert in self._get_attribute("alerts"):
             yield alert["alert"]
 
@@ -240,9 +246,7 @@ def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, None]:
     @write_only
     @pydantic.validate_call
     def alerts(self, alerts: list[str]) -> None:
-        self._staging["alerts"] = [
-            alert for alert in alerts if alert not in self._staging.get("alerts", [])
-        ]
+        self._staging["alerts"] = list(set(self._staging.get("alerts", []) + alerts))
 
     @property
     @staging_check
diff --git a/simvue/api/request.py b/simvue/api/request.py
index 3ebdb86b..8dd6a8bd 100644
--- a/simvue/api/request.py
+++ b/simvue/api/request.py
@@ -64,6 +64,7 @@ def is_retryable_exception(exception: Exception) -> bool:
 def post(
     url: str,
     headers: dict[str, str],
+    params: dict[str, str],
     data: typing.Any,
     is_json: bool = True,
     files: dict[str, typing.Any] | None = None,
@@ -76,6 +77,8 @@ def post(
         URL to post to
     headers : dict[str, str]
         headers for the post request
+    params : dict[str, str]
+        query parameters for the post request
     data : dict[str, typing.Any]
         data to post
     is_json : bool, optional
@@ -95,7 +98,12 @@ def post(
     logging.debug(f"POST: {url}\n\tdata={data_sent}")
 
     response = requests.post(
-        url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT, files=files
+        url,
+        headers=headers,
+        params=params,
+        data=data_sent,
+        timeout=DEFAULT_API_TIMEOUT,
+        files=files,
     )
 
     if response.status_code == http.HTTPStatus.UNPROCESSABLE_ENTITY:
diff --git a/simvue/run.py b/simvue/run.py
index cc586b30..2c358fbd 100644
--- a/simvue/run.py
+++ b/simvue/run.py
@@ -29,7 +29,6 @@
 import click
 import psutil
 
-from simvue.api.objects.alert.base import AlertBase
 from simvue.api.objects.alert.fetch import Alert
 from simvue.api.objects.folder import Folder
 from simvue.exception import SimvueRunError
@@ -695,6 +694,7 @@ def init(
         self._sv_obj.tags = tags
         self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment()
         self._sv_obj.heartbeat_timeout = timeout
+        self._sv_obj.alerts = []
        self._sv_obj.notifications = notification
 
         if self._status == "running":
@@ -1642,48 +1642,26 @@ def add_alerts(
             try:
                 if alerts := Alert.get(offline=self._user_config.run.mode == "offline"):
                     for alert in alerts:
-                        if alert.name in names:
-                            ids.append(alert.id)
+                        if alert[1].name in names:
+                            ids.append(alert[1].id)
+                else:
+                    self._error("No existing alerts")
+                    return False
             except RuntimeError as e:
                 self._error(f"{e.args[0]}")
                 return False
-            else:
-                self._error("No existing alerts")
-                return False
         elif not names and not ids:
             self._error("Need to provide alert ids or alert names")
             return False
 
         # Avoid duplication
-        self._sv_obj.alerts = list(set(self._sv_obj.alerts + [ids]))
+        _deduplicated = list(set(self._sv_obj.alerts + ids))
+        self._sv_obj.alerts = _deduplicated
         self._sv_obj.commit()
-        return False
-
-    def _attach_alert_to_run(self, alert: AlertBase) -> str | None:
-        # Check if the alert already exists
-        _alert_id: str | None = None
-
-        for _, _existing_alert in Alert.get(
-            offline=self._user_config.run.mode == "offline"
-        ):
-            if _existing_alert.compare(alert):
-                _alert_id = _existing_alert.id
-                logger.info("Existing alert found with id: %s", _existing_alert.id)
-                break
-
-        if not _alert_id:
-            alert.commit()
-            _alert_id = alert.id
-
-        self._sv_obj.alerts = [_alert_id]
-
-        self._sv_obj.commit()
-
-        return _alert_id
+        return True
 
     @skip_if_failed("_aborted", "_suppress_errors", None)
-    @check_run_initialised
     @pydantic.validate_call
     def create_metric_range_alert(
         self,
@@ -1701,6 +1679,7 @@ def create_metric_range_alert(
         ] = "average",
         notification: typing.Literal["email", "none"] = "none",
         trigger_abort: bool = False,
+        attach_to_run: bool = True,
     ) -> str | None:
         """Creates a metric range alert with the specified name (if it doesn't
         exist) and applies it to the current run. If alert already exists it will
@@ -1730,6 +1709,8 @@ def create_metric_range_alert(
             whether to notify on trigger, by default "none"
         trigger_abort : bool, optional
             whether this alert can trigger a run abort, default False
+        attach_to_run : bool, optional
+            whether to attach this alert to the current run, default True
 
         Returns
         -------
@@ -1751,10 +1732,12 @@ def create_metric_range_alert(
             offline=self._user_config.run.mode == "offline",
         )
         _alert.abort = trigger_abort
-        return self._attach_alert_to_run(_alert)
+        _alert.commit()
+        if attach_to_run:
+            self.add_alerts(ids=[_alert.id])
+        return _alert.id
 
     @skip_if_failed("_aborted", "_suppress_errors", None)
-    @check_run_initialised
     @pydantic.validate_call
     def create_metric_threshold_alert(
         self,
@@ -1771,6 +1754,7 @@ def create_metric_threshold_alert(
         ] = "average",
         notification: typing.Literal["email", "none"] = "none",
         trigger_abort: bool = False,
+        attach_to_run: bool = True,
     ) -> str | None:
         """Creates a metric threshold alert with the specified name (if it doesn't
         exist) and applies it to the current run. If alert already exists it will
@@ -1798,6 +1782,8 @@ def create_metric_threshold_alert(
             whether to notify on trigger, by default "none"
         trigger_abort : bool, optional
             whether this alert can trigger a run abort, default False
+        attach_to_run : bool, optional
+            whether to attach this alert to the current run, default True
 
         Returns
         -------
@@ -1817,11 +1803,14 @@ def create_metric_threshold_alert(
             notification=notification,
             offline=self._user_config.run.mode == "offline",
         )
+
         _alert.abort = trigger_abort
-        return self._attach_alert_to_run(_alert)
+        _alert.commit()
+        if attach_to_run:
+            self.add_alerts(ids=[_alert.id])
+        return _alert.id
 
     @skip_if_failed("_aborted", "_suppress_errors", None)
-    @check_run_initialised
     @pydantic.validate_call
     def create_event_alert(
         self,
@@ -1832,6 +1821,7 @@ def create_event_alert(
         frequency: pydantic.PositiveInt = 1,
         notification: typing.Literal["email", "none"] = "none",
         trigger_abort: bool = False,
+        attach_to_run: bool = True,
     ) -> str | None:
         """Creates an events alert with the specified name (if it doesn't
         exist) and applies it to the current run. If alert already exists it will
@@ -1849,6 +1839,8 @@ def create_event_alert(
             whether to notify on trigger, by default "none"
         trigger_abort : bool, optional
             whether this alert can trigger a run abort
+        attach_to_run : bool, optional
+            whether to attach this alert to the current run, default True
 
         Returns
         -------
@@ -1865,10 +1857,12 @@ def create_event_alert(
             offline=self._user_config.run.mode == "offline",
         )
         _alert.abort = trigger_abort
-        return self._attach_alert_to_run(_alert)
+        _alert.commit()
+        if attach_to_run:
+            self.add_alerts(ids=[_alert.id])
+        return _alert.id
 
     @skip_if_failed("_aborted", "_suppress_errors", None)
-    @check_run_initialised
     @pydantic.validate_call
     def create_user_alert(
         self,
@@ -1877,6 +1871,7 @@ def create_user_alert(
         description: str | None = None,
         notification: typing.Literal["email", "none"] = "none",
         trigger_abort: bool = False,
+        attach_to_run: bool = True,
     ) -> None:
         """Creates a user alert with the specified name (if it doesn't exist)
         and applies it to the current run. If alert already exists it will
@@ -1892,6 +1887,8 @@ def create_user_alert(
             whether to notify on trigger, by default "none"
         trigger_abort : bool, optional
             whether this alert can trigger a run abort, default False
+        attach_to_run : bool, optional
+            whether to attach this alert to the current run, default True
 
         Returns
         -------
@@ -1906,7 +1903,10 @@ def create_user_alert(
             offline=self._user_config.run.mode == "offline",
         )
         _alert.abort = trigger_abort
-        return self._attach_alert_to_run(_alert)
+        _alert.commit()
+        if attach_to_run:
+            self.add_alerts(ids=[_alert.id])
+        return _alert.id
 
     @skip_if_failed("_aborted", "_suppress_errors", False)
     @check_run_initialised
diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py
index 4352e97e..84807e54 100644
--- a/tests/functional/test_run_class.py
+++ b/tests/functional/test_run_class.py
@@ -716,6 +716,94 @@ def test_save_object(
         save_obj = array([1, 2, 3, 4])
     simvue_run.save_object(save_obj, "input", f"test_object_{object_type}")
 
+@pytest.mark.run
+def test_add_alerts() -> None:
+    _uuid = f"{uuid.uuid4()}".split("-")[0]
+
+    run = sv_run.Run()
+    run.init(
+        name="test_add_alerts",
+        folder="/simvue_unit_tests",
+        retention_period="1 min",
+        tags=["test_add_alerts"],
+        visibility="tenant"
+    )
+
+    _expected_alerts = []
+
+    # Create alerts, have them attach to run automatically
+    _id = run.create_event_alert(
+        name=f"event_alert_{_uuid}",
+        pattern = "test",
+    )
+    _expected_alerts.append(_id)
+    time.sleep(1)
+    # Retrieve run, check if alert has been added
+    _online_run = RunObject(identifier=run._id)
+    assert _id in _online_run.alerts
+
+    # Create another alert and attach to run
+    _id = run.create_metric_range_alert(
+        name=f"metric_range_alert_{_uuid}",
+        metric="test",
+        range_low=10,
+        range_high=100,
+        rule="is inside range",
+    )
+    _expected_alerts.append(_id)
+    time.sleep(1)
+    # Retrieve run, check both alerts have been added
+    _online_run.refresh()
+    assert sorted(_online_run.alerts) == sorted(_expected_alerts)
+
+    # Create another alert, do not attach to run
+    _id = run.create_metric_threshold_alert(
+        name=f"metric_threshold_alert_{_uuid}",
+        metric="test",
+        threshold=10,
+        rule="is above",
+        attach_to_run=False
+    )
+    time.sleep(1)
+    # Retrieve run, check alert has NOT been added
+    _online_run.refresh()
+    assert sorted(_online_run.alerts) == sorted(_expected_alerts)
+
+    # Try adding all three alerts using add_alerts
+    _expected_alerts.append(_id)
+    run.add_alerts(names=[f"event_alert_{_uuid}", f"metric_range_alert_{_uuid}", f"metric_threshold_alert_{_uuid}"])
+    time.sleep(1)
+
+    # Check that there is no duplication
+    _online_run.refresh()
+    assert sorted(_online_run.alerts) == sorted(_expected_alerts)
+
+    # Create another run without adding to run
+    _id = run.create_user_alert(
+        name=f"user_alert_{_uuid}",
+        attach_to_run=False
+    )
+    time.sleep(1)
+
+    # Check alert is not added
+    _online_run.refresh()
+    assert sorted(_online_run.alerts) == sorted(_expected_alerts)
+
+    # Try adding alerts with IDs, check there is no duplication
+    _expected_alerts.append(_id)
+    run.add_alerts(ids=_expected_alerts)
+    time.sleep(1)
+
+    _online_run.refresh()
+    assert sorted(_online_run.alerts) == sorted(_expected_alerts)
+
+    run.close()
+
+    client = sv_cl.Client()
+    client.delete_run(run._id)
+    for _id in _expected_alerts:
+        client.delete_alert(_id)
+
 @pytest.mark.run
 def test_abort_on_alert_process(mocker: pytest_mock.MockerFixture) -> None:
@@ -796,8 +884,8 @@ def testing_exit(status: int) -> None:
     run.add_process(identifier="forever_long", executable="bash", c="sleep 10")
     time.sleep(2)
     run.log_alert(alert_id, "critical")
-    _alert = Alert(identifier=alert_id)
     time.sleep(1)
+    _alert = Alert(identifier=alert_id)
     assert _alert.get_status(run.id) == "critical"
     counter = 0
     while run._status != "terminated" and counter < 15:
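
For reference, a minimal usage sketch of the workflow this patch introduces, assuming a reachable Simvue server; the run name, folder, and alert name below are purely illustrative. Alerts created by the create_*_alert methods are now committed immediately, attached to the run only when attach_to_run is True, and add_alerts de-duplicates against alerts already attached to the run.

import simvue

run = simvue.Run()
run.init(name="example_run", folder="/examples")  # illustrative name and folder

# Create an alert but do not attach it to this run
_alert_id = run.create_user_alert(
    name="manual_review",  # illustrative alert name
    attach_to_run=False,
)

# Attach it later; repeating the call with the same ID does not duplicate it
run.add_alerts(ids=[_alert_id])
run.add_alerts(ids=[_alert_id])

run.close()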