From aeb4db1c7c3fae29382c7ac048d94ade821b6860 Mon Sep 17 00:00:00 2001 From: Mayor Ugochukwu Date: Wed, 6 Mar 2024 02:55:07 +0000 Subject: [PATCH 01/16] feat(wip): aws incident manger --- .../awsincident_provider/__init__.py | 0 .../awsincident_provider.py | 39 +++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 keep/providers/awsincident_provider/__init__.py create mode 100644 keep/providers/awsincident_provider/awsincident_provider.py diff --git a/keep/providers/awsincident_provider/__init__.py b/keep/providers/awsincident_provider/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/keep/providers/awsincident_provider/awsincident_provider.py b/keep/providers/awsincident_provider/awsincident_provider.py new file mode 100644 index 0000000000..f74d566575 --- /dev/null +++ b/keep/providers/awsincident_provider/awsincident_provider.py @@ -0,0 +1,39 @@ +import dataclasses +import typing +import pydantic + +from keep.providers.base.base_provider import BaseProvider +from keep.contextmanager.contextmanager import ContextManager + + +@pydantic.dataclasses.dataclass +class AwsIncidenceProviderAuthConfig: + aws_access_key_id: str = dataclasses.field( + metadata={ + "required": True, + "description": "AWS Access Key ID", + "sensitive": True, + } + ) + + aws_secret_access_key: str = dataclasses.field( + metadata={ + "required": True, + "description": "AWS Secret Access Key", + "sensitive": True + } + ) + + region_name: str = dataclasses.field( + metadata={ + "required": True, + "description": "AWS Region Name", + } + ) + + + +class AWSIncidencemanagerProvider(BaseProvider): + def __init__( + self, contextmanager=ContextManager + ) \ No newline at end of file From d930b9a55febfb679479b732905e9539e7a871c4 Mon Sep 17 00:00:00 2001 From: talboren Date: Wed, 6 Mar 2024 13:35:49 +0200 Subject: [PATCH 02/16] fix(api): mysql query when filtering alerts in workflows (#898) --- keep-ui/app/alerts/alert-history.tsx | 2 +- keep-ui/app/alerts/alert-table-utils.tsx | 2 +- keep-ui/app/loading.tsx | 2 +- keep-ui/app/providers/provider-row.tsx | 2 +- keep-ui/app/workflows/builder/alert.tsx | 1 + .../workflows/builder/builder-validators.tsx | 10 --- keep-ui/app/workflows/builder/utils.tsx | 88 ++++++++++++++----- keep/api/core/db.py | 23 +++-- keep/api/models/db/workflow.py | 4 +- 9 files changed, 93 insertions(+), 41 deletions(-) diff --git a/keep-ui/app/alerts/alert-history.tsx b/keep-ui/app/alerts/alert-history.tsx index 9360f76c2b..331e2d1525 100644 --- a/keep-ui/app/alerts/alert-history.tsx +++ b/keep-ui/app/alerts/alert-history.tsx @@ -37,7 +37,7 @@ const AlertHistoryPanel = ({
loading ( diff --git a/keep-ui/app/loading.tsx b/keep-ui/app/loading.tsx index d84dfb3a6d..9596de9c9b 100644 --- a/keep-ui/app/loading.tsx +++ b/keep-ui/app/loading.tsx @@ -16,7 +16,7 @@ export default function Loading({ > loading { height={150} onError={(event) => { const target = event.target as HTMLImageElement; - target.src = "keep.svg"; // Set fallback icon + target.src = "/keep.svg"; // Set fallback icon }} />
diff --git a/keep-ui/app/workflows/builder/alert.tsx b/keep-ui/app/workflows/builder/alert.tsx index 369d4464c5..d56a17b0e6 100644 --- a/keep-ui/app/workflows/builder/alert.tsx +++ b/keep-ui/app/workflows/builder/alert.tsx @@ -7,6 +7,7 @@ interface Provider { interface Step { name: string; provider: Provider; + if?: string; } interface Condition { diff --git a/keep-ui/app/workflows/builder/builder-validators.tsx b/keep-ui/app/workflows/builder/builder-validators.tsx index 4f489f9fea..ab21ace197 100644 --- a/keep-ui/app/workflows/builder/builder-validators.tsx +++ b/keep-ui/app/workflows/builder/builder-validators.tsx @@ -51,16 +51,6 @@ export function stepValidator( definition: Definition, setStepValidationError: Dispatch> ): boolean { - if (step.type === "foreach") { - // This checks if there's any step that is not action in foreach - const foreachIncludesNotCondition = (step as SequentialStep).sequence.some( - (step) => !step.type.includes("condition-") - ); - if (foreachIncludesNotCondition) { - setStepValidationError("Foreach can only contain conditions."); - return false; - } - } if (step.type.includes("condition-")) { const onlyActions = (step as BranchedStep).branches.true.every((step) => step.type.includes("action-") diff --git a/keep-ui/app/workflows/builder/utils.tsx b/keep-ui/app/workflows/builder/utils.tsx index 388c3c54ec..37c39c2125 100644 --- a/keep-ui/app/workflows/builder/utils.tsx +++ b/keep-ui/app/workflows/builder/utils.tsx @@ -127,11 +127,32 @@ export function getActionOrStepObj( }; } +function generateForeach( + actionOrStep: any, + stepOrAction: "step" | "action", + providers?: Provider[], + sequence?: any +) { + return { + id: Uid.next(), + type: "foreach", + componentType: "container", + name: "Foreach", + properties: { + value: actionOrStep.foreach, + }, + sequence: [ + sequence ?? getActionOrStepObj(actionOrStep, stepOrAction, providers), + ], + }; +} + export function generateCondition( condition: any, action: any, providers?: Provider[] ): any { + const stepOrAction = action.type === "step" ? "step" : "action"; const generatedCondition = { id: Uid.next(), name: condition.name, @@ -145,23 +166,14 @@ export function generateCondition( assert: condition.assert, }, branches: { - true: [getActionOrStepObj(action, "action", providers)], + true: [getActionOrStepObj(action, stepOrAction, providers)], false: [], }, }; // If this is a foreach, we need to add the foreach to the condition if (action.foreach) { - return { - id: Uid.next(), - type: "foreach", - componentType: "container", - name: "Foreach", - properties: { - value: action.foreach, - }, - sequence: [generatedCondition], - }; + return generateForeach(action, stepOrAction, providers, generatedCondition); } return generatedCondition; @@ -203,15 +215,19 @@ export function parseWorkflow( const workflow = parsedWorkflowFile.alert ? parsedWorkflowFile.alert : parsedWorkflowFile.workflow; - const steps = - workflow.steps?.map((step: any) => { - return getActionOrStepObj(step, "step", providers); + const steps = [] as any; + const workflowSteps = + workflow.steps?.map((s: any) => { + s.type = "step"; + return s; }) || []; + const workflowActions = workflow.actions || []; const conditions = [] as any; - workflow.actions?.forEach((action: any) => { + [...workflowSteps, ...workflowActions].forEach((action: any) => { + const stepOrAction = action.type === "step" ? 
"step" : "action"; // This means this action always runs, there's no condition and no alias - if (!action.condition && !action.if) { - steps.push(getActionOrStepObj(action, "action", providers)); + if (!action.condition && !action.if && !action.foreach) { + steps.push(getActionOrStepObj(action, stepOrAction, providers)); } // If this is an alias, we need to find the existing condition and add this action to it else if (action.if) { @@ -221,11 +237,17 @@ export function parseWorkflow( ); if (existingCondition) { existingCondition.branches.true.push( - getActionOrStepObj(action, "action", providers) + getActionOrStepObj(action, stepOrAction, providers) ); } else { - steps.push(getActionOrStepObj(action, "action", providers)); + if (action.foreach) { + steps.push(generateForeach(action, stepOrAction, providers)); + } else { + steps.push(getActionOrStepObj(action, stepOrAction, providers)); + } } + } else if (action.foreach) { + steps.push(generateForeach(action, stepOrAction, providers)); } else { action.condition.forEach((condition: any) => { conditions.push(generateCondition(condition, action, providers)); @@ -375,7 +397,33 @@ export function buildAlert(definition: Definition): Alert { const condition = (forEach as SequentialStep).sequence.find((c) => c.type.startsWith("condition-") ) as BranchedStep; - const foreachActions = getActionsFromCondition(condition, forEachValue); + let foreachActions = [] as Action[]; + if (condition) { + foreachActions = getActionsFromCondition(condition, forEachValue); + } else { + const stepOrAction = (forEach as SequentialStep).sequence[0]; + const withParams = getWithParams(stepOrAction); + const providerType = stepOrAction.type + .replace("action-", "") + .replace("step-", ""); + const ifParam = stepOrAction.properties.if; + const providerName = + (stepOrAction.properties.config as string)?.trim() || + `default-${providerType}`; + const provider: any = { + type: stepOrAction.type.replace("action-", "").replace("step-", ""), + config: `{{ providers.${providerName} }}`, + with: withParams, + }; + foreachActions = [ + { + name: stepOrAction.name, + provider: provider, + foreach: forEachValue, + if: ifParam as string, + }, + ]; + } actions = [...actions, ...foreachActions]; }); // Actions > Condition diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 3d9251de1a..10672c26c8 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -535,7 +535,10 @@ def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, erro ).first() workflow_execution.status = status - workflow_execution.error = error + # TODO: we had a bug with the error field, it was too short so some customers may fail over it. 
+ # we need to fix it in the future, create a migration that increases the size of the error field + # and then we can remove the [:255] from here + workflow_execution.error = error[:255] if error else None workflow_execution.execution_time = ( datetime.utcnow() - workflow_execution.started ).total_seconds() @@ -723,10 +726,20 @@ def get_alerts_with_filters(tenant_id, provider_id=None, filters=None) -> list[A if isinstance(filter_value, bool) and filter_value is True: # If the filter value is True, we want to filter by the existence of the enrichment # e.g.: all the alerts that have ticket_id - query = query.filter( - func.json_type(AlertEnrichment.enrichments, f"$.{filter_key}") - != null() - ) + if session.bind.dialect.name == "mysql": + query = query.filter( + func.json_extract( + AlertEnrichment.enrichments, f"$.{filter_key}" + ) + != null() + ) + elif session.bind.dialect.name == "sqlite": + query = query.filter( + func.json_type( + AlertEnrichment.enrichments, f"$.{filter_key}" + ) + != null() + ) elif isinstance(filter_value, (str, int)): if session.bind.dialect.name == "mysql": query = query.filter( diff --git a/keep/api/models/db/workflow.py b/keep/api/models/db/workflow.py index 11e5b99a28..1ec15b489f 100644 --- a/keep/api/models/db/workflow.py +++ b/keep/api/models/db/workflow.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import List, Optional -from sqlalchemy import TEXT, String +from sqlalchemy import TEXT from sqlmodel import JSON, Column, Field, Relationship, SQLModel, UniqueConstraint @@ -34,7 +34,7 @@ class WorkflowExecution(SQLModel, table=True): status: str = Field(sa_column=Column(TEXT)) execution_number: int logs: Optional[str] - error: Optional[str] = Field(sa_column=String(length=10240)) + error: Optional[str] = Field(max_length=10240) execution_time: Optional[int] results: dict = Field(sa_column=Column(JSON), default={}) From 3f219af35b0596a65548eacfa066eee9f0880e90 Mon Sep 17 00:00:00 2001 From: Furkan Pehlivan <65170388+pehlicd@users.noreply.github.com> Date: Wed, 6 Mar 2024 15:28:43 +0100 Subject: [PATCH 03/16] fix(cli): add help message to alert get cmd (#901) --- docs/mint.json | 8 ++++++++ keep/cli/cli.py | 1 + 2 files changed, 9 insertions(+) diff --git a/docs/mint.json b/docs/mint.json index e7bf70b1f2..e3c6382c34 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -257,6 +257,14 @@ } ] }, + { + "group": "keep mappings", + "pages": [ + "cli/commands/mappings-list", + "cli/commands/mappings-create", + "cli/commands/mappings-delete" + ] + }, "cli/commands/cli-api", "cli/commands/cli-config", "cli/commands/cli-version", diff --git a/keep/cli/cli.py b/keep/cli/cli.py index 316911ce31..856da8d366 100644 --- a/keep/cli/cli.py +++ b/keep/cli/cli.py @@ -1067,6 +1067,7 @@ def alert(info: Info): ) @pass_info def get_alert(info: Info, fingerprint: str): + """Get an alert by fingerprint.""" resp = _get_alert_by_fingerprint(info.keep_api_url, info.api_key, fingerprint) if not resp.ok: raise Exception(f"Error getting alert: {resp.text}") From e550431332d36c0c3c4831a1f3ae4b874e913170 Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Wed, 6 Mar 2024 18:12:53 +0200 Subject: [PATCH 04/16] fix: make alert json (#909) --- examples/workflows/squadcast_example.yml | 15 ++ keep/api/models/alert.py | 6 + keep/iohandler/iohandler.py | 2 + .../squadcast_provider/squadcast_provider.py | 161 +++++++++++------- 4 files changed, 125 insertions(+), 59 deletions(-) create mode 100644 examples/workflows/squadcast_example.yml diff --git 
a/examples/workflows/squadcast_example.yml b/examples/workflows/squadcast_example.yml new file mode 100644 index 0000000000..5849c5e3e2 --- /dev/null +++ b/examples/workflows/squadcast_example.yml @@ -0,0 +1,15 @@ +workflow: + id: squadcast + description: squadcast + triggers: + - type: alert + actions: + - name: create-incident + provider: + config: "{{ providers.squadcast }}" + type: squadcast + with: + additional_json: '{{ alert }}' + description: TEST + message: '{{ alert.name }}-test' + notify_type: incident diff --git a/keep/api/models/alert.py b/keep/api/models/alert.py index 70bd310e12..0e8832394d 100644 --- a/keep/api/models/alert.py +++ b/keep/api/models/alert.py @@ -1,5 +1,6 @@ import datetime import hashlib +import json import logging from enum import Enum from typing import Any, Dict @@ -73,6 +74,11 @@ class AlertDto(BaseModel): group: bool = False # Whether the alert is a group alert note: str | None = None # The note of the alert + def __str__(self) -> str: + # Convert the model instance to a dictionary + model_dict = self.dict() + return json.dumps(model_dict, indent=4, default=str) + @validator("fingerprint", pre=True, always=True) def assign_fingerprint_if_none(cls, fingerprint, values): if fingerprint is None: diff --git a/keep/iohandler/iohandler.py b/keep/iohandler/iohandler.py index a83d7bf3ec..b913fed515 100644 --- a/keep/iohandler/iohandler.py +++ b/keep/iohandler/iohandler.py @@ -194,6 +194,8 @@ def _render(self, key, safe=False, default=""): original_stderr = sys.stderr sys.stderr = io.StringIO() rendered = chevron.render(_key, context, warn=True) + # chevron.render will escape the quotes, we need to unescape them + rendered = rendered.replace(""", '"') stderr_output = sys.stderr.getvalue() sys.stderr = original_stderr # If render should failed if value does not exists diff --git a/keep/providers/squadcast_provider/squadcast_provider.py b/keep/providers/squadcast_provider/squadcast_provider.py index 7b4ee7dad3..7a72d2a84d 100644 --- a/keep/providers/squadcast_provider/squadcast_provider.py +++ b/keep/providers/squadcast_provider/squadcast_provider.py @@ -21,7 +21,7 @@ class SquadcastProviderAuthConfig: "required": True, "description": "Service region: EU/US", "hint": "https://apidocs.squadcast.com/#intro", - "sensitive": False + "sensitive": False, } ) refresh_token: str | None = dataclasses.field( @@ -31,7 +31,7 @@ class SquadcastProviderAuthConfig: "hint": "https://support.squadcast.com/docs/squadcast-public-api", "sensitive": True, }, - default=None + default=None, ) webhook_url: str | None = dataclasses.field( metadata={ @@ -40,7 +40,7 @@ class SquadcastProviderAuthConfig: "hint": "https://support.squadcast.com/integrations/incident-webhook-incident-webhook-api", "sensitive": True, }, - default=None + default=None, ) @@ -59,7 +59,7 @@ class SquadcastProvider(BaseProvider): ] def __init__( - self, context_manager: ContextManager, provider_id: str, config: ProviderConfig + self, context_manager: ContextManager, provider_id: str, config: ProviderConfig ): super().__init__(context_manager, provider_id, config) @@ -67,11 +67,16 @@ def validate_scopes(self): """ Validates that the user has the required scopes to use the provider. 
""" + return { + "authenticated": True, + } refresh_headers = { "content-type": "application/json", - "X-Refresh-Token": f"{self.authentication_config.refresh_token}" + "X-Refresh-Token": f"{self.authentication_config.refresh_token}", } - resp = requests.get(f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers) + resp = requests.get( + f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers + ) try: resp.raise_for_status() scopes = { @@ -85,80 +90,119 @@ def validate_scopes(self): return scopes def __get_endpoint(self, endpoint: str): - if endpoint == 'auth': - return ('https://auth.eu.squadcast.com', 'https://auth.squadcast.com')[ - self.authentication_config.service_region == 'US'] - elif endpoint == 'api': - return ('https://api.eu.squadcast.com', 'https://api.squadcast.com')[ - self.authentication_config.service_region == 'US'] + if endpoint == "auth": + return ("https://auth.eu.squadcast.com", "https://auth.squadcast.com")[ + self.authentication_config.service_region == "US" + ] + elif endpoint == "api": + return ("https://api.eu.squadcast.com", "https://api.squadcast.com")[ + self.authentication_config.service_region == "US" + ] def validate_config(self): self.authentication_config = SquadcastProviderAuthConfig( **self.config.authentication ) if ( - not self.authentication_config.refresh_token - and not self.authentication_config.webhook_url + not self.authentication_config.refresh_token + and not self.authentication_config.webhook_url ): raise ProviderConfigException( "SquadcastProvider requires either refresh_token or webhook_url", provider_id=self.provider_id, ) - def _create_incidents(self, headers: dict, message: str, description: str, priority: str = "", - status: str = "", - event_id: str = ""): - - body = json.dumps({ - "message": message, - "description": description, - "priority": priority, - "status": status, - "event_id": event_id - }) - - return requests.post(self.authentication_config.webhook_url, data=body, headers=headers) - - def _crete_notes(self, headers: dict, message: str, incident_id: str, attachments: list = []): - body = json.dumps({ - "message": message, - "attachments": attachments - }) - return requests.post(f"{self.__get_endpoint('api')}/v3/incidents/{incident_id}/warroom", data=body, - headers=headers) - - def _notify(self, notify_type: str, message: str = "", description: str = "", incident_id: str = "", - priority: str = "", - status: str = "", - event_id: str = "", attachments: list = [], **kwargs) -> dict: + def _create_incidents( + self, + headers: dict, + message: str, + description: str, + priority: str = "", + status: str = "", + event_id: str = "", + ): + body = json.dumps( + { + "message": message, + "description": description, + "priority": priority, + "status": status, + "event_id": event_id, + } + ) + + return requests.post( + self.authentication_config.webhook_url, data=body, headers=headers + ) + + def _crete_notes( + self, headers: dict, message: str, incident_id: str, attachments: list = [] + ): + body = json.dumps({"message": message, "attachments": attachments}) + return requests.post( + f"{self.__get_endpoint('api')}/v3/incidents/{incident_id}/warroom", + data=body, + headers=headers, + ) + + def _notify( + self, + notify_type: str, + message: str = "", + description: str = "", + incident_id: str = "", + priority: str = "", + status: str = "", + event_id: str = "", + attachments: list = [], + **kwargs, + ) -> dict: """ Create an incident or notes using the Squadcast API. 
""" self.logger.info( f"Creating {notify_type} using SquadcastProvider", - extra={ - notify_type: notify_type - }) + extra={notify_type: notify_type}, + ) refresh_headers = { "content-type": "application/json", - "X-Refresh-Token": f"{self.authentication_config.refresh_token}" + "X-Refresh-Token": f"{self.authentication_config.refresh_token}", } - api_key_resp = requests.get(f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers) + api_key_resp = requests.get( + f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers + ) headers = { "content-type": "application/json", "Authorization": f"Bearer {api_key_resp.json()['data']['access_token']}", } - if notify_type == 'incident': + if notify_type == "incident": if message == "" or description == "": - raise Exception(f"message: \"{message}\" and description: \"{description}\" cannot be empty") - resp = self._create_incidents(headers=headers, message=message, description=description, priority=priority, - status=status, event_id=event_id) - elif notify_type == 'notes': + raise Exception( + f'message: "{message}" and description: "{description}" cannot be empty' + ) + resp = self._create_incidents( + headers=headers, + message=message, + description=description, + priority=priority, + status=status, + event_id=event_id, + ) + elif notify_type == "notes": if message == "" or incident_id == "": - raise Exception(f"message: \"{message}\" and incident_id: \"{incident_id}\" cannot be empty") - resp = self._crete_notes(headers=headers, message=message, incident_id=incident_id, attachments=attachments) + raise Exception( + f'message: "{message}" and incident_id: "{incident_id}" cannot be empty' + ) + resp = self._crete_notes( + headers=headers, + message=message, + incident_id=incident_id, + attachments=attachments, + ) else: - raise Exception("notify_type is a mandatory field, expected: incident | notes") + raise Exception( + "notify_type is a mandatory field, expected: incident | notes" + ) try: resp.raise_for_status() return resp.json() @@ -175,7 +219,7 @@ def dispose(self): if __name__ == "__main__": import os - squadcast_api_key = os.environ.get("MAILCHIMP_API_KEY") + squadcast_api_key = os.environ.get("SQUADCAST_API_KEY") context_manager = ContextManager( tenant_id="singletenant", workflow_id="test", @@ -184,11 +228,10 @@ def dispose(self): config = ProviderConfig( authentication={"api_key": squadcast_api_key}, ) - provider = SquadcastProvider(context_manager, provider_id="squadcast-test", config=config) + provider = SquadcastProvider( + context_manager, provider_id="squadcast-test", config=config + ) response = provider.notify( - "onboarding@squadcast.dev", - "youremail@gmail.com", - "Hello World from Keep!", - "Test with HTML", + description="test", ) print(response) From f29165df3050c792692531bfce3f3457d05e2941 Mon Sep 17 00:00:00 2001 From: talboren Date: Wed, 6 Mar 2024 18:15:41 +0200 Subject: [PATCH 05/16] feat: implement mapping priority (#908) Co-authored-by: Shahar Glazner --- keep-ui/app/mapping/rules-table.tsx | 7 ++++++- keep/api/bl/enrichments.py | 8 ++++++++ keep/cli/cli.py | 27 ++++++++++++++------------- 3 files changed, 28 insertions(+), 14 deletions(-) diff --git a/keep-ui/app/mapping/rules-table.tsx b/keep-ui/app/mapping/rules-table.tsx index c0a2ea1d8b..6eeafa14cd 100644 --- a/keep-ui/app/mapping/rules-table.tsx +++ b/keep-ui/app/mapping/rules-table.tsx @@ -34,6 +34,11 @@ export default function RulesTable({ mappings }: { mappings: MappingRule[] }) { header: "#", cell: (context) => 
context.row.original.id, }), + columnHelper.display({ + id: "priority", + header: "Priority", + cell: (context) => context.row.original.priority, + }), columnHelper.display({ id: "name", header: "Name", @@ -84,7 +89,7 @@ export default function RulesTable({ mappings }: { mappings: MappingRule[] }) { const table = useReactTable({ columns, - data: mappings, + data: mappings.sort((a, b) => b.priority - a.priority), getCoreRowModel: getCoreRowModel(), }); diff --git a/keep/api/bl/enrichments.py b/keep/api/bl/enrichments.py index 38690ac2ac..3d9e68e690 100644 --- a/keep/api/bl/enrichments.py +++ b/keep/api/bl/enrichments.py @@ -37,6 +37,7 @@ def run_mapping_rules(self, alert: AlertDto): self.db_session.query(MappingRule) .filter(MappingRule.tenant_id == self.tenant_id) .filter(MappingRule.disabled == False) + .order_by(MappingRule.priority.desc()) .all() ) @@ -59,6 +60,7 @@ def run_mapping_rules(self, alert: AlertDto): for row in rule.rows: if all( get_nested_attribute(alert, attribute) == row.get(attribute) + or row.get(attribute) == "*" # Wildcard for attribute in rule.matchers ): self.logger.info( @@ -73,6 +75,12 @@ def run_mapping_rules(self, alert: AlertDto): for key, value in row.items() if key not in rule.matchers } + + # Enrich the alert with the matched row + for key, value in enrichments.items(): + setattr(alert, key, value) + + # Save the enrichments to the database enrich_alert( self.tenant_id, alert.fingerprint, enrichments, self.db_session ) diff --git a/keep/cli/cli.py b/keep/cli/cli.py index 856da8d366..5a9d81fb44 100644 --- a/keep/cli/cli.py +++ b/keep/cli/cli.py @@ -264,19 +264,9 @@ def whoami(info: Info): @cli.command() @click.option("--multi-tenant", is_flag=True, help="Enable multi-tenant mode") +@click.option("--port", "-p", type=int, default=8080, help="The port to run the API on") @click.option( - "--port", - "-p", - type=int, - default=8080, - help="The port to run the API on" -) -@click.option( - "--host", - "-h", - type=str, - default="0.0.0.0", - help="The host to run the API on" + "--host", "-h", type=str, default="0.0.0.0", help="The host to run the API on" ) def api(multi_tenant: bool, port: int, host: str): """Start the API.""" @@ -745,8 +735,18 @@ def list_mappings(info: Info): help="The matchers of the mapping, as a comma-separated list of strings.", required=True, ) +@click.option( + "--priority", + "-p", + type=int, + help="The priority of the mapping, higher priority means this rule will execute first.", + required=False, + default=0, +) @pass_info -def create(info: Info, name: str, description: str, file: str, matchers: str): +def create( + info: Info, name: str, description: str, file: str, matchers: str, priority: int +): """Create a mapping rule.""" if os.path.isfile(file) and file.endswith(".csv"): with open(file, "rb") as f: @@ -775,6 +775,7 @@ def create(info: Info, name: str, description: str, file: str, matchers: str): "file_name": file_name, "matchers": matchers.split(","), "rows": rows, + "priority": priority, }, ) From 4f518316711dc0e9d8725d096cf74d748ac32e57 Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Wed, 6 Mar 2024 18:53:27 +0200 Subject: [PATCH 06/16] feat: add support for pg and db create (#910) --- keep/api/core/db.py | 35 +++++++++++----------------------- keep/api/models/db/workflow.py | 2 +- poetry.lock | 35 ++++++++++++++++++++++++++++++---- pyproject.toml | 1 + 4 files changed, 44 insertions(+), 29 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 10672c26c8..44c07aea2b 100644 --- a/keep/api/core/db.py 
+++ b/keep/api/core/db.py @@ -10,10 +10,11 @@ from dotenv import find_dotenv, load_dotenv from google.cloud.sql.connector import Connector from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor -from sqlalchemy import and_, desc, func, null, select, text, update -from sqlalchemy.exc import IntegrityError, OperationalError +from sqlalchemy import and_, desc, func, null, select, update +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import joinedload, selectinload, subqueryload from sqlalchemy.orm.attributes import flag_modified +from sqlalchemy_utils import create_database, database_exists from sqlmodel import Session, SQLModel, create_engine, select # This import is required to create the tables @@ -105,6 +106,7 @@ def __get_conn_impersonate() -> pymysql.connections.Connection: ) elif db_connection_string: try: + logger.info(f"Creating a connection pool with size {pool_size}") engine = create_engine(db_connection_string, pool_size=pool_size) # SQLite does not support pool_size except TypeError: @@ -121,26 +123,11 @@ def create_db_and_tables(): """ Creates the database and tables. """ + if not database_exists(engine.url): + logger.info("Creating the database") + create_database(engine.url) + logger.info("Database created") SQLModel.metadata.create_all(engine) - # migration add column - - # todo: remove this - - # Execute the ALTER TABLE command - with engine.connect() as connection: - try: - connection.execute( - text("ALTER TABLE alert ADD COLUMN alert_hash VARCHAR(255);") - ) - except OperationalError as e: - # that's ok - if "duplicate column" in str(e).lower(): - return - logger.exception("Failed to add column alert_hash to alert table") - raise - except Exception: - logger.exception("Failed to add column alert_hash to alert table") - raise def get_session() -> Session: @@ -726,7 +713,7 @@ def get_alerts_with_filters(tenant_id, provider_id=None, filters=None) -> list[A if isinstance(filter_value, bool) and filter_value is True: # If the filter value is True, we want to filter by the existence of the enrichment # e.g.: all the alerts that have ticket_id - if session.bind.dialect.name == "mysql": + if session.bind.dialect.name in ["mysql", "postgresql"]: query = query.filter( func.json_extract( AlertEnrichment.enrichments, f"$.{filter_key}" @@ -741,7 +728,7 @@ def get_alerts_with_filters(tenant_id, provider_id=None, filters=None) -> list[A != null() ) elif isinstance(filter_value, (str, int)): - if session.bind.dialect.name == "mysql": + if session.bind.dialect.name in ["mysql", "postgresql"]: query = query.filter( func.json_unquote( func.json_extract( @@ -1186,7 +1173,7 @@ def get_rule_distribution(tenant_id, minute=False): seven_days_ago = datetime.utcnow() - timedelta(days=1) # Check the dialect - if session.bind.dialect.name == "mysql": + if session.bind.dialect.name in ["mysql", "postgresql"]: time_format = "%Y-%m-%d %H:%i" if minute else "%Y-%m-%d %H" timestamp_format = func.date_format(AlertToGroup.timestamp, time_format) elif session.bind.dialect.name == "sqlite": diff --git a/keep/api/models/db/workflow.py b/keep/api/models/db/workflow.py index 1ec15b489f..04c15ad59e 100644 --- a/keep/api/models/db/workflow.py +++ b/keep/api/models/db/workflow.py @@ -55,7 +55,7 @@ class WorkflowToAlertExecution(SQLModel, table=True): # https://sqlmodel.tiangolo.com/tutorial/automatic-id-none-refresh/ id: Optional[int] = Field(primary_key=True, default=None) workflow_execution_id: str = Field(foreign_key="workflowexecution.id") - alert_fingerprint: str = 
Field(foreign_key="alert.fingerprint") + alert_fingerprint: str workflow_execution: WorkflowExecution = Relationship( back_populates="workflow_to_alert_execution" ) diff --git a/poetry.lock b/poetry.lock index 0fcc263daf..e0798df6b6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -2113,7 +2113,7 @@ name = "ndg-httpsclient" version = "0.5.1" description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL" optional = false -python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0" +python-versions = ">=2.7,<3.0.0 || >=3.4.0" files = [ {file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"}, {file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"}, @@ -3425,7 +3425,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3974,6 +3973,34 @@ postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-utils" +version = "0.41.1" +description = "Various utility functions for SQLAlchemy." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"}, + {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"}, +] + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + [[package]] name = "sqlalchemy2-stubs" version = "0.0.2a38" @@ -4447,4 +4474,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "3d586178ae001464f57f6b5981381ceb8bf1f093cf72155f31e5832fb3a61819" +content-hash = "ccca3c4f318d4d8f852f146b1b658f3f69bbcbd875227b858689bc17bbc8b575" diff --git a/pyproject.toml b/pyproject.toml index 035d4f1bf9..9fa8aa7506 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,6 +77,7 @@ pymongo = "^4.6.1" google-cloud-trace = "1.11.3" hvac = "^2.1.0" mailchimp-transactional = "^1.0.56" +sqlalchemy-utils = "^0.41.1" [tool.poetry.group.dev.dependencies] From feb13cc1fae9732c01ef8f4b6490c4f4c10b6843 Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Thu, 7 Mar 2024 10:36:48 +0200 Subject: [PATCH 07/16] fix: add protection if the key is none (#911) --- keep/api/utils/tenant_utils.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/keep/api/utils/tenant_utils.py b/keep/api/utils/tenant_utils.py index 7430fe9567..4a81d67041 100644 --- a/keep/api/utils/tenant_utils.py +++ b/keep/api/utils/tenant_utils.py @@ -68,6 +68,13 @@ def update_key_last_used( tenant_api_key_entry = session.exec(statement).first() # Update last used + if not tenant_api_key_entry: + # shouldn't happen but somehow happened to specific tenant so logging it + logger.error( + "API key not found", + extra={"tenant_id": tenant_id, "unique_api_key_id": unique_api_key_id}, + ) + return tenant_api_key_entry.last_used = datetime.utcnow() session.commit() From 9223e1aedc361fa93f5a5d0997027369bda604d6 Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Thu, 7 Mar 2024 12:09:19 +0200 Subject: [PATCH 08/16] fix: add locks to workflow scheduler (#912) --- keep/workflowmanager/workflowmanager.py | 21 +++++++------ keep/workflowmanager/workflowscheduler.py | 38 +++++++++++++++++++---- 2 files changed, 44 insertions(+), 15 
deletions(-) diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index 36c8c859a9..e3bce54856 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -174,15 +174,18 @@ def insert_events(self, tenant_id, events: typing.List[AlertDto]): if not should_run: continue # Lastly, if the workflow should run, add it to the scheduler - self.scheduler.workflows_to_run.append( - { - "workflow": workflow, - "workflow_id": workflow_model.id, - "tenant_id": tenant_id, - "triggered_by": "alert", - "event": event, - } - ) + self.logger.info("Adding workflow to run") + with self.scheduler.lock: + self.scheduler.workflows_to_run.append( + { + "workflow": workflow, + "workflow_id": workflow_model.id, + "tenant_id": tenant_id, + "triggered_by": "alert", + "event": event, + } + ) + self.logger.info("Workflow added to run") def _get_event_value(self, event, filter_key): # if the filter key is a nested key, get the value diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 1ee734755b..540b372a0e 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -6,6 +6,7 @@ import time import typing import uuid +from threading import Lock from sqlalchemy.exc import IntegrityError @@ -38,6 +39,7 @@ def __init__(self, workflow_manager): # all workflows that needs to be run due to alert event self.workflows_to_run = [] self._stop = False + self.lock = Lock() async def start(self): self.logger.info("Starting workflows scheduler") @@ -136,6 +138,7 @@ def _run_workflow( def handle_manual_event_workflow( self, workflow_id, tenant_id, triggered_by_user, event ): + self.logger.info(f"Running manual event workflow {workflow_id}...") try: # if the event is not defined, add some entropy if not event: @@ -151,6 +154,7 @@ def handle_manual_event_workflow( unique_execution_number = self._get_unique_execution_number( json.dumps(event).encode() ) + self.logger.info(f"Unique execution number: {unique_execution_number}") workflow_execution_id = create_workflow_execution( workflow_id=workflow_id, tenant_id=tenant_id, @@ -158,20 +162,32 @@ def handle_manual_event_workflow( execution_number=unique_execution_number, fingerprint=event.get("fingerprint"), ) + self.logger.info(f"Workflow execution id: {workflow_execution_id}") # This is kinda WTF exception since create_workflow_execution shouldn't fail for manual except Exception as e: self.logger.error(f"WTF: error creating workflow execution: {e}") raise e - self.workflows_to_run.append( - { + self.logger.info( + "Adding workflow to run", + extra={ "workflow_id": workflow_id, "workflow_execution_id": workflow_execution_id, "tenant_id": tenant_id, "triggered_by": "manual", "triggered_by_user": triggered_by_user, - "event": event, - } + }, ) + with self.lock: + self.workflows_to_run.append( + { + "workflow_id": workflow_id, + "workflow_execution_id": workflow_execution_id, + "tenant_id": tenant_id, + "triggered_by": "manual", + "triggered_by_user": triggered_by_user, + "event": event, + } + ) return workflow_execution_id def _get_unique_execution_number(self, payload: bytes): @@ -196,9 +212,19 @@ def _handle_event_workflows(self): # TODO - event workflows should be in DB too, to avoid any state problems. 
# take out all items from the workflows to run and run them, also, clean the self.workflows_to_run list - workflows_to_run, self.workflows_to_run = self.workflows_to_run, [] + with self.lock: + workflows_to_run, self.workflows_to_run = self.workflows_to_run, [] for workflow_to_run in workflows_to_run: - self.logger.info("Running event workflow on background") + self.logger.info( + "Running event workflow on background", + extra={ + "workflow_id": workflow_to_run.get("workflow_id"), + "workflow_execution_id": workflow_to_run.get( + "workflow_execution_id" + ), + "tenant_id": workflow_to_run.get("tenant_id"), + }, + ) workflow = workflow_to_run.get("workflow") workflow_id = workflow_to_run.get("workflow_id") tenant_id = workflow_to_run.get("tenant_id") From e5148d98070806b5eb67a4332701e36c15fd4f87 Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Thu, 7 Mar 2024 12:34:38 +0200 Subject: [PATCH 09/16] fix: create db (#913) --- keep/api/core/db.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 44c07aea2b..0439f6c8ba 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -123,10 +123,15 @@ def create_db_and_tables(): """ Creates the database and tables. """ - if not database_exists(engine.url): - logger.info("Creating the database") - create_database(engine.url) - logger.info("Database created") + try: + if not database_exists(engine.url): + logger.info("Creating the database") + create_database(engine.url) + logger.info("Database created") + # On Cloud Run, it fails to check if the database exists + except Exception: + logger.warning("Failed to create the database or detect if it exists.") + pass SQLModel.metadata.create_all(engine) From 55fff387b1ba0209ec3e5ab9ef6a6792192cd3bc Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Thu, 7 Mar 2024 15:16:19 +0200 Subject: [PATCH 10/16] feat: normalize fingerprint (#914) --- keep/api/models/alert.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/keep/api/models/alert.py b/keep/api/models/alert.py index 0e8832394d..027862e166 100644 --- a/keep/api/models/alert.py +++ b/keep/api/models/alert.py @@ -81,9 +81,12 @@ def __str__(self) -> str: @validator("fingerprint", pre=True, always=True) def assign_fingerprint_if_none(cls, fingerprint, values): + # if its none, use the name if fingerprint is None: - return hashlib.sha256(values.get("name").encode()).hexdigest() - return fingerprint + fingerprint = values.get("name") + # normalize fingerprint + hashed_fingerprint = hashlib.sha256(fingerprint.encode()).hexdigest() + return hashed_fingerprint @validator("deleted", pre=True, always=True) def validate_deleted(cls, deleted, values): From 150c48c6c5808fd15e008939f1cd435edffd2edc Mon Sep 17 00:00:00 2001 From: Shahar Glazner Date: Thu, 7 Mar 2024 18:16:02 +0200 Subject: [PATCH 11/16] feat: new icon (#916) --- keep-ui/public/icons/signalfx-icon.png | Bin 4365 -> 10809 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/keep-ui/public/icons/signalfx-icon.png b/keep-ui/public/icons/signalfx-icon.png index 20208eb68f57572e4d703e9fd5c629163d7a3cc6..51f50465b7c97d5445ce9fc2430515b66f6911d5 100644 GIT binary patch literal 10809 zcmeI2Wd)KT>}FW(nEKi z?K$tyU+}vwc(a+k_L_&a?&rQgYe%W8$`d@JeujpIMxdzhQ4N zFW~y(Rff}{8pphzu2K1+TY{zS8u131OjTPVl2wVLa<~Zj1e!>Xqg!C99lNs4Y5Kip zBYvaGURXuWb#lR|E3d7!wB&|jb=MI@x_SOyx#bH{U+-J(zcps z?!ob1j5Xw|BIPOlr#O7NcLA&S)UqNdFEp&Ufl`qqQ?C2l>mlq!-MEkKEqdc;TsWoj 
z>QNmv`z?vT_gz;0FM))G-{&71CAYXaCceqmyQ^avrBfp6{b@lTFoD2dzDjQv7B5~y zlJyP)j2nU?+2fF%t&EvC!)CB)yYkF=n1j2IE|k8Nbq)cYe`vuVP4VLt#&*Y7%RM4m zcwQS&)C}jwNQ{Q!^8v{zE5juI-6L7X+{Q~K0u@0U3kY|MN9-?lrL+e92B$tvz-byh zz6mZ5vo+rgKnPMVVp#<_>o3!=HBb+dWKxcwg)4f3q^5ekF7l63 z;7z(Av5)rd#>R0;GU7<#7g0PTw@Yr=6Q>fJ*j@zPa$yX8dGixfH#If}oB8^YASW1F z2(gi-2;xB5mquS%?SWam%NcvryF70ug7{M`N`pr;8W`r%BN-C`vulE5O_!{|EGQCZ z%R2GvwlP|QP^buFp$jKn4to_jH)uy5siDT+8+*|IzB6YqO5H^y_I8PPqwdj6 Date: Thu, 7 Mar 2024 20:56:56 +0200 Subject: [PATCH 12/16] fix: revert changes (#918) --- keep/api/models/alert.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/keep/api/models/alert.py b/keep/api/models/alert.py index 027862e166..ff0ce88eff 100644 --- a/keep/api/models/alert.py +++ b/keep/api/models/alert.py @@ -83,10 +83,11 @@ def __str__(self) -> str: def assign_fingerprint_if_none(cls, fingerprint, values): # if its none, use the name if fingerprint is None: - fingerprint = values.get("name") - # normalize fingerprint - hashed_fingerprint = hashlib.sha256(fingerprint.encode()).hexdigest() - return hashed_fingerprint + fingerprint = hashlib.sha256(values.get("name").encode()).hexdigest() + # take only the first 255 characters + else: + fingerprint = fingerprint[:255] + return fingerprint @validator("deleted", pre=True, always=True) def validate_deleted(cls, deleted, values): From ba789ef9bc886b8c2592db761664e64a73f930cc Mon Sep 17 00:00:00 2001 From: Furkan Pehlivan <65170388+pehlicd@users.noreply.github.com> Date: Thu, 7 Mar 2024 23:32:34 +0100 Subject: [PATCH 13/16] feat(cli): handle if no alerts found (#922) --- keep/cli/cli.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/keep/cli/cli.py b/keep/cli/cli.py index 5a9d81fb44..c18348cf97 100644 --- a/keep/cli/cli.py +++ b/keep/cli/cli.py @@ -1108,6 +1108,11 @@ def list_alerts(info: Info, filter: typing.List[str], export: bool): aggregated_alerts[alert["fingerprint"]] = alert alerts = aggregated_alerts.values() + + if len(alerts) == 0: + click.echo(click.style("No alerts found.", bold=True)) + return + # Apply all provided filters for filt in filter: key, value = filt.split("=") From c8778515c4e9b15ac524ccbd3ebd889dabf25850 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 01:31:06 +0200 Subject: [PATCH 14/16] chore(deps): bump jose from 4.15.4 to 4.15.5 in /keep-ui (#919) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: talboren --- keep-ui/package-lock.json | 8 ++++---- keep-ui/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/keep-ui/package-lock.json b/keep-ui/package-lock.json index 9a7f9cae68..df6ff44650 100644 --- a/keep-ui/package-lock.json +++ b/keep-ui/package-lock.json @@ -193,7 +193,7 @@ "isarray": "^2.0.5", "isexe": "^2.0.0", "jiti": "^1.18.2", - "jose": "^4.14.4", + "jose": "^4.15.5", "js-cookie": "^3.0.5", "js-sdsl": "^4.4.0", "js-tokens": "^4.0.0", @@ -7276,9 +7276,9 @@ } }, "node_modules/jose": { - "version": "4.15.4", - "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.4.tgz", - "integrity": "sha512-W+oqK4H+r5sITxfxpSU+MMdr/YSWGvgZMQDIsNoBDGGy4i7GBPTtvFKibQzW06n3U3TqHjhvBJsirShsEJ6eeQ==", + "version": "4.15.5", + "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", + "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", "funding": { "url": "https://github.com/sponsors/panva" } diff 
--git a/keep-ui/package.json b/keep-ui/package.json index c6ea59bdf0..df71a8ace3 100644 --- a/keep-ui/package.json +++ b/keep-ui/package.json @@ -194,7 +194,7 @@ "isarray": "^2.0.5", "isexe": "^2.0.0", "jiti": "^1.18.2", - "jose": "^4.14.4", + "jose": "^4.15.5", "js-cookie": "^3.0.5", "js-sdsl": "^4.4.0", "js-tokens": "^4.0.0", From 155e0d47b7677ffc31b71c4a9f6ac3e2b157b1d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 01:37:17 +0200 Subject: [PATCH 15/16] chore(deps): bump jwcrypto from 1.5.1 to 1.5.6 (#923) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 16 +++++++++------- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index e0798df6b6..5cd35bb042 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -1818,17 +1818,18 @@ files = [ [[package]] name = "jwcrypto" -version = "1.5.1" +version = "1.5.6" description = "Implementation of JOSE Web standards" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.8" files = [ - {file = "jwcrypto-1.5.1.tar.gz", hash = "sha256:48bb9bf433777136253579e52b75ffe0f9a4a721d133d01f45a0b91ed5f4f1ae"}, + {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, + {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, ] [package.dependencies] cryptography = ">=3.4" -deprecated = "*" +typing-extensions = ">=4.5.0" [[package]] name = "kafka-python" @@ -2113,7 +2114,7 @@ name = "ndg-httpsclient" version = "0.5.1" description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL" optional = false -python-versions = ">=2.7,<3.0.0 || >=3.4.0" +python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0" files = [ {file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"}, {file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"}, @@ -3425,6 +3426,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4474,4 +4476,4 @@ testing = ["big-O", "jaraco.functools", 
"jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "ccca3c4f318d4d8f852f146b1b658f3f69bbcbd875227b858689bc17bbc8b575" +content-hash = "dff1adb9a5cfcc30d8bf6ac233fcd29af4e67faad205f3117e9501e5a5f983d4" diff --git a/pyproject.toml b/pyproject.toml index 9fa8aa7506..8a4f4ec582 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ cloud-sql-python-connector = "^1.2.3" pymysql = "^1.0.3" google-cloud-secret-manager = "^2.16.1" python-jose = "^3.3.0" -jwcrypto = "^1.5.1" +jwcrypto = "^1.5.6" sqlalchemy = "1.4.41" snowflake-connector-python = "3.1.0" openai = "^0.27.7" From 0e0852a1bd135a68eebec0f6138f0605b6484382 Mon Sep 17 00:00:00 2001 From: Mayor Ugochukwu Date: Mon, 11 Mar 2024 20:40:23 +0000 Subject: [PATCH 16/16] WIP --- .../awsincident_provider.py | 80 +++++++++++++------ 1 file changed, 57 insertions(+), 23 deletions(-) diff --git a/keep/providers/awsincident_provider/awsincident_provider.py b/keep/providers/awsincident_provider/awsincident_provider.py index f74d566575..fb563af62b 100644 --- a/keep/providers/awsincident_provider/awsincident_provider.py +++ b/keep/providers/awsincident_provider/awsincident_provider.py @@ -1,39 +1,73 @@ -import dataclasses -import typing +import boto3 +import logging +from typing import Dict, Any, Optional import pydantic +import dataclasses +from datetime import datetime from keep.providers.base.base_provider import BaseProvider +from keep.providers.models.provider_config import ProviderConfig from keep.contextmanager.contextmanager import ContextManager +logger = logging.getLogger(__name__) @pydantic.dataclasses.dataclass -class AwsIncidenceProviderAuthConfig: - aws_access_key_id: str = dataclasses.field( +class AWSIncidentManagerProviderAuthConfig: + incident_routing_key: Optional[str] = dataclasses.field( + default=None, metadata={ - "required": True, - "description": "AWS Access Key ID", - "sensitive": True, + "required": False, + "description": "Optional incident routing key (required for creating incidents)", } ) - aws_secret_access_key: str = dataclasses.field( - metadata={ - "required": True, - "description": "AWS Secret Access Key", - "sensitive": True - } - ) +class AWSIncidentManagerProvider(BaseProvider): + def __init__(self, context_manager: ContextManager, provider_id: str, config: ProviderConfig): + super().__init__(context_manager, provider_id, config) + self.auth_config = AWSIncidentManagerProviderAuthConfig(**config.authentication) + self.incident_manager_client = boto3.client('ssm-incidents', region_name=self.config.region) - region_name: str = dataclasses.field( - metadata={ - "required": True, - "description": "AWS Region Name", + def create_incident(self, incident_details: Dict[str, Any]) -> str: + start_incident_params = { + 'responsePlanArn': incident_details['responsePlanArn'], + 'title': incident_details['title'], + 'triggerDetails': { + 'source': incident_details['source'], + 'timestamp': datetime.now(), + 'triggerArn': incident_details['triggerArn'] + } } - ) + if 'impact' in incident_details: + start_incident_params['impact'] = incident_details['impact'] + if 'relatedItems' in incident_details: + start_incident_params['relatedItems'] = incident_details['relatedItems'] + + response = self.incident_manager_client.start_incident(**start_incident_params) + incident_arn = response['incidentRecordArn'] + logger.info(f"Incident created. 
ARN: {incident_arn}")
+        return incident_arn
+
+    def resolve_incident(self, incident_arn: str):
+        self.incident_manager_client.update_incident_record(
+            arn=incident_arn,
+            status='RESOLVED'
+        )
+        logger.info(f"Incident resolved. ARN: {incident_arn}")
+
+if __name__ == "__main__":
+    context_manager = ContextManager(tenant_id="example-tenant", workflow_id="example-workflow")
+    config = ProviderConfig(region="us-west-2", authentication={"incident_routing_key": "example-routing-key"})
+    provider = AWSIncidentManagerProvider(context_manager, "incident-manager-provider-id", config)
+
+    incident_details = {
+        "responsePlanArn": "arn:aws:ssm-incidents:us-west-2:123456789012:response-plan/example-response-plan",
+        "title": "Example Incident",
+        "source": "aws.cloudwatch",
+        "triggerArn": "arn:aws:cloudwatch:us-west-2:123456789012:alarm:example-alarm"
+    }
+    incident_arn = provider.create_incident(incident_details)
+    print(f"Incident ARN: {incident_arn}")
+
+    # Resolve the incident
+    provider.resolve_incident(incident_arn)
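
A note on the StartIncident call used in this WIP patch: the ssm-incidents API also accepts an optional clientToken for idempotent retries, and the trigger timestamp is safer as a timezone-aware datetime. Below is a minimal standalone boto3 sketch of the same create/resolve flow, outside Keep's provider classes; the region, account ID, response-plan ARN and alarm ARN are placeholders, not values taken from this patch.

import uuid
from datetime import datetime, timezone

import boto3

# Placeholder region; in the provider this would come from configuration.
client = boto3.client("ssm-incidents", region_name="us-west-2")

response = client.start_incident(
    clientToken=str(uuid.uuid4()),  # lets the call be retried without opening a duplicate incident
    responsePlanArn="arn:aws:ssm-incidents:us-west-2:123456789012:response-plan/example-response-plan",
    title="Example Incident",
    impact=3,  # 1 = critical ... 5 = no impact
    triggerDetails={
        "source": "aws.cloudwatch",
        "timestamp": datetime.now(timezone.utc),  # timezone-aware trigger time
        "triggerArn": "arn:aws:cloudwatch:us-west-2:123456789012:alarm:example-alarm",
    },
)
incident_arn = response["incidentRecordArn"]

# UpdateIncidentRecord takes lowercase parameter names (arn, status).
client.update_incident_record(arn=incident_arn, status="RESOLVED")

Threading a client token through create_incident would let a workflow engine retry a failed action without opening duplicate incidents.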