Compare commits

...

2 Commits

SHA1        Message  Date
bd414b1fc7  4.45.0   2024-06-11 12:00:06 +01:00
0f73a14926  4.44.3   2024-05-24 12:00:06 +01:00
20 changed files with 305 additions and 67 deletions


@@ -26,7 +26,11 @@ from app.email_utils import (
 )
 from app.errors import AliasInTrashError
 from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import AliasDeleted, AliasStatusChange, EventContent
+from app.events.generated.event_pb2 import (
+    AliasDeleted,
+    AliasStatusChanged,
+    EventContent,
+)
 from app.log import LOG
 from app.models import (
     Alias,
@@ -468,9 +472,10 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
 def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
+    LOG.i(f"Changing alias {alias} enabled to {enabled}")
     alias.enabled = enabled
-    event = AliasStatusChange(
+    event = AliasStatusChanged(
         alias_id=alias.id, alias_email=alias.email, enabled=enabled
     )
     EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))


@@ -25,6 +25,7 @@ from app.errors import (
     ErrAddressInvalid,
 )
 from app.extensions import limiter
+from app.log import LOG
 from app.models import Alias, Contact, Mailbox, AliasMailbox
@@ -185,6 +186,7 @@ def toggle_alias(alias_id):
         return jsonify(error="Forbidden"), 403

     alias_utils.change_alias_status(alias, enabled=not alias.enabled)
+    LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
     Session.commit()
     return jsonify(enabled=alias.enabled), 200


@@ -281,6 +281,7 @@ JOB_DELETE_MAILBOX = "delete-mailbox"
 JOB_DELETE_DOMAIN = "delete-domain"
 JOB_SEND_USER_REPORT = "send-user-report"
 JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
+JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"

 # for pagination
 PAGE_LIMIT = 20
@@ -583,3 +584,7 @@ UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
 STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ

 EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
+
+# We want it disabled by default, so only skip if defined
+EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
+EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ


@@ -34,6 +34,9 @@ class EventDispatcher:
         dispatcher: Dispatcher = PostgresDispatcher.get(),
         skip_if_webhook_missing: bool = True,
     ):
+        if config.EVENT_WEBHOOK_DISABLE:
+            return
+
         if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
             return


@@ -1,12 +1,22 @@
 # -*- coding: utf-8 -*-
 # Generated by the protocol buffer compiler. DO NOT EDIT!
+# NO CHECKED-IN PROTOBUF GENCODE
 # source: event.proto
-# Protobuf Python Version: 5.26.1
+# Protobuf Python Version: 5.27.0
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import runtime_version as _runtime_version
 from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
+_runtime_version.ValidateProtobufRuntimeVersion(
+    _runtime_version.Domain.PUBLIC,
+    5,
+    27,
+    0,
+    '',
+    'event.proto'
+)
 # @@protoc_insertion_point(imports)

 _sym_db = _symbol_database.Default()
@@ -14,25 +24,27 @@ _sym_db = _symbol_database.Default()


-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"\'\n\x0eUserPlanChange\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"K\n\x11\x41liasStatusChange\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"\xce\x02\n\x0c\x45ventContent\x12>\n\x10user_plan_change\x18\x01 \x01(\x0b\x32\".simplelogin_events.UserPlanChangeH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x44\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32%.simplelogin_events.AliasStatusChangeH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')

 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
   DESCRIPTOR._loaded_options = None
-  _globals['_USERPLANCHANGE']._serialized_start=35
-  _globals['_USERPLANCHANGE']._serialized_end=74
-  _globals['_USERDELETED']._serialized_start=76
-  _globals['_USERDELETED']._serialized_end=89
-  _globals['_ALIASCREATED']._serialized_start=91
-  _globals['_ALIASCREATED']._serialized_end=181
-  _globals['_ALIASSTATUSCHANGE']._serialized_start=183
-  _globals['_ALIASSTATUSCHANGE']._serialized_end=258
-  _globals['_ALIASDELETED']._serialized_start=260
-  _globals['_ALIASDELETED']._serialized_end=313
-  _globals['_EVENTCONTENT']._serialized_start=316
-  _globals['_EVENTCONTENT']._serialized_end=650
-  _globals['_EVENT']._serialized_start=652
-  _globals['_EVENT']._serialized_end=773
+  _globals['_USERPLANCHANGED']._serialized_start=35
+  _globals['_USERPLANCHANGED']._serialized_end=75
+  _globals['_USERDELETED']._serialized_start=77
+  _globals['_USERDELETED']._serialized_end=90
+  _globals['_ALIASCREATED']._serialized_start=92
+  _globals['_ALIASCREATED']._serialized_end=182
+  _globals['_ALIASSTATUSCHANGED']._serialized_start=184
+  _globals['_ALIASSTATUSCHANGED']._serialized_end=260
+  _globals['_ALIASDELETED']._serialized_start=262
+  _globals['_ALIASDELETED']._serialized_end=315
+  _globals['_ALIASCREATEDLIST']._serialized_start=317
+  _globals['_ALIASCREATEDLIST']._serialized_end=385
+  _globals['_EVENTCONTENT']._serialized_start=388
+  _globals['_EVENTCONTENT']._serialized_end=791
+  _globals['_EVENT']._serialized_start=793
+  _globals['_EVENT']._serialized_end=914
 # @@protoc_insertion_point(module_scope)
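
A note on the generated module above: protoc emits the serialized descriptor and the _serialized_start/_serialized_end offset table together, which is why every offset shifts once AliasCreatedList lands in the schema. The file is regenerated rather than hand-edited; with protobuf 5.27.x tooling the invocation would be along the lines of `protoc --python_out=<out_dir> --pyi_out=<out_dir> event.proto` (the output paths are assumed here, they are not shown in this diff).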


@@ -1,10 +1,11 @@
+from google.protobuf.internal import containers as _containers
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
-from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Optional, Union as _Union
+from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union

 DESCRIPTOR: _descriptor.FileDescriptor

-class UserPlanChange(_message.Message):
+class UserPlanChanged(_message.Message):
     __slots__ = ("plan_end_time",)
     PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
     plan_end_time: int
@@ -26,7 +27,7 @@ class AliasCreated(_message.Message):
     enabled: bool
     def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...

-class AliasStatusChange(_message.Message):
+class AliasStatusChanged(_message.Message):
     __slots__ = ("alias_id", "alias_email", "enabled")
     ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
     ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
@@ -44,19 +45,27 @@ class AliasDeleted(_message.Message):
     alias_email: str
     def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...

+class AliasCreatedList(_message.Message):
+    __slots__ = ("events",)
+    EVENTS_FIELD_NUMBER: _ClassVar[int]
+    events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
+    def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
+
 class EventContent(_message.Message):
-    __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted")
+    __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
     USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
     USER_DELETED_FIELD_NUMBER: _ClassVar[int]
     ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
     ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
     ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
-    user_plan_change: UserPlanChange
+    ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
+    user_plan_change: UserPlanChanged
     user_deleted: UserDeleted
     alias_created: AliasCreated
-    alias_status_change: AliasStatusChange
+    alias_status_change: AliasStatusChanged
     alias_deleted: AliasDeleted
-    def __init__(self, user_plan_change: _Optional[_Union[UserPlanChange, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChange, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ...) -> None: ...
+    alias_create_list: AliasCreatedList
+    def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...

 class Event(_message.Message):
     __slots__ = ("user_id", "external_user_id", "partner_id", "content")


@@ -102,6 +102,7 @@ class UnsubscribeHandler:
             mailbox.email, alias
         ):
             return status.E509
+        LOG.i(f"User disabled alias {alias} via unsubscribe header")
         alias_utils.change_alias_status(alias, enabled=False)
         Session.commit()
         enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"


@@ -0,0 +1,40 @@
+from app.events.event_dispatcher import EventDispatcher, Dispatcher
+from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
+from app.log import LOG
+from app.models import User, Alias
+
+
+def send_alias_creation_events_for_user(
+    user: User, dispatcher: Dispatcher, chunk_size=50
+):
+    if user.disabled:
+        LOG.i(f"User {user} is disabled. Skipping sending events for that user")
+        return
+    chunk_size = min(chunk_size, 50)
+    event_list = []
+    for alias in (
+        Alias.yield_per_query(chunk_size)
+        .filter_by(user_id=user.id)
+        .order_by(Alias.id.asc())
+    ):
+        event_list.append(
+            AliasCreated(
+                alias_id=alias.id,
+                alias_email=alias.email,
+                alias_note=alias.note,
+                enabled=alias.enabled,
+            )
+        )
+        if len(event_list) >= chunk_size:
+            EventDispatcher.send_event(
+                user,
+                EventContent(alias_create_list=AliasCreatedList(events=event_list)),
+                dispatcher=dispatcher,
+            )
+            event_list = []
+    if len(event_list) > 0:
+        EventDispatcher.send_event(
+            user,
+            EventContent(alias_create_list=AliasCreatedList(events=event_list)),
+            dispatcher=dispatcher,
+        )
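
For context, a hedged sketch of the producer side of this new file: how the job named by JOB_SEND_ALIAS_CREATION_EVENTS could be enqueued. Only the job name and the user_id payload key come from this diff; the actual enqueue call site is not part of it.

    # Hypothetical enqueue site; only the job name and the payload shape
    # are taken from this diff.
    from app import config
    from app.models import Job

    Job.create(
        name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
        payload={"user_id": user.id},
        commit=True,
    )

job_runner.py (further down in this compare) then picks the job up and calls send_alias_creation_events_for_user for that user.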


@@ -3699,7 +3699,10 @@ class SyncEvent(Base, ModelMixin):
             AND taken_time IS NULL
         """
         args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
         res = Session.execute(sql, args)
+        Session.commit()
+
         return res.rowcount > 0

     @classmethod
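
The hunk above shows only the tail of SyncEvent.mark_as_taken; below is a hedged reconstruction of the claim pattern it implements (the head of the UPDATE statement is elided in this diff). The row is only updated while taken_time IS NULL, so a zero rowcount means another runner won the race, and the added Session.commit() publishes the claim immediately instead of leaving it pending in the transaction.

    import arrow
    from app.db import Session

    def mark_as_taken_sketch(sync_event_id: int) -> bool:
        # Assumed shape of the elided statement: claim the event only if
        # no other runner has set taken_time yet.
        sql = """
            UPDATE sync_event
            SET taken_time = :taken_time
            WHERE id = :sync_event_id
              AND taken_time IS NULL
        """
        args = {"taken_time": arrow.now().datetime, "sync_event_id": sync_event_id}
        res = Session.execute(sql, args)
        Session.commit()
        # rowcount > 0 means this runner won the claim
        return res.rowcount > 0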


@@ -3,7 +3,7 @@ from requests import RequestException

 from app import config
 from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import EventContent, UserPlanChange
+from app.events.generated.event_pb2 import EventContent, UserPlanChanged
 from app.log import LOG
 from app.models import User
@@ -34,5 +34,5 @@ def execute_subscription_webhook(user: User):
     except RequestException as e:
         LOG.error(f"Subscription request exception: {e}")

-    event = UserPlanChange(plan_end_time=sl_subscription_end)
+    event = UserPlanChanged(plan_end_time=sl_subscription_end)
     EventDispatcher.send_event(user, EventContent(user_plan_change=event))


@@ -3,9 +3,10 @@ from enum import Enum
 from sys import argv, exit

 from app.config import DB_URI
+from app.log import LOG
 from events.runner import Runner
 from events.event_source import DeadLetterEventSource, PostgresEventSource
-from events.event_sink import ConsoleEventSink
+from events.event_sink import ConsoleEventSink, HttpEventSink


 class Mode(Enum):
@@ -24,16 +25,20 @@ class Mode(Enum):
 def main(mode: Mode, dry_run: bool):
     if mode == Mode.DEAD_LETTER:
+        LOG.i("Using DeadLetterEventSource")
         source = DeadLetterEventSource()
     elif mode == Mode.LISTENER:
+        LOG.i("Using PostgresEventSource")
         source = PostgresEventSource(DB_URI)
     else:
         raise ValueError(f"Invalid mode: {mode}")

     if dry_run:
+        LOG.i("Starting with ConsoleEventSink")
         sink = ConsoleEventSink()
     else:
-        sink = ConsoleEventSink()
+        LOG.i("Starting with HttpEventSink")
+        sink = HttpEventSink()

     runner = Runner(source=source, sink=sink)
     runner.run()


@@ -1,19 +1,42 @@
+import requests
+
 from abc import ABC, abstractmethod
+
+from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
 from app.log import LOG
 from app.models import SyncEvent


 class EventSink(ABC):
     @abstractmethod
-    def process(self, event: SyncEvent):
+    def process(self, event: SyncEvent) -> bool:
         pass


 class HttpEventSink(EventSink):
-    def process(self, event: SyncEvent):
-        pass
+    def process(self, event: SyncEvent) -> bool:
+        if not EVENT_WEBHOOK:
+            LOG.warning("Skipping sending event because there is no webhook configured")
+            return False
+
+        LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")
+        res = requests.post(
+            url=EVENT_WEBHOOK,
+            data=event.content,
+            headers={"Content-Type": "application/x-protobuf"},
+            verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
+        )
+        if res.status_code != 200:
+            LOG.warning(
+                f"Failed to send event to webhook: {res.status_code} {res.text}"
+            )
+            return False
+        else:
+            LOG.info(f"Event {event.id} sent successfully to webhook")
+            return True


 class ConsoleEventSink(EventSink):
-    def process(self, event: SyncEvent):
+    def process(self, event: SyncEvent) -> bool:
         LOG.info(f"Handling event {event.id}")
+        return True
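
For illustration, a minimal sketch of the receiving end of HttpEventSink, assuming Flask (the route path is hypothetical): the sink POSTs the serialized protobuf Event as the request body with Content-Type application/x-protobuf, so a consumer can decode it with the generated bindings and branch on the content oneof. Any non-200 response makes the sink report a failure, leaving the event in place for a later retry.

    from flask import Flask, request

    from app.events.generated import event_pb2

    app = Flask(__name__)

    @app.route("/simplelogin-events", methods=["POST"])
    def handle_event():
        # The body is the serialized Event message, not JSON
        event = event_pb2.Event.FromString(request.get_data())
        kind = event.content.WhichOneof("content")
        if kind == "alias_create_list":
            for created in event.content.alias_create_list.events:
                print(f"user {event.user_id}: alias {created.alias_email} created")
        elif kind == "alias_status_change":
            change = event.content.alias_status_change
            print(f"alias {change.alias_email} enabled={change.enabled}")
        # Anything other than 200 counts as a delivery failure for the sink
        return "", 200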


@@ -13,6 +13,8 @@ from typing import Callable, NoReturn
 _DEAD_LETTER_THRESHOLD_MINUTES = 10
 _DEAD_LETTER_INTERVAL_SECONDS = 30
+
+_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5


 class EventSource(ABC):
     @abstractmethod
@@ -22,9 +24,19 @@ class EventSource(ABC):
 class PostgresEventSource(EventSource):
     def __init__(self, connection_string: str):
-        self.__connection = psycopg2.connect(connection_string)
+        self.__connection_string = connection_string
+        self.__connect()

     def run(self, on_event: Callable[[SyncEvent], NoReturn]):
+        while True:
+            try:
+                self.__listen(on_event)
+            except Exception as e:
+                LOG.warn(f"Error listening to events: {e}")
+                sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
+                self.__connect()
+
+    def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
         self.__connection.set_isolation_level(
             psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
         )
@@ -44,12 +56,24 @@ class PostgresEventSource(EventSource):
                 webhook_id = int(notify.payload)
                 event = SyncEvent.get_by(id=webhook_id)

                 if event is not None:
-                    on_event(event)
+                    if event.mark_as_taken():
+                        on_event(event)
+                    else:
+                        LOG.info(
+                            f"Event {event.id} was handled by another runner"
+                        )
                 else:
                     LOG.info(f"Could not find event with id={notify.payload}")
             except Exception as e:
                 LOG.warn(f"Error getting event: {e}")

+    def __connect(self):
+        self.__connection = psycopg2.connect(self.__connection_string)
+
+        from app.db import Session
+
+        Session.close()
+

 class DeadLetterEventSource(EventSource):
     @newrelic.agent.background_task()
@@ -73,3 +97,4 @@ class DeadLetterEventSource(EventSource):
                 sleep(_DEAD_LETTER_INTERVAL_SECONDS)
             except Exception as e:
                 LOG.warn(f"Error getting dead letter event: {e}")
+                sleep(_DEAD_LETTER_INTERVAL_SECONDS)


@@ -18,11 +18,10 @@ class Runner:
     @newrelic.agent.background_task()
     def __on_event(self, event: SyncEvent):
         try:
-            can_process = event.mark_as_taken()
-            if can_process:
-                event_created_at = event.created_at
-                start_time = arrow.now()
-                self.__sink.process(event)
+            event_created_at = event.created_at
+            start_time = arrow.now()
+            success = self.__sink.process(event)
+            if success:
                 event_id = event.id
                 SyncEvent.delete(event.id, commit=True)
                 LOG.info(f"Marked {event_id} as done")
@@ -38,8 +37,6 @@ class Runner:
                     "Custom/sync_event_elapsed_time",
                     time_between_taken_and_created.total_seconds(),
                 )
-            else:
-                LOG.info(f"{event.id} was handled by another runner")
         except Exception as e:
             LOG.warn(f"Exception processing event [id={event.id}]: {e}")
             newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)


@@ -15,6 +15,7 @@ from app.email_utils import (
     render,
 )
 from app.import_utils import handle_batch_import
+from app.jobs.event_jobs import send_alias_creation_events_for_user
 from app.jobs.export_user_data_job import ExportUserDataJob
 from app.log import LOG
 from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
@@ -197,13 +198,18 @@ def process_job(job: Job):
         onboarding_mailbox(user)
     elif job.name == config.JOB_ONBOARDING_4:
         user_id = job.payload.get("user_id")
-        user = User.get(user_id)
+        user: User = User.get(user_id)
         # user might delete their account in the meantime
         # or disable the notification
         if user and user.notification and user.activated:
-            LOG.d("send onboarding pgp email to user %s", user)
-            onboarding_pgp(user)
+            # if user only has 1 mailbox which is Proton then do not send PGP onboarding email
+            mailboxes = user.mailboxes()
+            if len(mailboxes) == 1 and mailboxes[0].is_proton():
+                LOG.d("Do not send onboarding PGP email to Proton mailbox")
+            else:
+                LOG.d("send onboarding pgp email to user %s", user)
+                onboarding_pgp(user)

     elif job.name == config.JOB_BATCH_IMPORT:
         batch_import_id = job.payload.get("batch_import_id")
@@ -264,8 +270,14 @@ SimpleLogin team.
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
         if user and user.activated:
-            LOG.d("send proton welcome email to user %s", user)
+            LOG.d("Send proton welcome email to user %s", user)
             welcome_proton(user)
+    elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
+        user_id = job.payload.get("user_id")
+        user = User.get(user_id)
+        if user and user.activated:
+            LOG.d(f"Sending alias creation events for {user}")
+            send_alias_creation_events_for_user(user)

     else:
         LOG.e("Unknown job name %s", job.name)


@@ -4,6 +4,7 @@ import subprocess
 from time import sleep
 from typing import List, Dict

+import arrow
 import newrelic.agent

 from app.db import Session
@@ -93,11 +94,44 @@ def log_nb_db_connection():
     newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)


+@newrelic.agent.background_task()
+def log_pending_to_process_events():
+    r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
+    events_pending = list(r)[0][0]
+    LOG.d("number of events pending to process %s", events_pending)
+    newrelic.agent.record_custom_metric(
+        "Custom/sync_events_pending_to_process", events_pending
+    )
+
+
+@newrelic.agent.background_task()
+def log_events_pending_dead_letter():
+    since = arrow.now().shift(minutes=-10).datetime
+    r = Session.execute(
+        """
+        SELECT COUNT(*)
+        FROM sync_event
+        WHERE (taken_time IS NOT NULL AND taken_time < :since)
+           OR (taken_time IS NULL AND created_at < :since)
+        """,
+        {"since": since},
+    )
+    events_pending = list(r)[0][0]
+    LOG.d("number of events pending dead letter %s", events_pending)
+    newrelic.agent.record_custom_metric(
+        "Custom/sync_events_pending_dead_letter", events_pending
+    )
+
+
 if __name__ == "__main__":
     exporter = MetricExporter(get_newrelic_license())
     while True:
         log_postfix_metrics()
         log_nb_db_connection()
+        log_pending_to_process_events()
+        log_events_pending_dead_letter()
         Session.close()

         exporter.run()

app/poetry.lock (generated, 28 changed lines)

@@ -2150,24 +2150,22 @@ wcwidth = "*"

 [[package]]
 name = "protobuf"
-version = "4.24.3"
+version = "5.27.1"
 description = ""
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
-    {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
-    {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
-    {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
-    {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
-    {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
-    {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
-    {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
-    {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
-    {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
-    {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
-    {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
-    {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
+    {file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"},
+    {file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"},
+    {file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"},
+    {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"},
+    {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"},
+    {file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"},
+    {file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"},
+    {file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"},
+    {file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"},
+    {file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"},
+    {file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"},
 ]

 [[package]]


@@ -2,7 +2,7 @@ syntax = "proto3";

 package simplelogin_events;

-message UserPlanChange {
+message UserPlanChanged {
   uint32 plan_end_time = 1;
 }
@@ -16,7 +16,7 @@ message AliasCreated {
   bool enabled = 4;
 }

-message AliasStatusChange {
+message AliasStatusChanged {
   uint32 alias_id = 1;
   string alias_email = 2;
   bool enabled = 3;
@@ -27,13 +27,18 @@ message AliasDeleted {
   string alias_email = 2;
 }

+message AliasCreatedList {
+  repeated AliasCreated events = 1;
+}
+
 message EventContent {
   oneof content {
-    UserPlanChange user_plan_change = 1;
+    UserPlanChanged user_plan_change = 1;
     UserDeleted user_deleted = 2;
     AliasCreated alias_created = 3;
-    AliasStatusChange alias_status_change = 4;
+    AliasStatusChanged alias_status_change = 4;
     AliasDeleted alias_deleted = 5;
+    AliasCreatedList alias_create_list = 6;
   }
 }
@@ -42,4 +47,4 @@ message Event {
   string external_user_id = 2;
   uint32 partner_id = 3;
   EventContent content = 4;
-}
\ No newline at end of file
+}


@@ -0,0 +1,46 @@
+from app import config
+from app.db import Session
+from app.events.event_dispatcher import Dispatcher
+from app.events.generated import event_pb2
+from app.jobs.event_jobs import send_alias_creation_events_for_user
+from app.models import Alias
+from tests.utils import create_partner_linked_user
+
+
+class MemStoreDispatcher(Dispatcher):
+    def __init__(self):
+        self.events = []
+
+    def send(self, event: bytes):
+        self.events.append(event)
+
+
+def setup_module():
+    config.EVENT_WEBHOOK = True
+
+
+def teardown_module():
+    config.EVENT_WEBHOOK = False
+
+
+def test_send_alias_creation_events():
+    [user, partner_user] = create_partner_linked_user()
+    aliases = [Alias.create_new_random(user) for i in range(2)]
+    Session.flush()
+    dispatcher = MemStoreDispatcher()
+    send_alias_creation_events_for_user(user, dispatcher=dispatcher, chunk_size=2)
+    # 2 batches: the 1st holds the newsletter alias + the first alias, the 2nd the last alias
+    assert len(dispatcher.events) == 2
+    decoded_event = event_pb2.Event.FromString(dispatcher.events[0])
+    assert decoded_event.user_id == user.id
+    assert decoded_event.external_user_id == partner_user.external_user_id
+    event_list = decoded_event.content.alias_create_list.events
+    assert len(event_list) == 2
+    # index 0 is the newsletter alias
+    assert event_list[1].alias_id == aliases[0].id
+    decoded_event = event_pb2.Event.FromString(dispatcher.events[1])
+    assert decoded_event.user_id == user.id
+    assert decoded_event.external_user_id == partner_user.external_user_id
+    event_list = decoded_event.content.alias_create_list.events
+    assert len(event_list) == 1
+    assert event_list[0].alias_id == aliases[1].id


@@ -9,7 +9,8 @@ from typing import Optional, Dict

 import jinja2
 from flask import url_for

-from app.models import User
+from app.models import User, PartnerUser
+from app.proton.utils import get_proton_partner
 from app.utils import random_string
@@ -30,6 +31,18 @@ def create_new_user(email: Optional[str] = None, name: Optional[str] = None) -> User:
     return user


+def create_partner_linked_user() -> tuple[User, PartnerUser]:
+    user = create_new_user()
+    partner_user = PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        user_id=user.id,
+        external_user_id=random_token(10),
+        flush=True,
+    )
+
+    return user, partner_user
+
+
 def login(flask_client, user: Optional[User] = None) -> User:
     if not user:
         user = create_new_user()