Compare commits

3 commits:
- bd414b1fc7
- 0f73a14926
- 0ea33ca5f8
@@ -46,7 +46,8 @@ class SLModelView(sqla.ModelView):

     def inaccessible_callback(self, name, **kwargs):
         # redirect to login page if user doesn't have access
-        return redirect(url_for("auth.login", next=request.url))
+        flash("You don't have access to the admin page", "error")
+        return redirect(url_for("dashboard.index", next=request.url))

     def on_model_change(self, form, model, is_created):
         changes = {}
@@ -25,6 +25,12 @@ from app.email_utils import (
     render,
 )
 from app.errors import AliasInTrashError
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import (
+    AliasDeleted,
+    AliasStatusChanged,
+    EventContent,
+)
 from app.log import LOG
 from app.models import (
     Alias,
@@ -334,6 +340,10 @@ def delete_alias(alias: Alias, user: User):
     Alias.filter(Alias.id == alias.id).delete()
     Session.commit()

+    EventDispatcher.send_event(
+        user, EventContent(alias_deleted=AliasDeleted(alias_id=alias.id))
+    )
+

 def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
     """
@@ -459,3 +469,16 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
     alias.pinned = False

     Session.commit()
+
+
+def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
+    LOG.i(f"Changing alias {alias} enabled to {enabled}")
+    alias.enabled = enabled
+
+    event = AliasStatusChanged(
+        alias_id=alias.id, alias_email=alias.email, enabled=enabled
+    )
+    EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
+
+    if commit:
+        Session.commit()
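Every call site that used to flip alias.enabled directly now goes through this helper, so the flag change and the AliasStatusChanged event cannot drift apart. A minimal usage sketch (the alias id is illustrative):

    from app import alias_utils
    from app.models import Alias

    alias = Alias.get(42)  # illustrative id
    # flips the flag, emits AliasStatusChanged, and commits in one call
    alias_utils.change_alias_status(alias, enabled=False, commit=True)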
@@ -25,6 +25,7 @@ from app.errors import (
     ErrAddressInvalid,
 )
 from app.extensions import limiter
+from app.log import LOG
 from app.models import Alias, Contact, Mailbox, AliasMailbox

@@ -184,7 +185,8 @@ def toggle_alias(alias_id):
     if not alias or alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

-    alias.enabled = not alias.enabled
+    alias_utils.change_alias_status(alias, enabled=not alias.enabled)
+    LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
     Session.commit()

     return jsonify(enabled=alias.enabled), 200
@@ -281,6 +281,7 @@ JOB_DELETE_MAILBOX = "delete-mailbox"
 JOB_DELETE_DOMAIN = "delete-domain"
 JOB_SEND_USER_REPORT = "send-user-report"
 JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
+JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"

 # for pagination
 PAGE_LIMIT = 20
@@ -429,7 +430,7 @@ except Exception:
 HIBP_SCAN_INTERVAL_DAYS = 7
 HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
 HIBP_MAX_ALIAS_CHECK = 10_000
-HIBP_RPM = 100
+HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
 HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")

 KEEP_OLD_DATA_DAYS = 30
@@ -581,3 +582,9 @@ UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
 UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)

 STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ
+
+EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
+
+# We want it disabled by default, so only skip if defined
+EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
+EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ
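These settings make the webhook sink opt-in per deployment; a sketch of the corresponding environment, with a placeholder URL:

    EVENT_WEBHOOK=https://partner.example.com/simplelogin/events
    # EVENT_WEBHOOK_SKIP_VERIFY_SSL=1   # only for self-signed test endpoints
    # EVENT_WEBHOOK_DISABLE=1           # kill switch: drop events entirely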
@@ -146,7 +146,7 @@ def index():
             alias_utils.delete_alias(alias, current_user)
             flash(f"Alias {email} has been deleted", "success")
         elif request.form.get("form-name") == "disable-alias":
-            alias.enabled = False
+            alias_utils.change_alias_status(alias, enabled=False)
             Session.commit()
             flash(f"Alias {alias.email} has been disabled", "success")

@@ -8,6 +8,7 @@ from app.db import Session
 from flask import redirect, url_for, flash, request, render_template
 from flask_login import login_required, current_user

+from app import alias_utils
 from app.dashboard.base import dashboard_bp
 from app.handler.unsubscribe_encoder import UnsubscribeAction
 from app.handler.unsubscribe_handler import UnsubscribeHandler
@@ -31,7 +32,7 @@ def unsubscribe(alias_id):

     # automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
     if request.method == "POST":
-        alias.enabled = False
+        alias_utils.change_alias_status(alias, False)
         flash(f"Alias {alias.email} has been blocked", "success")
         Session.commit()

app/app/events/__init__.py (new file, empty)

app/app/events/event_dispatcher.py (new file, 66 lines)
@@ -0,0 +1,66 @@
+from abc import ABC, abstractmethod
+from app import config
+from app.db import Session
+from app.errors import ProtonPartnerNotSetUp
+from app.events.generated import event_pb2
+from app.models import User, PartnerUser, SyncEvent
+from app.proton.utils import get_proton_partner
+from typing import Optional
+
+NOTIFICATION_CHANNEL = "simplelogin_sync_events"
+
+
+class Dispatcher(ABC):
+    @abstractmethod
+    def send(self, event: bytes):
+        pass
+
+
+class PostgresDispatcher(Dispatcher):
+    def send(self, event: bytes):
+        instance = SyncEvent.create(content=event, flush=True)
+        Session.execute(f"NOTIFY {NOTIFICATION_CHANNEL}, '{instance.id}';")
+
+    @staticmethod
+    def get():
+        return PostgresDispatcher()
+
+
+class EventDispatcher:
+    @staticmethod
+    def send_event(
+        user: User,
+        content: event_pb2.EventContent,
+        dispatcher: Dispatcher = PostgresDispatcher.get(),
+        skip_if_webhook_missing: bool = True,
+    ):
+        if config.EVENT_WEBHOOK_DISABLE:
+            return
+
+        if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
+            return
+
+        partner_user = EventDispatcher.__partner_user(user.id)
+        if not partner_user:
+            return
+
+        event = event_pb2.Event(
+            user_id=user.id,
+            external_user_id=partner_user.external_user_id,
+            partner_id=partner_user.partner_id,
+            content=content,
+        )
+
+        serialized = event.SerializeToString()
+        dispatcher.send(serialized)
+
+    @staticmethod
+    def __partner_user(user_id: int) -> Optional[PartnerUser]:
+        # Check if the current user has a partner_id
+        try:
+            proton_partner_id = get_proton_partner().id
+        except ProtonPartnerNotSetUp:
+            return None
+
+        # It has. Retrieve the information for the PartnerUser
+        return PartnerUser.get_by(user_id=user_id, partner_id=proton_partner_id)
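The Dispatcher abstraction keeps the transport pluggable: production uses PostgresDispatcher (insert a SyncEvent row, then NOTIFY listeners), while tests can capture payloads in memory, as the test files further down do. One subtlety: the PostgresDispatcher.get() default argument is evaluated once at import time, which is harmless only because the dispatcher is stateless. A minimal sketch of sending through a custom dispatcher, assuming user is a partner-linked User:

    from app.events.event_dispatcher import Dispatcher, EventDispatcher
    from app.events.generated.event_pb2 import EventContent, UserDeleted

    class InMemoryDispatcher(Dispatcher):
        def __init__(self):
            self.sent = []

        def send(self, event: bytes):
            # capture the serialized protobuf instead of writing to Postgres
            self.sent.append(event)

    dispatcher = InMemoryDispatcher()
    EventDispatcher.send_event(
        user,  # assumed: a User linked to the Proton partner
        EventContent(user_deleted=UserDeleted()),
        dispatcher,
        skip_if_webhook_missing=False,
    )
    assert len(dispatcher.sent) == 1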
app/app/events/generated/event_pb2.py (new file, 50 lines)
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# NO CHECKED-IN PROTOBUF GENCODE
+# source: event.proto
+# Protobuf Python Version: 5.27.0
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import runtime_version as _runtime_version
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+_runtime_version.ValidateProtobufRuntimeVersion(
+    _runtime_version.Domain.PUBLIC,
+    5,
+    27,
+    0,
+    '',
+    'event.proto'
+)
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
+if not _descriptor._USE_C_DESCRIPTORS:
+  DESCRIPTOR._loaded_options = None
+  _globals['_USERPLANCHANGED']._serialized_start=35
+  _globals['_USERPLANCHANGED']._serialized_end=75
+  _globals['_USERDELETED']._serialized_start=77
+  _globals['_USERDELETED']._serialized_end=90
+  _globals['_ALIASCREATED']._serialized_start=92
+  _globals['_ALIASCREATED']._serialized_end=182
+  _globals['_ALIASSTATUSCHANGED']._serialized_start=184
+  _globals['_ALIASSTATUSCHANGED']._serialized_end=260
+  _globals['_ALIASDELETED']._serialized_start=262
+  _globals['_ALIASDELETED']._serialized_end=315
+  _globals['_ALIASCREATEDLIST']._serialized_start=317
+  _globals['_ALIASCREATEDLIST']._serialized_end=385
+  _globals['_EVENTCONTENT']._serialized_start=388
+  _globals['_EVENTCONTENT']._serialized_end=791
+  _globals['_EVENT']._serialized_start=793
+  _globals['_EVENT']._serialized_end=914
+# @@protoc_insertion_point(module_scope)
app/app/events/generated/event_pb2.pyi (new file, 80 lines)
@@ -0,0 +1,80 @@
+from google.protobuf.internal import containers as _containers
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
+
+DESCRIPTOR: _descriptor.FileDescriptor
+
+class UserPlanChanged(_message.Message):
+    __slots__ = ("plan_end_time",)
+    PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
+    plan_end_time: int
+    def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
+
+class UserDeleted(_message.Message):
+    __slots__ = ()
+    def __init__(self) -> None: ...
+
+class AliasCreated(_message.Message):
+    __slots__ = ("alias_id", "alias_email", "alias_note", "enabled")
+    ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_NOTE_FIELD_NUMBER: _ClassVar[int]
+    ENABLED_FIELD_NUMBER: _ClassVar[int]
+    alias_id: int
+    alias_email: str
+    alias_note: str
+    enabled: bool
+    def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...
+
+class AliasStatusChanged(_message.Message):
+    __slots__ = ("alias_id", "alias_email", "enabled")
+    ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
+    ENABLED_FIELD_NUMBER: _ClassVar[int]
+    alias_id: int
+    alias_email: str
+    enabled: bool
+    def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., enabled: bool = ...) -> None: ...
+
+class AliasDeleted(_message.Message):
+    __slots__ = ("alias_id", "alias_email")
+    ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
+    alias_id: int
+    alias_email: str
+    def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...
+
+class AliasCreatedList(_message.Message):
+    __slots__ = ("events",)
+    EVENTS_FIELD_NUMBER: _ClassVar[int]
+    events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
+    def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
+
+class EventContent(_message.Message):
+    __slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
+    USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
+    USER_DELETED_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
+    ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
+    user_plan_change: UserPlanChanged
+    user_deleted: UserDeleted
+    alias_created: AliasCreated
+    alias_status_change: AliasStatusChanged
+    alias_deleted: AliasDeleted
+    alias_create_list: AliasCreatedList
+    def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...
+
+class Event(_message.Message):
+    __slots__ = ("user_id", "external_user_id", "partner_id", "content")
+    USER_ID_FIELD_NUMBER: _ClassVar[int]
+    EXTERNAL_USER_ID_FIELD_NUMBER: _ClassVar[int]
+    PARTNER_ID_FIELD_NUMBER: _ClassVar[int]
+    CONTENT_FIELD_NUMBER: _ClassVar[int]
+    user_id: int
+    external_user_id: str
+    partner_id: int
+    content: EventContent
+    def __init__(self, user_id: _Optional[int] = ..., external_user_id: _Optional[str] = ..., partner_id: _Optional[int] = ..., content: _Optional[_Union[EventContent, _Mapping]] = ...) -> None: ...
@@ -5,6 +5,7 @@ from typing import Optional
 from aiosmtpd.smtp import Envelope

 from app import config
+from app import alias_utils
 from app.db import Session
 from app.email import headers, status
 from app.email_utils import (
@@ -101,7 +102,8 @@ class UnsubscribeHandler:
             mailbox.email, alias
         ):
             return status.E509
-        alias.enabled = False
+        LOG.i(f"User disabled alias {alias} via unsubscribe header")
+        alias_utils.change_alias_status(alias, enabled=False)
         Session.commit()
         enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
         for mailbox in alias.mailboxes:
app/app/jobs/event_jobs.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+from app.events.event_dispatcher import EventDispatcher, Dispatcher
+from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
+from app.log import LOG
+from app.models import User, Alias
+
+
+def send_alias_creation_events_for_user(
+    user: User, dispatcher: Dispatcher, chunk_size=50
+):
+    if user.disabled:
+        LOG.i(f"User {user} is disabled. Skipping sending events for that user")
+        return
+    chunk_size = min(chunk_size, 50)
+    event_list = []
+    for alias in (
+        Alias.yield_per_query(chunk_size)
+        .filter_by(user_id=user.id)
+        .order_by(Alias.id.asc())
+    ):
+        event_list.append(
+            AliasCreated(
+                alias_id=alias.id,
+                alias_email=alias.email,
+                alias_note=alias.note,
+                enabled=alias.enabled,
+            )
+        )
+        if len(event_list) >= chunk_size:
+            EventDispatcher.send_event(
+                user,
+                EventContent(alias_create_list=AliasCreatedList(events=event_list)),
+                dispatcher=dispatcher,
+            )
+            event_list = []
+    if len(event_list) > 0:
+        EventDispatcher.send_event(
+            user,
+            EventContent(alias_create_list=AliasCreatedList(events=event_list)),
+            dispatcher=dispatcher,
+        )
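This backfill runs through the new JOB_SEND_ALIAS_CREATION_EVENTS job handled in the job-runner hunk further down. A sketch of how such a job would be enqueued, assuming the Job.create(name=..., payload=..., run_at=...) convention used by the other jobs in this codebase:

    import arrow

    from app import config
    from app.models import Job

    # schedule an alias-creation backfill for one user (user is assumed)
    Job.create(
        name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
        payload={"user_id": user.id},
        run_at=arrow.now(),
        commit=True,
    )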
@@ -657,6 +657,21 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

         return user

+    @classmethod
+    def delete(cls, obj_id, commit=False):
+        # Internal import to avoid global import cycles
+        from app.events.event_dispatcher import EventDispatcher
+        from app.events.generated.event_pb2 import UserDeleted, EventContent
+
+        user: User = cls.get(obj_id)
+        EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()))
+
+        res = super(User, cls).delete(obj_id)
+        if commit:
+            Session.commit()
+
+        return res
+
     def get_active_subscription(
         self, include_partner_subscription: bool = True
     ) -> Optional[
@@ -1619,6 +1634,18 @@ class Alias(Base, ModelMixin):
         Session.add(new_alias)
         DailyMetric.get_or_create_today_metric().nb_alias += 1

+        # Internal import to avoid global import cycles
+        from app.events.event_dispatcher import EventDispatcher
+        from app.events.generated.event_pb2 import AliasCreated, EventContent
+
+        event = AliasCreated(
+            alias_id=new_alias.id,
+            alias_email=new_alias.email,
+            alias_note=new_alias.note,
+            enabled=True,
+        )
+        EventDispatcher.send_event(user, EventContent(alias_created=event))
+
         if commit:
             Session.commit()

@@ -3648,3 +3675,52 @@ class ApiToCookieToken(Base, ModelMixin):
         code = secrets.token_urlsafe(32)

         return super().create(code=code, **kwargs)
+
+
+class SyncEvent(Base, ModelMixin):
+    """This model holds the events that need to be sent to the webhook"""
+
+    __tablename__ = "sync_event"
+    content = sa.Column(sa.LargeBinary, unique=False, nullable=False)
+    taken_time = sa.Column(
+        ArrowType, default=None, nullable=True, server_default=None, index=True
+    )
+
+    __table_args__ = (
+        sa.Index("ix_sync_event_created_at", "created_at"),
+        sa.Index("ix_sync_event_taken_time", "taken_time"),
+    )
+
+    def mark_as_taken(self) -> bool:
+        sql = """
+            UPDATE sync_event
+            SET taken_time = :taken_time
+            WHERE id = :sync_event_id
+              AND taken_time IS NULL
+        """
+        args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
+
+        res = Session.execute(sql, args)
+        Session.commit()
+
+        return res.rowcount > 0
+
+    @classmethod
+    def get_dead_letter(cls, older_than: Arrow) -> [SyncEvent]:
+        return (
+            SyncEvent.filter(
+                (
+                    (
+                        SyncEvent.taken_time.isnot(None)
+                        & (SyncEvent.taken_time < older_than)
+                    )
+                    | (
+                        SyncEvent.taken_time.is_(None)
+                        & (SyncEvent.created_at < older_than)
+                    )
+                )
+            )
+            .order_by(SyncEvent.id)
+            .limit(100)
+            .all()
+        )
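mark_as_taken is what lets several listener processes share one queue safely: the UPDATE only matches while taken_time IS NULL, so of any number of concurrent callers exactly one sees rowcount > 0 and wins the claim. A sketch of the resulting contract (the event id and the process function are illustrative):

    event = SyncEvent.get(123)  # illustrative id
    if event.mark_as_taken():
        process(event)  # process() is hypothetical; this runner won the claim
    else:
        pass  # another runner already took this event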
@@ -2,6 +2,8 @@ import requests
 from requests import RequestException

 from app import config
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import EventContent, UserPlanChanged
 from app.log import LOG
 from app.models import User

@@ -31,3 +33,6 @@ def execute_subscription_webhook(user: User):
         )
     except RequestException as e:
         LOG.error(f"Subscription request exception: {e}")
+
+    event = UserPlanChanged(plan_end_time=sl_subscription_end)
+    EventDispatcher.send_event(user, EventContent(user_plan_change=event))
@@ -53,7 +53,7 @@ from flanker.addresslib.address import EmailAddress
 from sqlalchemy.exc import IntegrityError

 from app import pgp_utils, s3, config
-from app.alias_utils import try_auto_create
+from app.alias_utils import try_auto_create, change_alias_status
 from app.config import (
     EMAIL_DOMAIN,
     URL,
@@ -1585,7 +1585,7 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
         LOG.w(
             f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
         )
-        alias.enabled = False
+        change_alias_status(alias, enabled=False)

         Notification.create(
             user_id=user.id,
app/event_listener.py (new file, 64 lines)
@@ -0,0 +1,64 @@
+import argparse
+from enum import Enum
+from sys import argv, exit
+
+from app.config import DB_URI
+from app.log import LOG
+from events.runner import Runner
+from events.event_source import DeadLetterEventSource, PostgresEventSource
+from events.event_sink import ConsoleEventSink, HttpEventSink
+
+
+class Mode(Enum):
+    DEAD_LETTER = "dead_letter"
+    LISTENER = "listener"
+
+    @staticmethod
+    def from_str(value: str):
+        if value == Mode.DEAD_LETTER.value:
+            return Mode.DEAD_LETTER
+        elif value == Mode.LISTENER.value:
+            return Mode.LISTENER
+        else:
+            raise ValueError(f"Invalid mode: {value}")
+
+
+def main(mode: Mode, dry_run: bool):
+    if mode == Mode.DEAD_LETTER:
+        LOG.i("Using DeadLetterEventSource")
+        source = DeadLetterEventSource()
+    elif mode == Mode.LISTENER:
+        LOG.i("Using PostgresEventSource")
+        source = PostgresEventSource(DB_URI)
+    else:
+        raise ValueError(f"Invalid mode: {mode}")
+
+    if dry_run:
+        LOG.i("Starting with ConsoleEventSink")
+        sink = ConsoleEventSink()
+    else:
+        LOG.i("Starting with HttpEventSink")
+        sink = HttpEventSink()
+
+    runner = Runner(source=source, sink=sink)
+    runner.run()
+
+
+def args():
+    parser = argparse.ArgumentParser(description="Run event listener")
+    parser.add_argument(
+        "mode",
+        help="Mode to run",
+        choices=[Mode.DEAD_LETTER.value, Mode.LISTENER.value],
+    )
+    parser.add_argument("--dry-run", help="Dry run mode", action="store_true")
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    if len(argv) < 2:
+        print("Invalid usage. Pass 'listener' or 'dead_letter' as argument")
+        exit(1)
+
+    args = args()
+    main(Mode.from_str(args.mode), args.dry_run)
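The entrypoint gives two long-running modes plus a dry-run flag; invocations would look like:

    # follow Postgres NOTIFY and POST each event to EVENT_WEBHOOK
    python event_listener.py listener

    # re-deliver events stuck past the dead-letter threshold,
    # printing them instead of POSTing (dry run)
    python event_listener.py dead_letter --dry-run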
app/events/__init__.py (new file, empty)

app/events/event_sink.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+import requests
+
+from abc import ABC, abstractmethod
+from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
+from app.log import LOG
+from app.models import SyncEvent
+
+
+class EventSink(ABC):
+    @abstractmethod
+    def process(self, event: SyncEvent) -> bool:
+        pass
+
+
+class HttpEventSink(EventSink):
+    def process(self, event: SyncEvent) -> bool:
+        if not EVENT_WEBHOOK:
+            LOG.warning("Skipping sending event because there is no webhook configured")
+            return False
+
+        LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")
+
+        res = requests.post(
+            url=EVENT_WEBHOOK,
+            data=event.content,
+            headers={"Content-Type": "application/x-protobuf"},
+            verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
+        )
+        if res.status_code != 200:
+            LOG.warning(
+                f"Failed to send event to webhook: {res.status_code} {res.text}"
+            )
+            return False
+        else:
+            LOG.info(f"Event {event.id} sent successfully to webhook")
+            return True
+
+
+class ConsoleEventSink(EventSink):
+    def process(self, event: SyncEvent) -> bool:
+        LOG.info(f"Handling event {event.id}")
+        return True
app/events/event_source.py (new file, 100 lines)
@@ -0,0 +1,100 @@
+import arrow
+import newrelic.agent
+import psycopg2
+import select
+
+from abc import ABC, abstractmethod
+from app.log import LOG
+from app.models import SyncEvent
+from app.events.event_dispatcher import NOTIFICATION_CHANNEL
+from time import sleep
+from typing import Callable, NoReturn
+
+_DEAD_LETTER_THRESHOLD_MINUTES = 10
+_DEAD_LETTER_INTERVAL_SECONDS = 30
+
+_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5
+
+
+class EventSource(ABC):
+    @abstractmethod
+    def run(self, on_event: Callable[[SyncEvent], NoReturn]):
+        pass
+
+
+class PostgresEventSource(EventSource):
+    def __init__(self, connection_string: str):
+        self.__connection_string = connection_string
+        self.__connect()
+
+    def run(self, on_event: Callable[[SyncEvent], NoReturn]):
+        while True:
+            try:
+                self.__listen(on_event)
+            except Exception as e:
+                LOG.warn(f"Error listening to events: {e}")
+                sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
+                self.__connect()
+
+    def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
+        self.__connection.set_isolation_level(
+            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
+        )
+
+        cursor = self.__connection.cursor()
+        cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")
+
+        while True:
+            if select.select([self.__connection], [], [], 5) != ([], [], []):
+                self.__connection.poll()
+                while self.__connection.notifies:
+                    notify = self.__connection.notifies.pop(0)
+                    LOG.debug(
+                        f"Got NOTIFY: pid={notify.pid} channel={notify.channel} payload={notify.payload}"
+                    )
+                    try:
+                        webhook_id = int(notify.payload)
+                        event = SyncEvent.get_by(id=webhook_id)
+                        if event is not None:
+                            if event.mark_as_taken():
+                                on_event(event)
+                            else:
+                                LOG.info(
+                                    f"Event {event.id} was handled by another runner"
+                                )
+                        else:
+                            LOG.info(f"Could not find event with id={notify.payload}")
+                    except Exception as e:
+                        LOG.warn(f"Error getting event: {e}")
+
+    def __connect(self):
+        self.__connection = psycopg2.connect(self.__connection_string)
+
+        from app.db import Session
+
+        Session.close()
+
+
+class DeadLetterEventSource(EventSource):
+    @newrelic.agent.background_task()
+    def run(self, on_event: Callable[[SyncEvent], NoReturn]):
+        while True:
+            try:
+                threshold = arrow.utcnow().shift(
+                    minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
+                )
+                events = SyncEvent.get_dead_letter(older_than=threshold)
+                if events:
+                    LOG.info(f"Got {len(events)} dead letter events")
+                    if events:
+                        newrelic.agent.record_custom_metric(
+                            "Custom/dead_letter_events_to_process", len(events)
+                        )
+                    for event in events:
+                        on_event(event)
+                else:
+                    LOG.debug("No dead letter events")
+                    sleep(_DEAD_LETTER_INTERVAL_SECONDS)
+            except Exception as e:
+                LOG.warn(f"Error getting dead letter event: {e}")
+                sleep(_DEAD_LETTER_INTERVAL_SECONDS)
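Since this is plain LISTEN/NOTIFY, a delivery can also be exercised by hand from psql once a sync_event row exists (the id is illustrative):

    -- wake any listener on the channel and hand it event id 42
    NOTIFY simplelogin_sync_events, '42';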
app/events/runner.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+import arrow
+import newrelic.agent
+
+from app.log import LOG
+from app.models import SyncEvent
+from events.event_sink import EventSink
+from events.event_source import EventSource
+
+
+class Runner:
+    def __init__(self, source: EventSource, sink: EventSink):
+        self.__source = source
+        self.__sink = sink
+
+    def run(self):
+        self.__source.run(self.__on_event)
+
+    @newrelic.agent.background_task()
+    def __on_event(self, event: SyncEvent):
+        try:
+            event_created_at = event.created_at
+            start_time = arrow.now()
+            success = self.__sink.process(event)
+            if success:
+                event_id = event.id
+                SyncEvent.delete(event.id, commit=True)
+                LOG.info(f"Marked {event_id} as done")
+
+                end_time = arrow.now() - start_time
+                time_between_taken_and_created = start_time - event_created_at
+
+                newrelic.agent.record_custom_metric("Custom/sync_event_processed", 1)
+                newrelic.agent.record_custom_metric(
+                    "Custom/sync_event_process_time", end_time.total_seconds()
+                )
+                newrelic.agent.record_custom_metric(
+                    "Custom/sync_event_elapsed_time",
+                    time_between_taken_and_created.total_seconds(),
+                )
+        except Exception as e:
+            LOG.warn(f"Exception processing event [id={event.id}]: {e}")
+            newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)
@@ -15,6 +15,7 @@ from app.email_utils import (
     render,
 )
 from app.import_utils import handle_batch_import
+from app.jobs.event_jobs import send_alias_creation_events_for_user
 from app.jobs.export_user_data_job import ExportUserDataJob
 from app.log import LOG
 from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
@@ -197,13 +198,18 @@ def process_job(job: Job):
         onboarding_mailbox(user)
     elif job.name == config.JOB_ONBOARDING_4:
         user_id = job.payload.get("user_id")
-        user = User.get(user_id)
+        user: User = User.get(user_id)

         # user might delete their account in the meantime
         # or disable the notification
         if user and user.notification and user.activated:
-            LOG.d("send onboarding pgp email to user %s", user)
-            onboarding_pgp(user)
+            # if user only has 1 mailbox which is Proton then do not send PGP onboarding email
+            mailboxes = user.mailboxes()
+            if len(mailboxes) == 1 and mailboxes[0].is_proton():
+                LOG.d("Do not send onboarding PGP email to Proton mailbox")
+            else:
+                LOG.d("send onboarding pgp email to user %s", user)
+                onboarding_pgp(user)

     elif job.name == config.JOB_BATCH_IMPORT:
         batch_import_id = job.payload.get("batch_import_id")
@@ -264,8 +270,14 @@ SimpleLogin team.
         user_id = job.payload.get("user_id")
         user = User.get(user_id)
         if user and user.activated:
-            LOG.d("send proton welcome email to user %s", user)
+            LOG.d("Send proton welcome email to user %s", user)
             welcome_proton(user)
+    elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
+        user_id = job.payload.get("user_id")
+        user = User.get(user_id)
+        if user and user.activated:
+            LOG.d(f"Sending alias creation events for {user}")
+            send_alias_creation_events_for_user(user)
     else:
         LOG.e("Unknown job name %s", job.name)

app/migrations/versions/2024_051713_06a9a7133445_.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+"""Create sync_event table
+
+Revision ID: 06a9a7133445
+Revises: fa2f19bb4e5a
+Create Date: 2024-05-17 13:11:20.402259
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '06a9a7133445'
+down_revision = 'fa2f19bb4e5a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('sync_event',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
+        sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
+        sa.Column('content', sa.LargeBinary(), nullable=False),
+        sa.Column('taken_time', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_sync_event_created_at'), 'sync_event', ['created_at'], unique=False)
+    op.create_index(op.f('ix_sync_event_taken_time'), 'sync_event', ['taken_time'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('sync_event')
+    # ### end Alembic commands ###
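The migration chains onto revision fa2f19bb4e5a and is applied with the usual Alembic flow:

    alembic upgrade head    # creates sync_event and its two indexes
    alembic downgrade -1    # drops the table again if needed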
@@ -4,6 +4,7 @@ import subprocess
 from time import sleep
 from typing import List, Dict

+import arrow
 import newrelic.agent

 from app.db import Session
@@ -93,11 +94,44 @@ def log_nb_db_connection():
     newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)


+@newrelic.agent.background_task()
+def log_pending_to_process_events():
+    r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
+    events_pending = list(r)[0][0]
+
+    LOG.d("number of events pending to process %s", events_pending)
+    newrelic.agent.record_custom_metric(
+        "Custom/sync_events_pending_to_process", events_pending
+    )
+
+
+@newrelic.agent.background_task()
+def log_events_pending_dead_letter():
+    since = arrow.now().shift(minutes=-10).datetime
+    r = Session.execute(
+        """
+        SELECT COUNT(*)
+        FROM sync_event
+        WHERE (taken_time IS NOT NULL AND taken_time < :since)
+           OR (taken_time IS NULL AND created_at < :since)
+        """,
+        {"since": since},
+    )
+    events_pending = list(r)[0][0]
+
+    LOG.d("number of events pending dead letter %s", events_pending)
+    newrelic.agent.record_custom_metric(
+        "Custom/sync_events_pending_dead_letter", events_pending
+    )
+
+
 if __name__ == "__main__":
     exporter = MetricExporter(get_newrelic_license())
     while True:
         log_postfix_metrics()
         log_nb_db_connection()
+        log_pending_to_process_events()
+        log_events_pending_dead_letter()
         Session.close()

         exporter.run()
app/poetry.lock (generated, 28 lines changed)
@@ -2150,24 +2150,22 @@ wcwidth = "*"

 [[package]]
 name = "protobuf"
-version = "4.24.3"
+version = "5.27.1"
 description = ""
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
-    {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
-    {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
-    {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
-    {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
-    {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
-    {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
-    {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
-    {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
-    {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
-    {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
-    {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
-    {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
+    {file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"},
+    {file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"},
+    {file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"},
+    {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"},
+    {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"},
+    {file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"},
+    {file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"},
+    {file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"},
+    {file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"},
+    {file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"},
+    {file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"},
 ]

 [[package]]
app/proto/event.proto (new file, 50 lines)
@@ -0,0 +1,50 @@
+syntax = "proto3";
+
+package simplelogin_events;
+
+message UserPlanChanged {
+  uint32 plan_end_time = 1;
+}
+
+message UserDeleted {
+}
+
+message AliasCreated {
+  uint32 alias_id = 1;
+  string alias_email = 2;
+  string alias_note = 3;
+  bool enabled = 4;
+}
+
+message AliasStatusChanged {
+  uint32 alias_id = 1;
+  string alias_email = 2;
+  bool enabled = 3;
+}
+
+message AliasDeleted {
+  uint32 alias_id = 1;
+  string alias_email = 2;
+}
+
+message AliasCreatedList {
+  repeated AliasCreated events = 1;
+}
+
+message EventContent {
+  oneof content {
+    UserPlanChanged user_plan_change = 1;
+    UserDeleted user_deleted = 2;
+    AliasCreated alias_created = 3;
+    AliasStatusChanged alias_status_change = 4;
+    AliasDeleted alias_deleted = 5;
+    AliasCreatedList alias_create_list = 6;
+  }
+}
+
+message Event {
+  uint32 user_id = 1;
+  string external_user_id = 2;
+  uint32 partner_id = 3;
+  EventContent content = 4;
+}
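A webhook consumer regenerates stubs from this same .proto and decodes the raw request body; a minimal sketch of the receiving side (the handler name is illustrative):

    from event_pb2 import Event  # generated from event.proto

    def handle_webhook(body: bytes):  # illustrative handler
        event = Event.FromString(body)
        kind = event.content.WhichOneof("content")
        if kind == "alias_created":
            created = event.content.alias_created
            print(event.external_user_id, created.alias_email, created.enabled)
        elif kind == "user_deleted":
            print("user deleted:", event.external_user_id)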
@@ -14,13 +14,14 @@ exclude = '''
     | build
     | dist
     | migrations # migrations/ is generated by alembic
+    | app/events/generated
 )/
 )
 '''

 [tool.ruff]
 ignore-init-module-imports = true
-exclude = [".venv", "migrations"]
+exclude = [".venv", "migrations", "app/events/generated"]

 [tool.djlint]
 indent = 2
app/scripts/generate-proto-files.sh (new executable file, 24 lines)
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+set -euxo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "$0")" || exit 1; pwd -P)"
+REPO_ROOT=$(echo "${SCRIPT_DIR}" | sed 's:scripts::g')
+
+DEST_DIR="${REPO_ROOT}/app/events/generated"
+
+PROTOC=${PROTOC:-"protoc"}
+
+if ! eval "${PROTOC} --version" &> /dev/null ; then
+    echo "Cannot find $PROTOC"
+    exit 1
+fi
+
+rm -rf "${DEST_DIR}"
+mkdir -p "${DEST_DIR}"
+
+pushd $REPO_ROOT || exit 1
+
+eval "${PROTOC} --proto_path=proto --python_out=\"${DEST_DIR}\" --pyi_out=\"${DEST_DIR}\" proto/event.proto"
+
+popd || exit 1
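The script rebuilds both the module and its .pyi stub under app/events/generated/; a protoc matching the pinned runtime (5.27.x here) must be on PATH or passed explicitly:

    # use a specific protoc binary instead of the one on PATH
    PROTOC=/usr/local/bin/protoc ./scripts/generate-proto-files.sh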
@@ -48,15 +48,16 @@
 {# SIWSL#}
 {# </a>#}
 {# </li>#}
-{# {% if current_user.should_show_app_page() %}#}
-{# <li class="nav-item">#}
-{# <a href="{{ url_for('dashboard.app_route') }}"#}
-{# class="nav-link {{ 'active' if active_page == 'app' }}">#}
-{# <i class="fe fe-grid"></i>#}
-{# Apps#}
-{# </a>#}
-{# </li>#}
-{# {% endif %}#}
+{% if current_user.should_show_app_page() %}
+
+  <li class="nav-item">
+    <a href="{{ url_for('dashboard.app_route') }}"
+       class="nav-link {{ 'active' if active_page == 'app' }}">
+      <i class="fe fe-grid"></i>
+      Apps
+    </a>
+  </li>
+{% endif %}
 <li class="nav-item">
   <a href="{{ url_for('dashboard.setting') }}"
      class="nav-link {{ 'active' if active_page == 'setting' }}">
app/tests/events/__init__.py (new file, empty)

app/tests/events/test_event_dispatcher.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+from app.events.event_dispatcher import EventDispatcher, Dispatcher
+from app.events.generated.event_pb2 import EventContent, UserDeleted
+from app.models import PartnerUser, User
+from app.proton.utils import get_proton_partner
+from tests.utils import create_new_user, random_token
+from typing import Tuple
+
+
+class OnMemoryDispatcher(Dispatcher):
+    def __init__(self):
+        self.memory = []
+
+    def send(self, event: bytes):
+        self.memory.append(event)
+
+
+def _create_unlinked_user() -> User:
+    return create_new_user()
+
+
+def _create_linked_user() -> Tuple[User, PartnerUser]:
+    user = _create_unlinked_user()
+    partner_user = PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        user_id=user.id,
+        external_user_id=random_token(10),
+        flush=True,
+    )
+
+    return user, partner_user
+
+
+def test_event_dispatcher_stores_events():
+    dispatcher = OnMemoryDispatcher()
+
+    (user, partner) = _create_linked_user()
+    content = EventContent(user_deleted=UserDeleted())
+    EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
+    assert len(dispatcher.memory) == 1
+
+    content = EventContent(user_deleted=UserDeleted())
+    EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
+    assert len(dispatcher.memory) == 2
+
+
+def test_event_dispatcher_does_not_send_event_if_user_not_linked():
+    dispatcher = OnMemoryDispatcher()
+
+    user = _create_unlinked_user()
+    content = EventContent(user_deleted=UserDeleted())
+    EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
+    assert len(dispatcher.memory) == 0
+
+    content = EventContent(user_deleted=UserDeleted())
+    EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
+    assert len(dispatcher.memory) == 0
app/tests/jobs/test_send_alias_creation_events.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+from app import config
+from app.db import Session
+from app.events.event_dispatcher import Dispatcher
+from app.events.generated import event_pb2
+from app.jobs.event_jobs import send_alias_creation_events_for_user
+from app.models import Alias
+from tests.utils import create_partner_linked_user
+
+
+class MemStoreDispatcher(Dispatcher):
+    def __init__(self):
+        self.events = []
+
+    def send(self, event: bytes):
+        self.events.append(event)
+
+
+def setup_module():
+    config.EVENT_WEBHOOK = True
+
+
+def teardown_module():
+    config.EVENT_WEBHOOK = False
+
+
+def test_send_alias_creation_events():
+    [user, partner_user] = create_partner_linked_user()
+    aliases = [Alias.create_new_random(user) for i in range(2)]
+    Session.flush()
+    dispatcher = MemStoreDispatcher()
+    send_alias_creation_events_for_user(user, dispatcher=dispatcher, chunk_size=2)
+    # 2 batches. 1st newsletter + first alias. 2nd last alias
+    assert len(dispatcher.events) == 2
+    decoded_event = event_pb2.Event.FromString(dispatcher.events[0])
+    assert decoded_event.user_id == user.id
+    assert decoded_event.external_user_id == partner_user.external_user_id
+    event_list = decoded_event.content.alias_create_list.events
+    assert len(event_list) == 2
+    # 0 is newsletter alias
+    assert event_list[1].alias_id == aliases[0].id
+    decoded_event = event_pb2.Event.FromString(dispatcher.events[1])
+    assert decoded_event.user_id == user.id
+    assert decoded_event.external_user_id == partner_user.external_user_id
+    event_list = decoded_event.content.alias_create_list.events
+    assert len(event_list) == 1
+    assert event_list[0].alias_id == aliases[1].id
@@ -17,6 +17,7 @@ from app.models import (
     Subscription,
     PlanEnum,
     PADDLE_SUBSCRIPTION_GRACE_DAYS,
+    SyncEvent,
 )
 from tests.utils import login, create_new_user, random_token

@@ -325,3 +326,51 @@ def test_user_can_send_receive():
     user.disabled = False
     user.delete_on = arrow.now()
     assert not user.can_send_or_receive()
+
+
+def test_sync_event_dead_letter():
+    # remove all SyncEvents before the test
+    all_events = SyncEvent.all()
+    for event in all_events:
+        SyncEvent.delete(event.id, commit=True)
+
+    # create an expired not taken event
+    e1 = SyncEvent.create(
+        content=b"content",
+        created_at=arrow.now().shift(minutes=-15),
+        taken_time=None,
+        commit=True,
+    )
+
+    # create an expired taken event (but too long ago)
+    e2 = SyncEvent.create(
+        content=b"content",
+        created_at=arrow.now().shift(minutes=-15),
+        taken_time=arrow.now().shift(minutes=-14),
+        commit=True,
+    )
+
+    # create an expired taken event (but recently)
+    e3 = SyncEvent.create(
+        content=b"content",
+        created_at=arrow.now().shift(minutes=-15),
+        taken_time=arrow.now().shift(minutes=-1),
+        commit=True,
+    )
+
+    # create a normal event
+    e4 = SyncEvent.create(
+        content=b"content",
+        created_at=arrow.now(),
+        commit=True,
+    )
+
+    # get dead letter events
+    dead_letter_events = SyncEvent.get_dead_letter(
+        older_than=arrow.now().shift(minutes=-10)
+    )
+    assert len(dead_letter_events) == 2
+    assert e1 in dead_letter_events
+    assert e2 in dead_letter_events
+    assert e3 not in dead_letter_events
+    assert e4 not in dead_letter_events
@@ -9,7 +9,8 @@ from typing import Optional, Dict
 import jinja2
 from flask import url_for

-from app.models import User
+from app.models import User, PartnerUser
+from app.proton.utils import get_proton_partner
 from app.utils import random_string


@@ -30,6 +31,18 @@ def create_new_user(email: Optional[str] = None, name: Optional[str] = None) ->
     return user


+def create_partner_linked_user() -> tuple[User, PartnerUser]:
+    user = create_new_user()
+    partner_user = PartnerUser.create(
+        partner_id=get_proton_partner().id,
+        user_id=user.id,
+        external_user_id=random_token(10),
+        flush=True,
+    )
+
+    return user, partner_user
+
+
 def login(flask_client, user: Optional[User] = None) -> User:
     if not user:
         user = create_new_user()