Compare commits
14 Commits
01dba12ed0
c872d43c3d
3e6867bc17
a829074584
25834e8f61
a62b43b7c4
44fda2d94e
bc48198bb1
da6e56c4eb
798b58529c
3da6c983e1
294232a329
fae9d7bc17
d666f5af3f
app/.github/workflows/main.yml (vendored, 2 changes)
@@ -163,7 +163,7 @@ jobs:
       uses: docker/build-push-action@v3
       with:
         context: .
-        platforms: linux/amd64,linux/arm64
+        platforms: linux/amd64
         push: true
         tags: ${{ steps.meta.outputs.tags }}
@@ -84,7 +84,7 @@ For email gurus, we have chosen 1024 key length instead of 2048 for DNS simplici

 ### DNS

-Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to to force using absolute domain.
+Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to force using absolute domain.


 #### MX record
@@ -7,8 +7,4 @@ If you want be up to date on security patches, make sure your SimpleLogin image

 ## Reporting a Vulnerability

-If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
-We will review the potential threat and fix it as fast as we can.
-
-We are incredibly thankful for people who disclose vulnerabilities, unfortunately we do not have a bounty program in place yet.
-
+If you want to report a vulnerability, please take a look at our bug bounty program at https://proton.me/security/bug-bounty.
@@ -3,12 +3,17 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Optional

+import arrow
 from arrow import Arrow
 from newrelic import agent
+from psycopg2.errors import UniqueViolation
 from sqlalchemy import or_

 from app.db import Session
 from app.email_utils import send_welcome_email
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import UserPlanChanged, EventContent
+from app.partner_user_utils import create_partner_user, create_partner_subscription
 from app.utils import sanitize_email, canonicalize_email
 from app.errors import (
     AccountAlreadyLinkedToAnotherPartnerException,
@@ -23,6 +28,7 @@ from app.models import (
     User,
     Alias,
 )
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import random_string


@@ -52,6 +58,21 @@ class LinkResult:
     strategy: str


+def send_user_plan_changed_event(partner_user: PartnerUser) -> Optional[int]:
+    subscription_end = partner_user.user.get_active_subscription_end(
+        include_partner_subscription=False
+    )
+    end_timestamp = None
+    if partner_user.user.lifetime:
+        end_timestamp = arrow.get("2038-01-01").timestamp
+    elif subscription_end:
+        end_timestamp = subscription_end.timestamp
+    event = UserPlanChanged(plan_end_time=end_timestamp)
+    EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
+    Session.flush()
+    return end_timestamp
+
+
 def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
     sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
     if plan.type == SLPlanType.Free:
@@ -66,9 +87,10 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
         LOG.i(
             f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
         )
-        PartnerSubscription.create(
-            partner_user_id=partner_user.id,
-            end_at=plan.expiration,
+        create_partner_subscription(
+            partner_user=partner_user,
+            expiration=plan.expiration,
+            msg="Upgraded via partner. User did not have a previous partner subscription",
         )
         agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
     else:
@@ -80,6 +102,13 @@ def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
                 "PlanChange", {"plan": "premium", "type": "extension"}
             )
         sub.end_at = plan.expiration
+        emit_user_audit_log(
+            user=partner_user.user,
+            action=UserAuditLogAction.SubscriptionExtended,
+            message="Extended partner subscription",
+        )
+        Session.flush()
+        send_user_plan_changed_event(partner_user)
     Session.commit()


@@ -98,12 +127,13 @@ def ensure_partner_user_exists_for_user(
     if res and res.partner_id != partner.id:
         raise AccountAlreadyLinkedToAnotherPartnerException()
     if not res:
-        res = PartnerUser.create(
-            user_id=sl_user.id,
+        res = create_partner_user(
+            user=sl_user,
             partner_id=partner.id,
             partner_email=link_request.email,
             external_user_id=link_request.external_user_id,
         )
+
         Session.commit()
         LOG.i(
             f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
@@ -131,17 +161,57 @@ class ClientMergeStrategy(ABC):

 class NewUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-        # Will create a new SL User with a random password
         canonical_email = canonicalize_email(self.link_request.email)
-        new_user = User.create(
-            email=canonical_email,
-            name=self.link_request.name,
-            password=random_string(20),
-            activated=True,
-            from_partner=self.link_request.from_partner,
+        try:
+            # Will create a new SL User with a random password
+            new_user = User.create(
+                email=canonical_email,
+                name=self.link_request.name,
+                password=random_string(20),
+                activated=True,
+                from_partner=self.link_request.from_partner,
+            )
+            self.create_partner_user(new_user)
+            Session.commit()
+
+            if not new_user.created_by_partner:
+                send_welcome_email(new_user)
+
+            agent.record_custom_event(
+                "PartnerUserCreation", {"partner": self.partner.name}
+            )
+
+            return LinkResult(
+                user=new_user,
+                strategy=self.__class__.__name__,
+            )
+        except UniqueViolation:
+            return self.create_missing_link(canonical_email)
+
+    def create_missing_link(self, canonical_email: str):
+        # If there's a unique key violation due to race conditions try to create only the partner if needed
+        partner_user = PartnerUser.get_by(
+            external_user_id=self.link_request.external_user_id,
+            partner_id=self.partner.id,
         )
-        partner_user = PartnerUser.create(
-            user_id=new_user.id,
+        if partner_user is None:
+            # Get the user by canonical email and if not by normal email
+            user = User.get_by(email=canonical_email) or User.get_by(
+                email=self.link_request.email
+            )
+            if not user:
+                raise RuntimeError(
+                    "Tried to create only partner on UniqueViolation but cannot find the user"
+                )
+            partner_user = self.create_partner_user(user)
+            Session.commit()
+        return LinkResult(
+            user=partner_user.user, strategy=ExistingUnlinkedUserStrategy.__name__
+        )
+
+    def create_partner_user(self, new_user: User):
+        partner_user = create_partner_user(
+            user=new_user,
             partner_id=self.partner.id,
             external_user_id=self.link_request.external_user_id,
             partner_email=self.link_request.email,
@@ -153,17 +223,7 @@ class NewUserStrategy(ClientMergeStrategy):
             partner_user,
             self.link_request.plan,
         )
-        Session.commit()
-
-        if not new_user.created_by_partner:
-            send_welcome_email(new_user)
-
-        agent.record_custom_event("PartnerUserCreation", {"partner": self.partner.name})
-
-        return LinkResult(
-            user=new_user,
-            strategy=self.__class__.__name__,
-        )
+        return partner_user


 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
@@ -200,7 +260,7 @@ def get_login_strategy(
     return ExistingUnlinkedUserStrategy(link_request, user, partner)


-def check_alias(email: str) -> bool:
+def check_alias(email: str):
     alias = Alias.get_by(email=email)
     if alias is not None:
         raise AccountIsUsingAliasAsEmail()
@@ -275,10 +335,26 @@ def switch_already_linked_user(
     LOG.i(
         f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
     )
+
+    emit_user_audit_log(
+        user=other_partner_user.user,
+        action=UserAuditLogAction.UnlinkAccount,
+        message=f"Deleting partner_user {other_partner_user.id} (external_user_id={other_partner_user.external_user_id} | partner_email={other_partner_user.partner_email}) from user {current_user.id}, as we received a new link request for the same partner",
+    )
     PartnerUser.delete(other_partner_user.id)
     LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
     # Link this partner_user to the current user
+    emit_user_audit_log(
+        user=partner_user.user,
+        action=UserAuditLogAction.UnlinkAccount,
+        message=f"Unlinking from partner, as user will now be tied to another external account. old=(id={partner_user.user.id} | email={partner_user.user.email}) | new=(id={current_user.id} | email={current_user.email})",
+    )
     partner_user.user_id = current_user.id
+    emit_user_audit_log(
+        user=current_user,
+        action=UserAuditLogAction.LinkAccount,
+        message=f"Linking user {current_user.id} ({current_user.email}) to partner_user:{partner_user.id} (external_user_id={partner_user.external_user_id} | partner_email={partner_user.partner_email})",
+    )
     # Set plan
     set_plan_for_partner_user(partner_user, link_request.plan)
     Session.commit()
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import Optional
+from typing import Optional, List

 import arrow
 import sqlalchemy
@@ -16,6 +16,8 @@ from flask_admin.contrib import sqla
 from flask_login import current_user

 from app.db import Session
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import EventContent, UserPlanChanged
 from app.models import (
     User,
     ManualSubscription,
@@ -34,8 +36,12 @@ from app.models import (
     DeletedAlias,
     DomainDeletedAlias,
     PartnerUser,
+    AliasMailbox,
+    AliasAuditLog,
+    UserAuditLog,
 )
 from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 def _admin_action_formatter(view, context, model, name):
@@ -112,7 +118,7 @@ class SLAdminIndexView(AdminIndexView):
         if not current_user.is_authenticated or not current_user.is_admin:
             return redirect(url_for("auth.login", next=request.url))

-        return redirect("/admin/user")
+        return redirect("/admin/email_search")


 class UserAdmin(SLModelView):
@@ -348,17 +354,42 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
                 manual_sub.end_at = manual_sub.end_at.shift(years=1)
             else:
                 manual_sub.end_at = arrow.now().shift(years=1, days=1)
+            emit_user_audit_log(
+                user=user,
+                action=UserAuditLogAction.Upgrade,
+                message=f"Admin {current_user.email} extended manual subscription to user {user.email}",
+            )
+            EventDispatcher.send_event(
+                user=user,
+                content=EventContent(
+                    user_plan_change=UserPlanChanged(
+                        plan_end_time=manual_sub.end_at.timestamp
+                    )
+                ),
+            )
             flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
-            continue
+        else:
+            emit_user_audit_log(
+                user=user,
+                action=UserAuditLogAction.Upgrade,
+                message=f"Admin {current_user.email} created manual subscription to user {user.email}",
+            )
+            manual_sub = ManualSubscription.create(
+                user_id=user.id,
+                end_at=arrow.now().shift(years=1, days=1),
+                comment=way,
+                is_giveaway=is_giveaway,
+            )
+            EventDispatcher.send_event(
+                user=user,
+                content=EventContent(
+                    user_plan_change=UserPlanChanged(
+                        plan_end_time=manual_sub.end_at.timestamp
+                    )
+                ),
+            )

-        ManualSubscription.create(
-            user_id=user.id,
-            end_at=arrow.now().shift(years=1, days=1),
-            comment=way,
-            is_giveaway=is_giveaway,
-        )
-
-        flash(f"New {way} manual subscription for {user} is created", "success")
+            flash(f"New {way} manual subscription for {user} is created", "success")
     Session.commit()


@@ -450,14 +481,7 @@ class ManualSubscriptionAdmin(SLModelView):
         "Extend 1 year more?",
     )
     def extend_1y(self, ids):
-        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
-            ms.end_at = ms.end_at.shift(years=1)
-            flash(f"Extend subscription for 1 year for {ms.user}", "success")
-            AdminAuditLog.extend_subscription(
-                current_user.id, ms.user.id, ms.end_at, "1 year"
-            )
-
-        Session.commit()
+        self.__extend_manual_subscription(ids, msg="1 year", years=1)

     @action(
         "extend_1m",
@@ -465,11 +489,26 @@ class ManualSubscriptionAdmin(SLModelView):
         "Extend 1 month more?",
     )
     def extend_1m(self, ids):
+        self.__extend_manual_subscription(ids, msg="1 month", months=1)
+
+    def __extend_manual_subscription(self, ids: List[int], msg: str, **kwargs):
         for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
-            ms.end_at = ms.end_at.shift(months=1)
-            flash(f"Extend subscription for 1 month for {ms.user}", "success")
+            sub: ManualSubscription = ms
+            sub.end_at = sub.end_at.shift(**kwargs)
+            flash(f"Extend subscription for {msg} for {sub.user}", "success")
+            emit_user_audit_log(
+                user=sub.user,
+                action=UserAuditLogAction.Upgrade,
+                message=f"Admin {current_user.email} extended manual subscription for {msg} for {sub.user}",
+            )
             AdminAuditLog.extend_subscription(
-                current_user.id, ms.user.id, ms.end_at, "1 month"
+                current_user.id, sub.user.id, sub.end_at, msg
+            )
+            EventDispatcher.send_event(
+                user=sub.user,
+                content=EventContent(
+                    user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
+                ),
            )

        Session.commit()
@@ -736,22 +775,47 @@ class InvalidMailboxDomainAdmin(SLModelView):
 class EmailSearchResult:
     no_match: bool = True
     alias: Optional[Alias] = None
-    mailbox: list[Mailbox] = []
+    alias_audit_log: Optional[List[AliasAuditLog]] = None
+    mailbox: List[Mailbox] = []
     mailbox_count: int = 0
     deleted_alias: Optional[DeletedAlias] = None
-    deleted_custom_alias: Optional[DomainDeletedAlias] = None
+    deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
+    domain_deleted_alias: Optional[DomainDeletedAlias] = None
+    domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
     user: Optional[User] = None
+    user_audit_log: Optional[List[UserAuditLog]] = None
+    query: str

     @staticmethod
     def from_email(email: str) -> EmailSearchResult:
         output = EmailSearchResult()
+        output.query = email
         alias = Alias.get_by(email=email)
         if alias:
             output.alias = alias
+            output.alias_audit_log = (
+                AliasAuditLog.filter_by(alias_id=alias.id)
+                .order_by(AliasAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
         user = User.get_by(email=email)
         if user:
             output.user = user
+            output.user_audit_log = (
+                UserAuditLog.filter_by(user_id=user.id)
+                .order_by(UserAuditLog.created_at.desc())
+                .all()
+            )
+            output.no_match = False
+
+        user_audit_log = (
+            UserAuditLog.filter_by(user_email=email)
+            .order_by(UserAuditLog.created_at.desc())
+            .all()
+        )
+        if user_audit_log:
+            output.user_audit_log = user_audit_log
             output.no_match = False
         mailboxes = (
             Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
@@ -763,10 +827,20 @@ class EmailSearchResult:
         deleted_alias = DeletedAlias.get_by(email=email)
         if deleted_alias:
             output.deleted_alias = deleted_alias
+            output.deleted_alias_audit_log = (
+                AliasAuditLog.filter_by(alias_email=deleted_alias.email)
+                .order_by(AliasAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
         domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
         if domain_deleted_alias:
             output.domain_deleted_alias = domain_deleted_alias
+            output.domain_deleted_alias_audit_log = (
+                AliasAuditLog.filter_by(alias_email=domain_deleted_alias.email)
+                .order_by(AliasAuditLog.created_at.desc())
+                .all()
+            )
             output.no_match = False
         return output

@@ -785,6 +859,25 @@ class EmailSearchHelpers:
     def mailbox_count(user: User) -> int:
         return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()

+    @staticmethod
+    def alias_mailboxes(alias: Alias) -> list[Mailbox]:
+        return (
+            Session.query(Mailbox)
+            .filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
+            .union(
+                Session.query(Mailbox)
+                .join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
+                .filter(AliasMailbox.alias_id == alias.id)
+            )
+            .order_by(Mailbox.id)
+            .limit(10)
+            .all()
+        )
+
+    @staticmethod
+    def alias_mailbox_count(alias: Alias) -> int:
+        return len(alias.mailboxes)
+
     @staticmethod
     def alias_list(user: User) -> list[Alias]:
         return (
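A minimal, hedged sketch of how the extended EmailSearchResult above might be consumed; the calling context and the example address are assumptions, not part of this changeset.

# Sketch only — assumes an application context where the admin models are importable.
result = EmailSearchResult.from_email("someone@example.com")
if not result.no_match:
    if result.alias:
        print(f"alias {result.alias.email}: {len(result.alias_audit_log or [])} audit entries")
    if result.user:
        print(f"user {result.user.email}: {len(result.user_audit_log or [])} audit entries")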
app/app/alias_audit_log_utils.py (new file, 38 lines)

@@ -0,0 +1,38 @@
+from enum import Enum
+from typing import Optional
+
+from app.models import Alias, AliasAuditLog
+
+
+class AliasAuditLogAction(Enum):
+    CreateAlias = "create"
+    ChangeAliasStatus = "change_status"
+    DeleteAlias = "delete"
+    UpdateAlias = "update"
+
+    InitiateTransferAlias = "initiate_transfer_alias"
+    AcceptTransferAlias = "accept_transfer_alias"
+    TransferredAlias = "transferred_alias"
+
+    ChangedMailboxes = "changed_mailboxes"
+
+    CreateContact = "create_contact"
+    UpdateContact = "update_contact"
+    DeleteContact = "delete_contact"
+
+
+def emit_alias_audit_log(
+    alias: Alias,
+    action: AliasAuditLogAction,
+    message: str,
+    user_id: Optional[int] = None,
+    commit: bool = False,
+):
+    AliasAuditLog.create(
+        user_id=user_id or alias.user_id,
+        alias_id=alias.id,
+        alias_email=alias.email,
+        action=action.value,
+        message=message,
+        commit=commit,
+    )
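A short usage sketch of the new helper, assuming a caller that renames an alias; the wrapper function and the message string are hypothetical, only the signature comes from the file above.

from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log

def record_alias_rename(alias, new_name: str):
    # `alias` is an app.models.Alias instance loaded by the caller (assumed).
    alias.name = new_name
    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.UpdateAlias,
        message="Alias fields updated (name)",
        commit=True,  # forwarded to AliasAuditLog.create by the helper
    )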
app/app/alias_mailbox_utils.py (new file, 61 lines)

@@ -0,0 +1,61 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import List, Optional
+
+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
+from app.db import Session
+from app.models import Alias, AliasMailbox, Mailbox
+
+_MAX_MAILBOXES_PER_ALIAS = 20
+
+
+class CannotSetMailboxesForAliasCause(Enum):
+    Forbidden = "Forbidden"
+    EmptyMailboxes = "Must choose at least one mailbox"
+    TooManyMailboxes = "Too many mailboxes"
+
+
+@dataclass
+class SetMailboxesForAliasResult:
+    performed_change: bool
+    reason: Optional[CannotSetMailboxesForAliasCause]
+
+
+def set_mailboxes_for_alias(
+    user_id: int, alias: Alias, mailbox_ids: List[int]
+) -> Optional[CannotSetMailboxesForAliasCause]:
+    if len(mailbox_ids) == 0:
+        return CannotSetMailboxesForAliasCause.EmptyMailboxes
+    if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
+        return CannotSetMailboxesForAliasCause.TooManyMailboxes
+
+    mailboxes = (
+        Session.query(Mailbox)
+        .filter(
+            Mailbox.id.in_(mailbox_ids),
+            Mailbox.user_id == user_id,
+            Mailbox.verified == True,  # noqa: E712
+        )
+        .all()
+    )
+    if len(mailboxes) != len(mailbox_ids):
+        return CannotSetMailboxesForAliasCause.Forbidden
+
+    # first remove all existing alias-mailboxes links
+    AliasMailbox.filter_by(alias_id=alias.id).delete()
+    Session.flush()
+
+    # then add all new mailboxes, being the first the one associated with the alias
+    for i, mailbox in enumerate(mailboxes):
+        if i == 0:
+            alias.mailbox_id = mailboxes[0].id
+        else:
+            AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
+
+    emit_alias_audit_log(
+        alias=alias,
+        action=AliasAuditLogAction.ChangedMailboxes,
+        message=",".join([f"{mailbox.id} ({mailbox.email})" for mailbox in mailboxes]),
+    )
+
+    return None
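A hedged sketch of calling the new helper outside the API layer; the wrapper and its return shape are assumptions, and the update_alias view further down shows the real call site.

from app.alias_mailbox_utils import set_mailboxes_for_alias

def update_alias_mailboxes(user, alias, mailbox_ids):
    # Returns None on success, or a CannotSetMailboxesForAliasCause member on failure.
    err = set_mailboxes_for_alias(user_id=user.id, alias=alias, mailbox_ids=mailbox_ids)
    if err:
        return {"error": err.value}, 400
    # The helper only flushes; the caller is still expected to commit the session.
    return {"ok": True}, 200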
@@ -58,7 +58,7 @@ def verify_prefix_suffix(

     # alias_domain must be either one of user custom domains or built-in domains
     if alias_domain not in user.available_alias_domains(alias_options=alias_options):
-        LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+        LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
         return False

     # SimpleLogin domain case:
@@ -75,17 +75,17 @@ def verify_prefix_suffix(
             and not config.DISABLE_ALIAS_SUFFIX
         ):
             if not alias_domain_prefix.startswith("."):
-                LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
+                LOG.i("User %s submits a wrong alias suffix %s", user, alias_suffix)
                 return False

     else:
         if alias_domain not in user_custom_domains:
             if not config.DISABLE_ALIAS_SUFFIX:
-                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

             if alias_domain not in available_sl_domains:
-                LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
+                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                 return False

     return True
@@ -1,12 +1,14 @@
 import csv
 from io import StringIO
 import re
+from dataclasses import dataclass
 from typing import Optional, Tuple

 from email_validator import validate_email, EmailNotValidError
 from sqlalchemy.exc import IntegrityError, DataError
 from flask import make_response

+from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
 from app.config import (
     BOUNCE_PREFIX_FOR_REPLY_PHASE,
     BOUNCE_PREFIX,
@@ -23,6 +25,7 @@ from app.email_utils import (
     send_cannot_create_domain_alias,
     send_email,
     render,
+    sl_formataddr,
 )
 from app.errors import AliasInTrashError
 from app.events.event_dispatcher import EventDispatcher
@@ -30,6 +33,7 @@ from app.events.generated.event_pb2 import (
     AliasDeleted,
     AliasStatusChanged,
     EventContent,
+    AliasCreated,
 )
 from app.log import LOG
 from app.models import (
@@ -365,6 +369,10 @@ def delete_alias(

     alias_id = alias.id
     alias_email = alias.email
+
+    emit_alias_audit_log(
+        alias, AliasAuditLogAction.DeleteAlias, "Alias deleted by user action"
+    )
     Alias.filter(Alias.id == alias.id).delete()
     Session.commit()

@@ -447,7 +455,7 @@ def alias_export_csv(user, csv_direct_export=False):
     return output


-def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
+def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
     # cannot transfer alias which is used for receiving newsletter
     if User.get_by(newsletter_alias_id=alias.id):
         raise Exception("Cannot transfer alias that's used to receive newsletter")
@@ -501,10 +509,47 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
     alias.disable_pgp = False
     alias.pinned = False

+    emit_alias_audit_log(
+        alias=alias,
+        action=AliasAuditLogAction.TransferredAlias,
+        message=f"Lost ownership of alias due to alias transfer confirmed. New owner is {new_user.id}",
+        user_id=old_user.id,
+    )
+    EventDispatcher.send_event(
+        old_user,
+        EventContent(
+            alias_deleted=AliasDeleted(
+                id=alias.id,
+                email=alias.email,
+            )
+        ),
+    )
+
+    emit_alias_audit_log(
+        alias=alias,
+        action=AliasAuditLogAction.AcceptTransferAlias,
+        message=f"Accepted alias transfer from user {old_user.id}",
+        user_id=new_user.id,
+    )
+    EventDispatcher.send_event(
+        new_user,
+        EventContent(
+            alias_created=AliasCreated(
+                id=alias.id,
+                email=alias.email,
+                note=alias.note,
+                enabled=alias.enabled,
+                created_at=int(alias.created_at.timestamp),
+            )
+        ),
+    )
+
     Session.commit()


-def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
+def change_alias_status(
+    alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
+):
     LOG.i(f"Changing alias {alias} enabled to {enabled}")
     alias.enabled = enabled

@@ -515,6 +560,39 @@ def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
         created_at=int(alias.created_at.timestamp),
     )
     EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
+    audit_log_message = f"Set alias status to {enabled}"
+    if message is not None:
+        audit_log_message += f". {message}"
+    emit_alias_audit_log(
+        alias, AliasAuditLogAction.ChangeAliasStatus, audit_log_message
+    )
+
     if commit:
         Session.commit()

+
+@dataclass
+class AliasRecipientName:
+    name: str
+    message: Optional[str] = None
+
+
+def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
+    """
+    Logic:
+    1. If alias has name, use it
+    2. If alias has custom domain, and custom domain has name, use it
+    3. Otherwise, use the alias email as the recipient
+    """
+    if alias.name:
+        return AliasRecipientName(
+            name=sl_formataddr((alias.name, alias.email)),
+            message=f"Put alias name {alias.name} in from header",
+        )
+    elif alias.custom_domain:
+        if alias.custom_domain.name:
+            return AliasRecipientName(
+                name=sl_formataddr((alias.custom_domain.name, alias.email)),
+                message=f"Put domain default alias name {alias.custom_domain.name} in from header",
+            )
+    return AliasRecipientName(name=alias.email)
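A minimal sketch of the widened change_alias_status signature in use; the disable reason is made up, while the keyword arguments match the definition above.

from app.alias_utils import change_alias_status

def disable_alias_with_reason(alias):
    change_alias_status(
        alias,
        enabled=False,
        message="Disabled by user from the dashboard",  # appended to the audit log entry
        commit=True,
    )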
@@ -1,9 +1,13 @@
+from typing import Optional
+
 from deprecated import deprecated
 from flask import g
 from flask import jsonify
 from flask import request

 from app import alias_utils
+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
+from app.alias_mailbox_utils import set_mailboxes_for_alias
 from app.api.base import api_bp, require_api_auth
 from app.api.serializer import (
     AliasInfo,
@@ -26,7 +30,7 @@ from app.errors import (
 )
 from app.extensions import limiter
 from app.log import LOG
-from app.models import Alias, Contact, Mailbox, AliasMailbox, AliasDeleteReason
+from app.models import Alias, Contact, Mailbox, AliasDeleteReason


 @deprecated
@@ -185,7 +189,11 @@ def toggle_alias(alias_id):
     if not alias or alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

-    alias_utils.change_alias_status(alias, enabled=not alias.enabled)
+    alias_utils.change_alias_status(
+        alias,
+        enabled=not alias.enabled,
+        message=f"Set enabled={not alias.enabled} via API",
+    )
     LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
     Session.commit()

@@ -272,10 +280,12 @@ def update_alias(alias_id):
     if not alias or alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

+    changed_fields = []
     changed = False
     if "note" in data:
         new_note = data.get("note")
         alias.note = new_note
+        changed_fields.append("note")
         changed = True

     if "mailbox_id" in data:
@@ -285,35 +295,19 @@ def update_alias(alias_id):
             return jsonify(error="Forbidden"), 400

         alias.mailbox_id = mailbox_id
+        changed_fields.append(f"mailbox_id ({mailbox_id})")
         changed = True

     if "mailbox_ids" in data:
         mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
-        mailboxes: [Mailbox] = []
-
-        # check if all mailboxes belong to user
-        for mailbox_id in mailbox_ids:
-            mailbox = Mailbox.get(mailbox_id)
-            if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
-                return jsonify(error="Forbidden"), 400
-            mailboxes.append(mailbox)
-
-        if not mailboxes:
-            return jsonify(error="Must choose at least one mailbox"), 400
-
-        # <<< update alias mailboxes >>>
-        # first remove all existing alias-mailboxes links
-        AliasMailbox.filter_by(alias_id=alias.id).delete()
-        Session.flush()
-
-        # then add all new mailboxes
-        for i, mailbox in enumerate(mailboxes):
-            if i == 0:
-                alias.mailbox_id = mailboxes[0].id
-            else:
-                AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
-        # <<< END update alias mailboxes >>>
+        err = set_mailboxes_for_alias(
+            user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
+        )
+        if err:
+            return jsonify(error=err.value), 400

+        mailbox_ids_string = ",".join(map(str, mailbox_ids))
+        changed_fields.append(f"mailbox_ids ({mailbox_ids_string})")
         changed = True

     if "name" in data:
@@ -325,17 +319,26 @@ def update_alias(alias_id):
         if new_name:
             new_name = new_name.replace("\n", "")
         alias.name = new_name
+        changed_fields.append("name")
         changed = True

     if "disable_pgp" in data:
         alias.disable_pgp = data.get("disable_pgp")
+        changed_fields.append("disable_pgp")
         changed = True

     if "pinned" in data:
         alias.pinned = data.get("pinned")
+        changed_fields.append("pinned")
         changed = True

     if changed:
+        changed_fields_string = ",".join(changed_fields)
+        emit_alias_audit_log(
+            alias,
+            AliasAuditLogAction.UpdateAlias,
+            f"Alias fields updated ({changed_fields_string})",
+        )
         Session.commit()

     return jsonify(ok=True), 200
@@ -416,15 +419,14 @@ def create_contact_route(alias_id):
     if not data:
         return jsonify(error="request body cannot be empty"), 400

-    alias: Alias = Alias.get(alias_id)
-
-    if alias.user_id != g.user.id:
+    alias: Optional[Alias] = Alias.get_by(id=alias_id, user_id=g.user.id)
+    if not alias:
         return jsonify(error="Forbidden"), 403

     contact_address = data.get("contact")

     try:
-        contact = create_contact(g.user, alias, contact_address)
+        contact = create_contact(alias, contact_address)
     except ErrContactErrorUpgradeNeeded as err:
         return jsonify(error=err.error_for_user()), 403
     except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
@@ -446,11 +448,16 @@ def delete_contact(contact_id):
         200
     """
     user = g.user
-    contact = Contact.get(contact_id)
+    contact: Optional[Contact] = Contact.get(contact_id)

     if not contact or contact.alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

+    emit_alias_audit_log(
+        alias=contact.alias,
+        action=AliasAuditLogAction.DeleteContact,
+        message=f"Deleted contact {contact_id} ({contact.email})",
+    )
     Contact.delete(contact_id)
     Session.commit()

@@ -468,12 +475,17 @@ def toggle_contact(contact_id):
         200
     """
     user = g.user
-    contact = Contact.get(contact_id)
+    contact: Optional[Contact] = Contact.get(contact_id)

     if not contact or contact.alias.user_id != user.id:
         return jsonify(error="Forbidden"), 403

     contact.block_forward = not contact.block_forward
+    emit_alias_audit_log(
+        alias=contact.alias,
+        action=AliasAuditLogAction.UpdateContact,
+        message=f"Set contact state {contact.id} {contact.email} -> {contact.website_email} to blocked {contact.block_forward}",
+    )
     Session.commit()

     return jsonify(block_forward=contact.block_forward), 200
|
@ -23,6 +23,7 @@ from app.events.auth_event import LoginEvent, RegisterEvent
|
|||||||
from app.extensions import limiter
|
from app.extensions import limiter
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import User, ApiKey, SocialAuth, AccountActivation
|
from app.models import User, ApiKey, SocialAuth, AccountActivation
|
||||||
|
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||||
from app.utils import sanitize_email, canonicalize_email
|
from app.utils import sanitize_email, canonicalize_email
|
||||||
|
|
||||||
|
|
||||||
@ -52,8 +53,12 @@ def auth_login():
|
|||||||
password = data.get("password")
|
password = data.get("password")
|
||||||
device = data.get("device")
|
device = data.get("device")
|
||||||
|
|
||||||
email = sanitize_email(data.get("email"))
|
email = data.get("email")
|
||||||
canonical_email = canonicalize_email(data.get("email"))
|
if not email:
|
||||||
|
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
|
||||||
|
return jsonify(error="Email or password incorrect"), 400
|
||||||
|
email = sanitize_email(email)
|
||||||
|
canonical_email = canonicalize_email(email)
|
||||||
|
|
||||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||||
|
|
||||||
@@ -183,6 +188,11 @@ def auth_activate():

     LOG.d("activate user %s", user)
     user.activated = True
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.ActivateUser,
+        message=f"User has been activated: {user.email}",
+    )
     AccountActivation.delete(account_activation.id)
     Session.commit()

@@ -2,8 +2,10 @@ from flask import g, request
 from flask import jsonify

 from app.api.base import api_bp, require_api_auth
+from app.custom_domain_utils import set_custom_domain_mailboxes
 from app.db import Session
-from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
+from app.log import LOG
+from app.models import CustomDomain, DomainDeletedAlias


 def custom_domain_to_dict(custom_domain: CustomDomain):
@@ -100,23 +102,14 @@ def update_custom_domain(custom_domain_id):

     if "mailbox_ids" in data:
         mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
-        if mailbox_ids:
-            # check if mailbox is not tempered with
-            mailboxes = []
-            for mailbox_id in mailbox_ids:
-                mailbox = Mailbox.get(mailbox_id)
-                if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
-                    return jsonify(error="Forbidden"), 400
-                mailboxes.append(mailbox)
-
-            # first remove all existing domain-mailboxes links
-            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
-            Session.flush()
-
-            for mailbox in mailboxes:
-                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
-
+        result = set_custom_domain_mailboxes(user.id, custom_domain, mailbox_ids)
+        if result.success:
             changed = True
+        else:
+            LOG.info(
+                f"Prevented from updating mailboxes [custom_domain_id={custom_domain.id}]: {result.reason.value}"
+            )
+            return jsonify(error="Forbidden"), 400

     if changed:
         Session.commit()
@@ -38,7 +38,11 @@ def create_mailbox():
         the new mailbox dict
     """
     user = g.user
-    mailbox_email = sanitize_email(request.get_json().get("email"))
+    email = request.get_json().get("email")
+    if not email:
+        return jsonify(error="Invalid email"), 400
+
+    mailbox_email = sanitize_email(email)

     try:
         new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
@@ -153,7 +153,8 @@ def new_custom_alias_v3():
     if not isinstance(data, dict):
         return jsonify(error="request body does not follow the required format"), 400

-    alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
+    alias_prefix_data = data.get("alias_prefix", "") or ""
+    alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
     signed_suffix = data.get("signed_suffix", "") or ""
     signed_suffix = signed_suffix.strip()

@@ -6,6 +6,7 @@ from app import config
 from app.extensions import limiter
 from app.log import LOG
 from app.models import Job, ApiToCookieToken
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 @api_bp.route("/user", methods=["DELETE"])
@@ -16,6 +17,11 @@ def delete_user():

     """
     # Schedule delete account job
+    emit_user_audit_log(
+        user=g.user,
+        action=UserAuditLogAction.UserMarkedForDeletion,
+        message=f"Marked user {g.user.id} ({g.user.email}) for deletion from API",
+    )
     LOG.w("schedule delete account job for %s", g.user)
     Job.create(
         name=config.JOB_DELETE_ACCOUNT,
@@ -87,7 +87,7 @@ def update_user_info():
                 File.delete(file.id)
                 s3.delete(file.path)
                 Session.flush()
-        else:
+        if data["profile_picture"] is not None:
             raw_data = base64.decodebytes(data["profile_picture"].encode())
             if detect_image_format(raw_data) == ImageFormat.Unknown:
                 return jsonify(error="Unsupported image format"), 400
@@ -7,6 +7,7 @@ from app.db import Session
 from app.extensions import limiter
 from app.log import LOG
 from app.models import ActivationCode
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_next_url


@@ -47,6 +48,11 @@ def activate():

     user = activation_code.user
     user.activated = True
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.ActivateUser,
+        message=f"User has been activated: {user.email}",
+    )
     login_user(user)

     # activation code is to be used only once
@@ -10,6 +10,7 @@ from app.events.auth_event import LoginEvent
 from app.extensions import limiter
 from app.log import LOG
 from app.models import User
+from app.pw_models import PasswordOracle
 from app.utils import sanitize_email, sanitize_next_url, canonicalize_email


@@ -43,6 +44,13 @@ def login():
         user = User.get_by(email=email) or User.get_by(email=canonical_email)

         if not user or not user.check_password(form.password.data):
+            if not user:
+                # Do the hash to avoid timing attacks nevertheless
+                dummy_pw = PasswordOracle()
+                dummy_pw.password = (
+                    "$2b$12$ZWqpL73h4rGNfLkJohAFAu0isqSw/bX9p/tzpbWRz/To5FAftaW8u"
+                )
+                dummy_pw.check_password(form.password.data)
             # Trigger rate limiter
             g.deduct_limit = True
             form.password.data = None
@@ -9,6 +9,7 @@ from app.auth.views.login_utils import after_login
 from app.db import Session
 from app.extensions import limiter
 from app.models import ResetPasswordCode
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 class ResetPasswordForm(FlaskForm):
@@ -59,6 +60,11 @@ def reset_password():

     # this can be served to activate user too
     user.activated = True
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.ResetPassword,
+        message="User has reset their password",
+    )

     # remove all reset password codes
     ResetPasswordCode.filter_by(user_id=user.id).delete()
@ -309,6 +309,7 @@ JOB_DELETE_DOMAIN = "delete-domain"
|
|||||||
JOB_SEND_USER_REPORT = "send-user-report"
|
JOB_SEND_USER_REPORT = "send-user-report"
|
||||||
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
|
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
|
||||||
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
|
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
|
||||||
|
JOB_SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"
|
||||||
|
|
||||||
# for pagination
|
# for pagination
|
||||||
PAGE_LIMIT = 20
|
PAGE_LIMIT = 20
|
||||||
@ -601,7 +602,6 @@ SKIP_MX_LOOKUP_ON_CHECK = False
|
|||||||
|
|
||||||
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
||||||
|
|
||||||
SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
|
|
||||||
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
||||||
|
|
||||||
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
||||||
@@ -653,7 +653,15 @@ def read_partner_dict(var: str) -> dict[int, str]:
     return res


-PARTNER_DOMAINS: dict[int, str] = read_partner_dict("PARTNER_DOMAINS")
-PARTNER_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
-    "PARTNER_DOMAIN_VALIDATION_PREFIXES"
+PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
+    "PARTNER_DNS_CUSTOM_DOMAINS"
 )
+PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
+    "PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
+)
+
+MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
+    "MAILBOX_VERIFICATION_OVERRIDE_CODE", None
+)
+
+AUDIT_LOG_MAX_DAYS = int(os.environ.get("AUDIT_LOG_MAX_DAYS", 30))
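Both new dicts map a numeric partner id to a string (a DNS domain, or a TXT-record prefix) and are filled by the `read_partner_dict` helper named in the hunk header; the exact environment-variable format is not visible here. A small consumption sketch with made-up values:

```python
from typing import Optional

# Assumed example values; real deployments read these from the environment.
PARTNER_DNS_CUSTOM_DOMAINS = {1: "partner-dns.example.com"}
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES = {1: "partner"}


def partner_dns_domain(partner_id: int) -> Optional[str]:
    # Returns None when the partner has no dedicated DNS domain configured.
    return PARTNER_DNS_CUSTOM_DOMAINS.get(partner_id)


print(partner_dns_domain(1))  # partner-dns.example.com
print(partner_dns_domain(2))  # None
```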
@@ -4,8 +4,9 @@ from typing import Optional

 from sqlalchemy.exc import IntegrityError

+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
 from app.db import Session
-from app.email_utils import generate_reply_email
+from app.email_utils import generate_reply_email, parse_full_address
 from app.email_validation import is_valid_email
 from app.log import LOG
 from app.models import Contact, Alias

@@ -14,11 +15,14 @@ from app.utils import sanitize_email

 class ContactCreateError(Enum):
     InvalidEmail = "Invalid email"
+    NotAllowed = "Your plan does not allow to create contacts"
+    Unknown = "Unknown error when trying to create contact"


 @dataclass
 class ContactCreateResult:
     contact: Optional[Contact]
+    created: bool
     error: Optional[ContactCreateError]

@@ -33,37 +37,61 @@ def __update_contact_if_needed(
         LOG.d(f"Setting {contact} mail_from to {mail_from}")
         contact.mail_from = mail_from
         Session.commit()
-    return ContactCreateResult(contact, None)
+    return ContactCreateResult(contact, created=False, error=None)


 def create_contact(
     email: str,
-    name: Optional[str],
     alias: Alias,
+    name: Optional[str] = None,
     mail_from: Optional[str] = None,
     allow_empty_email: bool = False,
     automatic_created: bool = False,
     from_partner: bool = False,
 ) -> ContactCreateResult:
-    if name is not None:
+    # If user cannot create contacts, they still need to be created when receiving an email for an alias
+    if not automatic_created and not alias.user.can_create_contacts():
+        return ContactCreateResult(
+            None, created=False, error=ContactCreateError.NotAllowed
+        )
+    # Parse emails with form 'name <email>'
+    try:
+        email_name, email = parse_full_address(email)
+    except ValueError:
+        email = ""
+        email_name = ""
+    # If no name is explicitly given try to get it from the parsed email
+    if name is None:
+        name = email_name[: Contact.MAX_NAME_LENGTH]
+    else:
         name = name[: Contact.MAX_NAME_LENGTH]
+    # If still no name is there, make sure the name is None instead of empty string
+    if not name:
+        name = None
     if name is not None and "\x00" in name:
         LOG.w("Cannot use contact name because has \\x00")
         name = ""
+    # Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
+    email = sanitize_email(email, not_lower=True)
     if not is_valid_email(email):
         LOG.w(f"invalid contact email {email}")
         if not allow_empty_email:
-            return ContactCreateResult(None, ContactCreateError.InvalidEmail)
+            return ContactCreateResult(
+                None, created=False, error=ContactCreateError.InvalidEmail
+            )
         LOG.d("Create a contact with invalid email for %s", alias)
         # either reuse a contact with empty email or create a new contact with empty email
         email = ""
-    email = sanitize_email(email, not_lower=True)
+    # If contact exists, update name and mail_from if needed
     contact = Contact.get_by(alias_id=alias.id, website_email=email)
     if contact is not None:
         return __update_contact_if_needed(contact, name, mail_from)
+    # Create the contact
     reply_email = generate_reply_email(email, alias)
+    alias_id = alias.id
     try:
         flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
+        is_invalid_email = email == ""
         contact = Contact.create(
             user_id=alias.user_id,
             alias_id=alias.id,

@@ -73,17 +101,38 @@ def create_contact(
             mail_from=mail_from,
             automatic_created=automatic_created,
             flags=flags,
-            invalid_email=email == "",
+            invalid_email=is_invalid_email,
+            commit=True,
+        )
+        contact_id = contact.id
+        if automatic_created:
+            trail = ". Automatically created"
+        else:
+            trail = ". Created by user action"
+        emit_alias_audit_log(
+            alias=alias,
+            action=AliasAuditLogAction.CreateContact,
+            message=f"Created contact {contact_id} ({email}){trail}",
             commit=True,
         )
         LOG.d(
-            f"Created contact {contact} for alias {alias} with email {email} invalid_email={contact.invalid_email}"
+            f"Created contact {contact} for alias {alias} with email {email} invalid_email={is_invalid_email}"
         )
+        return ContactCreateResult(contact, created=True, error=None)
     except IntegrityError:
         Session.rollback()
         LOG.info(
-            f"Contact with email {email} for alias_id {alias.id} already existed, fetching from DB"
+            f"Contact with email {email} for alias_id {alias_id} already existed, fetching from DB"
         )
-        contact = Contact.get_by(alias_id=alias.id, website_email=email)
-        return __update_contact_if_needed(contact, name, mail_from)
-    return ContactCreateResult(contact, None)
+        contact: Optional[Contact] = Contact.get_by(
+            alias_id=alias_id, website_email=email
+        )
+        if contact:
+            return __update_contact_if_needed(contact, name, mail_from)
+        else:
+            LOG.warning(
+                f"Could not find contact with email {email} for alias_id {alias_id} and it should exist"
+            )
+            return ContactCreateResult(
+                None, created=False, error=ContactCreateError.Unknown
+            )
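With the reworked signature, `email` and `alias` are the only required arguments and callers branch on the `created`/`error` fields of `ContactCreateResult` rather than catching exceptions. A usage sketch (the `alias` variable and the address are placeholders):

```python
from app.contact_utils import create_contact, ContactCreateError

result = create_contact(email="Someone <someone@example.com>", alias=alias)
if result.error == ContactCreateError.NotAllowed:
    print("this plan cannot create contacts manually")
elif result.error is not None:
    print("invalid address or unexpected failure")
elif not result.created:
    print("contact already existed; it was reused and possibly updated")
else:
    print("new contact:", result.contact.reply_email)
```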
@@ -3,15 +3,17 @@ import re

 from dataclasses import dataclass
 from enum import Enum
-from typing import Optional
+from typing import List, Optional

 from app.config import JOB_DELETE_DOMAIN
 from app.db import Session
 from app.email_utils import get_email_domain_part
 from app.log import LOG
-from app.models import User, CustomDomain, SLDomain, Mailbox, Job
+from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

 _ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
+_MAX_MAILBOXES_PER_DOMAIN = 20


 @dataclass

@@ -45,6 +47,20 @@ class CannotUseDomainReason(Enum):
             raise Exception("Invalid CannotUseDomainReason")


+class CannotSetCustomDomainMailboxesCause(Enum):
+    InvalidMailbox = "Something went wrong, please retry"
+    NoMailboxes = "You must select at least 1 mailbox"
+    TooManyMailboxes = (
+        f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
+    )
+
+
+@dataclass
+class SetCustomDomainMailboxesResult:
+    success: bool
+    reason: Optional[CannotSetCustomDomainMailboxesCause] = None
+
+
 def is_valid_domain(domain: str) -> bool:
     """
     Checks that a domain is valid according to RFC 1035

@@ -122,6 +138,11 @@ def create_custom_domain(
     if partner_id is not None:
         new_custom_domain.partner_id = partner_id

+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.CreateCustomDomain,
+        message=f"Created custom domain {new_custom_domain.id} ({new_domain})",
+    )
     Session.commit()

     return CreateCustomDomainResult(

@@ -140,3 +161,46 @@ def delete_custom_domain(domain: CustomDomain):
         run_at=arrow.now(),
         commit=True,
     )
+
+
+def set_custom_domain_mailboxes(
+    user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
+) -> SetCustomDomainMailboxesResult:
+    if len(mailbox_ids) == 0:
+        return SetCustomDomainMailboxesResult(
+            success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
+        )
+    elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
+        return SetCustomDomainMailboxesResult(
+            success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
+        )
+
+    mailboxes = (
+        Session.query(Mailbox)
+        .filter(
+            Mailbox.id.in_(mailbox_ids),
+            Mailbox.user_id == user_id,
+            Mailbox.verified == True,  # noqa: E712
+        )
+        .all()
+    )
+    if len(mailboxes) != len(mailbox_ids):
+        return SetCustomDomainMailboxesResult(
+            success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
+        )
+
+    # first remove all existing domain-mailboxes links
+    DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
+    Session.flush()
+
+    for mailbox in mailboxes:
+        DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
+
+    mailboxes_as_str = ",".join(map(str, mailbox_ids))
+    emit_user_audit_log(
+        user=custom_domain.user,
+        action=UserAuditLogAction.UpdateCustomDomain,
+        message=f"Updated custom domain {custom_domain.id} mailboxes (domain={custom_domain.domain}) (mailboxes={mailboxes_as_str})",
+    )
+    Session.commit()
+    return SetCustomDomainMailboxesResult(success=True)
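`set_custom_domain_mailboxes` validates the selection (non-empty, at most 20 mailboxes, all verified and owned by the user) before rewriting the `DomainMailbox` links, so callers only inspect the result object; the `domain_detail` view further down in this diff consumes it exactly this way:

```python
from app.custom_domain_utils import set_custom_domain_mailboxes

result = set_custom_domain_mailboxes(
    user_id=current_user.id,
    custom_domain=custom_domain,
    mailbox_ids=[m.id for m in selected_mailboxes],  # placeholders
)
if not result.success:
    flash(result.reason.value, "warning")
```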
@@ -1,15 +1,18 @@
 from dataclasses import dataclass
-from typing import Optional
+from typing import List, Optional

 from app import config
 from app.constants import DMARC_RECORD
 from app.db import Session
 from app.dns_utils import (
+    MxRecord,
     DNSClient,
     is_mx_equivalent,
     get_network_dns_client,
 )
 from app.models import CustomDomain
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
+from app.utils import random_string


 @dataclass

@@ -28,10 +31,10 @@ class CustomDomainValidation:
     ):
         self.dkim_domain = dkim_domain
         self._dns_client = dns_client
-        self._partner_domains = partner_domains or config.PARTNER_DOMAINS
+        self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
         self._partner_domain_validation_prefixes = (
             partner_domains_validation_prefixes
-            or config.PARTNER_DOMAIN_VALIDATION_PREFIXES
+            or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
         )

     def get_ownership_verification_record(self, domain: CustomDomain) -> str:

@@ -41,8 +44,36 @@ class CustomDomainValidation:
             and domain.partner_id in self._partner_domain_validation_prefixes
         ):
             prefix = self._partner_domain_validation_prefixes[domain.partner_id]
+
+        if not domain.ownership_txt_token:
+            domain.ownership_txt_token = random_string(30)
+            Session.commit()
+
         return f"{prefix}-verification={domain.ownership_txt_token}"

+    def get_expected_mx_records(self, domain: CustomDomain) -> list[MxRecord]:
+        records = []
+        if domain.partner_id is not None and domain.partner_id in self._partner_domains:
+            domain = self._partner_domains[domain.partner_id]
+            records.append(MxRecord(10, f"mx1.{domain}."))
+            records.append(MxRecord(20, f"mx2.{domain}."))
+        else:
+            # Default ones
+            for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
+                records.append(MxRecord(priority, domain))
+
+        return records
+
+    def get_expected_spf_domain(self, domain: CustomDomain) -> str:
+        if domain.partner_id is not None and domain.partner_id in self._partner_domains:
+            return self._partner_domains[domain.partner_id]
+        else:
+            return config.EMAIL_DOMAIN
+
+    def get_expected_spf_record(self, domain: CustomDomain) -> str:
+        spf_domain = self.get_expected_spf_domain(domain)
+        return f"v=spf1 include:{spf_domain} ~all"
+
     def get_dkim_records(self, domain: CustomDomain) -> {str: str}:
         """
         Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,

@@ -91,6 +122,12 @@ class CustomDomainValidation:
         # Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
         # rest of the code path, returning the invalid records and clearing the flag
         custom_domain.dkim_verified = len(invalid_records) == 0
+        if custom_domain.dkim_verified:
+            emit_user_audit_log(
+                user=custom_domain.user,
+                action=UserAuditLogAction.VerifyCustomDomain,
+                message=f"Verified DKIM records for custom domain {custom_domain.id} ({custom_domain.domain})",
+            )
         Session.commit()
         return invalid_records

@@ -107,6 +144,11 @@ class CustomDomainValidation:

         if expected_verification_record in txt_records:
             custom_domain.ownership_verified = True
+            emit_user_audit_log(
+                user=custom_domain.user,
+                action=UserAuditLogAction.VerifyCustomDomain,
+                message=f"Verified ownership for custom domain {custom_domain.id} ({custom_domain.domain})",
+            )
             Session.commit()
             return DomainValidationResult(success=True, errors=[])
         else:

@@ -116,14 +158,20 @@ class CustomDomainValidation:
         self, custom_domain: CustomDomain
     ) -> DomainValidationResult:
         mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
+        expected_mx_records = self.get_expected_mx_records(custom_domain)

-        if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
+        if not is_mx_equivalent(mx_domains, expected_mx_records):
             return DomainValidationResult(
                 success=False,
-                errors=[f"{priority} {domain}" for (priority, domain) in mx_domains],
+                errors=[f"{record.priority} {record.domain}" for record in mx_domains],
             )
         else:
             custom_domain.verified = True
+            emit_user_audit_log(
+                user=custom_domain.user,
+                action=UserAuditLogAction.VerifyCustomDomain,
+                message=f"Verified MX records for custom domain {custom_domain.id} ({custom_domain.domain})",
+            )
             Session.commit()
             return DomainValidationResult(success=True, errors=[])

@@ -131,16 +179,24 @@ class CustomDomainValidation:
         self, custom_domain: CustomDomain
     ) -> DomainValidationResult:
         spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
-        if config.EMAIL_DOMAIN in spf_domains:
+        expected_spf_domain = self.get_expected_spf_domain(custom_domain)
+        if expected_spf_domain in spf_domains:
             custom_domain.spf_verified = True
+            emit_user_audit_log(
+                user=custom_domain.user,
+                action=UserAuditLogAction.VerifyCustomDomain,
+                message=f"Verified SPF records for custom domain {custom_domain.id} ({custom_domain.domain})",
+            )
             Session.commit()
             return DomainValidationResult(success=True, errors=[])
         else:
             custom_domain.spf_verified = False
             Session.commit()
+            txt_records = self._dns_client.get_txt_record(custom_domain.domain)
+            cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
             return DomainValidationResult(
                 success=False,
-                errors=self._dns_client.get_txt_record(custom_domain.domain),
+                errors=cleaned_records,
             )

     def validate_dmarc_records(

@@ -149,9 +205,24 @@ class CustomDomainValidation:
         txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
         if DMARC_RECORD in txt_records:
             custom_domain.dmarc_verified = True
+            emit_user_audit_log(
+                user=custom_domain.user,
+                action=UserAuditLogAction.VerifyCustomDomain,
+                message=f"Verified DMARC records for custom domain {custom_domain.id} ({custom_domain.domain})",
+            )
             Session.commit()
             return DomainValidationResult(success=True, errors=[])
         else:
             custom_domain.dmarc_verified = False
             Session.commit()
             return DomainValidationResult(success=False, errors=txt_records)
+
+    def __clean_spf_records(
+        self, txt_records: List[str], custom_domain: CustomDomain
+    ) -> List[str]:
+        final_records = []
+        verification_record = self.get_ownership_verification_record(custom_domain)
+        for record in txt_records:
+            if record != verification_record:
+                final_records.append(record)
+        return final_records
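Validation now compares against per-domain expectations instead of the global config: a partner-registered domain is expected to point at `mx1.`/`mx2.` of the partner's DNS domain and to include that domain in SPF, while every other domain keeps the defaults. The `domain_detail_dns` view later in this diff renders these expectations directly; a sketch of that call site:

```python
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)

expected_mx = domain_validator.get_expected_mx_records(custom_domain)
# Partner domains: [MxRecord(10, "mx1.<partner-dns-domain>."), MxRecord(20, "mx2.<partner-dns-domain>.")]
# Everything else: the (priority, server) pairs from config.EMAIL_SERVERS_WITH_PRIORITY

expected_spf = domain_validator.get_expected_spf_record(custom_domain)
# "v=spf1 include:<partner-dns-domain or config.EMAIL_DOMAIN> ~all"
```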
@@ -1,3 +1,5 @@
+import secrets
+
 import arrow
 from flask import (
     render_template,

@@ -163,7 +165,7 @@ def send_reset_password_email(user):
     """
     # the activation code is valid for 1h
     reset_password_code = ResetPasswordCode.create(
-        user_id=user.id, code=random_string(60)
+        user_id=user.id, code=secrets.token_urlsafe(32)
     )
     Session.commit()
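Reset codes are now produced with the standard-library `secrets` module instead of the app's `random_string` helper; `token_urlsafe(32)` draws 32 bytes from the OS CSPRNG and encodes them as a roughly 43-character URL-safe string:

```python
import secrets

code = secrets.token_urlsafe(32)
print(len(code), code)  # typically 43 characters, 256 bits of entropy
```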
@@ -1,5 +1,6 @@
 from dataclasses import dataclass
 from operator import or_
+from typing import Optional

 from flask import render_template, request, redirect, flash
 from flask import url_for

@@ -9,13 +10,11 @@ from sqlalchemy import and_, func, case
 from wtforms import StringField, validators, ValidationError

 # Need to import directly from config to allow modification from the tests
-from app import config, parallel_limiter
+from app import config, parallel_limiter, contact_utils
+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
+from app.contact_utils import ContactCreateError
 from app.dashboard.base import dashboard_bp
 from app.db import Session
-from app.email_utils import (
-    generate_reply_email,
-    parse_full_address,
-)
 from app.email_validation import is_valid_email
 from app.errors import (
     CannotCreateContactForReverseAlias,

@@ -24,8 +23,8 @@ from app.errors import (
     ErrContactAlreadyExists,
 )
 from app.log import LOG
-from app.models import Alias, Contact, EmailLog, User
-from app.utils import sanitize_email, CSRFValidationForm
+from app.models import Alias, Contact, EmailLog
+from app.utils import CSRFValidationForm


 def email_validator():

@@ -51,7 +50,7 @@ def email_validator():
     return _check


-def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
+def create_contact(alias: Alias, contact_address: str) -> Contact:
     """
     Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
     Can throw exceptions:

@@ -61,37 +60,23 @@ def create_contact(alias: Alias, contact_address: str) -> Contact:
     """
     if not contact_address:
         raise ErrAddressInvalid("Empty address")
-    try:
-        contact_name, contact_email = parse_full_address(contact_address)
-    except ValueError:
+    output = contact_utils.create_contact(email=contact_address, alias=alias)
+    if output.error == ContactCreateError.InvalidEmail:
         raise ErrAddressInvalid(contact_address)
-
-    contact_email = sanitize_email(contact_email)
-    if not is_valid_email(contact_email):
-        raise ErrAddressInvalid(contact_email)
-
-    contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
-    if contact:
-        raise ErrContactAlreadyExists(contact)
-
-    if not user.can_create_contacts():
+    elif output.error == ContactCreateError.NotAllowed:
         raise ErrContactErrorUpgradeNeeded()
+    elif output.error is not None:
+        raise ErrAddressInvalid("Invalid address")
+    elif not output.created:
+        raise ErrContactAlreadyExists(output.contact)

-    contact = Contact.create(
-        user_id=alias.user_id,
-        alias_id=alias.id,
-        website_email=contact_email,
-        name=contact_name,
-        reply_email=generate_reply_email(contact_email, alias),
-    )
+    contact = output.contact

     LOG.d(
         "create reverse-alias for %s %s, reverse alias:%s",
         contact_address,
         alias,
         contact.reply_email,
     )
-    Session.commit()

     return contact

@@ -207,7 +192,7 @@ def get_contact_infos(


 def delete_contact(alias: Alias, contact_id: int):
-    contact = Contact.get(contact_id)
+    contact: Optional[Contact] = Contact.get(contact_id)

     if not contact:
         flash("Unknown error. Refresh the page", "warning")

@@ -215,6 +200,11 @@ def delete_contact(alias: Alias, contact_id: int):
         flash("You cannot delete reverse-alias", "warning")
     else:
         delete_contact_email = contact.website_email
+        emit_alias_audit_log(
+            alias=alias,
+            action=AliasAuditLogAction.DeleteContact,
+            message=f"Delete contact {contact_id} ({contact.email})",
+        )
         Contact.delete(contact_id)
         Session.commit()

@@ -237,7 +227,10 @@ def alias_contact_manager(alias_id):

     page = 0
     if request.args.get("page"):
-        page = int(request.args.get("page"))
+        try:
+            page = int(request.args.get("page"))
+        except ValueError:
+            pass

     query = request.args.get("query") or ""

@@ -261,7 +254,7 @@ def alias_contact_manager(alias_id):
         if new_contact_form.validate():
             contact_address = new_contact_form.email.data.strip()
             try:
-                contact = create_contact(current_user, alias, contact_address)
+                contact = create_contact(alias, contact_address)
             except (
                 ErrContactErrorUpgradeNeeded,
                 ErrAddressInvalid,
@@ -7,6 +7,7 @@ from flask import render_template, redirect, url_for, flash, request
 from flask_login import login_required, current_user

 from app import config
+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
 from app.alias_utils import transfer_alias
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required

@@ -57,6 +58,12 @@ def alias_transfer_send_route(alias_id):
     transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
     alias.transfer_token = hmac_alias_transfer_token(transfer_token)
     alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)
+
+    emit_alias_audit_log(
+        alias,
+        AliasAuditLogAction.InitiateTransferAlias,
+        "Initiated alias transfer",
+    )
     Session.commit()
     alias_transfer_url = (
         config.URL
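The transfer link itself carries `<alias id>.<random token>`, while only an HMAC of it is stored on the alias, so a database leak does not yield working transfer URLs. `hmac_alias_transfer_token` is a SimpleLogin internal; a minimal sketch of that idea with the standard library (the secret and alias id below are placeholders):

```python
import hashlib
import hmac
import secrets

SERVER_SECRET = b"server-side-secret"  # placeholder; the real app derives this from its config
alias_id = 42  # placeholder

transfer_token = f"{alias_id}.{secrets.token_urlsafe(32)}"
stored_token = hmac.new(SERVER_SECRET, transfer_token.encode(), hashlib.sha256).hexdigest()


def token_matches(presented: str) -> bool:
    # A presented token is accepted only if its HMAC matches the stored value.
    candidate = hmac.new(SERVER_SECRET, presented.encode(), hashlib.sha256).hexdigest()
    return hmac.compare_digest(candidate, stored_token)
```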
@@ -1,8 +1,11 @@
+from typing import Optional
+
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators

+from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
 from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.models import Contact

@@ -20,7 +23,7 @@ class PGPContactForm(FlaskForm):
 @dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
 @login_required
 def contact_detail_route(contact_id):
-    contact = Contact.get(contact_id)
+    contact: Optional[Contact] = Contact.get(contact_id)
     if not contact or contact.user_id != current_user.id:
         flash("You cannot see this page", "warning")
         return redirect(url_for("dashboard.index"))

@@ -50,6 +53,11 @@ def contact_detail_route(contact_id):
             except PGPException:
                 flash("Cannot add the public key, please verify it", "error")
             else:
+                emit_alias_audit_log(
+                    alias=alias,
+                    action=AliasAuditLogAction.UpdateContact,
+                    message=f"Added PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
+                )
                 Session.commit()
                 flash(
                     f"PGP public key for {contact.email} is saved successfully",

@@ -62,6 +70,11 @@ def contact_detail_route(contact_id):
             )
         elif pgp_form.action.data == "remove":
             # Free user can decide to remove contact PGP key
+            emit_alias_audit_log(
+                alias=alias,
+                action=AliasAuditLogAction.UpdateContact,
+                message=f"Removed PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
+            )
             contact.pgp_public_key = None
             contact.pgp_finger_print = None
             Session.commit()
@@ -21,7 +21,9 @@ class NewCustomDomainForm(FlaskForm):
 @parallel_limiter.lock(only_when=lambda: request.method == "POST")
 def custom_domain():
     custom_domains = CustomDomain.filter_by(
-        user_id=current_user.id, is_sl_subdomain=False
+        user_id=current_user.id,
+        is_sl_subdomain=False,
+        pending_deletion=False,
     ).all()
     new_custom_domain_form = NewCustomDomainForm()

@@ -8,6 +8,7 @@ from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.log import LOG
 from app.models import Subscription, Job
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 class DeleteDirForm(FlaskForm):

@@ -33,6 +34,11 @@ def delete_account():

     # Schedule delete account job
     LOG.w("schedule delete account job for %s", current_user)
+    emit_user_audit_log(
+        user=current_user,
+        action=UserAuditLogAction.UserMarkedForDeletion,
+        message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
+    )
     Job.create(
         name=JOB_DELETE_ACCOUNT,
         payload={"user_id": current_user.id},
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm

@@ -20,6 +22,7 @@ from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.errors import DirectoryInTrashError
 from app.models import Directory, Mailbox, DirectoryMailbox
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 class NewDirForm(FlaskForm):

@@ -69,7 +72,9 @@ def directory():
             if not delete_dir_form.validate():
                 flash("Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
-            dir_obj = Directory.get(delete_dir_form.directory_id.data)
+            dir_obj: Optional[Directory] = Directory.get(
+                delete_dir_form.directory_id.data
+            )

             if not dir_obj:
                 flash("Unknown error. Refresh the page", "warning")

@@ -79,6 +84,11 @@ def directory():
                 return redirect(url_for("dashboard.directory"))

             name = dir_obj.name
+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.DeleteDirectory,
+                message=f"Delete directory {dir_obj.id} ({dir_obj.name})",
+            )
             Directory.delete(dir_obj.id)
             Session.commit()
             flash(f"Directory {name} has been deleted", "success")

@@ -90,7 +100,7 @@ def directory():
                 flash("Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_id = toggle_dir_form.directory_id.data
-            dir_obj = Directory.get(dir_id)
+            dir_obj: Optional[Directory] = Directory.get(dir_id)

             if not dir_obj or dir_obj.user_id != current_user.id:
                 flash("Unknown error. Refresh the page", "warning")

@@ -103,6 +113,11 @@ def directory():
                 dir_obj.disabled = True
                 flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")

+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.UpdateDirectory,
+                message=f"Updated directory {dir_obj.id} ({dir_obj.name}) set disabled = {dir_obj.disabled}",
+            )
             Session.commit()

             return redirect(url_for("dashboard.directory"))

@@ -112,7 +127,7 @@ def directory():
                 flash("Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_id = update_dir_form.directory_id.data
-            dir_obj = Directory.get(dir_id)
+            dir_obj: Optional[Directory] = Directory.get(dir_id)

             if not dir_obj or dir_obj.user_id != current_user.id:
                 flash("Unknown error. Refresh the page", "warning")

@@ -143,6 +158,12 @@ def directory():
             for mailbox in mailboxes:
                 DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)

+            mailboxes_as_str = ",".join(map(str, mailbox_ids))
+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.UpdateDirectory,
+                message=f"Updated directory {dir_obj.id} ({dir_obj.name}) mailboxes ({mailboxes_as_str})",
+            )
             Session.commit()
             flash(f"Directory {dir_obj.name} has been updated", "success")

@@ -181,6 +202,11 @@ def directory():
                 new_dir = Directory.create(
                     name=new_dir_name, user_id=current_user.id
                 )
+                emit_user_audit_log(
+                    user=current_user,
+                    action=UserAuditLogAction.CreateDirectory,
+                    message=f"New directory {new_dir.name} ({new_dir.name})",
+                )
             except DirectoryInTrashError:
                 flash(
                     f"{new_dir_name} has been used before and cannot be reused",
@@ -7,7 +7,7 @@ from wtforms import StringField, validators, IntegerField

 from app.constants import DMARC_RECORD
 from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
-from app.custom_domain_utils import delete_custom_domain
+from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
 from app.custom_domain_validation import CustomDomainValidation
 from app.dashboard.base import dashboard_bp
 from app.db import Session

@@ -16,11 +16,11 @@ from app.models import (
     Alias,
     DomainDeletedAlias,
     Mailbox,
-    DomainMailbox,
     AutoCreateRule,
     AutoCreateRuleMailbox,
 )
 from app.regex_utils import regex_match
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import random_string, CSRFValidationForm


@@ -37,8 +37,6 @@ def domain_detail_dns(custom_domain_id):
         custom_domain.ownership_txt_token = random_string(30)
         Session.commit()

-    spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"
-
     domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
     csrf_form = CSRFValidationForm()

@@ -142,7 +140,9 @@ def domain_detail_dns(custom_domain_id):
         ownership_record=domain_validator.get_ownership_verification_record(
             custom_domain
         ),
+        expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
         dkim_records=domain_validator.get_dkim_records(custom_domain),
+        spf_record=domain_validator.get_expected_spf_record(custom_domain),
         dmarc_record=DMARC_RECORD,
         **locals(),
     )

@@ -165,6 +165,11 @@ def domain_detail(custom_domain_id):
             return redirect(request.url)
         if request.form.get("form-name") == "switch-catch-all":
             custom_domain.catch_all = not custom_domain.catch_all
+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.UpdateCustomDomain,
+                message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) catch all to {custom_domain.catch_all}",
+            )
             Session.commit()

             if custom_domain.catch_all:

@@ -183,6 +188,11 @@ def domain_detail(custom_domain_id):
         elif request.form.get("form-name") == "set-name":
             if request.form.get("action") == "save":
                 custom_domain.name = request.form.get("alias-name").replace("\n", "")
+                emit_user_audit_log(
+                    user=current_user,
+                    action=UserAuditLogAction.UpdateCustomDomain,
+                    message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) name",
+                )
                 Session.commit()
                 flash(
                     f"Default alias name for Domain {custom_domain.domain} has been set",

@@ -190,6 +200,11 @@ def domain_detail(custom_domain_id):
                 )
             else:
                 custom_domain.name = None
+                emit_user_audit_log(
+                    user=current_user,
+                    action=UserAuditLogAction.UpdateCustomDomain,
+                    message=f"Cleared custom domain {custom_domain.id} ({custom_domain.domain}) name",
+                )
                 Session.commit()
                 flash(
                     f"Default alias name for Domain {custom_domain.domain} has been removed",

@@ -203,6 +218,11 @@ def domain_detail(custom_domain_id):
             custom_domain.random_prefix_generation = (
                 not custom_domain.random_prefix_generation
             )
+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.UpdateCustomDomain,
+                message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) random prefix generation to {custom_domain.random_prefix_generation}",
+            )
             Session.commit()

             if custom_domain.random_prefix_generation:

@@ -220,40 +240,16 @@ def domain_detail(custom_domain_id):
             )
         elif request.form.get("form-name") == "update":
             mailbox_ids = request.form.getlist("mailbox_ids")
-            # check if mailbox is not tempered with
-            mailboxes = []
-            for mailbox_id in mailbox_ids:
-                mailbox = Mailbox.get(mailbox_id)
-                if (
-                    not mailbox
-                    or mailbox.user_id != current_user.id
-                    or not mailbox.verified
-                ):
-                    flash("Something went wrong, please retry", "warning")
-                    return redirect(
-                        url_for(
-                            "dashboard.domain_detail", custom_domain_id=custom_domain.id
-                        )
-                    )
-                mailboxes.append(mailbox)
-
-            if not mailboxes:
-                flash("You must select at least 1 mailbox", "warning")
-                return redirect(
-                    url_for(
-                        "dashboard.domain_detail", custom_domain_id=custom_domain.id
-                    )
-                )
-
-            # first remove all existing domain-mailboxes links
-            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
-            Session.flush()
-
-            for mailbox in mailboxes:
-                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
-
-            Session.commit()
-            flash(f"{custom_domain.domain} mailboxes has been updated", "success")
+            result = set_custom_domain_mailboxes(
+                user_id=current_user.id,
+                custom_domain=custom_domain,
+                mailbox_ids=mailbox_ids,
+            )
+
+            if result.success:
+                flash(f"{custom_domain.domain} mailboxes has been updated", "success")
+            else:
+                flash(result.reason.value, "warning")

             return redirect(
                 url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
@@ -71,7 +71,10 @@ def index():

     page = 0
     if request.args.get("page"):
-        page = int(request.args.get("page"))
+        try:
+            page = int(request.args.get("page"))
+        except ValueError:
+            pass

     highlight_alias_id = None
     if request.args.get("highlight_alias_id"):

@@ -149,7 +152,9 @@ def index():
             )
             flash(f"Alias {email} has been deleted", "success")
         elif request.form.get("form-name") == "disable-alias":
-            alias_utils.change_alias_status(alias, enabled=False)
+            alias_utils.change_alias_status(
+                alias, enabled=False, message="Set enabled=False from dashboard"
+            )
             Session.commit()
             flash(f"Alias {alias.email} has been disabled", "success")

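Both `index()` and the contact manager above now swallow a non-numeric `?page=` value instead of raising a 500. The same guard can be factored into a tiny helper (a sketch, not part of the diff):

```python
from typing import Optional


def parse_page_arg(raw: Optional[str]) -> int:
    # Mirrors the try/except added above: missing or non-numeric values fall back to page 0.
    if not raw:
        return 0
    try:
        return int(raw)
    except ValueError:
        return 0
```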
@@ -1,3 +1,4 @@
+import arrow
 from flask import render_template, flash, redirect, url_for
 from flask_login import login_required, current_user
 from flask_wtf import FlaskForm

@@ -7,6 +8,8 @@ from app.config import ADMIN_EMAIL
 from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.email_utils import send_email
+from app.events.event_dispatcher import EventDispatcher
+from app.events.generated.event_pb2 import UserPlanChanged, EventContent
 from app.models import LifetimeCoupon


@@ -40,6 +43,14 @@ def lifetime_licence():
     current_user.lifetime_coupon_id = coupon.id
     if coupon.paid:
         current_user.paid_lifetime = True
+        EventDispatcher.send_event(
+            user=current_user,
+            content=EventContent(
+                user_plan_change=UserPlanChanged(
+                    plan_end_time=arrow.get("2038-01-01").timestamp
+                )
+            ),
+        )
     Session.commit()

     # notify admin
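When a paid lifetime coupon is applied, a `UserPlanChanged` event is pushed through `EventDispatcher` so external consumers subscribed to these events learn about the plan change; the far-future `2038-01-01` end date appears to act as a "never expires" sentinel. One detail worth noting: `arrow.get(...).timestamp` without parentheses is the arrow<1.0 property returning seconds since the epoch, while arrow 1.x exposes it as a method:

```python
import arrow

dt = arrow.get("2038-01-01")
# arrow < 1.0:  dt.timestamp    (property)
# arrow >= 1.0: dt.timestamp()  (method)
plan_end = dt.timestamp() if callable(dt.timestamp) else dt.timestamp
print(plan_end)  # 2145916800.0 seconds since the epoch
```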
@@ -1,6 +1,7 @@
 import base64
 import binascii
 import json
+from typing import Optional

 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user

@@ -15,6 +16,7 @@ from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.log import LOG
 from app.models import Mailbox
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import CSRFValidationForm


@@ -119,10 +121,16 @@ def mailbox_route():
 @login_required
 def mailbox_verify():
     mailbox_id = request.args.get("mailbox_id")
+    if not mailbox_id:
+        LOG.i("Missing mailbox_id")
+        flash("You followed an invalid link", "error")
+        return redirect(url_for("dashboard.mailbox_route"))
+
     code = request.args.get("code")
     if not code:
         # Old way
         return verify_with_signed_secret(mailbox_id)

     try:
         mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
     except mailbox_utils.MailboxError as e:

@@ -151,7 +159,7 @@ def verify_with_signed_secret(request: str):
         flash("Invalid link. Please delete and re-add your mailbox", "error")
         return redirect(url_for("dashboard.mailbox_route"))
     mailbox_id = mailbox_data[0]
-    mailbox = Mailbox.get(mailbox_id)
+    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
     if not mailbox:
         flash("Invalid link", "error")
         return redirect(url_for("dashboard.mailbox_route"))

@@ -161,6 +169,11 @@ def verify_with_signed_secret(request: str):
         return redirect(url_for("dashboard.mailbox_route"))

     mailbox.verified = True
+    emit_user_audit_log(
+        user=current_user,
+        action=UserAuditLogAction.VerifyMailbox,
+        message=f"Verified mailbox {mailbox.id} ({mailbox.email})",
+    )
     Session.commit()

     LOG.d("Mailbox %s is verified", mailbox)
@@ -16,10 +16,11 @@ from app.db import Session
 from app.email_utils import email_can_be_used_as_mailbox
 from app.email_utils import mailbox_already_used, render, send_email
 from app.extensions import limiter
-from app.log import LOG
+from app.mailbox_utils import perform_mailbox_email_change, MailboxEmailChangeError
 from app.models import Alias, AuthorizedAddress
 from app.models import Mailbox
 from app.pgp_utils import PGPException, load_public_key_and_check
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_email, CSRFValidationForm


@@ -88,8 +89,12 @@ def mailbox_detail_route(mailbox_id):
             flash("SPF enforcement globally not enabled", "error")
             return redirect(url_for("dashboard.index"))

-        mailbox.force_spf = (
-            True if request.form.get("spf-status") == "on" else False
+        force_spf_value = request.form.get("spf-status") == "on"
+        mailbox.force_spf = force_spf_value
+        emit_user_audit_log(
+            user=current_user,
+            action=UserAuditLogAction.UpdateMailbox,
+            message=f"Set force_spf to {force_spf_value} on mailbox {mailbox_id} ({mailbox.email})",
         )
         Session.commit()
         flash(

@@ -113,6 +118,11 @@ def mailbox_detail_route(mailbox_id):
         if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
             flash(f"{address} already added", "error")
         else:
+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.UpdateMailbox,
+                message=f"Add authorized address {address} to mailbox {mailbox_id} ({mailbox.email})",
+            )
             AuthorizedAddress.create(
                 user_id=current_user.id,
                 mailbox_id=mailbox.id,

@@ -133,6 +143,11 @@ def mailbox_detail_route(mailbox_id):
             flash("Unknown error. Refresh the page", "warning")
         else:
             address = authorized_address.email
+            emit_user_audit_log(
+                user=current_user,
+                action=UserAuditLogAction.UpdateMailbox,
+                message=f"Remove authorized address {address} from mailbox {mailbox_id} ({mailbox.email})",
+            )
             AuthorizedAddress.delete(authorized_address_id)
             Session.commit()
             flash(f"{address} has been deleted", "success")

@@ -165,6 +180,11 @@ def mailbox_detail_route(mailbox_id):
             except PGPException:
                 flash("Cannot add the public key, please verify it", "error")
             else:
+                emit_user_audit_log(
+                    user=current_user,
+                    action=UserAuditLogAction.UpdateMailbox,
+                    message=f"Add PGP Key {mailbox.pgp_finger_print} to mailbox {mailbox_id} ({mailbox.email})",
+                )
                 Session.commit()
                 flash("Your PGP public key is saved successfully", "success")
                 return redirect(

@@ -172,6 +192,11 @@ def mailbox_detail_route(mailbox_id):
             )
         elif request.form.get("action") == "remove":
             # Free user can decide to remove their added PGP key
+            emit_user_audit_log(
+                user=current_user,
|
action=UserAuditLogAction.UpdateMailbox,
|
||||||
|
message=f"Remove PGP Key {mailbox.pgp_finger_print} from mailbox {mailbox_id} ({mailbox.email})",
|
||||||
|
)
|
||||||
mailbox.pgp_public_key = None
|
mailbox.pgp_public_key = None
|
||||||
mailbox.pgp_finger_print = None
|
mailbox.pgp_finger_print = None
|
||||||
mailbox.disable_pgp = False
|
mailbox.disable_pgp = False
|
||||||
@ -191,9 +216,19 @@ def mailbox_detail_route(mailbox_id):
|
|||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
mailbox.disable_pgp = False
|
mailbox.disable_pgp = False
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=current_user,
|
||||||
|
action=UserAuditLogAction.UpdateMailbox,
|
||||||
|
message=f"Enabled PGP for mailbox {mailbox_id} ({mailbox.email})",
|
||||||
|
)
|
||||||
flash(f"PGP is enabled on {mailbox.email}", "info")
|
flash(f"PGP is enabled on {mailbox.email}", "info")
|
||||||
else:
|
else:
|
||||||
mailbox.disable_pgp = True
|
mailbox.disable_pgp = True
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=current_user,
|
||||||
|
action=UserAuditLogAction.UpdateMailbox,
|
||||||
|
message=f"Disabled PGP for mailbox {mailbox_id} ({mailbox.email})",
|
||||||
|
)
|
||||||
flash(f"PGP is disabled on {mailbox.email}", "info")
|
flash(f"PGP is disabled on {mailbox.email}", "info")
|
||||||
|
|
||||||
Session.commit()
|
Session.commit()
|
||||||
@ -203,6 +238,11 @@ def mailbox_detail_route(mailbox_id):
|
|||||||
elif request.form.get("form-name") == "generic-subject":
|
elif request.form.get("form-name") == "generic-subject":
|
||||||
if request.form.get("action") == "save":
|
if request.form.get("action") == "save":
|
||||||
mailbox.generic_subject = request.form.get("generic-subject")
|
mailbox.generic_subject = request.form.get("generic-subject")
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=current_user,
|
||||||
|
action=UserAuditLogAction.UpdateMailbox,
|
||||||
|
message=f"Set generic subject for mailbox {mailbox_id} ({mailbox.email})",
|
||||||
|
)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
flash("Generic subject is enabled", "success")
|
flash("Generic subject is enabled", "success")
|
||||||
return redirect(
|
return redirect(
|
||||||
@ -210,6 +250,11 @@ def mailbox_detail_route(mailbox_id):
|
|||||||
)
|
)
|
||||||
elif request.form.get("action") == "remove":
|
elif request.form.get("action") == "remove":
|
||||||
mailbox.generic_subject = None
|
mailbox.generic_subject = None
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=current_user,
|
||||||
|
action=UserAuditLogAction.UpdateMailbox,
|
||||||
|
message=f"Remove generic subject for mailbox {mailbox_id} ({mailbox.email})",
|
||||||
|
)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
flash("Generic subject is disabled", "success")
|
flash("Generic subject is disabled", "success")
|
||||||
return redirect(
|
return redirect(
|
||||||
@ -272,7 +317,7 @@ def cancel_mailbox_change_route(mailbox_id):
|
|||||||
|
|
||||||
|
|
||||||
@dashboard_bp.route("/mailbox/confirm_change")
|
@dashboard_bp.route("/mailbox/confirm_change")
|
||||||
def mailbox_confirm_change_route():
|
def mailbox_confirm_email_change_route():
|
||||||
s = TimestampSigner(MAILBOX_SECRET)
|
s = TimestampSigner(MAILBOX_SECRET)
|
||||||
signed_mailbox_id = request.args.get("mailbox_id")
|
signed_mailbox_id = request.args.get("mailbox_id")
|
||||||
|
|
||||||
@ -281,30 +326,20 @@ def mailbox_confirm_change_route():
|
|||||||
except Exception:
|
except Exception:
|
||||||
flash("Invalid link", "error")
|
flash("Invalid link", "error")
|
||||||
return redirect(url_for("dashboard.index"))
|
return redirect(url_for("dashboard.index"))
|
||||||
else:
|
|
||||||
mailbox = Mailbox.get(mailbox_id)
|
|
||||||
|
|
||||||
# new_email can be None if user cancels change in the meantime
|
res = perform_mailbox_email_change(mailbox_id)
|
||||||
if mailbox and mailbox.new_email:
|
|
||||||
user = mailbox.user
|
|
||||||
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
|
|
||||||
flash(f"{mailbox.new_email} is already used", "error")
|
|
||||||
return redirect(
|
|
||||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
|
|
||||||
)
|
|
||||||
|
|
||||||
mailbox.email = mailbox.new_email
|
flash(res.message, res.message_category)
|
||||||
mailbox.new_email = None
|
if res.error:
|
||||||
|
if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
|
||||||
# mark mailbox as verified if the change request is sent from an unverified mailbox
|
|
||||||
mailbox.verified = True
|
|
||||||
Session.commit()
|
|
||||||
|
|
||||||
LOG.d("Mailbox change %s is verified", mailbox)
|
|
||||||
flash(f"The {mailbox.email} is updated", "success")
|
|
||||||
return redirect(
|
return redirect(
|
||||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
|
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||||
)
|
)
|
||||||
else:
|
elif res.error == MailboxEmailChangeError.InvalidId:
|
||||||
flash("Invalid link", "error")
|
|
||||||
return redirect(url_for("dashboard.index"))
|
return redirect(url_for("dashboard.index"))
|
||||||
|
else:
|
||||||
|
raise Exception("Unhandled MailboxEmailChangeError")
|
||||||
|
else:
|
||||||
|
return redirect(
|
||||||
|
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||||
|
)
|
||||||
|
@ -43,7 +43,10 @@ def notification_route(notification_id):
|
|||||||
def notifications_route():
|
def notifications_route():
|
||||||
page = 0
|
page = 0
|
||||||
if request.args.get("page"):
|
if request.args.get("page"):
|
||||||
page = int(request.args.get("page"))
|
try:
|
||||||
|
page = int(request.args.get("page"))
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
notifications = (
|
notifications = (
|
||||||
Notification.filter_by(user_id=current_user.id)
|
Notification.filter_by(user_id=current_user.id)
|
||||||
|
@ -11,6 +11,7 @@ from app.dashboard.base import dashboard_bp
|
|||||||
from app.errors import SubdomainInTrashError
|
from app.errors import SubdomainInTrashError
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import CustomDomain, Mailbox, SLDomain
|
from app.models import CustomDomain, Mailbox, SLDomain
|
||||||
|
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||||
|
|
||||||
# Only lowercase letters, numbers, dashes (-) are currently supported
|
# Only lowercase letters, numbers, dashes (-) are currently supported
|
||||||
_SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"
|
_SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"
|
||||||
@ -102,6 +103,12 @@ def subdomain_route():
|
|||||||
ownership_verified=True,
|
ownership_verified=True,
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=current_user,
|
||||||
|
action=UserAuditLogAction.CreateCustomDomain,
|
||||||
|
message=f"Create subdomain {new_custom_domain.id} ({full_domain})",
|
||||||
|
commit=True,
|
||||||
|
)
|
||||||
except SubdomainInTrashError:
|
except SubdomainInTrashError:
|
||||||
flash(
|
flash(
|
||||||
f"{full_domain} has been used before and cannot be reused",
|
f"{full_domain} has been used before and cannot be reused",
|
||||||
|
@ -32,7 +32,9 @@ def unsubscribe(alias_id):
|
|||||||
|
|
||||||
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
|
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
alias_utils.change_alias_status(alias, False)
|
alias_utils.change_alias_status(
|
||||||
|
alias, enabled=False, message="Set enabled=False from unsubscribe request"
|
||||||
|
)
|
||||||
flash(f"Alias {alias.email} has been blocked", "success")
|
flash(f"Alias {alias.email} has been blocked", "success")
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import List, Tuple, Optional
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
import dns.resolver
|
import dns.resolver
|
||||||
|
|
||||||
@ -8,8 +9,14 @@ from app.config import NAMESERVERS
|
|||||||
_include_spf = "include:"
|
_include_spf = "include:"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class MxRecord:
|
||||||
|
priority: int
|
||||||
|
domain: str
|
||||||
|
|
||||||
|
|
||||||
def is_mx_equivalent(
|
def is_mx_equivalent(
|
||||||
mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
|
mx_domains: List[MxRecord], ref_mx_domains: List[MxRecord]
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""
|
"""
|
||||||
Compare mx_domains with ref_mx_domains to see if they are equivalent.
|
Compare mx_domains with ref_mx_domains to see if they are equivalent.
|
||||||
@ -18,14 +25,14 @@ def is_mx_equivalent(
|
|||||||
The priority order is taken into account but not the priority number.
|
The priority order is taken into account but not the priority number.
|
||||||
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
|
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
|
||||||
"""
|
"""
|
||||||
mx_domains = sorted(mx_domains, key=lambda x: x[0])
|
mx_domains = sorted(mx_domains, key=lambda x: x.priority)
|
||||||
ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x[0])
|
ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x.priority)
|
||||||
|
|
||||||
if len(mx_domains) < len(ref_mx_domains):
|
if len(mx_domains) < len(ref_mx_domains):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
for i in range(len(ref_mx_domains)):
|
for actual, expected in zip(mx_domains, ref_mx_domains):
|
||||||
if mx_domains[i][1] != ref_mx_domains[i][1]:
|
if actual.domain != expected.domain:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
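The MxRecord dataclass replaces the old (priority, domain) tuples, so callers compare by field name instead of tuple index. A minimal sketch of the new comparison with made-up record values, assuming these helpers live in app.dns_utils as elsewhere in this diff:

from app.dns_utils import MxRecord, is_mx_equivalent

ours = [
    MxRecord(priority=10, domain="mx1.example.com."),
    MxRecord(priority=20, domain="mx2.example.com."),
]
reference = [
    MxRecord(priority=1, domain="mx1.example.com."),
    MxRecord(priority=2, domain="mx2.example.com."),
]

# Only the ordering induced by priority and the domain names matter,
# not the numeric priority values themselves.
assert is_mx_equivalent(ours, reference)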
@ -37,7 +44,7 @@ class DNSClient(ABC):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_mx_domains(self, hostname: str) -> List[Tuple[int, str]]:
|
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def get_spf_domain(self, hostname: str) -> List[str]:
|
def get_spf_domain(self, hostname: str) -> List[str]:
|
||||||
@ -81,7 +88,7 @@ class NetworkDNSClient(DNSClient):
|
|||||||
except Exception:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_mx_domains(self, hostname: str) -> List[Tuple[int, str]]:
|
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
|
||||||
"""
|
"""
|
||||||
return list of (priority, domain name) sorted by priority (lowest priority first)
|
return list of (priority, domain name) sorted by priority (lowest priority first)
|
||||||
domain name ends with a "." at the end.
|
domain name ends with a "." at the end.
|
||||||
@ -92,14 +99,14 @@ class NetworkDNSClient(DNSClient):
|
|||||||
for a in answers:
|
for a in answers:
|
||||||
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
|
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
|
||||||
parts = record.split(" ")
|
parts = record.split(" ")
|
||||||
ret.append((int(parts[0]), parts[1]))
|
ret.append(MxRecord(priority=int(parts[0]), domain=parts[1]))
|
||||||
return sorted(ret, key=lambda x: x[0])
|
return sorted(ret, key=lambda x: x.priority)
|
||||||
except Exception:
|
except Exception:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def get_txt_record(self, hostname: str) -> List[str]:
|
def get_txt_record(self, hostname: str) -> List[str]:
|
||||||
try:
|
try:
|
||||||
answers = self._resolver.resolve(hostname, "TXT", search=True)
|
answers = self._resolver.resolve(hostname, "TXT", search=False)
|
||||||
ret = []
|
ret = []
|
||||||
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
|
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
|
||||||
for record in a.strings:
|
for record in a.strings:
|
||||||
@ -112,14 +119,14 @@ class NetworkDNSClient(DNSClient):
|
|||||||
class InMemoryDNSClient(DNSClient):
|
class InMemoryDNSClient(DNSClient):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.cname_records: dict[str, Optional[str]] = {}
|
self.cname_records: dict[str, Optional[str]] = {}
|
||||||
self.mx_records: dict[str, List[Tuple[int, str]]] = {}
|
self.mx_records: dict[str, List[MxRecord]] = {}
|
||||||
self.spf_records: dict[str, List[str]] = {}
|
self.spf_records: dict[str, List[str]] = {}
|
||||||
self.txt_records: dict[str, List[str]] = {}
|
self.txt_records: dict[str, List[str]] = {}
|
||||||
|
|
||||||
def set_cname_record(self, hostname: str, cname: str):
|
def set_cname_record(self, hostname: str, cname: str):
|
||||||
self.cname_records[hostname] = cname
|
self.cname_records[hostname] = cname
|
||||||
|
|
||||||
def set_mx_records(self, hostname: str, mx_list: List[Tuple[int, str]]):
|
def set_mx_records(self, hostname: str, mx_list: List[MxRecord]):
|
||||||
self.mx_records[hostname] = mx_list
|
self.mx_records[hostname] = mx_list
|
||||||
|
|
||||||
def set_txt_record(self, hostname: str, txt_list: List[str]):
|
def set_txt_record(self, hostname: str, txt_list: List[str]):
|
||||||
@ -128,9 +135,9 @@ class InMemoryDNSClient(DNSClient):
|
|||||||
def get_cname_record(self, hostname: str) -> Optional[str]:
|
def get_cname_record(self, hostname: str) -> Optional[str]:
|
||||||
return self.cname_records.get(hostname)
|
return self.cname_records.get(hostname)
|
||||||
|
|
||||||
def get_mx_domains(self, hostname: str) -> List[Tuple[int, str]]:
|
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
|
||||||
mx_list = self.mx_records.get(hostname, [])
|
mx_list = self.mx_records.get(hostname, [])
|
||||||
return sorted(mx_list, key=lambda x: x[0])
|
return sorted(mx_list, key=lambda x: x.priority)
|
||||||
|
|
||||||
def get_txt_record(self, hostname: str) -> List[str]:
|
def get_txt_record(self, hostname: str) -> List[str]:
|
||||||
return self.txt_records.get(hostname, [])
|
return self.txt_records.get(hostname, [])
|
||||||
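Because InMemoryDNSClient now stores MxRecord objects too, test fixtures stay aligned with the production client. A rough usage sketch; the hostnames are illustrative only:

client = InMemoryDNSClient()
client.set_mx_records(
    "example.com",
    [
        MxRecord(priority=20, domain="mx2.example.com."),
        MxRecord(priority=10, domain="mx1.example.com."),
    ],
)

# get_mx_domains re-sorts by priority, so mx1 comes back first.
assert [r.domain for r in client.get_mx_domains("example.com")] == [
    "mx1.example.com.",
    "mx2.example.com.",
]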
@ -140,5 +147,5 @@ def get_network_dns_client() -> NetworkDNSClient:
|
|||||||
return NetworkDNSClient(NAMESERVERS)
|
return NetworkDNSClient(NAMESERVERS)
|
||||||
|
|
||||||
|
|
||||||
def get_mx_domains(hostname: str) -> [(int, str)]:
|
def get_mx_domains(hostname: str) -> List[MxRecord]:
|
||||||
return get_network_dns_client().get_mx_domains(hostname)
|
return get_network_dns_client().get_mx_domains(hostname)
|
||||||
|
@ -592,7 +592,7 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
|
|||||||
|
|
||||||
from app.models import CustomDomain
|
from app.models import CustomDomain
|
||||||
|
|
||||||
if CustomDomain.get_by(domain=domain, verified=True):
|
if CustomDomain.get_by(domain=domain, is_sl_subdomain=True, verified=True):
|
||||||
LOG.d("domain %s is a SimpleLogin custom domain", domain)
|
LOG.d("domain %s is a SimpleLogin custom domain", domain)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -657,7 +657,7 @@ def get_mx_domain_list(domain) -> [str]:
|
|||||||
"""
|
"""
|
||||||
priority_domains = get_mx_domains(domain)
|
priority_domains = get_mx_domains(domain)
|
||||||
|
|
||||||
return [d[:-1] for _, d in priority_domains]
|
return [d.domain[:-1] for d in priority_domains]
|
||||||
|
|
||||||
|
|
||||||
def personal_email_already_used(email_address: str) -> bool:
|
def personal_email_already_used(email_address: str) -> bool:
|
||||||
|
@ -64,10 +64,6 @@ class EventDispatcher:
|
|||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
if config.EVENT_WEBHOOK_ENABLED_USER_IDS is not None:
|
|
||||||
if user.id not in config.EVENT_WEBHOOK_ENABLED_USER_IDS:
|
|
||||||
return
|
|
||||||
|
|
||||||
partner_user = EventDispatcher.__partner_user(user.id)
|
partner_user = EventDispatcher.__partner_user(user.id)
|
||||||
if not partner_user:
|
if not partner_user:
|
||||||
LOG.i(f"Not sending events because there's no partner user for user {user}")
|
LOG.i(f"Not sending events because there's no partner user for user {user}")
|
||||||
|
@ -103,7 +103,9 @@ class UnsubscribeHandler:
|
|||||||
):
|
):
|
||||||
return status.E509
|
return status.E509
|
||||||
LOG.i(f"User disabled alias {alias} via unsubscribe header")
|
LOG.i(f"User disabled alias {alias} via unsubscribe header")
|
||||||
alias_utils.change_alias_status(alias, enabled=False)
|
alias_utils.change_alias_status(
|
||||||
|
alias, enabled=False, message="Set enabled=False via unsubscribe header"
|
||||||
|
)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
|
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
|
||||||
for mailbox in alias.mailboxes:
|
for mailbox in alias.mailboxes:
|
||||||
|
70 app/app/jobs/send_event_job.py Normal file
@ -0,0 +1,70 @@
from __future__ import annotations

import base64
from typing import Optional

import arrow

from app import config
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.events.generated.event_pb2 import EventContent
from app.models import (
    User,
    Job,
    PartnerUser,
)
from app.proton.utils import get_proton_partner
from events.event_sink import EventSink


class SendEventToWebhookJob:
    def __init__(self, user: User, event: EventContent):
        self._user: User = user
        self._event: EventContent = event

    def run(self, sink: EventSink) -> bool:
        # Check if the current user has a partner_id
        try:
            proton_partner_id = get_proton_partner().id
        except ProtonPartnerNotSetUp:
            return False

        # It has. Retrieve the information for the PartnerUser
        partner_user = PartnerUser.get_by(
            user_id=self._user.id, partner_id=proton_partner_id
        )
        if partner_user is None:
            return True

        event = event_pb2.Event(
            user_id=self._user.id,
            external_user_id=partner_user.external_user_id,
            partner_id=partner_user.partner_id,
            content=self._event,
        )

        serialized = event.SerializeToString()
        return sink.send_data_to_webhook(serialized)

    @staticmethod
    def create_from_job(job: Job) -> Optional[SendEventToWebhookJob]:
        user = User.get(job.payload["user_id"])
        if not user:
            return None
        event_data = base64.b64decode(job.payload["event"])
        event = event_pb2.EventContent()
        event.ParseFromString(event_data)

        return SendEventToWebhookJob(user=user, event=event)

    def store_job_in_db(self, run_at: Optional[arrow.Arrow]) -> Job:
        stub = self._event.SerializeToString()
        return Job.create(
            name=config.JOB_SEND_EVENT_TO_WEBHOOK,
            payload={
                "user_id": self._user.id,
                "event": base64.b64encode(stub).decode("utf-8"),
            },
            run_at=run_at if run_at is not None else arrow.now(),
            commit=True,
        )
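A hedged sketch of how this job is meant to round-trip: the event is serialized into the Job table, later rebuilt from the payload by the job runner, and executed against a sink. The user, event, job and sink names below are placeholders, not code taken from the repository:

# "event" is an EventContent protobuf (e.g. the alias_created event built in
# models.py), "user" the matching User row.
SendEventToWebhookJob(user=user, event=event).store_job_in_db(run_at=None)

# ... later, in the job runner, "job" is the stored Job row ...
handler = SendEventToWebhookJob.create_from_job(job)
if handler is not None:
    handler.run(sink=event_sink)  # any EventSink implementation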
@ -1,6 +1,6 @@
|
|||||||
import dataclasses
|
import dataclasses
|
||||||
import secrets
|
import secrets
|
||||||
import random
|
from enum import Enum
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
import arrow
|
import arrow
|
||||||
|
|
||||||
@ -16,6 +16,7 @@ from app.email_utils import (
|
|||||||
from app.email_validation import is_valid_email
|
from app.email_validation import is_valid_email
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
from app.models import User, Mailbox, Job, MailboxActivation
|
from app.models import User, Mailbox, Job, MailboxActivation
|
||||||
|
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass
|
@dataclasses.dataclass
|
||||||
@ -35,8 +36,9 @@ class OnlyPaidError(MailboxError):
|
|||||||
|
|
||||||
|
|
||||||
class CannotVerifyError(MailboxError):
|
class CannotVerifyError(MailboxError):
|
||||||
def __init__(self, msg: str):
|
def __init__(self, msg: str, deleted_activation_code: bool = False):
|
||||||
self.msg = msg
|
self.msg = msg
|
||||||
|
self.deleted_activation_code = deleted_activation_code
|
||||||
|
|
||||||
|
|
||||||
MAX_ACTIVATION_TRIES = 3
|
MAX_ACTIVATION_TRIES = 3
|
||||||
@ -70,9 +72,15 @@ def create_mailbox(
|
|||||||
f"User {user} has tried to create mailbox with {email} but email is invalid"
|
f"User {user} has tried to create mailbox with {email} but email is invalid"
|
||||||
)
|
)
|
||||||
raise MailboxError("Invalid email")
|
raise MailboxError("Invalid email")
|
||||||
new_mailbox = Mailbox.create(
|
new_mailbox: Mailbox = Mailbox.create(
|
||||||
email=email, user_id=user.id, verified=verified, commit=True
|
email=email, user_id=user.id, verified=verified, commit=True
|
||||||
)
|
)
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=user,
|
||||||
|
action=UserAuditLogAction.CreateMailbox,
|
||||||
|
message=f"Create mailbox {new_mailbox.id} ({new_mailbox.email}). Verified={verified}",
|
||||||
|
commit=True,
|
||||||
|
)
|
||||||
|
|
||||||
if verified:
|
if verified:
|
||||||
LOG.i(f"User {user} as created a pre-verified mailbox with {email}")
|
LOG.i(f"User {user} as created a pre-verified mailbox with {email}")
|
||||||
@ -129,7 +137,7 @@ def delete_mailbox(
|
|||||||
|
|
||||||
if not transfer_mailbox.verified:
|
if not transfer_mailbox.verified:
|
||||||
LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
|
LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
|
||||||
MailboxError("Your new mailbox is not verified")
|
raise MailboxError("Your new mailbox is not verified")
|
||||||
|
|
||||||
# Schedule delete account job
|
# Schedule delete account job
|
||||||
LOG.i(
|
LOG.i(
|
||||||
@ -163,17 +171,17 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
|
|||||||
f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
|
f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
|
||||||
)
|
)
|
||||||
raise MailboxError("Invalid mailbox")
|
raise MailboxError("Invalid mailbox")
|
||||||
|
if mailbox.user_id != user.id:
|
||||||
|
LOG.i(
|
||||||
|
f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
|
||||||
|
)
|
||||||
|
raise MailboxError("Invalid mailbox")
|
||||||
if mailbox.verified:
|
if mailbox.verified:
|
||||||
LOG.i(
|
LOG.i(
|
||||||
f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
|
f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
|
||||||
)
|
)
|
||||||
clear_activation_codes_for_mailbox(mailbox)
|
clear_activation_codes_for_mailbox(mailbox)
|
||||||
return mailbox
|
return mailbox
|
||||||
if mailbox.user_id != user.id:
|
|
||||||
LOG.i(
|
|
||||||
f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
|
|
||||||
)
|
|
||||||
raise MailboxError("Invalid mailbox")
|
|
||||||
|
|
||||||
activation = (
|
activation = (
|
||||||
MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
|
MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
|
||||||
@ -188,7 +196,10 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
|
|||||||
if activation.tries >= MAX_ACTIVATION_TRIES:
|
if activation.tries >= MAX_ACTIVATION_TRIES:
|
||||||
LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
|
LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
|
||||||
clear_activation_codes_for_mailbox(mailbox)
|
clear_activation_codes_for_mailbox(mailbox)
|
||||||
raise CannotVerifyError("Invalid activation code. Please request another code.")
|
raise CannotVerifyError(
|
||||||
|
"Invalid activation code. Please request another code.",
|
||||||
|
deleted_activation_code=True,
|
||||||
|
)
|
||||||
if activation.created_at < arrow.now().shift(minutes=-15):
|
if activation.created_at < arrow.now().shift(minutes=-15):
|
||||||
LOG.i(
|
LOG.i(
|
||||||
f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
|
f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
|
||||||
@ -204,6 +215,11 @@ def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
|
|||||||
raise CannotVerifyError("Invalid activation code")
|
raise CannotVerifyError("Invalid activation code")
|
||||||
LOG.i(f"User {user} has verified mailbox {mailbox_id}")
|
LOG.i(f"User {user} has verified mailbox {mailbox_id}")
|
||||||
mailbox.verified = True
|
mailbox.verified = True
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=user,
|
||||||
|
action=UserAuditLogAction.VerifyMailbox,
|
||||||
|
message=f"Verify mailbox {mailbox_id} ({mailbox.email})",
|
||||||
|
)
|
||||||
clear_activation_codes_for_mailbox(mailbox)
|
clear_activation_codes_for_mailbox(mailbox)
|
||||||
return mailbox
|
return mailbox
|
||||||
|
|
||||||
@ -213,7 +229,10 @@ def generate_activation_code(
|
|||||||
) -> MailboxActivation:
|
) -> MailboxActivation:
|
||||||
clear_activation_codes_for_mailbox(mailbox)
|
clear_activation_codes_for_mailbox(mailbox)
|
||||||
if use_digit_code:
|
if use_digit_code:
|
||||||
code = "{:06d}".format(random.randint(1, 999999))
|
if config.MAILBOX_VERIFICATION_OVERRIDE_CODE:
|
||||||
|
code = config.MAILBOX_VERIFICATION_OVERRIDE_CODE
|
||||||
|
else:
|
||||||
|
code = "{:06d}".format(secrets.randbelow(1000000))[:6]
|
||||||
else:
|
else:
|
||||||
code = secrets.token_urlsafe(16)
|
code = secrets.token_urlsafe(16)
|
||||||
return MailboxActivation.create(
|
return MailboxActivation.create(
|
||||||
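The digit path now draws from the secrets module instead of random and honours a configurable override code (useful for test environments). The same idea as a standalone snippet, with a hypothetical helper name:

import secrets
from typing import Optional

def sample_digit_code(override: Optional[str] = None) -> str:
    # Fixed override if configured, otherwise a zero-padded
    # six-digit code from a CSPRNG.
    if override:
        return override
    return "{:06d}".format(secrets.randbelow(1_000_000))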
@ -258,3 +277,54 @@ def send_verification_email(
|
|||||||
mailbox_email=mailbox.email,
|
mailbox_email=mailbox.email,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MailboxEmailChangeError(Enum):
|
||||||
|
InvalidId = 1
|
||||||
|
EmailAlreadyUsed = 2
|
||||||
|
|
||||||
|
|
||||||
|
@dataclasses.dataclass
|
||||||
|
class MailboxEmailChangeResult:
|
||||||
|
error: Optional[MailboxEmailChangeError]
|
||||||
|
message: str
|
||||||
|
message_category: str
|
||||||
|
|
||||||
|
|
||||||
|
def perform_mailbox_email_change(mailbox_id: int) -> MailboxEmailChangeResult:
|
||||||
|
mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
|
||||||
|
|
||||||
|
# new_email can be None if user cancels change in the meantime
|
||||||
|
if mailbox and mailbox.new_email:
|
||||||
|
user = mailbox.user
|
||||||
|
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
|
||||||
|
return MailboxEmailChangeResult(
|
||||||
|
error=MailboxEmailChangeError.EmailAlreadyUsed,
|
||||||
|
message=f"{mailbox.new_email} is already used",
|
||||||
|
message_category="error",
|
||||||
|
)
|
||||||
|
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=user,
|
||||||
|
action=UserAuditLogAction.UpdateMailbox,
|
||||||
|
message=f"Change mailbox email for mailbox {mailbox_id} (old={mailbox.email} | new={mailbox.new_email})",
|
||||||
|
)
|
||||||
|
mailbox.email = mailbox.new_email
|
||||||
|
mailbox.new_email = None
|
||||||
|
|
||||||
|
# mark mailbox as verified if the change request is sent from an unverified mailbox
|
||||||
|
mailbox.verified = True
|
||||||
|
Session.commit()
|
||||||
|
|
||||||
|
LOG.d("Mailbox change %s is verified", mailbox)
|
||||||
|
return MailboxEmailChangeResult(
|
||||||
|
error=None,
|
||||||
|
message=f"The {mailbox.email} is updated",
|
||||||
|
message_category="success",
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return MailboxEmailChangeResult(
|
||||||
|
error=MailboxEmailChangeError.InvalidId,
|
||||||
|
message="Invalid link",
|
||||||
|
message_category="error",
|
||||||
|
)
|
||||||
|
@ -24,6 +24,7 @@ from sqlalchemy import text, desc, CheckConstraint, Index, Column
|
|||||||
from sqlalchemy.dialects.postgresql import TSVECTOR
|
from sqlalchemy.dialects.postgresql import TSVECTOR
|
||||||
from sqlalchemy.ext.declarative import declarative_base
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
from sqlalchemy.orm import deferred
|
from sqlalchemy.orm import deferred
|
||||||
|
from sqlalchemy.orm.exc import ObjectDeletedError
|
||||||
from sqlalchemy.sql import and_
|
from sqlalchemy.sql import and_
|
||||||
from sqlalchemy_utils import ArrowType
|
from sqlalchemy_utils import ArrowType
|
||||||
|
|
||||||
@ -157,6 +158,8 @@ class File(Base, ModelMixin):
|
|||||||
path = sa.Column(sa.String(128), unique=True, nullable=False)
|
path = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||||
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
|
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_file_user_id", "user_id"),)
|
||||||
|
|
||||||
def get_url(self, expires_in=3600):
|
def get_url(self, expires_in=3600):
|
||||||
return s3.get_url(self.path, expires_in)
|
return s3.get_url(self.path, expires_in)
|
||||||
|
|
||||||
@ -318,6 +321,8 @@ class HibpNotifiedAlias(Base, ModelMixin):
|
|||||||
|
|
||||||
notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
|
notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_hibp_notified_alias_user_id", "user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class Fido(Base, ModelMixin):
|
class Fido(Base, ModelMixin):
|
||||||
__tablename__ = "fido"
|
__tablename__ = "fido"
|
||||||
@ -332,11 +337,13 @@ class Fido(Base, ModelMixin):
|
|||||||
name = sa.Column(sa.String(128), nullable=False, unique=False)
|
name = sa.Column(sa.String(128), nullable=False, unique=False)
|
||||||
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
|
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=True)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_fido_user_id", "user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||||
__tablename__ = "users"
|
__tablename__ = "users"
|
||||||
|
|
||||||
FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
|
FLAG_DISABLE_CREATE_CONTACTS = 1 << 0
|
||||||
FLAG_CREATED_FROM_PARTNER = 1 << 1
|
FLAG_CREATED_FROM_PARTNER = 1 << 1
|
||||||
FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
|
FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
|
||||||
FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
|
FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
|
||||||
@ -543,7 +550,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||||||
# bitwise flags. Allow for future expansion
|
# bitwise flags. Allow for future expansion
|
||||||
flags = sa.Column(
|
flags = sa.Column(
|
||||||
sa.BigInteger,
|
sa.BigInteger,
|
||||||
default=FLAG_FREE_DISABLE_CREATE_ALIAS,
|
default=FLAG_DISABLE_CREATE_CONTACTS,
|
||||||
server_default="0",
|
server_default="0",
|
||||||
nullable=False,
|
nullable=False,
|
||||||
)
|
)
|
||||||
@ -564,6 +571,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||||||
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
|
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
|
||||||
),
|
),
|
||||||
sa.Index("ix_users_delete_on", delete_on),
|
sa.Index("ix_users_delete_on", delete_on),
|
||||||
|
sa.Index("ix_users_default_mailbox_id", default_mailbox_id),
|
||||||
|
sa.Index(
|
||||||
|
"ix_users_default_alias_custom_domain_id", default_alias_custom_domain_id
|
||||||
|
),
|
||||||
|
sa.Index("ix_users_profile_picture_id", profile_picture_id),
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -616,6 +628,15 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||||||
if "alternative_id" not in kwargs:
|
if "alternative_id" not in kwargs:
|
||||||
user.alternative_id = str(uuid.uuid4())
|
user.alternative_id = str(uuid.uuid4())
|
||||||
|
|
||||||
|
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||||
|
|
||||||
|
trail = ". Created from partner" if from_partner else ""
|
||||||
|
emit_user_audit_log(
|
||||||
|
user=user,
|
||||||
|
action=UserAuditLogAction.CreateUser,
|
||||||
|
message=f"Created user {email}{trail}",
|
||||||
|
)
|
||||||
|
|
||||||
# If the user is created from partner, do not notify
|
# If the user is created from partner, do not notify
|
||||||
# nor give a trial
|
# nor give a trial
|
||||||
if from_partner:
|
if from_partner:
|
||||||
@ -1168,7 +1189,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||||||
def can_create_contacts(self) -> bool:
|
def can_create_contacts(self) -> bool:
|
||||||
if self.is_premium():
|
if self.is_premium():
|
||||||
return True
|
return True
|
||||||
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
|
if self.flags & User.FLAG_DISABLE_CREATE_CONTACTS == 0:
|
||||||
return True
|
return True
|
||||||
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
|
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
|
||||||
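can_create_contacts stays a plain bitwise gate; only the constant's name changes, not the stored bit. An illustrative check with a hypothetical flags value:

FLAG_DISABLE_CREATE_CONTACTS = 1 << 0  # same bit as the old FLAG_FREE_DISABLE_CREATE_ALIAS

flags = 0b0101  # hypothetical user.flags value with bit 0 set
can_create = (flags & FLAG_DISABLE_CREATE_CONTACTS) == 0  # False here, so the config switch decides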
|
|
||||||
@ -1211,6 +1232,8 @@ class ActivationCode(Base, ModelMixin):
|
|||||||
|
|
||||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_activation_code_user_id", "user_id"),)
|
||||||
|
|
||||||
def is_expired(self):
|
def is_expired(self):
|
||||||
return self.expired < arrow.now()
|
return self.expired < arrow.now()
|
||||||
|
|
||||||
@ -1227,6 +1250,8 @@ class ResetPasswordCode(Base, ModelMixin):
|
|||||||
|
|
||||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_reset_password_code_user_id", "user_id"),)
|
||||||
|
|
||||||
def is_expired(self):
|
def is_expired(self):
|
||||||
return self.expired < arrow.now()
|
return self.expired < arrow.now()
|
||||||
|
|
||||||
@ -1269,6 +1294,8 @@ class MfaBrowser(Base, ModelMixin):
|
|||||||
|
|
||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_mfa_browser_user_id", "user_id"),)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_new(cls, user, token_length=64) -> "MfaBrowser":
|
def create_new(cls, user, token_length=64) -> "MfaBrowser":
|
||||||
found = False
|
found = False
|
||||||
@ -1327,6 +1354,12 @@ class Client(Base, ModelMixin):
|
|||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
referral = orm.relationship("Referral")
|
referral = orm.relationship("Referral")
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_client_user_id", "user_id"),
|
||||||
|
sa.Index("ix_client_icon_id", "icon_id"),
|
||||||
|
sa.Index("ix_client_referral_id", "referral_id"),
|
||||||
|
)
|
||||||
|
|
||||||
def nb_user(self):
|
def nb_user(self):
|
||||||
return ClientUser.filter_by(client_id=self.id).count()
|
return ClientUser.filter_by(client_id=self.id).count()
|
||||||
|
|
||||||
@ -1375,6 +1408,8 @@ class RedirectUri(Base, ModelMixin):
|
|||||||
|
|
||||||
client = orm.relationship(Client, backref="redirect_uris")
|
client = orm.relationship(Client, backref="redirect_uris")
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_redirect_uri_client_id", "client_id"),)
|
||||||
|
|
||||||
|
|
||||||
class AuthorizationCode(Base, ModelMixin):
|
class AuthorizationCode(Base, ModelMixin):
|
||||||
__tablename__ = "authorization_code"
|
__tablename__ = "authorization_code"
|
||||||
@ -1396,6 +1431,11 @@ class AuthorizationCode(Base, ModelMixin):
|
|||||||
|
|
||||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m)
|
expired = sa.Column(ArrowType, nullable=False, default=_expiration_5m)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_authorization_code_client_id", "client_id"),
|
||||||
|
sa.Index("ix_authorization_code_user_id", "user_id"),
|
||||||
|
)
|
||||||
|
|
||||||
def is_expired(self):
|
def is_expired(self):
|
||||||
return self.expired < arrow.now()
|
return self.expired < arrow.now()
|
||||||
|
|
||||||
@ -1418,6 +1458,11 @@ class OauthToken(Base, ModelMixin):
|
|||||||
|
|
||||||
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
expired = sa.Column(ArrowType, nullable=False, default=_expiration_1h)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_oauth_token_user_id", "user_id"),
|
||||||
|
sa.Index("ix_oauth_token_client_id", "client_id"),
|
||||||
|
)
|
||||||
|
|
||||||
def is_expired(self):
|
def is_expired(self):
|
||||||
return self.expired < arrow.now()
|
return self.expired < arrow.now()
|
||||||
|
|
||||||
@ -1571,6 +1616,7 @@ class Alias(Base, ModelMixin):
|
|||||||
postgresql_ops={"note": "gin_trgm_ops"},
|
postgresql_ops={"note": "gin_trgm_ops"},
|
||||||
postgresql_using="gin",
|
postgresql_using="gin",
|
||||||
),
|
),
|
||||||
|
Index("ix_alias_original_owner_id", "original_owner_id"),
|
||||||
)
|
)
|
||||||
|
|
||||||
user = orm.relationship(User, foreign_keys=[user_id])
|
user = orm.relationship(User, foreign_keys=[user_id])
|
||||||
@ -1656,6 +1702,11 @@ class Alias(Base, ModelMixin):
|
|||||||
custom_domain = Alias.get_custom_domain(email)
|
custom_domain = Alias.get_custom_domain(email)
|
||||||
if custom_domain:
|
if custom_domain:
|
||||||
new_alias.custom_domain_id = custom_domain.id
|
new_alias.custom_domain_id = custom_domain.id
|
||||||
|
else:
|
||||||
|
custom_domain = CustomDomain.get(kw["custom_domain_id"])
|
||||||
|
# If it comes from a custom domain created from partner. Mark it as created from partner
|
||||||
|
if custom_domain is not None and custom_domain.partner_id is not None:
|
||||||
|
new_alias.flags = (new_alias.flags or 0) | Alias.FLAG_PARTNER_CREATED
|
||||||
|
|
||||||
Session.add(new_alias)
|
Session.add(new_alias)
|
||||||
DailyMetric.get_or_create_today_metric().nb_alias += 1
|
DailyMetric.get_or_create_today_metric().nb_alias += 1
|
||||||
@ -1673,6 +1724,7 @@ class Alias(Base, ModelMixin):
|
|||||||
Session.flush()
|
Session.flush()
|
||||||
|
|
||||||
# Internal import to avoid global import cycles
|
# Internal import to avoid global import cycles
|
||||||
|
from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
|
||||||
from app.events.event_dispatcher import EventDispatcher
|
from app.events.event_dispatcher import EventDispatcher
|
||||||
from app.events.generated.event_pb2 import AliasCreated, EventContent
|
from app.events.generated.event_pb2 import AliasCreated, EventContent
|
||||||
|
|
||||||
@ -1684,6 +1736,9 @@ class Alias(Base, ModelMixin):
|
|||||||
created_at=int(new_alias.created_at.timestamp),
|
created_at=int(new_alias.created_at.timestamp),
|
||||||
)
|
)
|
||||||
EventDispatcher.send_event(user, EventContent(alias_created=event))
|
EventDispatcher.send_event(user, EventContent(alias_created=event))
|
||||||
|
emit_alias_audit_log(
|
||||||
|
new_alias, AliasAuditLogAction.CreateAlias, "New alias created"
|
||||||
|
)
|
||||||
|
|
||||||
return new_alias
|
return new_alias
|
||||||
|
|
||||||
@ -2055,7 +2110,12 @@ class Contact(Base, ModelMixin):
|
|||||||
|
|
||||||
class EmailLog(Base, ModelMixin):
|
class EmailLog(Base, ModelMixin):
|
||||||
__tablename__ = "email_log"
|
__tablename__ = "email_log"
|
||||||
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
|
__table_args__ = (
|
||||||
|
Index("ix_email_log_created_at", "created_at"),
|
||||||
|
Index("ix_email_log_mailbox_id", "mailbox_id"),
|
||||||
|
Index("ix_email_log_bounced_mailbox_id", "bounced_mailbox_id"),
|
||||||
|
Index("ix_email_log_refused_email_id", "refused_email_id"),
|
||||||
|
)
|
||||||
|
|
||||||
user_id = sa.Column(
|
user_id = sa.Column(
|
||||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||||
@ -2331,6 +2391,7 @@ class AliasUsedOn(Base, ModelMixin):
|
|||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
|
sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
|
||||||
|
sa.Index("ix_alias_used_on_user_id", "user_id"),
|
||||||
)
|
)
|
||||||
|
|
||||||
alias_id = sa.Column(
|
alias_id = sa.Column(
|
||||||
@ -2357,6 +2418,11 @@ class ApiKey(Base, ModelMixin):
|
|||||||
|
|
||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_api_key_code", "code"),
|
||||||
|
sa.Index("ix_api_key_user_id", "user_id"),
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create(cls, user_id, name=None, **kwargs):
|
def create(cls, user_id, name=None, **kwargs):
|
||||||
code = random_string(60)
|
code = random_string(60)
|
||||||
@ -2443,6 +2509,8 @@ class CustomDomain(Base, ModelMixin):
|
|||||||
unique=True,
|
unique=True,
|
||||||
postgresql_where=Column("ownership_verified"),
|
postgresql_where=Column("ownership_verified"),
|
||||||
), # The condition
|
), # The condition
|
||||||
|
Index("ix_custom_domain_user_id", "user_id"),
|
||||||
|
Index("ix_custom_domain_pending_deletion", "pending_deletion"),
|
||||||
)
|
)
|
||||||
|
|
||||||
user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
|
user = orm.relationship(User, foreign_keys=[user_id], backref="custom_domains")
|
||||||
@ -2513,6 +2581,7 @@ class AutoCreateRule(Base, ModelMixin):
|
|||||||
sa.UniqueConstraint(
|
sa.UniqueConstraint(
|
||||||
"custom_domain_id", "order", name="uq_auto_create_rule_order"
|
"custom_domain_id", "order", name="uq_auto_create_rule_order"
|
||||||
),
|
),
|
||||||
|
sa.Index("ix_auto_create_rule_custom_domain_id", "custom_domain_id"),
|
||||||
)
|
)
|
||||||
|
|
||||||
custom_domain_id = sa.Column(
|
custom_domain_id = sa.Column(
|
||||||
@ -2556,6 +2625,7 @@ class DomainDeletedAlias(Base, ModelMixin):
|
|||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"),
|
sa.UniqueConstraint("domain_id", "email", name="uq_domain_trash"),
|
||||||
|
sa.Index("ix_domain_deleted_alias_user_id", "user_id"),
|
||||||
)
|
)
|
||||||
|
|
||||||
email = sa.Column(sa.String(256), nullable=False)
|
email = sa.Column(sa.String(256), nullable=False)
|
||||||
@ -2616,6 +2686,8 @@ class Coupon(Base, ModelMixin):
|
|||||||
# a coupon can have an expiration
|
# a coupon can have an expiration
|
||||||
expires_date = sa.Column(ArrowType, nullable=True)
|
expires_date = sa.Column(ArrowType, nullable=True)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_coupon_used_by_user_id", "used_by_user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class Directory(Base, ModelMixin):
|
class Directory(Base, ModelMixin):
|
||||||
__tablename__ = "directory"
|
__tablename__ = "directory"
|
||||||
@ -2630,6 +2702,8 @@ class Directory(Base, ModelMixin):
|
|||||||
"Mailbox", secondary="directory_mailbox", lazy="joined"
|
"Mailbox", secondary="directory_mailbox", lazy="joined"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_directory_user_id", "user_id"),)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def mailboxes(self):
|
def mailboxes(self):
|
||||||
if self._mailboxes:
|
if self._mailboxes:
|
||||||
@ -2731,7 +2805,10 @@ class Mailbox(Base, ModelMixin):
|
|||||||
|
|
||||||
generic_subject = sa.Column(sa.String(78), nullable=True)
|
generic_subject = sa.Column(sa.String(78), nullable=True)
|
||||||
|
|
||||||
__table_args__ = (sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),)
|
__table_args__ = (
|
||||||
|
sa.UniqueConstraint("user_id", "email", name="uq_mailbox_user"),
|
||||||
|
sa.Index("ix_mailbox_pgp_finger_print", "pgp_finger_print"),
|
||||||
|
)
|
||||||
|
|
||||||
user = orm.relationship(User, foreign_keys=[user_id])
|
user = orm.relationship(User, foreign_keys=[user_id])
|
||||||
|
|
||||||
@ -2764,9 +2841,9 @@ class Mailbox(Base, ModelMixin):
|
|||||||
|
|
||||||
from app.email_utils import get_email_local_part
|
from app.email_utils import get_email_local_part
|
||||||
|
|
||||||
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
|
mx_domains = get_mx_domains(get_email_local_part(self.email))
|
||||||
# Proton is the first domain
|
# Proton is the first domain
|
||||||
if mx_domains and mx_domains[0][1] in (
|
if mx_domains and mx_domains[0].domain in (
|
||||||
"mail.protonmail.ch.",
|
"mail.protonmail.ch.",
|
||||||
"mailsec.protonmail.ch.",
|
"mailsec.protonmail.ch.",
|
||||||
):
|
):
|
||||||
@ -2868,6 +2945,8 @@ class RefusedEmail(Base, ModelMixin):
|
|||||||
# toggle this when email content (stored at full_report_path & path are deleted)
|
# toggle this when email content (stored at full_report_path & path are deleted)
|
||||||
deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
deleted = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_refused_email_user_id", "user_id"),)
|
||||||
|
|
||||||
def get_url(self, expires_in=3600):
|
def get_url(self, expires_in=3600):
|
||||||
if self.path:
|
if self.path:
|
||||||
return s3.get_url(self.path, expires_in)
|
return s3.get_url(self.path, expires_in)
|
||||||
@ -2890,6 +2969,8 @@ class Referral(Base, ModelMixin):
|
|||||||
|
|
||||||
user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")
|
user = orm.relationship(User, foreign_keys=[user_id], backref="referrals")
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_referral_user_id", "user_id"),)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def nb_user(self) -> int:
|
def nb_user(self) -> int:
|
||||||
return User.filter_by(referral_id=self.id, activated=True).count()
|
return User.filter_by(referral_id=self.id, activated=True).count()
|
||||||
@ -2929,6 +3010,8 @@ class SentAlert(Base, ModelMixin):
|
|||||||
to_email = sa.Column(sa.String(256), nullable=False)
|
to_email = sa.Column(sa.String(256), nullable=False)
|
||||||
alert_type = sa.Column(sa.String(256), nullable=False)
|
alert_type = sa.Column(sa.String(256), nullable=False)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_sent_alert_user_id", "user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class AliasMailbox(Base, ModelMixin):
|
class AliasMailbox(Base, ModelMixin):
|
||||||
__tablename__ = "alias_mailbox"
|
__tablename__ = "alias_mailbox"
|
||||||
@ -3174,6 +3257,11 @@ class BatchImport(Base, ModelMixin):
|
|||||||
file = orm.relationship(File)
|
file = orm.relationship(File)
|
||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_batch_import_file_id", "file_id"),
|
||||||
|
sa.Index("ix_batch_import_user_id", "user_id"),
|
||||||
|
)
|
||||||
|
|
||||||
def nb_alias(self):
|
def nb_alias(self):
|
||||||
return Alias.filter_by(batch_import_id=self.id).count()
|
return Alias.filter_by(batch_import_id=self.id).count()
|
||||||
|
|
||||||
@ -3194,6 +3282,7 @@ class AuthorizedAddress(Base, ModelMixin):
|
|||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
|
sa.UniqueConstraint("mailbox_id", "email", name="uq_authorize_address"),
|
||||||
|
sa.Index("ix_authorized_address_user_id", "user_id"),
|
||||||
)
|
)
|
||||||
|
|
||||||
mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
|
mailbox = orm.relationship(Mailbox, backref="authorized_addresses")
|
||||||
@ -3335,6 +3424,8 @@ class Payout(Base, ModelMixin):
|
|||||||
|
|
||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_payout_user_id", "user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class IgnoredEmail(Base, ModelMixin):
|
class IgnoredEmail(Base, ModelMixin):
|
||||||
"""If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
|
"""If an email has mail_from and rcpt_to present in this table, discard it by returning 250 status."""
|
||||||
@ -3436,6 +3527,8 @@ class PhoneReservation(Base, ModelMixin):
|
|||||||
start = sa.Column(ArrowType, nullable=False)
|
start = sa.Column(ArrowType, nullable=False)
|
||||||
end = sa.Column(ArrowType, nullable=False)
|
end = sa.Column(ArrowType, nullable=False)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_phone_reservation_user_id", "user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class PhoneMessage(Base, ModelMixin):
|
class PhoneMessage(Base, ModelMixin):
|
||||||
__tablename__ = "phone_message"
|
__tablename__ = "phone_message"
|
||||||
@ -3610,6 +3703,11 @@ class ProviderComplaint(Base, ModelMixin):
|
|||||||
user = orm.relationship(User, foreign_keys=[user_id])
|
user = orm.relationship(User, foreign_keys=[user_id])
|
||||||
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_provider_complaint_user_id", "user_id"),
|
||||||
|
sa.Index("ix_provider_complaint_refused_email_id", "refused_email_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class PartnerApiToken(Base, ModelMixin):
|
class PartnerApiToken(Base, ModelMixin):
|
||||||
__tablename__ = "partner_api_token"
|
__tablename__ = "partner_api_token"
|
||||||
@ -3733,6 +3831,8 @@ class NewsletterUser(Base, ModelMixin):
|
|||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
newsletter = orm.relationship(Newsletter)
|
newsletter = orm.relationship(Newsletter)
|
||||||
|
|
||||||
|
__table_args__ = (sa.Index("ix_newsletter_user_user_id", "user_id"),)
|
||||||
|
|
||||||
|
|
||||||
class ApiToCookieToken(Base, ModelMixin):
|
class ApiToCookieToken(Base, ModelMixin):
|
||||||
__tablename__ = "api_cookie_token"
|
__tablename__ = "api_cookie_token"
|
||||||
@ -3743,6 +3843,11 @@ class ApiToCookieToken(Base, ModelMixin):
|
|||||||
user = orm.relationship(User)
|
user = orm.relationship(User)
|
||||||
api_key = orm.relationship(ApiKey)
|
api_key = orm.relationship(ApiKey)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
sa.Index("ix_api_to_cookie_token_api_key_id", "api_key_id"),
|
||||||
|
sa.Index("ix_api_to_cookie_token_user_id", "user_id"),
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create(cls, **kwargs):
|
def create(cls, **kwargs):
|
||||||
code = secrets.token_urlsafe(32)
|
code = secrets.token_urlsafe(32)
|
||||||
@ -3765,17 +3870,19 @@ class SyncEvent(Base, ModelMixin):
|
|||||||
sa.Index("ix_sync_event_taken_time", "taken_time"),
|
sa.Index("ix_sync_event_taken_time", "taken_time"),
|
||||||
)
|
)
|
||||||
|
|
||||||
def mark_as_taken(self) -> bool:
|
def mark_as_taken(self, allow_taken_older_than: Optional[Arrow] = None) -> bool:
|
||||||
sql = """
|
try:
|
||||||
UPDATE sync_event
|
taken_condition = ["taken_time IS NULL"]
|
||||||
SET taken_time = :taken_time
|
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
|
||||||
WHERE id = :sync_event_id
|
if allow_taken_older_than:
|
||||||
AND taken_time IS NULL
|
taken_condition.append("taken_time < :taken_older_than")
|
||||||
"""
|
args["taken_older_than"] = allow_taken_older_than.datetime
|
||||||
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
|
sql_taken_condition = "({})".format(" OR ".join(taken_condition))
|
||||||
|
sql = f"UPDATE sync_event SET taken_time = :taken_time WHERE id = :sync_event_id AND {sql_taken_condition}"
|
||||||
res = Session.execute(sql, args)
|
res = Session.execute(sql, args)
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
except ObjectDeletedError:
|
||||||
|
return False
|
||||||
|
|
||||||
return res.rowcount > 0
|
return res.rowcount > 0
|
||||||
|
|
||||||
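With allow_taken_older_than, a consumer can reclaim events whose previous taker died mid-processing. A hedged sketch of both call patterns; process() is a placeholder, not a repository function:

import arrow

# Normal path: take the event only if nobody holds it.
if event.mark_as_taken():
    process(event)

# Sweeper path: additionally steal events claimed more than 10 minutes ago,
# on the assumption that their original consumer crashed before finishing.
if event.mark_as_taken(allow_taken_older_than=arrow.now().shift(minutes=-10)):
    process(event)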
@@ -3799,3 +3906,39 @@ class SyncEvent(Base, ModelMixin):
             .limit(100)
             .all()
         )
+
+
+class AliasAuditLog(Base, ModelMixin):
+    """This model holds an audit log for all the actions performed to an alias"""
+
+    __tablename__ = "alias_audit_log"
+
+    user_id = sa.Column(sa.Integer, nullable=False)
+    alias_id = sa.Column(sa.Integer, nullable=False)
+    alias_email = sa.Column(sa.String(255), nullable=False)
+    action = sa.Column(sa.String(255), nullable=False)
+    message = sa.Column(sa.Text, default=None, nullable=True)
+
+    __table_args__ = (
+        sa.Index("ix_alias_audit_log_user_id", "user_id"),
+        sa.Index("ix_alias_audit_log_alias_id", "alias_id"),
+        sa.Index("ix_alias_audit_log_alias_email", "alias_email"),
+        sa.Index("ix_alias_audit_log_created_at", "created_at"),
+    )
+
+
+class UserAuditLog(Base, ModelMixin):
+    """This model holds an audit log for all the actions performed by a user"""
+
+    __tablename__ = "user_audit_log"
+
+    user_id = sa.Column(sa.Integer, nullable=False)
+    user_email = sa.Column(sa.String(255), nullable=False)
+    action = sa.Column(sa.String(255), nullable=False)
+    message = sa.Column(sa.Text, default=None, nullable=True)
+
+    __table_args__ = (
+        sa.Index("ix_user_audit_log_user_id", "user_id"),
+        sa.Index("ix_user_audit_log_user_email", "user_email"),
+        sa.Index("ix_user_audit_log_created_at", "created_at"),
+    )
app/app/partner_user_utils.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from typing import Optional

import arrow
from arrow import Arrow

from app import config
from app.models import PartnerUser, PartnerSubscription, User, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def create_partner_user(
    user: User, partner_id: int, partner_email: str, external_user_id: str
) -> PartnerUser:
    instance = PartnerUser.create(
        user_id=user.id,
        partner_id=partner_id,
        partner_email=partner_email,
        external_user_id=external_user_id,
    )
    Job.create(
        name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
        payload={"user_id": user.id},
        run_at=arrow.now(),
    )
    emit_user_audit_log(
        user=user,
        action=UserAuditLogAction.LinkAccount,
        message=f"Linked account to partner_id={partner_id} | partner_email={partner_email} | external_user_id={external_user_id}",
    )

    return instance


def create_partner_subscription(
    partner_user: PartnerUser,
    expiration: Optional[Arrow],
    msg: Optional[str] = None,
) -> PartnerSubscription:
    instance = PartnerSubscription.create(
        partner_user_id=partner_user.id,
        end_at=expiration,
    )

    message = "User upgraded through partner subscription"
    if msg:
        message += f" | {msg}"
    emit_user_audit_log(
        user=partner_user.user,
        action=UserAuditLogAction.Upgrade,
        message=message,
    )

    return instance
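A hedged usage sketch of the two helpers above; the ids and e-mail address are made-up values, and any final commit is assumed to be handled by the caller as in the rest of the codebase:

```python
import arrow

from app.models import User
from app.partner_user_utils import create_partner_user, create_partner_subscription

user = User.get(42)  # hypothetical user id
partner_user = create_partner_user(
    user=user,
    partner_id=1,                       # hypothetical partner id
    partner_email="user@partner.test",  # hypothetical partner e-mail
    external_user_id="ext-123",         # hypothetical external id
)
# Grant a one-year partner subscription; this also records an Upgrade audit entry.
create_partner_subscription(
    partner_user=partner_user,
    expiration=arrow.now().shift(years=1),
    msg="illustrative upgrade reason",
)
# Session.commit() is assumed to happen in the calling code.
```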
app/app/payments/__init__.py (new empty file)
app/app/payments/coinbase.py (new file, 121 lines)
@@ -0,0 +1,121 @@
from typing import Optional

import arrow

from coinbase_commerce.error import WebhookInvalidPayload, SignatureVerificationError
from coinbase_commerce.webhook import Webhook
from flask import Flask, request

from app.config import COINBASE_WEBHOOK_SECRET
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import CoinbaseSubscription, User
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def setup_coinbase_commerce(app: Flask):
    @app.route("/coinbase", methods=["POST"])
    def coinbase_webhook():
        # event payload
        request_data = request.data.decode("utf-8")
        # webhook signature
        request_sig = request.headers.get("X-CC-Webhook-Signature", None)

        try:
            # signature verification and event object construction
            event = Webhook.construct_event(
                request_data, request_sig, COINBASE_WEBHOOK_SECRET
            )
        except (WebhookInvalidPayload, SignatureVerificationError) as e:
            LOG.e("Invalid Coinbase webhook")
            return str(e), 400

        LOG.d("Coinbase event %s", event)

        if event["type"] == "charge:confirmed":
            if handle_coinbase_event(event):
                return "success", 200
            else:
                return "error", 400

        return "success", 200


def handle_coinbase_event(event) -> bool:
    server_user_id = event["data"]["metadata"]["user_id"]
    try:
        user_id = int(server_user_id)
    except ValueError:
        user_id = int(float(server_user_id))

    code = event["data"]["code"]
    user: Optional[User] = User.get(user_id)
    if not user:
        LOG.e("User not found %s", user_id)
        return False

    coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
        user_id=user_id
    )

    if not coinbase_subscription:
        LOG.d("Create a coinbase subscription for %s", user)
        coinbase_subscription = CoinbaseSubscription.create(
            user_id=user_id, end_at=arrow.now().shift(years=1), code=code, commit=True
        )
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.Upgrade,
            message="Upgraded though Coinbase",
            commit=True,
        )
        send_email(
            user.email,
            "Your SimpleLogin account has been upgraded",
            render(
                "transactional/coinbase/new-subscription.txt",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
            render(
                "transactional/coinbase/new-subscription.html",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
        )
    else:
        if coinbase_subscription.code != code:
            LOG.d("Update code from %s to %s", coinbase_subscription.code, code)
            coinbase_subscription.code = code

        if coinbase_subscription.is_active():
            coinbase_subscription.end_at = coinbase_subscription.end_at.shift(years=1)
        else:  # already expired subscription
            coinbase_subscription.end_at = arrow.now().shift(years=1)

        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.SubscriptionExtended,
            message="Extended coinbase subscription",
        )
        Session.commit()

        send_email(
            user.email,
            "Your SimpleLogin account has been extended",
            render(
                "transactional/coinbase/extend-subscription.txt",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
            render(
                "transactional/coinbase/extend-subscription.html",
                user=user,
                coinbase_subscription=coinbase_subscription,
            ),
        )
    execute_subscription_webhook(user)

    return True
app/app/payments/paddle.py (new file, 286 lines)
@@ -0,0 +1,286 @@
import arrow
import json
from dateutil.relativedelta import relativedelta


from flask import Flask, request

from app import paddle_utils, paddle_callback
from app.config import (
    PADDLE_MONTHLY_PRODUCT_ID,
    PADDLE_MONTHLY_PRODUCT_IDS,
    PADDLE_YEARLY_PRODUCT_IDS,
    PADDLE_COUPON_ID,
)
from app.db import Session
from app.email_utils import send_email, render
from app.log import LOG
from app.models import Subscription, PlanEnum, User, Coupon
from app.subscription_webhook import execute_subscription_webhook
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import random_string


def setup_paddle_callback(app: Flask):
    @app.route("/paddle", methods=["GET", "POST"])
    def paddle():
        LOG.d(f"paddle callback {request.form.get('alert_name')} {request.form}")

        # make sure the request comes from Paddle
        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
            return "KO", 400

        if (
            request.form.get("alert_name") == "subscription_created"
        ):  # new user subscribes
            # the passthrough is json encoded, e.g.
            # request.form.get("passthrough") = '{"user_id": 88 }'
            passthrough = json.loads(request.form.get("passthrough"))
            user_id = passthrough.get("user_id")
            user = User.get(user_id)

            subscription_plan_id = int(request.form.get("subscription_plan_id"))

            if subscription_plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
                plan = PlanEnum.monthly
            elif subscription_plan_id in PADDLE_YEARLY_PRODUCT_IDS:
                plan = PlanEnum.yearly
            else:
                LOG.e(
                    "Unknown subscription_plan_id %s %s",
                    subscription_plan_id,
                    request.form,
                )
                return "No such subscription", 400

            sub = Subscription.get_by(user_id=user.id)

            if not sub:
                LOG.d(f"create a new Subscription for user {user}")
                Subscription.create(
                    user_id=user.id,
                    cancel_url=request.form.get("cancel_url"),
                    update_url=request.form.get("update_url"),
                    subscription_id=request.form.get("subscription_id"),
                    event_time=arrow.now(),
                    next_bill_date=arrow.get(
                        request.form.get("next_bill_date"), "YYYY-MM-DD"
                    ).date(),
                    plan=plan,
                )
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.Upgrade,
                    message="Upgraded through Paddle",
                )
            else:
                LOG.d(f"Update an existing Subscription for user {user}")
                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.subscription_id = request.form.get("subscription_id")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()
                sub.plan = plan

                # make sure to set the new plan as not-cancelled
                # in case user cancels a plan and subscribes a new plan
                sub.cancelled = False
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended Paddle subscription",
                )

            execute_subscription_webhook(user)
            LOG.d("User %s upgrades!", user)

            Session.commit()

        elif request.form.get("alert_name") == "subscription_payment_succeeded":
            subscription_id = request.form.get("subscription_id")
            LOG.d("Update subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            # when user subscribes, the "subscription_payment_succeeded" can arrive BEFORE "subscription_created"
            # at that time, subscription object does not exist yet
            if sub:
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()

                Session.commit()
                execute_subscription_webhook(sub.user)

        elif request.form.get("alert_name") == "subscription_cancelled":
            subscription_id = request.form.get("subscription_id")

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            if sub:
                # cancellation_effective_date should be the same as next_bill_date
                LOG.w(
                    "Cancel subscription %s %s on %s, next bill date %s",
                    subscription_id,
                    sub.user,
                    request.form.get("cancellation_effective_date"),
                    sub.next_bill_date,
                )
                sub.event_time = arrow.now()

                sub.cancelled = True
                emit_user_audit_log(
                    user=sub.user,
                    action=UserAuditLogAction.SubscriptionCancelled,
                    message="Cancelled Paddle subscription",
                )
                Session.commit()

                user = sub.user

                send_email(
                    user.email,
                    "SimpleLogin - your subscription is canceled",
                    render(
                        "transactional/subscription-cancel.txt",
                        user=user,
                        end_date=request.form.get("cancellation_effective_date"),
                    ),
                )
                execute_subscription_webhook(sub.user)

            else:
                # user might have deleted their account
                LOG.i(f"Cancel non-exist subscription {subscription_id}")
                return "OK"
        elif request.form.get("alert_name") == "subscription_updated":
            subscription_id = request.form.get("subscription_id")

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            if sub:
                next_bill_date = request.form.get("next_bill_date")
                if not next_bill_date:
                    paddle_callback.failed_payment(sub, subscription_id)
                    return "OK"

                LOG.d(
                    "Update subscription %s %s on %s, next bill date %s",
                    subscription_id,
                    sub.user,
                    request.form.get("cancellation_effective_date"),
                    sub.next_bill_date,
                )
                if (
                    int(request.form.get("subscription_plan_id"))
                    == PADDLE_MONTHLY_PRODUCT_ID
                ):
                    plan = PlanEnum.monthly
                else:
                    plan = PlanEnum.yearly

                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD"
                ).date()
                sub.plan = plan

                # make sure to set the new plan as not-cancelled
                sub.cancelled = False
                emit_user_audit_log(
                    user=sub.user,
                    action=UserAuditLogAction.SubscriptionExtended,
                    message="Extended Paddle subscription",
                )

                Session.commit()
                execute_subscription_webhook(sub.user)
            else:
                LOG.w(
                    f"update non-exist subscription {subscription_id}. {request.form}"
                )
                return "No such subscription", 400
        elif request.form.get("alert_name") == "payment_refunded":
            subscription_id = request.form.get("subscription_id")
            LOG.d("Refund request for subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)

            if sub:
                user = sub.user
                Subscription.delete(sub.id)
                emit_user_audit_log(
                    user=user,
                    action=UserAuditLogAction.SubscriptionCancelled,
                    message="Paddle subscription cancelled as user requested a refund",
                )
                Session.commit()
                LOG.e("%s requests a refund", user)
                execute_subscription_webhook(sub.user)

        elif request.form.get("alert_name") == "subscription_payment_refunded":
            subscription_id = request.form.get("subscription_id")
            sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
            LOG.d(
                "Handle subscription_payment_refunded for subscription %s",
                subscription_id,
            )

            if not sub:
                LOG.w(
                    "No such subscription for %s, payload %s",
                    subscription_id,
                    request.form,
                )
                return "No such subscription"

            plan_id = int(request.form["subscription_plan_id"])
            if request.form["refund_type"] == "full":
                if plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
                    LOG.d("subtract 1 month from next_bill_date %s", sub.next_bill_date)
                    sub.next_bill_date = sub.next_bill_date - relativedelta(months=1)
                    LOG.d("next_bill_date is %s", sub.next_bill_date)
                    Session.commit()
                elif plan_id in PADDLE_YEARLY_PRODUCT_IDS:
                    LOG.d("subtract 1 year from next_bill_date %s", sub.next_bill_date)
                    sub.next_bill_date = sub.next_bill_date - relativedelta(years=1)
                    LOG.d("next_bill_date is %s", sub.next_bill_date)
                    Session.commit()
                else:
                    LOG.e("Unknown plan_id %s", plan_id)
            else:
                LOG.w("partial subscription_payment_refunded, not handled")
            execute_subscription_webhook(sub.user)

        return "OK"

    @app.route("/paddle_coupon", methods=["GET", "POST"])
    def paddle_coupon():
        LOG.d("paddle coupon callback %s", request.form)

        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
            return "KO", 400

        product_id = request.form.get("p_product_id")
        if product_id != PADDLE_COUPON_ID:
            LOG.e("product_id %s not match with %s", product_id, PADDLE_COUPON_ID)
            return "KO", 400

        email = request.form.get("email")
        LOG.d("Paddle coupon request for %s", email)

        coupon = Coupon.create(
            code=random_string(30),
            comment="For 1-year coupon",
            expires_date=arrow.now().shift(years=1, days=-1),
            commit=True,
        )

        return (
            f"Your 1-year coupon is <b>{coupon.code}</b> <br> "
            f"It's valid until <b>{coupon.expires_date.date().isoformat()}</b>"
        )
@@ -2,11 +2,9 @@ from dataclasses import dataclass
 from enum import Enum
 from flask import url_for
 from typing import Optional
-import arrow

-from app import config
 from app.errors import LinkException
-from app.models import User, Partner, Job
+from app.models import User, Partner
 from app.proton.proton_client import ProtonClient, ProtonUser
 from app.account_linking import (
     process_login_case,
@@ -43,21 +41,12 @@ class ProtonCallbackHandler:
     def __init__(self, proton_client: ProtonClient):
         self.proton_client = proton_client

-    def _initial_alias_sync(self, user: User):
-        Job.create(
-            name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
-            payload={"user_id": user.id},
-            run_at=arrow.now(),
-            commit=True,
-        )
-
     def handle_login(self, partner: Partner) -> ProtonCallbackResult:
         try:
             user = self.__get_partner_user()
             if user is None:
                 return generate_account_not_allowed_to_log_in()
             res = process_login_case(user, partner)
-            self._initial_alias_sync(res.user)
             return ProtonCallbackResult(
                 redirect_to_login=False,
                 flash_message=None,
@@ -86,7 +75,6 @@ class ProtonCallbackHandler:
             if user is None:
                 return generate_account_not_allowed_to_log_in()
             res = process_link_case(user, current_user, partner)
-            self._initial_alias_sync(res.user)
             return ProtonCallbackResult(
                 redirect_to_login=False,
                 flash_message="Account successfully linked",
@@ -5,6 +5,7 @@ from app.db import Session
 from app.log import LOG
 from app.errors import ProtonPartnerNotSetUp
 from app.models import Partner, PartnerUser, User
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

 PROTON_PARTNER_NAME = "Proton"
 _PROTON_PARTNER: Optional[Partner] = None
@@ -32,6 +33,11 @@ def perform_proton_account_unlink(current_user: User):
     )
     if partner_user is not None:
         LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
+        emit_user_audit_log(
+            user=current_user,
+            action=UserAuditLogAction.UnlinkAccount,
+            message=f"User has unlinked the account (email={partner_user.partner_email} | external_user_id={partner_user.external_user_id})",
+        )
         PartnerUser.delete(partner_user.id)
     Session.commit()
     agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})
app/app/sentry_utils.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from typing import Optional

from sentry_sdk.types import Event, Hint

_HTTP_CODES_TO_IGNORE = [416]


def _should_send(_event: Event, hint: Hint) -> bool:
    # Check if this is an HTTP Exception event
    if "exc_info" in hint:
        exc_type, exc_value, exc_traceback = hint["exc_info"]
        # Check if it's a Werkzeug HTTPException (raised for HTTP status codes)
        if hasattr(exc_value, "code") and exc_value.code in _HTTP_CODES_TO_IGNORE:
            return False
    return True


def sentry_before_send(event: Event, hint: Hint) -> Optional[Event]:
    if _should_send(event, hint):
        return event
    return None
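`sentry_before_send` is meant to be handed to `sentry_sdk.init` via its standard `before_send` hook; the wiring itself is not part of this change, so the snippet below is only a sketch with a placeholder DSN:

```python
import sentry_sdk

from app.sentry_utils import sentry_before_send

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=sentry_before_send,  # drops events for ignored HTTP codes, e.g. 416
)
```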
@@ -1,40 +1,16 @@
-import requests
-from requests import RequestException
-
-from app import config
 from app.db import Session
 from app.events.event_dispatcher import EventDispatcher
 from app.events.generated.event_pb2 import EventContent, UserPlanChanged
-from app.log import LOG
 from app.models import User


 def execute_subscription_webhook(user: User):
-    webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
-    if webhook_url is None:
-        return
     subscription_end = user.get_active_subscription_end(
         include_partner_subscription=False
     )
     sl_subscription_end = None
     if subscription_end:
         sl_subscription_end = subscription_end.timestamp
-    payload = {
-        "user_id": user.id,
-        "is_premium": user.is_premium(),
-        "active_subscription_end": sl_subscription_end,
-    }
-    try:
-        response = requests.post(webhook_url, json=payload, timeout=2)
-        if response.status_code == 200:
-            LOG.i("Sent request to subscription update webhook successfully")
-        else:
-            LOG.i(
-                f"Request to webhook failed with status {response.status_code}: {response.text}"
-            )
-    except RequestException as e:
-        LOG.error(f"Subscription request exception: {e}")
-
     event = UserPlanChanged(plan_end_time=sl_subscription_end)
     EventDispatcher.send_event(user, EventContent(user_plan_change=event))
     Session.commit()
app/app/user_audit_log_utils.py (new file, 44 lines)
@@ -0,0 +1,44 @@
from enum import Enum

from app.models import User, UserAuditLog


class UserAuditLogAction(Enum):
    CreateUser = "create_user"
    ActivateUser = "activate_user"
    ResetPassword = "reset_password"

    Upgrade = "upgrade"
    SubscriptionExtended = "subscription_extended"
    SubscriptionCancelled = "subscription_cancelled"
    LinkAccount = "link_account"
    UnlinkAccount = "unlink_account"

    CreateMailbox = "create_mailbox"
    VerifyMailbox = "verify_mailbox"
    UpdateMailbox = "update_mailbox"
    DeleteMailbox = "delete_mailbox"

    CreateCustomDomain = "create_custom_domain"
    VerifyCustomDomain = "verify_custom_domain"
    UpdateCustomDomain = "update_custom_domain"
    DeleteCustomDomain = "delete_custom_domain"

    CreateDirectory = "create_directory"
    UpdateDirectory = "update_directory"
    DeleteDirectory = "delete_directory"

    UserMarkedForDeletion = "user_marked_for_deletion"
    DeleteUser = "delete_user"


def emit_user_audit_log(
    user: User, action: UserAuditLogAction, message: str, commit: bool = False
):
    UserAuditLog.create(
        user_id=user.id,
        user_email=user.email,
        action=action.value,
        message=message,
        commit=commit,
    )
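A short sketch of how the helper is called elsewhere in this change set; the `user` object is assumed to be an existing `app.models.User`, and `commit=True` is only needed when the caller does not commit itself:

```python
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

emit_user_audit_log(
    user=user,  # an existing app.models.User instance
    action=UserAuditLogAction.CreateMailbox,
    message="Create mailbox 123 (user@example.com)",  # illustrative message
    commit=True,
)
```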
@@ -3,6 +3,7 @@ from typing import Optional
 from app.db import Session
 from app.log import LOG
 from app.models import User, SLDomain, CustomDomain, Mailbox
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


 class CannotSetAlias(Exception):
@@ -54,7 +55,7 @@ def set_default_alias_domain(user: User, domain_name: Optional[str]):


 def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
-    mailbox = Mailbox.get(mailbox_id)
+    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)

     if not mailbox or mailbox.user_id != user.id:
         raise CannotSetMailbox("Invalid mailbox")
@@ -67,5 +68,11 @@ def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
     LOG.i(f"User {user} has set mailbox {mailbox} as his default one")

     user.default_mailbox_id = mailbox.id
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.UpdateMailbox,
+        message=f"Set mailbox {mailbox.id} ({mailbox.email}) as default",
+    )
+
     Session.commit()
     return mailbox
@@ -1,4 +1,3 @@
-import random
 import re
 import secrets
 import string
@@ -32,8 +31,9 @@ def random_words(words: int = 2, numbers: int = 0):
     fields = [secrets.choice(_words) for i in range(words)]

     if numbers > 0:
-        digits = "".join([str(random.randint(0, 9)) for i in range(numbers)])
-        return "_".join(fields) + digits
+        digits = [n for n in range(10)]
+        suffix = "".join([str(secrets.choice(digits)) for i in range(numbers)])
+        return "_".join(fields) + suffix
     else:
         return "_".join(fields)
app/cron.py (90 changes)
@@ -14,6 +14,7 @@ from sqlalchemy.sql import Insert, text
 from app import s3, config
 from app.alias_utils import nb_email_log_for_mailbox
 from app.api.views.apple import verify_receipt
+from app.custom_domain_validation import CustomDomainValidation
 from app.db import Session
 from app.dns_utils import get_mx_domains, is_mx_equivalent
 from app.email_utils import (
@@ -59,8 +60,11 @@ from app.models import (
 )
 from app.pgp_utils import load_public_key_and_check, PGPException
 from app.proton.utils import get_proton_partner
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from app.utils import sanitize_email
 from server import create_light_app
+from tasks.clean_alias_audit_log import cleanup_alias_audit_log
+from tasks.clean_user_audit_log import cleanup_user_audit_log
 from tasks.cleanup_old_imports import cleanup_old_imports
 from tasks.cleanup_old_jobs import cleanup_old_jobs
 from tasks.cleanup_old_notifications import cleanup_old_notifications
@@ -282,8 +286,16 @@ def notify_manual_sub_end():

 def poll_apple_subscription():
     """Poll Apple API to update AppleSubscription"""
-    # todo: only near the end of the subscription
-    for apple_sub in AppleSubscription.all():
+    for apple_sub in (
+        AppleSubscription.filter(
+            AppleSubscription.expires_date < arrow.now().shift(days=15)
+        )
+        .enable_eagerloads(False)
+        .yield_per(100)
+    ):
+        if not apple_sub.is_valid():
+            # Subscription is not valid anymore and hasn't been renewed
+            continue
         if not apple_sub.product_id:
             LOG.d("Ignore %s", apple_sub)
             continue
@@ -896,6 +908,24 @@ def check_mailbox_valid_pgp_keys():


 def check_custom_domain():
+    # Delete custom domains that haven't been verified in a month
+    for custom_domain in (
+        CustomDomain.filter(
+            CustomDomain.verified == False,  # noqa: E712
+            CustomDomain.created_at < arrow.now().shift(months=-1),
+        )
+        .enable_eagerloads(False)
+        .yield_per(100)
+    ):
+        alias_count = Alias.filter(Alias.custom_domain_id == custom_domain.id).count()
+        if alias_count > 0:
+            LOG.warn(
+                f"Custom Domain {custom_domain} has {alias_count} aliases. Won't delete"
+            )
+        else:
+            LOG.i(f"Deleting unverified old custom domain {custom_domain}")
+            CustomDomain.delete(custom_domain.id)
+
     LOG.d("Check verified domain for DNS issues")

     for custom_domain in CustomDomain.filter_by(verified=True):  # type: CustomDomain
@@ -905,9 +935,11 @@ def check_custom_domain():
         LOG.i("custom domain has been deleted")


-def check_single_custom_domain(custom_domain):
+def check_single_custom_domain(custom_domain: CustomDomain):
     mx_domains = get_mx_domains(custom_domain.domain)
-    if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
+    validator = CustomDomainValidation(dkim_domain=config.EMAIL_DOMAIN)
+    expected_custom_domains = validator.get_expected_mx_records(custom_domain)
+    if not is_mx_equivalent(mx_domains, expected_custom_domains):
         user = custom_domain.user
         LOG.w(
             "The MX record is not correctly set for %s %s %s",
@@ -965,7 +997,7 @@ def delete_expired_tokens():
     LOG.d("Delete api to cookie tokens older than %s, nb row %s", max_time, nb_row)


-async def _hibp_check(api_key, queue):
+async def _hibp_check(api_key: str, queue: asyncio.Queue):
     """
     Uses a single API key to check the queue as fast as possible.

@@ -984,11 +1016,16 @@ async def _hibp_check(api_key, queue):
         if not alias:
             continue
         user = alias.user
-        if user.disabled or not user.is_paid():
+        if user.disabled or not user.is_premium():
             # Mark it as hibp done to skip it as if it had been checked
             alias.hibp_last_check = arrow.utcnow()
             Session.commit()
             continue
+        if alias.flags & Alias.FLAG_PARTNER_CREATED > 0:
+            # Mark as hibp done
+            alias.hibp_last_check = arrow.utcnow()
+            Session.commit()
+            continue

         LOG.d("Checking HIBP for %s", alias)

@@ -1215,7 +1252,7 @@ def notify_hibp():


 def clear_users_scheduled_to_be_deleted(dry_run=False):
-    users = User.filter(
+    users: List[User] = User.filter(
         and_(
             User.delete_on.isnot(None),
             User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
@@ -1227,6 +1264,11 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
         )
         if dry_run:
             continue
+        emit_user_audit_log(
+            user=user,
+            action=UserAuditLogAction.DeleteUser,
+            message=f"Delete user {user.id} ({user.email})",
+        )
         User.delete(user.id)
         Session.commit()

@@ -1238,6 +1280,16 @@ def delete_old_data():
     cleanup_old_notifications(oldest_valid)


+def clear_alias_audit_log():
+    oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
+    cleanup_alias_audit_log(oldest_valid)
+
+
+def clear_user_audit_log():
+    oldest_valid = arrow.now().shift(days=-config.AUDIT_LOG_MAX_DAYS)
+    cleanup_user_audit_log(oldest_valid)
+
+
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()
@@ -1246,22 +1298,6 @@ if __name__ == "__main__":
         "--job",
         help="Choose a cron job to run",
         type=str,
-        choices=[
-            "stats",
-            "notify_trial_end",
-            "notify_manual_subscription_end",
-            "notify_premium_end",
-            "delete_logs",
-            "delete_old_data",
-            "poll_apple_subscription",
-            "sanity_check",
-            "delete_old_monitoring",
-            "check_custom_domain",
-            "check_hibp",
-            "notify_hibp",
-            "cleanup_tokens",
-            "send_undelivered_mails",
-        ],
     )
     args = parser.parse_args()
     # wrap in an app context to benefit from app setup like database cleanup, sentry integration, etc
@@ -1310,4 +1346,10 @@ if __name__ == "__main__":
         load_unsent_mails_from_fs_and_resend()
     elif args.job == "delete_scheduled_users":
         LOG.d("Deleting users scheduled to be deleted")
-        clear_users_scheduled_to_be_deleted(dry_run=True)
+        clear_users_scheduled_to_be_deleted()
+    elif args.job == "clear_alias_audit_log":
+        LOG.d("Clearing alias audit log")
+        clear_alias_audit_log()
+    elif args.job == "clear_user_audit_log":
+        LOG.d("Clearing user audit log")
+        clear_user_audit_log()
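The two new entry points delegate to `cleanup_alias_audit_log` and `cleanup_user_audit_log` from the `tasks` package. Their bodies are not shown in this diff; given the call signature they presumably delete rows older than the cutoff, roughly along these lines (an assumption, not the actual task code):

```python
from arrow import Arrow

from app.db import Session
from app.models import UserAuditLog


def cleanup_user_audit_log(oldest_valid: Arrow):
    # Assumed behaviour: drop audit entries created before the cutoff timestamp.
    UserAuditLog.filter(UserAuditLog.created_at < oldest_valid).delete()
    Session.commit()
```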
@@ -16,13 +16,25 @@ jobs:
     shell: /bin/bash
     schedule: "15 2 * * *"
     captureStderr: true
+    onFailure:
+      retry:
+        maximumRetries: 10
+        initialDelay: 1
+        maximumDelay: 30
+        backoffMultiplier: 2

   - name: SimpleLogin HIBP check
     command: python /code/cron.py -j check_hibp
     shell: /bin/bash
-    schedule: "15 3 * * *"
+    schedule: "16 */4 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
+    onFailure:
+      retry:
+        maximumRetries: 10
+        initialDelay: 1
+        maximumDelay: 30
+        backoffMultiplier: 2

   - name: SimpleLogin Notify HIBP breaches
     command: python /code/cron.py -j notify_hibp
@@ -31,6 +43,7 @@ jobs:
     captureStderr: true
     concurrencyPolicy: Forbid

+
   - name: SimpleLogin Delete Logs
     command: python /code/cron.py -j delete_logs
     shell: /bin/bash
@@ -80,3 +93,17 @@ jobs:
     schedule: "*/5 * * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
+
+  - name: SimpleLogin clear alias_audit_log old entries
+    command: python /code/cron.py -j clear_alias_audit_log
+    shell: /bin/bash
+    schedule: "0 * * * *" # Once every hour
+    captureStderr: true
+    concurrencyPolicy: Forbid
+
+  - name: SimpleLogin clear user_audit_log old entries
+    command: python /code/cron.py -j clear_user_audit_log
+    shell: /bin/bash
+    schedule: "0 * * * *" # Once every hour
+    captureStderr: true
+    concurrencyPolicy: Forbid
@@ -53,7 +53,11 @@ from flanker.addresslib.address import EmailAddress
 from sqlalchemy.exc import IntegrityError

 from app import pgp_utils, s3, config, contact_utils
-from app.alias_utils import try_auto_create, change_alias_status
+from app.alias_utils import (
+    try_auto_create,
+    change_alias_status,
+    get_alias_recipient_name,
+)
 from app.config import (
     EMAIL_DOMAIN,
     URL,
@@ -173,7 +177,9 @@ from init_app import load_pgp_public_keys
 from server import create_light_app


-def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Contact:
+def get_or_create_contact(
+    from_header: str, mail_from: str, alias: Alias
+) -> Optional[Contact]:
     """
     contact_from_header is the RFC 2047 format FROM header
     """
@@ -197,13 +203,15 @@ def get_or_create_contact(
         contact_email = mail_from
     contact_result = contact_utils.create_contact(
         email=contact_email,
-        name=contact_name,
         alias=alias,
+        name=contact_name,
         mail_from=mail_from,
         allow_empty_email=True,
         automatic_created=True,
         from_partner=False,
     )
+    if contact_result.error:
+        LOG.w(f"Error creating contact: {contact_result.error.value}")
     return contact_result.contact


@@ -229,7 +237,7 @@ def get_or_create_reply_to_contact(
         )
         return None

-    return contact_utils.create_contact(contact_address, contact_name, alias).contact
+    return contact_utils.create_contact(contact_address, alias, contact_name).contact


 def replace_header_when_forward(msg: Message, alias: Alias, header: str):
@@ -554,7 +562,7 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str]]:

     if not user.is_active():
         LOG.w(f"User {user} has been soft deleted")
-        return False, status.E502
+        return [(False, status.E502)]

     if not user.can_send_or_receive():
         LOG.i(f"User {user} cannot receive emails")
@@ -575,6 +583,8 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str]]:
     from_header = get_header_unicode(msg[headers.FROM])
     LOG.d("Create or get contact for from_header:%s", from_header)
     contact = get_or_create_contact(from_header, envelope.mail_from, alias)
+    if not contact:
+        return [(False, status.E504)]
     alias = (
         contact.alias
     )  # In case the Session was closed in the get_or_create we re-fetch the alias
@@ -1161,23 +1171,11 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):

     Session.commit()

-    # make the email comes from alias
-    from_header = alias.email
-    # add alias name from alias
-    if alias.name:
-        LOG.d("Put alias name %s in from header", alias.name)
-        from_header = sl_formataddr((alias.name, alias.email))
-    elif alias.custom_domain:
-        # add alias name from domain
-        if alias.custom_domain.name:
-            LOG.d(
-                "Put domain default alias name %s in from header",
-                alias.custom_domain.name,
-            )
-            from_header = sl_formataddr((alias.custom_domain.name, alias.email))
-
-    LOG.d("From header is %s", from_header)
-    add_or_replace_header(msg, headers.FROM, from_header)
+    recipient_name = get_alias_recipient_name(alias)
+    if recipient_name.message:
+        LOG.d(recipient_name.message)
+    LOG.d("From header is %s", recipient_name.name)
+    add_or_replace_header(msg, headers.FROM, recipient_name.name)

     try:
         if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
@@ -1510,7 +1508,9 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
         LOG.w(
             f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
         )
-        change_alias_status(alias, enabled=False)
+        change_alias_status(
+            alias, enabled=False, message=f"Set enabled=False due to {reason}"
+        )

         Notification.create(
             user_id=user.id,
@@ -12,6 +12,10 @@ class EventSink(ABC):
     def process(self, event: SyncEvent) -> bool:
         pass

+    @abstractmethod
+    def send_data_to_webhook(self, data: bytes) -> bool:
+        pass
+

 class HttpEventSink(EventSink):
     def process(self, event: SyncEvent) -> bool:
@@ -21,9 +25,16 @@ class HttpEventSink(EventSink):

         LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")

+        if self.send_data_to_webhook(event.content):
+            LOG.info(f"Event {event.id} sent successfully to webhook")
+            return True
+
+        return False
+
+    def send_data_to_webhook(self, data: bytes) -> bool:
         res = requests.post(
             url=EVENT_WEBHOOK,
-            data=event.content,
+            data=data,
             headers={"Content-Type": "application/x-protobuf"},
             verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
         )
@@ -36,7 +47,6 @@ class HttpEventSink(EventSink):
             )
             return False
         else:
-            LOG.info(f"Event {event.id} sent successfully to webhook")
             return True


@@ -44,3 +54,7 @@ class ConsoleEventSink(EventSink):
     def process(self, event: SyncEvent) -> bool:
         LOG.info(f"Handling event {event.id}")
         return True
+
+    def send_data_to_webhook(self, data: bytes) -> bool:
+        LOG.info(f"Sending {len(data)} bytes to webhook")
+        return True
@@ -72,7 +72,9 @@ class PostgresEventSource(EventSource):
             Session.close()  # Ensure we get a new connection and we don't leave a dangling tx

     def __connect(self):
-        self.__connection = psycopg2.connect(self.__connection_string)
+        self.__connection = psycopg2.connect(
+            self.__connection_string, application_name="sl-event-listen"
+        )

         from app.db import Session

@@ -83,24 +85,28 @@ class DeadLetterEventSource(EventSource):
     def __init__(self, max_retries: int):
         self.__max_retries = max_retries

+    def execute_loop(
+        self, on_event: Callable[[SyncEvent], NoReturn]
+    ) -> list[SyncEvent]:
+        threshold = arrow.utcnow().shift(minutes=-_DEAD_LETTER_THRESHOLD_MINUTES)
+        events = SyncEvent.get_dead_letter(
+            older_than=threshold, max_retries=self.__max_retries
+        )
+        if events:
+            LOG.info(f"Got {len(events)} dead letter events")
+            newrelic.agent.record_custom_metric(
+                "Custom/dead_letter_events_to_process", len(events)
+            )
+            for event in events:
+                if event.mark_as_taken(allow_taken_older_than=threshold):
+                    on_event(event)
+        return events
+
     @newrelic.agent.background_task()
     def run(self, on_event: Callable[[SyncEvent], NoReturn]):
         while True:
             try:
-                threshold = arrow.utcnow().shift(
-                    minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
-                )
-                events = SyncEvent.get_dead_letter(
-                    older_than=threshold, max_retries=self.__max_retries
-                )
-                if events:
-                    LOG.info(f"Got {len(events)} dead letter events")
-                if events:
-                    newrelic.agent.record_custom_metric(
-                        "Custom/dead_letter_events_to_process", len(events)
-                    )
-                    for event in events:
-                        on_event(event)
+                events = self.execute_loop(on_event)
                 Session.close()  # Ensure that we have a new connection and we don't have a dangling tx with a lock
                 if not events:
                     LOG.debug("No dead letter events")
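`execute_loop` now claims each dead-letter event with `mark_as_taken(allow_taken_older_than=threshold)`, so an event that was claimed but never finished can be picked up again once the threshold has passed. A minimal sketch of driving one iteration directly; the import path and retry limit are assumptions and the handler is a stand-in:

```python
# Import path assumed; the diff does not show this module's file name.
from events.event_source import DeadLetterEventSource

from app.models import SyncEvent


def handle(event: SyncEvent) -> None:
    # Stand-in handler; the real runner forwards the event to an EventSink.
    print(f"processing dead-letter event {event.id}")


source = DeadLetterEventSource(max_retries=10)  # illustrative retry limit
processed = source.execute_loop(on_event=handle)
print(f"reprocessed {len(processed)} events")
```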
@@ -18,8 +18,10 @@ from app.events.event_dispatcher import PostgresDispatcher
 from app.import_utils import handle_batch_import
 from app.jobs.event_jobs import send_alias_creation_events_for_user
 from app.jobs.export_user_data_job import ExportUserDataJob
+from app.jobs.send_event_job import SendEventToWebhookJob
 from app.log import LOG
 from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
+from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
 from server import create_light_app


@@ -128,7 +130,7 @@ def welcome_proton(user):

 def delete_mailbox_job(job: Job):
     mailbox_id = job.payload.get("mailbox_id")
-    mailbox = Mailbox.get(mailbox_id)
+    mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
     if not mailbox:
         return

@@ -152,6 +154,12 @@ def delete_mailbox_job(job: Job):

     mailbox_email = mailbox.email
     user = mailbox.user
+
+    emit_user_audit_log(
+        user=user,
+        action=UserAuditLogAction.DeleteMailbox,
+        message=f"Delete mailbox {mailbox.id} ({mailbox.email})",
+    )
     Mailbox.delete(mailbox_id)
     Session.commit()
     LOG.d("Mailbox %s %s deleted", mailbox_id, mailbox_email)
@@ -244,15 +252,27 @@ def process_job(job: Job):
         if not custom_domain:
             return

+        is_subdomain = custom_domain.is_sl_subdomain
         domain_name = custom_domain.domain
         user = custom_domain.user

+        custom_domain_partner_id = custom_domain.partner_id
         CustomDomain.delete(custom_domain.id)
         Session.commit()

+        if is_subdomain:
+            message = f"Delete subdomain {custom_domain_id} ({domain_name})"
+        else:
+            message = f"Delete custom domain {custom_domain_id} ({domain_name})"
+        emit_user_audit_log(
+            user=user,
+            action=UserAuditLogAction.DeleteCustomDomain,
+            message=message,
+        )
+
         LOG.d("Domain %s deleted", domain_name)

-        if custom_domain.partner_id is None:
+        if custom_domain_partner_id is None:
             send_email(
                 user.email,
                 f"Your domain {domain_name} has been deleted",
@@ -281,6 +301,10 @@ def process_job(job: Job):
             send_alias_creation_events_for_user(
                 user, dispatcher=PostgresDispatcher.get()
             )
+    elif job.name == config.JOB_SEND_EVENT_TO_WEBHOOK:
+        send_job = SendEventToWebhookJob.create_from_job(job)
+        if send_job:
+            send_job.run()
     else:
         LOG.e("Unknown job name %s", job.name)
@@ -7000,7 +7000,6 @@ unfunded
 unglazed
 ungloved
 unglue
-ungodly
 ungraded
 ungreased
 unguarded
New Alembic migration:
@@ -0,0 +1,27 @@
"""custom domain indices

Revision ID: 62afa3a10010
Revises: 88dd7a0abf54
Create Date: 2024-09-30 11:40:04.127791

"""
from alembic import op


# revision identifiers, used by Alembic.
revision = '62afa3a10010'
down_revision = '88dd7a0abf54'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_custom_domain_pending_deletion', 'custom_domain', ['pending_deletion'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_custom_domain_user_id', 'custom_domain', ['user_id'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_custom_domain_user_id', table_name='custom_domain', postgresql_concurrently=True)
        op.drop_index('ix_custom_domain_pending_deletion', table_name='custom_domain', postgresql_concurrently=True)
@@ -0,0 +1,45 @@
"""alias_audit_log

Revision ID: 91ed7f46dc81
Revises: 62afa3a10010
Create Date: 2024-10-11 13:22:11.594054

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '91ed7f46dc81'
down_revision = '62afa3a10010'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('alias_audit_log',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
    sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('alias_id', sa.Integer(), nullable=False),
    sa.Column('alias_email', sa.String(length=255), nullable=False),
    sa.Column('action', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_alias_audit_log_alias_email', 'alias_audit_log', ['alias_email'], unique=False)
    op.create_index('ix_alias_audit_log_alias_id', 'alias_audit_log', ['alias_id'], unique=False)
    op.create_index('ix_alias_audit_log_user_id', 'alias_audit_log', ['user_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_alias_audit_log_user_id', table_name='alias_audit_log')
    op.drop_index('ix_alias_audit_log_alias_id', table_name='alias_audit_log')
    op.drop_index('ix_alias_audit_log_alias_email', table_name='alias_audit_log')
    op.drop_table('alias_audit_log')
    # ### end Alembic commands ###
@@ -0,0 +1,44 @@
"""user_audit_log

Revision ID: 7d7b84779837
Revises: 91ed7f46dc81
Create Date: 2024-10-16 11:52:49.128644

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '7d7b84779837'
down_revision = '91ed7f46dc81'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user_audit_log',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
    sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('user_email', sa.String(length=255), nullable=False),
    sa.Column('action', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_user_audit_log_user_email', 'user_audit_log', ['user_email'], unique=False)
    op.create_index('ix_user_audit_log_user_id', 'user_audit_log', ['user_id'], unique=False)
    op.create_index('ix_user_audit_log_created_at', 'user_audit_log', ['created_at'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_user_audit_log_user_id', table_name='user_audit_log')
    op.drop_index('ix_user_audit_log_user_email', table_name='user_audit_log')
    op.drop_index('ix_user_audit_log_created_at', table_name='user_audit_log')
    op.drop_table('user_audit_log')
    # ### end Alembic commands ###
@@ -0,0 +1,27 @@
"""alias_audit_log_index_created_at

Revision ID: 32f25cbf12f6
Revises: 7d7b84779837
Create Date: 2024-10-16 16:45:36.827161

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '32f25cbf12f6'
down_revision = '7d7b84779837'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_alias_audit_log_created_at', 'alias_audit_log', ['created_at'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_alias_audit_log_created_at', table_name='alias_audit_log', postgresql_concurrently=True)
@@ -0,0 +1,28 @@
"""Preserve user id on alias delete

Revision ID: 4882cc49dde9
Revises: 32f25cbf12f6
Create Date: 2024-11-06 10:10:40.235991

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4882cc49dde9'
down_revision = '32f25cbf12f6'
branch_labels = None
depends_on = None


def upgrade():
    op.add_column('deleted_alias', sa.Column('user_id', sa.Integer(), server_default=None, nullable=True))
    with op.get_context().autocommit_block():
        op.create_index('ix_deleted_alias_user_id_created_at', 'deleted_alias', ['user_id', 'created_at'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_deleted_alias_user_id_created_at', table_name='deleted_alias')
        op.drop_column('deleted_alias', 'user_id')
@@ -0,0 +1,28 @@
"""Revert user id on deleted alias

Revision ID: bc9aa210efa3
Revises: 4882cc49dde9
Create Date: 2024-11-06 12:44:44.129691

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'bc9aa210efa3'
down_revision = '4882cc49dde9'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_deleted_alias_user_id_created_at', table_name='deleted_alias')
        op.drop_column('deleted_alias', 'user_id')


def downgrade():
    op.add_column('deleted_alias', sa.Column('user_id', sa.Integer(), server_default=None, nullable=True))
    with op.get_context().autocommit_block():
        op.create_index('ix_deleted_alias_user_id_created_at', 'deleted_alias', ['user_id', 'created_at'], unique=False, postgresql_concurrently=True)
@@ -0,0 +1,30 @@
"""add missing indices on user and mailbox

Revision ID: 842ac670096e
Revises: bc9aa210efa3
Create Date: 2024-11-13 15:55:28.798506

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '842ac670096e'
down_revision = 'bc9aa210efa3'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_mailbox_pgp_finger_print', 'mailbox', ['pgp_finger_print'], unique=False)
        op.create_index('ix_users_default_mailbox_id', 'users', ['default_mailbox_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_users_default_mailbox_id', table_name='users')
        op.drop_index('ix_mailbox_pgp_finger_print', table_name='mailbox')
@@ -0,0 +1,29 @@
"""add missing indices on email log

Revision ID: 12274da2299f
Revises: 842ac670096e
Create Date: 2024-11-14 10:27:20.371191

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '12274da2299f'
down_revision = '842ac670096e'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_email_log_bounced_mailbox_id', 'email_log', ['bounced_mailbox_id'], unique=False)
        op.create_index('ix_email_log_mailbox_id', 'email_log', ['mailbox_id'], unique=False)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_email_log_mailbox_id', table_name='email_log')
        op.drop_index('ix_email_log_bounced_mailbox_id', table_name='email_log')
@@ -0,0 +1,102 @@
"""add missing indices for fk constraints

Revision ID: 0f3ee15b0014
Revises: 12274da2299f
Create Date: 2024-11-15 12:29:10.739938

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0f3ee15b0014'
down_revision = '12274da2299f'
branch_labels = None
depends_on = None


def upgrade():
    with op.get_context().autocommit_block():
        op.create_index('ix_activation_code_user_id', 'activation_code', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_alias_original_owner_id', 'alias', ['original_owner_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_alias_used_on_user_id', 'alias_used_on', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_to_cookie_token_api_key_id', 'api_cookie_token', ['api_key_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_to_cookie_token_user_id', 'api_cookie_token', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_key_code', 'api_key', ['code'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_api_key_user_id', 'api_key', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_authorization_code_client_id', 'authorization_code', ['client_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_authorization_code_user_id', 'authorization_code', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_authorized_address_user_id', 'authorized_address', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_auto_create_rule_custom_domain_id', 'auto_create_rule', ['custom_domain_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_batch_import_file_id', 'batch_import', ['file_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_batch_import_user_id', 'batch_import', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_client_icon_id', 'client', ['icon_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_client_referral_id', 'client', ['referral_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_client_user_id', 'client', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_coupon_used_by_user_id', 'coupon', ['used_by_user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_directory_user_id', 'directory', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_domain_deleted_alias_user_id', 'domain_deleted_alias', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_email_log_refused_email_id', 'email_log', ['refused_email_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_fido_user_id', 'fido', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_file_user_id', 'file', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_hibp_notified_alias_user_id', 'hibp_notified_alias', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_mfa_browser_user_id', 'mfa_browser', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_newsletter_user_user_id', 'newsletter_user', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_oauth_token_client_id', 'oauth_token', ['client_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_oauth_token_user_id', 'oauth_token', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_payout_user_id', 'payout', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_phone_reservation_user_id', 'phone_reservation', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_provider_complaint_refused_email_id', 'provider_complaint', ['refused_email_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_provider_complaint_user_id', 'provider_complaint', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_redirect_uri_client_id', 'redirect_uri', ['client_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_referral_user_id', 'referral', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_refused_email_user_id', 'refused_email', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_reset_password_code_user_id', 'reset_password_code', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_sent_alert_user_id', 'sent_alert', ['user_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_users_default_alias_custom_domain_id', 'users', ['default_alias_custom_domain_id'], unique=False, postgresql_concurrently=True)
        op.create_index('ix_users_profile_picture_id', 'users', ['profile_picture_id'], unique=False, postgresql_concurrently=True)


def downgrade():
    with op.get_context().autocommit_block():
        op.drop_index('ix_users_profile_picture_id', table_name='users')
        op.drop_index('ix_users_default_alias_custom_domain_id', table_name='users')
        op.drop_index('ix_sent_alert_user_id', table_name='sent_alert')
        op.drop_index('ix_reset_password_code_user_id', table_name='reset_password_code')
        op.drop_index('ix_refused_email_user_id', table_name='refused_email')
        op.drop_index('ix_referral_user_id', table_name='referral')
        op.drop_index('ix_redirect_uri_client_id', table_name='redirect_uri')
        op.drop_index('ix_provider_complaint_user_id', table_name='provider_complaint')
        op.drop_index('ix_provider_complaint_refused_email_id', table_name='provider_complaint')
        op.drop_index('ix_phone_reservation_user_id', table_name='phone_reservation')
        op.drop_index('ix_payout_user_id', table_name='payout')
        op.drop_index('ix_oauth_token_user_id', table_name='oauth_token')
        op.drop_index('ix_oauth_token_client_id', table_name='oauth_token')
        op.drop_index('ix_newsletter_user_user_id', table_name='newsletter_user')
        op.drop_index('ix_mfa_browser_user_id', table_name='mfa_browser')
        op.drop_index('ix_hibp_notified_alias_user_id', table_name='hibp_notified_alias')
        op.drop_index('ix_file_user_id', table_name='file')
        op.drop_index('ix_fido_user_id', table_name='fido')
        op.drop_index('ix_email_log_refused_email_id', table_name='email_log')
        op.drop_index('ix_domain_deleted_alias_user_id', table_name='domain_deleted_alias')
        op.drop_index('ix_directory_user_id', table_name='directory')
        op.drop_index('ix_coupon_used_by_user_id', table_name='coupon')
        op.drop_index('ix_client_user_id', table_name='client')
        op.drop_index('ix_client_referral_id', table_name='client')
        op.drop_index('ix_client_icon_id', table_name='client')
        op.drop_index('ix_batch_import_user_id', table_name='batch_import')
        op.drop_index('ix_batch_import_file_id', table_name='batch_import')
        op.drop_index('ix_auto_create_rule_custom_domain_id', table_name='auto_create_rule')
        op.drop_index('ix_authorized_address_user_id', table_name='authorized_address')
        op.drop_index('ix_authorization_code_user_id', table_name='authorization_code')
        op.drop_index('ix_authorization_code_client_id', table_name='authorization_code')
        op.drop_index('ix_api_key_user_id', table_name='api_key')
        op.drop_index('ix_api_key_code', table_name='api_key')
        op.drop_index('ix_api_to_cookie_token_user_id', table_name='api_cookie_token')
        op.drop_index('ix_api_to_cookie_token_api_key_id', table_name='api_cookie_token')
        op.drop_index('ix_alias_used_on_user_id', table_name='alias_used_on')
        op.drop_index('ix_alias_original_owner_id', table_name='alias')
        op.drop_index('ix_activation_code_user_id', table_name='activation_code')
@@ -94,6 +94,20 @@ def log_nb_db_connection():
    newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)


@newrelic.agent.background_task()
def log_nb_db_connection_by_app_name():
    # get the number of connections to the DB
    rows = Session.execute(
        "SELECT application_name, count(datid) FROM pg_stat_activity group by application_name"
    )
    for row in rows:
        if row[0].find("sl-") == 0:
            LOG.d("number of db connections for app %s = %s", row[0], row[1])
            newrelic.agent.record_custom_metric(
                f"Custom/nb_db_app_connection/{row[0]}", row[1]
            )


@newrelic.agent.background_task()
def log_pending_to_process_events():
    r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")

@@ -148,6 +162,7 @@ if __name__ == "__main__":
    log_pending_to_process_events()
    log_events_pending_dead_letter()
    log_failed_events()
    log_nb_db_connection_by_app_name()
    Session.close()

    exporter.run()
@@ -21,7 +21,7 @@ if max_alias_id == 0:
    max_alias_id = Session.query(func.max(Alias.id)).scalar()

print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 1000
step = 10000
noteSql = "(note = 'Created through Proton' or note = 'Created through partner Proton')"
alias_query = f"UPDATE alias set note = NULL, flags = flags | :flag where id>=:start AND id<:end and {noteSql}"
updated = 0

@@ -38,12 +38,12 @@ for batch_start in range(alias_id_start, max_alias_id, step):
    updated += rows_done.rowcount
    Session.commit()
    elapsed = time.time() - start_time
    time_per_alias = elapsed / (updated + 1)
    last_batch_id = batch_start + step
    time_per_alias = elapsed / (last_batch_id)
    remaining = max_alias_id - last_batch_id
    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
    time_remaining = remaining / time_per_alias
    hours_remaining = time_remaining / 3600.0
    hours_remaining = time_remaining / 60.0
    print(
        f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
        f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f} mins remaining"
    )
print("")
62
app/oneshot/send_lifetime_user_events.py
Normal file
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
import argparse
import time

import arrow
from sqlalchemy import func

from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserPlanChanged, EventContent
from app.models import PartnerUser, User
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Send lifetime users to proton"
)
parser.add_argument(
    "-s", "--start_pu_id", default=0, type=int, help="Initial partner_user_id"
)
parser.add_argument(
    "-e", "--end_pu_id", default=0, type=int, help="Last partner_user_id"
)

args = parser.parse_args()
pu_id_start = args.start_pu_id
max_pu_id = args.end_pu_id
if max_pu_id == 0:
    max_pu_id = Session.query(func.max(PartnerUser.id)).scalar()

print(f"Checking partner user {pu_id_start} to {max_pu_id}")
step = 1000
done = 0
start_time = time.time()
with_lifetime = 0
for batch_start in range(pu_id_start, max_pu_id, step):
    users = (
        Session.query(User)
        .join(PartnerUser, PartnerUser.user_id == User.id)
        .filter(
            PartnerUser.id >= batch_start,
            PartnerUser.id < batch_start + step,
            User.lifetime == True,  # noqa :E712
        )
    ).all()
    for user in users:
        # Just in case the == True cond is wonky
        if not user.lifetime:
            continue
        with_lifetime += 1
        event = UserPlanChanged(plan_end_time=arrow.get("2038-01-01").timestamp)
        EventDispatcher.send_event(user, EventContent(user_plan_change=event))
        Session.flush()
    Session.commit()
    elapsed = time.time() - start_time
    last_batch_id = batch_start + step
    time_per_alias = elapsed / (last_batch_id)
    remaining = max_pu_id - last_batch_id
    time_remaining = remaining / time_per_alias
    hours_remaining = time_remaining / 60.0
    print(
        f"\PartnerUser {batch_start}/{max_pu_id} {with_lifetime} {hours_remaining:.2f} mins remaining"
    )
print(f"With SL lifetime {with_lifetime}")
58
app/oneshot/send_plan_change_events.py
Normal file
@@ -0,0 +1,58 @@
#!/usr/bin/env python3
import argparse
import time

import arrow
from sqlalchemy import func

from app.account_linking import send_user_plan_changed_event
from app.models import PartnerUser
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Backfill alias", description="Update alias notes and backfill flag"
)
parser.add_argument(
    "-s", "--start_pu_id", default=0, type=int, help="Initial partner_user_id"
)
parser.add_argument(
    "-e", "--end_pu_id", default=0, type=int, help="Last partner_user_id"
)

args = parser.parse_args()
pu_id_start = args.start_pu_id
max_pu_id = args.end_pu_id
if max_pu_id == 0:
    max_pu_id = Session.query(func.max(PartnerUser.id)).scalar()

print(f"Checking partner user {pu_id_start} to {max_pu_id}")
step = 100
updated = 0
start_time = time.time()
with_premium = 0
with_lifetime = 0
for batch_start in range(pu_id_start, max_pu_id, step):
    partner_users = (
        Session.query(PartnerUser).filter(
            PartnerUser.id >= batch_start, PartnerUser.id < batch_start + step
        )
    ).all()
    for partner_user in partner_users:
        subscription_end = send_user_plan_changed_event(partner_user)
        if subscription_end is not None:
            if subscription_end > arrow.get("2038-01-01").timestamp:
                with_lifetime += 1
            else:
                with_premium += 1
        updated += 1
    Session.commit()
    elapsed = time.time() - start_time
    last_batch_id = batch_start + step
    time_per_alias = elapsed / (last_batch_id)
    remaining = max_pu_id - last_batch_id
    time_remaining = remaining / time_per_alias
    hours_remaining = time_remaining / 60.0
    print(
        f"\PartnerUser {batch_start}/{max_pu_id} {updated} {hours_remaining:.2f} mins remaining"
    )
print(f"With SL premium {with_premium} lifetime {with_lifetime}")
98
app/poetry.lock
generated
98
app/poetry.lock
generated
@ -1,4 +1,4 @@
|
|||||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aiohttp"
|
name = "aiohttp"
|
||||||
@ -360,35 +360,41 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "boto3"
|
name = "boto3"
|
||||||
version = "1.15.9"
|
version = "1.35.37"
|
||||||
description = "The AWS SDK for Python"
|
description = "The AWS SDK for Python"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "boto3-1.15.9-py2.py3-none-any.whl", hash = "sha256:e0a1dbc0a0e460dc6de2f4144b5015edad3ab5c17ee83c6194b1a010d815bc60"},
|
{file = "boto3-1.35.37-py3-none-any.whl", hash = "sha256:385ca77bf8ea4ab2d97f6e2435bdb29f77d9301e2f7ac796c2f465753c2adf3c"},
|
||||||
{file = "boto3-1.15.9.tar.gz", hash = "sha256:02f5f7a2b1349760b030c34f90a9cb4600bf8fe3cbc76b801d122bc4cecf3a7f"},
|
{file = "boto3-1.35.37.tar.gz", hash = "sha256:470d981583885859fed2fd1c185eeb01cc03e60272d499bafe41b12625b158c8"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
botocore = ">=1.18.9,<1.19.0"
|
botocore = ">=1.35.37,<1.36.0"
|
||||||
jmespath = ">=0.7.1,<1.0.0"
|
jmespath = ">=0.7.1,<2.0.0"
|
||||||
s3transfer = ">=0.3.0,<0.4.0"
|
s3transfer = ">=0.10.0,<0.11.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "botocore"
|
name = "botocore"
|
||||||
version = "1.18.9"
|
version = "1.35.37"
|
||||||
description = "Low-level, data-driven core of boto 3."
|
description = "Low-level, data-driven core of boto 3."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "botocore-1.18.9-py2.py3-none-any.whl", hash = "sha256:dc3244170254cbba7dfde00b0489f830069d93dd6a9e555178d989072d7ee7c2"},
|
{file = "botocore-1.35.37-py3-none-any.whl", hash = "sha256:64f965d4ba7adb8d79ce044c3aef7356e05dd74753cf7e9115b80f477845d920"},
|
||||||
{file = "botocore-1.18.9.tar.gz", hash = "sha256:35b06b8801eb2dd7e708de35581f9c0304740645874f3af5b8b0c1648f8d6365"},
|
{file = "botocore-1.35.37.tar.gz", hash = "sha256:b2b4d29bafd95b698344f2f0577bb67064adbf1735d8a0e3c7473daa59c23ba6"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
jmespath = ">=0.7.1,<1.0.0"
|
jmespath = ">=0.7.1,<2.0.0"
|
||||||
python-dateutil = ">=2.1,<3.0.0"
|
python-dateutil = ">=2.1,<3.0.0"
|
||||||
urllib3 = {version = ">=1.20,<1.26", markers = "python_version != \"3.4\""}
|
urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
crt = ["awscrt (==0.22.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cachetools"
|
name = "cachetools"
|
||||||
@ -1372,6 +1378,7 @@ files = [
|
|||||||
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
|
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
|
||||||
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
|
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
|
||||||
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
|
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
|
||||||
|
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
|
||||||
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
|
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
|
||||||
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
|
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
|
||||||
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
|
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
|
||||||
@ -1380,6 +1387,7 @@ files = [
|
|||||||
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
|
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
|
||||||
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
|
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
|
||||||
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
|
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
|
||||||
|
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
|
||||||
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
|
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
|
||||||
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
|
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
|
||||||
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
|
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
|
||||||
@ -1409,6 +1417,7 @@ files = [
|
|||||||
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
|
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
|
||||||
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
|
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
|
||||||
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
|
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
|
||||||
|
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
|
||||||
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
|
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
|
||||||
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
|
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
|
||||||
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
|
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
|
||||||
@ -1417,6 +1426,7 @@ files = [
|
|||||||
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
|
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
|
||||||
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
|
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
|
||||||
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
|
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
|
||||||
|
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
|
||||||
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
|
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
|
||||||
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
|
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
|
||||||
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
|
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
|
||||||
@ -2891,50 +2901,72 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "s3transfer"
|
name = "s3transfer"
|
||||||
version = "0.3.3"
|
version = "0.10.3"
|
||||||
description = "An Amazon S3 Transfer Manager"
|
description = "An Amazon S3 Transfer Manager"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "s3transfer-0.3.3-py2.py3-none-any.whl", hash = "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13"},
|
{file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"},
|
||||||
{file = "s3transfer-0.3.3.tar.gz", hash = "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db"},
|
{file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
botocore = ">=1.12.36,<2.0a.0"
|
botocore = ">=1.33.2,<2.0a.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sentry-sdk"
|
name = "sentry-sdk"
|
||||||
version = "1.5.11"
|
version = "2.16.0"
|
||||||
description = "Python client for Sentry (https://sentry.io)"
|
description = "Python client for Sentry (https://sentry.io)"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.6"
|
||||||
files = [
|
files = [
|
||||||
{file = "sentry-sdk-1.5.11.tar.gz", hash = "sha256:6c01d9d0b65935fd275adc120194737d1df317dce811e642cbf0394d0d37a007"},
|
{file = "sentry_sdk-2.16.0-py2.py3-none-any.whl", hash = "sha256:49139c31ebcd398f4f6396b18910610a0c1602f6e67083240c33019d1f6aa30c"},
|
||||||
{file = "sentry_sdk-1.5.11-py2.py3-none-any.whl", hash = "sha256:c17179183cac614e900cbd048dab03f49a48e2820182ec686c25e7ce46f8548f"},
|
{file = "sentry_sdk-2.16.0.tar.gz", hash = "sha256:90f733b32e15dfc1999e6b7aca67a38688a567329de4d6e184154a73f96c6892"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
certifi = "*"
|
certifi = "*"
|
||||||
urllib3 = ">=1.10.0"
|
urllib3 = ">=1.26.11"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
aiohttp = ["aiohttp (>=3.5)"]
|
aiohttp = ["aiohttp (>=3.5)"]
|
||||||
|
anthropic = ["anthropic (>=0.16)"]
|
||||||
|
arq = ["arq (>=0.23)"]
|
||||||
|
asyncpg = ["asyncpg (>=0.23)"]
|
||||||
beam = ["apache-beam (>=2.12)"]
|
beam = ["apache-beam (>=2.12)"]
|
||||||
bottle = ["bottle (>=0.12.13)"]
|
bottle = ["bottle (>=0.12.13)"]
|
||||||
celery = ["celery (>=3)"]
|
celery = ["celery (>=3)"]
|
||||||
|
celery-redbeat = ["celery-redbeat (>=2)"]
|
||||||
chalice = ["chalice (>=1.16.0)"]
|
chalice = ["chalice (>=1.16.0)"]
|
||||||
|
clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
|
||||||
django = ["django (>=1.8)"]
|
django = ["django (>=1.8)"]
|
||||||
falcon = ["falcon (>=1.4)"]
|
falcon = ["falcon (>=1.4)"]
|
||||||
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
|
fastapi = ["fastapi (>=0.79.0)"]
|
||||||
|
flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
|
||||||
|
grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
|
||||||
|
http2 = ["httpcore[http2] (==1.*)"]
|
||||||
httpx = ["httpx (>=0.16.0)"]
|
httpx = ["httpx (>=0.16.0)"]
|
||||||
|
huey = ["huey (>=2)"]
|
||||||
|
huggingface-hub = ["huggingface-hub (>=0.22)"]
|
||||||
|
langchain = ["langchain (>=0.0.210)"]
|
||||||
|
litestar = ["litestar (>=2.0.0)"]
|
||||||
|
loguru = ["loguru (>=0.5)"]
|
||||||
|
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
|
||||||
|
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
|
||||||
|
opentelemetry-experimental = ["opentelemetry-distro"]
|
||||||
pure-eval = ["asttokens", "executing", "pure-eval"]
|
pure-eval = ["asttokens", "executing", "pure-eval"]
|
||||||
|
pymongo = ["pymongo (>=3.1)"]
|
||||||
pyspark = ["pyspark (>=2.4.4)"]
|
pyspark = ["pyspark (>=2.4.4)"]
|
||||||
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
|
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
|
||||||
rq = ["rq (>=0.6)"]
|
rq = ["rq (>=0.6)"]
|
||||||
sanic = ["sanic (>=0.8)"]
|
sanic = ["sanic (>=0.8)"]
|
||||||
sqlalchemy = ["sqlalchemy (>=1.2)"]
|
sqlalchemy = ["sqlalchemy (>=1.2)"]
|
||||||
tornado = ["tornado (>=5)"]
|
starlette = ["starlette (>=0.19.1)"]
|
||||||
|
starlite = ["starlite (>=1.48)"]
|
||||||
|
tornado = ["tornado (>=6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "setuptools"
|
name = "setuptools"
|
||||||
@ -3295,18 +3327,18 @@ files = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "urllib3"
|
name = "urllib3"
|
||||||
version = "1.25.10"
|
version = "1.26.20"
|
||||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"},
|
{file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
|
||||||
{file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"},
|
{file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
brotli = ["brotlipy (>=0.6.0)"]
|
brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
|
||||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
|
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -3704,4 +3736,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
|||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = "^3.10"
|
python-versions = "^3.10"
|
||||||
content-hash = "22b9a61e9999a215aacb889b3790ee1a6840ce249aea2e3d16c6113243d5c126"
|
content-hash = "314f199bd50ccbf636ce1c6c753f8c79a1f5a16aa7c1a330a2ec514a13dbad2d"
|
||||||
|
@@ -69,7 +69,7 @@ python-dotenv = "^0.14.0"
ipython = "^7.31.1"
sqlalchemy_utils = "^0.36.8"
psycopg2-binary = "^2.9.3"
sentry_sdk = "^1.5.11"
sentry_sdk = "^2.16.0"
blinker = "^1.4"
arrow = "^0.16.0"
Flask-WTF = "^0.14.3"
364
app/server.py
@@ -1,4 +1,3 @@
import json
import os
import time
from datetime import timedelta

@@ -7,10 +6,9 @@ import arrow
import click
import flask_limiter
import flask_profiler
import newrelic.agent
import sentry_sdk
from coinbase_commerce.error import WebhookInvalidPayload, SignatureVerificationError
from coinbase_commerce.webhook import Webhook
from dateutil.relativedelta import relativedelta
from flask import (
    Flask,
    redirect,

@@ -29,7 +27,7 @@ from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from werkzeug.middleware.proxy_fix import ProxyFix

from app import paddle_utils, config, paddle_callback, constants
from app import config, constants
from app.admin_model import (
    SLAdminIndexView,
    UserAdmin,

@@ -55,7 +53,6 @@ from app.config import (
    FLASK_SECRET,
    SENTRY_DSN,
    URL,
    PADDLE_MONTHLY_PRODUCT_ID,
    FLASK_PROFILER_PATH,
    FLASK_PROFILER_PASSWORD,
    SENTRY_FRONT_END_DSN,

@@ -69,22 +66,16 @@ from app.config import (
    LANDING_PAGE_URL,
    STATUS_PAGE_URL,
    SUPPORT_EMAIL,
    PADDLE_MONTHLY_PRODUCT_IDS,
    PADDLE_YEARLY_PRODUCT_IDS,
    PGP_SIGNER,
    COINBASE_WEBHOOK_SECRET,
    PAGE_LIMIT,
    PADDLE_COUPON_ID,
    ZENDESK_ENABLED,
    MAX_NB_EMAIL_FREE_PLAN,
    MEM_STORE_URI,
)
from app.dashboard.base import dashboard_bp
from app.subscription_webhook import execute_subscription_webhook
from app.db import Session
from app.developer.base import developer_bp
from app.discover.base import discover_bp
from app.email_utils import send_email, render
from app.extensions import login_manager, limiter
from app.fake_data import fake_data
from app.internal.base import internal_bp

@@ -93,11 +84,8 @@ from app.log import LOG
from app.models import (
    User,
    Alias,
    Subscription,
    PlanEnum,
    CustomDomain,
    Mailbox,
    CoinbaseSubscription,
    EmailLog,
    Contact,
    ManualSubscription,

@@ -114,9 +102,11 @@ from app.monitor.base import monitor_bp
from app.newsletter_utils import send_newsletter_to_user
from app.oauth.base import oauth_bp
from app.onboarding.base import onboarding_bp
from app.payments.coinbase import setup_coinbase_commerce
from app.payments.paddle import setup_paddle_callback
from app.phone.base import phone_bp
from app.redis_services import initialize_redis_services
from app.utils import random_string
from app.sentry_utils import sentry_before_send

if SENTRY_DSN:
    LOG.d("enable sentry")

@@ -127,6 +117,7 @@ if SENTRY_DSN:
            FlaskIntegration(),
            SqlalchemyIntegration(),
        ],
        before_send=sentry_before_send,
    )

# the app is served behind nginx which uses http and not https

@@ -299,7 +290,9 @@ def set_index_page(app):
            res.status_code,
            time.time() - start_time,
        )
        newrelic.agent.record_custom_event(
            "HttpResponseStatus", {"code": res.status_code}
        )
        return res

@ -441,341 +434,6 @@ def jinja2_filter(app):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def setup_paddle_callback(app: Flask):
|
|
||||||
@app.route("/paddle", methods=["GET", "POST"])
|
|
||||||
def paddle():
|
|
||||||
LOG.d(f"paddle callback {request.form.get('alert_name')} {request.form}")
|
|
||||||
|
|
||||||
# make sure the request comes from Paddle
|
|
||||||
if not paddle_utils.verify_incoming_request(dict(request.form)):
|
|
||||||
LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
|
|
||||||
return "KO", 400
|
|
||||||
|
|
||||||
if (
|
|
||||||
request.form.get("alert_name") == "subscription_created"
|
|
||||||
): # new user subscribes
|
|
||||||
# the passthrough is json encoded, e.g.
|
|
||||||
# request.form.get("passthrough") = '{"user_id": 88 }'
|
|
||||||
passthrough = json.loads(request.form.get("passthrough"))
|
|
||||||
user_id = passthrough.get("user_id")
|
|
||||||
user = User.get(user_id)
|
|
||||||
|
|
||||||
subscription_plan_id = int(request.form.get("subscription_plan_id"))
|
|
||||||
|
|
||||||
if subscription_plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
|
|
||||||
plan = PlanEnum.monthly
|
|
||||||
elif subscription_plan_id in PADDLE_YEARLY_PRODUCT_IDS:
|
|
||||||
plan = PlanEnum.yearly
|
|
||||||
else:
|
|
||||||
LOG.e(
|
|
||||||
"Unknown subscription_plan_id %s %s",
|
|
||||||
subscription_plan_id,
|
|
||||||
request.form,
|
|
||||||
)
|
|
||||||
return "No such subscription", 400
|
|
||||||
|
|
||||||
sub = Subscription.get_by(user_id=user.id)
|
|
||||||
|
|
||||||
if not sub:
|
|
||||||
LOG.d(f"create a new Subscription for user {user}")
|
|
||||||
Subscription.create(
|
|
||||||
user_id=user.id,
|
|
||||||
cancel_url=request.form.get("cancel_url"),
|
|
||||||
update_url=request.form.get("update_url"),
|
|
||||||
subscription_id=request.form.get("subscription_id"),
|
|
||||||
event_time=arrow.now(),
|
|
||||||
next_bill_date=arrow.get(
|
|
||||||
request.form.get("next_bill_date"), "YYYY-MM-DD"
|
|
||||||
).date(),
|
|
||||||
plan=plan,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
LOG.d(f"Update an existing Subscription for user {user}")
|
|
||||||
sub.cancel_url = request.form.get("cancel_url")
|
|
||||||
sub.update_url = request.form.get("update_url")
|
|
||||||
sub.subscription_id = request.form.get("subscription_id")
|
|
||||||
sub.event_time = arrow.now()
|
|
||||||
sub.next_bill_date = arrow.get(
|
|
||||||
request.form.get("next_bill_date"), "YYYY-MM-DD"
|
|
||||||
).date()
|
|
||||||
sub.plan = plan
|
|
||||||
|
|
||||||
# make sure to set the new plan as not-cancelled
|
|
||||||
# in case user cancels a plan and subscribes a new plan
|
|
||||||
sub.cancelled = False
|
|
||||||
|
|
||||||
execute_subscription_webhook(user)
|
|
||||||
LOG.d("User %s upgrades!", user)
|
|
||||||
|
|
||||||
Session.commit()
|
|
||||||
|
|
||||||
elif request.form.get("alert_name") == "subscription_payment_succeeded":
|
|
||||||
subscription_id = request.form.get("subscription_id")
|
|
||||||
LOG.d("Update subscription %s", subscription_id)
|
|
||||||
|
|
||||||
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
|
|
||||||
# when user subscribes, the "subscription_payment_succeeded" can arrive BEFORE "subscription_created"
|
|
||||||
# at that time, subscription object does not exist yet
|
|
||||||
if sub:
|
|
||||||
sub.event_time = arrow.now()
|
|
||||||
sub.next_bill_date = arrow.get(
|
|
||||||
request.form.get("next_bill_date"), "YYYY-MM-DD"
|
|
||||||
).date()
|
|
||||||
|
|
||||||
Session.commit()
|
|
||||||
execute_subscription_webhook(sub.user)
|
|
||||||
|
|
||||||
elif request.form.get("alert_name") == "subscription_cancelled":
|
|
||||||
subscription_id = request.form.get("subscription_id")
|
|
||||||
|
|
||||||
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
|
|
||||||
if sub:
|
|
||||||
# cancellation_effective_date should be the same as next_bill_date
|
|
||||||
LOG.w(
|
|
||||||
"Cancel subscription %s %s on %s, next bill date %s",
|
|
||||||
subscription_id,
|
|
||||||
sub.user,
|
|
||||||
request.form.get("cancellation_effective_date"),
|
|
||||||
sub.next_bill_date,
|
|
||||||
)
|
|
||||||
sub.event_time = arrow.now()
|
|
||||||
|
|
||||||
sub.cancelled = True
|
|
||||||
Session.commit()
|
|
||||||
|
|
||||||
user = sub.user
|
|
||||||
|
|
||||||
send_email(
|
|
||||||
user.email,
|
|
||||||
"SimpleLogin - your subscription is canceled",
|
|
||||||
render(
|
|
||||||
"transactional/subscription-cancel.txt",
|
|
||||||
user=user,
|
|
||||||
end_date=request.form.get("cancellation_effective_date"),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
execute_subscription_webhook(sub.user)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# user might have deleted their account
|
|
||||||
LOG.i(f"Cancel non-exist subscription {subscription_id}")
|
|
||||||
return "OK"
|
|
||||||
elif request.form.get("alert_name") == "subscription_updated":
|
|
||||||
subscription_id = request.form.get("subscription_id")
|
|
||||||
|
|
||||||
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
|
|
||||||
if sub:
|
|
||||||
next_bill_date = request.form.get("next_bill_date")
|
|
||||||
if not next_bill_date:
|
|
||||||
paddle_callback.failed_payment(sub, subscription_id)
|
|
||||||
return "OK"
|
|
||||||
|
|
||||||
LOG.d(
|
|
||||||
"Update subscription %s %s on %s, next bill date %s",
|
|
||||||
subscription_id,
|
|
||||||
sub.user,
|
|
||||||
request.form.get("cancellation_effective_date"),
|
|
||||||
sub.next_bill_date,
|
|
||||||
)
|
|
||||||
if (
|
|
||||||
int(request.form.get("subscription_plan_id"))
|
|
||||||
== PADDLE_MONTHLY_PRODUCT_ID
|
|
||||||
):
|
|
||||||
plan = PlanEnum.monthly
|
|
||||||
else:
|
|
||||||
plan = PlanEnum.yearly
|
|
||||||
|
|
||||||
sub.cancel_url = request.form.get("cancel_url")
|
|
||||||
sub.update_url = request.form.get("update_url")
|
|
||||||
sub.event_time = arrow.now()
|
|
||||||
sub.next_bill_date = arrow.get(
|
|
||||||
request.form.get("next_bill_date"), "YYYY-MM-DD"
|
|
||||||
).date()
|
|
||||||
sub.plan = plan
|
|
||||||
|
|
||||||
# make sure to set the new plan as not-cancelled
|
|
||||||
sub.cancelled = False
|
|
||||||
|
|
||||||
Session.commit()
|
|
||||||
execute_subscription_webhook(sub.user)
|
|
||||||
else:
|
|
||||||
LOG.w(
|
|
||||||
f"update non-exist subscription {subscription_id}. {request.form}"
|
|
||||||
)
|
|
||||||
return "No such subscription", 400
|
|
||||||
elif request.form.get("alert_name") == "payment_refunded":
|
|
||||||
subscription_id = request.form.get("subscription_id")
|
|
||||||
LOG.d("Refund request for subscription %s", subscription_id)
|
|
||||||
|
|
||||||
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
|
|
||||||
|
|
||||||
if sub:
|
|
||||||
user = sub.user
|
|
||||||
Subscription.delete(sub.id)
|
|
||||||
Session.commit()
|
|
||||||
LOG.e("%s requests a refund", user)
|
|
||||||
execute_subscription_webhook(sub.user)
|
|
||||||
|
|
||||||
elif request.form.get("alert_name") == "subscription_payment_refunded":
|
|
||||||
subscription_id = request.form.get("subscription_id")
|
|
||||||
sub: Subscription = Subscription.get_by(subscription_id=subscription_id)
|
|
||||||
LOG.d(
|
|
||||||
"Handle subscription_payment_refunded for subscription %s",
|
|
||||||
subscription_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not sub:
|
|
||||||
LOG.w(
|
|
||||||
"No such subscription for %s, payload %s",
|
|
||||||
subscription_id,
|
|
||||||
request.form,
|
|
||||||
)
|
|
||||||
return "No such subscription"
|
|
||||||
|
|
||||||
plan_id = int(request.form["subscription_plan_id"])
|
|
||||||
if request.form["refund_type"] == "full":
|
|
||||||
if plan_id in PADDLE_MONTHLY_PRODUCT_IDS:
|
|
||||||
LOG.d("subtract 1 month from next_bill_date %s", sub.next_bill_date)
|
|
||||||
sub.next_bill_date = sub.next_bill_date - relativedelta(months=1)
|
|
||||||
LOG.d("next_bill_date is %s", sub.next_bill_date)
|
|
||||||
Session.commit()
|
|
||||||
elif plan_id in PADDLE_YEARLY_PRODUCT_IDS:
|
|
||||||
LOG.d("subtract 1 year from next_bill_date %s", sub.next_bill_date)
|
|
||||||
sub.next_bill_date = sub.next_bill_date - relativedelta(years=1)
|
|
||||||
LOG.d("next_bill_date is %s", sub.next_bill_date)
|
|
||||||
Session.commit()
|
|
||||||
else:
|
|
||||||
LOG.e("Unknown plan_id %s", plan_id)
|
|
||||||
else:
|
|
||||||
LOG.w("partial subscription_payment_refunded, not handled")
|
|
||||||
execute_subscription_webhook(sub.user)
|
|
||||||
|
|
||||||
return "OK"
|
|
||||||
|
|
||||||
@app.route("/paddle_coupon", methods=["GET", "POST"])
|
|
||||||
def paddle_coupon():
|
|
||||||
LOG.d("paddle coupon callback %s", request.form)
|
|
||||||
|
|
||||||
if not paddle_utils.verify_incoming_request(dict(request.form)):
|
|
||||||
LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
|
|
||||||
return "KO", 400
|
|
||||||
|
|
||||||
product_id = request.form.get("p_product_id")
|
|
||||||
if product_id != PADDLE_COUPON_ID:
|
|
||||||
LOG.e("product_id %s not match with %s", product_id, PADDLE_COUPON_ID)
|
|
||||||
return "KO", 400
|
|
||||||
|
|
||||||
email = request.form.get("email")
|
|
||||||
LOG.d("Paddle coupon request for %s", email)
|
|
||||||
|
|
||||||
coupon = Coupon.create(
|
|
||||||
code=random_string(30),
|
|
||||||
comment="For 1-year coupon",
|
|
||||||
expires_date=arrow.now().shift(years=1, days=-1),
|
|
||||||
commit=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
return (
|
|
||||||
f"Your 1-year coupon is <b>{coupon.code}</b> <br> "
|
|
||||||
f"It's valid until <b>{coupon.expires_date.date().isoformat()}</b>"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_coinbase_commerce(app):
|
|
||||||
@app.route("/coinbase", methods=["POST"])
|
|
||||||
def coinbase_webhook():
|
|
||||||
# event payload
|
|
||||||
request_data = request.data.decode("utf-8")
|
|
||||||
# webhook signature
|
|
||||||
request_sig = request.headers.get("X-CC-Webhook-Signature", None)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# signature verification and event object construction
|
|
||||||
event = Webhook.construct_event(
|
|
||||||
request_data, request_sig, COINBASE_WEBHOOK_SECRET
|
|
||||||
)
|
|
||||||
except (WebhookInvalidPayload, SignatureVerificationError) as e:
|
|
||||||
LOG.e("Invalid Coinbase webhook")
|
|
||||||
return str(e), 400
|
|
||||||
|
|
||||||
LOG.d("Coinbase event %s", event)
|
|
||||||
|
|
||||||
if event["type"] == "charge:confirmed":
|
|
||||||
if handle_coinbase_event(event):
|
|
||||||
return "success", 200
|
|
||||||
else:
|
|
||||||
return "error", 400
|
|
||||||
|
|
||||||
return "success", 200
|
|
||||||
|
|
||||||
|
|
||||||
def handle_coinbase_event(event) -> bool:
|
|
||||||
server_user_id = event["data"]["metadata"]["user_id"]
|
|
||||||
try:
|
|
||||||
user_id = int(server_user_id)
|
|
||||||
except ValueError:
|
|
||||||
user_id = int(float(server_user_id))
|
|
||||||
|
|
||||||
code = event["data"]["code"]
|
|
||||||
user = User.get(user_id)
|
|
||||||
if not user:
|
|
||||||
LOG.e("User not found %s", user_id)
|
|
||||||
return False
|
|
||||||
|
|
||||||
coinbase_subscription: CoinbaseSubscription = CoinbaseSubscription.get_by(
|
|
||||||
user_id=user_id
|
|
||||||
)
|
|
||||||
|
|
||||||
if not coinbase_subscription:
|
|
||||||
LOG.d("Create a coinbase subscription for %s", user)
|
|
||||||
coinbase_subscription = CoinbaseSubscription.create(
|
|
||||||
user_id=user_id, end_at=arrow.now().shift(years=1), code=code, commit=True
|
|
||||||
)
|
|
||||||
send_email(
|
|
||||||
user.email,
|
|
||||||
"Your SimpleLogin account has been upgraded",
|
|
||||||
render(
|
|
||||||
"transactional/coinbase/new-subscription.txt",
|
|
||||||
user=user,
|
|
||||||
coinbase_subscription=coinbase_subscription,
|
|
||||||
),
|
|
||||||
render(
|
|
||||||
"transactional/coinbase/new-subscription.html",
|
|
||||||
user=user,
|
|
||||||
coinbase_subscription=coinbase_subscription,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
if coinbase_subscription.code != code:
|
|
||||||
LOG.d("Update code from %s to %s", coinbase_subscription.code, code)
|
|
||||||
coinbase_subscription.code = code
|
|
||||||
|
|
||||||
if coinbase_subscription.is_active():
|
|
||||||
coinbase_subscription.end_at = coinbase_subscription.end_at.shift(years=1)
|
|
||||||
else: # already expired subscription
|
|
||||||
coinbase_subscription.end_at = arrow.now().shift(years=1)
|
|
||||||
|
|
||||||
Session.commit()
|
|
||||||
|
|
||||||
send_email(
|
|
||||||
user.email,
|
|
||||||
"Your SimpleLogin account has been extended",
|
|
||||||
render(
|
|
||||||
"transactional/coinbase/extend-subscription.txt",
|
|
||||||
user=user,
|
|
||||||
coinbase_subscription=coinbase_subscription,
|
|
||||||
),
|
|
||||||
render(
|
|
||||||
"transactional/coinbase/extend-subscription.html",
|
|
||||||
user=user,
|
|
||||||
coinbase_subscription=coinbase_subscription,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
execute_subscription_webhook(user)
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
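A quick sketch of the next_bill_date handling used throughout the Paddle callbacks above; the concrete date string is only an illustrative example, not a value from this change:

    import arrow

    # Paddle sends "next_bill_date" as a plain date string, e.g. "2025-01-31".
    # arrow.get(value, "YYYY-MM-DD") parses it, and .date() yields the datetime.date
    # that is stored on Subscription.next_bill_date.
    next_bill_date = arrow.get("2025-01-31", "YYYY-MM-DD").date()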
 def init_extensions(app: Flask):
     login_manager.init_app(app)

@@ -784,10 +442,10 @@ def init_admin(app):
     admin = Admin(name="SimpleLogin", template_mode="bootstrap4")

     admin.init_app(app, index_view=SLAdminIndexView())
+    admin.add_view(EmailSearchAdmin(name="Email Search", endpoint="email_search"))
     admin.add_view(UserAdmin(User, Session))
     admin.add_view(AliasAdmin(Alias, Session))
     admin.add_view(MailboxAdmin(Mailbox, Session))
-    admin.add_view(EmailSearchAdmin(name="Email Search", endpoint="email_search"))
     admin.add_view(CouponAdmin(Coupon, Session))
     admin.add_view(ManualSubscriptionAdmin(ManualSubscription, Session))
     admin.add_view(CustomDomainAdmin(CustomDomain, Session))
app/tasks/clean_alias_audit_log.py (new file, 12 lines)

import arrow

from app.db import Session
from app.log import LOG
from app.models import AliasAuditLog


def cleanup_alias_audit_log(oldest_allowed: arrow.Arrow):
    LOG.i(f"Deleting alias_audit_log older than {oldest_allowed}")
    count = AliasAuditLog.filter(AliasAuditLog.created_at < oldest_allowed).delete()
    Session.commit()
    LOG.i(f"Deleted {count} alias_audit_log entries")
app/tasks/clean_user_audit_log.py (new file, 12 lines)

import arrow

from app.db import Session
from app.log import LOG
from app.models import UserAuditLog


def cleanup_user_audit_log(oldest_allowed: arrow.Arrow):
    LOG.i(f"Deleting user_audit_log older than {oldest_allowed}")
    count = UserAuditLog.filter(UserAuditLog.created_at < oldest_allowed).delete()
    Session.commit()
    LOG.i(f"Deleted {count} user_audit_log entries")
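A minimal sketch of how these two cleanup helpers could be invoked from a scheduled job. The import paths assume the repo's top-level tasks package, and the 30-day retention window is only an assumed example, not a value taken from this change:

    import arrow

    from tasks.clean_alias_audit_log import cleanup_alias_audit_log
    from tasks.clean_user_audit_log import cleanup_user_audit_log

    # assumed retention cutoff; pick whatever the deployment needs
    oldest_allowed = arrow.now().shift(days=-30)
    cleanup_alias_audit_log(oldest_allowed)
    cleanup_user_audit_log(oldest_allowed)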
@@ -8,8 +8,10 @@
         <tr>
           <th scope="col">User ID</th>
           <th scope="col">Email</th>
+          <th scope="col">Verified</th>
           <th scope="col">Status</th>
           <th scope="col">Paid</th>
+          <th scope="col">Premium</th>
           <th>Subscription</th>
           <th>Created At</th>
           <th>Updated At</th>
@@ -19,7 +21,15 @@
       <tbody>
         <tr>
           <td>{{ user.id }}</td>
-          <td><a href="?email={{ user.email }}">{{ user.email }}</a></td>
+          <td>
+            <a href="?email={{ user.email }}">{{ user.email }}</a>
+          </td>
+          {% if user.activated %}
+            <td class="text-success">Activated</td>
+          {% else %}
+            <td class="text-warning">Pending</td>
+          {% endif %}
           {% if user.disabled %}
             <td class="text-danger">Disabled</td>
@@ -27,12 +37,15 @@
             <td class="text-success">Enabled</td>
           {% endif %}
           <td>{{ "yes" if user.is_paid() else "No" }}</td>
+          <td>{{ "yes" if user.is_premium() else "No" }}</td>
           <td>{{ user.get_active_subscription() }}</td>
           <td>{{ user.created_at }}</td>
           <td>{{ user.updated_at }}</td>
           {% if pu %}
-            <td><a href="?email={{ pu.partner_email }}">{{ pu.partner_email }}</a></td>
+            <td>
+              <a href="?email={{ pu.partner_email }}">{{ pu.partner_email }}</a>
+            </td>
           {% else %}
             <td>No</td>
           {% endif %}
@@ -40,9 +53,9 @@
       </tbody>
     </table>
   {%- endmacro %}
-  {% macro list_mailboxes(mbox_count, mboxes) %}
+  {% macro list_mailboxes(message, mbox_count, mboxes) %}
     <h4>
-      {{ mbox_count }} Mailboxes found.
+      {{ mbox_count }} {{ message }}.
      {% if mbox_count>10 %}Showing only the last 10.{% endif %}
     </h4>
     <table class="table">
@@ -56,14 +69,13 @@
       </thead>
       <tbody>
         {% for mailbox in mboxes %}
           <tr>
             <td>{{ mailbox.id }}</td>
-            <td><a href="?email={{mailbox.email}}">{{mailbox.email}}</a></td>
-            <td>{{ "Yes" if mailbox.verified else "No" }}</td>
             <td>
-              {{ mailbox.created_at }}
+              <a href="?email={{ mailbox.email }}">{{ mailbox.email }}</a>
             </td>
+            <td>{{ "Yes" if mailbox.verified else "No" }}</td>
+            <td>{{ mailbox.created_at }}</td>
           </tr>
         {% endfor %}
       </tbody>
@@ -77,26 +89,21 @@
     <table class="table">
       <thead>
         <tr>
-          <th>
-            Alias ID
-          </th>
-          <th>
-            Email
-          </th>
-          <th>
-            Verified
-          </th>
-          <th>
-            Created At
-          </th>
+          <th>Alias ID</th>
+          <th>Email</th>
+          <th>Enabled</th>
+          <th>Created At</th>
         </tr>
       </thead>
       <tbody>
         {% for alias in aliases %}
           <tr>
             <td>{{ alias.id }}</td>
-            <td><a href="?email={{alias.email}}">{{alias.email}}</a></td>
-            <td>{{ "Yes" if alias.verified else "No" }}</td>
+            <td>
+              <a href="?email={{ alias.email }}">{{ alias.email }}</a>
+            </td>
+            <td>{{ "Yes" if alias.enabled else "No" }}</td>
             <td>{{ alias.created_at }}</td>
           </tr>
         {% endfor %}
@@ -126,7 +133,8 @@
   {%- endmacro %}
   {% macro show_domain_deleted_alias(dom_deleted_alias) -%}
     <h4>
-      Domain Deleted Alias {{ dom_deleted_alias.email }} with ID {{ dom_deleted_alias.id }} for domain {{ dom_deleted_alias.domain.domain }}
+      Domain Deleted Alias {{ dom_deleted_alias.email }} with ID {{ dom_deleted_alias.id }} for
+      domain {{ dom_deleted_alias.domain.domain }}
     </h4>
     <table class="table">
       <thead>
@@ -153,6 +161,62 @@
     </table>
     {{ show_user(data.domain_deleted_alias.domain.user) }}
   {%- endmacro %}
+  {% macro list_alias_audit_log(alias_audit_log) %}
+    <h4>Alias Audit Log</h4>
+    <table class="table">
+      <thead>
+        <tr>
+          <th>User ID</th>
+          <th>Alias ID</th>
+          <th>Alias Email</th>
+          <th>Action</th>
+          <th>Message</th>
+          <th>Time</th>
+        </tr>
+      </thead>
+      <tbody>
+        {% for entry in alias_audit_log %}
+          <tr>
+            <td>{{ entry.user_id }}</td>
+            <td>{{ entry.alias_id }}</td>
+            <td>
+              <a href="?email={{ entry.alias_email }}">{{ entry.alias_email }}</a>
+            </td>
+            <td>{{ entry.action }}</td>
+            <td>{{ entry.message }}</td>
+            <td>{{ entry.created_at }}</td>
+          </tr>
+        {% endfor %}
+      </tbody>
+    </table>
+  {% endmacro %}
+  {% macro list_user_audit_log(user_audit_log) %}
+    <h4>User Audit Log</h4>
+    <table class="table">
+      <thead>
+        <tr>
+          <th>User email</th>
+          <th>Action</th>
+          <th>Message</th>
+          <th>Time</th>
+        </tr>
+      </thead>
+      <tbody>
+        {% for entry in user_audit_log %}
+          <tr>
+            <td>
+              <a href="?email={{ entry.user_email }}">{{ entry.user_email }}</a>
+            </td>
+            <td>{{ entry.action }}</td>
+            <td>{{ entry.message }}</td>
+            <td>{{ entry.created_at }}</td>
+          </tr>
+        {% endfor %}
+      </tbody>
+    </table>
+  {% endmacro %}
   {% block body %}
     <div class="border border-dark border-2 mt-1 mb-2 p-3">
@@ -177,8 +241,9 @@
     <div class="border border-dark border-2 mt-1 mb-2 p-3">
       <h3 class="mb-3">Found Alias {{ data.alias.email }}</h3>
       {{ list_alias(1,[data.alias]) }}
+      {{ list_alias_audit_log(data.alias_audit_log) }}
+      {{ list_mailboxes("Mailboxes for alias", helper.alias_mailbox_count(data.alias) , helper.alias_mailboxes(data.alias)) }}
       {{ show_user(data.alias.user) }}
-      {{ list_mailboxes(helper.mailbox_count(data.alias.user) , helper.mailbox_list(data.alias.user) ) }}
     </div>
   {% endif %}
   {% if data.user %}
@@ -186,20 +251,28 @@
     <div class="border border-dark border-2 mt-1 mb-2 p-3">
       <h3 class="mb-3">Found User {{ data.user.email }}</h3>
       {{ show_user(data.user) }}
-      {{ list_mailboxes(helper.mailbox_count(data.user) , helper.mailbox_list(data.user) ) }}
+      {{ list_mailboxes("Mailboxes for user", helper.mailbox_count(data.user) , helper.mailbox_list(data.user) ) }}
       {{ list_alias(helper.alias_count(data.user) ,helper.alias_list(data.user)) }}
     </div>
   {% endif %}
+  {% if data.user_audit_log %}
+    <div class="border border-dark border-2 mt-1 mb-2 p-3">
+      <h3 class="mb-3">Audit log entries for user {{ data.query }}</h3>
+      {{ list_user_audit_log(data.user_audit_log) }}
+    </div>
+  {% endif %}
   {% if data.mailbox_count > 10 %}
     <h3>Found more than 10 mailboxes for {{ email }}. Showing the last 10</h3>
   {% elif data.mailbox_count > 0 %}
     <h3>Found {{ data.mailbox_count }} mailbox(es) for {{ email }}</h3>
   {% endif %}
   {% for mailbox in data.mailbox %}
     <div class="border border-dark mt-1 mb-2 p-3">
       <h3 class="mb-3">Found Mailbox {{ mailbox.email }}</h3>
-      {{ list_mailboxes(1, [mailbox]) }}
+      {{ list_mailboxes("Mailbox found", 1, [mailbox]) }}
       {{ show_user(mailbox.user) }}
     </div>
   {% endfor %}
@@ -208,6 +281,7 @@
     <div class="border border-dark mt-1 mb-2 p-3">
       <h3 class="mb-3">Found DeletedAlias {{ data.deleted_alias.email }}</h3>
       {{ show_deleted_alias(data.deleted_alias) }}
+      {{ list_alias_audit_log(data.deleted_alias_audit_log) }}
     </div>
   {% endif %}
   {% if data.domain_deleted_alias %}
@@ -215,6 +289,7 @@
     <div class="border border-dark mt-1 mb-2 p-3">
       <h3 class="mb-3">Found DomainDeletedAlias {{ data.domain_deleted_alias.email }}</h3>
       {{ show_domain_deleted_alias(data.domain_deleted_alias) }}
+      {{ list_alias_audit_log(data.domain_deleted_alias_audit_log) }}
     </div>
   {% endif %}
 {% endblock %}
@@ -43,7 +43,7 @@
   You can change the plan at any moment.
   <br />
   Please note that the new billing cycle starts instantly
-  i.e. you will be charged <b>immediately</b> the annual fee ($30) when switching from monthly plan or vice-versa
+  i.e. you will be charged <b>immediately</b> the annual fee ($36) when switching from monthly plan or vice-versa
   <b>without pro rata computation </b>.
   <br />
   To change the plan you can also cancel the current one and subscribe a new one <b>by the end</b> of this plan.
@@ -94,4 +94,3 @@
     </div>
   </div>
 {% endblock %}
-
@@ -91,7 +91,7 @@
       <br />
       Some domain registrars (Namecheap, CloudFlare, etc) might also use <em>@</em> for the root domain.
     </div>
-    {% for priority, email_server in EMAIL_SERVERS_WITH_PRIORITY %}
+    {% for record in expected_mx_records %}
       <div class="mb-3 p-3 dns-record">
         Record: MX
@@ -99,12 +99,12 @@
         Domain: {{ custom_domain.domain }} or
         <b>@</b>
         <br />
-        Priority: {{ priority }}
+        Priority: {{ record.priority }}
         <br />
         Target: <em data-toggle="tooltip"
            title="Click to copy"
            class="clipboard"
-           data-clipboard-text="{{ email_server }}">{{ email_server }}</em>
+           data-clipboard-text="{{ record.domain }}">{{ record.domain }}</em>
       </div>
     {% endfor %}
     <form method="post" action="#mx-form">
@@ -22,7 +22,8 @@
     <p>Alternatively you can use your Proton credentials to ensure it's you.</p>
   </div>
   <a class="btn btn-primary btn-block mt-2 proton-button"
-     href="{{ url_for('auth.proton_login', next=next) }}" style="max-width: 400px">
+     href="{{ url_for('auth.proton_login', next=next) }}"
+     style="max-width: 400px">
     <img class="mr-2" src="/static/images/proton.svg" />
     Authenticate with Proton
   </a>
@@ -38,4 +39,4 @@
   {% endif %}
   </div>
 </div>
{% endblock %}
@@ -11,7 +11,7 @@
   <div>
     <a class="buy-with-crypto"
        data-custom="{{ current_user.id }}"
-       href="{{ coinbase_url }}">Extend for 1 year - $30</a>
+       href="{{ coinbase_url }}">Extend for 1 year - $36</a>
     <script src="https://commerce.coinbase.com/v1/checkout.js?version=201807"></script>
   </div>
   <div class="mt-2">
@@ -77,6 +77,11 @@
   <div class="text-center mx-md-auto mb-8 mt-6">
     <h1>Upgrade to unlock premium features</h1>
   </div>
+  <div class="alert alert-info">
+    <span class="badge badge-success">new</span> SimpleLogin Premium now includes Proton Pass premium features.
+    <a href="https://simplelogin.io/blog/sl-premium-including-pass-plus/"
+       target="_blank">Learn more ↗</a>
+  </div>
   {% if manual_sub %}
     <div class="alert alert-info mt-0 mb-6">
|
|||||||
<div class="card-body">
|
<div class="card-body">
|
||||||
<div class="text-center">
|
<div class="text-center">
|
||||||
<div class="h3">SimpleLogin Premium</div>
|
<div class="h3">SimpleLogin Premium</div>
|
||||||
<div class="h3 my-3">$30 / year</div>
|
<div class="h3 my-3">$36 / year</div>
|
||||||
<div class="text-center mt-4 mb-6">
|
<div class="text-center mt-4 mb-6">
|
||||||
<button class="btn btn-primary btn-lg w-100"
|
<button class="btn btn-primary btn-lg w-100"
|
||||||
onclick="upgradePaddle({{ PADDLE_YEARLY_PRODUCT_ID }})">Upgrade to Premium</button>
|
onclick="upgradePaddle({{ PADDLE_YEARLY_PRODUCT_ID }})">Upgrade to Premium</button>
|
||||||
@ -471,7 +476,7 @@
|
|||||||
rel="noopener noreferrer">
|
rel="noopener noreferrer">
|
||||||
Upgrade to Premium - cryptocurrency
|
Upgrade to Premium - cryptocurrency
|
||||||
<br />
|
<br />
|
||||||
$30 / year
|
$36 / year
|
||||||
<i class="fe fe-external-link"></i>
|
<i class="fe fe-external-link"></i>
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
|
@@ -511,6 +511,19 @@ def test_create_contact_route_invalid_alias(flask_client):
     assert r.status_code == 403


+def test_create_contact_route_non_existing_alias(flask_client):
+    user, api_key = get_new_user_and_api_key()
+    Session.commit()
+
+    r = flask_client.post(
+        url_for("api.create_contact_route", alias_id=99999999),
+        headers={"Authentication": api_key.code},
+        json={"contact": "First Last <first@example.com>"},
+    )
+
+    assert r.status_code == 403
+
+
 def test_create_contact_route_free_users(flask_client):
     user, api_key = get_new_user_and_api_key()

@@ -536,7 +549,7 @@ def test_create_contact_route_free_users(flask_client):
     assert r.status_code == 201

     # End trial and disallow for new free users. Config should allow it
-    user.flags = User.FLAG_FREE_DISABLE_CREATE_ALIAS
+    user.flags = User.FLAG_DISABLE_CREATE_CONTACTS
     Session.commit()
     r = flask_client.post(
         url_for("api.create_contact_route", alias_id=alias.id),
@@ -5,7 +5,7 @@ from app.models import Mailbox
 from tests.utils import login


-def test_create_mailbox(flask_client):
+def test_create_mailbox_valid(flask_client):
     login(flask_client)

     r = flask_client.post(
@@ -21,10 +21,34 @@ def test_create_mailbox(flask_client):
     assert r.json["default"] is False
     assert r.json["nb_alias"] == 0

-    # invalid email address
+
+def test_create_mailbox_invalid_email(flask_client):
+    login(flask_client)
     r = flask_client.post(
         "/api/mailboxes",
-        json={"email": "gmail.com"},
+        json={"email": "gmail.com"},  # not an email address
+    )
+
+    assert r.status_code == 400
+    assert r.json == {"error": "Invalid email"}
+
+
+def test_create_mailbox_empty_payload(flask_client):
+    login(flask_client)
+    r = flask_client.post(
+        "/api/mailboxes",
+        json={},
+    )
+
+    assert r.status_code == 400
+    assert r.json == {"error": "Invalid email"}
+
+
+def test_create_mailbox_empty_email(flask_client):
+    login(flask_client)
+    r = flask_client.post(
+        "/api/mailboxes",
+        json={"email": ""},
     )

     assert r.status_code == 400
@@ -4,7 +4,7 @@ from app.models import (
     Alias,
     Contact,
 )
-from tests.utils import login
+from tests.utils import login, random_email


 def test_add_contact_success(flask_client):
@@ -13,26 +13,28 @@ def test_add_contact_success(flask_client):

     assert Contact.filter_by(user_id=user.id).count() == 0

+    email = random_email()
     # <<< Create a new contact >>>
     flask_client.post(
         url_for("dashboard.alias_contact_manager", alias_id=alias.id),
         data={
             "form-name": "create",
-            "email": "abcd@gmail.com",
+            "email": email,
         },
         follow_redirects=True,
     )
     # a new contact is added
     assert Contact.filter_by(user_id=user.id).count() == 1
     contact = Contact.filter_by(user_id=user.id).first()
-    assert contact.website_email == "abcd@gmail.com"
+    assert contact.website_email == email

     # <<< Create a new contact using a full email format >>>
+    email = random_email()
     flask_client.post(
         url_for("dashboard.alias_contact_manager", alias_id=alias.id),
         data={
             "form-name": "create",
-            "email": "First Last <another@gmail.com>",
+            "email": f"First Last <{email}>",
         },
         follow_redirects=True,
     )
@@ -41,7 +43,7 @@ def test_add_contact_success(flask_client):
     contact = (
         Contact.filter_by(user_id=user.id).filter(Contact.id != contact.id).first()
     )
-    assert contact.website_email == "another@gmail.com"
+    assert contact.website_email == email
     assert contact.name == "First Last"

     # <<< Create a new contact with invalid email address >>>
@@ -1,38 +1,72 @@
 import app.alias_utils
+from app import config
 from app.db import Session
+from app.events.event_dispatcher import GlobalDispatcher
 from app.models import (
     Alias,
     Mailbox,
-    User,
     AliasMailbox,
 )
+from tests.events.event_test_utils import (
+    OnMemoryDispatcher,
+    _get_event_from_string,
+    _create_linked_user,
+)
 from tests.utils import login

+on_memory_dispatcher = OnMemoryDispatcher()
+
+
+def setup_module():
+    GlobalDispatcher.set_dispatcher(on_memory_dispatcher)
+    config.EVENT_WEBHOOK = "http://test"
+
+
+def teardown_module():
+    GlobalDispatcher.set_dispatcher(None)
+    config.EVENT_WEBHOOK = None
+

 def test_alias_transfer(flask_client):
-    user = login(flask_client)
-    mb = Mailbox.create(user_id=user.id, email="mb@gmail.com", commit=True)
+    (source_user, source_user_pu) = _create_linked_user()
+    source_user = login(flask_client, source_user)
+    mb = Mailbox.create(user_id=source_user.id, email="mb@gmail.com", commit=True)

-    alias = Alias.create_new_random(user)
+    alias = Alias.create_new_random(source_user)
     Session.commit()

     AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id, commit=True)

-    new_user = User.create(
-        email="hey@example.com",
-        password="password",
-        activated=True,
-        commit=True,
-    )
+    (target_user, target_user_pu) = _create_linked_user()

     Mailbox.create(
-        user_id=new_user.id, email="hey2@example.com", verified=True, commit=True
+        user_id=target_user.id, email="hey2@example.com", verified=True, commit=True
     )

-    app.alias_utils.transfer_alias(alias, new_user, new_user.mailboxes())
+    on_memory_dispatcher.clear()
+    app.alias_utils.transfer_alias(alias, target_user, target_user.mailboxes())

     # refresh from db
     alias = Alias.get(alias.id)
-    assert alias.user == new_user
-    assert set(alias.mailboxes) == set(new_user.mailboxes())
+    assert alias.user == target_user
+    assert set(alias.mailboxes) == set(target_user.mailboxes())
     assert len(alias.mailboxes) == 2
+
+    # Check events
+    assert len(on_memory_dispatcher.memory) == 2
+    # 1st delete event
+    event_data = on_memory_dispatcher.memory[0]
+    event_content = _get_event_from_string(event_data, source_user, source_user_pu)
+    assert event_content.alias_deleted is not None
+    alias_deleted = event_content.alias_deleted
+    assert alias_deleted.id == alias.id
+    assert alias_deleted.email == alias.email
+    # 2nd create event
+    event_data = on_memory_dispatcher.memory[1]
+    event_content = _get_event_from_string(event_data, target_user, target_user_pu)
+    assert event_content.alias_created is not None
+    alias_created = event_content.alias_created
+    assert alias.id == alias_created.id
+    assert alias.email == alias_created.email
+    assert alias.note or "" == alias_created.note
+    assert alias.enabled == alias_created.enabled
@@ -1,4 +1,5 @@
 from app.events.event_dispatcher import Dispatcher
+from app.events.generated import event_pb2
 from app.models import PartnerUser, User
 from app.proton.utils import get_proton_partner
 from tests.utils import create_new_user, random_token
@@ -30,3 +31,14 @@ def _create_linked_user() -> Tuple[User, PartnerUser]:
     )

     return user, partner_user
+
+
+def _get_event_from_string(
+    data: str, user: User, pu: PartnerUser
+) -> event_pb2.EventContent:
+    event = event_pb2.Event()
+    event.ParseFromString(data)
+    assert user.id == event.user_id
+    assert pu.external_user_id == event.external_user_id
+    assert pu.partner_id == event.partner_id
+    return event.content
app/tests/events/test_dead_letter_event_source.py (new file, 54 lines)

import arrow

from app.db import Session
from app.models import SyncEvent
from events.event_source import DeadLetterEventSource, _DEAD_LETTER_THRESHOLD_MINUTES


class EventCounter:
    def __init__(self):
        self.processed_events = 0

    def on_event(self, event: SyncEvent):
        self.processed_events += 1


def setup_function(func):
    Session.query(SyncEvent).delete()


def test_dead_letter_does_not_take_untaken_events():
    source = DeadLetterEventSource(1)
    counter = EventCounter()
    threshold_time = arrow.utcnow().shift(minutes=-(_DEAD_LETTER_THRESHOLD_MINUTES) + 1)
    SyncEvent.create(
        content="test".encode("utf-8"), created_at=threshold_time, flush=True
    )
    SyncEvent.create(
        content="test".encode("utf-8"), taken_time=threshold_time, flush=True
    )
    events_processed = source.execute_loop(on_event=counter.on_event)
    assert len(events_processed) == 0
    assert counter.processed_events == 0


def test_dead_letter_takes_untaken_events_created_older_than_threshold():
    source = DeadLetterEventSource(1)
    counter = EventCounter()
    old_create = arrow.utcnow().shift(minutes=-_DEAD_LETTER_THRESHOLD_MINUTES - 1)
    SyncEvent.create(content="test".encode("utf-8"), created_at=old_create, flush=True)
    events_processed = source.execute_loop(on_event=counter.on_event)
    assert len(events_processed) == 1
    assert events_processed[0].taken_time > old_create
    assert counter.processed_events == 1


def test_dead_letter_takes_taken_events_created_older_than_threshold():
    source = DeadLetterEventSource(1)
    counter = EventCounter()
    old_taken = arrow.utcnow().shift(minutes=-_DEAD_LETTER_THRESHOLD_MINUTES - 1)
    SyncEvent.create(content="test".encode("utf-8"), taken_time=old_taken, flush=True)
    events_processed = source.execute_loop(on_event=counter.on_event)
    assert len(events_processed) == 1
    assert events_processed[0].taken_time > old_taken
    assert counter.processed_events == 1
@@ -1,12 +1,14 @@
+import arrow
+
 from app import config, alias_utils
 from app.db import Session
 from app.events.event_dispatcher import GlobalDispatcher
-from app.events.generated import event_pb2
-from app.models import Alias, User, PartnerUser
+from app.models import Alias, SyncEvent
 from tests.utils import random_token
 from .event_test_utils import (
     OnMemoryDispatcher,
     _create_linked_user,
+    _get_event_from_string,
 )

 on_memory_dispatcher = OnMemoryDispatcher()
@@ -26,15 +28,31 @@ def setup_function(func):
     on_memory_dispatcher.clear()


-def _get_event_from_string(
-    data: str, user: User, pu: PartnerUser
-) -> event_pb2.EventContent:
-    event = event_pb2.Event()
-    event.ParseFromString(data)
-    assert user.id == event.user_id
-    assert pu.external_user_id == event.external_user_id
-    assert pu.partner_id == event.partner_id
-    return event.content
+def test_event_taken_updates():
+    event = SyncEvent.create(content="test".encode("utf-8"), flush=True)
+    assert event.taken_time is None
+    assert event.mark_as_taken()
+    assert event.taken_time is not None
+
+
+def test_event_mark_as_taken_does_nothing_for_taken_events():
+    now = arrow.utcnow()
+    event = SyncEvent.create(content="test".encode("utf-8"), taken_time=now, flush=True)
+    assert not event.mark_as_taken()
+
+
+def test_event_mark_as_taken_does_nothing_for_not_before_events():
+    now = arrow.utcnow()
+    event = SyncEvent.create(content="test".encode("utf-8"), taken_time=now, flush=True)
+    older_than = now.shift(minutes=-1)
+    assert not event.mark_as_taken(allow_taken_older_than=older_than)
+
+
+def test_event_mark_as_taken_works_for_before_events():
+    now = arrow.utcnow()
+    event = SyncEvent.create(content="test".encode("utf-8"), taken_time=now, flush=True)
+    older_than = now.shift(minutes=+1)
+    assert event.mark_as_taken(allow_taken_older_than=older_than)


 def test_fire_event_on_alias_creation():
@@ -90,7 +108,7 @@ def test_fire_event_on_alias_status_change():
     alias = Alias.create_new_random(user)
     Session.flush()
     on_memory_dispatcher.clear()
-    alias_utils.change_alias_status(alias, True)
+    alias_utils.change_alias_status(alias, enabled=True)
     assert len(on_memory_dispatcher.memory) == 1
     event_data = on_memory_dispatcher.memory[0]
     event_content = _get_event_from_string(event_data, user, pu)
app/tests/events/test_subscription_webhook.py (new file, 109 lines)

import arrow

from app import config
from app.events.event_dispatcher import GlobalDispatcher
from app.events.generated.event_pb2 import UserPlanChanged
from app.models import (
    Subscription,
    AppleSubscription,
    CoinbaseSubscription,
    ManualSubscription,
    User,
    PartnerUser,
)

from .event_test_utils import (
    OnMemoryDispatcher,
    _create_linked_user,
    _get_event_from_string,
)
from tests.utils import random_token

from app.subscription_webhook import execute_subscription_webhook


on_memory_dispatcher = OnMemoryDispatcher()


def setup_module():
    GlobalDispatcher.set_dispatcher(on_memory_dispatcher)
    config.EVENT_WEBHOOK = "http://test"


def teardown_module():
    GlobalDispatcher.set_dispatcher(None)
    config.EVENT_WEBHOOK = None


def setup_function(func):
    on_memory_dispatcher.clear()


def check_event(user: User, pu: PartnerUser) -> UserPlanChanged:
    assert len(on_memory_dispatcher.memory) == 1
    event_data = on_memory_dispatcher.memory[0]
    event_content = _get_event_from_string(event_data, user, pu)
    assert event_content.user_plan_change is not None
    plan_change = event_content.user_plan_change
    return plan_change


def test_webhook_with_trial():
    (user, pu) = _create_linked_user()
    execute_subscription_webhook(user)
    assert check_event(user, pu).plan_end_time == 0


def test_webhook_with_subscription():
    (user, pu) = _create_linked_user()
    end_at = arrow.utcnow().shift(days=1).replace(hour=0, minute=0, second=0)
    Subscription.create(
        user_id=user.id,
        cancel_url="",
        update_url="",
        subscription_id=random_token(10),
        event_time=arrow.now(),
        next_bill_date=end_at.date(),
        plan="yearly",
        flush=True,
    )
    execute_subscription_webhook(user)
    assert check_event(user, pu).plan_end_time == end_at.timestamp


def test_webhook_with_apple_subscription():
    (user, pu) = _create_linked_user()
    end_at = arrow.utcnow().shift(days=2).replace(hour=0, minute=0, second=0)
    AppleSubscription.create(
        user_id=user.id,
        receipt_data=arrow.now().date().strftime("%Y-%m-%d"),
        expires_date=end_at.date().strftime("%Y-%m-%d"),
        original_transaction_id=random_token(10),
        plan="yearly",
        product_id="",
        flush=True,
    )
    execute_subscription_webhook(user)
    assert check_event(user, pu).plan_end_time == end_at.timestamp


def test_webhook_with_coinbase_subscription():
    (user, pu) = _create_linked_user()
    end_at = arrow.utcnow().shift(days=3).replace(hour=0, minute=0, second=0)
    CoinbaseSubscription.create(
        user_id=user.id, end_at=end_at.date().strftime("%Y-%m-%d"), flush=True
    )

    execute_subscription_webhook(user)
    assert check_event(user, pu).plan_end_time == end_at.timestamp


def test_webhook_with_manual_subscription():
    (user, pu) = _create_linked_user()
    end_at = arrow.utcnow().shift(days=3).replace(hour=0, minute=0, second=0)
    ManualSubscription.create(
        user_id=user.id, end_at=end_at.date().strftime("%Y-%m-%d"), flush=True
    )

    execute_subscription_webhook(user)
    assert check_event(user, pu).plan_end_time == end_at.timestamp
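One small observation on the assertions above: `end_at.timestamp` is used as an attribute, which matches arrow versions where `timestamp` is a property (pre-1.0). If the project were on arrow 1.x, where it is a method, an equivalent check would look like the following hedged sketch:

    # assumed variant for arrow >= 1.0, where .timestamp() is a method
    assert check_event(user, pu).plan_end_time == end_at.timestamp()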
app/tests/jobs/test_send_event_to_webhook.py (new file, 40 lines)

import arrow

from app import config
from app.events.generated.event_pb2 import EventContent, AliasDeleted
from app.jobs.send_event_job import SendEventToWebhookJob
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
from events.event_sink import ConsoleEventSink
from tests.utils import create_new_user, random_token


def test_serialize_and_deserialize_job():
    user = create_new_user()
    alias_id = 34
    alias_email = "a@b.c"
    event = EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email))
    run_at = arrow.now().shift(hours=10)
    db_job = SendEventToWebhookJob(user, event).store_job_in_db(run_at=run_at)
    assert db_job.run_at == run_at
    assert db_job.name == config.JOB_SEND_EVENT_TO_WEBHOOK
    job = SendEventToWebhookJob.create_from_job(db_job)
    assert job._user.id == user.id
    assert job._event.alias_deleted.id == alias_id
    assert job._event.alias_deleted.email == alias_email


def test_send_event_to_webhook():
    user = create_new_user()
    PartnerUser.create(
        user_id=user.id,
        partner_id=get_proton_partner().id,
        external_user_id=random_token(10),
        flush=True,
    )
    alias_id = 34
    alias_email = "a@b.c"
    event = EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email))
    job = SendEventToWebhookJob(user, event)
    sink = ConsoleEventSink()
    assert job.run(sink)
@@ -1,5 +1,5 @@
 from app.db import Session
-from app.models import Alias, Mailbox, AliasMailbox, User
+from app.models import Alias, Mailbox, AliasMailbox, User, CustomDomain
 from tests.utils import create_new_user, random_email


@@ -29,3 +29,23 @@ def test_alias_create_from_partner_flags_also_the_user():
         flush=True,
     )
     assert alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER > 0
+
+
+def test_alias_create_from_partner_domain_flags_the_alias():
+    user = create_new_user()
+    domain = CustomDomain.create(
+        domain=random_email(),
+        verified=True,
+        user_id=user.id,
+        partner_id=1,
+    )
+    Session.flush()
+    email = random_email()
+    alias = Alias.create(
+        user_id=user.id,
+        email=email,
+        mailbox_id=user.default_mailbox_id,
+        custom_domain_id=domain.id,
+        flush=True,
+    )
+    assert alias.flags & Alias.FLAG_PARTNER_CREATED > 0
@@ -25,15 +25,17 @@ class MockProtonClient(ProtonClient):
         return self.user


-def check_initial_sync_job(user: User):
+def check_initial_sync_job(user: User, expected: bool):
+    found = False
     for job in Job.yield_per_query(10).filter_by(
         name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
         state=JobState.ready.value,
     ):
         if job.payload.get("user_id") == user.id:
+            found = True
             Job.delete(job.id)
-            return
-    assert False
+            break
+    assert expected == found


 def test_proton_callback_handler_unexistant_sl_user():
@@ -69,10 +71,9 @@ def test_proton_callback_handler_unexistant_sl_user():
     )
     assert partner_user is not None
     assert partner_user.external_user_id == external_id
-    check_initial_sync_job(res.user)


-def test_proton_callback_handler_existant_sl_user():
+def test_proton_callback_handler_existing_sl_user():
     email = random_email()
     sl_user = User.create(email, commit=True)

@@ -98,7 +99,43 @@ def test_proton_callback_handler_existant_sl_user():
     sa = PartnerUser.get_by(user_id=sl_user.id, partner_id=get_proton_partner().id)
     assert sa is not None
     assert sa.partner_email == user.email
-    check_initial_sync_job(res.user)
+    check_initial_sync_job(res.user, True)
+
+
+def test_proton_callback_handler_linked_sl_user():
+    email = random_email()
+    external_id = random_string()
+    sl_user = User.create(email, commit=True)
+    PartnerUser.create(
+        user_id=sl_user.id,
+        partner_id=get_proton_partner().id,
+        external_user_id=external_id,
+        partner_email=email,
+        commit=True,
+    )
+
+    user = UserInformation(
+        email=email,
+        name=random_string(),
+        id=external_id,
+        plan=SLPlan(type=SLPlanType.Premium, expiration=Arrow.utcnow().shift(hours=2)),
+    )
+    handler = ProtonCallbackHandler(MockProtonClient(user=user))
+    res = handler.handle_login(get_proton_partner())
+
+    assert res.user is not None
+    assert res.user.id == sl_user.id
+    # Ensure the user is not marked as created from partner
+    assert User.FLAG_CREATED_FROM_PARTNER != (
+        res.user.flags & User.FLAG_CREATED_FROM_PARTNER
+    )
+    assert res.user.notification is True
+    assert res.user.trial_end is not None
+
+    sa = PartnerUser.get_by(user_id=sl_user.id, partner_id=get_proton_partner().id)
+    assert sa is not None
+    assert sa.partner_email == user.email
+    check_initial_sync_job(res.user, False)


 def test_proton_callback_handler_none_user_login():
@ -1,3 +1,5 @@
|
|||||||
|
from typing import List
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from arrow import Arrow
|
from arrow import Arrow
|
||||||
|
|
||||||
@ -16,8 +18,9 @@ from app.account_linking import (
|
|||||||
)
|
)
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.errors import AccountAlreadyLinkedToAnotherPartnerException
|
from app.errors import AccountAlreadyLinkedToAnotherPartnerException
|
||||||
from app.models import Partner, PartnerUser, User
|
from app.models import Partner, PartnerUser, User, UserAuditLog
|
||||||
from app.proton.utils import get_proton_partner
|
from app.proton.utils import get_proton_partner
|
||||||
|
from app.user_audit_log_utils import UserAuditLogAction
|
||||||
from app.utils import random_string, canonicalize_email
|
from app.utils import random_string, canonicalize_email
|
||||||
from tests.utils import random_email
|
from tests.utils import random_email
|
||||||
|
|
||||||
@ -91,6 +94,13 @@ def test_login_case_from_partner():
|
|||||||
)
|
)
|
||||||
assert res.user.activated is True
|
assert res.user.activated is True
|
||||||
|
|
||||||
|
audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
|
||||||
|
user_id=res.user.id,
|
||||||
|
action=UserAuditLogAction.LinkAccount.value,
|
||||||
|
).all()
|
||||||
|
assert len(audit_logs) == 1
|
||||||
|
assert audit_logs[0].user_id == res.user.id
|
||||||
|
|
||||||
|
|
||||||
def test_login_case_from_partner_with_uppercase_email():
|
def test_login_case_from_partner_with_uppercase_email():
|
||||||
partner = get_proton_partner()
|
partner = get_proton_partner()
|
||||||
@@ -125,6 +135,29 @@ def test_login_case_from_web():
     assert 0 == (res.user.flags & User.FLAG_CREATED_FROM_PARTNER)
     assert res.user.activated is True
+
+    audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=res.user.id,
+        action=UserAuditLogAction.LinkAccount.value,
+    ).all()
+    assert len(audit_logs) == 1
+    assert audit_logs[0].user_id == res.user.id
+    assert audit_logs[0].action == UserAuditLogAction.LinkAccount.value
+
+
+def test_new_user_strategy_create_missing_link():
+    email = random_email()
+    user = User.create(email, commit=True)
+    nus = NewUserStrategy(
+        link_request=random_link_request(
+            email=user.email, external_user_id=random_string(), from_partner=False
+        ),
+        user=None,
+        partner=get_proton_partner(),
+    )
+    result = nus.create_missing_link(user.email)
+    assert result.user.id == user.id
+    assert result.strategy == ExistingUnlinkedUserStrategy.__name__
 
 
 def test_get_strategy_existing_sl_user():
     email = random_email()
@@ -205,6 +238,13 @@ def test_link_account_with_proton_account_same_address(flask_client):
     )
     assert partner_user.partner_id == get_proton_partner().id
     assert partner_user.external_user_id == partner_user_id
+    audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=res.user.id,
+        action=UserAuditLogAction.LinkAccount.value,
+    ).all()
+    assert len(audit_logs) == 1
+    assert audit_logs[0].user_id == res.user.id
+    assert audit_logs[0].action == UserAuditLogAction.LinkAccount.value
 
 
 def test_link_account_with_proton_account_different_address(flask_client):
@@ -229,6 +269,14 @@ def test_link_account_with_proton_account_different_address(flask_client):
     assert partner_user.partner_id == get_proton_partner().id
     assert partner_user.external_user_id == partner_user_id
 
+    audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=res.user.id,
+        action=UserAuditLogAction.LinkAccount.value,
+    ).all()
+    assert len(audit_logs) == 1
+    assert audit_logs[0].user_id == res.user.id
+    assert audit_logs[0].action == UserAuditLogAction.LinkAccount.value
+
 
 def test_link_account_with_proton_account_same_address_but_linked_to_other_user(
     flask_client,
@@ -248,22 +296,54 @@ def test_link_account_with_proton_account_same_address_but_linked_to_other_user(
         partner_user_id, email=random_email()
     ) # User already linked with the proton account
+
+    # START Ensure sl_user_2 has a partner_user with the right data
+    partner_user = PartnerUser.get_by(
+        partner_id=get_proton_partner().id, user_id=sl_user_2.id
+    )
+    assert partner_user is not None
+    assert partner_user.partner_id == get_proton_partner().id
+    assert partner_user.external_user_id == partner_user_id
+    assert partner_user.partner_email == sl_user_2.email
+    assert partner_user.user_id == sl_user_2.id
+    # END Ensure sl_user_2 has a partner_user with the right data
+
+    # Proceed to link sl_user_1
     res = process_link_case(link_request, sl_user_1, get_proton_partner())
 
+    # Check that the result is linking sl_user_1
     assert res.user.id == sl_user_1.id
     assert res.user.email == partner_email
     assert res.strategy == "Link"
 
+    # Ensure partner_user for sl_user_1 exists
     partner_user = PartnerUser.get_by(
         partner_id=get_proton_partner().id, user_id=sl_user_1.id
     )
     assert partner_user.partner_id == get_proton_partner().id
     assert partner_user.external_user_id == partner_user_id
 
+    # Ensure partner_user for sl_user_2 does not exist anymore
     partner_user = PartnerUser.get_by(
         partner_id=get_proton_partner().id, user_id=sl_user_2.id
     )
     assert partner_user is None
+
+    # Ensure audit logs for sl_user_1 show the link action
+    sl_user_1_audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=sl_user_1.id,
+        action=UserAuditLogAction.LinkAccount.value,
+    ).all()
+    assert len(sl_user_1_audit_logs) == 1
+    assert sl_user_1_audit_logs[0].user_id == sl_user_1.id
+
+    # Ensure audit logs for sl_user_2 show the unlink action
+    sl_user_2_audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=sl_user_2.id,
+        action=UserAuditLogAction.UnlinkAccount.value,
+    ).all()
+    assert len(sl_user_2_audit_logs) == 1
+    assert sl_user_2_audit_logs[0].user_id == sl_user_2.id
 
 
 def test_link_account_with_proton_account_different_address_and_linked_to_other_user(
     flask_client,
@@ -300,6 +380,22 @@ def test_link_account_with_proton_account_different_address_and_linked_to_other_
     )
     assert partner_user_2 is None
+
+    # Ensure audit logs for sl_user_1 show the link action
+    sl_user_1_audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=sl_user_1.id,
+        action=UserAuditLogAction.LinkAccount.value,
+    ).all()
+    assert len(sl_user_1_audit_logs) == 1
+    assert sl_user_1_audit_logs[0].user_id == sl_user_1.id
+
+    # Ensure audit logs for sl_user_2 show the unlink action
+    sl_user_2_audit_logs: List[UserAuditLog] = UserAuditLog.filter_by(
+        user_id=sl_user_2.id,
+        action=UserAuditLogAction.UnlinkAccount.value,
+    ).all()
+    assert len(sl_user_2_audit_logs) == 1
+    assert sl_user_2_audit_logs[0].user_id == sl_user_2.id
 
 
 def test_cannot_create_instance_of_base_strategy():
     with pytest.raises(Exception):
Some files were not shown because too many files have changed in this diff.