Compare commits

...

37 Commits

SHA1 Message Date
f1110506c0 4.39.3 2024-02-27 12:00:07 +00:00
f5bce7d7ff 4.39.2 2024-02-23 12:00:07 +00:00
75f45d9365 4.39.1 2024-02-20 12:00:07 +00:00
ead425e0c2 4.38.3 2024-02-14 12:00:07 +00:00
6c910d62c5 4.38.2 2024-02-06 12:00:07 +00:00
99ffd1ec0c 4.38.0 2024-02-03 16:55:23 +00:00
eda940f8b2 4.37.2 2024-01-27 12:00:07 +00:00
1dad582523 4.37.1 2024-01-25 12:00:08 +00:00
e516266a27 4.37.0 2024-01-18 12:00:07 +00:00
850fc95477 4.36.8 2023-12-28 12:00:07 +00:00
d172825900 4.36.7 2023-12-21 12:00:09 +00:00
026865e5bf 4.36.6 2023-12-17 14:56:57 +00:00
add94ef2a2 4.36.5 2023-11-30 12:00:09 +00:00
1081400948 4.36.4 2023-11-22 12:00:09 +00:00
5776128905 4.36.3 2023-11-08 12:00:06 +00:00
d661860f4c 4.35.6 2023-11-07 12:00:06 +00:00
0a52e32972 4.35.3 2023-10-05 12:00:06 +01:00
703dcbd0eb 4.35.2 2023-10-03 12:00:06 +01:00
ce7ed69547 4.35.1 2023-10-02 12:00:06 +01:00
4f5564df16 4.35.0 2023-09-29 12:00:06 +01:00
2fee569131 4.34.4 2023-08-31 12:00:06 +01:00
7ea45d6f5d 4.34.3 2023-08-29 20:20:00 +01:00
6d24db50bd 4.34.2 2023-08-25 12:00:05 +01:00
88f270c6a1 4.34.1 2023-08-09 12:00:05 +01:00
0962b1cf29 Update .drone.yml 2023-08-06 17:56:31 +00:00
6051d72691 4.33.3 2023-08-06 17:51:04 +01:00
c31a75a9ef Update README.md 2023-08-06 16:04:57 +00:00
ef289385ff Update README.md 2023-08-06 16:04:47 +00:00
9b12a2ad33 Update README.md 2023-08-06 16:04:41 +00:00
8eb19d88f3 Remove provenance [CI SKIP] 2023-08-06 16:01:04 +00:00
e36e9d3077 4.32.4 2023-08-02 16:49:54 +01:00
b2430cbc5b 4.32.1 2023-07-12 11:00:04 +00:00
1258115397 4.32.0 2023-07-11 11:00:05 +00:00
38c134d903 4.31.0 2023-06-30 11:00:06 +00:00
cd77e4cc2d 4.30.1 2023-06-28 11:00:03 +00:00
87aedf3207 4.30.0 2023-06-27 11:00:04 +00:00
3523c9fc15 4.29.4 2023-06-07 11:00:05 +00:00
158 changed files with 3141 additions and 371456 deletions

View File

@ -17,6 +17,7 @@ steps:
image: thegeeklab/drone-docker-buildx
privileged: true
settings:
provenance: false
dockerfile: app/Dockerfile
context: app
registry: git.mrmeeb.stream
@ -35,6 +36,7 @@ steps:
status:
- success
- failure
- killed
settings:
webhook:
from_secret: slack_webhook

View File

@ -1,9 +1,7 @@
# Simple Login
# SimpleLogin
[![Build Status](https://drone.mrmeeb.stream/api/badges/MrMeeb/simple-login/status.svg?ref=refs/heads/main)](https://drone.mrmeeb.stream/MrMeeb/simple-login)
This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks the simplelogin/app GitHub repo once a day, and builds the latest release automatically if it is newer than the currently built version.
This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks once a day, and builds the latest one automatically if it is newer than the currently built version.
I did this to simplify deployment of my self-hosted SimpleLogin instance. SimpleLogin does not provide an up-to-date version for self-hosting, leaving you with the options of running a very outdated version with no app support, a beta version, or their `simplelogin/app-ci` version. This last option works well if you use an x86 machine, but I'm running SimpleLogin on an ARM machine. Since I don't want to have to build containers on the machine itself, this repo handles that for me.
This exists to simplify deployment of SimpleLogin in a self-hosted capacity, while also allowing the use of the latest version; SimpleLogin does not provide an up-to-date version for this use.
The image is built for amd64 and arm64 devices.
As a result, this image is built for both amd64 and arm64 devices.
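
The README above describes a daily check for new upstream releases. A minimal sketch of such a check, for illustration only: the real check runs in the Drone pipeline, the GitHub releases API endpoint is the public one for `simplelogin/app`, and the version file path is hypothetical.

```python
# Illustrative sketch of a "is a newer upstream release available?" check.
import requests

UPSTREAM_RELEASES = "https://api.github.com/repos/simplelogin/app/releases/latest"
CURRENT_VERSION_FILE = "current_version.txt"  # hypothetical: where the last built tag is stored


def newer_release_available() -> bool:
    latest = requests.get(UPSTREAM_RELEASES, timeout=10).json()["tag_name"].lstrip("v")
    with open(CURRENT_VERSION_FILE) as f:
        current = f.read().strip().lstrip("v")
    # compare dotted versions numerically, e.g. "4.39.3" -> (4, 39, 3)
    return tuple(map(int, latest.split("."))) > tuple(map(int, current.split(".")))


if __name__ == "__main__":
    print("build needed" if newer_release_available() else "up to date")
```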

View File

@ -15,9 +15,15 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3.9'
python-version: '3.10'
cache: 'poetry'
- name: Install OS dependencies
if: ${{ matrix.python-version }} == '3.10'
run: |
sudo apt update
sudo apt install -y libre2-dev libpq-dev
- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: poetry install --no-interaction

View File

@ -7,18 +7,19 @@ repos:
hooks:
- id: check-yaml
- id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.3.0
hooks:
- id: djlint-jinja
files: '.*\.html'
entry: djlint --reformat
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.5
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format

View File

@ -169,6 +169,12 @@ For HTML templates, we use `djlint`. Before creating a pull request, please run
poetry run djlint --check templates
```
If some files aren't properly formatted, you can format all files with
```bash
poetry run djlint --reformat .
```
## Test sending email
[swaks](http://www.jetmore.org/john/code/swaks/) is used for sending test emails to the `email_handler`.

View File

@ -23,15 +23,15 @@ COPY poetry.lock pyproject.toml ./
# Install and setup poetry
RUN pip install -U pip \
&& apt-get update \
&& apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
&& apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
&& curl -sSL https://install.python-poetry.org | python3 - \
# Remove curl and netcat from the image
&& apt-get purge -y curl netcat \
&& apt-get purge -y curl netcat-traditional \
# Run poetry
&& poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi --no-root \
# Clear apt cache \
&& apt-get purge -y libre2-dev \
&& apt-get purge -y libre2-dev cmake ninja-build\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

View File

@ -74,7 +74,7 @@ Setting up DKIM is highly recommended to reduce the chance your emails ending up
First you need to generate a private and public key for DKIM:
```bash
openssl genrsa -out dkim.key 1024
openssl genrsa -out dkim.key -traditional 1024
openssl rsa -in dkim.key -pubout -out dkim.pub.key
```
@ -510,11 +510,14 @@ server {
server_name app.mydomain.com;
location / {
proxy_pass http://localhost:7777;
proxy_pass http://localhost:7777;
proxy_set_header Host $host;
}
}
```
Note: If `/etc/nginx/sites-enabled/default` exists, delete it or certbot will fail due to the conflict. The `simplelogin` file should be the only file in `sites-enabled`.
Reload Nginx with the command below
```bash

View File

@ -5,13 +5,15 @@ from typing import Optional
from arrow import Arrow
from newrelic import agent
from sqlalchemy import or_
from app.db import Session
from app.email_utils import send_welcome_email
from app.utils import sanitize_email
from app.utils import sanitize_email, canonicalize_email
from app.errors import (
AccountAlreadyLinkedToAnotherPartnerException,
AccountIsUsingAliasAsEmail,
AccountAlreadyLinkedToAnotherUserException,
)
from app.log import LOG
from app.models import (
@ -130,8 +132,9 @@ class ClientMergeStrategy(ABC):
class NewUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
# Will create a new SL User with a random password
canonical_email = canonicalize_email(self.link_request.email)
new_user = User.create(
email=self.link_request.email,
email=canonical_email,
name=self.link_request.name,
password=random_string(20),
activated=True,
@ -165,7 +168,8 @@ class NewUserStrategy(ClientMergeStrategy):
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
# IF it was scheduled to be deleted. Unschedule it.
self.user.delete_on = None
partner_user = ensure_partner_user_exists_for_user(
self.link_request, self.user, self.partner
)
@ -179,7 +183,7 @@ class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
raise AccountAlreadyLinkedToAnotherPartnerException()
raise AccountAlreadyLinkedToAnotherUserException()
def get_login_strategy(
@ -212,11 +216,21 @@ def process_login_case(
partner_id=partner.id, external_user_id=link_request.external_user_id
)
if partner_user is None:
canonical_email = canonicalize_email(link_request.email)
# We didn't find any SimpleLogin user registered with that partner user id
# Make sure they aren't using an alias as their link email
check_alias(link_request.email)
check_alias(canonical_email)
# Try to find it using the partner's e-mail address
user = User.get_by(email=link_request.email)
users = User.filter(
or_(User.email == link_request.email, User.email == canonical_email)
).all()
if len(users) > 1:
user = [user for user in users if user.email == canonical_email][0]
elif len(users) == 1:
user = users[0]
else:
user = None
return get_login_strategy(link_request, user, partner).process()
else:
# We found the SL user registered with that partner user id
@ -234,6 +248,8 @@ def link_user(
) -> LinkResult:
# Sanitize email just in case
link_request.email = sanitize_email(link_request.email)
# If it was scheduled to be deleted. Unschedule it.
current_user.delete_on = None
partner_user = ensure_partner_user_exists_for_user(
link_request, current_user, partner
)

View File

@ -214,6 +214,20 @@ class UserAdmin(SLModelView):
Session.commit()
@action(
"remove trial",
"Stop trial period",
"Remove trial for this user?",
)
def stop_trial(self, ids):
for user in User.filter(User.id.in_(ids)):
user.trial_end = None
flash(f"Stopped trial for {user}", "success")
AdminAuditLog.stop_trial(current_user.id, user.id)
Session.commit()
@action(
"disable_otp_fido",
"Disable OTP & FIDO",
@ -256,6 +270,17 @@ class UserAdmin(SLModelView):
Session.commit()
@action(
"clear_delete_on",
"Remove scheduled deletion of user",
"This will remove the scheduled deletion for this users",
)
def clean_delete_on(self, ids):
for user in User.filter(User.id.in_(ids)):
user.delete_on = None
Session.commit()
# @action(
# "login_as",
# "Login as this user",
@ -600,6 +625,26 @@ class NewsletterAdmin(SLModelView):
else:
flash(error_msg, "error")
@action(
"clone_newsletter",
"Clone this newsletter",
)
def clone_newsletter(self, newsletter_ids):
if len(newsletter_ids) != 1:
flash("you can only select 1 newsletter", "error")
return
newsletter_id = newsletter_ids[0]
newsletter: Newsletter = Newsletter.get(newsletter_id)
new_newsletter = Newsletter.create(
subject=newsletter.subject,
html=newsletter.html,
plain_text=newsletter.plain_text,
commit=True,
)
flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")
class NewsletterUserAdmin(SLModelView):
column_searchable_list = ["id"]

View File

@ -70,7 +70,6 @@ def verify_prefix_suffix(
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
and not config.DISABLE_ALIAS_SUFFIX
):
if not alias_domain_prefix.startswith("."):
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
return False
@ -162,8 +161,6 @@ def get_alias_suffixes(
or user.default_alias_public_domain_id != sl_domain.id
):
alias_suffixes.append(alias_suffix)
# If no default domain mark it as found
default_domain_found = user.default_alias_public_domain_id is None
else:
default_domain_found = True
alias_suffixes.insert(0, alias_suffix)

View File

@ -21,6 +21,8 @@ from app.email_utils import (
send_cannot_create_directory_alias_disabled,
get_email_local_part,
send_cannot_create_domain_alias,
send_email,
render,
)
from app.errors import AliasInTrashError
from app.log import LOG
@ -36,6 +38,8 @@ from app.models import (
EmailLog,
Contact,
AutoCreateRule,
AliasUsedOn,
ClientUser,
)
from app.regex_utils import regex_match
@ -57,6 +61,8 @@ def get_user_if_alias_would_auto_create(
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
address, notify_user=notify_user
)
if DomainDeletedAlias.get_by(email=address):
return None
if domain_and_rule:
return domain_and_rule[0].user
directory = check_if_alias_can_be_auto_created_for_a_directory(
@ -397,3 +403,58 @@ def alias_export_csv(user, csv_direct_export=False):
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
output.headers["Content-type"] = "text/csv"
return output
def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
# cannot transfer alias which is used for receiving newsletter
if User.get_by(newsletter_alias_id=alias.id):
raise Exception("Cannot transfer alias that's used to receive newsletter")
# update user_id
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
{"user_id": new_user.id}
)
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
{"user_id": new_user.id}
)
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
{"user_id": new_user.id}
)
# remove existing mailboxes from the alias
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
# set mailboxes
alias.mailbox_id = new_mailboxes.pop().id
for mb in new_mailboxes:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
# alias has never been transferred before
if not alias.original_owner_id:
alias.original_owner_id = alias.user_id
# inform previous owner
old_user = alias.user
send_email(
old_user.email,
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
alias=alias,
),
render(
"transactional/alias-transferred.html",
alias=alias,
),
)
# now the alias belongs to the new user
alias.user_id = new_user.id
# set some fields back to default
alias.disable_pgp = False
alias.pinned = False
Session.commit()

View File

@ -16,3 +16,22 @@ from .views import (
sudo,
user,
)
__all__ = [
"alias_options",
"new_custom_alias",
"custom_domain",
"new_random_alias",
"user_info",
"auth",
"auth_mfa",
"alias",
"apple",
"mailbox",
"notification",
"setting",
"export",
"phone",
"sudo",
"user",
]

View File

@ -33,6 +33,9 @@ def authorize_request() -> Optional[Tuple[str, int]]:
if g.user.disabled:
return jsonify(error="Disabled account"), 403
if not g.user.is_active():
return jsonify(error="Account does not exist"), 401
g.api_key = api_key
return None

View File

@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
q = q.order_by(Alias.pinned.desc())
q = q.order_by(latest_activity.desc())
q = list(q.limit(page_limit).offset(page_id * page_size))
q = q.limit(page_limit).offset(page_id * page_size)
ret = []
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
ret.append(
AliasInfo(
alias=alias,
@ -358,7 +358,6 @@ def construct_alias_query(user: User):
else_=0,
)
).label("nb_forward"),
func.max(EmailLog.created_at).label("latest_email_log_created_at"),
)
.join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
.filter(Alias.user_id == user.id)
@ -366,14 +365,6 @@ def construct_alias_query(user: User):
.subquery()
)
alias_contact_subquery = (
Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
.filter(Alias.user_id == user.id)
.group_by(Alias.id)
.subquery()
)
return (
Session.query(
Alias,
@ -385,23 +376,7 @@ def construct_alias_query(user: User):
)
.options(joinedload(Alias.hibp_breaches))
.options(joinedload(Alias.custom_domain))
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
.join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
.join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
.join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
.filter(Alias.id == alias_activity_subquery.c.id)
.filter(Alias.id == alias_contact_subquery.c.id)
.filter(
or_(
EmailLog.created_at
== alias_activity_subquery.c.latest_email_log_created_at,
and_(
# no email log yet for this alias
alias_activity_subquery.c.latest_email_log_created_at.is_(None),
# to make sure only 1 contact is returned in this case
or_(
Contact.id == alias_contact_subquery.c.max_contact_id,
alias_contact_subquery.c.max_contact_id.is_(None),
),
),
)
)
)

View File

@ -24,12 +24,14 @@ from app.errors import (
ErrContactAlreadyExists,
ErrAddressInvalid,
)
from app.extensions import limiter
from app.models import Alias, Contact, Mailbox, AliasMailbox
@deprecated
@api_bp.route("/aliases", methods=["GET", "POST"])
@require_api_auth
@limiter.limit("10/minute", key_func=lambda: g.user.id)
def get_aliases():
"""
Get aliases
@ -72,6 +74,7 @@ def get_aliases():
@api_bp.route("/v2/aliases", methods=["GET", "POST"])
@require_api_auth
@limiter.limit("50/minute", key_func=lambda: g.user.id)
def get_aliases_v2():
"""
Get aliases

View File

@ -17,9 +17,14 @@ from app.models import PlanEnum, AppleSubscription
_MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
_YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"
# SL Mac app used to be in SL account
_MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
_MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"
# SL Mac app is moved to Proton account
_MACAPP_MONTHLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.monthly"
_MACAPP_YEARLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.yearly"
# Apple API URL
_SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
_PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
@ -263,7 +268,11 @@ def apple_update_notification():
plan = (
PlanEnum.monthly
if transaction["product_id"]
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
in (
_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
)
else PlanEnum.yearly
)
@ -517,7 +526,11 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
plan = (
PlanEnum.monthly
if latest_transaction["product_id"]
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
in (
_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
)
else PlanEnum.yearly
)

View File

@ -63,6 +63,11 @@ def auth_login():
elif user.disabled:
LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
return jsonify(error="Account disabled"), 400
elif user.delete_on is not None:
LoginEvent(
LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
).send()
return jsonify(error="Account scheduled for deletion"), 400
elif not user.activated:
LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
return jsonify(error="Account not activated"), 422

View File

@ -13,8 +13,8 @@ from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
is_valid_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email
@ -45,7 +45,7 @@ def create_mailbox():
mailbox_email = sanitize_email(request.get_json().get("email"))
if not user.is_premium():
return jsonify(error=f"Only premium plan can add additional mailbox"), 400
return jsonify(error="Only premium plan can add additional mailbox"), 400
if not is_valid_email(mailbox_email):
return jsonify(error=f"{mailbox_email} invalid"), 400

View File

@ -150,7 +150,7 @@ def new_custom_alias_v3():
if not data:
return jsonify(error="request body cannot be empty"), 400
if type(data) is not dict:
if not isinstance(data, dict):
return jsonify(error="request body does not follow the required format"), 400
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
@ -168,7 +168,7 @@ def new_custom_alias_v3():
return jsonify(error="alias prefix invalid format or too long"), 400
# check if mailbox is not tampered with
if type(mailbox_ids) is not list:
if not isinstance(mailbox_ids, list):
return jsonify(error="mailbox_ids must be an array of id"), 400
mailboxes = []
for mailbox_id in mailbox_ids:

View File

@ -32,6 +32,7 @@ def user_to_dict(user: User) -> dict:
"in_trial": user.in_trial(),
"max_alias_free_plan": user.max_alias_for_free_account(),
"connected_proton_address": None,
"can_create_reverse_alias": user.can_create_contacts(),
}
if config.CONNECT_WITH_PROTON:
@ -58,6 +59,7 @@ def user_info():
- in_trial
- max_alias_free
- is_connected_with_proton
- can_create_reverse_alias
"""
user = g.user

View File

@ -17,3 +17,23 @@ from .views import (
recovery,
api_to_cookie,
)
__all__ = [
"login",
"logout",
"register",
"activate",
"resend_activation",
"reset_password",
"forgot_password",
"github",
"google",
"facebook",
"proton",
"change_email",
"mfa",
"fido",
"social",
"recovery",
"api_to_cookie",
]

View File

@ -62,7 +62,7 @@ def fido():
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
if browser and not browser.is_expired() and browser.user_id == user.id:
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
# Redirect user to correct page
return redirect(next_url or url_for("dashboard.index"))
else:
@ -110,7 +110,7 @@ def fido():
session["sudo_time"] = int(time())
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
# Redirect user to correct page
response = make_response(redirect(next_url or url_for("dashboard.index")))

View File

@ -7,7 +7,7 @@ from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
from app.db import Session
from app.log import LOG
from app.models import User, File, SocialAuth
from app.utils import random_string, sanitize_email
from app.utils import random_string, sanitize_email, sanitize_next_url
from .login_utils import after_login
_authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
@ -29,7 +29,7 @@ def google_login():
# to avoid flask-login displaying the login error message
session.pop("_flashes", None)
next_url = request.args.get("next")
next_url = sanitize_next_url(request.args.get("next"))
# Google does not allow to append param to redirect_url
# we need to pass the next url by session

View File

@ -54,6 +54,12 @@ def login():
"error",
)
LoginEvent(LoginEvent.ActionType.disabled_login).send()
elif user.delete_on is not None:
flash(
f"Your account is scheduled to be deleted on {user.delete_on}",
"error",
)
LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
elif not user.activated:
show_resend_activation = True
flash(

View File

@ -55,7 +55,7 @@ def mfa():
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
if browser and not browser.is_expired() and browser.user_id == user.id:
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
# Redirect user to correct page
return redirect(next_url or url_for("dashboard.index"))
else:
@ -73,7 +73,7 @@ def mfa():
Session.commit()
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
# Redirect user to correct page
response = make_response(redirect(next_url or url_for("dashboard.index")))

View File

@ -53,7 +53,7 @@ def recovery_route():
del session[MFA_USER_ID]
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
recovery_code.used = True
recovery_code.used_at = arrow.now()

View File

@ -94,9 +94,7 @@ def register():
try:
send_activation_email(user, next_url)
RegisterEvent(RegisterEvent.ActionType.success).send()
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
1
)
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
Session.commit()
except Exception:
flash("Invalid email, are you sure the email is correct?", "error")

View File

@ -179,6 +179,7 @@ AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
BUCKET = os.environ.get("BUCKET")
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)
# Paddle
try:
@ -488,7 +489,34 @@ def setup_nameservers():
NAMESERVERS = setup_nameservers()
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = False
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = os.environ.get(
"DISABLE_CREATE_CONTACTS_FOR_FREE_USERS", False
)
# Expect format hits,seconds:hits,seconds...
# Example 1,10:4,60 means 1 in the last 10 secs or 4 in the last 60 secs
def getRateLimitFromConfig(
env_var: string, default: string = ""
) -> list[tuple[int, int]]:
value = os.environ.get(env_var, default)
if not value:
return []
entries = [entry for entry in value.split(":")]
limits = []
for entry in entries:
fields = entry.split(",")
limit = (int(fields[0]), int(fields[1]))
limits.append(limit)
return limits
ALIAS_CREATE_RATE_LIMIT_FREE = getRateLimitFromConfig(
"ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600"
)
ALIAS_CREATE_RATE_LIMIT_PAID = getRateLimitFromConfig(
"ALIAS_CREATE_RATE_LIMIT_PAID", "50,900:200,3600"
)
PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
FLASK_SECRET + "partnerapitoken"
)
@ -534,3 +562,8 @@ SKIP_MX_LOOKUP_ON_CHECK = False
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
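
A standalone sketch of the `hits,seconds:hits,seconds` rate-limit format added to config.py above. It mirrors what `getRateLimitFromConfig` does, but takes the raw string directly instead of reading an environment variable, and the function name here is illustrative.

```python
# "10,900:50,3600" means 10 aliases per 900 s and 50 per 3600 s.
def parse_rate_limit(value: str) -> list[tuple[int, int]]:
    if not value:
        return []
    return [
        (int(hits), int(seconds))
        for hits, seconds in (entry.split(",") for entry in value.split(":"))
    ]


assert parse_rate_limit("10,900:50,3600") == [(10, 900), (50, 3600)]    # free default
assert parse_rate_limit("50,900:200,3600") == [(50, 900), (200, 3600)]  # paid default
assert parse_rate_limit("") == []
```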

View File

@ -33,3 +33,39 @@ from .views import (
notification,
support,
)
__all__ = [
"index",
"pricing",
"setting",
"custom_alias",
"subdomain",
"billing",
"alias_log",
"alias_export",
"unsubscribe",
"api_key",
"custom_domain",
"alias_contact_manager",
"enter_sudo",
"mfa_setup",
"mfa_cancel",
"fido_setup",
"coupon",
"fido_manage",
"domain_detail",
"lifetime_licence",
"directory",
"mailbox",
"mailbox_detail",
"refused_email",
"referral",
"contact_detail",
"setup_done",
"batch_import",
"alias_transfer",
"app",
"delete_account",
"notification",
"support",
]

View File

@ -13,10 +13,10 @@ from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
is_valid_email,
generate_reply_email,
parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
ErrContactErrorUpgradeNeeded,
@ -51,14 +51,6 @@ def email_validator():
return _check
def user_can_create_contacts(user: User) -> bool:
if user.is_premium():
return True
if user.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
"""
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
@ -82,7 +74,7 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
if contact:
raise ErrContactAlreadyExists(contact)
if not user_can_create_contacts(user):
if not user.can_create_contacts():
raise ErrContactErrorUpgradeNeeded()
contact = Contact.create(
@ -327,6 +319,6 @@ def alias_contact_manager(alias_id):
last_page=last_page,
query=query,
nb_contact=nb_contact,
can_create_contacts=user_can_create_contacts(current_user),
can_create_contacts=current_user.can_create_contacts(),
csrf_form=csrf_form,
)

View File

@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
contact=contact,
)
logs.append(al)
logs = sorted(logs, key=lambda l: l.when, reverse=True)
logs = sorted(logs, key=lambda log: log.when, reverse=True)
return logs

View File

@ -7,79 +7,19 @@ from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user
from app import config
from app.alias_utils import transfer_alias
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import send_email, render
from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
Contact,
AliasUsedOn,
AliasMailbox,
User,
ClientUser,
)
from app.models import Mailbox
from app.utils import CSRFValidationForm
def transfer(alias, new_user, new_mailboxes: [Mailbox]):
# cannot transfer alias which is used for receiving newsletter
if User.get_by(newsletter_alias_id=alias.id):
raise Exception("Cannot transfer alias that's used to receive newsletter")
# update user_id
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
{"user_id": new_user.id}
)
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
{"user_id": new_user.id}
)
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
{"user_id": new_user.id}
)
# remove existing mailboxes from the alias
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
# set mailboxes
alias.mailbox_id = new_mailboxes.pop().id
for mb in new_mailboxes:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
# alias has never been transferred before
if not alias.original_owner_id:
alias.original_owner_id = alias.user_id
# inform previous owner
old_user = alias.user
send_email(
old_user.email,
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
alias=alias,
),
render(
"transactional/alias-transferred.html",
alias=alias,
),
)
# now the alias belongs to the new user
alias.user_id = new_user.id
# set some fields back to default
alias.disable_pgp = False
alias.pinned = False
Session.commit()
def hmac_alias_transfer_token(transfer_token: str) -> str:
alias_hmac = hmac.new(
config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
@ -214,7 +154,7 @@ def alias_transfer_receive_route():
mailboxes,
token,
)
transfer(alias, current_user, mailboxes)
transfer_alias(alias, current_user, mailboxes)
# reset transfer token
alias.transfer_token = None

View File

@ -3,9 +3,11 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app import config
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.models import ApiKey
from app.utils import CSRFValidationForm
@ -14,9 +16,34 @@ class NewApiKeyForm(FlaskForm):
name = StringField("Name", validators=[validators.DataRequired()])
def clean_up_unused_or_old_api_keys(user_id: int):
total_keys = ApiKey.filter_by(user_id=user_id).count()
if total_keys <= config.MAX_API_KEYS:
return
# Remove oldest unused
for api_key in (
ApiKey.filter_by(user_id=user_id, last_used=None)
.order_by(ApiKey.created_at.asc())
.all()
):
Session.delete(api_key)
total_keys -= 1
if total_keys <= config.MAX_API_KEYS:
return
# Clean up oldest used
for api_key in (
ApiKey.filter_by(user_id=user_id).order_by(ApiKey.last_used.asc()).all()
):
Session.delete(api_key)
total_keys -= 1
if total_keys <= config.MAX_API_KEYS:
return
@dashboard_bp.route("/api_key", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/hour")
def api_key():
api_keys = (
ApiKey.filter(ApiKey.user_id == current_user.id)
@ -50,6 +77,7 @@ def api_key():
elif request.form.get("form-name") == "create":
if new_api_key_form.validate():
clean_up_unused_or_old_api_keys(current_user.id)
new_api_key = ApiKey.create(
name=new_api_key_form.name.data, user_id=current_user.id
)

View File

@ -1,14 +1,9 @@
from app.db import Session
"""
List of apps that user has used via the "Sign in with SimpleLogin"
"""
from flask import render_template, request, flash, redirect
from flask_login import login_required, current_user
from sqlalchemy.orm import joinedload
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import (
ClientUser,
)
@ -17,6 +12,10 @@ from app.models import (
@dashboard_bp.route("/app", methods=["GET", "POST"])
@login_required
def app_route():
"""
List of apps that user has used via the "Sign in with SimpleLogin"
"""
client_users = (
ClientUser.filter_by(user_id=current_user.id)
.options(joinedload(ClientUser.client))

View File

@ -100,7 +100,7 @@ def coupon_route():
commit=True,
)
flash(
f"Your account has been upgraded to Premium, thanks for your support!",
"Your account has been upgraded to Premium, thanks for your support!",
"success",
)

View File

@ -24,6 +24,7 @@ from app.models import (
AliasMailbox,
DomainDeletedAlias,
)
from app.utils import CSRFValidationForm
@dashboard_bp.route("/custom_alias", methods=["GET", "POST"])
@ -48,9 +49,13 @@ def custom_alias():
at_least_a_premium_domain = True
break
csrf_form = CSRFValidationForm()
mailboxes = current_user.mailboxes()
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
alias_prefix = request.form.get("prefix").strip().lower().replace(" ", "")
signed_alias_suffix = request.form.get("signed-alias-suffix")
mailbox_ids = request.form.getlist("mailboxes")
@ -164,4 +169,5 @@ def custom_alias():
alias_suffixes=alias_suffixes,
at_least_a_premium_domain=at_least_a_premium_domain,
mailboxes=mailboxes,
csrf_form=csrf_form,
)

View File

@ -67,7 +67,7 @@ def directory():
if request.method == "POST":
if request.form.get("form-name") == "delete":
if not delete_dir_form.validate():
flash(f"Invalid request", "warning")
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_obj = Directory.get(delete_dir_form.directory_id.data)
@ -87,7 +87,7 @@ def directory():
if request.form.get("form-name") == "toggle-directory":
if not toggle_dir_form.validate():
flash(f"Invalid request", "warning")
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = toggle_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)
@ -109,7 +109,7 @@ def directory():
elif request.form.get("form-name") == "update":
if not update_dir_form.validate():
flash(f"Invalid request", "warning")
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = update_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)

View File

@ -8,6 +8,7 @@ from wtforms import PasswordField, validators
from app.config import CONNECT_WITH_PROTON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
@ -21,6 +22,7 @@ class LoginForm(FlaskForm):
@dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
@limiter.limit("3/minute")
@login_required
def enter_sudo():
password_check_form = LoginForm()

View File

@ -52,12 +52,13 @@ def get_stats(user: User) -> Stats:
@dashboard_bp.route("/", methods=["GET", "POST"])
@login_required
@limiter.limit(
ALIAS_LIMIT,
methods=["POST"],
exempt_when=lambda: request.form.get("form-name") != "create-random-email",
)
@login_required
@limiter.limit("10/minute", methods=["GET"], key_func=lambda: current_user.id)
@parallel_limiter.lock(
name="alias_creation",
only_when=lambda: request.form.get("form-name") == "create-random-email",

View File

@ -1,3 +1,7 @@
import base64
import binascii
import json
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
@ -15,8 +19,8 @@ from app.email_utils import (
mailbox_already_used,
render,
send_email,
is_valid_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import CSRFValidationForm
@ -180,7 +184,9 @@ def mailbox_route():
def send_verification_email(user, mailbox):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
b64_data = base64.urlsafe_b64encode(encoded_data)
mailbox_id_signed = s.sign(b64_data).decode()
verification_url = (
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
)
@ -205,22 +211,34 @@ def send_verification_email(user, mailbox):
@dashboard_bp.route("/mailbox_verify")
def mailbox_verify():
s = TimestampSigner(MAILBOX_SECRET)
mailbox_id = request.args.get("mailbox_id")
mailbox_verify_request = request.args.get("mailbox_id")
try:
r_id = int(s.unsign(mailbox_id, max_age=900))
mailbox_raw_data = s.unsign(mailbox_verify_request, max_age=900)
except Exception:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
else:
mailbox = Mailbox.get(r_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
try:
decoded_data = base64.urlsafe_b64decode(mailbox_raw_data)
except binascii.Error:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_data = json.loads(decoded_data)
if not isinstance(mailbox_data, list) or len(mailbox_data) != 2:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_id = mailbox_data[0]
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_email = mailbox_data[1]
if mailbox_email != mailbox.email:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox.verified = True
Session.commit()
mailbox.verified = True
Session.commit()
LOG.d("Mailbox %s is verified", mailbox)
LOG.d("Mailbox %s is verified", mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
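
The mailbox change above signs a base64-encoded JSON pair `[mailbox.id, mailbox.email]` instead of the bare id, so the verification link is also bound to the address it was sent for. A minimal round trip of that token format with `itsdangerous`; the secret and the mailbox values here are placeholders (the app uses `MAILBOX_SECRET`).

```python
import base64
import json

from itsdangerous import TimestampSigner

signer = TimestampSigner("placeholder-secret")

# signing side: bind both the mailbox id and its email into the token
payload = base64.urlsafe_b64encode(json.dumps([42, "me@example.com"]).encode("utf-8"))
token = signer.sign(payload).decode()

# verifying side: reject tokens older than 15 minutes, then unpack and compare
raw = signer.unsign(token, max_age=900)
mailbox_id, mailbox_email = json.loads(base64.urlsafe_b64decode(raw))
assert (mailbox_id, mailbox_email) == (42, "me@example.com")
```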

View File

@ -30,7 +30,7 @@ class ChangeEmailForm(FlaskForm):
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
def mailbox_detail_route(mailbox_id):
mailbox = Mailbox.get(mailbox_id)
mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
@ -144,6 +144,15 @@ def mailbox_detail_route(mailbox_id):
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
if mailbox.is_proton():
flash(
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
"info",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
mailbox.pgp_public_key = request.form.get("pgp")
try:
mailbox.pgp_finger_print = load_public_key_and_check(
@ -182,25 +191,16 @@ def mailbox_detail_route(mailbox_id):
)
elif request.form.get("form-name") == "generic-subject":
if request.form.get("action") == "save":
if not mailbox.pgp_enabled():
flash(
"Generic subject can only be used on PGP-enabled mailbox",
"error",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
mailbox.generic_subject = request.form.get("generic-subject")
Session.commit()
flash("Generic subject for PGP-encrypted email is enabled", "success")
flash("Generic subject is enabled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("action") == "remove":
mailbox.generic_subject = None
Session.commit()
flash("Generic subject for PGP-encrypted email is disabled", "success")
flash("Generic subject is disabled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)

View File

@ -128,7 +128,6 @@ def setting():
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:
# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email

View File

@ -75,12 +75,11 @@ def block_contact(contact_id):
@dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
@login_required
def encoded_unsubscribe(encoded_request: str):
unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
current_user, encoded_request
)
if not unsub_data:
flash(f"Invalid unsubscribe request", "error")
flash("Invalid unsubscribe request", "error")
return redirect(url_for("dashboard.index"))
if unsub_data.action == UnsubscribeAction.DisableAlias:
alias = Alias.get(unsub_data.data)
@ -97,14 +96,14 @@ def encoded_unsubscribe(encoded_request: str):
)
)
if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
flash(f"You've unsubscribed from the newsletter", "success")
flash("You've unsubscribed from the newsletter", "success")
return redirect(
url_for(
"dashboard.index",
)
)
if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
flash(f"The original unsubscribe request has been forwarded", "success")
flash("The original unsubscribe request has been forwarded", "success")
return redirect(
url_for(
"dashboard.index",

View File

@ -1 +1,3 @@
from .views import index, new_client, client_detail
__all__ = ["index", "new_client", "client_detail"]

View File

@ -87,7 +87,7 @@ def client_detail(client_id):
)
flash(
f"Thanks for submitting, we are informed and will come back to you asap!",
"Thanks for submitting, we are informed and will come back to you asap!",
"success",
)

View File

@ -1 +1,3 @@
from .views import index
__all__ = ["index"]

View File

@ -34,7 +34,7 @@ def get_cname_record(hostname) -> Optional[str]:
def get_mx_domains(hostname) -> [(int, str)]:
"""return list of (priority, domain name).
"""return list of (priority, domain name) sorted by priority (lowest priority first)
domain name ends with a "." at the end.
"""
try:
@ -50,7 +50,7 @@ def get_mx_domains(hostname) -> [(int, str)]:
ret.append((int(parts[0]), parts[1]))
return ret
return sorted(ret, key=lambda prio_domain: prio_domain[0])
_include_spf = "include:"

View File

@ -20,6 +20,7 @@ X_SPAM_STATUS = "X-Spam-Status"
LIST_UNSUBSCRIBE = "List-Unsubscribe"
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
RETURN_PATH = "Return-Path"
AUTHENTICATION_RESULTS = "Authentication-Results"
# headers used to DKIM sign in order of preference
DKIM_HEADERS = [
@ -32,6 +33,7 @@ DKIM_HEADERS = [
SL_DIRECTION = "X-SimpleLogin-Type"
SL_EMAIL_LOG_ID = "X-SimpleLogin-EmailLog-ID"
SL_ENVELOPE_FROM = "X-SimpleLogin-Envelope-From"
SL_ORIGINAL_FROM = "X-SimpleLogin-Original-From"
SL_ENVELOPE_TO = "X-SimpleLogin-Envelope-To"
SL_CLIENT_IP = "X-SimpleLogin-Client-IP"

View File

@ -93,7 +93,7 @@ def send_welcome_email(user):
send_email(
comm_email,
f"Welcome to SimpleLogin",
"Welcome to SimpleLogin",
render("com/welcome.txt", user=user, alias=alias),
render("com/welcome.html", user=user, alias=alias),
unsubscribe_link,
@ -104,7 +104,7 @@ def send_welcome_email(user):
def send_trial_end_soon_email(user):
send_email(
user.email,
f"Your trial will end soon",
"Your trial will end soon",
render("transactional/trial-end.txt.jinja2", user=user),
render("transactional/trial-end.html", user=user),
ignore_smtp_error=True,
@ -114,7 +114,7 @@ def send_trial_end_soon_email(user):
def send_activation_email(email, activation_link):
send_email(
email,
f"Just one more step to join SimpleLogin",
"Just one more step to join SimpleLogin",
render(
"transactional/activation.txt",
activation_link=activation_link,
@ -583,6 +583,26 @@ def email_can_be_used_as_mailbox(email_address: str) -> bool:
LOG.d("MX Domain %s %s is invalid mailbox domain", mx_domain, domain)
return False
existing_user = User.get_by(email=email_address)
if existing_user and existing_user.disabled:
LOG.d(
f"User {existing_user} is disabled. {email_address} cannot be used for other mailbox"
)
return False
for existing_user in (
User.query()
.join(Mailbox, User.id == Mailbox.user_id)
.filter(Mailbox.email == email_address)
.group_by(User.id)
.all()
):
if existing_user.disabled:
LOG.d(
f"User {existing_user} is disabled and has a mailbox with {email_address}. Id cannot be used for other mailbox"
)
return False
return True
@ -768,7 +788,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
ret = ""
for to_decoded_str, charset in decode_header(header):
if charset is None:
if type(to_decoded_str) is bytes:
if isinstance(to_decoded_str, bytes):
decoded_str = to_decoded_str.decode()
else:
decoded_str = to_decoded_str
@ -805,13 +825,13 @@ def to_bytes(msg: Message):
for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
try:
return msg.as_bytes(policy=generator_policy)
except:
except Exception:
LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)
msg_string = msg.as_string()
try:
return msg_string.encode()
except:
except Exception:
LOG.w("as_string().encode() fails", exc_info=True)
return msg_string.encode(errors="replace")
@ -828,19 +848,6 @@ def should_add_dkim_signature(domain: str) -> bool:
return False
def is_valid_email(email_address: str) -> bool:
"""
Used to check whether an email address is valid
NOT run MX check.
NOT allow unicode.
"""
try:
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
return True
except EmailNotValidError:
return False
class EmailEncoding(enum.Enum):
BASE64 = "base64"
QUOTED = "quoted-printable"
@ -919,7 +926,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
if content_type == "text/plain":
encoding = get_encoding(msg)
payload = msg.get_payload()
if type(payload) is str:
if isinstance(payload, str):
clone_msg = copy(msg)
new_payload = f"""{text_header}
------------------------------
@ -929,7 +936,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
elif content_type == "text/html":
encoding = get_encoding(msg)
payload = msg.get_payload()
if type(payload) is str:
if isinstance(payload, str):
new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
-premailer-cellspacing: 0; margin: 0; padding: 0;">
<tr>
@ -951,6 +958,8 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
for part in msg.get_payload():
if isinstance(part, Message):
new_parts.append(add_header(part, text_header, html_header))
elif isinstance(part, str):
new_parts.append(MIMEText(part))
else:
new_parts.append(part)
clone_msg = copy(msg)
@ -959,7 +968,14 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
elif content_type in ("multipart/mixed", "multipart/signed"):
new_parts = []
parts = list(msg.get_payload())
payload = msg.get_payload()
if isinstance(payload, str):
# The message is badly formatted inject as new
new_parts = [MIMEText(text_header, "plain"), MIMEText(payload, "plain")]
clone_msg = copy(msg)
clone_msg.set_payload(new_parts)
return clone_msg
parts = list(payload)
LOG.d("only add header for the first part for %s", content_type)
for ix, part in enumerate(parts):
if ix == 0:
@ -976,7 +992,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
if type(msg) is str:
if isinstance(msg, str):
msg = msg.replace(old, new)
return msg
@ -999,7 +1015,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
if content_type in ("text/plain", "text/html"):
encoding = get_encoding(msg)
payload = msg.get_payload()
if type(payload) is str:
if isinstance(payload, str):
if encoding == EmailEncoding.QUOTED:
LOG.d("handle quoted-printable replace %s -> %s", old, new)
# first decode the payload
@ -1107,26 +1123,6 @@ def is_reverse_alias(address: str) -> bool:
)
# allow also + and @ that are present in a reply address
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
def normalize_reply_email(reply_email: str) -> str:
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
if not reply_email.isascii():
reply_email = convert_to_id(reply_email)
ret = []
# drop all control characters like shift, separator, etc
for c in reply_email:
if c not in _ALLOWED_CHARS:
ret.append("_")
else:
ret.append(c)
return "".join(ret)
def should_disable(alias: Alias) -> (bool, str):
"""
Return whether an alias should be disabled and if yes, the reason why

View File

@ -0,0 +1,38 @@
from email_validator import (
validate_email,
EmailNotValidError,
)
from app.utils import convert_to_id
# allow also + and @ that are present in a reply address
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
def is_valid_email(email_address: str) -> bool:
"""
Used to check whether an email address is valid
NOT run MX check.
NOT allow unicode.
"""
try:
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
return True
except EmailNotValidError:
return False
def normalize_reply_email(reply_email: str) -> str:
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
if not reply_email.isascii():
reply_email = convert_to_id(reply_email)
ret = []
# drop all control characters like shift, separator, etc
for c in reply_email:
if c not in _ALLOWED_CHARS:
ret.append("_")
else:
ret.append(c)
return "".join(ret)

View File

@ -84,6 +84,14 @@ class ErrAddressInvalid(SLException):
return f"{self.address} is not a valid email address"
class InvalidContactEmailError(SLException):
def __init__(self, website_email: str): # noqa: F821
self.website_email = website_email
def error_for_user(self) -> str:
return f"Cannot create contact with invalid email {self.website_email}"
class ErrContactAlreadyExists(SLException):
"""raised when a contact already exists"""
@ -113,3 +121,10 @@ class AccountAlreadyLinkedToAnotherUserException(LinkException):
class AccountIsUsingAliasAsEmail(LinkException):
def __init__(self):
super().__init__("Your account has an alias as it's email address")
class ProtonAccountNotVerified(LinkException):
def __init__(self):
super().__init__(
"The Proton account you are trying to use has not been verified"
)

View File

@ -9,6 +9,7 @@ class LoginEvent:
failed = 1
disabled_login = 2
not_activated = 3
scheduled_to_be_deleted = 4
class Source(EnumE):
web = 0

View File

@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(
from_header = get_header_unicode(msg[headers.FROM])
warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
More info on https://simplelogin.io/docs/getting-started/anti-phishing/
"""
warning_html = f"""
warning_html = """
<p style="color:red">
This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
@ -131,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
refused_email = RefusedEmail.create(
full_report_path=s3_report_path, user_id=alias.user_id, flush=True
)
return EmailLog.create(
email_log = EmailLog.create(
user_id=alias.user_id,
mailbox_id=alias.mailbox_id,
contact_id=contact.id,
@ -142,6 +142,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
blocked=True,
commit=True,
)
return email_log
def apply_dmarc_policy_for_reply_phase(

View File

@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
return True
if is_deleted_alias(msg_info.sender_address):
LOG.i(f"Complaint is for deleted alias. Do nothing")
LOG.i("Complaint is for deleted alias. Do nothing")
return True
contact = Contact.get_by(reply_email=msg_info.sender_address)
@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
alias = find_alias_with_address(msg_info.rcpt_address)
if is_deleted_alias(msg_info.rcpt_address):
LOG.i(f"Complaint is for deleted alias. Do nothing")
LOG.i("Complaint is for deleted alias. Do nothing")
return True
if not alias:

View File

@ -54,9 +54,8 @@ class UnsubscribeEncoder:
def encode_subject(
cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
) -> str:
if (
action != UnsubscribeAction.OriginalUnsubscribeMailto
and type(data) is not int
if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
data, int
):
raise ValueError(f"Data has to be an int for an action of type {action}")
if action == UnsubscribeAction.OriginalUnsubscribeMailto:
@ -74,8 +73,8 @@ class UnsubscribeEncoder:
)
signed_data = cls._get_signer().sign(serialized_data).decode("utf-8")
encoded_request = f"{UNSUB_PREFIX}.{signed_data}"
if len(encoded_request) > 256:
LOG.e("Encoded request is longer than 256 chars")
if len(encoded_request) > 512:
LOG.w("Encoded request is longer than 512 chars")
return encoded_request
@staticmethod

View File

@ -1,4 +1,5 @@
import urllib
from email.header import Header
from email.message import Message
from app.email import headers
@ -9,6 +10,7 @@ from app.handler.unsubscribe_encoder import (
UnsubscribeData,
UnsubscribeOriginalData,
)
from app.log import LOG
from app.models import Alias, Contact, UnsubscribeBehaviourEnum
@ -30,7 +32,10 @@ class UnsubscribeGenerator:
"""
unsubscribe_data = message[headers.LIST_UNSUBSCRIBE]
if not unsubscribe_data:
LOG.info("Email has no unsubscribe header")
return message
if isinstance(unsubscribe_data, Header):
unsubscribe_data = str(unsubscribe_data.encode())
raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
mailto_unsubs = None
other_unsubs = []
@ -44,7 +49,9 @@ class UnsubscribeGenerator:
if url_data.scheme == "mailto":
query_data = urllib.parse.parse_qs(url_data.query)
mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
LOG.debug(f"Unsub is mailto to {mailto_unsubs}")
else:
LOG.debug(f"Unsub has {url_data.scheme} scheme")
other_unsubs.append(method)
# If there are non mailto unsubscribe methods, use those in the header
if other_unsubs:
@ -56,18 +63,19 @@ class UnsubscribeGenerator:
add_or_replace_header(
message, headers.LIST_UNSUBSCRIBE_POST, "List-Unsubscribe=One-Click"
)
LOG.debug(f"Adding click unsub methods to header {other_unsubs}")
return message
if not mailto_unsubs:
message = delete_header(message, headers.LIST_UNSUBSCRIBE)
message = delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
elif not mailto_unsubs:
LOG.debug("No unsubs. Deleting all unsub headers")
delete_header(message, headers.LIST_UNSUBSCRIBE)
delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
return message
return self._add_unsubscribe_header(
message,
UnsubscribeData(
UnsubscribeAction.OriginalUnsubscribeMailto,
UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
),
unsub_data = UnsubscribeData(
UnsubscribeAction.OriginalUnsubscribeMailto,
UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
)
LOG.debug(f"Adding unsub data {unsub_data}")
return self._add_unsubscribe_header(message, unsub_data)
def _add_unsubscribe_header(
self, message: Message, unsub: UnsubscribeData
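
The handler above splits the `List-Unsubscribe` header into comma-separated methods and prefers non-mailto URLs over the mailto one. A simplified standalone sketch of that split; the sample header value is made up for illustration and the bracket stripping is done inline here.

```python
import urllib.parse

header_value = (
    "<mailto:unsub@news.example.com?subject=abc123>, "
    "<https://news.example.com/unsub/abc123>"
)

mailto_unsubs = None
other_unsubs = []
for method in (m.strip() for m in header_value.split(",")):
    url = method[1:-1] if method.startswith("<") and method.endswith(">") else method
    parsed = urllib.parse.urlparse(url)
    if parsed.scheme == "mailto":
        query = urllib.parse.parse_qs(parsed.query)
        mailto_unsubs = (parsed.path, query.get("subject", [""])[0])
    else:
        other_unsubs.append(method)

assert mailto_unsubs == ("unsub@news.example.com", "abc123")
assert other_unsubs == ["<https://news.example.com/unsub/abc123>"]
```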

View File

@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
LOG.d("Download file %s from %s", batch_import.file, file_url)
r = requests.get(file_url)
lines = [line.decode() for line in r.iter_lines()]
lines = [line.decode("utf-8") for line in r.iter_lines()]
import_from_csv(batch_import, user, lines)

View File

@ -1,2 +1,4 @@
from .integrations import set_enable_proton_cookie
from .exit_sudo import exit_sudo_mode
__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]

View File

@ -39,7 +39,6 @@ from app.models import (
class ExportUserDataJob:
REMOVE_FIELDS = {
"User": ("otp_secret", "password"),
"Alias": ("ts_vector", "transfer_token", "hibp_last_check"),

View File

@ -22,7 +22,6 @@ from app.message_utils import message_to_bytes, message_format_base64_parts
@dataclass
class SendRequest:
SAVE_EXTENSION = "sendrequest"
envelope_from: str
@ -46,6 +45,7 @@ class SendRequest:
"mail_options": self.mail_options,
"rcpt_options": self.rcpt_options,
"is_forward": self.is_forward,
"retries": self.retries,
}
return json.dumps(data).encode("utf-8")
@ -66,6 +66,7 @@ class SendRequest:
mail_options=decoded_data["mail_options"],
rcpt_options=decoded_data["rcpt_options"],
is_forward=decoded_data["is_forward"],
retries=decoded_data.get("retries", 1),
)
def save_request_to_unsent_dir(self, prefix: str = "DeliveryFail"):

View File

@ -27,9 +27,11 @@ from sqlalchemy.orm import deferred
from sqlalchemy.sql import and_
from sqlalchemy_utils import ArrowType
from app import config
from app import config, rate_limiter
from app import s3
from app.db import Session
from app.dns_utils import get_mx_domains
from app.errors import (
AliasInTrashError,
DirectoryInTrashError,
@ -233,6 +235,7 @@ class AuditLogActionEnum(EnumE):
download_provider_complaint = 8
disable_user = 9
enable_user = 10
stop_trial = 11
class Phase(EnumE):
@ -278,6 +281,7 @@ class IntEnumType(sa.types.TypeDecorator):
class AliasOptions:
show_sl_domains: bool = True
show_partner_domains: Optional[Partner] = None
show_partner_premium: Optional[bool] = None
class Hibp(Base, ModelMixin):
@ -341,7 +345,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
sa.Boolean, default=True, nullable=False, server_default="1"
)
activated = sa.Column(sa.Boolean, default=False, nullable=False)
activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
# an account can be disabled if having harmful behavior
disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@ -411,7 +415,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
)
referral_id = sa.Column(
sa.ForeignKey("referral.id", ondelete="SET NULL"), nullable=True, default=None
sa.ForeignKey("referral.id", ondelete="SET NULL"),
nullable=True,
default=None,
index=True,
)
referral = orm.relationship("Referral", foreign_keys=[referral_id])
@ -445,7 +452,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
random_alias_suffix = sa.Column(
sa.Integer,
nullable=False,
default=AliasSuffixEnum.random_string.value,
default=AliasSuffixEnum.word.value,
server_default=str(AliasSuffixEnum.random_string.value),
)
@ -514,9 +521,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
server_default=BlockBehaviourEnum.return_2xx.name,
)
# to keep existing behavior, the server default is TRUE whereas for new user, the default value is FALSE
include_header_email_header = sa.Column(
sa.Boolean, default=False, nullable=False, server_default="1"
sa.Boolean, default=True, nullable=False, server_default="1"
)
# bitwise flags. Allow for future expansion
@ -535,6 +541,16 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
nullable=False,
)
# Trigger hard deletion of the account at this time
delete_on = sa.Column(ArrowType, default=None)
__table_args__ = (
sa.Index(
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
),
sa.Index("ix_users_delete_on", delete_on),
)
@property
def directory_quota(self):
return min(
@ -569,6 +585,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
@classmethod
def create(cls, email, name="", password=None, from_partner=False, **kwargs):
email = sanitize_email(email)
user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
if password:
@ -710,6 +727,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return True
def is_active(self) -> bool:
if self.delete_on is None:
return True
return self.delete_on < arrow.now()
def in_trial(self):
"""return True if user does not have lifetime licence or an active subscription AND is in trial period"""
if self.lifetime_or_active_subscription():
@ -811,6 +833,9 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
Whether user can create a new alias. User can't create a new alias if
- has more than 15 aliases in the free plan, *even in the free trial*
"""
if not self.is_active():
return False
if self.disabled:
return False
@ -822,6 +847,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
< self.max_alias_for_free_account()
)
def can_send_or_receive(self) -> bool:
if self.disabled:
LOG.i(f"User {self} is disabled. Cannot receive or send emails")
return False
if self.delete_on is not None:
LOG.i(
f"User {self} is scheduled to be deleted. Cannot receive or send emails"
)
return False
return True
def profile_picture_url(self):
if self.profile_picture_id:
return self.profile_picture.get_url()
@ -880,7 +916,11 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return sub
def verified_custom_domains(self) -> List["CustomDomain"]:
return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all()
return (
CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
.order_by(CustomDomain.domain.asc())
.all()
)
def mailboxes(self) -> List["Mailbox"]:
"""list of mailbox that user own"""
@ -1012,25 +1052,35 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
) -> list["SLDomain"]:
if alias_options is None:
alias_options = AliasOptions()
conditions = [SLDomain.hidden == False] # noqa: E712
if not self.is_premium():
conditions.append(SLDomain.premium_only == False) # noqa: E712
partner_domain_cond = [] # noqa:E711
top_conds = [SLDomain.hidden == False] # noqa: E712
or_conds = [] # noqa:E711
if self.default_alias_public_domain_id is not None:
default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
if not self.is_premium():
default_domain_conds.append(
SLDomain.premium_only == False # noqa: E712
)
or_conds.append(and_(*default_domain_conds).self_group())
if alias_options.show_partner_domains is not None:
partner_user = PartnerUser.filter_by(
user_id=self.id, partner_id=alias_options.show_partner_domains.id
).first()
if partner_user is not None:
partner_domain_cond.append(
SLDomain.partner_id == partner_user.partner_id
)
partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
if alias_options.show_partner_premium is None:
alias_options.show_partner_premium = self.is_premium()
if not alias_options.show_partner_premium:
partner_domain_cond.append(
SLDomain.premium_only == False # noqa: E712
)
or_conds.append(and_(*partner_domain_cond).self_group())
if alias_options.show_sl_domains:
partner_domain_cond.append(SLDomain.partner_id == None) # noqa:E711
if len(partner_domain_cond) == 1:
conditions.append(partner_domain_cond[0])
else:
conditions.append(or_(*partner_domain_cond))
query = Session.query(SLDomain).filter(*conditions).order_by(SLDomain.order)
sl_conds = [SLDomain.partner_id == None] # noqa: E711
if not self.is_premium():
sl_conds.append(SLDomain.premium_only == False) # noqa: E712
or_conds.append(and_(*sl_conds).self_group())
top_conds.append(or_(*or_conds))
query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
return query.all()
def available_alias_domains(
@ -1076,6 +1126,13 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return random_words(1)
def can_create_contacts(self) -> bool:
if self.is_premium():
return True
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
def __repr__(self):
return f"<User {self.id} {self.name} {self.email}>"
@ -1442,7 +1499,7 @@ class Alias(Base, ModelMixin):
)
# have I been pwned
hibp_last_check = sa.Column(ArrowType, default=None)
hibp_last_check = sa.Column(ArrowType, default=None, index=True)
hibp_breaches = orm.relationship("Hibp", secondary="alias_hibp")
# to use Postgres full text search. Only applied on "note" column for now
@ -1451,6 +1508,8 @@ class Alias(Base, ModelMixin):
TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
)
last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
__table_args__ = (
Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
# index on note column using pg_trgm
@ -1469,7 +1528,8 @@ class Alias(Base, ModelMixin):
def mailboxes(self):
ret = [self.mailbox]
for m in self._mailboxes:
ret.append(m)
if m.id is not self.mailbox.id:
ret.append(m)
ret = [mb for mb in ret if mb.verified]
ret = sorted(ret, key=lambda mb: mb.email)
@ -1518,6 +1578,15 @@ class Alias(Base, ModelMixin):
flush = kw.pop("flush", False)
new_alias = cls(**kw)
user = User.get(new_alias.user_id)
if user.is_premium():
limits = config.ALIAS_CREATE_RATE_LIMIT_PAID
else:
limits = config.ALIAS_CREATE_RATE_LIMIT_FREE
# limits is array of (hits,days)
for limit in limits:
key = f"alias_create_{limit[1]}d:{user.id}"
rate_limiter.check_bucket_limit(key, limit[0], limit[1])
email = kw["email"]
# make sure email is lowercase and doesn't have any whitespace
@ -1910,6 +1979,7 @@ class Contact(Base, ModelMixin):
class EmailLog(Base, ModelMixin):
__tablename__ = "email_log"
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
@ -1999,6 +2069,20 @@ class EmailLog(Base, ModelMixin):
def get_dashboard_url(self):
return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"
@classmethod
def create(cls, *args, **kwargs):
commit = kwargs.pop("commit", False)
email_log = super().create(*args, **kwargs)
Session.flush()
if "alias_id" in kwargs:
sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
Session.execute(
sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
)
if commit:
Session.commit()
return email_log
def __repr__(self):
return f"<EmailLog {self.id}>"
@ -2288,6 +2372,7 @@ class CustomDomain(Base, ModelMixin):
@classmethod
def create(cls, **kwargs):
domain = kwargs.get("domain")
kwargs["domain"] = domain.replace("\n", "")
if DeletedSubdomain.get_by(domain=domain):
raise SubdomainInTrashError
@ -2555,6 +2640,28 @@ class Mailbox(Base, ModelMixin):
+ Alias.filter_by(mailbox_id=self.id).count()
)
def is_proton(self) -> bool:
if (
self.email.endswith("@proton.me")
or self.email.endswith("@protonmail.com")
or self.email.endswith("@protonmail.ch")
or self.email.endswith("@proton.ch")
or self.email.endswith("@pm.me")
):
return True
from app.email_utils import get_email_local_part
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
# Proton is the first domain
if mx_domains and mx_domains[0][1] in (
"mail.protonmail.ch.",
"mailsec.protonmail.ch.",
):
return True
return False
@classmethod
def delete(cls, obj_id):
mailbox: Mailbox = cls.get(obj_id)
@ -2587,6 +2694,12 @@ class Mailbox(Base, ModelMixin):
return ret
@classmethod
def create(cls, **kw):
if "email" in kw:
kw["email"] = sanitize_email(kw["email"])
return super().create(**kw)
def __repr__(self):
return f"<Mailbox {self.id} {self.email}>"
@ -2925,6 +3038,8 @@ class Monitoring(Base, ModelMixin):
active_queue = sa.Column(sa.Integer, nullable=False)
deferred_queue = sa.Column(sa.Integer, nullable=False)
__table_args__ = (Index("ix_monitoring_created_at", "created_at"),)
class BatchImport(Base, ModelMixin):
__tablename__ = "batch_import"
@ -3050,6 +3165,8 @@ class Bounce(Base, ModelMixin):
email = sa.Column(sa.String(256), nullable=False, index=True)
info = sa.Column(sa.Text, nullable=True)
__table_args__ = (sa.Index("ix_bounce_created_at", "created_at"),)
class TransactionalEmail(Base, ModelMixin):
"""Storing all email addresses that receive transactional emails, including account email and mailboxes.
@ -3059,6 +3176,8 @@ class TransactionalEmail(Base, ModelMixin):
__tablename__ = "transactional_email"
email = sa.Column(sa.String(256), nullable=False, unique=False)
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
class Payout(Base, ModelMixin):
"""Referral payouts"""
@ -3111,7 +3230,7 @@ class MessageIDMatching(Base, ModelMixin):
# to track what email_log that has created this matching
email_log_id = sa.Column(
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
)
email_log = orm.relationship("EmailLog")
@ -3249,6 +3368,15 @@ class AdminAuditLog(Base):
},
)
@classmethod
def stop_trial(cls, admin_user_id: int, user_id: int):
cls.create(
admin_user_id=admin_user_id,
action=AuditLogActionEnum.stop_trial.value,
model="User",
model_id=user_id,
)
@classmethod
def disable_otp_fido(
cls, admin_user_id: int, user_id: int, had_otp: bool, had_fido: bool
@ -3444,7 +3572,7 @@ class PartnerSubscription(Base, ModelMixin):
class Newsletter(Base, ModelMixin):
__tablename__ = "newsletter"
subject = sa.Column(sa.String(), nullable=False, unique=True, index=True)
subject = sa.Column(sa.String(), nullable=False, index=True)
html = sa.Column(sa.Text)
plain_text = sa.Column(sa.Text)


@ -1 +1,3 @@
from . import views
__all__ = ["views"]


@ -1 +1,3 @@
from .views import authorize, token, user_info
__all__ = ["authorize", "token", "user_info"]


@ -140,7 +140,7 @@ def authorize():
Scope=Scope,
)
else: # POST - user allows or denies
if not current_user.is_authenticated or not current_user.is_active:
if not current_user.is_authenticated or not current_user.is_active():
LOG.i(
"Attempt to validate a OAUth allow request by an unauthenticated user"
)


@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
- the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
"""
res = set()
if type(arg_input) is str:
if isinstance(arg_input, str):
if " " in arg_input:
for x in arg_input.split(" "):
if x:


@ -5,3 +5,11 @@ from .views import (
account_activated,
extension_redirect,
)
__all__ = [
"index",
"final",
"setup_done",
"account_activated",
"extension_redirect",
]


@ -39,7 +39,6 @@ class _InnerLock:
lock_redis.storage.delete(lock_name)
def __call__(self, f: Callable[..., Any]):
if self.lock_suffix is None:
lock_suffix = f.__name__
else:


@ -5,3 +5,11 @@ from .views import (
provider1_callback,
provider2_callback,
)
__all__ = [
"index",
"phone_reservation",
"twilio_callback",
"provider1_callback",
"provider2_callback",
]


@ -7,11 +7,12 @@ from typing import Optional
from app.account_linking import SLPlan, SLPlanType
from app.config import PROTON_EXTRA_HEADER_NAME, PROTON_EXTRA_HEADER_VALUE
from app.errors import ProtonAccountNotVerified
from app.log import LOG
_APP_VERSION = "OauthClient_1.0.0"
PROTON_ERROR_CODE_NOT_EXISTS = 2501
PROTON_ERROR_CODE_HV_NEEDED = 9001
PLAN_FREE = 1
PLAN_PREMIUM = 2
@ -57,6 +58,15 @@ def convert_access_token(access_token_response: str) -> AccessCredentials:
)
def handle_response_not_ok(status: int, body: dict, text: str) -> Exception:
if status == HTTPStatus.UNPROCESSABLE_ENTITY:
res_code = body.get("Code")
if res_code == PROTON_ERROR_CODE_HV_NEEDED:
return ProtonAccountNotVerified()
return Exception(f"Unexpected status code. Wanted 200 and got {status}: " + text)
class ProtonClient(ABC):
@abstractmethod
def get_user(self) -> Optional[UserInformation]:
@ -124,11 +134,11 @@ class HttpProtonClient(ProtonClient):
@staticmethod
def __validate_response(res: Response) -> dict:
status = res.status_code
if status != HTTPStatus.OK:
raise Exception(
f"Unexpected status code. Wanted 200 and got {status}: " + res.text
)
as_json = res.json()
if status != HTTPStatus.OK:
raise HttpProtonClient.__handle_response_not_ok(
status=status, body=as_json, text=res.text
)
res_code = as_json.get("Code")
if not res_code or res_code != 1000:
raise Exception(

app/app/rate_limiter.py (new file, 40 lines)

@ -0,0 +1,40 @@
from datetime import datetime
from typing import Optional
import newrelic.agent
import redis.exceptions
import werkzeug.exceptions
from limits.storage import RedisStorage
from app.log import LOG
lock_redis: Optional[RedisStorage] = None
def set_redis_concurrent_lock(redis: RedisStorage):
global lock_redis
lock_redis = redis
def check_bucket_limit(
lock_name: Optional[str] = None,
max_hits: int = 5,
bucket_seconds: int = 3600,
):
# Calculate current bucket time
int_time = int(datetime.utcnow().timestamp())
bucket_id = int_time - (int_time % bucket_seconds)
bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
if not lock_redis:
return
try:
value = lock_redis.incr(bucket_lock_name, bucket_seconds)
if value > max_hits:
LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
newrelic.agent.record_custom_event(
"BucketRateLimit",
{"lock_name": lock_name, "bucket_seconds": bucket_seconds},
)
raise werkzeug.exceptions.TooManyRequests()
except (redis.exceptions.RedisError, AttributeError):
LOG.e("Cannot connect to redis")


@ -2,21 +2,23 @@ import flask
import limits.storage
from app.parallel_limiter import set_redis_concurrent_lock
from app.rate_limiter import set_redis_concurrent_lock as rate_limit_set_redis
from app.session import RedisSessionStore
def initialize_redis_services(app: flask.Flask, redis_url: str):
if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
storage = limits.storage.RedisStorage(redis_url)
app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
set_redis_concurrent_lock(storage)
rate_limit_set_redis(storage)
elif redis_url.startswith("redis+sentinel://"):
storage = limits.storage.RedisSentinelStorage(redis_url)
app.session_interface = RedisSessionStore(
storage.storage, storage.storage_slave, app
)
set_redis_concurrent_lock(storage)
rate_limit_set_redis(storage)
else:
raise RuntimeError(
f"Tried to set_redis_session with an invalid redis url: ${redis_url}"


@ -13,17 +13,29 @@ from app.config import (
LOCAL_FILE_UPLOAD,
UPLOAD_DIR,
URL,
AWS_ENDPOINT_URL,
)
if not LOCAL_FILE_UPLOAD:
_session = boto3.Session(
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
region_name=AWS_REGION,
)
from app.log import LOG
def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
_s3_client = None
def _get_s3client():
global _s3_client
if _s3_client is None:
args = {
"aws_access_key_id": AWS_ACCESS_KEY_ID,
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
"region_name": AWS_REGION,
}
if AWS_ENDPOINT_URL:
args["endpoint_url"] = AWS_ENDPOINT_URL
_s3_client = boto3.client("s3", **args)
return _s3_client
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
bs.seek(0)
if LOCAL_FILE_UPLOAD:
@ -34,7 +46,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
f.write(bs.read())
else:
_session.resource("s3").Bucket(BUCKET).put_object(
_get_s3client().put_object(
Bucket=BUCKET,
Key=key,
Body=bs,
ContentType=content_type,
@ -52,7 +65,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
f.write(bs.read())
else:
_session.resource("s3").Bucket(BUCKET).put_object(
_get_s3client().put_object(
Bucket=BUCKET,
Key=path,
Body=bs,
# Support saving a remote file using Http header
@ -67,12 +81,9 @@ def download_email(path: str) -> Optional[str]:
file_path = os.path.join(UPLOAD_DIR, path)
with open(file_path, "rb") as f:
return f.read()
resp = (
_session.resource("s3")
.Bucket(BUCKET)
.get_object(
Key=path,
)
resp = _get_s3client().get_object(
Bucket=BUCKET,
Key=path,
)
if not resp or "Body" not in resp:
return None
@ -88,8 +99,7 @@ def get_url(key: str, expires_in=3600) -> str:
if LOCAL_FILE_UPLOAD:
return URL + "/static/upload/" + key
else:
s3_client = _session.client("s3")
return s3_client.generate_presigned_url(
return _get_s3client().generate_presigned_url(
ExpiresIn=expires_in,
ClientMethod="get_object",
Params={"Bucket": BUCKET, "Key": key},
@ -100,5 +110,15 @@ def delete(path: str):
if LOCAL_FILE_UPLOAD:
os.remove(os.path.join(UPLOAD_DIR, path))
else:
o = _session.resource("s3").Bucket(BUCKET).Object(path)
o.delete()
_get_s3client().delete_object(Bucket=BUCKET, Key=path)
def create_bucket_if_not_exists():
s3client = _get_s3client()
buckets = s3client.list_buckets()
for bucket in buckets["Buckets"]:
if bucket["Name"] == BUCKET:
LOG.i("Bucket already exists")
return
s3client.create_bucket(Bucket=BUCKET)
LOG.i(f"Bucket {BUCKET} created")


@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
try:
data = pickle.loads(val)
return ServerSession(data, session_id=session_id)
except:
except Exception:
pass
return ServerSession(session_id=str(uuid.uuid4()))


@ -49,11 +49,11 @@ def random_string(length=10, include_digits=False):
def convert_to_id(s: str):
"""convert a string to id-like: remove space, remove special accent"""
s = s.replace(" ", "")
s = s.lower()
s = unidecode(s)
s = s.replace(" ", "")
return s
return s[:256]
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-."
@ -99,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
email_address = email_address.strip().replace(" ", "").replace("\n", " ")
if not not_lower:
email_address = email_address.lower()
return email_address
return email_address.replace("\u200f", "")
class NextUrlSanitizer:


@ -5,11 +5,11 @@ from typing import List, Tuple
import arrow
import requests
from sqlalchemy import func, desc, or_
from sqlalchemy import func, desc, or_, and_
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.sql import Insert
from sqlalchemy.sql import Insert, text
from app import s3, config
from app.alias_utils import nb_email_log_for_mailbox
@ -22,10 +22,9 @@ from app.email_utils import (
render,
email_can_be_used_as_mailbox,
send_email_with_rate_control,
normalize_reply_email,
is_valid_email,
get_email_domain_part,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.errors import ProtonPartnerNotSetUp
from app.log import LOG
from app.mail_sender import load_unsent_mails_from_fs_and_resend
@ -63,15 +62,19 @@ from app.proton.utils import get_proton_partner
from app.utils import sanitize_email
from server import create_light_app
DELETE_GRACE_DAYS = 30
def notify_trial_end():
for user in User.filter(
User.activated.is_(True), User.trial_end.isnot(None), User.lifetime.is_(False)
User.activated.is_(True),
User.trial_end.isnot(None),
User.trial_end >= arrow.now().shift(days=2),
User.trial_end < arrow.now().shift(days=3),
User.lifetime.is_(False),
).all():
try:
if user.in_trial() and arrow.now().shift(
days=3
) > user.trial_end >= arrow.now().shift(days=2):
if user.in_trial():
LOG.d("Send trial end email to user %s", user)
send_trial_end_soon_email(user)
# happens if user has been deleted in the meantime
@ -84,27 +87,49 @@ def delete_logs():
delete_refused_emails()
delete_old_monitoring()
for t in TransactionalEmail.filter(
for t_email in TransactionalEmail.filter(
TransactionalEmail.created_at < arrow.now().shift(days=-7)
):
TransactionalEmail.delete(t.id)
TransactionalEmail.delete(t_email.id)
for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
Bounce.delete(b.id)
Session.commit()
LOG.d("Delete EmailLog older than 2 weeks")
LOG.d("Deleting EmailLog older than 2 weeks")
max_dt = arrow.now().shift(weeks=-2)
nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete()
Session.commit()
total_deleted = 0
batch_size = 500
Session.execute("set session statement_timeout=30000").rowcount
queries_done = 0
cutoff_time = arrow.now().shift(days=-14)
rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
expected_queries = int(rows_to_delete / batch_size)
sql = text(
"DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
)
str_cutoff_time = cutoff_time.isoformat()
while total_deleted < rows_to_delete:
deleted_count = Session.execute(
sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
).rowcount
Session.commit()
total_deleted += deleted_count
queries_done += 1
LOG.i(
f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
)
if deleted_count < batch_size:
break
LOG.i("Delete %s email logs", nb_deleted)
LOG.i("Deleted %s email logs", total_deleted)
def delete_refused_emails():
for refused_email in RefusedEmail.filter_by(deleted=False).all():
for refused_email in (
RefusedEmail.filter_by(deleted=False).order_by(RefusedEmail.id).all()
):
if arrow.now().shift(days=1) > refused_email.delete_at >= arrow.now():
LOG.d("Delete refused email %s", refused_email)
if refused_email.path:
@ -138,7 +163,7 @@ def notify_premium_end():
send_email(
user.email,
f"Your subscription will end soon",
"Your subscription will end soon",
render(
"transactional/subscription-end.txt",
user=user,
@ -195,7 +220,7 @@ def notify_manual_sub_end():
LOG.d("Remind user %s that their manual sub is ending soon", user)
send_email(
user.email,
f"Your subscription will end soon",
"Your subscription will end soon",
render(
"transactional/manual-subscription-end.txt",
user=user,
@ -272,7 +297,11 @@ def compute_metric2() -> Metric2:
_24h_ago = now.shift(days=-1)
nb_referred_user_paid = 0
for user in User.filter(User.referral_id.isnot(None)):
for user in (
User.filter(User.referral_id.isnot(None))
.yield_per(500)
.enable_eagerloads(False)
):
if user.is_paid():
nb_referred_user_paid += 1
@ -563,21 +592,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
"""
monitoring_report += "\n====================================\n"
monitoring_report += f"""
monitoring_report += """
# Account bounce report:
"""
for email, bounces in bounce_report():
monitoring_report += f"{email}: {bounces}\n"
monitoring_report += f"""\n
monitoring_report += """\n
# Alias creation report:
"""
for email, nb_alias, date in alias_creation_report():
monitoring_report += f"{email}, {date}: {nb_alias}\n"
monitoring_report += f"""\n
monitoring_report += """\n
# Full bounce detail report:
"""
monitoring_report += all_bounce_report()
@ -1020,7 +1049,8 @@ async def check_hibp():
)
.filter(Alias.enabled)
.order_by(Alias.hibp_last_check.asc())
.all()
.yield_per(500)
.enable_eagerloads(False)
):
await queue.put(alias.id)
@ -1071,14 +1101,14 @@ def notify_hibp():
)
LOG.d(
f"Send new breaches found email to %s for %s breaches aliases",
"Send new breaches found email to %s for %s breaches aliases",
user,
len(breached_aliases),
)
send_email(
user.email,
f"You were in a data breach",
"You were in a data breach",
render(
"transactional/hibp-new-breaches.txt.jinja2",
user=user,
@ -1098,6 +1128,23 @@ def notify_hibp():
Session.commit()
def clear_users_scheduled_to_be_deleted(dry_run=False):
users = User.filter(
and_(
User.delete_on.isnot(None),
User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
)
).all()
for user in users:
LOG.i(
f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
)
if dry_run:
continue
User.delete(user.id)
Session.commit()
if __name__ == "__main__":
LOG.d("Start running cronjob")
parser = argparse.ArgumentParser()
@ -1164,3 +1211,6 @@ if __name__ == "__main__":
elif args.job == "send_undelivered_mails":
LOG.d("Sending undelivered emails")
load_unsent_mails_from_fs_and_resend()
elif args.job == "delete_scheduled_users":
LOG.d("Deleting users scheduled to be deleted")
clear_users_scheduled_to_be_deleted(dry_run=True)


@ -5,65 +5,66 @@ jobs:
schedule: "0 0 * * *"
captureStderr: true
- name: SimpleLogin Notify Trial Ends
command: python /code/cron.py -j notify_trial_end
shell: /bin/bash
schedule: "0 8 * * *"
captureStderr: true
- name: SimpleLogin Notify Manual Subscription Ends
command: python /code/cron.py -j notify_manual_subscription_end
shell: /bin/bash
schedule: "0 9 * * *"
captureStderr: true
- name: SimpleLogin Notify Premium Ends
command: python /code/cron.py -j notify_premium_end
shell: /bin/bash
schedule: "0 10 * * *"
captureStderr: true
- name: SimpleLogin Delete Logs
command: python /code/cron.py -j delete_logs
shell: /bin/bash
schedule: "0 11 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash
schedule: "0 12 * * *"
captureStderr: true
- name: SimpleLogin Sanity Check
command: python /code/cron.py -j sanity_check
shell: /bin/bash
schedule: "0 2 * * *"
captureStderr: true
- name: SimpleLogin Delete Old Monitoring records
command: python /code/cron.py -j delete_old_monitoring
shell: /bin/bash
schedule: "0 14 * * *"
schedule: "15 1 * * *"
captureStderr: true
- name: SimpleLogin Custom Domain check
command: python /code/cron.py -j check_custom_domain
shell: /bin/bash
schedule: "0 15 * * *"
schedule: "15 2 * * *"
captureStderr: true
- name: SimpleLogin HIBP check
command: python /code/cron.py -j check_hibp
shell: /bin/bash
schedule: "0 18 * * *"
schedule: "15 3 * * *"
captureStderr: true
concurrencyPolicy: Forbid
- name: SimpleLogin Notify HIBP breaches
command: python /code/cron.py -j notify_hibp
shell: /bin/bash
schedule: "0 19 * * *"
schedule: "15 4 * * *"
captureStderr: true
concurrencyPolicy: Forbid
- name: SimpleLogin Delete Logs
command: python /code/cron.py -j delete_logs
shell: /bin/bash
schedule: "15 5 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash
schedule: "15 6 * * *"
captureStderr: true
- name: SimpleLogin Notify Trial Ends
command: python /code/cron.py -j notify_trial_end
shell: /bin/bash
schedule: "15 8 * * *"
captureStderr: true
- name: SimpleLogin Notify Manual Subscription Ends
command: python /code/cron.py -j notify_manual_subscription_end
shell: /bin/bash
schedule: "15 9 * * *"
captureStderr: true
- name: SimpleLogin Notify Premium Ends
command: python /code/cron.py -j notify_premium_end
shell: /bin/bash
schedule: "15 10 * * *"
captureStderr: true
- name: SimpleLogin delete users scheduled to be deleted
command: python /code/cron.py -j delete_scheduled_users
shell: /bin/bash
schedule: "15 11 * * *"
captureStderr: true
concurrencyPolicy: Forbid


@ -388,7 +388,7 @@ Input:
- (Optional but recommended) `hostname` passed in query string
- Request Message Body in json (`Content-Type` is `application/json`)
- alias_prefix: string. The first part of the alias that the user can choose.
- signed_suffix: should be one of the suffixes returned in the `GET /api/v4/alias/options` endpoint.
- signed_suffix: should be one of the suffixes returned in the `GET /api/v5/alias/options` endpoint.
- mailbox_ids: list of mailbox_id that "owns" this alias
- (Optional) note: alias note
- (Optional) name: alias name
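A minimal request sketch for this input, written in Python. The endpoint path, the `Authentication` API-key header, and every value below are assumptions/placeholders to be checked against your instance's full API docs; only the body fields come from the spec above:

```python
# Hedged sketch only: endpoint path, header name, and values are placeholders.
import requests

API_URL = "https://app.mydomain.com"  # your SimpleLogin instance (assumption)
API_KEY = "<api-key>"                 # API key from the dashboard (assumption)

payload = {
    "alias_prefix": "newsletter",
    # must be one of the signed suffixes returned by GET /api/v5/alias/options
    "signed_suffix": "<signed-suffix-from-options>",
    "mailbox_ids": [1],
    "note": "created via the API",    # optional
    "name": "Newsletter",             # optional
}

resp = requests.post(
    f"{API_URL}/api/v3/alias/custom/new",   # assumed path; see the full API doc
    params={"hostname": "example.com"},     # optional but recommended
    headers={"Authentication": API_KEY},
    json=payload,
    timeout=10,
)
resp.raise_for_status()
print(resp.json())
```

Note that `signed_suffix` must be copied verbatim from the options endpoint response; it is signed server-side and cannot be constructed by hand.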


@ -1,4 +1,4 @@
# SSL, HTTPS, and HSTS
# SSL, HTTPS, HSTS, and additional security measures
It's highly recommended to enable SSL/TLS on your server, both for the web app and email server.
@ -58,3 +58,124 @@ Now, reload Nginx:
```bash
sudo systemctl reload nginx
```
## Additional security measures
For additional security, we recommend you take some extra steps.
### Enable Certificate Authority Authorization (CAA)
[Certificate Authority Authorization](https://letsencrypt.org/docs/caa/) is a step you can take to restrict the list of certificate authorities that are allowed to issue certificates for your domains.
Use [SSLMate's CAA Record Generator](https://sslmate.com/caa/) to create a **CAA record** with the following configuration:
- `flags`: `0`
- `tag`: `issue`
- `value`: `"letsencrypt.org"`
To verify that the DNS record is in place, the following command
```bash
dig @1.1.1.1 mydomain.com caa
```
should return:
```
mydomain.com. 3600 IN CAA 0 issue "letsencrypt.org"
```
### SMTP MTA Strict Transport Security (MTA-STS)
[MTA-STS](https://datatracker.ietf.org/doc/html/rfc8461) is an extra step you can take to advertise that your instance accepts, and can optionally enforce, TLS-secured SMTP connections, protecting email traffic in transit.
Enabling MTA-STS requires you to serve a policy file from the `mta-sts.mydomain.com` subdomain on a well-known route (`/.well-known/mta-sts.txt`).
Create a text file `/var/www/.well-known/mta-sts.txt` with the content:
```txt
version: STSv1
mode: testing
mx: app.mydomain.com
max_age: 86400
```
It is recommended to start with `mode: testing` to give yourself time to review failure reports. Add as many `mx:` entries as you have matching **MX records** in your DNS configuration.
Create a **TXT record** for `_mta-sts.mydomain.com.` with the following value:
```txt
v=STSv1; id=UNIX_TIMESTAMP
```
Replace `UNIX_TIMESTAMP` with the current date/time.
Use the following command to generate the record:
```bash
echo "v=STSv1; id=$(date +%s)"
```
To verify that the DNS record is in place, the following command
```bash
dig @1.1.1.1 _mta-sts.mydomain.com txt
```
should return a result similar to this one:
```
_mta-sts.mydomain.com. 3600 IN TXT "v=STSv1; id=1689416399"
```
Create an additional Nginx configuration in `/etc/nginx/sites-enabled/mta-sts` with the following content:
```
server {
server_name mta-sts.mydomain.com;
root /var/www;
listen 80;
location ^~ /.well-known {}
}
```
Restart Nginx with the following command:
```sh
sudo service nginx restart
```
A correct MTA-STS configuration also requires a valid certificate for the `mta-sts` subdomain, in addition to the one covering the hostname referenced by your **MX record**. In practice, the simplest setup is for `mta-sts.mydomain.com` and `app.mydomain.com` to share the same certificate.
The easiest way to achieve this is to _expand_ the certificate associated with `app.mydomain.com` to also cover the `mta-sts` subdomain, using the following command:
```sh
certbot --expand --nginx -d app.mydomain.com,mta-sts.mydomain.com
```
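Once the certificate covers the `mta-sts` subdomain, it is worth checking that the policy can actually be fetched over HTTPS, since that is how receiving mail servers retrieve it. A minimal check, sketched in Python (the URL assumes the standard `/.well-known/mta-sts.txt` path used above; `mydomain.com` is a placeholder for your own domain):

```python
# Fetch the MTA-STS policy the way a receiving MTA would: over HTTPS, with
# certificate validation (requests verifies certificates by default).
import requests

resp = requests.get(
    "https://mta-sts.mydomain.com/.well-known/mta-sts.txt", timeout=10
)
resp.raise_for_status()  # non-2xx responses raise an error
print(resp.text)         # should print the policy file created earlier
```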
### SMTP TLS Reporting
[TLSRPT](https://datatracker.ietf.org/doc/html/rfc8460) is used by SMTP systems to report failures in establishing TLS-secure sessions as broadcast by the MTA-STS configuration.
Configuring MTA-STS in `mode: testing` as shown in the previous section gives you time to review failures from some SMTP senders.
Create a **TXT record** for `_smtp._tls.mydomain.com.` with the following value:
```txt
v=TLSRPTv1; rua=mailto:YOUR_EMAIL
```
The TLSRPT configuration at the DNS level allows SMTP senders that fail to initiate TLS-secure sessions to send reports to a particular email address. We suggest creating a `tls-reports` alias in SimpleLogin for this purpose.
To verify that the DNS record is in place, the following command
```bash
dig @1.1.1.1 _smtp._tls.mydomain.com txt
```
should return a result similar to this one:
```
_smtp._tls.mydomain.com. 3600 IN TXT "v=TLSRPTv1; rua=mailto:tls-reports@mydomain.com"
```


@ -106,8 +106,6 @@ from app.email_utils import (
get_header_unicode,
generate_reply_email,
is_reverse_alias,
normalize_reply_email,
is_valid_email,
replace,
should_disable,
parse_id_from_bounce,
@ -123,6 +121,7 @@ from app.email_utils import (
generate_verp_email,
sl_formataddr,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.errors import (
NonReverseAliasInReplyPhase,
VERPTransactional,
@ -236,17 +235,17 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
contact.mail_from = mail_from
Session.commit()
else:
try:
contact_email_for_reply = (
contact_email if is_valid_email(contact_email) else ""
)
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
mail_from=mail_from,
reply_email=generate_reply_email(contact_email, alias)
if is_valid_email(contact_email)
else NOREPLY,
reply_email=generate_reply_email(contact_email_for_reply, alias),
automatic_created=True,
)
if not contact_email:
@ -262,7 +261,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
Session.commit()
except IntegrityError:
LOG.w("Contact %s %s already exist", alias, contact_email)
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
@ -280,6 +279,9 @@ def get_or_create_reply_to_contact(
except ValueError:
return
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
if not is_valid_email(contact_address):
LOG.w(
"invalid reply-to address %s. Parse from %s",
@ -348,6 +350,10 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
continue
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
contact_name = full_address.display_name
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
if contact:
# update the contact name if needed
if contact.name != full_address.display_name:
@ -355,9 +361,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
"Update contact %s name %s to %s",
contact,
contact.name,
full_address.display_name,
contact_name,
)
contact.name = full_address.display_name
contact.name = contact_name
Session.commit()
else:
LOG.d(
@ -372,7 +378,7 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=full_address.display_name,
name=contact_name,
reply_email=generate_reply_email(contact_email, alias),
is_cc=header.lower() == "cc",
automatic_created=True,
@ -541,12 +547,20 @@ def sign_msg(msg: Message) -> Message:
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
try:
signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
if not payload:
raise PGPException("Empty signature by gnupg")
signature.set_payload(payload)
except Exception:
LOG.e("Cannot sign, try using pgpy")
signature.set_payload(
sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
)
payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
if not payload:
raise PGPException("Empty signature by pgpy")
signature.set_payload(payload)
container.attach(signature)
@ -623,8 +637,12 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
user = alias.user
if user.disabled:
LOG.w("User %s disabled, disable forwarding emails for %s", user, alias)
if not user.is_active():
LOG.w(f"User {user} has been soft deleted")
return False, status.E502
if not user.can_send_or_receive():
LOG.i(f"User {user} cannot receive emails")
if should_ignore_bounce(envelope.mail_from):
return [(True, status.E207)]
else:
@ -846,38 +864,40 @@ def forward_email_to_mailbox(
f"""Email sent to {alias.email} from an invalid address and cannot be replied""",
)
delete_all_headers_except(
msg,
[
headers.FROM,
headers.TO,
headers.CC,
headers.SUBJECT,
headers.DATE,
# do not delete original message id
headers.MESSAGE_ID,
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
]
+ headers.MIME_HEADERS,
)
headers_to_keep = [
headers.FROM,
headers.TO,
headers.CC,
headers.SUBJECT,
headers.DATE,
# do not delete original message id
headers.MESSAGE_ID,
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.LIST_UNSUBSCRIBE,
headers.LIST_UNSUBSCRIBE_POST,
] + headers.MIME_HEADERS
if user.include_header_email_header:
headers_to_keep.append(headers.AUTHENTICATION_RESULTS)
delete_all_headers_except(msg, headers_to_keep)
if mailbox.generic_subject:
LOG.d("Use a generic subject for %s", mailbox)
orig_subject = msg[headers.SUBJECT]
orig_subject = get_header_unicode(orig_subject)
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
sender = msg[headers.FROM]
sender = get_header_unicode(sender)
msg = add_header(
msg,
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with "{orig_subject}" as subject""",
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
)
# create PGP email if needed
if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
LOG.d("Encrypt message using mailbox %s", mailbox)
if mailbox.generic_subject:
LOG.d("Use a generic subject for %s", mailbox)
orig_subject = msg[headers.SUBJECT]
orig_subject = get_header_unicode(orig_subject)
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
sender = msg[headers.FROM]
sender = get_header_unicode(sender)
msg = add_header(
msg,
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with "{orig_subject}" as subject""",
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
)
try:
msg = prepare_pgp_message(
@ -898,6 +918,11 @@ def forward_email_to_mailbox(
msg[headers.SL_EMAIL_LOG_ID] = str(email_log.id)
if user.include_header_email_header:
msg[headers.SL_ENVELOPE_FROM] = envelope.mail_from
if contact.name:
original_from = f"{contact.name} <{contact.website_email}>"
else:
original_from = contact.website_email
msg[headers.SL_ORIGINAL_FROM] = original_from
# when an alias isn't in the To: header, there's no way for users to know what alias has received the email
msg[headers.SL_ENVELOPE_TO] = alias.email
@ -1035,6 +1060,9 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
if not contact:
LOG.w(f"No contact with {reply_email} as reverse alias")
return False, status.E502
if not contact.user.is_active():
LOG.w(f"User {contact.user} has been soft deleted")
return False, status.E502
alias = contact.alias
alias_address: str = contact.alias.email
@ -1049,13 +1077,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
user = alias.user
mail_from = envelope.mail_from
if user.disabled:
LOG.e(
"User %s disabled, disable sending emails from %s to %s",
user,
alias,
contact,
)
if not user.can_send_or_receive():
LOG.i(f"User {user} cannot send emails")
return False, status.E504
# Check if we need to reject or quarantine based on dmarc
@ -1181,7 +1204,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
)
# replace reverse alias by real address for all contacts
for (reply_email, website_email) in contact_query.values(
for reply_email, website_email in contact_query.values(
Contact.reply_email, Contact.website_email
):
msg = replace(msg, reply_email, website_email)
@ -1236,7 +1259,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
# no need to replace TO header
LOG.d("email is sent in BCC mode")
del msg[headers.TO]
else:
replace_header_when_reply(msg, alias, headers.TO)
@ -1907,6 +1929,9 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
contact,
alias,
)
if not email_log.user.is_active():
LOG.d(f"User {email_log.user} is not active")
return status.E510
if email_log.is_reply:
content_type = msg.get_content_type().lower()
@ -1937,7 +1962,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
res.append((is_delivered, smtp_status))
for (is_success, smtp_status) in res:
for is_success, smtp_status in res:
# Consider all deliveries successful if 1 delivery is successful
if is_success:
return smtp_status
@ -1968,6 +1993,9 @@ def send_no_reply_response(mail_from: str, msg: Message):
if not mailbox:
LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
return
if not mailbox.user.is_active():
LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
return
send_email_at_most_times(
mailbox.user,
ALERT_TO_NOREPLY,
@ -2257,7 +2285,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
if nb_success > 0 and nb_non_success > 0:
LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")
for (is_success, smtp_status) in res:
for is_success, smtp_status in res:
# Consider all deliveries successful if 1 delivery is successful
if is_success:
return smtp_status


@ -192,7 +192,6 @@ amigos
amines
amnion
amoeba
amoral
amount
amours
ampere
@ -215,7 +214,6 @@ animus
anions
ankles
anklet
annals
anneal
annoys
annual
@ -364,7 +362,6 @@ auntie
aureus
aurora
author
autism
autumn
avails
avatar
@ -638,14 +635,12 @@ bigwig
bijoux
bikers
biking
bikini
bilges
bilked
bilker
billed
billet
billow
bimbos
binary
binder
binged
@ -710,8 +705,6 @@ blocks
blokes
blonde
blonds
bloods
bloody
blooms
bloops
blotch
@ -817,8 +810,6 @@ bounds
bounty
bovine
bovver
bowels
bowers
bowing
bowled
bowleg
@ -827,10 +818,8 @@ bowman
bowmen
bowwow
boxcar
boxers
boxier
boxing
boyish
braced
bracer
braces
@ -861,7 +850,6 @@ breach
breads
breaks
breams
breast
breath
breech
breeds
@ -872,9 +860,6 @@ brevet
brewed
brewer
briars
bribed
briber
bribes
bricks
bridal
brides
@ -926,13 +911,7 @@ buffed
buffer
buffet
bugged
bugger
bugled
bugler
bugles
builds
bulged
bulges
bulked
bulled
bullet
@ -1340,8 +1319,6 @@ clingy
clinic
clinks
clique
cloaca
cloaks
cloche
clocks
clomps
@ -1448,7 +1425,6 @@ comply
compos
conchs
concur
condom
condor
condos
coneys
@ -1568,8 +1544,6 @@ cranes
cranks
cranky
cranny
crapes
crappy
crated
crater
crates
@ -1585,7 +1559,6 @@ crazes
creaks
creaky
creams
creamy
crease
create
creche
@ -1594,8 +1567,6 @@ credos
creeds
creeks
creels
creeps
creepy
cremes
creole
crepes
@ -1728,9 +1699,6 @@ dainty
daises
damage
damask
dammed
dammit
damned
damped
dampen
damper
@ -1754,7 +1722,6 @@ darers
daring
darken
darker
darkie
darkly
darned
darner
@ -1763,8 +1730,6 @@ darter
dashed
dasher
dashes
daters
dating
dative
daubed
dauber
@ -1921,7 +1886,6 @@ dharma
dhotis
diadem
dialog
diaper
diatom
dibble
dicier
@ -1943,7 +1907,6 @@ digits
diking
diktat
dilate
dildos
dilute
dimity
dimmed
@ -2058,7 +2021,6 @@ dotted
double
doubly
doubts
douche
doughy
dourer
dourly
@ -2139,15 +2101,6 @@ duenna
duffed
duffer
dugout
dulcet
dulled
duller
dumber
dumbly
dumbos
dumdum
dumped
dumper
dunces
dunged
dunked
@ -2285,7 +2238,6 @@ endows
endued
endues
endure
enemas
energy
enfold
engage
@ -2333,7 +2285,6 @@ erects
ermine
eroded
erodes
erotic
errand
errant
errata
@ -2344,7 +2295,6 @@ eructs
erupts
escape
eschew
escort
escrow
escudo
espied
@ -2363,7 +2313,6 @@ ethnic
etudes
euchre
eulogy
eunuch
eureka
evaded
evader
@ -2392,7 +2341,6 @@ exempt
exerts
exeunt
exhale
exhort
exhume
exiled
exiles
@ -2415,7 +2363,6 @@ extant
extend
extent
extols
extort
extras
exuded
exudes
@ -2440,7 +2387,6 @@ faeces
faerie
faffed
fagged
faggot
failed
faille
fainer
@ -2473,18 +2419,10 @@ faring
farmed
farmer
farrow
farted
fascia
fasted
fasten
faster
father
fathom
fating
fatsos
fatten
fatter
fatwas
faucet
faults
faulty
@ -2532,7 +2470,6 @@ fesses
festal
fester
feting
fetish
fetter
fettle
feudal
@ -2617,9 +2554,7 @@ flaked
flakes
flambe
flamed
flamer
flames
flange
flanks
flared
flares
@ -2754,8 +2689,6 @@ franks
frappe
frauds
frayed
freaks
freaky
freely
freest
freeze
@ -2795,8 +2728,6 @@ fryers
frying
ftpers
ftping
fucked
fucker
fuddle
fudged
fudges
@ -2891,10 +2822,7 @@ gasbag
gashed
gashes
gasket
gasman
gasmen
gasped
gassed
gasses
gateau
gather
@ -3104,7 +3032,6 @@ grimed
grimes
grimly
grinds
gringo
griped
griper
gripes
@ -3186,8 +3113,6 @@ gypsum
gyrate
gyving
habits
hacked
hacker
hackle
hadith
haggis
@ -3195,8 +3120,6 @@ haggle
hailed
hairdo
haired
hajjes
hajjis
halest
haling
halite
@ -3223,11 +3146,8 @@ happen
haptic
harass
harden
harder
hardly
harems
haring
harked
harlot
harmed
harped
@ -3407,7 +3327,6 @@ hoofed
hoofer
hookah
hooked
hooker
hookup
hooped
hoopla
@ -3459,8 +3378,6 @@ huffed
hugely
hugest
hugged
hulled
huller
humane
humans
humble
@ -3667,8 +3584,6 @@ jacket
jading
jagged
jaguar
jailed
jailer
jalopy
jammed
jangle
@ -3689,8 +3604,6 @@ jejune
jelled
jellos
jennet
jerked
jerkin
jersey
jested
jester
@ -3814,11 +3727,7 @@ kidded
kidder
kiddie
kiddos
kidnap
kidney
killed
killer
kilned
kilted
kilter
kimono
@ -3827,15 +3736,11 @@ kinder
kindle
kindly
kingly
kinked
kiosks
kipped
kipper
kirsch
kismet
kissed
kisser
kisses
kiting
kitsch
kitted
@ -3847,10 +3752,6 @@ kluges
klutzy
knacks
knaves
kneads
kneels
knells
knifed
knifes
knight
knives
@ -4210,8 +4111,6 @@ lunges
lupine
lupins
luring
lurked
lurker
lusher
lushes
lushly
@ -4608,7 +4507,6 @@ muggle
mukluk
mulcts
mulish
mullah
mulled
mullet
mumble
@ -4721,9 +4619,6 @@ nickel
nicker
nickle
nieces
niggas
niggaz
nigger
niggle
nigher
nights
@ -4736,7 +4631,6 @@ ninjas
ninths
nipped
nipper
nipple
nitric
nitwit
nixing
@ -4781,15 +4675,6 @@ nozzle
nuance
nubbin
nubile
nuclei
nudest
nudged
nudges
nudism
nudist
nudity
nugget
nuking
numbed
number
numbly
@ -4804,7 +4689,6 @@ nutter
nuzzle
nybble
nylons
nympho
nymphs
oafish
oaring
@ -4885,7 +4769,6 @@ opting
option
opuses
oracle
orally
orange
orated
orates
@ -4897,7 +4780,6 @@ ordeal
orders
ordure
organs
orgasm
orgies
oriels
orient
@ -4993,10 +4875,6 @@ pander
panels
panics
panned
panted
pantie
pantos
pantry
papacy
papaya
papers
@ -5078,7 +4956,6 @@ pebble
pebbly
pecans
pecked
pecker
pectic
pectin
pedalo
@ -5151,9 +5028,6 @@ phenom
phials
phlegm
phloem
phobia
phobic
phoebe
phoned
phones
phoney
@ -5228,9 +5102,6 @@ piques
piracy
pirate
pirogi
pissed
pisser
pisses
pistes
pistil
pistol
@ -5311,8 +5182,6 @@ pogrom
points
pointy
poised
poises
poison
pokers
pokeys
pokier
@ -5422,7 +5291,6 @@ preyed
priced
prices
pricey
pricks
prided
prides
priers
@ -5602,14 +5470,9 @@ rabbit
rabble
rabies
raceme
racers
racial
racier
racily
racing
racism
racist
racked
racket
radars
radial
@ -5661,8 +5524,6 @@ rapers
rapids
rapier
rapine
raping
rapist
rapped
rappel
rapper
@ -5747,7 +5608,6 @@ recoup
rectal
rector
rectos
rectum
recurs
recuse
redact
@ -5891,7 +5751,6 @@ resume
retail
retain
retake
retard
retell
retest
retied
@ -6125,8 +5984,6 @@ sadden
sadder
saddle
sadhus
sadism
sadist
safari
safely
safest
@ -6364,16 +6221,6 @@ severs
sewage
sewers
sewing
sexier
sexily
sexing
sexism
sexist
sexpot
sextet
sexton
sexual
shabby
shacks
shaded
shades
@ -6383,10 +6230,7 @@ shaggy
shaken
shaker
shakes
shalom
shaman
shamed
shames
shandy
shanks
shanty
@ -6432,7 +6276,6 @@ shirks
shirrs
shirts
shirty
shitty
shiver
shoals
shoats
@ -6575,9 +6418,6 @@ slangy
slants
slated
slates
slaved
slaver
slaves
slayed
slayer
sleaze
@ -6672,7 +6512,6 @@ snarks
snarky
snarls
snarly
snatch
snazzy
sneaks
sneaky
@ -6716,7 +6555,6 @@ socket
sodded
sodden
sodium
sodomy
soever
soften
softer
@ -7468,7 +7306,6 @@ torrid
torsos
tortes
tossed
tosser
tosses
tossup
totals
@ -7686,7 +7523,6 @@ unhook
unhurt
unions
unique
unisex
unison
united
unites
@ -7793,7 +7629,6 @@ vacant
vacate
vacuum
vagary
vagina
vaguer
vainer
vainly
@ -7930,9 +7765,6 @@ votive
vowels
vowing
voyage
voyeur
vulgar
vulvae
wabbit
wacker
wackos
@ -7975,7 +7807,6 @@ wander
wangle
waning
wanked
wanker
wanner
wanted
wanton


@ -89,7 +89,6 @@ aghast
agile
agility
aging
agnostic
agonize
agonizing
agony
@ -375,8 +374,6 @@ augmented
august
authentic
author
autism
autistic
autograph
automaker
automated
@ -446,7 +443,6 @@ backyard
bacon
bacteria
bacterium
badass
badge
badland
badly
@ -1106,7 +1102,6 @@ clinic
clinking
clip
clique
cloak
clobber
clock
clone
@ -1776,7 +1771,6 @@ diagnosis
diagram
dial
diameter
diaper
diaphragm
diary
dice
@ -1950,7 +1944,6 @@ dosage
dose
dotted
doubling
douche
dove
down
dowry
@ -2032,9 +2025,6 @@ duffel
dugout
duh
duke
duller
dullness
duly
dumping
dumpling
dumpster
@ -2527,8 +2517,6 @@ feisty
feline
felt-tip
feminine
feminism
feminist
feminize
femur
fence
@ -2667,7 +2655,6 @@ fondness
fondue
font
food
fool
footage
football
footbath
@ -2777,7 +2764,6 @@ gag
gainfully
gaining
gains
gala
gallantly
galleria
gallery
@ -3028,7 +3014,6 @@ groom
groove
grooving
groovy
grope
ground
grouped
grout
@ -3148,7 +3133,6 @@ happiness
happy
harbor
hardcopy
hardcore
hardcover
harddisk
hardened
@ -3164,8 +3148,6 @@ hardware
hardwired
hardwood
hardy
harmful
harmless
harmonica
harmonics
harmonize
@ -3340,7 +3322,6 @@ identical
identify
identity
ideology
idiocy
idiom
idly
igloo
@ -3357,7 +3338,6 @@ imaging
imbecile
imitate
imitation
immature
immerse
immersion
imminent
@ -3387,14 +3367,10 @@ implode
implosion
implosive
imply
impolite
important
importer
impose
imposing
impotence
impotency
impotent
impound
imprecise
imprint
@ -3424,8 +3400,6 @@ irritable
irritably
irritant
irritate
islamic
islamist
isolated
isolating
isolation
@ -3524,7 +3498,6 @@ june
junior
juniper
junkie
junkman
junkyard
jurist
juror
@ -3570,9 +3543,6 @@ king
kinship
kinsman
kinswoman
kissable
kisser
kissing
kitchen
kite
kitten
@ -3649,7 +3619,6 @@ laundry
laurel
lavender
lavish
laxative
lazily
laziness
lazy
@ -3690,7 +3659,6 @@ liable
liberty
librarian
library
licking
licorice
lid
life
@ -3741,8 +3709,6 @@ livestock
lividly
living
lizard
lubricant
lubricate
lucid
luckily
luckiness
@ -3878,7 +3844,6 @@ marshland
marshy
marsupial
marvelous
marxism
mascot
masculine
mashed
@ -3914,8 +3879,6 @@ maximum
maybe
mayday
mayflower
moaner
moaning
mobile
mobility
mobilize
@ -4124,7 +4087,6 @@ nemeses
nemesis
neon
nephew
nerd
nervous
nervy
nest
@ -4139,7 +4101,6 @@ never
next
nibble
nickname
nicotine
niece
nifty
nimble
@ -4167,14 +4128,10 @@ nuptials
nursery
nursing
nurture
nutcase
nutlike
nutmeg
nutrient
nutshell
nuttiness
nutty
nuzzle
nylon
oaf
oak
@ -4205,7 +4162,6 @@ obstinate
obstruct
obtain
obtrusive
obtuse
obvious
occultist
occupancy
@ -4446,7 +4402,6 @@ palpitate
paltry
pampered
pamperer
pampers
pamphlet
panama
pancake
@ -4651,7 +4606,6 @@ plated
platform
plating
platinum
platonic
platter
platypus
plausible
@ -4777,8 +4731,6 @@ prancing
pranker
prankish
prankster
prayer
praying
preacher
preaching
preachy
@ -4796,8 +4748,6 @@ prefix
preflight
preformed
pregame
pregnancy
pregnant
preheated
prelaunch
prelaw
@ -4937,7 +4887,6 @@ prudishly
prune
pruning
pry
psychic
public
publisher
pucker
@ -4957,8 +4906,7 @@ punctual
punctuate
punctured
pungent
punisher
punk
punishe
pupil
puppet
puppy
@ -5040,7 +4988,6 @@ quote
rabid
race
racing
racism
rack
racoon
radar
@ -5155,7 +5102,6 @@ recount
recoup
recovery
recreate
rectal
rectangle
rectified
rectify
@ -5622,7 +5568,6 @@ sarcastic
sardine
sash
sasquatch
sassy
satchel
satiable
satin
@ -5651,7 +5596,6 @@ scaling
scallion
scallop
scalping
scam
scandal
scanner
scanning
@ -5928,8 +5872,6 @@ silent
silica
silicon
silk
silliness
silly
silo
silt
silver
@ -5991,7 +5933,6 @@ skimmer
skimming
skimpily
skincare
skinhead
skinless
skinning
skinny
@ -6197,7 +6138,6 @@ splinter
splotchy
splurge
spoilage
spoiled
spoiler
spoiling
spoils
@ -6610,7 +6550,6 @@ swimmer
swimming
swimsuit
swimwear
swinger
swinging
swipe
swirl
@ -7079,7 +7018,6 @@ undocked
undoing
undone
undrafted
undress
undrilled
undusted
undying
@ -7522,9 +7460,7 @@ villain
vindicate
vineyard
vintage
violate
violation
violator
violet
violin
viper

File diff suppressed because it is too large.


@ -0,0 +1,42 @@
"""empty message
Revision ID: 01827104004b
Revises: 2634b41f54db
Create Date: 2023-07-28 19:39:28.675490
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '01827104004b'
down_revision = '2634b41f54db'
branch_labels = None
depends_on = None
def upgrade():
with op.get_context().autocommit_block():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_alias_hibp_last_check'), 'alias', ['hibp_last_check'], unique=False, postgresql_concurrently=True)
op.create_index('ix_bounce_created_at', 'bounce', ['created_at'], unique=False, postgresql_concurrently=True)
op.create_index('ix_monitoring_created_at', 'monitoring', ['created_at'], unique=False, postgresql_concurrently=True)
op.create_index('ix_transactional_email_created_at', 'transactional_email', ['created_at'], unique=False, postgresql_concurrently=True)
op.create_index(op.f('ix_users_activated'), 'users', ['activated'], unique=False, postgresql_concurrently=True)
op.create_index('ix_users_activated_trial_end_lifetime', 'users', ['activated', 'trial_end', 'lifetime'], unique=False, postgresql_concurrently=True)
op.create_index(op.f('ix_users_referral_id'), 'users', ['referral_id'], unique=False, postgresql_concurrently=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_users_referral_id'), table_name='users')
op.drop_index('ix_users_activated_trial_end_lifetime', table_name='users')
op.drop_index(op.f('ix_users_activated'), table_name='users')
op.drop_index('ix_transactional_email_created_at', table_name='transactional_email')
op.drop_index('ix_monitoring_created_at', table_name='monitoring')
op.drop_index('ix_bounce_created_at', table_name='bounce')
op.drop_index(op.f('ix_alias_hibp_last_check'), table_name='alias')
# ### end Alembic commands ###


@ -0,0 +1,33 @@
"""empty message
Revision ID: 0a5701a4f5e4
Revises: 01827104004b
Create Date: 2023-09-07 15:28:10.122756
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0a5701a4f5e4'
down_revision = '01827104004b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('delete_on', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True))
with op.get_context().autocommit_block():
op.create_index('ix_users_delete_on', 'users', ['delete_on'], unique=False, postgresql_concurrently=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index('ix_users_delete_on', table_name='users', postgresql_concurrently=True)
op.drop_column('users', 'delete_on')
# ### end Alembic commands ###


@ -0,0 +1,34 @@
"""empty message
Revision ID: ec7fdde8da9f
Revises: 0a5701a4f5e4
Create Date: 2023-09-28 18:09:48.016620
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "ec7fdde8da9f"
down_revision = "0a5701a4f5e4"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.create_index(
"ix_email_log_created_at", "email_log", ["created_at"], unique=False
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index("ix_email_log_created_at", table_name="email_log")
# ### end Alembic commands ###


@ -0,0 +1,39 @@
"""empty message
Revision ID: 46ecb648a47e
Revises: ec7fdde8da9f
Create Date: 2023-10-05 10:43:35.668902
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "46ecb648a47e"
down_revision = "ec7fdde8da9f"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.create_index(
op.f("ix_message_id_matching_email_log_id"),
"message_id_matching",
["email_log_id"],
unique=False,
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index(
op.f("ix_message_id_matching_email_log_id"),
table_name="message_id_matching",
)
# ### end Alembic commands ###

View File

@ -0,0 +1,31 @@
"""empty message
Revision ID: 4bc54632d9aa
Revises: 46ecb648a47e
Create Date: 2023-11-07 14:02:17.610226
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4bc54632d9aa'
down_revision = '46ecb648a47e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_newsletter_subject', table_name='newsletter')
op.create_index(op.f('ix_newsletter_subject'), 'newsletter', ['subject'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_newsletter_subject'), table_name='newsletter')
op.create_index('ix_newsletter_subject', 'newsletter', ['subject'], unique=True)
# ### end Alembic commands ###

View File

@ -0,0 +1,29 @@
"""empty message
Revision ID: 818b0a956205
Revises: 4bc54632d9aa
Create Date: 2024-02-01 10:43:46.253184
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818b0a956205'
down_revision = '4bc54632d9aa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('alias', 'last_email_log_id')
# ### end Alembic commands ###

0
app/monitor/__init__.py Normal file
View File

21
app/monitor/metric.py Normal file
View File

@ -0,0 +1,21 @@
from dataclasses import dataclass
from typing import List
@dataclass
class UpcloudRecord:
db_role: str
label: str
time: str
value: float
@dataclass
class UpcloudMetric:
metric_name: str
records: List[UpcloudRecord]
@dataclass
class UpcloudMetrics:
metrics: List[UpcloudMetric]
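To make the shape of these containers concrete, here is an illustrative construction of the nested structure (all values are invented for the example):

from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord

example = UpcloudMetrics(
    metrics=[
        UpcloudMetric(
            metric_name="cpu_usage",
            records=[
                UpcloudRecord(
                    db_role="master",
                    label="db-node-1 (master)",  # invented host label
                    time="2023-10-05T10:00:00Z",
                    value=12.5,
                )
            ],
        )
    ]
)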

View File

@ -0,0 +1,20 @@
from app.config import UPCLOUD_DB_ID, UPCLOUD_PASSWORD, UPCLOUD_USERNAME
from app.log import LOG
from monitor.newrelic import NewRelicClient
from monitor.upcloud import UpcloudClient
class MetricExporter:
def __init__(self, newrelic_license: str):
self.__upcloud = UpcloudClient(
username=UPCLOUD_USERNAME, password=UPCLOUD_PASSWORD
)
self.__newrelic = NewRelicClient(newrelic_license)
def run(self):
try:
metrics = self.__upcloud.get_metrics(UPCLOUD_DB_ID)
self.__newrelic.send(metrics)
LOG.info("Upcloud metrics sent to NewRelic")
except Exception as e:
LOG.warn(f"Could not export metrics: {e}")

26
app/monitor/newrelic.py Normal file
View File

@ -0,0 +1,26 @@
from monitor.metric import UpcloudMetrics
from newrelic_telemetry_sdk import GaugeMetric, MetricClient
_NEWRELIC_BASE_HOST = "metric-api.eu.newrelic.com"
class NewRelicClient:
def __init__(self, license_key: str):
self.__client = MetricClient(license_key=license_key, host=_NEWRELIC_BASE_HOST)
def send(self, metrics: UpcloudMetrics):
batch = []
for metric in metrics.metrics:
for record in metric.records:
batch.append(
GaugeMetric(
name=f"upcloud.db.{metric.metric_name}",
value=record.value,
tags={"host": record.label, "db_role": record.db_role},
)
)
response = self.__client.send_batch(batch)
response.raise_for_status()

82
app/monitor/upcloud.py Normal file
View File

@ -0,0 +1,82 @@
from app.log import LOG
from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord
import base64
import requests
from typing import Any
BASE_URL = "https://api.upcloud.com"
def get_metric(json: Any, metric: str) -> UpcloudMetric:
records = []
if metric in json:
metric_data = json[metric]
data = metric_data["data"]
cols = list(map(lambda x: x["label"], data["cols"][1:]))
latest = data["rows"][-1]
time = latest[0]
for column_idx in range(len(cols)):
value = latest[1 + column_idx]
# If the latest value is None, try to fetch the second to last
if value is None:
value = data["rows"][-2][1 + column_idx]
if value is not None:
label = cols[column_idx]
if "(master)" in label:
db_role = "master"
else:
db_role = "standby"
records.append(
UpcloudRecord(time=time, db_role=db_role, label=label, value=value)
)
else:
LOG.warn(f"Could not get value for metric {metric}")
return UpcloudMetric(metric_name=metric, records=records)
def get_metrics(json: Any) -> UpcloudMetrics:
return UpcloudMetrics(
metrics=[
get_metric(json, "cpu_usage"),
get_metric(json, "disk_usage"),
get_metric(json, "diskio_reads"),
get_metric(json, "diskio_writes"),
get_metric(json, "load_average"),
get_metric(json, "mem_usage"),
get_metric(json, "net_receive"),
get_metric(json, "net_send"),
]
)
class UpcloudClient:
def __init__(self, username: str, password: str):
if not username:
raise Exception("UpcloudClient username must be set")
if not password:
raise Exception("UpcloudClient password must be set")
client = requests.Session()
encoded_auth = base64.b64encode(
f"{username}:{password}".encode("utf-8")
).decode("utf-8")
client.headers = {"Authorization": f"Basic {encoded_auth}"}
self.__client = client
def get_metrics(self, db_uuid: str) -> UpcloudMetrics:
url = f"{BASE_URL}/1.3/database/{db_uuid}/metrics?period=hour"
LOG.d(f"Performing request to {url}")
response = self.__client.get(url)
LOG.d(f"Status code: {response.status_code}")
if response.status_code != 200:
return UpcloudMetrics(metrics=[])
as_json = response.json()
return get_metrics(as_json)
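The parsing above implies a specific response shape: each requested metric key maps to a chart-style object whose data.cols list starts with the time column and whose data.rows list ends with the most recent sample. An invented payload that get_metrics() would accept (values are illustrative, not taken from the UpCloud API documentation):

example_payload = {
    "cpu_usage": {
        "data": {
            "cols": [
                {"label": "time"},
                {"label": "db-node-1 (master)"},
                {"label": "db-node-2 (standby)"},
            ],
            "rows": [
                ["2023-10-05T10:00:00Z", 10.0, 9.5],
                ["2023-10-05T10:00:30Z", 12.5, None],  # None falls back to the previous row
            ],
        }
    }
}

metrics = get_metrics(example_payload)
# -> one populated "cpu_usage" metric (two records) and seven empty metrics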

View File

@ -1,3 +1,4 @@
import configparser
import os
import subprocess
from time import sleep
@ -7,6 +8,7 @@ import newrelic.agent
from app.db import Session
from app.log import LOG
from monitor.metric_exporter import MetricExporter
# the number of consecutive fails
# if more than _max_nb_fails, alert
@ -19,6 +21,18 @@ _max_nb_fails = 10
# the maximum number of emails in incoming & active queue
_max_incoming = 50
_NR_CONFIG_FILE_LOCATION_VAR = "NEW_RELIC_CONFIG_FILE"
def get_newrelic_license() -> str:
nr_file = os.environ.get(_NR_CONFIG_FILE_LOCATION_VAR, None)
if nr_file is None:
raise Exception(f"{_NR_CONFIG_FILE_LOCATION_VAR} not defined")
config = configparser.ConfigParser()
config.read(nr_file)
return config["newrelic"]["license_key"]
@newrelic.agent.background_task()
def log_postfix_metrics():
@ -80,10 +94,13 @@ def log_nb_db_connection():
if __name__ == "__main__":
exporter = MetricExporter(get_newrelic_license())
while True:
log_postfix_metrics()
log_nb_db_connection()
Session.close()
exporter.run()
# 1 min
sleep(60)
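get_newrelic_license() expects the file named by NEW_RELIC_CONFIG_FILE to be a standard INI file with a [newrelic] section containing license_key. A hypothetical, self-contained sketch of producing such a file and pointing the environment variable at it (the key is a placeholder):

import configparser
import os
import tempfile

config = configparser.ConfigParser()
config["newrelic"] = {"license_key": "PLACEHOLDER-LICENSE-KEY"}

with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as fh:
    config.write(fh)

os.environ["NEW_RELIC_CONFIG_FILE"] = fh.name
# get_newrelic_license() would now return "PLACEHOLDER-LICENSE-KEY"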

View File

@ -0,0 +1,44 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Backfill alias las use"
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 1000
el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
for row in rows:
Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
Session.commit()
updated += 1
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")

View File

@ -0,0 +1,53 @@
#!/usr/bin/env python3
import argparse
import time
from app import config
from app.email_utils import generate_reply_email
from app.email_validation import is_valid_email
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
el_query = "SELECT id, alias_id, website_email from contact where id>=:last_id AND reply_email=:reply_email ORDER BY id ASC LIMIT :step"
update_query = "UPDATE contact SET reply_email=:reply_email WHERE id=:contact_id "
updated = 0
start_time = time.time()
step = 100
last_id = 0
print(f"Replacing contacts with reply_email={config.NOREPLY}")
while True:
rows = Session.execute(
el_query, {"last_id": last_id, "reply_email": config.NOREPLY, "step": step}
)
loop_updated = 0
for row in rows:
contact_id = row[0]
alias_id = row[1]
last_id = contact_id
website_email = row[2]
contact_email_for_reply = website_email if is_valid_email(website_email) else ""
alias = Alias.get(alias_id)
        if alias is None:
            print(f"CANNOT find alias {alias_id} in database for contact {contact_id}")
            # Skip contacts whose alias no longer exists instead of passing None on
            continue
reply_email = generate_reply_email(contact_email_for_reply, alias)
print(
f"Replacing contact {contact_id} with {website_email} reply_email for {reply_email}"
)
Session.execute(
update_query, {"contact_id": row[0], "reply_email": reply_email}
)
Session.commit()
updated += 1
loop_updated += 1
elapsed = time.time() - start_time
print(f"\rContact {last_id} done")
if loop_updated == 0:
break
print("")

502
app/poetry.lock generated

File diff suppressed because it is too large

View File

@ -18,6 +18,10 @@ exclude = '''
)
'''
[tool.ruff]
ignore-init-module-imports = true
exclude = [".venv", "migrations"]
[tool.djlint]
indent = 2
profile = "jinja"
@ -53,7 +57,7 @@ packages = [
include = ["templates/*", "templates/**/*", "local_data/*.txt"]
[tool.poetry.dependencies]
python = "^3.7.2"
python = "^3.10"
flask = "^1.1.2"
flask_login = "^0.5.0"
wtforms = "^2.3.3"
@ -96,7 +100,6 @@ pyspf = "^2.0.14"
Flask-Limiter = "^1.4"
memory_profiler = "^0.57.0"
gevent = "22.10.2"
aiospamc = "^0.6.1"
email_validator = "^1.1.1"
PGPy = "0.5.4"
coinbase-commerce = "^1.0.1"
@ -111,6 +114,8 @@ Deprecated = "^1.2.13"
cryptography = "37.0.1"
SQLAlchemy = "1.3.24"
redis = "^4.5.3"
newrelic-telemetry-sdk = "^0.5.0"
aiospamc = "0.10"
[tool.poetry.dev-dependencies]
pytest = "^7.0.0"
@ -120,6 +125,9 @@ black = "^22.1.0"
djlint = "^1.3.0"
pylint = "^2.14.4"
[tool.poetry.group.dev.dependencies]
ruff = "^0.1.5"
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"

View File

@ -228,6 +228,8 @@ def load_user(alternative_id):
sentry_sdk.set_user({"email": user.email, "id": user.id})
if user.disabled:
return None
if not user.is_active():
return None
return user
@ -407,8 +409,10 @@ def jinja2_filter(app):
@app.context_processor
def inject_stage_and_region():
now = arrow.now()
return dict(
YEAR=arrow.now().year,
YEAR=now.year,
NOW=now,
URL=URL,
SENTRY_DSN=SENTRY_FRONT_END_DSN,
VERSION=SHA1,
@ -641,7 +645,7 @@ def setup_paddle_callback(app: Flask):
@app.route("/paddle_coupon", methods=["GET", "POST"])
def paddle_coupon():
LOG.d(f"paddle coupon callback %s", request.form)
LOG.d("paddle coupon callback %s", request.form)
if not paddle_utils.verify_incoming_request(dict(request.form)):
LOG.e("request not coming from paddle. Request data:%s", dict(request.form))

View File

@ -1,13 +1,12 @@
from time import sleep
import flask_migrate
from IPython import embed
from sqlalchemy_utils import create_database, database_exists, drop_database
from app import models
from app.config import DB_URI
from app.models import *
from app.db import Session
from app.log import LOG
from app.models import User, RecoveryCode
if False:
# noinspection PyUnreachableCode

Some files were not shown because too many files have changed in this diff