Compare commits

11 Commits

SHA1
6051d72691
c31a75a9ef
ef289385ff
9b12a2ad33
8eb19d88f3
e36e9d3077
b2430cbc5b
1258115397
38c134d903
cd77e4cc2d
87aedf3207
@@ -17,6 +17,7 @@ steps:
   image: thegeeklab/drone-docker-buildx
   privileged: true
   settings:
+    provenance: false
     dockerfile: app/Dockerfile
     context: app
     registry: git.mrmeeb.stream
@@ -1,9 +1,7 @@
 # SimpleLogin
 
-[](https://drone.mrmeeb.stream/MrMeeb/simple-login)
+This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks the simplelogin/app GitHub repo once a day, and builds the latest release automatically if it is newer than the currently built version.
 
-This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks once a day, and builds the latest one automatically if it is newer than the currentlty built version.
+I did this to simplify deployment of my self-hosted SimpleLogin instance. SimpleLogin do not provide an up-to-date version for self-hosting, leaving you with the options of either running a very outdated version with no app support, a beta version, or their `simplelogin/app-ci` version. This last option works well if you use an x86 machine, but I'm running SimpleLogin on an ARM machine. Since I don't want to have to build containers on the machine itself, this repo handles that for me.
 
-This exists to simplify deployment of SimpleLogin in a self-hosted capacity, while also allowing the use of the latest version; SimpleLogin do not provide an up-to-date version for this use.
+As a result, this image is built for both amd64 and arm64 devices.
 
-The image is built for amd64 and arm64 devices.
@@ -169,6 +169,12 @@ For HTML templates, we use `djlint`. Before creating a pull request, please run
 poetry run djlint --check templates
 ```
 
+If some files aren't properly formatted, you can format all files with
+
+```bash
+poetry run djlint --reformat .
+```
+
 ## Test sending email
 
 [swaks](http://www.jetmore.org/john/code/swaks/) is used for sending test emails to the `email_handler`.
@@ -23,10 +23,10 @@ COPY poetry.lock pyproject.toml ./
 # Install and setup poetry
 RUN pip install -U pip \
     && apt-get update \
-    && apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
+    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev \
     && curl -sSL https://install.python-poetry.org | python3 - \
     # Remove curl and netcat from the image
-    && apt-get purge -y curl netcat \
+    && apt-get purge -y curl netcat-traditional \
     # Run poetry
     && poetry config virtualenvs.create false \
     && poetry install --no-interaction --no-ansi --no-root \
@@ -162,8 +162,6 @@ def get_alias_suffixes(
             or user.default_alias_public_domain_id != sl_domain.id
         ):
             alias_suffixes.append(alias_suffix)
-            # If no default domain mark it as found
-            default_domain_found = user.default_alias_public_domain_id is None
         else:
             default_domain_found = True
             alias_suffixes.insert(0, alias_suffix)
@@ -13,8 +13,8 @@ from app.db import Session
 from app.email_utils import (
     mailbox_already_used,
     email_can_be_used_as_mailbox,
-    is_valid_email,
 )
+from app.email_validation import is_valid_email
 from app.log import LOG
 from app.models import Mailbox, Job
 from app.utils import sanitize_email
@@ -534,3 +534,8 @@ SKIP_MX_LOOKUP_ON_CHECK = False
 DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
 
 SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
+MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
+
+UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
+UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
+UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
@@ -13,10 +13,10 @@ from app import config, parallel_limiter
 from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.email_utils import (
-    is_valid_email,
     generate_reply_email,
     parse_full_address,
 )
+from app.email_validation import is_valid_email
 from app.errors import (
     CannotCreateContactForReverseAlias,
     ErrContactErrorUpgradeNeeded,
@@ -3,9 +3,11 @@ from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators
 
+from app import config
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
+from app.extensions import limiter
 from app.models import ApiKey
 from app.utils import CSRFValidationForm
 
@@ -14,9 +16,34 @@ class NewApiKeyForm(FlaskForm):
     name = StringField("Name", validators=[validators.DataRequired()])
 
 
+def clean_up_unused_or_old_api_keys(user_id: int):
+    total_keys = ApiKey.filter_by(user_id=user_id).count()
+    if total_keys <= config.MAX_API_KEYS:
+        return
+    # Remove oldest unused
+    for api_key in (
+        ApiKey.filter_by(user_id=user_id, last_used=None)
+        .order_by(ApiKey.created_at.asc())
+        .all()
+    ):
+        Session.delete(api_key)
+        total_keys -= 1
+        if total_keys <= config.MAX_API_KEYS:
+            return
+    # Clean up oldest used
+    for api_key in (
+        ApiKey.filter_by(user_id=user_id).order_by(ApiKey.last_used.asc()).all()
+    ):
+        Session.delete(api_key)
+        total_keys -= 1
+        if total_keys <= config.MAX_API_KEYS:
+            return
+
+
 @dashboard_bp.route("/api_key", methods=["GET", "POST"])
 @login_required
 @sudo_required
+@limiter.limit("10/hour")
 def api_key():
     api_keys = (
         ApiKey.filter(ApiKey.user_id == current_user.id)
@@ -50,6 +77,7 @@ def api_key():
 
     elif request.form.get("form-name") == "create":
         if new_api_key_form.validate():
+            clean_up_unused_or_old_api_keys(current_user.id)
             new_api_key = ApiKey.create(
                 name=new_api_key_form.name.data, user_id=current_user.id
             )
@@ -8,6 +8,7 @@ from wtforms import PasswordField, validators
 
 from app.config import CONNECT_WITH_PROTON
 from app.dashboard.base import dashboard_bp
+from app.extensions import limiter
 from app.log import LOG
 from app.models import PartnerUser
 from app.proton.utils import get_proton_partner
@@ -21,6 +22,7 @@ class LoginForm(FlaskForm):
 
 
 @dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
+@limiter.limit("3/minute")
 @login_required
 def enter_sudo():
     password_check_form = LoginForm()
@@ -1,3 +1,7 @@
+import base64
+import binascii
+import json
+
 import arrow
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
@@ -15,8 +19,8 @@ from app.email_utils import (
     mailbox_already_used,
     render,
     send_email,
-    is_valid_email,
 )
+from app.email_validation import is_valid_email
 from app.log import LOG
 from app.models import Mailbox, Job
 from app.utils import CSRFValidationForm
@@ -180,7 +184,9 @@ def mailbox_route():
 
 def send_verification_email(user, mailbox):
     s = TimestampSigner(MAILBOX_SECRET)
-    mailbox_id_signed = s.sign(str(mailbox.id)).decode()
+    encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
+    b64_data = base64.urlsafe_b64encode(encoded_data)
+    mailbox_id_signed = s.sign(b64_data).decode()
     verification_url = (
         URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
     )
@@ -205,18 +211,30 @@ def send_verification_email(user, mailbox):
 @dashboard_bp.route("/mailbox_verify")
 def mailbox_verify():
     s = TimestampSigner(MAILBOX_SECRET)
-    mailbox_id = request.args.get("mailbox_id")
+    mailbox_verify_request = request.args.get("mailbox_id")
     try:
-        r_id = int(s.unsign(mailbox_id, max_age=900))
+        mailbox_raw_data = s.unsign(mailbox_verify_request, max_age=900)
     except Exception:
         flash("Invalid link. Please delete and re-add your mailbox", "error")
         return redirect(url_for("dashboard.mailbox_route"))
-    else:
-        mailbox = Mailbox.get(r_id)
+    try:
+        decoded_data = base64.urlsafe_b64decode(mailbox_raw_data)
+    except binascii.Error:
+        flash("Invalid link. Please delete and re-add your mailbox", "error")
+        return redirect(url_for("dashboard.mailbox_route"))
+    mailbox_data = json.loads(decoded_data)
+    if not isinstance(mailbox_data, list) or len(mailbox_data) != 2:
+        flash("Invalid link. Please delete and re-add your mailbox", "error")
+        return redirect(url_for("dashboard.mailbox_route"))
+    mailbox_id = mailbox_data[0]
+    mailbox = Mailbox.get(mailbox_id)
     if not mailbox:
         flash("Invalid link", "error")
         return redirect(url_for("dashboard.mailbox_route"))
+    mailbox_email = mailbox_data[1]
+    if mailbox_email != mailbox.email:
+        flash("Invalid link", "error")
+        return redirect(url_for("dashboard.mailbox_route"))
 
     mailbox.verified = True
     Session.commit()
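The reworked verification link above now signs a base64-encoded JSON pair instead of a bare id. A minimal sketch of that round trip, assuming `itsdangerous` and made-up secret/mailbox values (this is an illustration, not the app's module):

```python
import base64
import json

from itsdangerous import TimestampSigner

signer = TimestampSigner("replace-with-MAILBOX_SECRET")  # assumed placeholder secret

# Signing side: embed both the mailbox id and its email address in the token.
payload = base64.urlsafe_b64encode(json.dumps([42, "me@example.com"]).encode("utf-8"))
token = signer.sign(payload).decode()

# Verification side: reject tokens older than 900 seconds, then check both fields.
raw = signer.unsign(token, max_age=900)
mailbox_id, mailbox_email = json.loads(base64.urlsafe_b64decode(raw))
assert mailbox_id == 42 and mailbox_email == "me@example.com"
```

Binding the email address into the token means a link issued for one mailbox no longer verifies a mailbox whose address has since changed.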
@@ -20,6 +20,7 @@ X_SPAM_STATUS = "X-Spam-Status"
 LIST_UNSUBSCRIBE = "List-Unsubscribe"
 LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
 RETURN_PATH = "Return-Path"
+AUTHENTICATION_RESULTS = "Authentication-Results"
 
 # headers used to DKIM sign in order of preference
 DKIM_HEADERS = [
@@ -32,6 +33,7 @@ DKIM_HEADERS = [
 SL_DIRECTION = "X-SimpleLogin-Type"
 SL_EMAIL_LOG_ID = "X-SimpleLogin-EmailLog-ID"
 SL_ENVELOPE_FROM = "X-SimpleLogin-Envelope-From"
+SL_ORIGINAL_FROM = "X-SimpleLogin-Original-From"
 SL_ENVELOPE_TO = "X-SimpleLogin-Envelope-To"
 SL_CLIENT_IP = "X-SimpleLogin-Client-IP"
@@ -828,19 +828,6 @@ def should_add_dkim_signature(domain: str) -> bool:
     return False
 
 
-def is_valid_email(email_address: str) -> bool:
-    """
-    Used to check whether an email address is valid
-    NOT run MX check.
-    NOT allow unicode.
-    """
-    try:
-        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
-        return True
-    except EmailNotValidError:
-        return False
-
-
 class EmailEncoding(enum.Enum):
     BASE64 = "base64"
     QUOTED = "quoted-printable"
@@ -951,6 +938,8 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
         for part in msg.get_payload():
             if isinstance(part, Message):
                 new_parts.append(add_header(part, text_header, html_header))
+            elif isinstance(part, str):
+                new_parts.append(MIMEText(part))
             else:
                 new_parts.append(part)
         clone_msg = copy(msg)
@@ -959,7 +948,14 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
 
     elif content_type in ("multipart/mixed", "multipart/signed"):
         new_parts = []
-        parts = list(msg.get_payload())
+        payload = msg.get_payload()
+        if isinstance(payload, str):
+            # The message is badly formatted inject as new
+            new_parts = [MIMEText(text_header, "plain"), MIMEText(payload, "plain")]
+            clone_msg = copy(msg)
+            clone_msg.set_payload(new_parts)
+            return clone_msg
+        parts = list(payload)
         LOG.d("only add header for the first part for %s", content_type)
         for ix, part in enumerate(parts):
             if ix == 0:
@@ -1107,26 +1103,6 @@ def is_reverse_alias(address: str) -> bool:
     )
 
 
-# allow also + and @ that are present in a reply address
-_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
-
-
-def normalize_reply_email(reply_email: str) -> str:
-    """Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
-    if not reply_email.isascii():
-        reply_email = convert_to_id(reply_email)
-
-    ret = []
-    # drop all control characters like shift, separator, etc
-    for c in reply_email:
-        if c not in _ALLOWED_CHARS:
-            ret.append("_")
-        else:
-            ret.append(c)
-
-    return "".join(ret)
-
-
 def should_disable(alias: Alias) -> (bool, str):
     """
     Return whether an alias should be disabled and if yes, the reason why

app/app/email_validation.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+from email_validator import (
+    validate_email,
+    EmailNotValidError,
+)
+
+from app.utils import convert_to_id
+
+# allow also + and @ that are present in a reply address
+_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
+
+
+def is_valid_email(email_address: str) -> bool:
+    """
+    Used to check whether an email address is valid
+    NOT run MX check.
+    NOT allow unicode.
+    """
+    try:
+        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
+        return True
+    except EmailNotValidError:
+        return False
+
+
+def normalize_reply_email(reply_email: str) -> str:
+    """Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
+    if not reply_email.isascii():
+        reply_email = convert_to_id(reply_email)
+
+    ret = []
+    # drop all control characters like shift, separator, etc
+    for c in reply_email:
+        if c not in _ALLOWED_CHARS:
+            ret.append("_")
+        else:
+            ret.append(c)
+
+    return "".join(ret)
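A quick usage sketch of the validation helper this new module centralises (a standalone re-implementation for illustration; it only assumes the `email_validator` package is installed):

```python
from email_validator import validate_email, EmailNotValidError


def is_valid_email(email_address: str) -> bool:
    # Same checks as the new module: syntax only, no MX lookup, no SMTPUTF8/unicode.
    try:
        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
        return True
    except EmailNotValidError:
        return False


print(is_valid_email("hello@example.com"))   # True
print(is_valid_email("héllo@example.com"))   # False: unicode local parts are rejected
```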
@@ -84,6 +84,14 @@ class ErrAddressInvalid(SLException):
         return f"{self.address} is not a valid email address"
 
 
+class InvalidContactEmailError(SLException):
+    def __init__(self, website_email: str):  # noqa: F821
+        self.website_email = website_email
+
+    def error_for_user(self) -> str:
+        return f"Cannot create contact with invalid email {self.website_email}"
+
+
 class ErrContactAlreadyExists(SLException):
     """raised when a contact already exists"""
@@ -74,8 +74,8 @@ class UnsubscribeEncoder:
         )
         signed_data = cls._get_signer().sign(serialized_data).decode("utf-8")
         encoded_request = f"{UNSUB_PREFIX}.{signed_data}"
-        if len(encoded_request) > 256:
-            LOG.e("Encoded request is longer than 256 chars")
+        if len(encoded_request) > 512:
+            LOG.w("Encoded request is longer than 512 chars")
         return encoded_request
 
     @staticmethod
@@ -9,6 +9,7 @@ from app.handler.unsubscribe_encoder import (
     UnsubscribeData,
     UnsubscribeOriginalData,
 )
+from app.log import LOG
 from app.models import Alias, Contact, UnsubscribeBehaviourEnum
 
 
@@ -30,6 +31,7 @@ class UnsubscribeGenerator:
         """
         unsubscribe_data = message[headers.LIST_UNSUBSCRIBE]
         if not unsubscribe_data:
+            LOG.info("Email has no unsubscribe header")
             return message
         raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
         mailto_unsubs = None
@@ -44,7 +46,9 @@ class UnsubscribeGenerator:
             if url_data.scheme == "mailto":
                 query_data = urllib.parse.parse_qs(url_data.query)
                 mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
+                LOG.debug(f"Unsub is mailto to {mailto_unsubs}")
             else:
+                LOG.debug(f"Unsub has {url_data.scheme} scheme")
                 other_unsubs.append(method)
         # If there are non mailto unsubscribe methods, use those in the header
         if other_unsubs:
@@ -56,18 +60,19 @@ class UnsubscribeGenerator:
             add_or_replace_header(
                 message, headers.LIST_UNSUBSCRIBE_POST, "List-Unsubscribe=One-Click"
             )
+            LOG.debug(f"Adding click unsub methods to header {other_unsubs}")
             return message
-        if not mailto_unsubs:
-            message = delete_header(message, headers.LIST_UNSUBSCRIBE)
-            message = delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
+        elif not mailto_unsubs:
+            LOG.debug("No unsubs. Deleting all unsub headers")
+            delete_header(message, headers.LIST_UNSUBSCRIBE)
+            delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
             return message
-        return self._add_unsubscribe_header(
-            message,
-            UnsubscribeData(
-                UnsubscribeAction.OriginalUnsubscribeMailto,
-                UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
-            ),
-        )
+        unsub_data = UnsubscribeData(
+            UnsubscribeAction.OriginalUnsubscribeMailto,
+            UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
+        )
+        LOG.debug(f"Adding unsub data {unsub_data}")
+        return self._add_unsubscribe_header(message, unsub_data)
 
     def _add_unsubscribe_header(
         self, message: Message, unsub: UnsubscribeData
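To make the branching above easier to follow, here is a rough standalone sketch of how a `List-Unsubscribe` value splits into mailto and non-mailto methods (the header value is invented; the real logic lives in `UnsubscribeGenerator`):

```python
import urllib.parse

header_value = "<mailto:unsub@news.example.com?subject=stop>, <https://news.example.com/unsub?id=1>"

mailto_unsubs = None
other_unsubs = []
for method in (m.strip() for m in header_value.split(",")):
    # Strip the surrounding angle brackets before parsing the URL.
    url_data = urllib.parse.urlparse(method.strip("<>"))
    if url_data.scheme == "mailto":
        query_data = urllib.parse.parse_qs(url_data.query)
        mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
    else:
        other_unsubs.append(method)

print(mailto_unsubs)  # ('unsub@news.example.com', 'stop')
print(other_unsubs)   # ['<https://news.example.com/unsub?id=1>']
```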
@@ -46,6 +46,7 @@ class SendRequest:
             "mail_options": self.mail_options,
             "rcpt_options": self.rcpt_options,
             "is_forward": self.is_forward,
+            "retries": self.retries,
         }
         return json.dumps(data).encode("utf-8")
 
@@ -66,6 +67,7 @@ class SendRequest:
             mail_options=decoded_data["mail_options"],
             rcpt_options=decoded_data["rcpt_options"],
             is_forward=decoded_data["is_forward"],
+            retries=decoded_data.get("retries", 1),
         )
 
     def save_request_to_unsent_dir(self, prefix: str = "DeliveryFail"):
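The `retries` field is read back with a default so that requests serialised before this change still load. A tiny sketch of that fallback (payload values are made up):

```python
import json

# A payload written by the old code has no "retries" key.
old_payload = json.dumps({"mail_options": [], "rcpt_options": [], "is_forward": False})
decoded = json.loads(old_payload)

retries = decoded.get("retries", 1)  # pre-change payloads default to 1
assert retries == 1
```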
@@ -341,7 +341,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         sa.Boolean, default=True, nullable=False, server_default="1"
     )
 
-    activated = sa.Column(sa.Boolean, default=False, nullable=False)
+    activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
 
     # an account can be disabled if having harmful behavior
     disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@@ -411,7 +411,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     )
 
     referral_id = sa.Column(
-        sa.ForeignKey("referral.id", ondelete="SET NULL"), nullable=True, default=None
+        sa.ForeignKey("referral.id", ondelete="SET NULL"),
+        nullable=True,
+        default=None,
+        index=True,
     )
 
     referral = orm.relationship("Referral", foreign_keys=[referral_id])
@@ -445,7 +448,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     random_alias_suffix = sa.Column(
         sa.Integer,
         nullable=False,
-        default=AliasSuffixEnum.random_string.value,
+        default=AliasSuffixEnum.word.value,
         server_default=str(AliasSuffixEnum.random_string.value),
     )
 
@@ -514,9 +517,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         server_default=BlockBehaviourEnum.return_2xx.name,
     )
 
-    # to keep existing behavior, the server default is TRUE whereas for new user, the default value is FALSE
     include_header_email_header = sa.Column(
-        sa.Boolean, default=False, nullable=False, server_default="1"
+        sa.Boolean, default=True, nullable=False, server_default="1"
     )
 
     # bitwise flags. Allow for future expansion
@@ -535,6 +537,12 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         nullable=False,
     )
 
+    __table_args__ = (
+        sa.Index(
+            "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
+        ),
+    )
+
     @property
     def directory_quota(self):
         return min(
@@ -569,6 +577,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
 
     @classmethod
     def create(cls, email, name="", password=None, from_partner=False, **kwargs):
+        email = sanitize_email(email)
         user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
 
         if password:
@@ -1446,7 +1455,7 @@ class Alias(Base, ModelMixin):
     )
 
     # have I been pwned
-    hibp_last_check = sa.Column(ArrowType, default=None)
+    hibp_last_check = sa.Column(ArrowType, default=None, index=True)
     hibp_breaches = orm.relationship("Hibp", secondary="alias_hibp")
 
     # to use Postgres full text search. Only applied on "note" column for now
@@ -2292,6 +2301,7 @@ class CustomDomain(Base, ModelMixin):
     @classmethod
     def create(cls, **kwargs):
         domain = kwargs.get("domain")
+        kwargs["domain"] = domain.replace("\n", "")
         if DeletedSubdomain.get_by(domain=domain):
             raise SubdomainInTrashError
 
@@ -2591,6 +2601,12 @@ class Mailbox(Base, ModelMixin):
 
         return ret
 
+    @classmethod
+    def create(cls, **kw):
+        if "email" in kw:
+            kw["email"] = sanitize_email(kw["email"])
+        return super().create(**kw)
+
     def __repr__(self):
         return f"<Mailbox {self.id} {self.email}>"
 
@@ -2929,6 +2945,8 @@ class Monitoring(Base, ModelMixin):
     active_queue = sa.Column(sa.Integer, nullable=False)
     deferred_queue = sa.Column(sa.Integer, nullable=False)
 
+    __table_args__ = (Index("ix_monitoring_created_at", "created_at"),)
+
 
 class BatchImport(Base, ModelMixin):
     __tablename__ = "batch_import"
@@ -3054,6 +3072,8 @@ class Bounce(Base, ModelMixin):
     email = sa.Column(sa.String(256), nullable=False, index=True)
     info = sa.Column(sa.Text, nullable=True)
 
+    __table_args__ = (sa.Index("ix_bounce_created_at", "created_at"),)
+
 
 class TransactionalEmail(Base, ModelMixin):
     """Storing all email addresses that receive transactional emails, including account email and mailboxes.
@@ -3063,6 +3083,8 @@ class TransactionalEmail(Base, ModelMixin):
     __tablename__ = "transactional_email"
     email = sa.Column(sa.String(256), nullable=False, unique=False)
 
+    __table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
+
 
 class Payout(Base, ModelMixin):
     """Referral payouts"""

@@ -99,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
     email_address = email_address.strip().replace(" ", "").replace("\n", " ")
     if not not_lower:
         email_address = email_address.lower()
-    return email_address
+    return email_address.replace("\u200f", "")
 
 
 class NextUrlSanitizer:

app/cron.py
@@ -22,10 +22,9 @@ from app.email_utils import (
     render,
     email_can_be_used_as_mailbox,
     send_email_with_rate_control,
-    normalize_reply_email,
-    is_valid_email,
     get_email_domain_part,
 )
+from app.email_validation import is_valid_email, normalize_reply_email
 from app.errors import ProtonPartnerNotSetUp
 from app.log import LOG
 from app.mail_sender import load_unsent_mails_from_fs_and_resend
@@ -66,12 +65,14 @@ from server import create_light_app
 
 def notify_trial_end():
     for user in User.filter(
-        User.activated.is_(True), User.trial_end.isnot(None), User.lifetime.is_(False)
+        User.activated.is_(True),
+        User.trial_end.isnot(None),
+        User.trial_end >= arrow.now().shift(days=2),
+        User.trial_end < arrow.now().shift(days=3),
+        User.lifetime.is_(False),
     ).all():
         try:
-            if user.in_trial() and arrow.now().shift(
-                days=3
-            ) > user.trial_end >= arrow.now().shift(days=2):
+            if user.in_trial():
                 LOG.d("Send trial end email to user %s", user)
                 send_trial_end_soon_email(user)
         # happens if user has been deleted in the meantime
@@ -104,7 +105,9 @@ def delete_logs():
 
 
 def delete_refused_emails():
-    for refused_email in RefusedEmail.filter_by(deleted=False).all():
+    for refused_email in (
+        RefusedEmail.filter_by(deleted=False).order_by(RefusedEmail.id).all()
+    ):
         if arrow.now().shift(days=1) > refused_email.delete_at >= arrow.now():
             LOG.d("Delete refused email %s", refused_email)
             if refused_email.path:
@@ -272,7 +275,11 @@ def compute_metric2() -> Metric2:
     _24h_ago = now.shift(days=-1)
 
     nb_referred_user_paid = 0
-    for user in User.filter(User.referral_id.isnot(None)):
+    for user in (
+        User.filter(User.referral_id.isnot(None))
+        .yield_per(500)
+        .enable_eagerloads(False)
+    ):
         if user.is_paid():
             nb_referred_user_paid += 1
 
@@ -1020,7 +1027,8 @@ async def check_hibp():
             )
             .filter(Alias.enabled)
             .order_by(Alias.hibp_last_check.asc())
-            .all()
+            .yield_per(500)
+            .enable_eagerloads(False)
         ):
             await queue.put(alias.id)
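Worth noting from the `sanitize_email` hunk above: the helper now also strips the invisible U+200F right-to-left mark from addresses. A small standalone sketch of the updated behaviour (re-implemented here purely for illustration):

```python
def sanitize_email(email_address: str, not_lower: bool = False) -> str:
    # Mirrors the updated helper: trim, drop spaces, normalise newlines,
    # lowercase, and remove the invisible U+200F right-to-left mark.
    email_address = email_address.strip().replace(" ", "").replace("\n", " ")
    if not not_lower:
        email_address = email_address.lower()
    return email_address.replace("\u200f", "")


assert sanitize_email(" User\u200f@Example.com ") == "user@example.com"
```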
@@ -5,68 +5,64 @@ jobs:
     schedule: "0 0 * * *"
     captureStderr: true
 
-  - name: SimpleLogin Notify Trial Ends
-    command: python /code/cron.py -j notify_trial_end
-    shell: /bin/bash
-    schedule: "0 8 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Manual Subscription Ends
-    command: python /code/cron.py -j notify_manual_subscription_end
-    shell: /bin/bash
-    schedule: "0 9 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Premium Ends
-    command: python /code/cron.py -j notify_premium_end
-    shell: /bin/bash
-    schedule: "0 10 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Delete Logs
-    command: python /code/cron.py -j delete_logs
-    shell: /bin/bash
-    schedule: "0 11 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Poll Apple Subscriptions
-    command: python /code/cron.py -j poll_apple_subscription
-    shell: /bin/bash
-    schedule: "0 12 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Sanity Check
-    command: python /code/cron.py -j sanity_check
-    shell: /bin/bash
-    schedule: "0 2 * * *"
-    captureStderr: true
-
   - name: SimpleLogin Delete Old Monitoring records
     command: python /code/cron.py -j delete_old_monitoring
     shell: /bin/bash
-    schedule: "0 14 * * *"
+    schedule: "15 1 * * *"
     captureStderr: true
 
   - name: SimpleLogin Custom Domain check
     command: python /code/cron.py -j check_custom_domain
     shell: /bin/bash
-    schedule: "0 15 * * *"
+    schedule: "15 2 * * *"
     captureStderr: true
 
   - name: SimpleLogin HIBP check
     command: python /code/cron.py -j check_hibp
     shell: /bin/bash
-    schedule: "0 18 * * *"
+    schedule: "15 3 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
 
   - name: SimpleLogin Notify HIBP breaches
     command: python /code/cron.py -j notify_hibp
     shell: /bin/bash
-    schedule: "0 19 * * *"
+    schedule: "15 4 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
 
+  - name: SimpleLogin Delete Logs
+    command: python /code/cron.py -j delete_logs
+    shell: /bin/bash
+    schedule: "15 5 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Poll Apple Subscriptions
+    command: python /code/cron.py -j poll_apple_subscription
+    shell: /bin/bash
+    schedule: "15 6 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Trial Ends
+    command: python /code/cron.py -j notify_trial_end
+    shell: /bin/bash
+    schedule: "15 8 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Manual Subscription Ends
+    command: python /code/cron.py -j notify_manual_subscription_end
+    shell: /bin/bash
+    schedule: "15 9 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Premium Ends
+    command: python /code/cron.py -j notify_premium_end
+    shell: /bin/bash
+    schedule: "15 10 * * *"
+    captureStderr: true
+
   - name: SimpleLogin send unsent emails
     command: python /code/cron.py -j send_undelivered_mails
     shell: /bin/bash

app/docs/ssl.md
@@ -1,4 +1,4 @@
-# SSL, HTTPS, and HSTS
+# SSL, HTTPS, HSTS and additional security measures
 
 It's highly recommended to enable SSL/TLS on your server, both for the web app and email server.
 
@@ -58,3 +58,124 @@ Now, reload Nginx:
 ```bash
 sudo systemctl reload nginx
 ```
+
+## Additional security measures
+
+For additional security, we recommend you take some extra steps.
+
+### Enable Certificate Authority Authorization (CAA)
+
+[Certificate Authority Authorization](https://letsencrypt.org/docs/caa/) is a step you can take to restrict the list of certificate authorities that are allowed to issue certificates for your domains.
+
+Use [SSLMate’s CAA Record Generator](https://sslmate.com/caa/) to create a **CAA record** with the following configuration:
+
+- `flags`: `0`
+- `tag`: `issue`
+- `value`: `"letsencrypt.org"`
+
+To verify if the DNS works, the following command
+
+```bash
+dig @1.1.1.1 mydomain.com caa
+```
+
+should return:
+
+```
+mydomain.com. 3600 IN CAA 0 issue "letsencrypt.org"
+```
+
+### SMTP MTA Strict Transport Security (MTA-STS)
+
+[MTA-STS](https://datatracker.ietf.org/doc/html/rfc8461) is an extra step you can take to broadcast the ability of your instance to receive and, optionally enforce, TSL-secure SMTP connections to protect email traffic.
+
+Enabling MTA-STS requires you serve a specific file from subdomain `mta-sts.domain.com` on a well-known route.
+
+Create a text file `/var/www/.well-known/mta-sts.txt` with the content:
+
+```txt
+version: STSv1
+mode: testing
+mx: app.mydomain.com
+max_age: 86400
+```
+
+It is recommended to start with `mode: testing` for starters to get time to review failure reports. Add as many `mx:` domain entries as you have matching **MX records** in your DNS configuration.
+
+Create a **TXT record** for `_mta-sts.mydomain.com.` with the following value:
+
+```txt
+v=STSv1; id=UNIX_TIMESTAMP
+```
+
+With `UNIX_TIMESTAMP` being the current date/time.
+
+Use the following command to generate the record:
+
+```bash
+echo "v=STSv1; id=$(date +%s)"
+```
+
+To verify if the DNS works, the following command
+
+```bash
+dig @1.1.1.1 _mta-sts.mydomain.com txt
+```
+
+should return a result similar to this one:
+
+```
+_mta-sts.mydomain.com. 3600 IN TXT "v=STSv1; id=1689416399"
+```
+
+Create an additional Nginx configuration in `/etc/nginx/sites-enabled/mta-sts` with the following content:
+
+```
+server {
+    server_name mta-sts.mydomain.com;
+    root /var/www;
+    listen 80;
+
+    location ^~ /.well-known {}
+}
+```
+
+Restart Nginx with the following command:
+
+```sh
+sudo service nginx restart
+```
+
+A correct configuration of MTA-STS, however, requires that the certificate used to host the `mta-sts` subdomain matches that of the subdomain referred to by the **MX record** from the DNS. In other words, both `mta-sts.mydomain.com` and `app.mydomain.com` must share the same certificate.
+
+The easiest way to do this is to _expand_ the certificate associated with `app.mydomain.com` to also support the `mta-sts` subdomain using the following command:
+
+```sh
+certbot --expand --nginx -d app.mydomain.com,mta-sts.mydomain.com
+```
+
+## SMTP TLS Reporting
+
+[TLSRPT](https://datatracker.ietf.org/doc/html/rfc8460) is used by SMTP systems to report failures in establishing TLS-secure sessions as broadcast by the MTA-STS configuration.
+
+Configuring MTA-STS in `mode: testing` as shown in the previous section gives you time to review failures from some SMTP senders.
+
+Create a **TXT record** for `_smtp._tls.mydomain.com.` with the following value:
+
+```txt
+v=TSLRPTv1; rua=mailto:YOUR_EMAIL
+```
+
+The TLSRPT configuration at the DNS level allows SMTP senders that fail to initiate TLS-secure sessions to send reports to a particular email address. We suggest creating a `tls-reports` alias in SimpleLogin for this purpose.
+
+To verify if the DNS works, the following command
+
+```bash
+dig @1.1.1.1 _smtp._tls.mydomain.com txt
+```
+
+should return a result similar to this one:
+
+```
+_smtp._tls.mydomain.com. 3600 IN TXT "v=TSLRPTv1; rua=mailto:tls-reports@mydomain.com"
+```
@@ -106,8 +106,6 @@ from app.email_utils import (
     get_header_unicode,
     generate_reply_email,
     is_reverse_alias,
-    normalize_reply_email,
-    is_valid_email,
     replace,
     should_disable,
     parse_id_from_bounce,
@@ -123,6 +121,7 @@ from app.email_utils import (
     generate_verp_email,
     sl_formataddr,
 )
+from app.email_validation import is_valid_email, normalize_reply_email
 from app.errors import (
     NonReverseAliasInReplyPhase,
     VERPTransactional,
@@ -262,7 +261,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
 
             Session.commit()
         except IntegrityError:
-            LOG.w("Contact %s %s already exist", alias, contact_email)
+            LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
             Session.rollback()
             contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
 
@@ -280,6 +279,9 @@ def get_or_create_reply_to_contact(
     except ValueError:
         return
 
+    if len(contact_name) >= Contact.MAX_NAME_LENGTH:
+        contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
+
     if not is_valid_email(contact_address):
         LOG.w(
             "invalid reply-to address %s. Parse from %s",
@@ -348,6 +350,10 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
             continue
 
         contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
+        contact_name = full_address.display_name
+        if len(contact_name) >= Contact.MAX_NAME_LENGTH:
+            contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
+
         if contact:
             # update the contact name if needed
             if contact.name != full_address.display_name:
@@ -355,9 +361,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
                     "Update contact %s name %s to %s",
                     contact,
                     contact.name,
-                    full_address.display_name,
+                    contact_name,
                 )
-                contact.name = full_address.display_name
+                contact.name = contact_name
                 Session.commit()
             else:
                 LOG.d(
@@ -372,7 +378,7 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
                 user_id=alias.user_id,
                 alias_id=alias.id,
                 website_email=contact_email,
-                name=full_address.display_name,
+                name=contact_name,
                 reply_email=generate_reply_email(contact_email, alias),
                 is_cc=header.lower() == "cc",
                 automatic_created=True,
@@ -541,12 +547,20 @@ def sign_msg(msg: Message) -> Message:
     signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
 
     try:
-        signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
+        payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
+
+        if not payload:
+            raise PGPException("Empty signature by gnupg")
+
+        signature.set_payload(payload)
     except Exception:
         LOG.e("Cannot sign, try using pgpy")
-        signature.set_payload(
-            sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
-        )
+        payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
+
+        if not payload:
+            raise PGPException("Empty signature by pgpy")
+
+        signature.set_payload(payload)
 
     container.attach(signature)
 
@@ -846,9 +860,7 @@ def forward_email_to_mailbox(
             f"""Email sent to {alias.email} from an invalid address and cannot be replied""",
         )
 
-    delete_all_headers_except(
-        msg,
-        [
+    headers_to_keep = [
         headers.FROM,
         headers.TO,
         headers.CC,
@@ -859,9 +871,12 @@ def forward_email_to_mailbox(
         # References and In-Reply-To are used for keeping the email thread
         headers.REFERENCES,
         headers.IN_REPLY_TO,
-        ]
-        + headers.MIME_HEADERS,
-    )
+        headers.LIST_UNSUBSCRIBE,
+        headers.LIST_UNSUBSCRIBE_POST,
+    ] + headers.MIME_HEADERS
+    if user.include_header_email_header:
+        headers_to_keep.append(headers.AUTHENTICATION_RESULTS)
+    delete_all_headers_except(msg, headers_to_keep)
 
     # create PGP email if needed
     if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
@@ -898,6 +913,11 @@ def forward_email_to_mailbox(
     msg[headers.SL_EMAIL_LOG_ID] = str(email_log.id)
     if user.include_header_email_header:
         msg[headers.SL_ENVELOPE_FROM] = envelope.mail_from
+        if contact.name:
+            original_from = f"{contact.name} <{contact.website_email}>"
+        else:
+            original_from = contact.website_email
+        msg[headers.SL_ORIGINAL_FROM] = original_from
     # when an alias isn't in the To: header, there's no way for users to know what alias has received the email
     msg[headers.SL_ENVELOPE_TO] = alias.email

app/migrations/versions/2023_072819_01827104004b_.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: 01827104004b
+Revises: 2634b41f54db
+Create Date: 2023-07-28 19:39:28.675490
+
+"""
+import sqlalchemy_utils
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '01827104004b'
+down_revision = '2634b41f54db'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    with op.get_context().autocommit_block():
+        # ### commands auto generated by Alembic - please adjust! ###
+        op.create_index(op.f('ix_alias_hibp_last_check'), 'alias', ['hibp_last_check'], unique=False, postgresql_concurrently=True)
+        op.create_index('ix_bounce_created_at', 'bounce', ['created_at'], unique=False, postgresql_concurrently=True)
+        op.create_index('ix_monitoring_created_at', 'monitoring', ['created_at'], unique=False, postgresql_concurrently=True)
+        op.create_index('ix_transactional_email_created_at', 'transactional_email', ['created_at'], unique=False, postgresql_concurrently=True)
+        op.create_index(op.f('ix_users_activated'), 'users', ['activated'], unique=False, postgresql_concurrently=True)
+        op.create_index('ix_users_activated_trial_end_lifetime', 'users', ['activated', 'trial_end', 'lifetime'], unique=False, postgresql_concurrently=True)
+        op.create_index(op.f('ix_users_referral_id'), 'users', ['referral_id'], unique=False, postgresql_concurrently=True)
+        # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(op.f('ix_users_referral_id'), table_name='users')
+    op.drop_index('ix_users_activated_trial_end_lifetime', table_name='users')
+    op.drop_index(op.f('ix_users_activated'), table_name='users')
+    op.drop_index('ix_transactional_email_created_at', table_name='transactional_email')
+    op.drop_index('ix_monitoring_created_at', table_name='monitoring')
+    op.drop_index('ix_bounce_created_at', table_name='bounce')
+    op.drop_index(op.f('ix_alias_hibp_last_check'), table_name='alias')
+    # ### end Alembic commands ###

app/monitor/__init__.py (new file, empty)

app/monitor/metric.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+from dataclasses import dataclass
+from typing import List
+
+
+@dataclass
+class UpcloudRecord:
+    db_role: str
+    label: str
+    time: str
+    value: float
+
+
+@dataclass
+class UpcloudMetric:
+    metric_name: str
+    records: List[UpcloudRecord]
+
+
+@dataclass
+class UpcloudMetrics:
+    metrics: List[UpcloudMetric]

app/monitor/metric_exporter.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+from app.config import UPCLOUD_DB_ID, UPCLOUD_PASSWORD, UPCLOUD_USERNAME
+from app.log import LOG
+from monitor.newrelic import NewRelicClient
+from monitor.upcloud import UpcloudClient
+
+
+class MetricExporter:
+    def __init__(self, newrelic_license: str):
+        self.__upcloud = UpcloudClient(
+            username=UPCLOUD_USERNAME, password=UPCLOUD_PASSWORD
+        )
+        self.__newrelic = NewRelicClient(newrelic_license)
+
+    def run(self):
+        try:
+            metrics = self.__upcloud.get_metrics(UPCLOUD_DB_ID)
+            self.__newrelic.send(metrics)
+            LOG.info("Upcloud metrics sent to NewRelic")
+        except Exception as e:
+            LOG.warn(f"Could not export metrics: {e}")

app/monitor/newrelic.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+from monitor.metric import UpcloudMetrics
+
+from newrelic_telemetry_sdk import GaugeMetric, MetricClient
+
+_NEWRELIC_BASE_HOST = "metric-api.eu.newrelic.com"
+
+
+class NewRelicClient:
+    def __init__(self, license_key: str):
+        self.__client = MetricClient(license_key=license_key, host=_NEWRELIC_BASE_HOST)
+
+    def send(self, metrics: UpcloudMetrics):
+        batch = []
+
+        for metric in metrics.metrics:
+            for record in metric.records:
+                batch.append(
+                    GaugeMetric(
+                        name=f"upcloud.db.{metric.metric_name}",
+                        value=record.value,
+                        tags={"host": record.label, "db_role": record.db_role},
+                    )
+                )
+
+        response = self.__client.send_batch(batch)
+        response.raise_for_status()

app/monitor/upcloud.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+from app.log import LOG
+from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord
+
+import base64
+import requests
+from typing import Any
+
+
+BASE_URL = "https://api.upcloud.com"
+
+
+def get_metric(json: Any, metric: str) -> UpcloudMetric:
+    records = []
+
+    if metric in json:
+        metric_data = json[metric]
+        data = metric_data["data"]
+        cols = list(map(lambda x: x["label"], data["cols"][1:]))
+        latest = data["rows"][-1]
+        time = latest[0]
+        for column_idx in range(len(cols)):
+            value = latest[1 + column_idx]
+
+            # If the latest value is None, try to fetch the second to last
+            if value is None:
+                value = data["rows"][-2][1 + column_idx]
+
+            if value is not None:
+                label = cols[column_idx]
+                if "(master)" in label:
+                    db_role = "master"
+                else:
+                    db_role = "standby"
+                records.append(
+                    UpcloudRecord(time=time, db_role=db_role, label=label, value=value)
+                )
+            else:
+                LOG.warn(f"Could not get value for metric {metric}")
+
+    return UpcloudMetric(metric_name=metric, records=records)
+
+
+def get_metrics(json: Any) -> UpcloudMetrics:
+    return UpcloudMetrics(
+        metrics=[
+            get_metric(json, "cpu_usage"),
+            get_metric(json, "disk_usage"),
+            get_metric(json, "diskio_reads"),
+            get_metric(json, "diskio_writes"),
+            get_metric(json, "load_average"),
+            get_metric(json, "mem_usage"),
+            get_metric(json, "net_receive"),
+            get_metric(json, "net_send"),
+        ]
+    )
+
+
+class UpcloudClient:
+    def __init__(self, username: str, password: str):
|
||||||
|
if not username:
|
||||||
|
raise Exception("UpcloudClient username must be set")
|
||||||
|
if not password:
|
||||||
|
raise Exception("UpcloudClient password must be set")
|
||||||
|
|
||||||
|
client = requests.Session()
|
||||||
|
encoded_auth = base64.b64encode(
|
||||||
|
f"{username}:{password}".encode("utf-8")
|
||||||
|
).decode("utf-8")
|
||||||
|
client.headers = {"Authorization": f"Basic {encoded_auth}"}
|
||||||
|
self.__client = client
|
||||||
|
|
||||||
|
def get_metrics(self, db_uuid: str) -> UpcloudMetrics:
|
||||||
|
url = f"{BASE_URL}/1.3/database/{db_uuid}/metrics?period=hour"
|
||||||
|
LOG.d(f"Performing request to {url}")
|
||||||
|
response = self.__client.get(url)
|
||||||
|
LOG.d(f"Status code: {response.status_code}")
|
||||||
|
if response.status_code != 200:
|
||||||
|
return UpcloudMetrics(metrics=[])
|
||||||
|
|
||||||
|
as_json = response.json()
|
||||||
|
|
||||||
|
return get_metrics(as_json)
|
@ -1,3 +1,4 @@
|
|||||||
|
import configparser
|
||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
from time import sleep
|
from time import sleep
|
||||||
@ -7,6 +8,7 @@ import newrelic.agent
|
|||||||
|
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.log import LOG
|
from app.log import LOG
|
||||||
|
from monitor.metric_exporter import MetricExporter
|
||||||
|
|
||||||
# the number of consecutive fails
|
# the number of consecutive fails
|
||||||
# if more than _max_nb_fails, alert
|
# if more than _max_nb_fails, alert
|
||||||
@ -19,6 +21,18 @@ _max_nb_fails = 10
|
|||||||
# the maximum number of emails in incoming & active queue
|
# the maximum number of emails in incoming & active queue
|
||||||
_max_incoming = 50
|
_max_incoming = 50
|
||||||
|
|
||||||
|
_NR_CONFIG_FILE_LOCATION_VAR = "NEW_RELIC_CONFIG_FILE"
|
||||||
|
|
||||||
|
|
||||||
|
def get_newrelic_license() -> str:
|
||||||
|
nr_file = os.environ.get(_NR_CONFIG_FILE_LOCATION_VAR, None)
|
||||||
|
if nr_file is None:
|
||||||
|
raise Exception(f"{_NR_CONFIG_FILE_LOCATION_VAR} not defined")
|
||||||
|
|
||||||
|
config = configparser.ConfigParser()
|
||||||
|
config.read(nr_file)
|
||||||
|
return config["newrelic"]["license_key"]
|
||||||
|
|
||||||
|
|
||||||
@newrelic.agent.background_task()
|
@newrelic.agent.background_task()
|
||||||
def log_postfix_metrics():
|
def log_postfix_metrics():
|
||||||
@ -80,10 +94,13 @@ def log_nb_db_connection():
|
|||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
exporter = MetricExporter(get_newrelic_license())
|
||||||
while True:
|
while True:
|
||||||
log_postfix_metrics()
|
log_postfix_metrics()
|
||||||
log_nb_db_connection()
|
log_nb_db_connection()
|
||||||
Session.close()
|
Session.close()
|
||||||
|
|
||||||
|
exporter.run()
|
||||||
|
|
||||||
# 1 min
|
# 1 min
|
||||||
sleep(60)
|
sleep(60)
|
||||||
|
194
app/poetry.lock
generated
194
app/poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@ -111,6 +111,7 @@ Deprecated = "^1.2.13"
|
|||||||
cryptography = "37.0.1"
|
cryptography = "37.0.1"
|
||||||
SQLAlchemy = "1.3.24"
|
SQLAlchemy = "1.3.24"
|
||||||
redis = "^4.5.3"
|
redis = "^4.5.3"
|
||||||
|
newrelic-telemetry-sdk = "^0.5.0"
|
||||||
|
|
||||||
[tool.poetry.dev-dependencies]
|
[tool.poetry.dev-dependencies]
|
||||||
pytest = "^7.0.0"
|
pytest = "^7.0.0"
|
||||||
|
@ -9,10 +9,13 @@
|
|||||||
<h1 class="card-title">Create new account</h1>
|
<h1 class="card-title">Create new account</h1>
|
||||||
<div class="form-group">
|
<div class="form-group">
|
||||||
<label class="form-label">Email address</label>
|
<label class="form-label">Email address</label>
|
||||||
{{ form.email(class="form-control", type="email") }}
|
{{ form.email(class="form-control", type="email", placeholder="YourName@protonmail.com") }}
|
||||||
<div class="small-text alert alert-info" style="margin-top: 1px">
|
<div class="small-text alert alert-info" style="margin-top: 1px">
|
||||||
Emails sent to your alias will be forwarded to this email address.
|
Emails sent to your alias will be forwarded to this email address.
|
||||||
|
<br>
|
||||||
It can't be a disposable or forwarding email address.
|
It can't be a disposable or forwarding email address.
|
||||||
|
<br>
|
||||||
|
We recommend using a <a href="https://proton.me/mail" target="_blank">Proton Mail</a> address
|
||||||
</div>
|
</div>
|
||||||
{{ render_field_errors(form.email) }}
|
{{ render_field_errors(form.email) }}
|
||||||
</div>
|
</div>
|
||||||
|
@ -684,7 +684,8 @@
|
|||||||
SimpleLogin forwards emails to your mailbox from the <b>reverse-alias</b> and not from the <b>original</b>
|
SimpleLogin forwards emails to your mailbox from the <b>reverse-alias</b> and not from the <b>original</b>
|
||||||
sender address.
|
sender address.
|
||||||
<br />
|
<br />
|
||||||
If this option is enabled, the original sender addresses is stored in the email header <b>X-SimpleLogin-Envelope-From</b>.
|
If this option is enabled, the original sender addresses is stored in the email header <b>X-SimpleLogin-Envelope-From</b>
|
||||||
|
and the original From header is stored in <b>X-SimpleLogin-Original-From<b>.
|
||||||
You can choose to display this header in your email client.
|
You can choose to display this header in your email client.
|
||||||
<br />
|
<br />
|
||||||
As email headers aren't encrypted, your mailbox service can know the sender address via this header.
|
As email headers aren't encrypted, your mailbox service can know the sender address via this header.
|
||||||
|
@ -28,7 +28,7 @@
|
|||||||
<form id="supportZendeskForm" method="post" enctype="multipart/form-data">
|
<form id="supportZendeskForm" method="post" enctype="multipart/form-data">
|
||||||
<div class="mt-4 mb-5">
|
<div class="mt-4 mb-5">
|
||||||
<label for="issueDescription" class="form-label font-weight-bold">What happened?</label>
|
<label for="issueDescription" class="form-label font-weight-bold">What happened?</label>
|
||||||
<textarea class="form-control" required name="ticket_content" id="issueDescription" rows="3" placeholder="Please provide as much information as possible. For example which alias(es), mailbox(es) ar affected, if this is a persistent issue...">{{- ticket_content or '' -}}</textarea>
|
<textarea class="form-control" required name="ticket_content" id="issueDescription" rows="3" placeholder="Please provide as much information as possible. For example which alias(es), mailbox(es) are affected, if this is a persistent issue...">{{- ticket_content or '' -}}</textarea>
|
||||||
</div>
|
</div>
|
||||||
<div class="mt-5 font-weight-bold">Attach files to support request</div>
|
<div class="mt-5 font-weight-bold">Attach files to support request</div>
|
||||||
<div class="text-muted">Only images, text and emails are accepted</div>
|
<div class="text-muted">Only images, text and emails are accepted</div>
|
||||||
|
@ -286,6 +286,7 @@
|
|||||||
|
|
||||||
},
|
},
|
||||||
async mounted() {
|
async mounted() {
|
||||||
|
Object.freeze(Object.prototype);
|
||||||
let that = this;
|
let that = this;
|
||||||
let res = await fetch(`/api/notifications?page=${that.page}`, {
|
let res = await fetch(`/api/notifications?page=${that.page}`, {
|
||||||
method: "GET",
|
method: "GET",
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
<div class="page-single">
|
<div class="page-single">
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col mx-auto" style="max-width: 28rem">
|
<div class="col mx-auto" style="max-width: 32rem">
|
||||||
<div class="text-center mb-6">
|
<div class="text-center mb-6">
|
||||||
<a href="{{ LANDING_PAGE_URL }}">
|
<a href="{{ LANDING_PAGE_URL }}">
|
||||||
<img src="/static/logo.svg"
|
<img src="/static/logo.svg"
|
||||||
|
@ -17,7 +17,7 @@ def test_get_setting(flask_client):
|
|||||||
"notification": True,
|
"notification": True,
|
||||||
"random_alias_default_domain": "sl.local",
|
"random_alias_default_domain": "sl.local",
|
||||||
"sender_format": "AT",
|
"sender_format": "AT",
|
||||||
"random_alias_suffix": "random_string",
|
"random_alias_suffix": "word",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -95,11 +95,13 @@ def test_get_setting_domains_v2(flask_client):
|
|||||||
def test_update_settings_random_alias_suffix(flask_client):
|
def test_update_settings_random_alias_suffix(flask_client):
|
||||||
user = login(flask_client)
|
user = login(flask_client)
|
||||||
# default random_alias_suffix is random_string
|
# default random_alias_suffix is random_string
|
||||||
assert user.random_alias_suffix == AliasSuffixEnum.random_string.value
|
assert user.random_alias_suffix == AliasSuffixEnum.word.value
|
||||||
|
|
||||||
r = flask_client.patch("/api/setting", json={"random_alias_suffix": "invalid"})
|
r = flask_client.patch("/api/setting", json={"random_alias_suffix": "invalid"})
|
||||||
assert r.status_code == 400
|
assert r.status_code == 400
|
||||||
|
|
||||||
r = flask_client.patch("/api/setting", json={"random_alias_suffix": "word"})
|
r = flask_client.patch(
|
||||||
|
"/api/setting", json={"random_alias_suffix": "random_string"}
|
||||||
|
)
|
||||||
assert r.status_code == 200
|
assert r.status_code == 200
|
||||||
assert user.random_alias_suffix == AliasSuffixEnum.word.value
|
assert user.random_alias_suffix == AliasSuffixEnum.random_string.value
|
||||||
|
@ -1,10 +1,13 @@
|
|||||||
from time import time
|
from time import time
|
||||||
|
|
||||||
|
import arrow
|
||||||
from flask import url_for
|
from flask import url_for
|
||||||
|
|
||||||
|
from app import config
|
||||||
|
from app.dashboard.views.api_key import clean_up_unused_or_old_api_keys
|
||||||
from app.db import Session
|
from app.db import Session
|
||||||
from app.models import User, ApiKey
|
from app.models import User, ApiKey
|
||||||
from tests.utils import login
|
from tests.utils import login, create_new_user
|
||||||
|
|
||||||
|
|
||||||
def test_api_key_page_requires_password(flask_client):
|
def test_api_key_page_requires_password(flask_client):
|
||||||
@ -34,6 +37,17 @@ def test_create_delete_api_key(flask_client):
|
|||||||
assert ApiKey.filter(ApiKey.user_id == user.id).count() == 1
|
assert ApiKey.filter(ApiKey.user_id == user.id).count() == 1
|
||||||
assert api_key.name == "for test"
|
assert api_key.name == "for test"
|
||||||
|
|
||||||
|
# create second api_key
|
||||||
|
create_r = flask_client.post(
|
||||||
|
url_for("dashboard.api_key"),
|
||||||
|
data={"form-name": "create", "name": "for test 2"},
|
||||||
|
follow_redirects=True,
|
||||||
|
)
|
||||||
|
assert create_r.status_code == 200
|
||||||
|
api_key_2 = ApiKey.filter_by(user_id=user.id).order_by(ApiKey.id.desc()).first()
|
||||||
|
assert ApiKey.filter(ApiKey.user_id == user.id).count() == 2
|
||||||
|
assert api_key_2.name == "for test 2"
|
||||||
|
|
||||||
# delete api_key
|
# delete api_key
|
||||||
delete_r = flask_client.post(
|
delete_r = flask_client.post(
|
||||||
url_for("dashboard.api_key"),
|
url_for("dashboard.api_key"),
|
||||||
@ -41,7 +55,7 @@ def test_create_delete_api_key(flask_client):
|
|||||||
follow_redirects=True,
|
follow_redirects=True,
|
||||||
)
|
)
|
||||||
assert delete_r.status_code == 200
|
assert delete_r.status_code == 200
|
||||||
assert ApiKey.count() == nb_api_key
|
assert ApiKey.count() == nb_api_key + 1
|
||||||
|
|
||||||
|
|
||||||
def test_delete_all_api_keys(flask_client):
|
def test_delete_all_api_keys(flask_client):
|
||||||
@ -87,3 +101,26 @@ def test_delete_all_api_keys(flask_client):
|
|||||||
assert (
|
assert (
|
||||||
ApiKey.filter(ApiKey.user_id == user_2.id).count() == 1
|
ApiKey.filter(ApiKey.user_id == user_2.id).count() == 1
|
||||||
) # assert that user 2 still has 1 API key
|
) # assert that user 2 still has 1 API key
|
||||||
|
|
||||||
|
|
||||||
|
def test_cleanup_api_keys():
|
||||||
|
user = create_new_user()
|
||||||
|
ApiKey.create(
|
||||||
|
user_id=user.id, name="used", last_used=arrow.utcnow().shift(days=-3), times=1
|
||||||
|
)
|
||||||
|
ApiKey.create(
|
||||||
|
user_id=user.id, name="keep 1", last_used=arrow.utcnow().shift(days=-2), times=1
|
||||||
|
)
|
||||||
|
ApiKey.create(
|
||||||
|
user_id=user.id, name="keep 2", last_used=arrow.utcnow().shift(days=-1), times=1
|
||||||
|
)
|
||||||
|
ApiKey.create(user_id=user.id, name="not used", last_used=None, times=1)
|
||||||
|
Session.flush()
|
||||||
|
old_max_api_keys = config.MAX_API_KEYS
|
||||||
|
config.MAX_API_KEYS = 2
|
||||||
|
clean_up_unused_or_old_api_keys(user.id)
|
||||||
|
keys = ApiKey.filter_by(user_id=user.id).all()
|
||||||
|
assert len(keys) == 2
|
||||||
|
assert keys[0].name.find("keep") == 0
|
||||||
|
assert keys[1].name.find("keep") == 0
|
||||||
|
config.MAX_API_KEYS = old_max_api_keys
|
||||||
|
21
app/tests/example_emls/add_header_multipart.eml
Normal file
21
app/tests/example_emls/add_header_multipart.eml
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
Sender: somebody@somewhere.net
|
||||||
|
Content-Type: multipart/mixed; boundary="----=_Part_3946_1099248058.1688752298149"
|
||||||
|
|
||||||
|
--0c916c9b5fe3c925d7bafeb988bb6794
|
||||||
|
Content-Type: text/plain; charset="UTF-8"
|
||||||
|
Content-Transfer-Encoding: quoted-printable
|
||||||
|
|
||||||
|
notification test
|
||||||
|
|
||||||
|
--0c916c9b5fe3c925d7bafeb988bb6794
|
||||||
|
Content-Type: text/html; charset="UTF-8"
|
||||||
|
Content-Transfer-Encoding: quoted-printable
|
||||||
|
|
||||||
|
<html><head><meta http-equiv=3D"Content-Type" content=3D"text/html; charset=
|
||||||
|
=3DUTF-8"><meta http-equiv=3D"X-UA-Compatible" content=3D"IE=3Dedge"><meta =
|
||||||
|
name=3D"format-detection" content=3D"telephone=3Dno"><meta name=3D"viewport=
|
||||||
|
" content=3D"width=3Ddevice-width, initial-scale=3D1.0">
|
||||||
|
|
||||||
|
--0c916c9b5fe3c925d7bafeb988bb6794--
|
||||||
|
|
||||||
|
|
27
app/tests/example_emls/email_to_pgp_encrypt.eml
Normal file
27
app/tests/example_emls/email_to_pgp_encrypt.eml
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
From: {{sender_address}}
|
||||||
|
To: {{recipient_address}}
|
||||||
|
Subject: Test subject
|
||||||
|
Content-Type: multipart/alternative; boundary="MLF8fvg556fdhFDH7=_?:
|
||||||
|
|
||||||
|
--MLF8fvg556fdhFDH7=_?:
|
||||||
|
Content-Type: text/plain;
|
||||||
|
charset="utf-8"
|
||||||
|
Content-Transfer-Encoding: quoted-printable
|
||||||
|
|
||||||
|
*************************************************************************
|
||||||
|
|
||||||
|
This five-part limited series, based on the brilliant graphic novel by Me
|
||||||
|
|
||||||
|
--MLF8fvg556fdhFDH7=_?:
|
||||||
|
Content-Type: text/html;
|
||||||
|
charset="utf-8"
|
||||||
|
Content-Transfer-Encoding: 8bit
|
||||||
|
--MLF8fvg556fdhFDH7=_?:
|
||||||
|
Content-Type: text/plain;
|
||||||
|
charset="utf-8"
|
||||||
|
Content-Transfer-Encoding: quoted-printable
|
||||||
|
|
||||||
|
*************************************************************************
|
||||||
|
*************************************************************************
|
||||||
|
|
||||||
|
|
65
app/tests/example_emls/replacement_on_forward_phase.eml
Normal file
65
app/tests/example_emls/replacement_on_forward_phase.eml
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
Received: by mail-ed1-f49.google.com with SMTP id ej4so13657316edb.7
|
||||||
|
for <gmail@simplemail.fplante.fr>; Mon, 27 Jun 2022 08:48:15 -0700 (PDT)
|
||||||
|
X-Gm-Message-State: AJIora8exR9DGeRFoKAtjzwLtUpH5hqx6Zt3tm8n4gUQQivGQ3fELjUV
|
||||||
|
yT7RQIfeW9Kv2atuOcgtmGYVU4iQ8VBeLmK1xvOYL4XpXfrT7ZrJNQ==
|
||||||
|
Authentication-Results: mx.google.com;
|
||||||
|
dkim=pass header.i=@matera.eu header.s=fnt header.b=XahYMey7;
|
||||||
|
dkim=pass header.i=@sendgrid.info header.s=smtpapi header.b="QOCS/yjt";
|
||||||
|
spf=pass (google.com: domain of bounces+14445963-ab4e-csyndic.quartz=gmail.com@front-mail.matera.eu designates 168.245.4.42 as permitted sender) smtp.mailfrom="bounces+14445963-ab4e-csyndic.quartz=gmail.com@front-mail.matera.eu";
|
||||||
|
dmarc=pass (p=NONE sp=NONE dis=NONE) header.from=matera.eu
|
||||||
|
Received: from out.frontapp.com (unknown)
|
||||||
|
by geopod-ismtpd-3-0 (SG)
|
||||||
|
with ESMTP id d2gM2N7PT7W8d2-UEC4ESA
|
||||||
|
for <csyndic.quartz@gmail.com>;
|
||||||
|
Mon, 27 Jun 2022 15:48:11.014 +0000 (UTC)
|
||||||
|
Content-Type: multipart/alternative;
|
||||||
|
boundary="----sinikael-?=_1-16563448907660.10629093370416887"
|
||||||
|
In-Reply-To:
|
||||||
|
<imported@frontapp.com_81c5208b4cff8b0633f167fda4e6e8e8f63b7a9b>
|
||||||
|
References:
|
||||||
|
<imported@frontapp.com_t:AssembléeGénérale2022-06-25T16:32:03+02:006b3cdade-982b-47cd-8114-6a037dfb7d60>
|
||||||
|
<imported@frontapp.com_f924cce139940c9935621f067d46443597394f34>
|
||||||
|
<imported@frontapp.com_t:Appeldefonds2022-06-26T10:04:55+02:00d89f5e23-6d98-4f01-95fa-b7c7544b7aa9>
|
||||||
|
<imported@frontapp.com_81c5208b4cff8b0633f167fda4e6e8e8f63b7a9b>
|
||||||
|
<af07e94a66ece6564ae30a2aaac7a34c@frontapp.com>
|
||||||
|
From: {{ sender_address }}
|
||||||
|
To: {{ recipient_address }}
|
||||||
|
CC: {{ cc_address }}
|
||||||
|
Subject: Something
|
||||||
|
Message-ID: <af07e94a66ece6564ae30a2aaac7a34c@frontapp.com>
|
||||||
|
X-Mailer: Front (1.0; +https://frontapp.com;
|
||||||
|
+msgid=af07e94a66ece6564ae30a2aaac7a34c@frontapp.com)
|
||||||
|
X-Feedback-ID: 14445963:SG
|
||||||
|
X-SG-EID:
|
||||||
|
=?us-ascii?Q?XtlxQDg5i3HqMzQY2Upg19JPZBVl1RybInUUL2yta9uBoIU4KU1FMJ5DjWrz6g?=
|
||||||
|
=?us-ascii?Q?fJUK5Qmneg2uc46gwp5BdHdp6Foaq5gg3xJriv3?=
|
||||||
|
=?us-ascii?Q?9OA=2FWRifeylU9O+ngdNbOKXoeJAkROmp2mCgw9x?=
|
||||||
|
=?us-ascii?Q?uud+EclOT9mYVtbZsydOLLm6Y2PPswQl8lnmiku?=
|
||||||
|
=?us-ascii?Q?DAhkG15HTz2FbWGWNDFb7VrSsN5ddjAscr6sIHw?=
|
||||||
|
=?us-ascii?Q?S48R5fnXmfhPbmlCgqFjr0FGphfuBdNAt6z6w8a?=
|
||||||
|
=?us-ascii?Q?o9u1EYDIX7zWHZ+Tr3eyw=3D=3D?=
|
||||||
|
X-SG-ID:
|
||||||
|
=?us-ascii?Q?N2C25iY2uzGMFz6rgvQsb8raWjw0ZPf1VmjsCkspi=2FI9PhcvqXQTpKqqyZkvBe?=
|
||||||
|
=?us-ascii?Q?+2RscnQ4WPkA+BN1vYgz1rezTVIqgp+rlWrKk8o?=
|
||||||
|
=?us-ascii?Q?HoB5dzpX6HKWtWCVRi10zwlDN1+pJnySoIUrlaT?=
|
||||||
|
=?us-ascii?Q?PA2aqQKmMQbjTl0CUAFryR8hhHcxdS0cQowZSd7?=
|
||||||
|
=?us-ascii?Q?XNjJWLvCGF7ODwg=2FKr+4yRE8UvULS2nrdO2wWyQ?=
|
||||||
|
=?us-ascii?Q?AiFHdPdZsRlgNomEo=3D?=
|
||||||
|
X-Spamd-Result: default: False [-2.00 / 13.00];
|
||||||
|
ARC_ALLOW(-1.00)[google.com:s=arc-20160816:i=1];
|
||||||
|
MIME_GOOD(-0.10)[multipart/alternative,text/plain];
|
||||||
|
REPLYTO_ADDR_EQ_FROM(0.00)[];
|
||||||
|
FORGED_RECIPIENTS_FORWARDING(0.00)[];
|
||||||
|
NEURAL_HAM(-0.00)[-0.981];
|
||||||
|
FREEMAIL_TO(0.00)[gmail.com];
|
||||||
|
RCVD_TLS_LAST(0.00)[];
|
||||||
|
FREEMAIL_ENVFROM(0.00)[gmail.com];
|
||||||
|
MIME_TRACE(0.00)[0:+,1:+,2:~];
|
||||||
|
RWL_MAILSPIKE_POSSIBLE(0.00)[209.85.208.49:from]
|
||||||
|
|
||||||
|
------sinikael-?=_1-16563448907660.10629093370416887
|
||||||
|
Content-Type: text/plain; charset=utf-8
|
||||||
|
Content-Transfer-Encoding: quoted-printable
|
||||||
|
|
||||||
|
From {{ sender_address }} To {{ recipient_address }}
|
||||||
|
------sinikael-?=_1-16563448907660.10629093370416887--
|
33
app/tests/handler/test_encrypt_pgp.py
Normal file
33
app/tests/handler/test_encrypt_pgp.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
from aiosmtpd.smtp import Envelope
|
||||||
|
|
||||||
|
import email_handler
|
||||||
|
from app.config import get_abs_path
|
||||||
|
from app.db import Session
|
||||||
|
from app.pgp_utils import load_public_key
|
||||||
|
from tests.utils import create_new_user, load_eml_file, random_email
|
||||||
|
|
||||||
|
from app.models import Alias
|
||||||
|
|
||||||
|
|
||||||
|
def test_encrypt_with_pgp():
|
||||||
|
user = create_new_user()
|
||||||
|
pgp_public_key = open(get_abs_path("local_data/public-pgp.asc")).read()
|
||||||
|
mailbox = user.default_mailbox
|
||||||
|
mailbox.pgp_public_key = pgp_public_key
|
||||||
|
mailbox.generic_subject = True
|
||||||
|
mailbox.pgp_finger_print = load_public_key(pgp_public_key)
|
||||||
|
alias = Alias.create_new_random(user)
|
||||||
|
Session.flush()
|
||||||
|
sender_address = random_email()
|
||||||
|
msg = load_eml_file(
|
||||||
|
"email_to_pgp_encrypt.eml",
|
||||||
|
{
|
||||||
|
"sender_address": sender_address,
|
||||||
|
"recipient_address": alias.email,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
envelope = Envelope()
|
||||||
|
envelope.mail_from = sender_address
|
||||||
|
envelope.rcpt_tos = [alias.email]
|
||||||
|
result = email_handler.MailHandler()._handle(envelope, msg)
|
||||||
|
assert result is not None
|
74
app/tests/handler/test_preserved_headers.py
Normal file
74
app/tests/handler/test_preserved_headers.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
from aiosmtpd.smtp import Envelope
|
||||||
|
|
||||||
|
import email_handler
|
||||||
|
from app.db import Session
|
||||||
|
from app.email import headers, status
|
||||||
|
from app.mail_sender import mail_sender
|
||||||
|
from app.models import Alias
|
||||||
|
from app.utils import random_string
|
||||||
|
from tests.utils import create_new_user, load_eml_file, random_email
|
||||||
|
|
||||||
|
|
||||||
|
@mail_sender.store_emails_test_decorator
|
||||||
|
def test_original_headers_from_preserved():
|
||||||
|
user = create_new_user()
|
||||||
|
alias = Alias.create_new_random(user)
|
||||||
|
Session.flush()
|
||||||
|
assert user.include_header_email_header
|
||||||
|
original_sender_address = random_email()
|
||||||
|
msg = load_eml_file(
|
||||||
|
"replacement_on_forward_phase.eml",
|
||||||
|
{
|
||||||
|
"sender_address": original_sender_address,
|
||||||
|
"recipient_address": alias.email,
|
||||||
|
"cc_address": random_email(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
envelope = Envelope()
|
||||||
|
envelope.mail_from = f"env.{original_sender_address}"
|
||||||
|
envelope.rcpt_tos = [alias.email]
|
||||||
|
result = email_handler.MailHandler()._handle(envelope, msg)
|
||||||
|
assert result == status.E200
|
||||||
|
send_requests = mail_sender.get_stored_emails()
|
||||||
|
assert len(send_requests) == 1
|
||||||
|
request = send_requests[0]
|
||||||
|
assert request.msg[headers.SL_ENVELOPE_FROM] == envelope.mail_from
|
||||||
|
assert request.msg[headers.SL_ORIGINAL_FROM] == original_sender_address
|
||||||
|
assert (
|
||||||
|
request.msg[headers.AUTHENTICATION_RESULTS]
|
||||||
|
== msg[headers.AUTHENTICATION_RESULTS]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@mail_sender.store_emails_test_decorator
|
||||||
|
def test_original_headers_from_with_name_preserved():
|
||||||
|
user = create_new_user()
|
||||||
|
alias = Alias.create_new_random(user)
|
||||||
|
Session.flush()
|
||||||
|
assert user.include_header_email_header
|
||||||
|
original_sender_address = random_email()
|
||||||
|
name = random_string(10)
|
||||||
|
msg = load_eml_file(
|
||||||
|
"replacement_on_forward_phase.eml",
|
||||||
|
{
|
||||||
|
"sender_address": f"{name} <{original_sender_address}>",
|
||||||
|
"recipient_address": alias.email,
|
||||||
|
"cc_address": random_email(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
envelope = Envelope()
|
||||||
|
envelope.mail_from = f"env.{original_sender_address}"
|
||||||
|
envelope.rcpt_tos = [alias.email]
|
||||||
|
result = email_handler.MailHandler()._handle(envelope, msg)
|
||||||
|
assert result == status.E200
|
||||||
|
send_requests = mail_sender.get_stored_emails()
|
||||||
|
assert len(send_requests) == 1
|
||||||
|
request = send_requests[0]
|
||||||
|
assert request.msg[headers.SL_ENVELOPE_FROM] == envelope.mail_from
|
||||||
|
assert (
|
||||||
|
request.msg[headers.SL_ORIGINAL_FROM] == f"{name} <{original_sender_address}>"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
request.msg[headers.AUTHENTICATION_RESULTS]
|
||||||
|
== msg[headers.AUTHENTICATION_RESULTS]
|
||||||
|
)
|
0
app/tests/monitor/__init__.py
Normal file
0
app/tests/monitor/__init__.py
Normal file
350
app/tests/monitor/test_upcloud_get_metric.py
Normal file
350
app/tests/monitor/test_upcloud_get_metric.py
Normal file
@ -0,0 +1,350 @@
|
|||||||
|
from monitor.upcloud import get_metric, get_metrics
|
||||||
|
from monitor.metric import UpcloudMetrics, UpcloudMetric, UpcloudRecord
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
MOCK_RESPONSE = """
|
||||||
|
{
|
||||||
|
"cpu_usage": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
|
||||||
|
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
|
||||||
|
["2022-01-21T13:11:30Z", 2.61619694060839, 3.1358378052207883],
|
||||||
|
["2022-01-21T13:12:00Z", 3.275132296130991, 4.196249043309251]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "CPU usage %" }
|
||||||
|
},
|
||||||
|
"disk_usage": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 5.654416415900109, 5.58959125727556],
|
||||||
|
["2022-01-21T13:11:00Z", 5.654416415900109, 5.58959125727556],
|
||||||
|
["2022-01-21T13:11:30Z", 5.654416415900109, 5.58959125727556]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Disk space usage %" }
|
||||||
|
},
|
||||||
|
"diskio_reads": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 0, 0],
|
||||||
|
["2022-01-21T13:11:00Z", 0, 0],
|
||||||
|
["2022-01-21T13:11:30Z", 0, 0]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Disk iops (reads)" }
|
||||||
|
},
|
||||||
|
"diskio_writes": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 3, 2],
|
||||||
|
["2022-01-21T13:11:00Z", 2, 3],
|
||||||
|
["2022-01-21T13:11:30Z", 4, 3]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Disk iops (writes)" }
|
||||||
|
},
|
||||||
|
"load_average": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 0.11, 0.11],
|
||||||
|
["2022-01-21T13:11:00Z", 0.14, 0.1],
|
||||||
|
["2022-01-21T13:11:30Z", 0.14, 0.09]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Load average (5 min)" }
|
||||||
|
},
|
||||||
|
"mem_usage": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 11.491766148261078, 12.318932883261219],
|
||||||
|
["2022-01-21T13:11:00Z", 11.511967645759277, 12.304403727425075],
|
||||||
|
["2022-01-21T13:11:30Z", 11.488581675749048, 12.272260458006759]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Memory usage %" }
|
||||||
|
},
|
||||||
|
"net_receive": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 442, 470],
|
||||||
|
["2022-01-21T13:11:00Z", 439, 384],
|
||||||
|
["2022-01-21T13:11:30Z", 466, 458]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Network receive (bytes/s)" }
|
||||||
|
},
|
||||||
|
"net_send": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 672, 581],
|
||||||
|
["2022-01-21T13:11:00Z", 660, 555],
|
||||||
|
["2022-01-21T13:11:30Z", 694, 573]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "Network transmit (bytes/s)" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_metrics():
|
||||||
|
response = json.loads(MOCK_RESPONSE)
|
||||||
|
metrics = get_metrics(response)
|
||||||
|
assert metrics == UpcloudMetrics(
|
||||||
|
metrics=[
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="cpu_usage",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:12:00Z",
|
||||||
|
value=3.275132296130991,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:12:00Z",
|
||||||
|
value=4.196249043309251,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="disk_usage",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=5.654416415900109,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=5.58959125727556,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="diskio_reads",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=0,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=0,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="diskio_writes",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=4,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=3,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="load_average",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=0.14,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=0.09,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="mem_usage",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=11.488581675749048,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=12.272260458006759,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="net_receive",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=466,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=458,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
UpcloudMetric(
|
||||||
|
metric_name="net_send",
|
||||||
|
records=[
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="master",
|
||||||
|
label="test-1 " "(master)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=694,
|
||||||
|
),
|
||||||
|
UpcloudRecord(
|
||||||
|
db_role="standby",
|
||||||
|
label="test-2 " "(standby)",
|
||||||
|
time="2022-01-21T13:11:30Z",
|
||||||
|
value=573,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_metric():
|
||||||
|
response = json.loads(MOCK_RESPONSE)
|
||||||
|
metric_name = "cpu_usage"
|
||||||
|
metric = get_metric(response, metric_name)
|
||||||
|
|
||||||
|
assert metric.metric_name == metric_name
|
||||||
|
assert len(metric.records) == 2
|
||||||
|
assert metric.records[0].label == "test-1 (master)"
|
||||||
|
assert metric.records[0].time == "2022-01-21T13:12:00Z"
|
||||||
|
assert metric.records[0].value == 3.275132296130991
|
||||||
|
|
||||||
|
assert metric.records[1].label == "test-2 (standby)"
|
||||||
|
assert metric.records[1].time == "2022-01-21T13:12:00Z"
|
||||||
|
assert metric.records[1].value == 4.196249043309251
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_metric_with_none_value():
|
||||||
|
response_str = """
|
||||||
|
{
|
||||||
|
"cpu_usage": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
|
||||||
|
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
|
||||||
|
["2022-01-21T13:11:30Z", null, 3.1358378052207883],
|
||||||
|
["2022-01-21T13:12:00Z", 3.275132296130991, null]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "CPU usage %" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
response = json.loads(response_str)
|
||||||
|
metric = get_metric(response, "cpu_usage")
|
||||||
|
|
||||||
|
assert metric.records[0].label == "test-1 (master)"
|
||||||
|
assert metric.records[0].value == 3.275132296130991
|
||||||
|
assert metric.records[1].label == "test-2 (standby)"
|
||||||
|
assert metric.records[1].value == 3.1358378052207883
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_metric_with_none_value_in_last_two_positions():
|
||||||
|
response_str = """
|
||||||
|
{
|
||||||
|
"cpu_usage": {
|
||||||
|
"data": {
|
||||||
|
"cols": [
|
||||||
|
{ "label": "time", "type": "date" },
|
||||||
|
{ "label": "test-1 (master)", "type": "number" },
|
||||||
|
{ "label": "test-2 (standby)", "type": "number" }
|
||||||
|
],
|
||||||
|
"rows": [
|
||||||
|
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
|
||||||
|
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
|
||||||
|
["2022-01-21T13:11:30Z", null, null],
|
||||||
|
["2022-01-21T13:12:00Z", 3.275132296130991, null]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hints": { "title": "CPU usage %" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
response = json.loads(response_str)
|
||||||
|
metric = get_metric(response, "cpu_usage")
|
||||||
|
|
||||||
|
assert len(metric.records) == 1
|
||||||
|
assert metric.records[0].label == "test-1 (master)"
|
||||||
|
assert metric.records[0].value == 3.275132296130991
|
@ -131,3 +131,22 @@ def test_suffixes_are_valid():
|
|||||||
if len(match.groups()) >= 1:
|
if len(match.groups()) >= 1:
|
||||||
has_prefix += 1
|
has_prefix += 1
|
||||||
assert has_prefix > 0
|
assert has_prefix > 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_default_domain_is_only_shown_once():
|
||||||
|
user = create_new_user()
|
||||||
|
default_domain = SLDomain.filter_by(hidden=False).order_by(SLDomain.order).first()
|
||||||
|
user.default_alias_public_domain_id = default_domain.id
|
||||||
|
Session.flush()
|
||||||
|
options = AliasOptions(
|
||||||
|
show_sl_domains=True, show_partner_domains=get_proton_partner()
|
||||||
|
)
|
||||||
|
suffixes = get_alias_suffixes(user, alias_options=options)
|
||||||
|
found_default = False
|
||||||
|
found_domains = set()
|
||||||
|
for suffix in suffixes:
|
||||||
|
assert suffix.domain not in found_domains
|
||||||
|
found_domains.add(suffix.domain)
|
||||||
|
if default_domain.domain == suffix.domain:
|
||||||
|
found_default = True
|
||||||
|
assert found_default
|
||||||
|
@ -19,10 +19,8 @@ from app.email_utils import (
|
|||||||
copy,
|
copy,
|
||||||
get_spam_from_header,
|
get_spam_from_header,
|
||||||
get_header_from_bounce,
|
get_header_from_bounce,
|
||||||
is_valid_email,
|
|
||||||
add_header,
|
add_header,
|
||||||
generate_reply_email,
|
generate_reply_email,
|
||||||
normalize_reply_email,
|
|
||||||
get_encoding,
|
get_encoding,
|
||||||
encode_text,
|
encode_text,
|
||||||
EmailEncoding,
|
EmailEncoding,
|
||||||
@ -41,6 +39,7 @@ from app.email_utils import (
|
|||||||
get_verp_info_from_email,
|
get_verp_info_from_email,
|
||||||
sl_formataddr,
|
sl_formataddr,
|
||||||
)
|
)
|
||||||
|
from app.email_validation import is_valid_email, normalize_reply_email
|
||||||
from app.models import (
|
from app.models import (
|
||||||
CustomDomain,
|
CustomDomain,
|
||||||
Alias,
|
Alias,
|
||||||
@ -810,7 +809,7 @@ def test_add_header_multipart_with_invalid_part():
|
|||||||
if i < 2:
|
if i < 2:
|
||||||
assert part.get_payload().index("INJECT") > -1
|
assert part.get_payload().index("INJECT") > -1
|
||||||
else:
|
else:
|
||||||
assert part == "invalid"
|
assert part.get_payload() == "invalid"
|
||||||
|
|
||||||
|
|
||||||
def test_sl_formataddr():
|
def test_sl_formataddr():
|
||||||
@ -822,3 +821,10 @@ def test_sl_formataddr():
|
|||||||
# test that the same name-address can't be handled by the built-in formataddr
|
# test that the same name-address can't be handled by the built-in formataddr
|
||||||
with pytest.raises(UnicodeEncodeError):
|
with pytest.raises(UnicodeEncodeError):
|
||||||
formataddr(("é", "è@ç.à"))
|
formataddr(("é", "è@ç.à"))
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_header_to_invalid_multipart():
|
||||||
|
msg = load_eml_file("add_header_multipart.eml")
|
||||||
|
msg = add_header(msg, "test", "test")
|
||||||
|
data = msg.as_string()
|
||||||
|
assert data != ""
|
||||||
|
@ -78,20 +78,20 @@ def test_website_send_to(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{prefix}@example.com",
|
website_email=f"{prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="First Last",
|
name="First Last",
|
||||||
)
|
)
|
||||||
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@SL>'
|
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@sl>'
|
||||||
|
|
||||||
# empty name, ascii website_from, easy case
|
# empty name, ascii website_from, easy case
|
||||||
c1.name = None
|
c1.name = None
|
||||||
c1.website_from = f"First Last <{prefix}@example.com>"
|
c1.website_from = f"First Last <{prefix}@example.com>"
|
||||||
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@SL>'
|
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@sl>'
|
||||||
|
|
||||||
# empty name, RFC 2047 website_from
|
# empty name, RFC 2047 website_from
|
||||||
c1.name = None
|
c1.name = None
|
||||||
c1.website_from = f"=?UTF-8?B?TmjGoW4gTmd1eeG7hW4=?= <{prefix}@example.com>"
|
c1.website_from = f"=?UTF-8?B?TmjGoW4gTmd1eeG7hW4=?= <{prefix}@example.com>"
|
||||||
assert c1.website_send_to() == f'"Nhơn Nguyễn | {prefix} at example.com" <rep@SL>'
|
assert c1.website_send_to() == f'"Nhơn Nguyễn | {prefix} at example.com" <rep@sl>'
|
||||||
|
|
||||||
|
|
||||||
def test_new_addr_default_sender_format(flask_client):
|
def test_new_addr_default_sender_format(flask_client):
|
||||||
@ -103,16 +103,16 @@ def test_new_addr_default_sender_format(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{prefix}@example.com",
|
website_email=f"{prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="First Last",
|
name="First Last",
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert contact.new_addr() == f'"First Last - {prefix} at example.com" <rep@SL>'
|
assert contact.new_addr() == f'"First Last - {prefix} at example.com" <rep@sl>'
|
||||||
|
|
||||||
# Make sure email isn't duplicated if sender name equals email
|
# Make sure email isn't duplicated if sender name equals email
|
||||||
contact.name = f"{prefix}@example.com"
|
contact.name = f"{prefix}@example.com"
|
||||||
assert contact.new_addr() == f'"{prefix} at example.com" <rep@SL>'
|
assert contact.new_addr() == f'"{prefix} at example.com" <rep@sl>'
|
||||||
|
|
||||||
|
|
||||||
def test_new_addr_a_sender_format(flask_client):
|
def test_new_addr_a_sender_format(flask_client):
|
||||||
@ -126,12 +126,12 @@ def test_new_addr_a_sender_format(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{prefix}@example.com",
|
website_email=f"{prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="First Last",
|
name="First Last",
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert contact.new_addr() == f'"First Last - {prefix}(a)example.com" <rep@SL>'
|
assert contact.new_addr() == f'"First Last - {prefix}(a)example.com" <rep@sl>'
|
||||||
|
|
||||||
|
|
||||||
def test_new_addr_no_name_sender_format(flask_client):
|
def test_new_addr_no_name_sender_format(flask_client):
|
||||||
@ -145,12 +145,12 @@ def test_new_addr_no_name_sender_format(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{prefix}@example.com",
|
website_email=f"{prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="First Last",
|
name="First Last",
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert contact.new_addr() == "rep@SL"
|
assert contact.new_addr() == "rep@sl"
|
||||||
|
|
||||||
|
|
||||||
def test_new_addr_name_only_sender_format(flask_client):
|
def test_new_addr_name_only_sender_format(flask_client):
|
||||||
@ -164,12 +164,12 @@ def test_new_addr_name_only_sender_format(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{prefix}@example.com",
|
website_email=f"{prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="First Last",
|
name="First Last",
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert contact.new_addr() == "First Last <rep@SL>"
|
assert contact.new_addr() == "First Last <rep@sl>"
|
||||||
|
|
||||||
|
|
||||||
def test_new_addr_at_only_sender_format(flask_client):
|
def test_new_addr_at_only_sender_format(flask_client):
|
||||||
@ -183,12 +183,12 @@ def test_new_addr_at_only_sender_format(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{prefix}@example.com",
|
website_email=f"{prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="First Last",
|
name="First Last",
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert contact.new_addr() == f'"{prefix} at example.com" <rep@SL>'
|
assert contact.new_addr() == f'"{prefix} at example.com" <rep@sl>'
|
||||||
|
|
||||||
|
|
||||||
def test_new_addr_unicode(flask_client):
|
def test_new_addr_unicode(flask_client):
|
||||||
@ -200,14 +200,14 @@ def test_new_addr_unicode(flask_client):
|
|||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
alias_id=alias.id,
|
alias_id=alias.id,
|
||||||
website_email=f"{random_prefix}@example.com",
|
website_email=f"{random_prefix}@example.com",
|
||||||
reply_email="rep@SL",
|
reply_email="rep@sl",
|
||||||
name="Nhơn Nguyễn",
|
name="Nhơn Nguyễn",
|
||||||
commit=True,
|
commit=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert (
|
assert (
|
||||||
contact.new_addr()
|
contact.new_addr()
|
||||||
== f"=?utf-8?q?Nh=C6=A1n_Nguy=E1=BB=85n_-_{random_prefix}_at_example=2Ecom?= <rep@SL>"
|
== f"=?utf-8?q?Nh=C6=A1n_Nguy=E1=BB=85n_-_{random_prefix}_at_example=2Ecom?= <rep@sl>"
|
||||||
)
|
)
|
||||||
|
|
||||||
# sanity check
|
# sanity check
|
||||||
|
Reference in New Issue
Block a user