Compare commits


6 Commits

SHA1 Message Date
6051d72691 4.33.3 2023-08-06 17:51:04 +01:00
c31a75a9ef Update README.md 2023-08-06 16:04:57 +00:00
ef289385ff Update README.md 2023-08-06 16:04:47 +00:00
9b12a2ad33 Update README.md 2023-08-06 16:04:41 +00:00
8eb19d88f3 Remove provenance [CI SKIP] 2023-08-06 16:01:04 +00:00
e36e9d3077 4.32.4 2023-08-02 16:49:54 +01:00
32 changed files with 894 additions and 305 deletions

View File

@ -17,6 +17,7 @@ steps:
image: thegeeklab/drone-docker-buildx
privileged: true
settings:
provenance: false
dockerfile: app/Dockerfile
context: app
registry: git.mrmeeb.stream

View File

@ -1,9 +1,7 @@
# Simple Login
# SimpleLogin
[![Build Status](https://drone.mrmeeb.stream/api/badges/MrMeeb/simple-login/status.svg?ref=refs/heads/main)](https://drone.mrmeeb.stream/MrMeeb/simple-login)
This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks the simplelogin/app GitHub repo once a day, and builds the latest release automatically if it is newer than the currently built version.
This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks once a day, and builds the latest one automatically if it is newer than the currently built version.
I did this to simplify deployment of my self-hosted SimpleLogin instance. SimpleLogin do not provide an up-to-date version for self-hosting, leaving you with the options of either running a very outdated version with no app support, a beta version, or their `simplelogin/app-ci` version. This last option works well if you use an x86 machine, but I'm running SimpleLogin on an ARM machine. Since I don't want to have to build containers on the machine itself, this repo handles that for me.
This exists to simplify deployment of SimpleLogin in a self-hosted capacity, while also allowing the use of the latest version; SimpleLogin do not provide an up-to-date version for this use.
The image is built for amd64 and arm64 devices.
As a result, this image is built for both amd64 and arm64 devices.

View File

@ -13,8 +13,8 @@ from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
is_valid_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email

View File

@ -535,3 +535,7 @@ DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)

View File

@ -13,10 +13,10 @@ from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
is_valid_email,
generate_reply_email,
parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
ErrContactErrorUpgradeNeeded,

View File

@ -18,6 +18,8 @@ class NewApiKeyForm(FlaskForm):
def clean_up_unused_or_old_api_keys(user_id: int):
total_keys = ApiKey.filter_by(user_id=user_id).count()
if total_keys <= config.MAX_API_KEYS:
return
# Remove oldest unused
for api_key in (
ApiKey.filter_by(user_id=user_id, last_used=None)

View File

@ -8,6 +8,7 @@ from wtforms import PasswordField, validators
from app.config import CONNECT_WITH_PROTON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
@ -21,6 +22,7 @@ class LoginForm(FlaskForm):
@dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
@limiter.limit("3/minute")
@login_required
def enter_sudo():
password_check_form = LoginForm()

View File

@ -19,8 +19,8 @@ from app.email_utils import (
mailbox_already_used,
render,
send_email,
is_valid_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import CSRFValidationForm

View File

@ -828,19 +828,6 @@ def should_add_dkim_signature(domain: str) -> bool:
return False
def is_valid_email(email_address: str) -> bool:
"""
Used to check whether an email address is valid
NOT run MX check.
NOT allow unicode.
"""
try:
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
return True
except EmailNotValidError:
return False
class EmailEncoding(enum.Enum):
BASE64 = "base64"
QUOTED = "quoted-printable"
@ -1116,26 +1103,6 @@ def is_reverse_alias(address: str) -> bool:
)
# allow also + and @ that are present in a reply address
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
def normalize_reply_email(reply_email: str) -> str:
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
if not reply_email.isascii():
reply_email = convert_to_id(reply_email)
ret = []
# drop all control characters like shift, separator, etc
for c in reply_email:
if c not in _ALLOWED_CHARS:
ret.append("_")
else:
ret.append(c)
return "".join(ret)
def should_disable(alias: Alias) -> (bool, str):
"""
Return whether an alias should be disabled and if yes, the reason why

View File

@ -0,0 +1,38 @@
from email_validator import (
validate_email,
EmailNotValidError,
)
from app.utils import convert_to_id
# allow also + and @ that are present in a reply address
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
def is_valid_email(email_address: str) -> bool:
"""
Used to check whether an email address is valid
NOT run MX check.
NOT allow unicode.
"""
try:
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
return True
except EmailNotValidError:
return False
def normalize_reply_email(reply_email: str) -> str:
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
if not reply_email.isascii():
reply_email = convert_to_id(reply_email)
ret = []
# drop all control characters like shift, separator, etc
for c in reply_email:
if c not in _ALLOWED_CHARS:
ret.append("_")
else:
ret.append(c)
return "".join(ret)

View File

@ -84,6 +84,14 @@ class ErrAddressInvalid(SLException):
return f"{self.address} is not a valid email address"
class InvalidContactEmailError(SLException):
def __init__(self, website_email: str): # noqa: F821
self.website_email = website_email
def error_for_user(self) -> str:
return f"Cannot create contact with invalid email {self.website_email}"
class ErrContactAlreadyExists(SLException):
"""raised when a contact already exists"""

View File

@ -341,7 +341,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
sa.Boolean, default=True, nullable=False, server_default="1"
)
activated = sa.Column(sa.Boolean, default=False, nullable=False)
activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
# an account can be disabled if having harmful behavior
disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@ -411,7 +411,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
)
referral_id = sa.Column(
sa.ForeignKey("referral.id", ondelete="SET NULL"), nullable=True, default=None
sa.ForeignKey("referral.id", ondelete="SET NULL"),
nullable=True,
default=None,
index=True,
)
referral = orm.relationship("Referral", foreign_keys=[referral_id])
@ -534,6 +537,12 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
nullable=False,
)
__table_args__ = (
sa.Index(
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
),
)
@property
def directory_quota(self):
return min(
@ -568,6 +577,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
@classmethod
def create(cls, email, name="", password=None, from_partner=False, **kwargs):
email = sanitize_email(email)
user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
if password:
@ -1445,7 +1455,7 @@ class Alias(Base, ModelMixin):
)
# have I been pwned
hibp_last_check = sa.Column(ArrowType, default=None)
hibp_last_check = sa.Column(ArrowType, default=None, index=True)
hibp_breaches = orm.relationship("Hibp", secondary="alias_hibp")
# to use Postgres full text search. Only applied on "note" column for now
@ -2291,6 +2301,7 @@ class CustomDomain(Base, ModelMixin):
@classmethod
def create(cls, **kwargs):
domain = kwargs.get("domain")
kwargs["domain"] = domain.replace("\n", "")
if DeletedSubdomain.get_by(domain=domain):
raise SubdomainInTrashError
@ -2590,6 +2601,12 @@ class Mailbox(Base, ModelMixin):
return ret
@classmethod
def create(cls, **kw):
if "email" in kw:
kw["email"] = sanitize_email(kw["email"])
return super().create(**kw)
def __repr__(self):
return f"<Mailbox {self.id} {self.email}>"
@ -2928,6 +2945,8 @@ class Monitoring(Base, ModelMixin):
active_queue = sa.Column(sa.Integer, nullable=False)
deferred_queue = sa.Column(sa.Integer, nullable=False)
__table_args__ = (Index("ix_monitoring_created_at", "created_at"),)
class BatchImport(Base, ModelMixin):
__tablename__ = "batch_import"
@ -3053,6 +3072,8 @@ class Bounce(Base, ModelMixin):
email = sa.Column(sa.String(256), nullable=False, index=True)
info = sa.Column(sa.Text, nullable=True)
__table_args__ = (sa.Index("ix_bounce_created_at", "created_at"),)
class TransactionalEmail(Base, ModelMixin):
"""Storing all email addresses that receive transactional emails, including account email and mailboxes.
@ -3062,6 +3083,8 @@ class TransactionalEmail(Base, ModelMixin):
__tablename__ = "transactional_email"
email = sa.Column(sa.String(256), nullable=False, unique=False)
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
class Payout(Base, ModelMixin):
"""Referral payouts"""

View File

@ -99,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
email_address = email_address.strip().replace(" ", "").replace("\n", " ")
if not not_lower:
email_address = email_address.lower()
return email_address
return email_address.replace("\u200f", "")
class NextUrlSanitizer:

View File

@ -22,10 +22,9 @@ from app.email_utils import (
render,
email_can_be_used_as_mailbox,
send_email_with_rate_control,
normalize_reply_email,
is_valid_email,
get_email_domain_part,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.errors import ProtonPartnerNotSetUp
from app.log import LOG
from app.mail_sender import load_unsent_mails_from_fs_and_resend
@ -66,12 +65,14 @@ from server import create_light_app
def notify_trial_end():
for user in User.filter(
User.activated.is_(True), User.trial_end.isnot(None), User.lifetime.is_(False)
User.activated.is_(True),
User.trial_end.isnot(None),
User.trial_end >= arrow.now().shift(days=2),
User.trial_end < arrow.now().shift(days=3),
User.lifetime.is_(False),
).all():
try:
if user.in_trial() and arrow.now().shift(
days=3
) > user.trial_end >= arrow.now().shift(days=2):
if user.in_trial():
LOG.d("Send trial end email to user %s", user)
send_trial_end_soon_email(user)
# happens if user has been deleted in the meantime
@ -104,7 +105,9 @@ def delete_logs():
def delete_refused_emails():
for refused_email in RefusedEmail.filter_by(deleted=False).all():
for refused_email in (
RefusedEmail.filter_by(deleted=False).order_by(RefusedEmail.id).all()
):
if arrow.now().shift(days=1) > refused_email.delete_at >= arrow.now():
LOG.d("Delete refused email %s", refused_email)
if refused_email.path:
@ -272,7 +275,11 @@ def compute_metric2() -> Metric2:
_24h_ago = now.shift(days=-1)
nb_referred_user_paid = 0
for user in User.filter(User.referral_id.isnot(None)):
for user in (
User.filter(User.referral_id.isnot(None))
.yield_per(500)
.enable_eagerloads(False)
):
if user.is_paid():
nb_referred_user_paid += 1
@ -1020,7 +1027,8 @@ async def check_hibp():
)
.filter(Alias.enabled)
.order_by(Alias.hibp_last_check.asc())
.all()
.yield_per(500)
.enable_eagerloads(False)
):
await queue.put(alias.id)

View File

@ -5,68 +5,64 @@ jobs:
schedule: "0 0 * * *"
captureStderr: true
- name: SimpleLogin Notify Trial Ends
command: python /code/cron.py -j notify_trial_end
shell: /bin/bash
schedule: "0 8 * * *"
captureStderr: true
- name: SimpleLogin Notify Manual Subscription Ends
command: python /code/cron.py -j notify_manual_subscription_end
shell: /bin/bash
schedule: "0 9 * * *"
captureStderr: true
- name: SimpleLogin Notify Premium Ends
command: python /code/cron.py -j notify_premium_end
shell: /bin/bash
schedule: "0 10 * * *"
captureStderr: true
- name: SimpleLogin Delete Logs
command: python /code/cron.py -j delete_logs
shell: /bin/bash
schedule: "0 11 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash
schedule: "0 12 * * *"
captureStderr: true
- name: SimpleLogin Sanity Check
command: python /code/cron.py -j sanity_check
shell: /bin/bash
schedule: "0 2 * * *"
captureStderr: true
- name: SimpleLogin Delete Old Monitoring records
command: python /code/cron.py -j delete_old_monitoring
shell: /bin/bash
schedule: "0 14 * * *"
schedule: "15 1 * * *"
captureStderr: true
- name: SimpleLogin Custom Domain check
command: python /code/cron.py -j check_custom_domain
shell: /bin/bash
schedule: "0 15 * * *"
schedule: "15 2 * * *"
captureStderr: true
- name: SimpleLogin HIBP check
command: python /code/cron.py -j check_hibp
shell: /bin/bash
schedule: "0 18 * * *"
schedule: "15 3 * * *"
captureStderr: true
concurrencyPolicy: Forbid
- name: SimpleLogin Notify HIBP breaches
command: python /code/cron.py -j notify_hibp
shell: /bin/bash
schedule: "0 19 * * *"
schedule: "15 4 * * *"
captureStderr: true
concurrencyPolicy: Forbid
- name: SimpleLogin Delete Logs
command: python /code/cron.py -j delete_logs
shell: /bin/bash
schedule: "15 5 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash
schedule: "15 6 * * *"
captureStderr: true
- name: SimpleLogin Notify Trial Ends
command: python /code/cron.py -j notify_trial_end
shell: /bin/bash
schedule: "15 8 * * *"
captureStderr: true
- name: SimpleLogin Notify Manual Subscription Ends
command: python /code/cron.py -j notify_manual_subscription_end
shell: /bin/bash
schedule: "15 9 * * *"
captureStderr: true
- name: SimpleLogin Notify Premium Ends
command: python /code/cron.py -j notify_premium_end
shell: /bin/bash
schedule: "15 10 * * *"
captureStderr: true
- name: SimpleLogin send unsent emails
command: python /code/cron.py -j send_undelivered_mails
shell: /bin/bash

View File

@ -1,4 +1,4 @@
# SSL, HTTPS, and HSTS
# SSL, HTTPS, HSTS and additional security measures
It's highly recommended to enable SSL/TLS on your server, both for the web app and email server.
@ -58,3 +58,124 @@ Now, reload Nginx:
```bash
sudo systemctl reload nginx
```
## Additional security measures
For additional security, we recommend you take some extra steps.
### Enable Certificate Authority Authorization (CAA)
[Certificate Authority Authorization](https://letsencrypt.org/docs/caa/) is a step you can take to restrict the list of certificate authorities that are allowed to issue certificates for your domains.
Use [SSLMate's CAA Record Generator](https://sslmate.com/caa/) to create a **CAA record** with the following configuration:
- `flags`: `0`
- `tag`: `issue`
- `value`: `"letsencrypt.org"`
To verify that the DNS record works, the following command
```bash
dig @1.1.1.1 mydomain.com caa
```
should return:
```
mydomain.com. 3600 IN CAA 0 issue "letsencrypt.org"
```
### SMTP MTA Strict Transport Security (MTA-STS)
[MTA-STS](https://datatracker.ietf.org/doc/html/rfc8461) is an extra step you can take to broadcast the ability of your instance to receive and, optionally, enforce TLS-secure SMTP connections to protect email traffic.
Enabling MTA-STS requires you to serve a policy file from the subdomain `mta-sts.mydomain.com` on a well-known route (`https://mta-sts.mydomain.com/.well-known/mta-sts.txt`).
Create a text file `/var/www/.well-known/mta-sts.txt` with the content:
```txt
version: STSv1
mode: testing
mx: app.mydomain.com
max_age: 86400
```
It is recommended to start with `mode: testing` to give yourself time to review failure reports. Add as many `mx:` entries as you have matching **MX records** in your DNS configuration.
Create a **TXT record** for `_mta-sts.mydomain.com.` with the following value:
```txt
v=STSv1; id=UNIX_TIMESTAMP
```
Here `UNIX_TIMESTAMP` is the current date/time as a Unix timestamp.
Use the following command to generate the record:
```bash
echo "v=STSv1; id=$(date +%s)"
```
To verify that the DNS record works, the following command
```bash
dig @1.1.1.1 _mta-sts.mydomain.com txt
```
should return a result similar to this one:
```
_mta-sts.mydomain.com. 3600 IN TXT "v=STSv1; id=1689416399"
```
Create an additional Nginx configuration in `/etc/nginx/sites-enabled/mta-sts` with the following content:
```
server {
server_name mta-sts.mydomain.com;
root /var/www;
listen 80;
location ^~ /.well-known {}
}
```
Restart Nginx with the following command:
```sh
sudo service nginx restart
```
A correct configuration of MTA-STS, however, requires that the certificate used to host the `mta-sts` subdomain matches that of the subdomain referred to by the **MX record** from the DNS. In other words, both `mta-sts.mydomain.com` and `app.mydomain.com` must share the same certificate.
The easiest way to do this is to _expand_ the certificate associated with `app.mydomain.com` to also cover the `mta-sts` subdomain, using the following command:
```sh
certbot --expand --nginx -d app.mydomain.com,mta-sts.mydomain.com
```
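Once Nginx and the certificate are in place, you can sanity-check that the policy is actually being served over HTTPS. A small hedged sketch using `requests` (replace `mydomain.com` with your domain):
```python
# Sketch: fetch the MTA-STS policy over HTTPS and print its key/value pairs.
import requests

POLICY_URL = "https://mta-sts.mydomain.com/.well-known/mta-sts.txt"

response = requests.get(POLICY_URL, timeout=10)
response.raise_for_status()  # fails if the vhost or the certificate is misconfigured

policy = {}
for line in response.text.splitlines():
    if ":" in line:
        key, value = line.split(":", 1)
        policy.setdefault(key.strip(), []).append(value.strip())

# Expect version STSv1, mode testing, one or more mx entries and a max_age.
print(policy)
```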
### SMTP TLS Reporting
[TLSRPT](https://datatracker.ietf.org/doc/html/rfc8460) is used by SMTP systems to report failures in establishing TLS-secure sessions as broadcast by the MTA-STS configuration.
Configuring MTA-STS in `mode: testing` as shown in the previous section gives you time to review failures from some SMTP senders.
Create a **TXT record** for `_smtp._tls.mydomain.com.` with the following value:
```txt
v=TLSRPTv1; rua=mailto:YOUR_EMAIL
```
The TLSRPT configuration at the DNS level allows SMTP senders that fail to initiate TLS-secure sessions to send reports to a particular email address. We suggest creating a `tls-reports` alias in SimpleLogin for this purpose.
To verify that the DNS record works, the following command
```bash
dig @1.1.1.1 _smtp._tls.mydomain.com txt
```
should return a result similar to this one:
```
_smtp._tls.mydomain.com. 3600 IN TXT "v=TLSRPTv1; rua=mailto:tls-reports@mydomain.com"
```
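Reports delivered to the `rua` address arrive as JSON documents in the RFC 8460 format (often as gzip-compressed attachments). As a rough sketch of reviewing one that has been saved to disk as plain JSON (field names follow the RFC; adjust to the reports you actually receive):
```python
# Sketch: summarize an RFC 8460 TLS report saved locally as tls-report.json.
import json

with open("tls-report.json") as fh:
    report = json.load(fh)

print(f"Report from {report.get('organization-name')} ({report.get('report-id')})")
for policy in report.get("policies", []):
    summary = policy.get("summary", {})
    ok = summary.get("total-successful-session-count", 0)
    failed = summary.get("total-failure-session-count", 0)
    domain = policy.get("policy", {}).get("policy-domain")
    print(f"policy {domain}: {ok} successful, {failed} failed sessions")
    for failure in policy.get("failure-details", []):
        print(f"  {failure.get('result-type')} from {failure.get('sending-mta-ip')}")
```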

View File

@ -106,8 +106,6 @@ from app.email_utils import (
get_header_unicode,
generate_reply_email,
is_reverse_alias,
normalize_reply_email,
is_valid_email,
replace,
should_disable,
parse_id_from_bounce,
@ -123,6 +121,7 @@ from app.email_utils import (
generate_verp_email,
sl_formataddr,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.errors import (
NonReverseAliasInReplyPhase,
VERPTransactional,
@ -262,7 +261,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
Session.commit()
except IntegrityError:
LOG.w("Contact %s %s already exist", alias, contact_email)
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
@ -280,6 +279,9 @@ def get_or_create_reply_to_contact(
except ValueError:
return
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
if not is_valid_email(contact_address):
LOG.w(
"invalid reply-to address %s. Parse from %s",
@ -348,6 +350,10 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
continue
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
contact_name = full_address.display_name
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
if contact:
# update the contact name if needed
if contact.name != full_address.display_name:
@ -355,9 +361,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
"Update contact %s name %s to %s",
contact,
contact.name,
full_address.display_name,
contact_name,
)
contact.name = full_address.display_name
contact.name = contact_name
Session.commit()
else:
LOG.d(
@ -372,7 +378,7 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=full_address.display_name,
name=contact_name,
reply_email=generate_reply_email(contact_email, alias),
is_cc=header.lower() == "cc",
automatic_created=True,
@ -541,12 +547,20 @@ def sign_msg(msg: Message) -> Message:
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
try:
signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
if not payload:
raise PGPException("Empty signature by gnupg")
signature.set_payload(payload)
except Exception:
LOG.e("Cannot sign, try using pgpy")
signature.set_payload(
sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
)
payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
if not payload:
raise PGPException("Empty signature by pgpy")
signature.set_payload(payload)
container.attach(signature)

View File

@ -0,0 +1,42 @@
"""empty message
Revision ID: 01827104004b
Revises: 2634b41f54db
Create Date: 2023-07-28 19:39:28.675490
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '01827104004b'
down_revision = '2634b41f54db'
branch_labels = None
depends_on = None
def upgrade():
with op.get_context().autocommit_block():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_alias_hibp_last_check'), 'alias', ['hibp_last_check'], unique=False, postgresql_concurrently=True)
op.create_index('ix_bounce_created_at', 'bounce', ['created_at'], unique=False, postgresql_concurrently=True)
op.create_index('ix_monitoring_created_at', 'monitoring', ['created_at'], unique=False, postgresql_concurrently=True)
op.create_index('ix_transactional_email_created_at', 'transactional_email', ['created_at'], unique=False, postgresql_concurrently=True)
op.create_index(op.f('ix_users_activated'), 'users', ['activated'], unique=False, postgresql_concurrently=True)
op.create_index('ix_users_activated_trial_end_lifetime', 'users', ['activated', 'trial_end', 'lifetime'], unique=False, postgresql_concurrently=True)
op.create_index(op.f('ix_users_referral_id'), 'users', ['referral_id'], unique=False, postgresql_concurrently=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_users_referral_id'), table_name='users')
op.drop_index('ix_users_activated_trial_end_lifetime', table_name='users')
op.drop_index(op.f('ix_users_activated'), table_name='users')
op.drop_index('ix_transactional_email_created_at', table_name='transactional_email')
op.drop_index('ix_monitoring_created_at', table_name='monitoring')
op.drop_index('ix_bounce_created_at', table_name='bounce')
op.drop_index(op.f('ix_alias_hibp_last_check'), table_name='alias')
# ### end Alembic commands ###

0 app/monitor/__init__.py Normal file
View File

21 app/monitor/metric.py Normal file
View File

@ -0,0 +1,21 @@
from dataclasses import dataclass
from typing import List
@dataclass
class UpcloudRecord:
db_role: str
label: str
time: str
value: float
@dataclass
class UpcloudMetric:
metric_name: str
records: List[UpcloudRecord]
@dataclass
class UpcloudMetrics:
metrics: List[UpcloudMetric]

View File

@ -0,0 +1,20 @@
from app.config import UPCLOUD_DB_ID, UPCLOUD_PASSWORD, UPCLOUD_USERNAME
from app.log import LOG
from monitor.newrelic import NewRelicClient
from monitor.upcloud import UpcloudClient
class MetricExporter:
def __init__(self, newrelic_license: str):
self.__upcloud = UpcloudClient(
username=UPCLOUD_USERNAME, password=UPCLOUD_PASSWORD
)
self.__newrelic = NewRelicClient(newrelic_license)
def run(self):
try:
metrics = self.__upcloud.get_metrics(UPCLOUD_DB_ID)
self.__newrelic.send(metrics)
LOG.info("Upcloud metrics sent to NewRelic")
except Exception as e:
LOG.warn(f"Could not export metrics: {e}")

26 app/monitor/newrelic.py Normal file
View File

@ -0,0 +1,26 @@
from monitor.metric import UpcloudMetrics
from newrelic_telemetry_sdk import GaugeMetric, MetricClient
_NEWRELIC_BASE_HOST = "metric-api.eu.newrelic.com"
class NewRelicClient:
def __init__(self, license_key: str):
self.__client = MetricClient(license_key=license_key, host=_NEWRELIC_BASE_HOST)
def send(self, metrics: UpcloudMetrics):
batch = []
for metric in metrics.metrics:
for record in metric.records:
batch.append(
GaugeMetric(
name=f"upcloud.db.{metric.metric_name}",
value=record.value,
tags={"host": record.label, "db_role": record.db_role},
)
)
response = self.__client.send_batch(batch)
response.raise_for_status()

82 app/monitor/upcloud.py Normal file
View File

@ -0,0 +1,82 @@
from app.log import LOG
from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord
import base64
import requests
from typing import Any
BASE_URL = "https://api.upcloud.com"
def get_metric(json: Any, metric: str) -> UpcloudMetric:
records = []
if metric in json:
metric_data = json[metric]
data = metric_data["data"]
cols = list(map(lambda x: x["label"], data["cols"][1:]))
latest = data["rows"][-1]
time = latest[0]
for column_idx in range(len(cols)):
value = latest[1 + column_idx]
# If the latest value is None, try to fetch the second to last
if value is None:
value = data["rows"][-2][1 + column_idx]
if value is not None:
label = cols[column_idx]
if "(master)" in label:
db_role = "master"
else:
db_role = "standby"
records.append(
UpcloudRecord(time=time, db_role=db_role, label=label, value=value)
)
else:
LOG.warn(f"Could not get value for metric {metric}")
return UpcloudMetric(metric_name=metric, records=records)
def get_metrics(json: Any) -> UpcloudMetrics:
return UpcloudMetrics(
metrics=[
get_metric(json, "cpu_usage"),
get_metric(json, "disk_usage"),
get_metric(json, "diskio_reads"),
get_metric(json, "diskio_writes"),
get_metric(json, "load_average"),
get_metric(json, "mem_usage"),
get_metric(json, "net_receive"),
get_metric(json, "net_send"),
]
)
class UpcloudClient:
def __init__(self, username: str, password: str):
if not username:
raise Exception("UpcloudClient username must be set")
if not password:
raise Exception("UpcloudClient password must be set")
client = requests.Session()
encoded_auth = base64.b64encode(
f"{username}:{password}".encode("utf-8")
).decode("utf-8")
client.headers = {"Authorization": f"Basic {encoded_auth}"}
self.__client = client
def get_metrics(self, db_uuid: str) -> UpcloudMetrics:
url = f"{BASE_URL}/1.3/database/{db_uuid}/metrics?period=hour"
LOG.d(f"Performing request to {url}")
response = self.__client.get(url)
LOG.d(f"Status code: {response.status_code}")
if response.status_code != 200:
return UpcloudMetrics(metrics=[])
as_json = response.json()
return get_metrics(as_json)

View File

@ -1,3 +1,4 @@
import configparser
import os
import subprocess
from time import sleep
@ -7,6 +8,7 @@ import newrelic.agent
from app.db import Session
from app.log import LOG
from monitor.metric_exporter import MetricExporter
# the number of consecutive fails
# if more than _max_nb_fails, alert
@ -19,6 +21,18 @@ _max_nb_fails = 10
# the maximum number of emails in incoming & active queue
_max_incoming = 50
_NR_CONFIG_FILE_LOCATION_VAR = "NEW_RELIC_CONFIG_FILE"
def get_newrelic_license() -> str:
nr_file = os.environ.get(_NR_CONFIG_FILE_LOCATION_VAR, None)
if nr_file is None:
raise Exception(f"{_NR_CONFIG_FILE_LOCATION_VAR} not defined")
config = configparser.ConfigParser()
config.read(nr_file)
return config["newrelic"]["license_key"]
@newrelic.agent.background_task()
def log_postfix_metrics():
@ -80,10 +94,13 @@ def log_nb_db_connection():
if __name__ == "__main__":
exporter = MetricExporter(get_newrelic_license())
while True:
log_postfix_metrics()
log_nb_db_connection()
Session.close()
exporter.run()
# 1 min
sleep(60)

194 app/poetry.lock generated

File diff suppressed because it is too large

View File

@ -111,6 +111,7 @@ Deprecated = "^1.2.13"
cryptography = "37.0.1"
SQLAlchemy = "1.3.24"
redis = "^4.5.3"
newrelic-telemetry-sdk = "^0.5.0"
[tool.poetry.dev-dependencies]
pytest = "^7.0.0"

View File

@ -28,7 +28,7 @@
<form id="supportZendeskForm" method="post" enctype="multipart/form-data">
<div class="mt-4 mb-5">
<label for="issueDescription" class="form-label font-weight-bold">What happened?</label>
<textarea class="form-control" required name="ticket_content" id="issueDescription" rows="3" placeholder="Please provide as much information as possible. For example which alias(es), mailbox(es) ar affected, if this is a persistent issue...">{{- ticket_content or '' -}}</textarea>
<textarea class="form-control" required name="ticket_content" id="issueDescription" rows="3" placeholder="Please provide as much information as possible. For example which alias(es), mailbox(es) are affected, if this is a persistent issue...">{{- ticket_content or '' -}}</textarea>
</div>
<div class="mt-5 font-weight-bold">Attach files to support request</div>
<div class="text-muted">Only images, text and emails are accepted</div>

View File

@ -37,6 +37,17 @@ def test_create_delete_api_key(flask_client):
assert ApiKey.filter(ApiKey.user_id == user.id).count() == 1
assert api_key.name == "for test"
# create second api_key
create_r = flask_client.post(
url_for("dashboard.api_key"),
data={"form-name": "create", "name": "for test 2"},
follow_redirects=True,
)
assert create_r.status_code == 200
api_key_2 = ApiKey.filter_by(user_id=user.id).order_by(ApiKey.id.desc()).first()
assert ApiKey.filter(ApiKey.user_id == user.id).count() == 2
assert api_key_2.name == "for test 2"
# delete api_key
delete_r = flask_client.post(
url_for("dashboard.api_key"),
@ -44,7 +55,7 @@ def test_create_delete_api_key(flask_client):
follow_redirects=True,
)
assert delete_r.status_code == 200
assert ApiKey.count() == nb_api_key
assert ApiKey.count() == nb_api_key + 1
def test_delete_all_api_keys(flask_client):

View File

@ -0,0 +1,350 @@
from monitor.upcloud import get_metric, get_metrics
from monitor.metric import UpcloudMetrics, UpcloudMetric, UpcloudRecord
import json
MOCK_RESPONSE = """
{
"cpu_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
["2022-01-21T13:11:30Z", 2.61619694060839, 3.1358378052207883],
["2022-01-21T13:12:00Z", 3.275132296130991, 4.196249043309251]
]
},
"hints": { "title": "CPU usage %" }
},
"disk_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 5.654416415900109, 5.58959125727556],
["2022-01-21T13:11:00Z", 5.654416415900109, 5.58959125727556],
["2022-01-21T13:11:30Z", 5.654416415900109, 5.58959125727556]
]
},
"hints": { "title": "Disk space usage %" }
},
"diskio_reads": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 0, 0],
["2022-01-21T13:11:00Z", 0, 0],
["2022-01-21T13:11:30Z", 0, 0]
]
},
"hints": { "title": "Disk iops (reads)" }
},
"diskio_writes": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 3, 2],
["2022-01-21T13:11:00Z", 2, 3],
["2022-01-21T13:11:30Z", 4, 3]
]
},
"hints": { "title": "Disk iops (writes)" }
},
"load_average": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 0.11, 0.11],
["2022-01-21T13:11:00Z", 0.14, 0.1],
["2022-01-21T13:11:30Z", 0.14, 0.09]
]
},
"hints": { "title": "Load average (5 min)" }
},
"mem_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 11.491766148261078, 12.318932883261219],
["2022-01-21T13:11:00Z", 11.511967645759277, 12.304403727425075],
["2022-01-21T13:11:30Z", 11.488581675749048, 12.272260458006759]
]
},
"hints": { "title": "Memory usage %" }
},
"net_receive": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 442, 470],
["2022-01-21T13:11:00Z", 439, 384],
["2022-01-21T13:11:30Z", 466, 458]
]
},
"hints": { "title": "Network receive (bytes/s)" }
},
"net_send": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 672, 581],
["2022-01-21T13:11:00Z", 660, 555],
["2022-01-21T13:11:30Z", 694, 573]
]
},
"hints": { "title": "Network transmit (bytes/s)" }
}
}
"""
def test_get_metrics():
response = json.loads(MOCK_RESPONSE)
metrics = get_metrics(response)
assert metrics == UpcloudMetrics(
metrics=[
UpcloudMetric(
metric_name="cpu_usage",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:12:00Z",
value=3.275132296130991,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:12:00Z",
value=4.196249043309251,
),
],
),
UpcloudMetric(
metric_name="disk_usage",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=5.654416415900109,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=5.58959125727556,
),
],
),
UpcloudMetric(
metric_name="diskio_reads",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=0,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=0,
),
],
),
UpcloudMetric(
metric_name="diskio_writes",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=4,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=3,
),
],
),
UpcloudMetric(
metric_name="load_average",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=0.14,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=0.09,
),
],
),
UpcloudMetric(
metric_name="mem_usage",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=11.488581675749048,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=12.272260458006759,
),
],
),
UpcloudMetric(
metric_name="net_receive",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=466,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=458,
),
],
),
UpcloudMetric(
metric_name="net_send",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=694,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=573,
),
],
),
]
)
def test_get_metric():
response = json.loads(MOCK_RESPONSE)
metric_name = "cpu_usage"
metric = get_metric(response, metric_name)
assert metric.metric_name == metric_name
assert len(metric.records) == 2
assert metric.records[0].label == "test-1 (master)"
assert metric.records[0].time == "2022-01-21T13:12:00Z"
assert metric.records[0].value == 3.275132296130991
assert metric.records[1].label == "test-2 (standby)"
assert metric.records[1].time == "2022-01-21T13:12:00Z"
assert metric.records[1].value == 4.196249043309251
def test_get_metric_with_none_value():
response_str = """
{
"cpu_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
["2022-01-21T13:11:30Z", null, 3.1358378052207883],
["2022-01-21T13:12:00Z", 3.275132296130991, null]
]
},
"hints": { "title": "CPU usage %" }
}
}
"""
response = json.loads(response_str)
metric = get_metric(response, "cpu_usage")
assert metric.records[0].label == "test-1 (master)"
assert metric.records[0].value == 3.275132296130991
assert metric.records[1].label == "test-2 (standby)"
assert metric.records[1].value == 3.1358378052207883
def test_get_metric_with_none_value_in_last_two_positions():
response_str = """
{
"cpu_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
["2022-01-21T13:11:30Z", null, null],
["2022-01-21T13:12:00Z", 3.275132296130991, null]
]
},
"hints": { "title": "CPU usage %" }
}
}
"""
response = json.loads(response_str)
metric = get_metric(response, "cpu_usage")
assert len(metric.records) == 1
assert metric.records[0].label == "test-1 (master)"
assert metric.records[0].value == 3.275132296130991

View File

@ -19,10 +19,8 @@ from app.email_utils import (
copy,
get_spam_from_header,
get_header_from_bounce,
is_valid_email,
add_header,
generate_reply_email,
normalize_reply_email,
get_encoding,
encode_text,
EmailEncoding,
@ -41,6 +39,7 @@ from app.email_utils import (
get_verp_info_from_email,
sl_formataddr,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.models import (
CustomDomain,
Alias,

View File

@ -78,20 +78,20 @@ def test_website_send_to(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="First Last",
)
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@SL>'
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@sl>'
# empty name, ascii website_from, easy case
c1.name = None
c1.website_from = f"First Last <{prefix}@example.com>"
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@SL>'
assert c1.website_send_to() == f'"First Last | {prefix} at example.com" <rep@sl>'
# empty name, RFC 2047 website_from
c1.name = None
c1.website_from = f"=?UTF-8?B?TmjGoW4gTmd1eeG7hW4=?= <{prefix}@example.com>"
assert c1.website_send_to() == f'"Nhơn Nguyễn | {prefix} at example.com" <rep@SL>'
assert c1.website_send_to() == f'"Nhơn Nguyễn | {prefix} at example.com" <rep@sl>'
def test_new_addr_default_sender_format(flask_client):
@ -103,16 +103,16 @@ def test_new_addr_default_sender_format(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="First Last",
commit=True,
)
assert contact.new_addr() == f'"First Last - {prefix} at example.com" <rep@SL>'
assert contact.new_addr() == f'"First Last - {prefix} at example.com" <rep@sl>'
# Make sure email isn't duplicated if sender name equals email
contact.name = f"{prefix}@example.com"
assert contact.new_addr() == f'"{prefix} at example.com" <rep@SL>'
assert contact.new_addr() == f'"{prefix} at example.com" <rep@sl>'
def test_new_addr_a_sender_format(flask_client):
@ -126,12 +126,12 @@ def test_new_addr_a_sender_format(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="First Last",
commit=True,
)
assert contact.new_addr() == f'"First Last - {prefix}(a)example.com" <rep@SL>'
assert contact.new_addr() == f'"First Last - {prefix}(a)example.com" <rep@sl>'
def test_new_addr_no_name_sender_format(flask_client):
@ -145,12 +145,12 @@ def test_new_addr_no_name_sender_format(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="First Last",
commit=True,
)
assert contact.new_addr() == "rep@SL"
assert contact.new_addr() == "rep@sl"
def test_new_addr_name_only_sender_format(flask_client):
@ -164,12 +164,12 @@ def test_new_addr_name_only_sender_format(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="First Last",
commit=True,
)
assert contact.new_addr() == "First Last <rep@SL>"
assert contact.new_addr() == "First Last <rep@sl>"
def test_new_addr_at_only_sender_format(flask_client):
@ -183,12 +183,12 @@ def test_new_addr_at_only_sender_format(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="First Last",
commit=True,
)
assert contact.new_addr() == f'"{prefix} at example.com" <rep@SL>'
assert contact.new_addr() == f'"{prefix} at example.com" <rep@sl>'
def test_new_addr_unicode(flask_client):
@ -200,14 +200,14 @@ def test_new_addr_unicode(flask_client):
user_id=user.id,
alias_id=alias.id,
website_email=f"{random_prefix}@example.com",
reply_email="rep@SL",
reply_email="rep@sl",
name="Nhơn Nguyễn",
commit=True,
)
assert (
contact.new_addr()
== f"=?utf-8?q?Nh=C6=A1n_Nguy=E1=BB=85n_-_{random_prefix}_at_example=2Ecom?= <rep@SL>"
== f"=?utf-8?q?Nh=C6=A1n_Nguy=E1=BB=85n_-_{random_prefix}_at_example=2Ecom?= <rep@sl>"
)
# sanity check