Compare commits

19 Commits

| SHA1 |
|---|
| 026865e5bf |
| add94ef2a2 |
| 1081400948 |
| 5776128905 |
| d661860f4c |
| 0a52e32972 |
| 703dcbd0eb |
| ce7ed69547 |
| 4f5564df16 |
| 2fee569131 |
| 7ea45d6f5d |
| 6d24db50bd |
| 88f270c6a1 |
| 0962b1cf29 |
| 6051d72691 |
| c31a75a9ef |
| ef289385ff |
| 9b12a2ad33 |
| 8eb19d88f3 |

@@ -17,6 +17,7 @@ steps:
image: thegeeklab/drone-docker-buildx
privileged: true
settings:
provenance: false
dockerfile: app/Dockerfile
context: app
registry: git.mrmeeb.stream
@@ -35,6 +36,7 @@ steps:
status:
- success
- failure
- killed
settings:
webhook:
from_secret: slack_webhook

10 README.md

@@ -1,9 +1,7 @@
# Simple Login
# SimpleLogin

[](https://drone.mrmeeb.stream/MrMeeb/simple-login)
This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks the simplelogin/app GitHub repo once a day, and builds the latest release automatically if it is newer than the currently built version.

This repo exists to automatically capture any releases of the SaaS edition of SimpleLogin. It checks once a day, and builds the latest one automatically if it is newer than the currentlty built version.
I did this to simplify deployment of my self-hosted SimpleLogin instance. SimpleLogin do not provide an up-to-date version for self-hosting, leaving you with the options of either running a very outdated version with no app support, a beta version, or their `simplelogin/app-ci` version. This last option works well if you use an x86 machine, but I'm running SimpleLogin on an ARM machine. Since I don't want to have to build containers on the machine itself, this repo handles that for me.

This exists to simplify deployment of SimpleLogin in a self-hosted capacity, while also allowing the use of the latest version; SimpleLogin do not provide an up-to-date version for this use.

The image is built for amd64 and arm64 devices.
As a result, this image is built for both amd64 and arm64 devices.

8 app/.github/workflows/main.yml vendored

@@ -15,9 +15,15 @@ jobs:

- uses: actions/setup-python@v4
with:
python-version: '3.9'
python-version: '3.10'
cache: 'poetry'

- name: Install OS dependencies
if: ${{ matrix.python-version }} == '3.10'
run: |
sudo apt update
sudo apt install -y libre2-dev libpq-dev

- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: poetry install --no-interaction

@@ -7,18 +7,19 @@ repos:
hooks:
- id: check-yaml
- id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.3.0
hooks:
- id: djlint-jinja
files: '.*\.html'
entry: djlint --reformat
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.5
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format

@@ -23,7 +23,7 @@ COPY poetry.lock pyproject.toml ./
# Install and setup poetry
RUN pip install -U pip \
&& apt-get update \
&& apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev \
&& apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
&& curl -sSL https://install.python-poetry.org | python3 - \
# Remove curl and netcat from the image
&& apt-get purge -y curl netcat-traditional \
@@ -31,7 +31,7 @@ RUN pip install -U pip \
&& poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi --no-root \
# Clear apt cache \
&& apt-get purge -y libre2-dev \
&& apt-get purge -y libre2-dev cmake ninja-build\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

@ -5,13 +5,15 @@ from typing import Optional
|
||||
|
||||
from arrow import Arrow
|
||||
from newrelic import agent
|
||||
from sqlalchemy import or_
|
||||
|
||||
from app.db import Session
|
||||
from app.email_utils import send_welcome_email
|
||||
from app.utils import sanitize_email
|
||||
from app.utils import sanitize_email, canonicalize_email
|
||||
from app.errors import (
|
||||
AccountAlreadyLinkedToAnotherPartnerException,
|
||||
AccountIsUsingAliasAsEmail,
|
||||
AccountAlreadyLinkedToAnotherUserException,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
@ -130,8 +132,9 @@ class ClientMergeStrategy(ABC):
|
||||
class NewUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
# Will create a new SL User with a random password
|
||||
canonical_email = canonicalize_email(self.link_request.email)
|
||||
new_user = User.create(
|
||||
email=self.link_request.email,
|
||||
email=canonical_email,
|
||||
name=self.link_request.name,
|
||||
password=random_string(20),
|
||||
activated=True,
|
||||
@ -165,7 +168,6 @@ class NewUserStrategy(ClientMergeStrategy):
|
||||
|
||||
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
|
||||
partner_user = ensure_partner_user_exists_for_user(
|
||||
self.link_request, self.user, self.partner
|
||||
)
|
||||
@ -179,7 +181,7 @@ class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
|
||||
|
||||
class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
raise AccountAlreadyLinkedToAnotherPartnerException()
|
||||
raise AccountAlreadyLinkedToAnotherUserException()
|
||||
|
||||
|
||||
def get_login_strategy(
|
||||
@ -212,11 +214,21 @@ def process_login_case(
|
||||
partner_id=partner.id, external_user_id=link_request.external_user_id
|
||||
)
|
||||
if partner_user is None:
|
||||
canonical_email = canonicalize_email(link_request.email)
|
||||
# We didn't find any SimpleLogin user registered with that partner user id
|
||||
# Make sure they aren't using an alias as their link email
|
||||
check_alias(link_request.email)
|
||||
check_alias(canonical_email)
|
||||
# Try to find it using the partner's e-mail address
|
||||
user = User.get_by(email=link_request.email)
|
||||
users = User.filter(
|
||||
or_(User.email == link_request.email, User.email == canonical_email)
|
||||
).all()
|
||||
if len(users) > 1:
|
||||
user = [user for user in users if user.email == canonical_email][0]
|
||||
elif len(users) == 1:
|
||||
user = users[0]
|
||||
else:
|
||||
user = None
|
||||
return get_login_strategy(link_request, user, partner).process()
|
||||
else:
|
||||
# We found the SL user registered with that partner user id
|
||||
|
@ -256,6 +256,17 @@ class UserAdmin(SLModelView):
|
||||
|
||||
Session.commit()
|
||||
|
||||
@action(
|
||||
"clear_delete_on",
|
||||
"Remove scheduled deletion of user",
|
||||
"This will remove the scheduled deletion for this users",
|
||||
)
|
||||
def clean_delete_on(self, ids):
|
||||
for user in User.filter(User.id.in_(ids)):
|
||||
user.delete_on = None
|
||||
|
||||
Session.commit()
|
||||
|
||||
# @action(
|
||||
# "login_as",
|
||||
# "Login as this user",
|
||||
@ -600,6 +611,26 @@ class NewsletterAdmin(SLModelView):
|
||||
else:
|
||||
flash(error_msg, "error")
|
||||
|
||||
@action(
|
||||
"clone_newsletter",
|
||||
"Clone this newsletter",
|
||||
)
|
||||
def clone_newsletter(self, newsletter_ids):
|
||||
if len(newsletter_ids) != 1:
|
||||
flash("you can only select 1 newsletter", "error")
|
||||
return
|
||||
|
||||
newsletter_id = newsletter_ids[0]
|
||||
newsletter: Newsletter = Newsletter.get(newsletter_id)
|
||||
new_newsletter = Newsletter.create(
|
||||
subject=newsletter.subject,
|
||||
html=newsletter.html,
|
||||
plain_text=newsletter.plain_text,
|
||||
commit=True,
|
||||
)
|
||||
|
||||
flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")
|
||||
|
||||
|
||||
class NewsletterUserAdmin(SLModelView):
|
||||
column_searchable_list = ["id"]
|
||||
|
@ -70,7 +70,6 @@ def verify_prefix_suffix(
|
||||
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
|
||||
and not config.DISABLE_ALIAS_SUFFIX
|
||||
):
|
||||
|
||||
if not alias_domain_prefix.startswith("."):
|
||||
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
|
||||
return False
|
||||
|
@ -21,6 +21,8 @@ from app.email_utils import (
|
||||
send_cannot_create_directory_alias_disabled,
|
||||
get_email_local_part,
|
||||
send_cannot_create_domain_alias,
|
||||
send_email,
|
||||
render,
|
||||
)
|
||||
from app.errors import AliasInTrashError
|
||||
from app.log import LOG
|
||||
@ -36,6 +38,8 @@ from app.models import (
|
||||
EmailLog,
|
||||
Contact,
|
||||
AutoCreateRule,
|
||||
AliasUsedOn,
|
||||
ClientUser,
|
||||
)
|
||||
from app.regex_utils import regex_match
|
||||
|
||||
@ -399,3 +403,58 @@ def alias_export_csv(user, csv_direct_export=False):
|
||||
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
|
||||
output.headers["Content-type"] = "text/csv"
|
||||
return output
|
||||
|
||||
|
||||
def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
|
||||
# cannot transfer alias which is used for receiving newsletter
|
||||
if User.get_by(newsletter_alias_id=alias.id):
|
||||
raise Exception("Cannot transfer alias that's used to receive newsletter")
|
||||
|
||||
# update user_id
|
||||
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
# remove existing mailboxes from the alias
|
||||
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
|
||||
|
||||
# set mailboxes
|
||||
alias.mailbox_id = new_mailboxes.pop().id
|
||||
for mb in new_mailboxes:
|
||||
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
|
||||
|
||||
# alias has never been transferred before
|
||||
if not alias.original_owner_id:
|
||||
alias.original_owner_id = alias.user_id
|
||||
|
||||
# inform previous owner
|
||||
old_user = alias.user
|
||||
send_email(
|
||||
old_user.email,
|
||||
f"Alias {alias.email} has been received",
|
||||
render(
|
||||
"transactional/alias-transferred.txt",
|
||||
alias=alias,
|
||||
),
|
||||
render(
|
||||
"transactional/alias-transferred.html",
|
||||
alias=alias,
|
||||
),
|
||||
)
|
||||
|
||||
# now the alias belongs to the new user
|
||||
alias.user_id = new_user.id
|
||||
|
||||
# set some fields back to default
|
||||
alias.disable_pgp = False
|
||||
alias.pinned = False
|
||||
|
||||
Session.commit()
|
||||
|
@@ -16,3 +16,22 @@ from .views import (
    sudo,
    user,
)

__all__ = [
    "alias_options",
    "new_custom_alias",
    "custom_domain",
    "new_random_alias",
    "user_info",
    "auth",
    "auth_mfa",
    "alias",
    "apple",
    "mailbox",
    "notification",
    "setting",
    "export",
    "phone",
    "sudo",
    "user",
]
@ -24,6 +24,7 @@ from app.errors import (
|
||||
ErrContactAlreadyExists,
|
||||
ErrAddressInvalid,
|
||||
)
|
||||
from app.extensions import limiter
|
||||
from app.models import Alias, Contact, Mailbox, AliasMailbox
|
||||
|
||||
|
||||
@ -71,6 +72,9 @@ def get_aliases():
|
||||
|
||||
|
||||
@api_bp.route("/v2/aliases", methods=["GET", "POST"])
|
||||
@limiter.limit(
|
||||
"5/minute",
|
||||
)
|
||||
@require_api_auth
|
||||
def get_aliases_v2():
|
||||
"""
|
||||
|
@ -63,6 +63,11 @@ def auth_login():
|
||||
elif user.disabled:
|
||||
LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Account disabled"), 400
|
||||
elif user.delete_on is not None:
|
||||
LoginEvent(
|
||||
LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
|
||||
).send()
|
||||
return jsonify(error="Account scheduled for deletion"), 400
|
||||
elif not user.activated:
|
||||
LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Account not activated"), 422
|
||||
|
@ -13,8 +13,8 @@ from app.db import Session
|
||||
from app.email_utils import (
|
||||
mailbox_already_used,
|
||||
email_can_be_used_as_mailbox,
|
||||
is_valid_email,
|
||||
)
|
||||
from app.email_validation import is_valid_email
|
||||
from app.log import LOG
|
||||
from app.models import Mailbox, Job
|
||||
from app.utils import sanitize_email
|
||||
@ -45,7 +45,7 @@ def create_mailbox():
|
||||
mailbox_email = sanitize_email(request.get_json().get("email"))
|
||||
|
||||
if not user.is_premium():
|
||||
return jsonify(error=f"Only premium plan can add additional mailbox"), 400
|
||||
return jsonify(error="Only premium plan can add additional mailbox"), 400
|
||||
|
||||
if not is_valid_email(mailbox_email):
|
||||
return jsonify(error=f"{mailbox_email} invalid"), 400
|
||||
|
@ -150,7 +150,7 @@ def new_custom_alias_v3():
|
||||
if not data:
|
||||
return jsonify(error="request body cannot be empty"), 400
|
||||
|
||||
if type(data) is not dict:
|
||||
if not isinstance(data, dict):
|
||||
return jsonify(error="request body does not follow the required format"), 400
|
||||
|
||||
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
|
||||
@ -168,7 +168,7 @@ def new_custom_alias_v3():
|
||||
return jsonify(error="alias prefix invalid format or too long"), 400
|
||||
|
||||
# check if mailbox is not tempered with
|
||||
if type(mailbox_ids) is not list:
|
||||
if not isinstance(mailbox_ids, list):
|
||||
return jsonify(error="mailbox_ids must be an array of id"), 400
|
||||
mailboxes = []
|
||||
for mailbox_id in mailbox_ids:
|
||||
|
@@ -17,3 +17,23 @@ from .views import (
    recovery,
    api_to_cookie,
)

__all__ = [
    "login",
    "logout",
    "register",
    "activate",
    "resend_activation",
    "reset_password",
    "forgot_password",
    "github",
    "google",
    "facebook",
    "proton",
    "change_email",
    "mfa",
    "fido",
    "social",
    "recovery",
    "api_to_cookie",
]
@ -62,7 +62,7 @@ def fido():
|
||||
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
|
||||
if browser and not browser.is_expired() and browser.user_id == user.id:
|
||||
login_user(user)
|
||||
flash(f"Welcome back!", "success")
|
||||
flash("Welcome back!", "success")
|
||||
# Redirect user to correct page
|
||||
return redirect(next_url or url_for("dashboard.index"))
|
||||
else:
|
||||
@ -110,7 +110,7 @@ def fido():
|
||||
|
||||
session["sudo_time"] = int(time())
|
||||
login_user(user)
|
||||
flash(f"Welcome back!", "success")
|
||||
flash("Welcome back!", "success")
|
||||
|
||||
# Redirect user to correct page
|
||||
response = make_response(redirect(next_url or url_for("dashboard.index")))
|
||||
|
@ -54,6 +54,12 @@ def login():
|
||||
"error",
|
||||
)
|
||||
LoginEvent(LoginEvent.ActionType.disabled_login).send()
|
||||
elif user.delete_on is not None:
|
||||
flash(
|
||||
f"Your account is scheduled to be deleted on {user.delete_on}",
|
||||
"error",
|
||||
)
|
||||
LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
|
||||
elif not user.activated:
|
||||
show_resend_activation = True
|
||||
flash(
|
||||
|
@ -55,7 +55,7 @@ def mfa():
|
||||
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
|
||||
if browser and not browser.is_expired() and browser.user_id == user.id:
|
||||
login_user(user)
|
||||
flash(f"Welcome back!", "success")
|
||||
flash("Welcome back!", "success")
|
||||
# Redirect user to correct page
|
||||
return redirect(next_url or url_for("dashboard.index"))
|
||||
else:
|
||||
@ -73,7 +73,7 @@ def mfa():
|
||||
Session.commit()
|
||||
|
||||
login_user(user)
|
||||
flash(f"Welcome back!", "success")
|
||||
flash("Welcome back!", "success")
|
||||
|
||||
# Redirect user to correct page
|
||||
response = make_response(redirect(next_url or url_for("dashboard.index")))
|
||||
|
@ -53,7 +53,7 @@ def recovery_route():
|
||||
del session[MFA_USER_ID]
|
||||
|
||||
login_user(user)
|
||||
flash(f"Welcome back!", "success")
|
||||
flash("Welcome back!", "success")
|
||||
|
||||
recovery_code.used = True
|
||||
recovery_code.used_at = arrow.now()
|
||||
|
@ -94,9 +94,7 @@ def register():
|
||||
try:
|
||||
send_activation_email(user, next_url)
|
||||
RegisterEvent(RegisterEvent.ActionType.success).send()
|
||||
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
|
||||
1
|
||||
)
|
||||
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
|
||||
Session.commit()
|
||||
except Exception:
|
||||
flash("Invalid email, are you sure the email is correct?", "error")
|
||||
|
@ -179,6 +179,7 @@ AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
|
||||
BUCKET = os.environ.get("BUCKET")
|
||||
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
|
||||
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
|
||||
AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)
|
||||
|
||||
# Paddle
|
||||
try:
|
||||
@ -535,3 +536,7 @@ DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
||||
|
||||
SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
|
||||
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
||||
|
||||
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
||||
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
|
||||
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
|
||||
|
@@ -33,3 +33,39 @@ from .views import (
    notification,
    support,
)

__all__ = [
    "index",
    "pricing",
    "setting",
    "custom_alias",
    "subdomain",
    "billing",
    "alias_log",
    "alias_export",
    "unsubscribe",
    "api_key",
    "custom_domain",
    "alias_contact_manager",
    "enter_sudo",
    "mfa_setup",
    "mfa_cancel",
    "fido_setup",
    "coupon",
    "fido_manage",
    "domain_detail",
    "lifetime_licence",
    "directory",
    "mailbox",
    "mailbox_detail",
    "refused_email",
    "referral",
    "contact_detail",
    "setup_done",
    "batch_import",
    "alias_transfer",
    "app",
    "delete_account",
    "notification",
    "support",
]
@ -13,10 +13,10 @@ from app import config, parallel_limiter
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
is_valid_email,
|
||||
generate_reply_email,
|
||||
parse_full_address,
|
||||
)
|
||||
from app.email_validation import is_valid_email
|
||||
from app.errors import (
|
||||
CannotCreateContactForReverseAlias,
|
||||
ErrContactErrorUpgradeNeeded,
|
||||
|
@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
|
||||
contact=contact,
|
||||
)
|
||||
logs.append(al)
|
||||
logs = sorted(logs, key=lambda l: l.when, reverse=True)
|
||||
logs = sorted(logs, key=lambda log: log.when, reverse=True)
|
||||
|
||||
return logs
|
||||
|
@ -7,79 +7,19 @@ from flask import render_template, redirect, url_for, flash, request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import config
|
||||
from app.alias_utils import transfer_alias
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.db import Session
|
||||
from app.email_utils import send_email, render
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
Alias,
|
||||
Contact,
|
||||
AliasUsedOn,
|
||||
AliasMailbox,
|
||||
User,
|
||||
ClientUser,
|
||||
)
|
||||
from app.models import Mailbox
|
||||
from app.utils import CSRFValidationForm
|
||||
|
||||
|
||||
def transfer(alias, new_user, new_mailboxes: [Mailbox]):
|
||||
# cannot transfer alias which is used for receiving newsletter
|
||||
if User.get_by(newsletter_alias_id=alias.id):
|
||||
raise Exception("Cannot transfer alias that's used to receive newsletter")
|
||||
|
||||
# update user_id
|
||||
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
|
||||
{"user_id": new_user.id}
|
||||
)
|
||||
|
||||
# remove existing mailboxes from the alias
|
||||
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
|
||||
|
||||
# set mailboxes
|
||||
alias.mailbox_id = new_mailboxes.pop().id
|
||||
for mb in new_mailboxes:
|
||||
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
|
||||
|
||||
# alias has never been transferred before
|
||||
if not alias.original_owner_id:
|
||||
alias.original_owner_id = alias.user_id
|
||||
|
||||
# inform previous owner
|
||||
old_user = alias.user
|
||||
send_email(
|
||||
old_user.email,
|
||||
f"Alias {alias.email} has been received",
|
||||
render(
|
||||
"transactional/alias-transferred.txt",
|
||||
alias=alias,
|
||||
),
|
||||
render(
|
||||
"transactional/alias-transferred.html",
|
||||
alias=alias,
|
||||
),
|
||||
)
|
||||
|
||||
# now the alias belongs to the new user
|
||||
alias.user_id = new_user.id
|
||||
|
||||
# set some fields back to default
|
||||
alias.disable_pgp = False
|
||||
alias.pinned = False
|
||||
|
||||
Session.commit()
|
||||
|
||||
|
||||
def hmac_alias_transfer_token(transfer_token: str) -> str:
|
||||
alias_hmac = hmac.new(
|
||||
config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
|
||||
@ -214,7 +154,7 @@ def alias_transfer_receive_route():
|
||||
mailboxes,
|
||||
token,
|
||||
)
|
||||
transfer(alias, current_user, mailboxes)
|
||||
transfer_alias(alias, current_user, mailboxes)
|
||||
|
||||
# reset transfer token
|
||||
alias.transfer_token = None
|
||||
|
@ -1,14 +1,9 @@
|
||||
from app.db import Session
|
||||
|
||||
"""
|
||||
List of apps that user has used via the "Sign in with SimpleLogin"
|
||||
"""
|
||||
|
||||
from flask import render_template, request, flash, redirect
|
||||
from flask_login import login_required, current_user
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.models import (
|
||||
ClientUser,
|
||||
)
|
||||
@ -17,6 +12,10 @@ from app.models import (
|
||||
@dashboard_bp.route("/app", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def app_route():
|
||||
"""
|
||||
List of apps that user has used via the "Sign in with SimpleLogin"
|
||||
"""
|
||||
|
||||
client_users = (
|
||||
ClientUser.filter_by(user_id=current_user.id)
|
||||
.options(joinedload(ClientUser.client))
|
||||
|
@ -100,7 +100,7 @@ def coupon_route():
|
||||
commit=True,
|
||||
)
|
||||
flash(
|
||||
f"Your account has been upgraded to Premium, thanks for your support!",
|
||||
"Your account has been upgraded to Premium, thanks for your support!",
|
||||
"success",
|
||||
)
|
||||
|
||||
|
@ -67,7 +67,7 @@ def directory():
|
||||
if request.method == "POST":
|
||||
if request.form.get("form-name") == "delete":
|
||||
if not delete_dir_form.validate():
|
||||
flash(f"Invalid request", "warning")
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_obj = Directory.get(delete_dir_form.directory_id.data)
|
||||
|
||||
@ -87,7 +87,7 @@ def directory():
|
||||
|
||||
if request.form.get("form-name") == "toggle-directory":
|
||||
if not toggle_dir_form.validate():
|
||||
flash(f"Invalid request", "warning")
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = toggle_dir_form.directory_id.data
|
||||
dir_obj = Directory.get(dir_id)
|
||||
@ -109,7 +109,7 @@ def directory():
|
||||
|
||||
elif request.form.get("form-name") == "update":
|
||||
if not update_dir_form.validate():
|
||||
flash(f"Invalid request", "warning")
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = update_dir_form.directory_id.data
|
||||
dir_obj = Directory.get(dir_id)
|
||||
|
@ -57,6 +57,10 @@ def get_stats(user: User) -> Stats:
|
||||
methods=["POST"],
|
||||
exempt_when=lambda: request.form.get("form-name") != "create-random-email",
|
||||
)
|
||||
@limiter.limit(
|
||||
"5/minute",
|
||||
methods=["GET"],
|
||||
)
|
||||
@login_required
|
||||
@parallel_limiter.lock(
|
||||
name="alias_creation",
|
||||
|
@ -19,8 +19,8 @@ from app.email_utils import (
|
||||
mailbox_already_used,
|
||||
render,
|
||||
send_email,
|
||||
is_valid_email,
|
||||
)
|
||||
from app.email_validation import is_valid_email
|
||||
from app.log import LOG
|
||||
from app.models import Mailbox, Job
|
||||
from app.utils import CSRFValidationForm
|
||||
|
@ -30,7 +30,7 @@ class ChangeEmailForm(FlaskForm):
|
||||
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def mailbox_detail_route(mailbox_id):
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
mailbox: Mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox or mailbox.user_id != current_user.id:
|
||||
flash("You cannot see this page", "warning")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
@ -144,6 +144,15 @@ def mailbox_detail_route(mailbox_id):
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
if mailbox.is_proton():
|
||||
flash(
|
||||
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
|
||||
"info",
|
||||
)
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
mailbox.pgp_public_key = request.form.get("pgp")
|
||||
try:
|
||||
mailbox.pgp_finger_print = load_public_key_and_check(
|
||||
@ -182,25 +191,16 @@ def mailbox_detail_route(mailbox_id):
|
||||
)
|
||||
elif request.form.get("form-name") == "generic-subject":
|
||||
if request.form.get("action") == "save":
|
||||
if not mailbox.pgp_enabled():
|
||||
flash(
|
||||
"Generic subject can only be used on PGP-enabled mailbox",
|
||||
"error",
|
||||
)
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
mailbox.generic_subject = request.form.get("generic-subject")
|
||||
Session.commit()
|
||||
flash("Generic subject for PGP-encrypted email is enabled", "success")
|
||||
flash("Generic subject is enabled", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
elif request.form.get("action") == "remove":
|
||||
mailbox.generic_subject = None
|
||||
Session.commit()
|
||||
flash("Generic subject for PGP-encrypted email is disabled", "success")
|
||||
flash("Generic subject is disabled", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
@ -128,7 +128,6 @@ def setting():
|
||||
new_email_valid = True
|
||||
new_email = canonicalize_email(change_email_form.email.data)
|
||||
if new_email != current_user.email and not pending_email:
|
||||
|
||||
# check if this email is not already used
|
||||
if personal_email_already_used(new_email) or Alias.get_by(
|
||||
email=new_email
|
||||
|
@ -75,12 +75,11 @@ def block_contact(contact_id):
|
||||
@dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
|
||||
@login_required
|
||||
def encoded_unsubscribe(encoded_request: str):
|
||||
|
||||
unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
|
||||
current_user, encoded_request
|
||||
)
|
||||
if not unsub_data:
|
||||
flash(f"Invalid unsubscribe request", "error")
|
||||
flash("Invalid unsubscribe request", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
if unsub_data.action == UnsubscribeAction.DisableAlias:
|
||||
alias = Alias.get(unsub_data.data)
|
||||
@ -97,14 +96,14 @@ def encoded_unsubscribe(encoded_request: str):
|
||||
)
|
||||
)
|
||||
if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
|
||||
flash(f"You've unsubscribed from the newsletter", "success")
|
||||
flash("You've unsubscribed from the newsletter", "success")
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.index",
|
||||
)
|
||||
)
|
||||
if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
|
||||
flash(f"The original unsubscribe request has been forwarded", "success")
|
||||
flash("The original unsubscribe request has been forwarded", "success")
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.index",
|
||||
|
@@ -1 +1,3 @@
from .views import index, new_client, client_detail

__all__ = ["index", "new_client", "client_detail"]
@ -87,7 +87,7 @@ def client_detail(client_id):
|
||||
)
|
||||
|
||||
flash(
|
||||
f"Thanks for submitting, we are informed and will come back to you asap!",
|
||||
"Thanks for submitting, we are informed and will come back to you asap!",
|
||||
"success",
|
||||
)
|
||||
|
||||
|
@@ -1 +1,3 @@
from .views import index

__all__ = ["index"]
@ -34,7 +34,7 @@ def get_cname_record(hostname) -> Optional[str]:
|
||||
|
||||
|
||||
def get_mx_domains(hostname) -> [(int, str)]:
|
||||
"""return list of (priority, domain name).
|
||||
"""return list of (priority, domain name) sorted by priority (lowest priority first)
|
||||
domain name ends with a "." at the end.
|
||||
"""
|
||||
try:
|
||||
@ -50,7 +50,7 @@ def get_mx_domains(hostname) -> [(int, str)]:
|
||||
|
||||
ret.append((int(parts[0]), parts[1]))
|
||||
|
||||
return ret
|
||||
return sorted(ret, key=lambda prio_domain: prio_domain[0])
|
||||
|
||||
|
||||
_include_spf = "include:"
|
||||
|
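The dns_utils.py hunks above change `get_mx_domains` so that the `(priority, domain)` tuples are returned sorted by priority (lowest, i.e. most preferred, first) instead of whatever order the resolver produced; the new `Mailbox.is_proton()` check later in this compare relies on that ordering when it inspects the first MX entry. A small illustrative sketch of the sort key (the record values here are made up):

```python
# Illustrative sketch of the new sorting behaviour in get_mx_domains (hypothetical records)
mx_records = [(20, "mx2.example.com."), (10, "mx1.example.com."), (30, "backup.example.com.")]

# sorted by the priority element, lowest (most preferred) first
print(sorted(mx_records, key=lambda prio_domain: prio_domain[0]))
# -> [(10, 'mx1.example.com.'), (20, 'mx2.example.com.'), (30, 'backup.example.com.')]
```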
@ -93,7 +93,7 @@ def send_welcome_email(user):
|
||||
|
||||
send_email(
|
||||
comm_email,
|
||||
f"Welcome to SimpleLogin",
|
||||
"Welcome to SimpleLogin",
|
||||
render("com/welcome.txt", user=user, alias=alias),
|
||||
render("com/welcome.html", user=user, alias=alias),
|
||||
unsubscribe_link,
|
||||
@ -104,7 +104,7 @@ def send_welcome_email(user):
|
||||
def send_trial_end_soon_email(user):
|
||||
send_email(
|
||||
user.email,
|
||||
f"Your trial will end soon",
|
||||
"Your trial will end soon",
|
||||
render("transactional/trial-end.txt.jinja2", user=user),
|
||||
render("transactional/trial-end.html", user=user),
|
||||
ignore_smtp_error=True,
|
||||
@ -114,7 +114,7 @@ def send_trial_end_soon_email(user):
|
||||
def send_activation_email(email, activation_link):
|
||||
send_email(
|
||||
email,
|
||||
f"Just one more step to join SimpleLogin",
|
||||
"Just one more step to join SimpleLogin",
|
||||
render(
|
||||
"transactional/activation.txt",
|
||||
activation_link=activation_link,
|
||||
@ -768,7 +768,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
|
||||
ret = ""
|
||||
for to_decoded_str, charset in decode_header(header):
|
||||
if charset is None:
|
||||
if type(to_decoded_str) is bytes:
|
||||
if isinstance(to_decoded_str, bytes):
|
||||
decoded_str = to_decoded_str.decode()
|
||||
else:
|
||||
decoded_str = to_decoded_str
|
||||
@ -805,13 +805,13 @@ def to_bytes(msg: Message):
|
||||
for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
|
||||
try:
|
||||
return msg.as_bytes(policy=generator_policy)
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)
|
||||
|
||||
msg_string = msg.as_string()
|
||||
try:
|
||||
return msg_string.encode()
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_string().encode() fails", exc_info=True)
|
||||
|
||||
return msg_string.encode(errors="replace")
|
||||
@ -828,19 +828,6 @@ def should_add_dkim_signature(domain: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def is_valid_email(email_address: str) -> bool:
|
||||
"""
|
||||
Used to check whether an email address is valid
|
||||
NOT run MX check.
|
||||
NOT allow unicode.
|
||||
"""
|
||||
try:
|
||||
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
|
||||
return True
|
||||
except EmailNotValidError:
|
||||
return False
|
||||
|
||||
|
||||
class EmailEncoding(enum.Enum):
|
||||
BASE64 = "base64"
|
||||
QUOTED = "quoted-printable"
|
||||
@ -919,7 +906,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
if content_type == "text/plain":
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
clone_msg = copy(msg)
|
||||
new_payload = f"""{text_header}
|
||||
------------------------------
|
||||
@ -929,7 +916,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
elif content_type == "text/html":
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
|
||||
-premailer-cellspacing: 0; margin: 0; padding: 0;">
|
||||
<tr>
|
||||
@ -985,7 +972,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
||||
|
||||
|
||||
def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
||||
if type(msg) is str:
|
||||
if isinstance(msg, str):
|
||||
msg = msg.replace(old, new)
|
||||
return msg
|
||||
|
||||
@ -1008,7 +995,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
||||
if content_type in ("text/plain", "text/html"):
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
if encoding == EmailEncoding.QUOTED:
|
||||
LOG.d("handle quoted-printable replace %s -> %s", old, new)
|
||||
# first decode the payload
|
||||
@ -1116,26 +1103,6 @@ def is_reverse_alias(address: str) -> bool:
|
||||
)
|
||||
|
||||
|
||||
# allow also + and @ that are present in a reply address
|
||||
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
|
||||
|
||||
|
||||
def normalize_reply_email(reply_email: str) -> str:
|
||||
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
|
||||
if not reply_email.isascii():
|
||||
reply_email = convert_to_id(reply_email)
|
||||
|
||||
ret = []
|
||||
# drop all control characters like shift, separator, etc
|
||||
for c in reply_email:
|
||||
if c not in _ALLOWED_CHARS:
|
||||
ret.append("_")
|
||||
else:
|
||||
ret.append(c)
|
||||
|
||||
return "".join(ret)
|
||||
|
||||
|
||||
def should_disable(alias: Alias) -> (bool, str):
|
||||
"""
|
||||
Return whether an alias should be disabled and if yes, the reason why
|
||||
|
38 app/app/email_validation.py Normal file

@@ -0,0 +1,38 @@
from email_validator import (
    validate_email,
    EmailNotValidError,
)

from app.utils import convert_to_id

# allow also + and @ that are present in a reply address
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"


def is_valid_email(email_address: str) -> bool:
    """
    Used to check whether an email address is valid
    NOT run MX check.
    NOT allow unicode.
    """
    try:
        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
        return True
    except EmailNotValidError:
        return False


def normalize_reply_email(reply_email: str) -> str:
    """Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
    if not reply_email.isascii():
        reply_email = convert_to_id(reply_email)

    ret = []
    # drop all control characters like shift, separator, etc
    for c in reply_email:
        if c not in _ALLOWED_CHARS:
            ret.append("_")
        else:
            ret.append(c)

    return "".join(ret)
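Since app/app/email_validation.py is a new file added in this compare, here is a brief, hypothetical usage sketch of its two helpers (not part of the diff; the addresses are made up, and it assumes the module is importable as `app.email_validation`, matching the imports used elsewhere in these changes):

```python
# Hypothetical usage sketch of the helpers added in app/app/email_validation.py
from app.email_validation import is_valid_email, normalize_reply_email

print(is_valid_email("user@example.com"))  # True: syntactically valid; no MX/deliverability check is made
print(is_valid_email("not-an-email"))      # False: email_validator raises EmailNotValidError

# characters outside _ALLOWED_CHARS are replaced with "_"
print(normalize_reply_email("reply#token@example.com"))  # -> "reply_token@example.com"
```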
@ -84,6 +84,14 @@ class ErrAddressInvalid(SLException):
|
||||
return f"{self.address} is not a valid email address"
|
||||
|
||||
|
||||
class InvalidContactEmailError(SLException):
|
||||
def __init__(self, website_email: str): # noqa: F821
|
||||
self.website_email = website_email
|
||||
|
||||
def error_for_user(self) -> str:
|
||||
return f"Cannot create contact with invalid email {self.website_email}"
|
||||
|
||||
|
||||
class ErrContactAlreadyExists(SLException):
|
||||
"""raised when a contact already exists"""
|
||||
|
||||
@ -113,3 +121,10 @@ class AccountAlreadyLinkedToAnotherUserException(LinkException):
|
||||
class AccountIsUsingAliasAsEmail(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__("Your account has an alias as it's email address")
|
||||
|
||||
|
||||
class ProtonAccountNotVerified(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
"The Proton account you are trying to use has not been verified"
|
||||
)
|
||||
|
@ -9,6 +9,7 @@ class LoginEvent:
|
||||
failed = 1
|
||||
disabled_login = 2
|
||||
not_activated = 3
|
||||
scheduled_to_be_deleted = 4
|
||||
|
||||
class Source(EnumE):
|
||||
web = 0
|
||||
|
@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(
|
||||
|
||||
from_header = get_header_unicode(msg[headers.FROM])
|
||||
|
||||
warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
More info on https://simplelogin.io/docs/getting-started/anti-phishing/
|
||||
"""
|
||||
warning_html = f"""
|
||||
warning_html = """
|
||||
<p style="color:red">
|
||||
This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
|
||||
|
@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
|
||||
return True
|
||||
|
||||
if is_deleted_alias(msg_info.sender_address):
|
||||
LOG.i(f"Complaint is for deleted alias. Do nothing")
|
||||
LOG.i("Complaint is for deleted alias. Do nothing")
|
||||
return True
|
||||
|
||||
contact = Contact.get_by(reply_email=msg_info.sender_address)
|
||||
@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
|
||||
alias = find_alias_with_address(msg_info.rcpt_address)
|
||||
|
||||
if is_deleted_alias(msg_info.rcpt_address):
|
||||
LOG.i(f"Complaint is for deleted alias. Do nothing")
|
||||
LOG.i("Complaint is for deleted alias. Do nothing")
|
||||
return True
|
||||
|
||||
if not alias:
|
||||
|
@ -54,9 +54,8 @@ class UnsubscribeEncoder:
|
||||
def encode_subject(
|
||||
cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
|
||||
) -> str:
|
||||
if (
|
||||
action != UnsubscribeAction.OriginalUnsubscribeMailto
|
||||
and type(data) is not int
|
||||
if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
|
||||
data, int
|
||||
):
|
||||
raise ValueError(f"Data has to be an int for an action of type {action}")
|
||||
if action == UnsubscribeAction.OriginalUnsubscribeMailto:
|
||||
|
@ -1,4 +1,5 @@
|
||||
import urllib
|
||||
from email.header import Header
|
||||
from email.message import Message
|
||||
|
||||
from app.email import headers
|
||||
@ -33,6 +34,8 @@ class UnsubscribeGenerator:
|
||||
if not unsubscribe_data:
|
||||
LOG.info("Email has no unsubscribe header")
|
||||
return message
|
||||
if isinstance(unsubscribe_data, Header):
|
||||
unsubscribe_data = str(unsubscribe_data.encode())
|
||||
raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
|
||||
mailto_unsubs = None
|
||||
other_unsubs = []
|
||||
|
@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
|
||||
|
||||
LOG.d("Download file %s from %s", batch_import.file, file_url)
|
||||
r = requests.get(file_url)
|
||||
lines = [line.decode() for line in r.iter_lines()]
|
||||
lines = [line.decode("utf-8") for line in r.iter_lines()]
|
||||
|
||||
import_from_csv(batch_import, user, lines)
|
||||
|
||||
|
@@ -1,2 +1,4 @@
from .integrations import set_enable_proton_cookie
from .exit_sudo import exit_sudo_mode

__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]
@ -39,7 +39,6 @@ from app.models import (
|
||||
|
||||
|
||||
class ExportUserDataJob:
|
||||
|
||||
REMOVE_FIELDS = {
|
||||
"User": ("otp_secret", "password"),
|
||||
"Alias": ("ts_vector", "transfer_token", "hibp_last_check"),
|
||||
|
@ -22,7 +22,6 @@ from app.message_utils import message_to_bytes, message_format_base64_parts
|
||||
|
||||
@dataclass
|
||||
class SendRequest:
|
||||
|
||||
SAVE_EXTENSION = "sendrequest"
|
||||
|
||||
envelope_from: str
|
||||
|
@ -30,6 +30,8 @@ from sqlalchemy_utils import ArrowType
|
||||
from app import config
|
||||
from app import s3
|
||||
from app.db import Session
|
||||
from app.dns_utils import get_mx_domains
|
||||
|
||||
from app.errors import (
|
||||
AliasInTrashError,
|
||||
DirectoryInTrashError,
|
||||
@ -278,6 +280,7 @@ class IntEnumType(sa.types.TypeDecorator):
|
||||
class AliasOptions:
|
||||
show_sl_domains: bool = True
|
||||
show_partner_domains: Optional[Partner] = None
|
||||
show_partner_premium: Optional[bool] = None
|
||||
|
||||
|
||||
class Hibp(Base, ModelMixin):
|
||||
@ -537,10 +540,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
# Trigger hard deletion of the account at this time
|
||||
delete_on = sa.Column(ArrowType, default=None)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index(
|
||||
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
|
||||
),
|
||||
sa.Index("ix_users_delete_on", delete_on),
|
||||
)
|
||||
|
||||
@property
|
||||
@ -577,6 +584,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
|
||||
@classmethod
|
||||
def create(cls, email, name="", password=None, from_partner=False, **kwargs):
|
||||
email = sanitize_email(email)
|
||||
user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
|
||||
|
||||
if password:
|
||||
@ -830,6 +838,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
< self.max_alias_for_free_account()
|
||||
)
|
||||
|
||||
def can_send_or_receive(self) -> bool:
|
||||
if self.disabled:
|
||||
LOG.i(f"User {self} is disabled. Cannot receive or send emails")
|
||||
return False
|
||||
if self.delete_on is not None:
|
||||
LOG.i(
|
||||
f"User {self} is scheduled to be deleted. Cannot receive or send emails"
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
def profile_picture_url(self):
|
||||
if self.profile_picture_id:
|
||||
return self.profile_picture.get_url()
|
||||
@ -1020,29 +1039,35 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||
) -> list["SLDomain"]:
|
||||
if alias_options is None:
|
||||
alias_options = AliasOptions()
|
||||
conditions = [SLDomain.hidden == False] # noqa: E712
|
||||
if not self.is_premium():
|
||||
conditions.append(SLDomain.premium_only == False) # noqa: E712
|
||||
partner_domain_cond = [] # noqa:E711
|
||||
top_conds = [SLDomain.hidden == False] # noqa: E712
|
||||
or_conds = [] # noqa:E711
|
||||
if self.default_alias_public_domain_id is not None:
|
||||
partner_domain_cond.append(
|
||||
SLDomain.id == self.default_alias_public_domain_id
|
||||
)
|
||||
default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
|
||||
if not self.is_premium():
|
||||
default_domain_conds.append(
|
||||
SLDomain.premium_only == False # noqa: E712
|
||||
)
|
||||
or_conds.append(and_(*default_domain_conds).self_group())
|
||||
if alias_options.show_partner_domains is not None:
|
||||
partner_user = PartnerUser.filter_by(
|
||||
user_id=self.id, partner_id=alias_options.show_partner_domains.id
|
||||
).first()
|
||||
if partner_user is not None:
|
||||
partner_domain_cond.append(
|
||||
SLDomain.partner_id == partner_user.partner_id
|
||||
)
|
||||
partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
|
||||
if alias_options.show_partner_premium is None:
|
||||
alias_options.show_partner_premium = self.is_premium()
|
||||
if not alias_options.show_partner_premium:
|
||||
partner_domain_cond.append(
|
||||
SLDomain.premium_only == False # noqa: E712
|
||||
)
|
||||
or_conds.append(and_(*partner_domain_cond).self_group())
|
||||
if alias_options.show_sl_domains:
|
||||
partner_domain_cond.append(SLDomain.partner_id == None) # noqa:E711
|
||||
if len(partner_domain_cond) == 1:
|
||||
conditions.append(partner_domain_cond[0])
|
||||
else:
|
||||
conditions.append(or_(*partner_domain_cond))
|
||||
query = Session.query(SLDomain).filter(*conditions).order_by(SLDomain.order)
|
||||
sl_conds = [SLDomain.partner_id == None] # noqa: E711
|
||||
if not self.is_premium():
|
||||
sl_conds.append(SLDomain.premium_only == False) # noqa: E712
|
||||
or_conds.append(and_(*sl_conds).self_group())
|
||||
top_conds.append(or_(*or_conds))
|
||||
query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
|
||||
return query.all()
|
||||
|
||||
def available_alias_domains(
|
||||
@ -1922,6 +1947,7 @@ class Contact(Base, ModelMixin):
|
||||
|
||||
class EmailLog(Base, ModelMixin):
|
||||
__tablename__ = "email_log"
|
||||
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
|
||||
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
@ -2300,6 +2326,7 @@ class CustomDomain(Base, ModelMixin):
|
||||
@classmethod
|
||||
def create(cls, **kwargs):
|
||||
domain = kwargs.get("domain")
|
||||
kwargs["domain"] = domain.replace("\n", "")
|
||||
if DeletedSubdomain.get_by(domain=domain):
|
||||
raise SubdomainInTrashError
|
||||
|
||||
@ -2567,6 +2594,28 @@ class Mailbox(Base, ModelMixin):
|
||||
+ Alias.filter_by(mailbox_id=self.id).count()
|
||||
)
|
||||
|
||||
def is_proton(self) -> bool:
|
||||
if (
|
||||
self.email.endswith("@proton.me")
|
||||
or self.email.endswith("@protonmail.com")
|
||||
or self.email.endswith("@protonmail.ch")
|
||||
or self.email.endswith("@proton.ch")
|
||||
or self.email.endswith("@pm.me")
|
||||
):
|
||||
return True
|
||||
|
||||
from app.email_utils import get_email_local_part
|
||||
|
||||
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
|
||||
# Proton is the first domain
|
||||
if mx_domains and mx_domains[0][1] in (
|
||||
"mail.protonmail.ch.",
|
||||
"mailsec.protonmail.ch.",
|
||||
):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def delete(cls, obj_id):
|
||||
mailbox: Mailbox = cls.get(obj_id)
|
||||
@ -2599,6 +2648,12 @@ class Mailbox(Base, ModelMixin):
|
||||
|
||||
return ret
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
if "email" in kw:
|
||||
kw["email"] = sanitize_email(kw["email"])
|
||||
return super().create(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Mailbox {self.id} {self.email}>"
|
||||
|
||||
@ -3129,7 +3184,7 @@ class MessageIDMatching(Base, ModelMixin):
|
||||
|
||||
# to track what email_log that has created this matching
|
||||
email_log_id = sa.Column(
|
||||
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True
|
||||
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
|
||||
)
|
||||
|
||||
email_log = orm.relationship("EmailLog")
|
||||
@ -3462,7 +3517,7 @@ class PartnerSubscription(Base, ModelMixin):
|
||||
|
||||
class Newsletter(Base, ModelMixin):
|
||||
__tablename__ = "newsletter"
|
||||
subject = sa.Column(sa.String(), nullable=False, unique=True, index=True)
|
||||
subject = sa.Column(sa.String(), nullable=False, index=True)
|
||||
|
||||
html = sa.Column(sa.Text)
|
||||
plain_text = sa.Column(sa.Text)
|
||||
|
@@ -1 +1,3 @@
from . import views

__all__ = ["views"]
@@ -1 +1,3 @@
from .views import authorize, token, user_info

__all__ = ["authorize", "token", "user_info"]
@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
|
||||
- the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
|
||||
"""
|
||||
res = set()
|
||||
if type(arg_input) is str:
|
||||
if isinstance(arg_input, str):
|
||||
if " " in arg_input:
|
||||
for x in arg_input.split(" "):
|
||||
if x:
|
||||
|
@@ -5,3 +5,11 @@ from .views import (
    account_activated,
    extension_redirect,
)

__all__ = [
    "index",
    "final",
    "setup_done",
    "account_activated",
    "extension_redirect",
]
@ -39,7 +39,6 @@ class _InnerLock:
|
||||
lock_redis.storage.delete(lock_name)
|
||||
|
||||
def __call__(self, f: Callable[..., Any]):
|
||||
|
||||
if self.lock_suffix is None:
|
||||
lock_suffix = f.__name__
|
||||
else:
|
||||
|
@@ -5,3 +5,11 @@ from .views import (
    provider1_callback,
    provider2_callback,
)

__all__ = [
    "index",
    "phone_reservation",
    "twilio_callback",
    "provider1_callback",
    "provider2_callback",
]
@ -7,11 +7,12 @@ from typing import Optional
|
||||
|
||||
from app.account_linking import SLPlan, SLPlanType
|
||||
from app.config import PROTON_EXTRA_HEADER_NAME, PROTON_EXTRA_HEADER_VALUE
|
||||
from app.errors import ProtonAccountNotVerified
|
||||
from app.log import LOG
|
||||
|
||||
_APP_VERSION = "OauthClient_1.0.0"
|
||||
|
||||
PROTON_ERROR_CODE_NOT_EXISTS = 2501
|
||||
PROTON_ERROR_CODE_HV_NEEDED = 9001
|
||||
|
||||
PLAN_FREE = 1
|
||||
PLAN_PREMIUM = 2
|
||||
@ -57,6 +58,15 @@ def convert_access_token(access_token_response: str) -> AccessCredentials:
|
||||
)
|
||||
|
||||
|
||||
def handle_response_not_ok(status: int, body: dict, text: str) -> Exception:
|
||||
if status == HTTPStatus.UNPROCESSABLE_ENTITY:
|
||||
res_code = body.get("Code")
|
||||
if res_code == PROTON_ERROR_CODE_HV_NEEDED:
|
||||
return ProtonAccountNotVerified()
|
||||
|
||||
return Exception(f"Unexpected status code. Wanted 200 and got {status}: " + text)
|
||||
|
||||
|
||||
class ProtonClient(ABC):
|
||||
@abstractmethod
|
||||
def get_user(self) -> Optional[UserInformation]:
|
||||
@ -124,11 +134,11 @@ class HttpProtonClient(ProtonClient):
|
||||
@staticmethod
|
||||
def __validate_response(res: Response) -> dict:
|
||||
status = res.status_code
|
||||
if status != HTTPStatus.OK:
|
||||
raise Exception(
|
||||
f"Unexpected status code. Wanted 200 and got {status}: " + res.text
|
||||
)
|
||||
as_json = res.json()
|
||||
if status != HTTPStatus.OK:
|
||||
raise HttpProtonClient.__handle_response_not_ok(
|
||||
status=status, body=as_json, text=res.text
|
||||
)
|
||||
res_code = as_json.get("Code")
|
||||
if not res_code or res_code != 1000:
|
||||
raise Exception(
|
||||
|
@ -6,7 +6,6 @@ from app.session import RedisSessionStore
|
||||
|
||||
|
||||
def initialize_redis_services(app: flask.Flask, redis_url: str):
|
||||
|
||||
if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
|
||||
storage = limits.storage.RedisStorage(redis_url)
|
||||
app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
|
||||
|
@ -13,17 +13,29 @@ from app.config import (
|
||||
LOCAL_FILE_UPLOAD,
|
||||
UPLOAD_DIR,
|
||||
URL,
|
||||
AWS_ENDPOINT_URL,
|
||||
)
|
||||
|
||||
if not LOCAL_FILE_UPLOAD:
|
||||
_session = boto3.Session(
|
||||
aws_access_key_id=AWS_ACCESS_KEY_ID,
|
||||
aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
|
||||
region_name=AWS_REGION,
|
||||
)
|
||||
from app.log import LOG
|
||||
|
||||
|
||||
def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
|
||||
_s3_client = None
|
||||
|
||||
|
||||
def _get_s3client():
|
||||
global _s3_client
|
||||
if _s3_client is None:
|
||||
args = {
|
||||
"aws_access_key_id": AWS_ACCESS_KEY_ID,
|
||||
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
|
||||
"region_name": AWS_REGION,
|
||||
}
|
||||
if AWS_ENDPOINT_URL:
|
||||
args["endpoint_url"] = AWS_ENDPOINT_URL
|
||||
_s3_client = boto3.client("s3", **args)
|
||||
return _s3_client
|
||||
|
||||
|
||||
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
|
||||
bs.seek(0)
|
||||
|
||||
if LOCAL_FILE_UPLOAD:
|
||||
@ -34,7 +46,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="string"):
|
||||
f.write(bs.read())
|
||||
|
||||
else:
|
||||
_session.resource("s3").Bucket(BUCKET).put_object(
|
||||
_get_s3client().put_object(
|
||||
Bucket=BUCKET,
|
||||
Key=key,
|
||||
Body=bs,
|
||||
ContentType=content_type,
|
||||
@ -52,7 +65,8 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
|
||||
f.write(bs.read())
|
||||
|
||||
else:
|
||||
_session.resource("s3").Bucket(BUCKET).put_object(
|
||||
_get_s3client().put_object(
|
||||
Bucket=BUCKET,
|
||||
Key=path,
|
||||
Body=bs,
|
||||
# Support saving a remote file using Http header
|
||||
@ -67,12 +81,9 @@ def download_email(path: str) -> Optional[str]:
|
||||
file_path = os.path.join(UPLOAD_DIR, path)
|
||||
with open(file_path, "rb") as f:
|
||||
return f.read()
|
||||
resp = (
|
||||
_session.resource("s3")
|
||||
.Bucket(BUCKET)
|
||||
.get_object(
|
||||
Key=path,
|
||||
)
|
||||
resp = _get_s3client().get_object(
|
||||
Bucket=BUCKET,
|
||||
Key=path,
|
||||
)
|
||||
if not resp or "Body" not in resp:
|
||||
return None
|
||||
@ -88,8 +99,7 @@ def get_url(key: str, expires_in=3600) -> str:
|
||||
if LOCAL_FILE_UPLOAD:
|
||||
return URL + "/static/upload/" + key
|
||||
else:
|
||||
s3_client = _session.client("s3")
|
||||
return s3_client.generate_presigned_url(
|
||||
return _get_s3client().generate_presigned_url(
|
||||
ExpiresIn=expires_in,
|
||||
ClientMethod="get_object",
|
||||
Params={"Bucket": BUCKET, "Key": key},
|
||||
@ -100,5 +110,15 @@ def delete(path: str):
|
||||
if LOCAL_FILE_UPLOAD:
|
||||
os.remove(os.path.join(UPLOAD_DIR, path))
|
||||
else:
|
||||
o = _session.resource("s3").Bucket(BUCKET).Object(path)
|
||||
o.delete()
|
||||
_get_s3client().delete_object(Bucket=BUCKET, Key=path)
|
||||
|
||||
|
||||
def create_bucket_if_not_exists():
|
||||
s3client = _get_s3client()
|
||||
buckets = s3client.list_buckets()
|
||||
for bucket in buckets["Buckets"]:
|
||||
if bucket["Name"] == BUCKET:
|
||||
LOG.i("Bucket already exists")
|
||||
return
|
||||
s3client.create_bucket(Bucket=BUCKET)
|
||||
LOG.i(f"Bucket {BUCKET} created")
|
||||
|
@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
|
||||
try:
|
||||
data = pickle.loads(val)
|
||||
return ServerSession(data, session_id=session_id)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
return ServerSession(session_id=str(uuid.uuid4()))
|
||||
|
||||
|
@ -99,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
|
||||
email_address = email_address.strip().replace(" ", "").replace("\n", " ")
|
||||
if not not_lower:
|
||||
email_address = email_address.lower()
|
||||
return email_address
|
||||
return email_address.replace("\u200f", "")
|
||||
|
||||
|
||||
class NextUrlSanitizer:
|
||||
|
70 app/cron.py
@ -5,11 +5,11 @@ from typing import List, Tuple

import arrow
import requests
from sqlalchemy import func, desc, or_
from sqlalchemy import func, desc, or_, and_
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.sql import Insert
from sqlalchemy.sql import Insert, text

from app import s3, config
from app.alias_utils import nb_email_log_for_mailbox
@ -22,10 +22,9 @@ from app.email_utils import (
render,
email_can_be_used_as_mailbox,
send_email_with_rate_control,
normalize_reply_email,
is_valid_email,
get_email_domain_part,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.errors import ProtonPartnerNotSetUp
from app.log import LOG
from app.mail_sender import load_unsent_mails_from_fs_and_resend
@ -86,23 +85,43 @@ def delete_logs():
delete_refused_emails()
delete_old_monitoring()

for t in TransactionalEmail.filter(
for t_email in TransactionalEmail.filter(
TransactionalEmail.created_at < arrow.now().shift(days=-7)
):
TransactionalEmail.delete(t.id)
TransactionalEmail.delete(t_email.id)

for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
Bounce.delete(b.id)

Session.commit()

LOG.d("Delete EmailLog older than 2 weeks")
LOG.d("Deleting EmailLog older than 2 weeks")

max_dt = arrow.now().shift(weeks=-2)
nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete()
Session.commit()
total_deleted = 0
batch_size = 500
Session.execute("set session statement_timeout=30000").rowcount
queries_done = 0
cutoff_time = arrow.now().shift(days=-14)
rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
expected_queries = int(rows_to_delete / batch_size)
sql = text(
"DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
)
str_cutoff_time = cutoff_time.isoformat()
while total_deleted < rows_to_delete:
deleted_count = Session.execute(
sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
).rowcount
Session.commit()
total_deleted += deleted_count
queries_done += 1
LOG.i(
f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
)
if deleted_count < batch_size:
break

LOG.i("Delete %s email logs", nb_deleted)
LOG.i("Deleted %s email logs", total_deleted)

def delete_refused_emails():
@ -142,7 +161,7 @@ def notify_premium_end():

send_email(
user.email,
f"Your subscription will end soon",
"Your subscription will end soon",
render(
"transactional/subscription-end.txt",
user=user,
@ -199,7 +218,7 @@ def notify_manual_sub_end():
LOG.d("Remind user %s that their manual sub is ending soon", user)
send_email(
user.email,
f"Your subscription will end soon",
"Your subscription will end soon",
render(
"transactional/manual-subscription-end.txt",
user=user,
@ -571,21 +590,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
"""

monitoring_report += "\n====================================\n"
monitoring_report += f"""
monitoring_report += """
# Account bounce report:
"""

for email, bounces in bounce_report():
monitoring_report += f"{email}: {bounces}\n"

monitoring_report += f"""\n
monitoring_report += """\n
# Alias creation report:
"""

for email, nb_alias, date in alias_creation_report():
monitoring_report += f"{email}, {date}: {nb_alias}\n"

monitoring_report += f"""\n
monitoring_report += """\n
# Full bounce detail report:
"""
monitoring_report += all_bounce_report()
@ -1080,14 +1099,14 @@ def notify_hibp():
)

LOG.d(
f"Send new breaches found email to %s for %s breaches aliases",
"Send new breaches found email to %s for %s breaches aliases",
user,
len(breached_aliases),
)

send_email(
user.email,
f"You were in a data breach",
"You were in a data breach",
render(
"transactional/hibp-new-breaches.txt.jinja2",
user=user,
@ -1107,6 +1126,18 @@ def notify_hibp():
Session.commit()

def clear_users_scheduled_to_be_deleted():
users = User.filter(
and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
).all()
for user in users:
LOG.i(
f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
)
User.delete(user.id)
Session.commit()

if __name__ == "__main__":
LOG.d("Start running cronjob")
parser = argparse.ArgumentParser()
@ -1173,3 +1204,6 @@ if __name__ == "__main__":
elif args.job == "send_undelivered_mails":
LOG.d("Sending undelivered emails")
load_unsent_mails_from_fs_and_resend()
elif args.job == "delete_scheduled_users":
LOG.d("Deleting users scheduled to be deleted")
clear_users_scheduled_to_be_deleted()
@ -5,59 +5,66 @@ jobs:
|
||||
schedule: "0 0 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Trial Ends
|
||||
command: python /code/cron.py -j notify_trial_end
|
||||
shell: /bin/bash
|
||||
schedule: "0 8 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Manual Subscription Ends
|
||||
command: python /code/cron.py -j notify_manual_subscription_end
|
||||
shell: /bin/bash
|
||||
schedule: "0 9 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Premium Ends
|
||||
command: python /code/cron.py -j notify_premium_end
|
||||
shell: /bin/bash
|
||||
schedule: "0 10 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Delete Logs
|
||||
command: python /code/cron.py -j delete_logs
|
||||
shell: /bin/bash
|
||||
schedule: "0 11 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Poll Apple Subscriptions
|
||||
command: python /code/cron.py -j poll_apple_subscription
|
||||
shell: /bin/bash
|
||||
schedule: "0 12 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Delete Old Monitoring records
|
||||
command: python /code/cron.py -j delete_old_monitoring
|
||||
shell: /bin/bash
|
||||
schedule: "0 14 * * *"
|
||||
schedule: "15 1 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Custom Domain check
|
||||
command: python /code/cron.py -j check_custom_domain
|
||||
shell: /bin/bash
|
||||
schedule: "0 15 * * *"
|
||||
schedule: "15 2 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin HIBP check
|
||||
command: python /code/cron.py -j check_hibp
|
||||
shell: /bin/bash
|
||||
schedule: "0 18 * * *"
|
||||
schedule: "15 3 * * *"
|
||||
captureStderr: true
|
||||
concurrencyPolicy: Forbid
|
||||
|
||||
- name: SimpleLogin Notify HIBP breaches
|
||||
command: python /code/cron.py -j notify_hibp
|
||||
shell: /bin/bash
|
||||
schedule: "0 19 * * *"
|
||||
schedule: "15 4 * * *"
|
||||
captureStderr: true
|
||||
concurrencyPolicy: Forbid
|
||||
|
||||
- name: SimpleLogin Delete Logs
|
||||
command: python /code/cron.py -j delete_logs
|
||||
shell: /bin/bash
|
||||
schedule: "15 5 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Poll Apple Subscriptions
|
||||
command: python /code/cron.py -j poll_apple_subscription
|
||||
shell: /bin/bash
|
||||
schedule: "15 6 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Trial Ends
|
||||
command: python /code/cron.py -j notify_trial_end
|
||||
shell: /bin/bash
|
||||
schedule: "15 8 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Manual Subscription Ends
|
||||
command: python /code/cron.py -j notify_manual_subscription_end
|
||||
shell: /bin/bash
|
||||
schedule: "15 9 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin Notify Premium Ends
|
||||
command: python /code/cron.py -j notify_premium_end
|
||||
shell: /bin/bash
|
||||
schedule: "15 10 * * *"
|
||||
captureStderr: true
|
||||
|
||||
- name: SimpleLogin delete users scheduled to be deleted
|
||||
command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
|
||||
shell: /bin/bash
|
||||
schedule: "15 11 * * *"
|
||||
captureStderr: true
|
||||
concurrencyPolicy: Forbid
|
||||
|
||||
|
@ -106,8 +106,6 @@ from app.email_utils import (
|
||||
get_header_unicode,
|
||||
generate_reply_email,
|
||||
is_reverse_alias,
|
||||
normalize_reply_email,
|
||||
is_valid_email,
|
||||
replace,
|
||||
should_disable,
|
||||
parse_id_from_bounce,
|
||||
@ -123,6 +121,7 @@ from app.email_utils import (
|
||||
generate_verp_email,
|
||||
sl_formataddr,
|
||||
)
|
||||
from app.email_validation import is_valid_email, normalize_reply_email
|
||||
from app.errors import (
|
||||
NonReverseAliasInReplyPhase,
|
||||
VERPTransactional,
|
||||
@ -236,7 +235,6 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
||||
contact.mail_from = mail_from
|
||||
Session.commit()
|
||||
else:
|
||||
|
||||
try:
|
||||
contact = Contact.create(
|
||||
user_id=alias.user_id,
|
||||
@ -262,7 +260,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
||||
|
||||
Session.commit()
|
||||
except IntegrityError:
|
||||
LOG.w("Contact %s %s already exist", alias, contact_email)
|
||||
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
|
||||
Session.rollback()
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
|
||||
@ -280,6 +278,9 @@ def get_or_create_reply_to_contact(
|
||||
except ValueError:
|
||||
return
|
||||
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if not is_valid_email(contact_address):
|
||||
LOG.w(
|
||||
"invalid reply-to address %s. Parse from %s",
|
||||
@ -348,6 +349,10 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
||||
continue
|
||||
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
contact_name = full_address.display_name
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if contact:
|
||||
# update the contact name if needed
|
||||
if contact.name != full_address.display_name:
|
||||
@ -355,9 +360,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
||||
"Update contact %s name %s to %s",
|
||||
contact,
|
||||
contact.name,
|
||||
full_address.display_name,
|
||||
contact_name,
|
||||
)
|
||||
contact.name = full_address.display_name
|
||||
contact.name = contact_name
|
||||
Session.commit()
|
||||
else:
|
||||
LOG.d(
|
||||
@ -372,7 +377,7 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
||||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=contact_email,
|
||||
name=full_address.display_name,
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_email, alias),
|
||||
is_cc=header.lower() == "cc",
|
||||
automatic_created=True,
|
||||
@ -541,12 +546,20 @@ def sign_msg(msg: Message) -> Message:
|
||||
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
|
||||
|
||||
try:
|
||||
signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
|
||||
payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
|
||||
if not payload:
|
||||
raise PGPException("Empty signature by gnupg")
|
||||
|
||||
signature.set_payload(payload)
|
||||
except Exception:
|
||||
LOG.e("Cannot sign, try using pgpy")
|
||||
signature.set_payload(
|
||||
sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
)
|
||||
payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
|
||||
if not payload:
|
||||
raise PGPException("Empty signature by pgpy")
|
||||
|
||||
signature.set_payload(payload)
|
||||
|
||||
container.attach(signature)
|
||||
|
||||
@ -623,8 +636,8 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
|
||||
|
||||
user = alias.user
|
||||
|
||||
if user.disabled:
|
||||
LOG.w("User %s disabled, disable forwarding emails for %s", user, alias)
|
||||
if not user.can_send_or_receive():
|
||||
LOG.i(f"User {user} cannot receive emails")
|
||||
if should_ignore_bounce(envelope.mail_from):
|
||||
return [(True, status.E207)]
|
||||
else:
|
||||
@ -864,21 +877,22 @@ def forward_email_to_mailbox(
|
||||
headers_to_keep.append(headers.AUTHENTICATION_RESULTS)
|
||||
delete_all_headers_except(msg, headers_to_keep)
|
||||
|
||||
if mailbox.generic_subject:
|
||||
LOG.d("Use a generic subject for %s", mailbox)
|
||||
orig_subject = msg[headers.SUBJECT]
|
||||
orig_subject = get_header_unicode(orig_subject)
|
||||
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
|
||||
sender = msg[headers.FROM]
|
||||
sender = get_header_unicode(sender)
|
||||
msg = add_header(
|
||||
msg,
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with "{orig_subject}" as subject""",
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
|
||||
)
|
||||
|
||||
# create PGP email if needed
|
||||
if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
|
||||
LOG.d("Encrypt message using mailbox %s", mailbox)
|
||||
if mailbox.generic_subject:
|
||||
LOG.d("Use a generic subject for %s", mailbox)
|
||||
orig_subject = msg[headers.SUBJECT]
|
||||
orig_subject = get_header_unicode(orig_subject)
|
||||
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
|
||||
sender = msg[headers.FROM]
|
||||
sender = get_header_unicode(sender)
|
||||
msg = add_header(
|
||||
msg,
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with "{orig_subject}" as subject""",
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
|
||||
)
|
||||
|
||||
try:
|
||||
msg = prepare_pgp_message(
|
||||
@ -1055,13 +1069,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
user = alias.user
|
||||
mail_from = envelope.mail_from
|
||||
|
||||
if user.disabled:
|
||||
LOG.e(
|
||||
"User %s disabled, disable sending emails from %s to %s",
|
||||
user,
|
||||
alias,
|
||||
contact,
|
||||
)
|
||||
if not user.can_send_or_receive():
|
||||
LOG.i(f"User {user} cannot send emails")
|
||||
return False, status.E504
|
||||
|
||||
# Check if we need to reject or quarantine based on dmarc
|
||||
@ -1187,7 +1196,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
)
|
||||
|
||||
# replace reverse alias by real address for all contacts
|
||||
for (reply_email, website_email) in contact_query.values(
|
||||
for reply_email, website_email in contact_query.values(
|
||||
Contact.reply_email, Contact.website_email
|
||||
):
|
||||
msg = replace(msg, reply_email, website_email)
|
||||
@ -1242,7 +1251,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
||||
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
|
||||
# no need to replace TO header
|
||||
LOG.d("email is sent in BCC mode")
|
||||
del msg[headers.TO]
|
||||
else:
|
||||
replace_header_when_reply(msg, alias, headers.TO)
|
||||
|
||||
@ -1943,7 +1951,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
|
||||
for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
|
||||
res.append((is_delivered, smtp_status))
|
||||
|
||||
for (is_success, smtp_status) in res:
|
||||
for is_success, smtp_status in res:
|
||||
# Consider all deliveries successful if 1 delivery is successful
|
||||
if is_success:
|
||||
return smtp_status
|
||||
@ -2263,7 +2271,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
|
||||
if nb_success > 0 and nb_non_success > 0:
|
||||
LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")
|
||||
|
||||
for (is_success, smtp_status) in res:
|
||||
for is_success, smtp_status in res:
|
||||
# Consider all deliveries successful if 1 delivery is successful
|
||||
if is_success:
|
||||
return smtp_status
|
||||
|
@ -89,7 +89,6 @@ aghast
|
||||
agile
|
||||
agility
|
||||
aging
|
||||
agnostic
|
||||
agonize
|
||||
agonizing
|
||||
agony
|
||||
@ -375,8 +374,6 @@ augmented
|
||||
august
|
||||
authentic
|
||||
author
|
||||
autism
|
||||
autistic
|
||||
autograph
|
||||
automaker
|
||||
automated
|
||||
@ -446,7 +443,6 @@ backyard
|
||||
bacon
|
||||
bacteria
|
||||
bacterium
|
||||
badass
|
||||
badge
|
||||
badland
|
||||
badly
|
||||
@ -1106,7 +1102,6 @@ clinic
|
||||
clinking
|
||||
clip
|
||||
clique
|
||||
cloak
|
||||
clobber
|
||||
clock
|
||||
clone
|
||||
@ -1776,7 +1771,6 @@ diagnosis
|
||||
diagram
|
||||
dial
|
||||
diameter
|
||||
diaper
|
||||
diaphragm
|
||||
diary
|
||||
dice
|
||||
@ -2032,9 +2026,6 @@ duffel
|
||||
dugout
|
||||
duh
|
||||
duke
|
||||
duller
|
||||
dullness
|
||||
duly
|
||||
dumping
|
||||
dumpling
|
||||
dumpster
|
||||
@ -2527,8 +2518,6 @@ feisty
|
||||
feline
|
||||
felt-tip
|
||||
feminine
|
||||
feminism
|
||||
feminist
|
||||
feminize
|
||||
femur
|
||||
fence
|
||||
@ -2667,7 +2656,6 @@ fondness
|
||||
fondue
|
||||
font
|
||||
food
|
||||
fool
|
||||
footage
|
||||
football
|
||||
footbath
|
||||
@ -2777,7 +2765,6 @@ gag
|
||||
gainfully
|
||||
gaining
|
||||
gains
|
||||
gala
|
||||
gallantly
|
||||
galleria
|
||||
gallery
|
||||
@ -3164,8 +3151,6 @@ hardware
|
||||
hardwired
|
||||
hardwood
|
||||
hardy
|
||||
harmful
|
||||
harmless
|
||||
harmonica
|
||||
harmonics
|
||||
harmonize
|
||||
@ -3340,7 +3325,6 @@ identical
|
||||
identify
|
||||
identity
|
||||
ideology
|
||||
idiocy
|
||||
idiom
|
||||
idly
|
||||
igloo
|
||||
@ -3357,7 +3341,6 @@ imaging
|
||||
imbecile
|
||||
imitate
|
||||
imitation
|
||||
immature
|
||||
immerse
|
||||
immersion
|
||||
imminent
|
||||
@ -3387,14 +3370,10 @@ implode
|
||||
implosion
|
||||
implosive
|
||||
imply
|
||||
impolite
|
||||
important
|
||||
importer
|
||||
impose
|
||||
imposing
|
||||
impotence
|
||||
impotency
|
||||
impotent
|
||||
impound
|
||||
imprecise
|
||||
imprint
|
||||
@ -3424,8 +3403,6 @@ irritable
|
||||
irritably
|
||||
irritant
|
||||
irritate
|
||||
islamic
|
||||
islamist
|
||||
isolated
|
||||
isolating
|
||||
isolation
|
||||
@ -3524,7 +3501,6 @@ june
|
||||
junior
|
||||
juniper
|
||||
junkie
|
||||
junkman
|
||||
junkyard
|
||||
jurist
|
||||
juror
|
||||
@ -3570,9 +3546,6 @@ king
|
||||
kinship
|
||||
kinsman
|
||||
kinswoman
|
||||
kissable
|
||||
kisser
|
||||
kissing
|
||||
kitchen
|
||||
kite
|
||||
kitten
|
||||
@ -3649,7 +3622,6 @@ laundry
|
||||
laurel
|
||||
lavender
|
||||
lavish
|
||||
laxative
|
||||
lazily
|
||||
laziness
|
||||
lazy
|
||||
@ -3690,7 +3662,6 @@ liable
|
||||
liberty
|
||||
librarian
|
||||
library
|
||||
licking
|
||||
licorice
|
||||
lid
|
||||
life
|
||||
@ -3741,8 +3712,6 @@ livestock
|
||||
lividly
|
||||
living
|
||||
lizard
|
||||
lubricant
|
||||
lubricate
|
||||
lucid
|
||||
luckily
|
||||
luckiness
|
||||
@ -3878,7 +3847,6 @@ marshland
|
||||
marshy
|
||||
marsupial
|
||||
marvelous
|
||||
marxism
|
||||
mascot
|
||||
masculine
|
||||
mashed
|
||||
@ -3914,8 +3882,6 @@ maximum
|
||||
maybe
|
||||
mayday
|
||||
mayflower
|
||||
moaner
|
||||
moaning
|
||||
mobile
|
||||
mobility
|
||||
mobilize
|
||||
@ -4124,7 +4090,6 @@ nemeses
|
||||
nemesis
|
||||
neon
|
||||
nephew
|
||||
nerd
|
||||
nervous
|
||||
nervy
|
||||
nest
|
||||
@ -4139,7 +4104,6 @@ never
|
||||
next
|
||||
nibble
|
||||
nickname
|
||||
nicotine
|
||||
niece
|
||||
nifty
|
||||
nimble
|
||||
@ -4167,14 +4131,10 @@ nuptials
|
||||
nursery
|
||||
nursing
|
||||
nurture
|
||||
nutcase
|
||||
nutlike
|
||||
nutmeg
|
||||
nutrient
|
||||
nutshell
|
||||
nuttiness
|
||||
nutty
|
||||
nuzzle
|
||||
nylon
|
||||
oaf
|
||||
oak
|
||||
@ -4205,7 +4165,6 @@ obstinate
|
||||
obstruct
|
||||
obtain
|
||||
obtrusive
|
||||
obtuse
|
||||
obvious
|
||||
occultist
|
||||
occupancy
|
||||
@ -4446,7 +4405,6 @@ palpitate
|
||||
paltry
|
||||
pampered
|
||||
pamperer
|
||||
pampers
|
||||
pamphlet
|
||||
panama
|
||||
pancake
|
||||
@ -4651,7 +4609,6 @@ plated
|
||||
platform
|
||||
plating
|
||||
platinum
|
||||
platonic
|
||||
platter
|
||||
platypus
|
||||
plausible
|
||||
@ -4777,8 +4734,6 @@ prancing
|
||||
pranker
|
||||
prankish
|
||||
prankster
|
||||
prayer
|
||||
praying
|
||||
preacher
|
||||
preaching
|
||||
preachy
|
||||
@ -4796,8 +4751,6 @@ prefix
|
||||
preflight
|
||||
preformed
|
||||
pregame
|
||||
pregnancy
|
||||
pregnant
|
||||
preheated
|
||||
prelaunch
|
||||
prelaw
|
||||
@ -4937,7 +4890,6 @@ prudishly
|
||||
prune
|
||||
pruning
|
||||
pry
|
||||
psychic
|
||||
public
|
||||
publisher
|
||||
pucker
|
||||
@ -4957,8 +4909,7 @@ punctual
|
||||
punctuate
|
||||
punctured
|
||||
pungent
|
||||
punisher
|
||||
punk
|
||||
punishe
|
||||
pupil
|
||||
puppet
|
||||
puppy
|
||||
@ -5040,7 +4991,6 @@ quote
|
||||
rabid
|
||||
race
|
||||
racing
|
||||
racism
|
||||
rack
|
||||
racoon
|
||||
radar
|
||||
@ -5155,7 +5105,6 @@ recount
|
||||
recoup
|
||||
recovery
|
||||
recreate
|
||||
rectal
|
||||
rectangle
|
||||
rectified
|
||||
rectify
|
||||
@ -5622,7 +5571,6 @@ sarcastic
|
||||
sardine
|
||||
sash
|
||||
sasquatch
|
||||
sassy
|
||||
satchel
|
||||
satiable
|
||||
satin
|
||||
@ -5651,7 +5599,6 @@ scaling
|
||||
scallion
|
||||
scallop
|
||||
scalping
|
||||
scam
|
||||
scandal
|
||||
scanner
|
||||
scanning
|
||||
@ -5928,8 +5875,6 @@ silent
|
||||
silica
|
||||
silicon
|
||||
silk
|
||||
silliness
|
||||
silly
|
||||
silo
|
||||
silt
|
||||
silver
|
||||
@ -5991,7 +5936,6 @@ skimmer
|
||||
skimming
|
||||
skimpily
|
||||
skincare
|
||||
skinhead
|
||||
skinless
|
||||
skinning
|
||||
skinny
|
||||
@ -6197,7 +6141,6 @@ splinter
|
||||
splotchy
|
||||
splurge
|
||||
spoilage
|
||||
spoiled
|
||||
spoiler
|
||||
spoiling
|
||||
spoils
|
||||
@ -7079,7 +7022,6 @@ undocked
|
||||
undoing
|
||||
undone
|
||||
undrafted
|
||||
undress
|
||||
undrilled
|
||||
undusted
|
||||
undying
|
||||
|
@ -158677,16 +158677,6 @@ isis
|
||||
isize
|
||||
isl
|
||||
islay
|
||||
islam
|
||||
islamic
|
||||
islamism
|
||||
islamist
|
||||
islamistic
|
||||
islamite
|
||||
islamitic
|
||||
islamitish
|
||||
islamization
|
||||
islamize
|
||||
island
|
||||
islanded
|
||||
islander
|
||||
|
33
app/migrations/versions/2023_090715_0a5701a4f5e4_.py
Normal file
@ -0,0 +1,33 @@
"""empty message

Revision ID: 0a5701a4f5e4
Revises: 01827104004b
Create Date: 2023-09-07 15:28:10.122756

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0a5701a4f5e4'
down_revision = '01827104004b'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('delete_on', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True))
    with op.get_context().autocommit_block():
        op.create_index('ix_users_delete_on', 'users', ['delete_on'], unique=False, postgresql_concurrently=True)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.get_context().autocommit_block():
        op.drop_index('ix_users_delete_on', table_name='users', postgresql_concurrently=True)
    op.drop_column('users', 'delete_on')
    # ### end Alembic commands ###
34
app/migrations/versions/2023_092818_ec7fdde8da9f_.py
Normal file
@ -0,0 +1,34 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: ec7fdde8da9f
|
||||
Revises: 0a5701a4f5e4
|
||||
Create Date: 2023-09-28 18:09:48.016620
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "ec7fdde8da9f"
|
||||
down_revision = "0a5701a4f5e4"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.create_index(
|
||||
"ix_email_log_created_at", "email_log", ["created_at"], unique=False
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.drop_index("ix_email_log_created_at", table_name="email_log")
|
||||
# ### end Alembic commands ###
|
39
app/migrations/versions/2023_100510_46ecb648a47e_.py
Normal file
@ -0,0 +1,39 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 46ecb648a47e
|
||||
Revises: ec7fdde8da9f
|
||||
Create Date: 2023-10-05 10:43:35.668902
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "46ecb648a47e"
|
||||
down_revision = "ec7fdde8da9f"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.create_index(
|
||||
op.f("ix_message_id_matching_email_log_id"),
|
||||
"message_id_matching",
|
||||
["email_log_id"],
|
||||
unique=False,
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.drop_index(
|
||||
op.f("ix_message_id_matching_email_log_id"),
|
||||
table_name="message_id_matching",
|
||||
)
|
||||
# ### end Alembic commands ###
|
31
app/migrations/versions/2023_110714_4bc54632d9aa_.py
Normal file
@ -0,0 +1,31 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 4bc54632d9aa
|
||||
Revises: 46ecb648a47e
|
||||
Create Date: 2023-11-07 14:02:17.610226
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '4bc54632d9aa'
|
||||
down_revision = '46ecb648a47e'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('ix_newsletter_subject', table_name='newsletter')
|
||||
op.create_index(op.f('ix_newsletter_subject'), 'newsletter', ['subject'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_newsletter_subject'), table_name='newsletter')
|
||||
op.create_index('ix_newsletter_subject', 'newsletter', ['subject'], unique=True)
|
||||
# ### end Alembic commands ###
|
0
app/monitor/__init__.py
Normal file
21
app/monitor/metric.py
Normal file
@ -0,0 +1,21 @@
from dataclasses import dataclass
from typing import List


@dataclass
class UpcloudRecord:
    db_role: str
    label: str
    time: str
    value: float


@dataclass
class UpcloudMetric:
    metric_name: str
    records: List[UpcloudRecord]


@dataclass
class UpcloudMetrics:
    metrics: List[UpcloudMetric]
20
app/monitor/metric_exporter.py
Normal file
@ -0,0 +1,20 @@
from app.config import UPCLOUD_DB_ID, UPCLOUD_PASSWORD, UPCLOUD_USERNAME
from app.log import LOG
from monitor.newrelic import NewRelicClient
from monitor.upcloud import UpcloudClient


class MetricExporter:
    def __init__(self, newrelic_license: str):
        self.__upcloud = UpcloudClient(
            username=UPCLOUD_USERNAME, password=UPCLOUD_PASSWORD
        )
        self.__newrelic = NewRelicClient(newrelic_license)

    def run(self):
        try:
            metrics = self.__upcloud.get_metrics(UPCLOUD_DB_ID)
            self.__newrelic.send(metrics)
            LOG.info("Upcloud metrics sent to NewRelic")
        except Exception as e:
            LOG.warn(f"Could not export metrics: {e}")
26
app/monitor/newrelic.py
Normal file
@ -0,0 +1,26 @@
from monitor.metric import UpcloudMetrics

from newrelic_telemetry_sdk import GaugeMetric, MetricClient

_NEWRELIC_BASE_HOST = "metric-api.eu.newrelic.com"


class NewRelicClient:
    def __init__(self, license_key: str):
        self.__client = MetricClient(license_key=license_key, host=_NEWRELIC_BASE_HOST)

    def send(self, metrics: UpcloudMetrics):
        batch = []

        for metric in metrics.metrics:
            for record in metric.records:
                batch.append(
                    GaugeMetric(
                        name=f"upcloud.db.{metric.metric_name}",
                        value=record.value,
                        tags={"host": record.label, "db_role": record.db_role},
                    )
                )

        response = self.__client.send_batch(batch)
        response.raise_for_status()
82
app/monitor/upcloud.py
Normal file
@ -0,0 +1,82 @@
from app.log import LOG
from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord

import base64
import requests
from typing import Any


BASE_URL = "https://api.upcloud.com"


def get_metric(json: Any, metric: str) -> UpcloudMetric:
    records = []

    if metric in json:
        metric_data = json[metric]
        data = metric_data["data"]
        cols = list(map(lambda x: x["label"], data["cols"][1:]))
        latest = data["rows"][-1]
        time = latest[0]
        for column_idx in range(len(cols)):
            value = latest[1 + column_idx]

            # If the latest value is None, try to fetch the second to last
            if value is None:
                value = data["rows"][-2][1 + column_idx]

            if value is not None:
                label = cols[column_idx]
                if "(master)" in label:
                    db_role = "master"
                else:
                    db_role = "standby"
                records.append(
                    UpcloudRecord(time=time, db_role=db_role, label=label, value=value)
                )
            else:
                LOG.warn(f"Could not get value for metric {metric}")

    return UpcloudMetric(metric_name=metric, records=records)


def get_metrics(json: Any) -> UpcloudMetrics:
    return UpcloudMetrics(
        metrics=[
            get_metric(json, "cpu_usage"),
            get_metric(json, "disk_usage"),
            get_metric(json, "diskio_reads"),
            get_metric(json, "diskio_writes"),
            get_metric(json, "load_average"),
            get_metric(json, "mem_usage"),
            get_metric(json, "net_receive"),
            get_metric(json, "net_send"),
        ]
    )


class UpcloudClient:
    def __init__(self, username: str, password: str):
        if not username:
            raise Exception("UpcloudClient username must be set")
        if not password:
            raise Exception("UpcloudClient password must be set")

        client = requests.Session()
        encoded_auth = base64.b64encode(
            f"{username}:{password}".encode("utf-8")
        ).decode("utf-8")
        client.headers = {"Authorization": f"Basic {encoded_auth}"}
        self.__client = client

    def get_metrics(self, db_uuid: str) -> UpcloudMetrics:
        url = f"{BASE_URL}/1.3/database/{db_uuid}/metrics?period=hour"
        LOG.d(f"Performing request to {url}")
        response = self.__client.get(url)
        LOG.d(f"Status code: {response.status_code}")
        if response.status_code != 200:
            return UpcloudMetrics(metrics=[])

        as_json = response.json()

        return get_metrics(as_json)
@ -1,3 +1,4 @@
import configparser
import os
import subprocess
from time import sleep
@ -7,6 +8,7 @@ import newrelic.agent

from app.db import Session
from app.log import LOG
from monitor.metric_exporter import MetricExporter

# the number of consecutive fails
# if more than _max_nb_fails, alert
@ -19,6 +21,18 @@ _max_nb_fails = 10
# the maximum number of emails in incoming & active queue
_max_incoming = 50

_NR_CONFIG_FILE_LOCATION_VAR = "NEW_RELIC_CONFIG_FILE"

def get_newrelic_license() -> str:
nr_file = os.environ.get(_NR_CONFIG_FILE_LOCATION_VAR, None)
if nr_file is None:
raise Exception(f"{_NR_CONFIG_FILE_LOCATION_VAR} not defined")

config = configparser.ConfigParser()
config.read(nr_file)
return config["newrelic"]["license_key"]

@newrelic.agent.background_task()
def log_postfix_metrics():
@ -80,10 +94,13 @@ def log_nb_db_connection():

if __name__ == "__main__":
exporter = MetricExporter(get_newrelic_license())
while True:
log_postfix_metrics()
log_nb_db_connection()
Session.close()

exporter.run()

# 1 min
sleep(60)
502
app/poetry.lock
generated
File diff suppressed because it is too large
@ -18,6 +18,9 @@ exclude = '''
)
'''

[tool.ruff]
ignore-init-module-imports = true

[tool.djlint]
indent = 2
profile = "jinja"
@ -53,7 +56,7 @@ packages = [
include = ["templates/*", "templates/**/*", "local_data/*.txt"]

[tool.poetry.dependencies]
python = "^3.7.2"
python = "^3.10"
flask = "^1.1.2"
flask_login = "^0.5.0"
wtforms = "^2.3.3"
@ -96,7 +99,6 @@ pyspf = "^2.0.14"
Flask-Limiter = "^1.4"
memory_profiler = "^0.57.0"
gevent = "22.10.2"
aiospamc = "^0.6.1"
email_validator = "^1.1.1"
PGPy = "0.5.4"
coinbase-commerce = "^1.0.1"
@ -111,6 +113,8 @@ Deprecated = "^1.2.13"
cryptography = "37.0.1"
SQLAlchemy = "1.3.24"
redis = "^4.5.3"
newrelic-telemetry-sdk = "^0.5.0"
aiospamc = "0.10"

[tool.poetry.dev-dependencies]
pytest = "^7.0.0"
@ -120,6 +124,9 @@ black = "^22.1.0"
djlint = "^1.3.0"
pylint = "^2.14.4"

[tool.poetry.group.dev.dependencies]
ruff = "^0.1.5"

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
@ -407,8 +407,10 @@ def jinja2_filter(app):
|
||||
|
||||
@app.context_processor
|
||||
def inject_stage_and_region():
|
||||
now = arrow.now()
|
||||
return dict(
|
||||
YEAR=arrow.now().year,
|
||||
YEAR=now.year,
|
||||
NOW=now,
|
||||
URL=URL,
|
||||
SENTRY_DSN=SENTRY_FRONT_END_DSN,
|
||||
VERSION=SHA1,
|
||||
@ -641,7 +643,7 @@ def setup_paddle_callback(app: Flask):
|
||||
|
||||
@app.route("/paddle_coupon", methods=["GET", "POST"])
|
||||
def paddle_coupon():
|
||||
LOG.d(f"paddle coupon callback %s", request.form)
|
||||
LOG.d("paddle coupon callback %s", request.form)
|
||||
|
||||
if not paddle_utils.verify_incoming_request(dict(request.form)):
|
||||
LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
|
||||
|
@ -1,13 +1,12 @@
|
||||
from time import sleep
|
||||
|
||||
import flask_migrate
|
||||
from IPython import embed
|
||||
from sqlalchemy_utils import create_database, database_exists, drop_database
|
||||
|
||||
from app import models
|
||||
from app.config import DB_URI
|
||||
from app.models import *
|
||||
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import User, RecoveryCode
|
||||
|
||||
if False:
|
||||
# noinspection PyUnreachableCode
|
||||
|
Binary file not shown.
Before: Size 13 KiB | After: Size 38 KiB
@ -86,6 +86,12 @@
|
||||
</head>
|
||||
<body>
|
||||
<div class="page">
|
||||
{% if NOW.timestamp < 1701475201 and current_user.is_authenticated and current_user.should_show_upgrade_button() %}
|
||||
|
||||
<div class="alert alert-success text-center mb-0" role="alert">
|
||||
Black Friday: $20 for the first year instead of $30. Available until December 1st.
|
||||
</div>
|
||||
{% endif %}
|
||||
{% block announcement %}{% endblock %}
|
||||
<div class="container">
|
||||
<!-- For flash messages -->
|
||||
|
@ -133,6 +133,7 @@
|
||||
<div>
|
||||
<span>
|
||||
<a href="{{ 'mailto:' + contact.website_send_to() }}"
|
||||
target="_blank"
|
||||
data-toggle="tooltip"
|
||||
title="You can click on this to open your email client. Or use the copy button 👉"
|
||||
class="font-weight-bold">
|
||||
|
@ -48,7 +48,7 @@
|
||||
{% if scope == "email" %}
|
||||
|
||||
Email:
|
||||
<a href="mailto:{{ val }}">{{ val }}</a>
|
||||
<a href="mailto:{{ val }}" target="_blank">{{ val }}</a>
|
||||
{% elif scope == "name" %}
|
||||
Name: {{ val }}
|
||||
{% endif %}
|
||||
|
@ -268,7 +268,7 @@
|
||||
If you are using a subdomain, e.g. <i>subdomain.domain.com</i>,
|
||||
you need to use <i>dkim._domainkey.subdomain</i> as the domain instead.
|
||||
<br />
|
||||
That means, if your domain is <i>mail.domain.com</i> you should enter <i>dkim._domainkey.mail.domain.com</i> as the Domain.
|
||||
That means, if your domain is <i>mail.domain.com</i> you should enter <i>dkim._domainkey.mail</i> as the Domain.
|
||||
<br />
|
||||
</div>
|
||||
<div class="alert alert-info">
|
||||
|
@ -71,177 +71,181 @@
|
||||
</form>
|
||||
</div>
|
||||
<!-- END Change email -->
|
||||
{% if mailbox.pgp_finger_print and not mailbox.disable_pgp and current_user.include_sender_in_reverse_alias %}
|
||||
<!-- Not show PGP option for Proton mailbox -->
|
||||
{% if mailbox.is_proton() and not mailbox.pgp_enabled() %}
|
||||
|
||||
<div class="alert alert-info">
|
||||
Email headers like <span class="italic">From, To, Subject</span> aren't encrypted by PGP.
|
||||
Currently, your reverse alias includes the sender address.
|
||||
You can disable this on <a href="/dashboard/setting#sender-in-ra">Settings</a>.
|
||||
As an email is always encrypted at rest in Proton Mail, having SimpleLogin also encrypt your email is redundant and does not add any security benefit.
|
||||
<br>
|
||||
The PGP option on SimpleLogin is instead useful for when your mailbox provider isn't encrypted by default like Gmail, Outlook, etc.
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="card">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
<div class="d-flex">
|
||||
Pretty Good Privacy (PGP)
|
||||
<div class="{% if mailbox.is_proton() and not mailbox.pgp_enabled() %}
|
||||
disabled-content{% endif %}">
|
||||
{% if mailbox.pgp_finger_print and not mailbox.disable_pgp and current_user.include_sender_in_reverse_alias and not mailbox.is_proton() %}
|
||||
|
||||
<div class="alert alert-info">
|
||||
Email headers like <span class="italic">From, To, Subject</span> aren't encrypted by PGP.
|
||||
Currently, your reverse alias includes the sender address.
|
||||
You can disable this on <a href="/dashboard/setting#sender-in-ra">Settings</a>.
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="card">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
<div class="d-flex">
|
||||
Pretty Good Privacy (PGP)
|
||||
{% if mailbox.pgp_finger_print %}
|
||||
|
||||
<form method="post">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="toggle-pgp">
|
||||
<label class="custom-switch cursor" style="padding-left: 1rem" data-toggle="tooltip" {% if mailbox.disable_pgp %}
|
||||
title="Enable PGP" {% else %} title="Disable PGP" {% endif %}>
|
||||
<input type="checkbox" class="custom-switch-input" name="pgp-enabled" {{ "" if mailbox.disable_pgp else "checked" }}>
|
||||
<span class="custom-switch-indicator"></span>
|
||||
</label>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="small-text mt-1">
|
||||
By importing your PGP Public Key into SimpleLogin, all emails sent to {{ mailbox.email }} are
|
||||
<b>encrypted</b> with your key.
|
||||
<br />
|
||||
{% if PGP_SIGNER %}All forwarded emails will be signed with <b>{{ PGP_SIGNER }}</b>.{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% if not current_user.is_premium() %}
|
||||
|
||||
<div class="alert alert-danger" role="alert">This feature is only available in premium plan.</div>
|
||||
{% endif %}
|
||||
<form method="post">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<div class="form-group">
|
||||
<label class="form-label">PGP Public Key</label>
|
||||
<textarea name="pgp" {% if not current_user.is_premium() %} disabled {% endif %} class="form-control" rows=10 id="pgp-public-key" placeholder="(Drag and drop or paste your pgp public key here) -----BEGIN PGP PUBLIC KEY BLOCK-----">{{ mailbox.pgp_public_key or "" }}</textarea>
|
||||
</div>
|
||||
<input type="hidden" name="form-name" value="pgp">
|
||||
<button class="btn btn-primary" name="action" {% if not current_user.is_premium() %}
|
||||
disabled {% endif %} value="save">
|
||||
Save
|
||||
</button>
|
||||
{% if mailbox.pgp_finger_print %}
|
||||
|
||||
<form method="post">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="toggle-pgp">
|
||||
<label class="custom-switch cursor" style="padding-left: 1rem" data-toggle="tooltip" {% if mailbox.disable_pgp %}
|
||||
title="Enable PGP" {% else %} title="Disable PGP" {% endif %}>
|
||||
<input type="checkbox" class="custom-switch-input" name="pgp-enabled" {{ "" if mailbox.disable_pgp else "checked" }}>
|
||||
<span class="custom-switch-indicator"></span>
|
||||
</label>
|
||||
</form>
|
||||
<button class="btn btn-danger float-right" name="action" value="remove">Remove</button>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="small-text mt-1">
|
||||
By importing your PGP Public Key into SimpleLogin, all emails sent to {{ mailbox.email }} are
|
||||
<b>encrypted</b> with your key.
|
||||
<br />
|
||||
{% if PGP_SIGNER %}All forwarded emails will be signed with <b>{{ PGP_SIGNER }}</b>.{% endif %}
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
{% if not current_user.is_premium() %}
|
||||
|
||||
<div class="alert alert-danger" role="alert">This feature is only available in premium plan.</div>
|
||||
{% endif %}
|
||||
<form method="post">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<div class="form-group">
|
||||
<label class="form-label">PGP Public Key</label>
|
||||
<textarea name="pgp" {% if not current_user.is_premium() %} disabled {% endif %} class="form-control" rows=10 id="pgp-public-key" placeholder="(Drag and drop or paste your pgp public key here) -----BEGIN PGP PUBLIC KEY BLOCK-----">{{ mailbox.pgp_public_key or "" }}</textarea>
|
||||
</div>
|
||||
<input type="hidden" name="form-name" value="pgp">
|
||||
<button class="btn btn-primary" name="action" {% if not current_user.is_premium() %}
|
||||
disabled {% endif %} value="save">
|
||||
Save
|
||||
</button>
|
||||
{% if mailbox.pgp_finger_print %}
|
||||
|
||||
<button class="btn btn-danger float-right" name="action" value="remove">Remove</button>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card" {% if not mailbox.pgp_enabled() %}
|
||||
disabled {% endif %}>
|
||||
<form method="post">
|
||||
<div class="card" id="generic-subject">
|
||||
<form method="post" action="#generic-subject">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="generic-subject">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
Hide email subject when PGP is enabled
|
||||
Hide email subject
|
||||
<div class="small-text mt-1">
|
||||
When PGP is enabled, you can choose to use a <b>generic</b> subject for the forwarded emails.
|
||||
The original subject is then added into the email body.
|
||||
The original subject will be added to the email body and all forwarded emails will have the generic subject.
|
||||
<br />
|
||||
As PGP does not encrypt the email subject and the email subject might contain sensitive information,
|
||||
this option will allow a further protection of your email content.
|
||||
This option is often used when PGP is enabled.
|
||||
As PGP does not encrypt the email subject, it allows a further protection of your email content.
|
||||
</div>
|
||||
</div>
|
||||
<div class="alert alert-info">
|
||||
As the email is encrypted, a subject like "Email for you"
|
||||
will probably be rejected by your mailbox since it sounds like a spam.
|
||||
<br />
|
||||
Something like "Encrypted Email" would work much better :).
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="form-label">Generic Subject</label>
|
||||
<input name="generic-subject" {% if not mailbox.pgp_enabled() %}
|
||||
disabled {% endif %} class="form-control" maxlength="78" placeholder="Generic Subject" value="{{ mailbox.generic_subject or "" }}">
|
||||
</div>
|
||||
<button class="btn btn-primary" name="action" {% if not mailbox.pgp_enabled() %}
|
||||
disabled {% endif %} value="save">
|
||||
Save
|
||||
</button>
|
||||
{% if mailbox.generic_subject %}
|
||||
<input name="generic-subject"
|
||||
class="form-control"
|
||||
maxlength="78"
|
||||
placeholder="Generic Subject"
|
||||
value="{{ mailbox.generic_subject or "" }}">
|
||||
</div>
|
||||
<button class="btn btn-primary" name="action" value="save">Save</button>
|
||||
{% if mailbox.generic_subject %}
|
||||
|
||||
<button class="btn btn-danger float-right" name="action" value="remove">Remove</button>
|
||||
{% endif %}
|
||||
<button class="btn btn-danger float-right" name="action" value="remove">Remove</button>
|
||||
{% endif %}
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<hr />
|
||||
<h2 class="h4">Advanced Options</h2>
|
||||
{% if spf_available %}
|
||||
|
||||
<div class="card" id="spf">
|
||||
<form method="post">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="force-spf">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
Enforce SPF
|
||||
<div class="small-text">
|
||||
To avoid email-spoofing, SimpleLogin blocks email that
|
||||
<em data-toggle="tooltip"
|
||||
title="Email that has your mailbox as envelope-sender address">seems</em> to come from your
|
||||
mailbox
|
||||
but sent from <em data-toggle="tooltip"
|
||||
title="IP Address that is not known by your mailbox email service">unknown</em>
|
||||
IP address.
|
||||
<br />
|
||||
Only turn off this option if you know what you're doing :).
|
||||
</div>
|
||||
</div>
|
||||
<label class="custom-switch cursor mt-2 pl-0" data-toggle="tooltip" {% if mailbox.force_spf %}
|
||||
title="Disable SPF enforcement" {% else %} title="Enable SPF enforcement" {% endif %}>
|
||||
<input type="checkbox" name="spf-status" class="custom-switch-input" {{ "checked" if mailbox.force_spf else "" }}>
|
||||
<span class="custom-switch-indicator"></span>
|
||||
</label>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<hr />
|
||||
<h2 class="h4">Advanced Options</h2>
|
||||
{% if spf_available %}
|
||||
|
||||
<div class="card" id="spf">
|
||||
<form method="post">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="force-spf">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
Enforce SPF
|
||||
<div class="small-text">
|
||||
To avoid email-spoofing, SimpleLogin blocks email that
|
||||
<em data-toggle="tooltip"
|
||||
title="Email that has your mailbox as envelope-sender address">seems</em> to come from your
|
||||
mailbox
|
||||
but sent from <em data-toggle="tooltip"
|
||||
title="IP Address that is not known by your mailbox email service">unknown</em>
|
||||
IP address.
|
||||
<br />
|
||||
Only turn off this option if you know what you're doing :).
|
||||
</div>
|
||||
</div>
|
||||
<label class="custom-switch cursor mt-2 pl-0" data-toggle="tooltip" {% if mailbox.force_spf %}
|
||||
title="Disable SPF enforcement" {% else %} title="Enable SPF enforcement" {% endif %}>
|
||||
<input type="checkbox" name="spf-status" class="custom-switch-input" {{ "checked" if mailbox.force_spf else "" }}>
|
||||
<span class="custom-switch-indicator"></span>
|
||||
</label>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="card" id="authorized-address">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
Authorized addresses
|
||||
<div class="small-text">
|
||||
Emails sent from these addresses to a <b>reverse-alias</b> are considered as being sent
|
||||
from {{ mailbox.email }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="card" id="authorized-address">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
Authorized addresses
|
||||
<div class="small-text">
|
||||
Emails sent from these addresses to a <b>reverse-alias</b> are considered as being sent
|
||||
from {{ mailbox.email }}
|
||||
</div>
|
||||
{% if mailbox.authorized_addresses | length == 0 %}
|
||||
|
||||
{% else %}
|
||||
<ul>
|
||||
{% for authorized_address in mailbox.authorized_addresses %}
|
||||
|
||||
<li>
|
||||
{{ authorized_address.email }}
|
||||
<form method="post" action="#authorized-address" style="display: inline">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="delete-authorized-address">
|
||||
<input type="hidden"
|
||||
name="authorized-address-id"
|
||||
value="{{ authorized_address.id }}">
|
||||
<input type="submit" class="btn btn-sm btn-outline-warning" value="Delete">
|
||||
</form>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
<form method="post" action="#authorized-address" class="form-inline">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="add-authorized-address">
|
||||
<input type="email" name="email" size="50" class="form-control" required>
|
||||
<input type="submit" class="btn btn-primary" value="Add">
|
||||
</form>
|
||||
</div>
|
||||
{% if mailbox.authorized_addresses | length == 0 %}
|
||||
|
||||
{% else %}
|
||||
<ul>
|
||||
{% for authorized_address in mailbox.authorized_addresses %}
|
||||
|
||||
<li>
|
||||
{{ authorized_address.email }}
|
||||
<form method="post" action="#authorized-address" style="display: inline">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="delete-authorized-address">
|
||||
<input type="hidden"
|
||||
name="authorized-address-id"
|
||||
value="{{ authorized_address.id }}">
|
||||
<input type="submit" class="btn btn-sm btn-outline-warning" value="Delete">
|
||||
</form>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
<form method="post" action="#authorized-address" class="form-inline">
|
||||
{{ csrf_form.csrf_token }}
|
||||
<input type="hidden" name="form-name" value="add-authorized-address">
|
||||
<input type="email" name="email" size="50" class="form-control" required>
|
||||
<input type="submit" class="btn btn-primary" value="Add">
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% block script %}
|
||||
<script src="/static/js/utils/drag-drop-into-text.js"></script>
|
||||
<script>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% block script %}
|
||||
<script src="/static/js/utils/drag-drop-into-text.js"></script>
|
||||
<script>
|
||||
$(".custom-switch-input").change(function (e) {
|
||||
$(this).closest("form").submit();
|
||||
});
|
||||
enableDragDropForPGPKeys('#pgp-public-key');
|
||||
</script>
|
||||
{% endblock %}
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
@ -57,6 +57,22 @@
|
||||
{% endblock %}
|
||||
{% block default_content %}
|
||||
|
||||
{% if NOW.timestamp < 1701475201 %}
|
||||
|
||||
<div class="alert alert-info">
|
||||
Black Friday Deal: 33% off on the yearly plan for the <b>first</b> year ($20 instead of $30).
|
||||
<br>
|
||||
Please use this coupon code
|
||||
<em data-toggle="tooltip"
|
||||
title="Click to copy"
|
||||
class="clipboard"
|
||||
data-clipboard-text="BF2023">BF2023</em> during the checkout.
|
||||
<br>
|
||||
<img src="/static/images/coupon.png" class="m-2" style="max-width: 300px">
|
||||
<br>
|
||||
Available until December 1, 2023.
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="pb-8">
|
||||
<div class="text-center mx-md-auto mb-8 mt-6">
|
||||
<h1>Upgrade to unlock premium features</h1>
|
||||
@ -207,7 +223,7 @@
|
||||
<div class="card-body">
<div class="text-center">
<div class="h3">Proton plan</div>
<div class="h3 my-3">Starts at $11.99 / month</div>
<div class="h3 my-3">Starts at $12.99 / month</div>
<div class="text-center mt-4 mb-6">
<a class="btn btn-lg btn-outline-primary w-100"
role="button"
@ -225,10 +241,6 @@
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
500 GB storage
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
15 email addresses
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
Unlimited folders, labels, and filters
@ -239,11 +251,7 @@
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
15 email addresses
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
20 Calendars
25 calendars
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
@ -376,10 +384,6 @@
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
500 GB storage
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
15 email addresses/aliases
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
Unlimited folders, labels, and filters
@ -390,11 +394,7 @@
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
15 email addresses/aliases
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
20 Calendars
25 calendars
</li>
<li class="d-flex">
<i class="fe fe-check text-success mr-2 mt-1" aria-hidden="true"></i>
@ -478,7 +478,7 @@
</a>, which currently supports Bitcoin, Bitcoin Cash, DAI, ApeCoin, Dogecoin, Ethereum, Litecoin, SHIBA INU, Tether and USD Coin.
</p>
<p>
In the future, we are going to support Monero as well. In the meantime, please send us an email at <a href="mailto:support@simplelogin.zendesk.com">support@simplelogin.zendesk.com</a> if you want to use this cryptocurrency.
In the future, we are going to support Monero as well. In the meantime, please send us an email at <a href="mailto:support@simplelogin.zendesk.com" target="_blank">support@simplelogin.zendesk.com</a> if you want to use this cryptocurrency.
</p>
<div class="d-flex justify-content-center">
<a class="btn btn-outline-primary text-center"
@ -645,7 +645,7 @@
</li>
</ul>
<p>
Please send us an email at <a href="mailto:support@simplelogin.zendesk.com">support@simplelogin.zendesk.com</a> for more info.
Please send us an email at <a href="mailto:support@simplelogin.zendesk.com" target="_blank">support@simplelogin.zendesk.com</a> for more info.
</p>
<p>
We used to offer free premium accounts for students but this program ended at June 17 2021. Please note this doesn't affect existing accounts who have already benefited from the program or requests sent before this date.
@ -708,7 +708,7 @@
data-parent="#pricing-faq">
<div class="card-body">
<p>
No we don't have a family plan but offer 30% reduction for additional subscriptions. Please contact us at <a href="mailto:support@simplelogin.zendesk.com">support@simplelogin.zendesk.com</a> for more information.
No we don't have a family plan but offer 30% reduction for additional subscriptions. Please contact us at <a href="mailto:support@simplelogin.zendesk.com" target="_blank">support@simplelogin.zendesk.com</a> for more information.
</p>
</div>
</div>
@ -22,7 +22,7 @@
For every user who <b>upgrades</b> and stays with us at least 3 months, you'll get $5 :).
<br />
The payout can be initiated any time, just send us an email at
<a href="mailto:hi@simplelogin.io">hi@simplelogin.io</a>
<a href="mailto:hi@simplelogin.io" target="_blank">hi@simplelogin.io</a>
when you want to receive the payout.
</div>
{% if referrals|length == 0 %}
@ -9,7 +9,7 @@
<h1 class="h3">Block alias</h1>
<p>
You are about to block the alias
<a href="mailto:{{ alias }}">{{ alias }}</a>
<a href="mailto:{{ alias }}" target="_blank">{{ alias }}</a>
</p>
<p>After this, you will stop receiving all emails sent to this alias, please confirm.</p>
<form method="post">
@ -61,7 +61,7 @@
<img src="{{ user_info[scope.value] }}" class="avatar">
{% elif scope == Scope.EMAIL %}
{{ scope.value }}:
<a href="mailto:{{ user_info[scope.value] }}">{{ user_info[scope.value] }}</a>
<a href="mailto:{{ user_info[scope.value] }}" target="_blank">{{ user_info[scope.value] }}</a>
{% elif scope == Scope.NAME %}
{{ scope.value }}: <b>{{ user_info[scope.value] }}</b>
{% endif %}
@ -58,7 +58,7 @@ def test_different_scenarios_v4_2(flask_client):
assert r.json["suffixes"]
assert r.json["prefix_suggestion"] == ""  # no hostname => no suggestion

for (suffix, signed_suffix) in r.json["suffixes"]:
for suffix, signed_suffix in r.json["suffixes"]:
assert signed_suffix.startswith(suffix)

# <<< with hostname >>>
@ -1,4 +1,4 @@
from app.dashboard.views import alias_transfer
import app.alias_utils
from app.db import Session
from app.models import (
Alias,
@ -29,7 +29,7 @@ def test_alias_transfer(flask_client):
user_id=new_user.id, email="hey2@example.com", verified=True, commit=True
)

alias_transfer.transfer(alias, new_user, new_user.mailboxes())
app.alias_utils.transfer_alias(alias, new_user, new_user.mailboxes())

# refresh from db
alias = Alias.get(alias.id)
@ -56,13 +56,15 @@ def test_get_jobs_to_run(flask_client):
run_at=now.shift(hours=3),
)
# Job out of attempts
Job.create(
name="",
payload="",
state=JobState.taken.value,
taken_at=now.shift(minutes=-(config.JOB_TAKEN_RETRY_WAIT_MINS + 10)),
attempts=config.JOB_MAX_ATTEMPTS + 1,
),
(
Job.create(
name="",
payload="",
state=JobState.taken.value,
taken_at=now.shift(minutes=-(config.JOB_TAKEN_RETRY_WAIT_MINS + 10)),
attempts=config.JOB_MAX_ATTEMPTS + 1,
),
)
Session.commit()
jobs = get_jobs_to_run()
assert len(jobs) == len(expected_jobs_to_run)
0
app/tests/monitor/__init__.py
Normal file
350
app/tests/monitor/test_upcloud_get_metric.py
Normal file
@ -0,0 +1,350 @@
from monitor.upcloud import get_metric, get_metrics
from monitor.metric import UpcloudMetrics, UpcloudMetric, UpcloudRecord

import json

MOCK_RESPONSE = """
{
"cpu_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
["2022-01-21T13:11:30Z", 2.61619694060839, 3.1358378052207883],
["2022-01-21T13:12:00Z", 3.275132296130991, 4.196249043309251]
]
},
"hints": { "title": "CPU usage %" }
},
"disk_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 5.654416415900109, 5.58959125727556],
["2022-01-21T13:11:00Z", 5.654416415900109, 5.58959125727556],
["2022-01-21T13:11:30Z", 5.654416415900109, 5.58959125727556]
]
},
"hints": { "title": "Disk space usage %" }
},
"diskio_reads": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 0, 0],
["2022-01-21T13:11:00Z", 0, 0],
["2022-01-21T13:11:30Z", 0, 0]
]
},
"hints": { "title": "Disk iops (reads)" }
},
"diskio_writes": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 3, 2],
["2022-01-21T13:11:00Z", 2, 3],
["2022-01-21T13:11:30Z", 4, 3]
]
},
"hints": { "title": "Disk iops (writes)" }
},
"load_average": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 0.11, 0.11],
["2022-01-21T13:11:00Z", 0.14, 0.1],
["2022-01-21T13:11:30Z", 0.14, 0.09]
]
},
"hints": { "title": "Load average (5 min)" }
},
"mem_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 11.491766148261078, 12.318932883261219],
["2022-01-21T13:11:00Z", 11.511967645759277, 12.304403727425075],
["2022-01-21T13:11:30Z", 11.488581675749048, 12.272260458006759]
]
},
"hints": { "title": "Memory usage %" }
},
"net_receive": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 442, 470],
["2022-01-21T13:11:00Z", 439, 384],
["2022-01-21T13:11:30Z", 466, 458]
]
},
"hints": { "title": "Network receive (bytes/s)" }
},
"net_send": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 672, 581],
["2022-01-21T13:11:00Z", 660, 555],
["2022-01-21T13:11:30Z", 694, 573]
]
},
"hints": { "title": "Network transmit (bytes/s)" }
}
}
"""

def test_get_metrics():
response = json.loads(MOCK_RESPONSE)
metrics = get_metrics(response)
assert metrics == UpcloudMetrics(
metrics=[
UpcloudMetric(
metric_name="cpu_usage",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:12:00Z",
value=3.275132296130991,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:12:00Z",
value=4.196249043309251,
),
],
),
UpcloudMetric(
metric_name="disk_usage",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=5.654416415900109,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=5.58959125727556,
),
],
),
UpcloudMetric(
metric_name="diskio_reads",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=0,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=0,
),
],
),
UpcloudMetric(
metric_name="diskio_writes",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=4,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=3,
),
],
),
UpcloudMetric(
metric_name="load_average",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=0.14,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=0.09,
),
],
),
UpcloudMetric(
metric_name="mem_usage",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=11.488581675749048,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=12.272260458006759,
),
],
),
UpcloudMetric(
metric_name="net_receive",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=466,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=458,
),
],
),
UpcloudMetric(
metric_name="net_send",
records=[
UpcloudRecord(
db_role="master",
label="test-1 " "(master)",
time="2022-01-21T13:11:30Z",
value=694,
),
UpcloudRecord(
db_role="standby",
label="test-2 " "(standby)",
time="2022-01-21T13:11:30Z",
value=573,
),
],
),
]
)

def test_get_metric():
response = json.loads(MOCK_RESPONSE)
metric_name = "cpu_usage"
metric = get_metric(response, metric_name)

assert metric.metric_name == metric_name
assert len(metric.records) == 2
assert metric.records[0].label == "test-1 (master)"
assert metric.records[0].time == "2022-01-21T13:12:00Z"
assert metric.records[0].value == 3.275132296130991

assert metric.records[1].label == "test-2 (standby)"
assert metric.records[1].time == "2022-01-21T13:12:00Z"
assert metric.records[1].value == 4.196249043309251

def test_get_metric_with_none_value():
response_str = """
{
"cpu_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
["2022-01-21T13:11:30Z", null, 3.1358378052207883],
["2022-01-21T13:12:00Z", 3.275132296130991, null]
]
},
"hints": { "title": "CPU usage %" }
}
}
"""
response = json.loads(response_str)
metric = get_metric(response, "cpu_usage")

assert metric.records[0].label == "test-1 (master)"
assert metric.records[0].value == 3.275132296130991
assert metric.records[1].label == "test-2 (standby)"
assert metric.records[1].value == 3.1358378052207883

def test_get_metric_with_none_value_in_last_two_positions():
response_str = """
{
"cpu_usage": {
"data": {
"cols": [
{ "label": "time", "type": "date" },
{ "label": "test-1 (master)", "type": "number" },
{ "label": "test-2 (standby)", "type": "number" }
],
"rows": [
["2022-01-21T13:10:30Z", 2.744682398273781, 3.054323473090861],
["2022-01-21T13:11:00Z", 3.0735645433218366, 2.972423595745795],
["2022-01-21T13:11:30Z", null, null],
["2022-01-21T13:12:00Z", 3.275132296130991, null]
]
},
"hints": { "title": "CPU usage %" }
}
}
"""
response = json.loads(response_str)
metric = get_metric(response, "cpu_usage")

assert len(metric.records) == 1
assert metric.records[0].label == "test-1 (master)"
assert metric.records[0].value == 3.275132296130991
@ -1,5 +1,7 @@
import pytest
from http import HTTPStatus

from app.errors import ProtonAccountNotVerified
from app.proton import proton_client

@ -19,3 +21,30 @@ def test_convert_access_token_not_containing_invalid_length():
for case in cases:
with pytest.raises(Exception):
proton_client.convert_access_token(case)

def test_handle_response_not_ok_account_not_verified():
res = proton_client.handle_response_not_ok(
status=HTTPStatus.UNPROCESSABLE_ENTITY,
body={"Code": proton_client.PROTON_ERROR_CODE_HV_NEEDED},
text="",
)
assert isinstance(res, ProtonAccountNotVerified)

def test_handle_response_unprocessable_entity_not_account_not_verified():
error_text = "some error text"
res = proton_client.handle_response_not_ok(
status=HTTPStatus.UNPROCESSABLE_ENTITY, body={"Code": 4567}, text=error_text
)
assert error_text in res.args[0]

def test_handle_response_not_ok_unknown_error():
error_text = "some error text"
res = proton_client.handle_response_not_ok(
status=123,
body={"Code": proton_client.PROTON_ERROR_CODE_HV_NEEDED},
text=error_text,
)
assert error_text in res.args[0]
@ -18,7 +18,7 @@ from app.db import Session
from app.errors import AccountAlreadyLinkedToAnotherPartnerException
from app.models import Partner, PartnerUser, User
from app.proton.utils import get_proton_partner
from app.utils import random_string
from app.utils import random_string, canonicalize_email
from tests.utils import random_email

@ -377,3 +377,48 @@ def test_link_account_with_uppercase(flask_client):
)
assert partner_user.partner_id == get_proton_partner().id
assert partner_user.external_user_id == partner_user_id

def test_login_to_account_with_canonical_email(flask_client):
email = "a.{rand}@gmail.com".format(rand=random_string(10))
canonical_email = canonicalize_email(email)
assert email != canonical_email
partner_user_id = random_string()
link_request = random_link_request(
external_user_id=partner_user_id, email=email.upper()
)
user = create_user(canonical_email)
assert user.email == canonical_email
res = process_login_case(link_request, get_proton_partner())
assert res.user.id == user.id

def test_login_to_account_with_canonical_email_if_there_is_also_non_canonical(
flask_client,
):
email = "a.{rand}@gmail.com".format(rand=random_string(10))
canonical_email = canonicalize_email(email)
assert email != canonical_email
partner_user_id = random_string()
link_request = random_link_request(
external_user_id=partner_user_id, email=email.upper()
)
user = create_user(canonical_email)
create_user(email)
assert user.email == canonical_email
res = process_login_case(link_request, get_proton_partner())
assert res.user.id == user.id

def test_login_creates_account_with_canonical_email(
flask_client,
):
email = "a.{rand}@gmail.com".format(rand=random_string(10))
canonical_email = canonicalize_email(email)
assert email != canonical_email
partner_user_id = random_string()
link_request = random_link_request(
external_user_id=partner_user_id, email=email.upper()
)
res = process_login_case(link_request, get_proton_partner())
assert res.user.email == canonical_email
@ -1,18 +1,17 @@
import arrow

from app.models import CoinbaseSubscription, ApiToCookieToken, ApiKey
from cron import notify_manual_sub_end, delete_expired_tokens
import cron
from app.db import Session
from app.models import CoinbaseSubscription, ApiToCookieToken, ApiKey, User
from tests.utils import create_new_user

def test_notify_manual_sub_end(flask_client):
user = create_new_user()

CoinbaseSubscription.create(
user_id=user.id, end_at=arrow.now().shift(days=13, hours=2), commit=True
)

notify_manual_sub_end()
cron.notify_manual_sub_end()

def test_cleanup_tokens(flask_client):
@ -33,6 +32,22 @@ def test_cleanup_tokens(flask_client):
api_key_id=api_key.id,
commit=True,
).id
delete_expired_tokens()
cron.delete_expired_tokens()
assert ApiToCookieToken.get(id_to_clean) is None
assert ApiToCookieToken.get(id_to_keep) is not None

def test_cleanup_users():
u_delete_none_id = create_new_user().id
u_delete_after = create_new_user()
u_delete_after_id = u_delete_after.id
u_delete_before = create_new_user()
u_delete_before_id = u_delete_before.id
now = arrow.now()
u_delete_after.delete_on = now.shift(minutes=1)
u_delete_before.delete_on = now.shift(minutes=-1)
Session.flush()
cron.clear_users_scheduled_to_be_deleted()
assert User.get(u_delete_none_id) is not None
assert User.get(u_delete_after_id) is not None
assert User.get(u_delete_before_id) is None
@ -199,3 +199,31 @@ def test_get_free_partner_and_hidden_default_domain():
assert [d.domain for d in domains] == user.available_sl_domains(
alias_options=options
)

def test_get_free_partner_and_premium_partner():
user = create_new_user()
user.trial_end = None
PartnerUser.create(
partner_id=get_proton_partner().id,
user_id=user.id,
external_user_id=random_token(10),
flush=True,
)
user.default_alias_public_domain_id = (
SLDomain.filter_by(hidden=False, premium_only=False).first().id
)
Session.flush()
options = AliasOptions(
show_sl_domains=False,
show_partner_domains=get_proton_partner(),
show_partner_premium=True,
)
domains = user.get_sl_domains(alias_options=options)
assert len(domains) == 3
assert domains[0].domain == "premium_partner"
assert domains[1].domain == "free_partner"
assert domains[2].domain == "free_non_partner"
assert [d.domain for d in domains] == user.available_sl_domains(
alias_options=options
)
@ -7,7 +7,7 @@ import arrow
import pytest

from app import config
from app.config import MAX_ALERT_24H, EMAIL_DOMAIN, ROOT_DIR
from app.config import MAX_ALERT_24H, ROOT_DIR
from app.db import Session
from app.email_utils import (
get_email_domain_part,
@ -16,13 +16,10 @@ from app.email_utils import (
delete_header,
add_or_replace_header,
send_email_with_rate_control,
copy,
get_spam_from_header,
get_header_from_bounce,
is_valid_email,
add_header,
generate_reply_email,
normalize_reply_email,
get_encoding,
encode_text,
EmailEncoding,
@ -41,6 +38,7 @@ from app.email_utils import (
get_verp_info_from_email,
sl_formataddr,
)
from app.email_validation import is_valid_email, normalize_reply_email
from app.models import (
CustomDomain,
Alias,
Some files were not shown because too many files have changed in this diff.